index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
6,337
|
sbxg/sbxg
|
refs/heads/master
|
/sbxg/__main__.py
|
# Copyright (c) 2017 Jean Guyomarc'h
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import argparse
import os
import shutil
import sys
import sbxg
from sbxg.utils import ANSI_STYLE
from sbxg import error as E
def show_library(board_dirs, lib_dirs):
    """
    Print the contents of the board and library search paths to stdout.

    Boards (with their variants) are listed first, then the available
    sources, then the available configurations.
    """
    # Boards are the directories residing directly under each board dir.
    print("List of available boards (with variants):")
    for board_dir in board_dirs:
        for board in os.listdir(board_dir):
            print("  - {}{}{}".format(
                ANSI_STYLE['okblue'], board, ANSI_STYLE['endc']
            ), end='')
            # Variants are the .yml files within the board directory,
            # excluding board.yml (the default configuration).
            entries = os.listdir(os.path.join(board_dir, board))
            variants = [
                os.path.splitext(entry)[0]
                for entry in entries
                if entry.endswith(".yml") and entry != "board.yml"
            ]
            if variants:
                print(' (', end='')
                for variant in variants:
                    print(" {}{}{}".format(ANSI_STYLE['okgreen'], variant,
                                           ANSI_STYLE['endc']), end='')
                print(' )')
            else:
                print("")

    # Sources live under <lib_dir>/sources/<type>/<name>.yml.
    print("\nList of sources:")
    for lib_dir in lib_dirs:
        for root, _, files in os.walk(os.path.join(lib_dir, "sources")):
            for entry in files:
                # We search for .yml files only
                if not entry.endswith('.yml'):
                    continue
                print("  - {}{}{}: {}{}{}".format(
                    ANSI_STYLE['okblue'], os.path.basename(root),
                    ANSI_STYLE['endc'],
                    ANSI_STYLE['okgreen'], os.path.splitext(entry)[0],
                    ANSI_STYLE['endc'],
                ))

    # Configurations live under <lib_dir>/configs/<type>/.
    print("\nList of configurations:")
    for lib_dir in lib_dirs:
        for root, _, files in os.walk(os.path.join(lib_dir, "configs")):
            for entry in files:
                if not os.path.isfile(os.path.join(root, entry)):
                    continue
                kind = os.path.basename(root)
                # Remove the "s" to "bootscripts" for pretty print
                if kind == "bootscripts":
                    kind = "bootscript"
                # Remove the .j2 extensions
                if entry.endswith(".j2"):
                    entry = os.path.splitext(entry)[0]
                print("  - {}{}{}: {}{}{}".format(
                    ANSI_STYLE['okblue'], kind, ANSI_STYLE['endc'],
                    ANSI_STYLE['okgreen'], entry, ANSI_STYLE['endc'],
                ))
def install_rootfs(rootfs, dest):
    """Copy the rootfs image *rootfs* into *dest*.

    Raises E.SbxgError when the rootfs file does not exist.
    """
    # Guard clause: fail early when the image is missing.
    if not os.path.isfile(rootfs):
        raise E.SbxgError("Rootfs file '{}' does not exist".format(rootfs))
    shutil.copy(rootfs, dest)
def getopts(argv):
    """
    Parse and validate the command-line arguments.

    Args:
        argv: the full argument vector (program name included).

    Returns:
        The argparse namespace holding the parsed options.

    Raises:
        E.SbxgError: when options are missing or mutually exclusive.
    """
    # BUG FIX: "Boostrapper" -> "Bootstrapper" in the program description.
    parser = argparse.ArgumentParser(description='SBXG Bootstrapper')
    parser.add_argument(
        '--subcomponent', type=str, default='subcomponent',
        help='Provide the path to the subcomponent program'
    )
    parser.add_argument(
        '--no-download', '-n', action='store_true',
        help="Don't make subcomponent download the components"
    )
    parser.add_argument(
        '--no-color', action='store_true',
        help='Disable colored output when this option is specified'
    )
    parser.add_argument(
        '--kernel', nargs=2, metavar='FILE',
        help="""specifying this option makes SBXG to only build a kernel.
        Xen does not fall under this category.
        A toolchain must be specified"""
    )
    parser.add_argument(
        '--xen', nargs=2, metavar='FILE',
        help="""specifying this option makes SBXG to only build a Xen kernel.
        A toolchain must be specified"""
    )
    parser.add_argument(
        '--uboot', nargs=2, metavar='FILE',
        help="""specifying this option makes SBXG to only build U-Boot.
        A toolchain must be specified"""
    )
    parser.add_argument(
        '--toolchain', type=str, metavar='TOOLCHAIN',
        help="""specify a toolchain to be used outside of the board execution.
        This option must be specified when building a component on demand."""
    )
    parser.add_argument(
        '--board', '-B', type=str,
        help="""Name of an SBXG board that reside within a directory specified
        by the --board-dir arguments"""
    )
    parser.add_argument(
        '--board-variant', '-b', type=str,
        help="""Name of a variant configuration for a selected board. If none
        is provided, a default configuration will be used"""
    )
    parser.add_argument(
        '--board-dir', nargs='+',
        help="""Add a directory to the boards search path. When this argument
        is not specified, the boards/ directory of SBXG will be used"""
    )
    parser.add_argument(
        '--lib-dir', '-L', nargs='+',
        help="""Add a directory to the library search path. When this argument
        is not specified, the lib/ directory of SBXG will be used"""
    )
    parser.add_argument(
        '--show-library', action='store_true',
        help="Prints in stdout the library of available components and exits"
    )
    args = parser.parse_args(argv[1:])

    # If we required no colors to be printed out, unset the ANSI codes that
    # were provided.
    if args.no_color:
        for key in ANSI_STYLE:
            ANSI_STYLE[key] = ''

    # A board build needs a toolchain to compile with.
    if args.board and not args.toolchain:
        raise E.SbxgError("--board requires the use of --toolchain")
    # If --board-variant is used, --board must have been specified.
    if args.board_variant and not args.board:
        raise E.SbxgError("--board-variant cannot be used without --board")
    # Standalone component builds are mutually exclusive with a full board
    # build, and each of them needs a toolchain as well.
    for option in ('kernel', 'xen', 'uboot'):
        if getattr(args, option) and args.board:
            raise E.SbxgError("--{} and --board cannot be used together"
                              .format(option))
    for option in ('kernel', 'uboot', 'xen'):
        if getattr(args, option) and not args.toolchain:
            raise E.SbxgError("--{} requires the use of --toolchain"
                              .format(option))
    # BUG FIX: --show-library also satisfies this requirement, so it is now
    # mentioned in the error message (grammar fixed as well).
    if not (args.board or args.kernel or args.uboot or args.xen
            or args.show_library):
        raise E.SbxgError("At least one of the following options is expected: "
                          "--board, --kernel, --uboot, --xen, --show-library")
    return args
def main(argv):
    """
    SBXG bootstrap entry point.

    Parses the command line, builds the data model from the YAML
    configurations, fetches the required components and templates the
    build system (Makefile, boot script, genimage configuration).

    Args:
        argv: the full argument vector (program name included).

    Raises:
        E.SbxgError: on invalid options or configurations.
    """
    args = getopts(argv)
    # The top source directory is where this script resides, whereas the build
    # directory is where this script was called from.
    top_src_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
    top_build_dir = os.getcwd()
    # The default board directory search path is boards/
    if not args.board_dir:
        args.board_dir = [os.path.join(top_src_dir, "boards")]
    # The default lib directory search path is lib/
    if not args.lib_dir:
        args.lib_dir = [os.path.join(top_src_dir, "lib")]
    # Dump the library, and exit with success
    if args.show_library:
        show_library(args.board_dir, args.lib_dir)
        sys.exit(0)
    # Initialize the templates directory to the one contained within SBXG
    template_dirs = [os.path.join(top_src_dir, "templates")]
    components = []
    # Refuse to use the source directory as the build directory.
    # BUG FIX: an unreachable sys.exit(1) that followed this raise has been
    # removed (control never returns after a raise).
    if os.path.normpath(top_src_dir) == os.path.normpath(top_build_dir):
        raise E.SbxgError("Run bootstrap.py from a build directory that is "
                          "distinct from the source directory.")
    # The lib dirs provide a template path. We must add them!
    for lib_dir in args.lib_dir:
        bootscript_path = os.path.join(lib_dir, "configs", "bootscripts")
        if os.path.exists(bootscript_path):
            template_dirs.append(bootscript_path)
    # The main database that will hold our configuration
    database = sbxg.model.Database(top_src_dir, top_build_dir)
    if args.toolchain:
        args.toolchain = sbxg.utils.get_toolchain(
            args.lib_dir, args.toolchain
        )
        # A "local" toolchain is the one already installed on the host: it
        # is neither fetched nor registered as a component to download.
        local_toolchain = os.path.basename(args.toolchain) == "local.yml"
        toolchain = sbxg.model.Toolchain(args.toolchain, local_toolchain)
        toolchain.load()
        database.set_toolchain(toolchain)
        if not local_toolchain:
            components.append('toolchain')
    if args.board:
        # Select the configuration file for the previously selected board. It
        # is either 'board.yml' for the default configuration, or another yaml
        # file if a variant is provided. Fail if the configuration file does
        # not exist.
        config, board_dir = sbxg.utils.get_board_config(
            args.board_dir,
            args.board,
            args.board_variant if args.board_variant else 'board'
        )
        board = sbxg.model.Board(config, toolchain)
        board.load(args.lib_dir, board_dir)
        database.set_board(board)
        # A board build always requires a kernel and a genimage run.
        components.extend(['kernel', 'genimage'])
        # Virtual machine boards need neither U-Boot nor its templates.
        if not board.vm:
            components.append('uboot')
            template_dirs.append(os.path.join(board_dir, 'images'))
        if board.xen:
            components.append('xen')
    if args.kernel:
        # args.kernel holds [source, config]; resolve both in the lib path.
        args.kernel[0] = sbxg.utils.get_kernel_source(
            args.lib_dir, args.kernel[0]
        )
        args.kernel[1] = sbxg.utils.get_kernel_config(
            args.lib_dir, args.kernel[1]
        )
        kernel_source = sbxg.model.Kernel(args.kernel[0])
        kernel_source.load()
        kernel_config = args.kernel[1]
        database.set_kernel(kernel_source, kernel_config)
        components.append('kernel')
    if args.uboot:
        # args.uboot holds [source, config]; resolve both in the lib path.
        args.uboot[0] = sbxg.utils.get_uboot_source(
            args.lib_dir, args.uboot[0]
        )
        args.uboot[1] = sbxg.utils.get_uboot_config(
            args.lib_dir, args.uboot[1]
        )
        uboot_source = sbxg.model.Uboot(args.uboot[0])
        uboot_source.load()
        uboot_config = args.uboot[1]
        database.set_uboot(uboot_source, uboot_config)
        components.append('uboot')
    if args.xen:
        # args.xen holds [source, config]; resolve both in the lib path.
        args.xen[0] = sbxg.utils.get_xen_source(
            args.lib_dir, args.xen[0]
        )
        args.xen[1] = sbxg.utils.get_xen_config(
            args.lib_dir, args.xen[1]
        )
        xen_source = sbxg.model.Xen(args.xen[0])
        xen_source.load()
        xen_config = args.xen[1]
        database.set_xen(xen_source, xen_config)
        components.append('xen')
    # Now that we are done collecting the data from the configurations, and we
    # have fed our data model, initialize the templating engine.
    templater = sbxg.template.Templater(database, template_dirs)
    # Fetch the required components
    subcomponent = sbxg.subcomponent.Subcomponent(templater, args.subcomponent)
    subcomponent.add_components(components)
    subcomponent.call(top_build_dir, no_download=args.no_download)
    # If we are to use genimage, create right now the directories that genimage
    # will need.
    if database.genimage:
        keys = ['build_dir', 'output_path', 'input_path', 'root_path', 'tmp_path']
        for key in keys:
            gen_dir = database.genimage[key]
            if not os.path.exists(gen_dir):
                os.makedirs(gen_dir)
        # Copy the rootfs to the input path of genimage.
        genimage_in = database.genimage['input_path']
        if database.board:
            install_rootfs(database.board.rootfs, genimage_in)
    if database.board:
        # Generate the boot script from a template, if one was specified. This
        # generated bootscript will just be a templated file. When dealing with
        # U-Boot bootscript, the generated makefile will create the final
        # boot script with tools like mkimage.
        boot_cmd = os.path.join(
            top_build_dir,
            database.board.templated_boot_script_name
        )
        if not database.board.vm:
            templater.template_file(database.board.boot_script, boot_cmd)
        # And finally generate the genimage configuration
        templater.template_file(
            os.path.basename(database.board.image),
            database.genimage['config']
        )
    # Generate the makefile, which will control the build system
    templater.template_file(
        "Makefile.j2", os.path.join(top_build_dir, "Makefile")
    )


if __name__ == "__main__":
    main(sys.argv)
|
{"/sbxg/__main__.py": ["/sbxg/utils.py"]}
|
6,338
|
sbxg/sbxg
|
refs/heads/master
|
/tests/test_boards.py
|
# Copyright (c) 2017 Jean Guyomarc'h
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os
import pytest
import subprocess
import sys
# End-to-end test: bootstrap and build the cubietruck board, once for the
# default configuration ("board") and once for the Xen variant.
@pytest.mark.parametrize("variant", ["xen", "board"])
def test_cubietruck(env, variant):
    """Build the cubietruck board with several variants"""
    env.bootstrap_board("cubietruck", "armv7-eabihf", variant)
    # Build with two parallel make jobs to keep the test reasonably fast.
    env.run_make("-j2")
# End-to-end test: bootstrap and build the virtual (VM) board with its
# vexpress-v7 variant.
@pytest.mark.parametrize("variant", ["vexpress-v7"])
def test_virtual(env, variant):
    """Build the virtual board with several variants"""
    env.bootstrap_board("virtual", "armv7-eabihf", variant)
    env.run_make()
# End-to-end test: bootstrap and build the orangepi-zero board with its
# default ("board") configuration.
@pytest.mark.parametrize("variant", ["board"])
def test_orangepi_zero(env, variant):
    """Build the orange-pi board with several variants"""
    env.bootstrap_board("orangepi-zero", "armv7-eabihf", variant)
    # Build with two parallel make jobs to keep the test reasonably fast.
    env.run_make("-j2")
|
{"/sbxg/__main__.py": ["/sbxg/utils.py"]}
|
6,339
|
sbxg/sbxg
|
refs/heads/master
|
/sbxg/model.py
|
# Copyright (c) 2017 Jean Guyomarc'h
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import abc
import collections
import collections.abc
import os

import yaml

from . import error as E
from . import utils
class Model(object):
    """
    Base class for all SBXG data models backed by a YAML configuration.

    Subclasses must provide config_file() (path to the YAML file) and
    namespace() (prefix used in error reporting), and implement load().
    Note: abstractness is not enforced (no ABCMeta metaclass); the
    decorators are documentation of intent.
    """

    def __getitem__(self, attr):
        """Dictionary-style access, so models can be fed to Jinja templates."""
        return getattr(self, attr)

    @abc.abstractproperty
    def config_file(self):
        """Return the path to the YAML configuration backing this model."""
        pass

    @abc.abstractproperty
    def namespace(self):
        """Return the prefix used when reporting properties in errors."""
        pass

    def _yaml_load(self):
        """
        Load the configuration from the file provided through the virtual
        property config_file().

        Returns a dictionary parsed from the YAML contents.
        """
        with open(self.config_file(), 'r') as stream:
            config_contents = stream.read()
        # BUG FIX: yaml.load() without an explicit Loader is deprecated and
        # allows arbitrary object construction. SBXG configurations only use
        # plain YAML types, so safe_load is sufficient (and safe).
        return yaml.safe_load(config_contents)

    def check_mandatory(self, element, obj=None):
        """Return attribute *element* of *obj* (default: self); raise
        MissingRequiredData when it is falsy."""
        obj = self if obj is None else obj
        attr = getattr(obj, element)
        if not attr:
            raise E.MissingRequiredData(self.config_file(),
                                        self.namespace() + element)
        return attr

    def check_mandatory_file(self, element, obj=None):
        """Like check_mandatory(), but additionally require the attribute to
        name an existing file. Returns None."""
        obj = self if obj is None else obj
        attr = self.check_mandatory(element, obj)
        if not os.path.isfile(attr):
            raise E.InvalidFileData(self.config_file(),
                                    self.namespace() + element,
                                    attr)

    def check_optional_list(self, element, obj=None):
        """Raise NotAList when attribute *element* is set but not a list."""
        obj = self if obj is None else obj
        attr = getattr(obj, element)
        if attr and not isinstance(attr, list):
            raise E.NotAList(self.config_file(),
                             self.namespace() + element)

    def _get_source(self, source, db, key, lib_dirs):
        """Resolve db[key] to <lib_dir>/sources/<source>/<name>.yml within
        the library search path; raise InvalidFileData when not found."""
        name = self.get_mandatory(db, key)
        for lib_dir in lib_dirs:
            search = os.path.join(lib_dir, "sources", source, name + ".yml")
            if os.path.isfile(search):
                return search
        raise E.InvalidFileData(self.config_file(),
                                self.namespace() + key,
                                name + ".yml")

    def _get_config(self, config, db, key, lib_dirs):
        """Resolve db[key] to <lib_dir>/configs/<config>/<name> within the
        library search path; raise InvalidFileData when not found."""
        name = self.get_mandatory(db, key)
        config_path = os.path.join("configs", config, name)
        for lib_dir in lib_dirs:
            search = os.path.join(lib_dir, config_path)
            if os.path.isfile(search):
                return search
        raise E.InvalidFileData(self.config_file(),
                                self.namespace() + key,
                                config_path)

    def get_mandatory(self, db, attribute):
        """Return db[attribute]; raise MissingRequiredData when absent."""
        if attribute in db:
            return db[attribute]
        raise E.MissingRequiredData(self.config_file(),
                                    self.namespace() + attribute)

    def get_toolchain_source(self, db, key, lib_dirs):
        """Resolve and load the toolchain source named by db[key]."""
        config_file = self._get_source("toolchain", db, key, lib_dirs)
        toolchain = Toolchain(config_file)
        toolchain.load()
        return toolchain

    def get_kernel_source(self, db, key, lib_dirs, suffix=""):
        """Resolve and load the kernel source named by db[key]."""
        config_file = self._get_source("kernel", db, key, lib_dirs)
        kernel = Kernel(config_file, suffix)
        kernel.load()
        return kernel

    def get_uboot_source(self, db, key, lib_dirs):
        """Resolve and load the U-Boot source named by db[key]."""
        config = self._get_source("uboot", db, key, lib_dirs)
        uboot = Uboot(config)
        uboot.load()
        return uboot

    def get_xen_source(self, db, key, lib_dirs):
        """Resolve and load the Xen source named by db[key]."""
        config = self._get_source("xen", db, key, lib_dirs)
        xen = Xen(config)
        xen.load()
        return xen

    def get_kernel_config(self, db, key, lib_dirs):
        """Return the path to the kernel configuration named by db[key]."""
        return self._get_config("kernel", db, key, lib_dirs)

    def get_xen_config(self, db, key, lib_dirs):
        """Return the path to the Xen configuration named by db[key]."""
        return self._get_config("xen", db, key, lib_dirs)

    def get_uboot_config(self, db, key, lib_dirs):
        """Return the path to the U-Boot configuration named by db[key]."""
        return self._get_config("uboot", db, key, lib_dirs)

    def get_bootscript(self, db, key, lib_dirs):
        """Return the path to the boot script named by db[key]."""
        return self._get_config("bootscripts", db, key, lib_dirs)

    def get_genimage_config(self, db, key, board_dir):
        """Return the path to the genimage configuration db[key], searched
        in <board_dir>/images/; raise InvalidFileData when not found."""
        name = self.get_mandatory(db, key)
        search = os.path.join(board_dir, "images", name)
        if os.path.isfile(search):
            return search
        raise E.InvalidFileData(self.config_file(),
                                self.namespace() + key,
                                search)

    def get_rootfs(self, db):
        """Return the rootfs file name; the special value "automatic" maps
        to the generated "rootfs.ext3"."""
        attr = self.get_mandatory(db, "rootfs")
        return "rootfs.ext3" if attr == "automatic" else attr

    def get_boolean(self, db, key):
        """Return db[key], enforcing it is a boolean (NotABoolean otherwise)."""
        attr = self.get_mandatory(db, key)
        if type(attr) is not bool:
            raise E.NotABoolean(self.config_file(),
                                self.namespace() + key)
        return attr

    @abc.abstractmethod
    def load(self, lib_dirs, board_dir, **kwargs):
        """
        Load a data model.

        Returns: the parsed configuration
        """
        pass
class Board(Model):
    """
    Data model for a board configuration (board.yml or a variant file).

    A board aggregates a toolchain, a kernel, optionally U-Boot and Xen,
    plus the image and boot-script information needed by genimage.
    """

    def config_file(self):
        # Path to the YAML file this board was created from.
        return self._config_file

    def namespace(self):
        # Error-reporting prefix; extended once the "board" section is entered.
        return self._namespace

    def __init__(self, config_file, toolchain):
        self._config_file = config_file
        self._namespace = "::"
        self.kernel_bootargs = ""
        self.toolchain = toolchain
        # Sub-models and paths, populated by load().
        self.kernel = None
        self.kernel_config = None
        self.uboot = None
        self.uboot_config = None
        self.boot_script = None
        self.image = None
        self.linux_dtb = None
        self.linux_image = None
        self.linux_bootargs = ""
        self.uboot_image = None
        self.root = None
        # Default boot-script file names; may be overridden by the config.
        self.templated_boot_script_name = "boot.cmd"
        self.output_boot_script_name = "boot.scr"
        self.xen = None
        self.xen_config = None
        self.arch = None
        self.xen_arch = None
        self.rootfs = None
        # True when the board is a virtual machine (no U-Boot, no Xen...).
        self.vm = False

    def _check_vm_parameters(self, db):
        # Parameters that make no sense for a virtual machine board.
        no_vm = (
            "uboot", "uboot_config", "boot_script",
            "kernel_bootargs", "linux_bootargs",
            "linux_dtb", "uboot_image", "root", "output_boot_script_name",
            "xen", "xen_arch", "xen_config",
        )
        if self.vm:
            for attr in no_vm:
                if attr in db:
                    raise E.InvalidVMParameters(attr)

    def load(self, lib_dirs, board_dir):
        """
        Parse the board configuration and populate the model.

        Args:
            lib_dirs: library search path (list of directories).
            board_dir: directory holding this board's files.

        Returns: the parsed configuration dictionary.
        """
        config = self._yaml_load()
        if not "board" in config:
            raise E.MissingRequiredData(self.config_file(), self.namespace() + "board")
        # Open up the board "namespace"
        db = config["board"]
        self._namespace += "board::"
        # Grab the architecture of the board
        self.arch = self.get_mandatory(db, "arch")
        # We can easily check if the provided toolchain is suitable to compile
        # for the board or not. If we use a local toolchain, we will determine
        # the current architecture and then compare it to the board's one.
        expected_arch = utils.get_arch() if self.toolchain.local else self.toolchain.arch
        if self.arch != expected_arch:
            raise E.InvalidToolchain(self.arch, expected_arch)
        if "vm" in db:
            self.vm = self.get_boolean(db, "vm")
        self.image = self.get_genimage_config(db, "image", board_dir)
        self.linux_image = self.get_mandatory(db, "linux_image")
        self.linux_bootargs = db.get("linux_bootargs", "")
        self.kernel = self.get_kernel_source(db, "kernel", lib_dirs)
        self.kernel_config = self.get_kernel_config(db, "kernel_config", lib_dirs)
        self.kernel_bootargs = db.get("kernel_bootargs", "")
        self.rootfs = self.get_rootfs(db)
        self._check_vm_parameters(db)
        # At this point, if we are parsing a VM board, don't go further as the
        # rest of this method parses parameters that are forbidden for VMs.
        if self.vm:
            return config
        self.uboot = self.get_uboot_source(db, "uboot", lib_dirs)
        self.uboot_config = self.get_uboot_config(db, "uboot_config", lib_dirs)
        self.boot_script = os.path.basename(self.get_bootscript(db, "boot_script", lib_dirs))
        self.linux_dtb = self.get_mandatory(db, "linux_dtb")
        self.uboot_image = self.get_mandatory(db, "uboot_image")
        self.root = self.get_mandatory(db, "root")
        if "output_boot_script_name" in db:
            self.output_boot_script_name = db["output_boot_script_name"]
        if "xen" in db:
            self.xen = self.get_xen_source(db, "xen", lib_dirs)
            self.xen_arch = self.get_mandatory(db, "xen_arch")
            self.xen_config = self.get_xen_config(db, "xen_config", lib_dirs)
        return config
class Source(Model):
    """
    Base model for a downloadable source (kernel, U-Boot, Xen, toolchain).
    """

    def config_file(self):
        return self._config_file

    def namespace(self):
        # Sources have their configuration in the 'global namespace'. It means
        # the properties reside at the top level of the dictionary, there is no
        # nesting.
        return "::"

    def __init__(self, in_file, must_fetch=True):
        self._config_file = in_file
        self._subconfig = None
        self._must_fetch = must_fetch  # False for e.g. local toolchains
        self.path = None
        self.url = None
        self.compression = None
        self.pgp_signature = None
        self.pgp_pubkey = None
        self.sha256 = None
        self.suffix = ""
        self.build_dir = None
        self.toolchain = None

    def set_toolchain(self, toolchain):
        """Attach the toolchain used to build this source."""
        self.toolchain = toolchain

    def load(self):
        """
        Parse the source configuration and populate the model.

        Returns: the parsed configuration dictionary.
        """
        config = self._yaml_load()
        # A source that need not be fetched (local toolchain) has no URL.
        if self._must_fetch:
            self.url = self.get_mandatory(config, "url")
        self.path = os.path.abspath(
            self.get_mandatory(config, "path")
        )
        self.sha256 = config.get("sha256")
        # BUG FIX: this was stored in 'self.compressions' (plural), leaving
        # the 'compression' attribute declared in __init__ always None and
        # defeating the check_optional_list() call below.
        # NOTE(review): assumes the YAML key is 'compression', matching
        # __init__ and the check below — confirm against the lib configs.
        self.compression = config.get("compression")
        self.pgp_signature = config.get("pgp_signature")
        self.pgp_pubkey = config.get("pgp_pubkey")
        # Out-of-tree build directory, next to the extracted sources.
        self.build_dir = os.path.join(
            os.path.dirname(self.path),
            "build_" + os.path.basename(self.path) + self.suffix
        )
        self.check_optional_list("compression")
        # PGP verification needs both the signature and the public key.
        if self.pgp_signature:
            self.check_mandatory("pgp_pubkey")
        if self.pgp_pubkey:
            self.check_mandatory("pgp_signature")
        return config
class Xen(Source):
    """Xen hypervisor sources; the generic Source behavior is sufficient."""
    pass
class Kernel(Source):
    """
    The Kernel class handles kernel sources configuration. Unlike the
    other sources, kernel sources require additional parameters, such as
    the type of the kernel, which is deduced from the name of its source
    configuration file, as well as its suffix, which is used to produce
    guests.
    """

    def __init__(self, in_file, suffix=""):
        super().__init__(in_file)
        self.type = None     # e.g. "linux", deduced from the config file name
        self.suffix = suffix
        self.arch = None     # target architecture, set via set_arch()

    def set_arch(self, arch):
        self.arch = arch

    def _known_types(self):
        """
        We only support Linux as kernel for now. Others may work
        flawlessly, but they have not been tested.
        """
        return ['linux']

    def load(self):
        super().load()
        # The kernel type is the leading dash-separated token of the
        # configuration file name (e.g. "linux" for "linux-4.9.yml").
        config_name = os.path.basename(self._config_file)
        self.type = config_name.split('-')[0]
        if self.type not in self._known_types():
            raise E.InvalidKernelType(self._config_file,
                                      self.type,
                                      self._known_types())
class Uboot(Source):
    """U-Boot sources; the generic Source behavior is sufficient."""
    pass
class Toolchain(Source):
    """
    Model for a cross-compilation toolchain.

    A toolchain may be "local" (already installed on the host), in which
    case nothing is fetched and no target architecture is required.
    """

    def __init__(self, config_file, local=False):
        # Local toolchains are already installed: there is nothing to fetch.
        super().__init__(config_file, must_fetch=not local)
        self.prefix = None
        self.local = local
        self.arch = None
        self.xen_arch = None

    def load(self):
        """Parse the toolchain configuration; returns the parsed dict."""
        config = super().load()
        self.prefix = self.get_mandatory(config, "prefix")
        # A cross toolchain must declare the architectures it targets.
        if not self.local:
            self.arch = self.get_mandatory(config, "arch")
            self.xen_arch = self.get_mandatory(config, "xen_arch")
        # Auto-detect the HOST (in the autotools terminology): the
        # cross-compilation target, deduced from the prefix, with any
        # trailing dash stripped.
        host = os.path.basename(self.prefix)
        self.host = host[:-1] if host.endswith('-') else host
        return config
class Database(collections.abc.MutableMapping):
    """
    The Database class holds the SBXG configuration. It is an aggregation of
    data models and can be accessed in the same fashion as a dictionary.
    This allows this class to be passed directly to the jinja templating
    engine flawlessly.

    BUG FIX: the base class is now collections.abc.MutableMapping; the
    collections.MutableMapping alias was deprecated since Python 3.3 and
    removed in Python 3.10.
    """

    def __init__(self, top_source_dir, top_build_dir):
        self.top_source_dir = top_source_dir
        self.top_build_dir = top_build_dir
        self.board = None
        self.genimage = None
        self.kernel = None
        self.uboot = None
        # BUG FIX: 'xen' was assigned by set_xen() but never initialized,
        # so accessing database.xen before set_xen() raised AttributeError.
        self.xen = None
        self.toolchain = None

    def use_genimage(self):
        """Register the genimage paths, all rooted in the build directory."""
        self.genimage = {
            'path': os.path.join(self.top_build_dir, 'genimage_sources'),
            'build_dir': os.path.join(self.top_build_dir, 'build_genimage'),
            'output_path': os.path.join(self.top_build_dir, 'images'),
            'input_path': os.path.join(self.top_build_dir, 'genimage-input'),
            'root_path': os.path.join(self.top_build_dir, '.genimage-root'),
            'tmp_path': os.path.join(self.top_build_dir, '.genimage-tmp'),
            'config': os.path.join(self.top_build_dir, 'genimage.cfg'),
        }

    def set_kernel(self, kernel, kernel_config):
        """Register the kernel model and bind it to the current toolchain."""
        self.kernel = kernel
        self.kernel.config = kernel_config
        if self.toolchain.local:
            # Local toolchain: compile for the host architecture.
            self.kernel.set_arch(utils.get_arch())
        else:
            self.kernel.toolchain = self.toolchain
            self.kernel.set_arch(self.toolchain.arch)

    def set_uboot(self, uboot, uboot_config):
        """Register the U-Boot model and bind it to the current toolchain."""
        self.uboot = uboot
        self.uboot.config = uboot_config
        if not self.toolchain.local:
            self.uboot.toolchain = self.toolchain

    def set_toolchain(self, toolchain):
        self.toolchain = toolchain

    def set_xen(self, xen, xen_config):
        """Register the Xen model and bind it to the current toolchain."""
        self.xen = xen
        self.xen.config = xen_config
        self.xen.host = os.path.basename(self.toolchain.prefix)
        if not self.toolchain.local:
            self.xen.toolchain = self.toolchain

    def set_board(self, board):
        """Register a board model and everything it aggregates."""
        self.board = board
        self.set_toolchain(board.toolchain)
        self.use_genimage()
        self.set_kernel(board.kernel, board.kernel_config)
        if not board.vm:
            self.set_uboot(board.uboot, board.uboot_config)
        if board.xen:
            self.set_xen(board.xen, board.xen_config)

    def __getitem__(self, attr):
        return getattr(self, attr)

    def __setitem__(self, key, value):
        pass  # Immutable, do nothing

    def __delitem__(self, key):
        pass  # Immutable, do nothing

    def __len__(self):
        return len(vars(self))

    def __iter__(self):
        for item in vars(self).keys():
            yield item
|
{"/sbxg/__main__.py": ["/sbxg/utils.py"]}
|
6,340
|
sbxg/sbxg
|
refs/heads/master
|
/tests/conftest.py
|
# Copyright (c) 2017 Jean Guyomarc'h
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os
import subprocess
import sys
import tempfile
import pytest
# Absolute path to the repository root (the parent of this tests/ directory).
TOP_SRC_DIR = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
class TestEnv(object):
    """
    Per-test sandbox: a temporary build directory plus helpers to run the
    SBXG bootstrap and the generated Makefile inside it.
    """

    def __init__(self):
        # Keep a handle on the TemporaryDirectory so the directory lives as
        # long as this object does.
        self._build_dir_handle = tempfile.TemporaryDirectory()
        self.build_dir = self._build_dir_handle.name
        self.rootfs = os.path.join(self.build_dir, "rootfs.ext3")

    def bootstrap_board(self, board, toolchain, variant="board"):
        """Run bootstrap.py for *board*/*variant* with *toolchain*, from
        within the temporary build directory."""
        cmd = [
            sys.executable,
            os.path.join(TOP_SRC_DIR, "bootstrap.py"),
            "--board", board,
            "--board-variant", variant,
            "--toolchain", toolchain,
        ]
        subprocess.check_call(cmd, cwd=self.build_dir)

    def run_make(self, *args):
        """Execute the make command within the build directory, with optional
        arguments to be passed to make.
        """
        subprocess.check_call(["make", *args], cwd=self.build_dir)
@pytest.fixture
def env():
    """Pytest fixture providing a TestEnv with a dummy 1MB ext3 rootfs."""
    testenv = TestEnv()
    # Create a dummy ext3 rootfs of 1MB, just for testing purposes.
    commands = (
        ["dd", "if=/dev/zero", "of={}".format(testenv.rootfs),
         "bs=1M", "count=1"],
        ["sync"],
        ["mkfs.ext3", "-F", testenv.rootfs],
    )
    for command in commands:
        subprocess.check_call(command)
    return testenv
|
{"/sbxg/__main__.py": ["/sbxg/utils.py"]}
|
6,341
|
sbxg/sbxg
|
refs/heads/master
|
/sbxg/error.py
|
# Copyright (c) 2017 Jean Guyomarc'h
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import abc
class SbxgError(Exception):
    """
    Super class, used by exception handlers to filter-out SBXG-related
    exceptions. All SBXG exceptions derive from this one.
    """
    pass
class InvalidToolchain(SbxgError):
    """Raised when the toolchain architecture does not match the board's.

    NOTE(review): the caller in model.py passes (board_arch, expected_arch),
    which does not match these parameter names — confirm the intended order.
    """

    def __init__(self, expected_arch, toolchain_arch):
        self.expected_arch = expected_arch
        self.toolchain_arch = toolchain_arch

    def __str__(self):
        template = "Invalid toolchain architecture '{}'. '{}' was expected"
        return template.format(self.toolchain_arch, self.expected_arch)
class MissingRequiredData(SbxgError):
    """Raised when a mandatory property is absent from a configuration file."""

    def __init__(self, in_file, prop):
        self.in_file = in_file
        self.property = prop

    def __str__(self):
        template = "Missing mandatory property '{}' in '{}'"
        return template.format(self.property, self.in_file)
class InvalidFileData(SbxgError):
    """Raised when a property references a file that cannot be found."""

    def __init__(self, in_file, prop, target):
        self.in_file = in_file
        self.property = prop
        self.target = target

    def __str__(self):
        template = ("Cannot find file '{}' requested by property '{}' "
                    "from file '{}'")
        return template.format(self.target, self.property, self.in_file)
class InvalidKernelType(SbxgError):
    """Raised when a kernel configuration names an unsupported kernel type."""

    def __init__(self, config_file, found_type, expected_types):
        self.config_file = config_file
        self.found_type = found_type
        self.expected_types = expected_types

    def __str__(self):
        template = "Kernel type '{}' deduced from file '{}' is not one of '{}'"
        return template.format(self.found_type, self.config_file,
                               ' '.join(self.expected_types))
class InvalidVMParameters(SbxgError):
    """Raised when a VM board sets a parameter reserved to physical boards."""

    def __init__(self, param):
        self._param = param

    def __str__(self):
        template = "Parameter '{}' is forbidden when 'vm' is set to 'true'"
        return template.format(self._param)
class SbxgTypeError(SbxgError):
    """
    Base class for type mismatch errors on configuration properties.
    Concrete subclasses provide the expected type name via typename().
    """

    @abc.abstractproperty
    def typename(self):
        """Return the human-readable name of the expected type."""
        pass

    def __init__(self, in_file, prop):
        self.in_file = in_file
        self.property = prop

    def __str__(self):
        # BUG FIX: the message used to hard-code "a list" regardless of the
        # subclass (NotAString/NotABoolean printed "expected to be a list");
        # use the subclass-provided type name instead.
        return "Property '{}' in file '{}' is expected to be a {}".format(
            self.property, self.in_file, self.typename()
        )
class NotAList(SbxgTypeError):
def typename(self):
return "list"
class NotAString(SbxgTypeError):
def typename(self):
return "string"
class NotABoolean(SbxgTypeError):
def typename(self):
return "boolean"
|
{"/sbxg/__main__.py": ["/sbxg/utils.py"]}
|
6,342
|
sbxg/sbxg
|
refs/heads/master
|
/bootstrap.py
|
#! /usr/bin/env python3
#
# Copyright (c) 2017 Jean Guyomarc'h
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import sys
import traceback
import sbxg
def error(message):
    """Print *message* on stderr, prefixed with a colored 'error:' tag."""
    tag = "{}error:{}".format(
        sbxg.utils.ANSI_STYLE['fail'],
        sbxg.utils.ANSI_STYLE['endc'],
    )
    print("{} {}".format(tag, message), file=sys.stderr)
# Run the main entry point. All exceptions are caught so that the
# reason of failure can be pretty-printed before exiting.
if __name__ == "__main__":
    try:
        sbxg.runner.main(sys.argv)
    except sbxg.error.SbxgError as exception:
        # SBXG raises its own errors through custom exceptions. They are
        # already well-formatted, and correspond to nominal failures.
        error(exception)
        sys.exit(1)
    except Exception:
        # General exceptions are the ones not planned by SBXG: dump the
        # traceback so the problem can be reported, and exit with a
        # distinct status code.
        error("Unhandled error! Please report the following trace:")
        traceback.print_exc(file=sys.stderr)
        error("Aborting!")
        sys.exit(127)
|
{"/sbxg/__main__.py": ["/sbxg/utils.py"]}
|
6,343
|
sbxg/sbxg
|
refs/heads/master
|
/scripts/gen-rootfs.py
|
#! /usr/bin/env python
#
# Copyright (c) 2017 Jean Guyomarc'h
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import argparse
import subprocess
import sys
def getopts(argv):
    """Parse the command line of the rootfs generator.

    argv[0] is the program name and is skipped.
    """
    ap = argparse.ArgumentParser(description='Rootfs generator')
    ap.add_argument('--mkfs', type=str, default='ext3',
                    help='Type of filesystem to be generated. This option is passed to mkfs')
    ap.add_argument('--ok', action='store_true',
                    help='Use this option to confirm the commands to be run')
    ap.add_argument('rootfs_dir', type=str,
                    help='Path to the directory containing the rootfs')
    ap.add_argument('output', type=str,
                    help='Path to the filesystem block to be created')
    ap.add_argument('size', type=int,
                    help='Size (in MB) of the output filesystem')
    return ap.parse_args(argv[1:])
def main(argv):
    """Build (or only display) the commands that create the rootfs image."""
    args = getopts(argv)
    cmds = [
        "dd if=/dev/zero of={} bs=1M count={} iflag=fullblock".format(
            args.output, args.size
        ),
        "sync",
        "mkfs.{} -d {} {}".format(args.mkfs, args.rootfs_dir, args.output),
    ]
    if not args.ok:
        # Dry-run by default: show what would be executed.
        print("The following commands are planned to be run:\n")
        for cmd in cmds:
            print(" {}".format(cmd))
        print("\nRe-run with the --ok option to run them.")
        return
    for cmd in cmds:
        print("Running {}".format(cmd))
        subprocess.check_call(cmd.split(' '))
# Script entry point: delegate to main() with the raw argv.
if __name__ == "__main__":
    main(sys.argv)
|
{"/sbxg/__main__.py": ["/sbxg/utils.py"]}
|
6,344
|
sbxg/sbxg
|
refs/heads/master
|
/scripts/create-initramfs.py
|
#! /usr/bin/env python
#
# Copyright (c) 2017 Jean Guyomarc'h
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import argparse
import os
import subprocess
import sys
# Minimal fstab installed in the initramfs (read by 'mount -a' in rcS).
FSTAB = """# SBXG-Powered fstab for busybox
# <file system> <mount point> <type> <options> <dump> <pass>
proc /proc proc defaults 0 0
sysfs /sys sysfs defaults 0 0
"""
# Initial boot script for busybox init. Bug fix: the shebang must be
# the very first line of the generated script to be honored; it used to
# come after the banner comment.
RCS = """#! /bin/sh
# SBXG-Powered rcS for busybox
set +e # Do *NOT* fail on error!!!
/bin/mount -a # Mount things in fstab.
/bin/mount -t devtmpfs devtmpfs /dev # Mount devtmpfs
cat /etc/motd # Say welcome
"""
# Banner printed at the end of the boot (cat /etc/motd in rcS).
MOTD = """
 ____ ____ _ _ ___
/ ___)( _ \( \/ )/ __)
\___ \ ) _ ( ) (( (_ \\
(____/(____/(_/\_)\___/
"""
def getopts(argv):
    """Parse the command line of the initramfs packager.

    argv[0] is the program name and is skipped.
    """
    ap = argparse.ArgumentParser(description='Initramfs packager')
    ap.add_argument('path', type=str,
                    help='Path to the busybox build directory')
    ap.add_argument('--output', '-o', type=str, default='initramfs.cpio',
                    help='Path where the initramfs will be generated')
    return ap.parse_args(argv[1:])
def main(argv):
    """Populate the busybox install tree and pack it into a cpio archive.

    The busybox build directory is completed with mount points, the
    /init symlink and the initial configuration files, then archived
    with cpio (newc format) into the requested output file.
    """
    args = getopts(argv)

    # The tree manipulation is executed from within the busybox install
    # directory. The caller's directory is restored even on error (bug
    # fix: the original left the process in args.path on exception) so
    # that args.output keeps resolving relative to the caller.
    cwd = os.getcwd()
    os.chdir(args.path)
    try:
        # Create some mountpoints and the configuration directory
        for new_dir in ["dev", "proc", "sys", "mnt", "etc/init.d"]:
            os.makedirs(new_dir, exist_ok=True)

        # Symlink /init to /sbin/init
        if not os.path.exists("init"):
            os.symlink("/sbin/init", "init")

        # Create the fstab, initial configuration and motd
        with open('etc/fstab', 'w') as stream:
            stream.write(FSTAB)
        with open('etc/init.d/rcS', 'w') as stream:
            stream.write(RCS)
        with open('etc/motd', 'w') as stream:
            stream.write(MOTD)

        # Make the initial configuration executable
        os.chmod('etc/init.d/rcS', 0o755)

        # Run cpio to create the initramfs image (kept in memory)
        output = subprocess.check_output(
            "find . -print | cpio --quiet -o --format=newc", shell=True
        )
    finally:
        # Change directory back, so we don't have to recompute the
        # output path
        os.chdir(cwd)

    # Write the initramfs on the filesystem
    with open(args.output, 'wb') as stream:
        stream.write(output)
# Script entry point: delegate to main() with the raw argv.
if __name__ == "__main__":
    main(sys.argv)
|
{"/sbxg/__main__.py": ["/sbxg/utils.py"]}
|
6,346
|
Naatoo/energy-visualization
|
refs/heads/master
|
/app/data_crud/__init__.py
|
from flask import Blueprint

# Blueprint gathering the data CRUD views; registered in create_app().
data = Blueprint('data_crud', __name__)

# Imported last on purpose: app.data_crud.views needs the 'data'
# blueprint defined above (avoids a circular import).
from app.data_crud import views
|
{"/app/__init__.py": ["/app/analyse/__init__.py", "/app/data_crud/__init__.py", "/app/models.py"], "/run.py": ["/app/__init__.py"], "/app/analyse/views.py": ["/app/analyse/__init__.py", "/app/analyse/forms.py", "/app/analyse/chart_tool.py"], "/app/analyse/plots_generation/column_line_chart.py": ["/app/tools/global_paths.py"], "/app/data_crud/prepare_data.py": ["/app/models.py", "/app/tools/global_paths.py"], "/app/analyse/chart_tool.py": ["/app/analyse/plots_generation/surface_chart.py", "/app/models.py", "/app/tools/global_paths.py", "/app/analyse/plots_generation/column_line_chart.py"], "/app/data_crud/views.py": ["/app/data_crud/__init__.py", "/app/data_crud/forms.py", "/app/data_crud/prepare_data.py", "/app/models.py"]}
|
6,347
|
Naatoo/energy-visualization
|
refs/heads/master
|
/app/models.py
|
from database import db
class Energy(db.Model):
    """Monthly electricity bill of one building.

    The (building, year, month) triple forms the composite primary key.
    """
    # Building code: 'SCH' (school) or 'WOR' (workshop), see __repr__.
    building = db.Column(db.String, primary_key=True)
    year = db.Column(db.Integer, primary_key=True)
    month = db.Column(db.Integer, primary_key=True)
    # Quantity consumed over the month.
    quantity = db.Column(db.Integer, unique=False)
    # The cost is split between consumption and transmission parts.
    consumption_price = db.Column(db.Float, unique=False)
    transmission_price = db.Column(db.Float, unique=False)

    def __repr__(self):
        return '{} Energy {}.{} quantity={}'.format('School' if self.building == 'SCH' else 'Workshop',
                                                    self.year, self.month, self.quantity)
class Gas(db.Model):
    """Monthly gas bill of one building.

    The (building, year, month) triple forms the composite primary key.
    """
    # Building code: 'SCH' (school) or 'WOR' (workshop), see __repr__.
    building = db.Column(db.String, primary_key=True)
    year = db.Column(db.Integer, primary_key=True)
    month = db.Column(db.Integer, primary_key=True)
    # Quantity consumed over the month.
    quantity = db.Column(db.Integer, unique=False)
    # Single total cost (unlike Energy, not split in two parts).
    price = db.Column(db.Float, unique=False)

    def __repr__(self):
        return '{} Gas {}.{} quantity={}'.format('School' if self.building == 'SCH' else 'Workshop',
                                                 self.year, self.month, self.quantity)
|
{"/app/__init__.py": ["/app/analyse/__init__.py", "/app/data_crud/__init__.py", "/app/models.py"], "/run.py": ["/app/__init__.py"], "/app/analyse/views.py": ["/app/analyse/__init__.py", "/app/analyse/forms.py", "/app/analyse/chart_tool.py"], "/app/analyse/plots_generation/column_line_chart.py": ["/app/tools/global_paths.py"], "/app/data_crud/prepare_data.py": ["/app/models.py", "/app/tools/global_paths.py"], "/app/analyse/chart_tool.py": ["/app/analyse/plots_generation/surface_chart.py", "/app/models.py", "/app/tools/global_paths.py", "/app/analyse/plots_generation/column_line_chart.py"], "/app/data_crud/views.py": ["/app/data_crud/__init__.py", "/app/data_crud/forms.py", "/app/data_crud/prepare_data.py", "/app/models.py"]}
|
6,348
|
Naatoo/energy-visualization
|
refs/heads/master
|
/app/analyse/plots_generation/surface_chart.py
|
from math import pi
import pandas as pd
from bokeh.models import LinearColorMapper, ColorBar, BasicTicker
from bokeh.plotting import figure
def generate_surface_chart(data: dict, title: str):
    """Build a bokeh heat-map ("surface" chart) of monthly costs per year.

    :param data: mapping of month name -> list of costs (one per year),
        plus a 'Year' key listing the year labels -- assumed to match
        ChartTool.get_data_surface's output; TODO confirm.
    :param title: title displayed above the figure.
    :return: the configured bokeh figure.
    """
    data = pd.DataFrame(data)
    data['Year'] = data['Year'].astype(str)
    data = data.set_index('Year')
    data.columns.name = 'Month'
    years = list(data.index)
    months = list(data.columns)
    # Flatten to one (Year, Month, rate) row per cell, as expected by
    # the rect() glyph below.
    df = pd.DataFrame(data.stack(), columns=['rate']).reset_index()
    # Palette from low (greenish) to high (dark red) cost.
    colors = ["#75968f", "#a5bab7", "#c9d9d3", "#e2e2e2", "#dfccce", "#ddb7b1", "#cc7878", "#933b41", "#550b1d"]
    mapper = LinearColorMapper(palette=colors, low=df.rate.min(), high=df.rate.max())
    TOOLS = "hover,save,pan,box_zoom,reset,wheel_zoom"
    p = figure(title=title,
               y_range=years, x_range=list(reversed(months)),
               x_axis_location="above", plot_width=900, plot_height=400,
               tools=TOOLS, toolbar_location='below', tooltips=[('Miesiąc', '@Month @Year'), ('Koszt', '@rate{0.00} zł')])
    p.grid.grid_line_color = None
    p.axis.axis_line_color = None
    p.axis.major_tick_line_color = None
    # p.axis.major_label_text_font_size = "5pt"
    p.axis.major_label_standoff = 0
    p.xaxis.major_label_orientation = pi / 3
    # One colored rectangle per (month, year) cell.
    p.rect(y="Year", x="Month", width=1, height=1,
           source=df,
           fill_color={'field': 'rate', 'transform': mapper},
           line_color=None)
    color_bar = ColorBar(color_mapper=mapper, title='Koszt [zł]',
                         # major_label_text_font_size="5pt",
                         ticker=BasicTicker(desired_num_ticks=len(colors)),
                         # formatter=PrintfTickFormatter(format="%d zł"),
                         label_standoff=12, border_line_color=None, location=(0, 0))
    p.add_layout(color_bar, 'right')
    return p
|
{"/app/__init__.py": ["/app/analyse/__init__.py", "/app/data_crud/__init__.py", "/app/models.py"], "/run.py": ["/app/__init__.py"], "/app/analyse/views.py": ["/app/analyse/__init__.py", "/app/analyse/forms.py", "/app/analyse/chart_tool.py"], "/app/analyse/plots_generation/column_line_chart.py": ["/app/tools/global_paths.py"], "/app/data_crud/prepare_data.py": ["/app/models.py", "/app/tools/global_paths.py"], "/app/analyse/chart_tool.py": ["/app/analyse/plots_generation/surface_chart.py", "/app/models.py", "/app/tools/global_paths.py", "/app/analyse/plots_generation/column_line_chart.py"], "/app/data_crud/views.py": ["/app/data_crud/__init__.py", "/app/data_crud/forms.py", "/app/data_crud/prepare_data.py", "/app/models.py"]}
|
6,349
|
Naatoo/energy-visualization
|
refs/heads/master
|
/app/__init__.py
|
import os
import pandas
from flask import Flask
from flask_bootstrap import Bootstrap
from .analyse import analyse as analyse_blueprint
from .data_crud import data as data_blueprint
from app.models import Gas, Energy
from database import db
def create_app():
    """Build and configure the Flask application.

    Registers Bootstrap, binds the SQLAlchemy database and registers
    the 'analyse' and 'data_crud' blueprints.
    """
    app = Flask(__name__)
    app.config['DEBUG'] = True
    # The SQLite file lives one level above the package (next to run.py).
    app.config['SQLALCHEMY_DATABASE_URI'] = "sqlite:///../main_data.db"
    # NOTE(review): a fresh random secret is generated on every start,
    # so sessions/CSRF tokens do not survive a restart -- confirm this
    # is acceptable.
    SECRET_KEY = os.urandom(32)
    app.config['SECRET_KEY'] = SECRET_KEY
    Bootstrap(app)
    db.init_app(app)
    app.register_blueprint(analyse_blueprint)
    app.register_blueprint(data_blueprint)
    return app
def setup_database(app):
    """Create all tables and seed them from the bundled spreadsheets."""
    with app.app_context():
        db.create_all()
        insert_initial_energy_data()
        insert_initial_gas_data()
def insert_initial_energy_data():
    """Seed the Energy table from the per-building Excel spreadsheets."""
    for name in ('school', 'workshop'):
        building_code = 'SCH' if name == 'school' else 'WOR'
        sheet = pandas.read_excel('app/initial_data/{}_energy.xlsx'.format(name)).to_dict('list')
        rows = zip(sheet['year'], sheet['month'], sheet['quantity'],
                   sheet['consumption_price'], sheet['transmission_price'])
        for year, month, quantity, consumption_price, transmission_price in rows:
            db.session.add(Energy(year=year, month=month, quantity=quantity,
                                  consumption_price=consumption_price,
                                  transmission_price=transmission_price,
                                  building=building_code))
        db.session.commit()
def insert_initial_gas_data():
    """Seed the Gas table from the per-building Excel spreadsheets."""
    for name in ('school', 'workshop'):
        building_code = 'SCH' if name == 'school' else 'WOR'
        sheet = pandas.read_excel('app/initial_data/{}_gas.xlsx'.format(name)).to_dict('list')
        rows = zip(sheet['year'], sheet['month'], sheet['quantity'], sheet['price'])
        for year, month, quantity, price in rows:
            db.session.add(Gas(year=year, month=month, quantity=quantity, price=price,
                               building=building_code))
        db.session.commit()
|
{"/app/__init__.py": ["/app/analyse/__init__.py", "/app/data_crud/__init__.py", "/app/models.py"], "/run.py": ["/app/__init__.py"], "/app/analyse/views.py": ["/app/analyse/__init__.py", "/app/analyse/forms.py", "/app/analyse/chart_tool.py"], "/app/analyse/plots_generation/column_line_chart.py": ["/app/tools/global_paths.py"], "/app/data_crud/prepare_data.py": ["/app/models.py", "/app/tools/global_paths.py"], "/app/analyse/chart_tool.py": ["/app/analyse/plots_generation/surface_chart.py", "/app/models.py", "/app/tools/global_paths.py", "/app/analyse/plots_generation/column_line_chart.py"], "/app/data_crud/views.py": ["/app/data_crud/__init__.py", "/app/data_crud/forms.py", "/app/data_crud/prepare_data.py", "/app/models.py"]}
|
6,350
|
Naatoo/energy-visualization
|
refs/heads/master
|
/run.py
|
import os

from app import create_app, setup_database

# Built at import time so that WSGI servers can pick up 'app' directly.
app = create_app()

if __name__ == '__main__':
    # First run: the SQLite file does not exist yet, so create the
    # schema and seed it with the initial spreadsheet data.
    if not os.path.isfile('main_data.db'):
        setup_database(app)
    app.run(debug=True)
|
{"/app/__init__.py": ["/app/analyse/__init__.py", "/app/data_crud/__init__.py", "/app/models.py"], "/run.py": ["/app/__init__.py"], "/app/analyse/views.py": ["/app/analyse/__init__.py", "/app/analyse/forms.py", "/app/analyse/chart_tool.py"], "/app/analyse/plots_generation/column_line_chart.py": ["/app/tools/global_paths.py"], "/app/data_crud/prepare_data.py": ["/app/models.py", "/app/tools/global_paths.py"], "/app/analyse/chart_tool.py": ["/app/analyse/plots_generation/surface_chart.py", "/app/models.py", "/app/tools/global_paths.py", "/app/analyse/plots_generation/column_line_chart.py"], "/app/data_crud/views.py": ["/app/data_crud/__init__.py", "/app/data_crud/forms.py", "/app/data_crud/prepare_data.py", "/app/models.py"]}
|
6,351
|
Naatoo/energy-visualization
|
refs/heads/master
|
/app/analyse/views.py
|
from flask import request, render_template
from app.analyse import analyse
from app.analyse.forms import ChartForm
from app.analyse.chart_tool import ChartTool
from bokeh.embed import components
@analyse.route('/', methods=['GET', 'POST'])
def show():
    """
    Handle requests to the '/' route (GET and POST).

    Renders the chart form and, on a valid submission, the chart it
    describes; otherwise a default chart (school electricity costs for
    2017, column style) is displayed.
    """
    form = ChartForm()
    # Defaults used when the form was not (validly) submitted.
    building = 'SCH'
    year = 2017
    energy_type = 'energy'
    chart_type = 'Column'
    if form.validate_on_submit():
        year = form.year.data
        building = form.building.data
        energy_type = form.energy_type.data
        chart_type = form.chart_type.data
    chart = ChartTool(building=building, interval=year, energy_type=energy_type, chart_type=chart_type)
    # Embed the bokeh figure as a (script, div) pair for the template.
    script, div = components(chart.plot)
    return render_template("analyse.html", the_div=div, the_script=script, form=form)
|
{"/app/__init__.py": ["/app/analyse/__init__.py", "/app/data_crud/__init__.py", "/app/models.py"], "/run.py": ["/app/__init__.py"], "/app/analyse/views.py": ["/app/analyse/__init__.py", "/app/analyse/forms.py", "/app/analyse/chart_tool.py"], "/app/analyse/plots_generation/column_line_chart.py": ["/app/tools/global_paths.py"], "/app/data_crud/prepare_data.py": ["/app/models.py", "/app/tools/global_paths.py"], "/app/analyse/chart_tool.py": ["/app/analyse/plots_generation/surface_chart.py", "/app/models.py", "/app/tools/global_paths.py", "/app/analyse/plots_generation/column_line_chart.py"], "/app/data_crud/views.py": ["/app/data_crud/__init__.py", "/app/data_crud/forms.py", "/app/data_crud/prepare_data.py", "/app/models.py"]}
|
6,352
|
Naatoo/energy-visualization
|
refs/heads/master
|
/app/analyse/__init__.py
|
from flask import Blueprint

# Blueprint gathering the chart-analysis views; registered in create_app().
analyse = Blueprint('analyse', __name__)

# Imported last on purpose: app.analyse.views needs the 'analyse'
# blueprint defined above (avoids a circular import).
from app.analyse import views
|
{"/app/__init__.py": ["/app/analyse/__init__.py", "/app/data_crud/__init__.py", "/app/models.py"], "/run.py": ["/app/__init__.py"], "/app/analyse/views.py": ["/app/analyse/__init__.py", "/app/analyse/forms.py", "/app/analyse/chart_tool.py"], "/app/analyse/plots_generation/column_line_chart.py": ["/app/tools/global_paths.py"], "/app/data_crud/prepare_data.py": ["/app/models.py", "/app/tools/global_paths.py"], "/app/analyse/chart_tool.py": ["/app/analyse/plots_generation/surface_chart.py", "/app/models.py", "/app/tools/global_paths.py", "/app/analyse/plots_generation/column_line_chart.py"], "/app/data_crud/views.py": ["/app/data_crud/__init__.py", "/app/data_crud/forms.py", "/app/data_crud/prepare_data.py", "/app/models.py"]}
|
6,353
|
Naatoo/energy-visualization
|
refs/heads/master
|
/app/analyse/plots_generation/column_line_chart.py
|
import json
from bokeh.models import HoverTool
from bokeh.models import ColumnDataSource, LinearAxis, Grid
from bokeh.plotting import figure
from bokeh.models import Legend, LegendItem
from app.tools.global_paths import TRANSLATIONS_FILE
def generate_stacked_chart(data: dict, text: dict, width=1200, height=800, chart_type: str = 'Column'):
    """Build a bokeh stacked-column or multi-line chart of monthly costs.

    :param data: dict with a 'months' key (x labels) and one list of
        values per data series (every other key).
    :param text: 'title' plus optional 'legend' and 'tooltip' entries
        used to label the figure.
    :param width: plot width in pixels.
    :param height: plot height in pixels.
    :param chart_type: 'Column' for stacked vbars, 'Line' for multilines.
    :return: the configured bokeh figure.
    """
    months = data['months']
    # Every key except 'months' is a data series to plot.
    energy_types = [key for key in sorted(data.keys()) if key != "months"]
    # Palette depends on how many series are stacked.
    if len(energy_types) == 2:
        colors = ["red", "green"]
    elif len(energy_types) == 4:
        colors = ["orange", "red", "blue", "green"]
    else:
        colors = ['red']
    tools = ["pan", "wheel_zoom,save,reset"]
    plot = figure(title=text['title'], x_range=months, plot_height=height, plot_width=width, h_symmetry=False,
                  v_symmetry=False,
                  min_border=0, toolbar_location="above", tools=tools, sizing_mode='scale_width',
                  outline_line_color="#666666", active_scroll='wheel_zoom', active_drag='pan')
    if chart_type == 'Column':
        source = ColumnDataSource(data)
        renderers = plot.vbar_stack(energy_types, x='months', width=0.8, color=colors, source=source,
                                    legend=[item for item in text['legend']] if text.get('legend') else None)
        # One hover tool per stacked series, so each bar segment shows
        # its own cost.
        for r in renderers:
            item = r.name
            tooltips = [
                ("Koszt: ", "@%s{0.00} zł" % item),
                ("Miesiąc: ", "@months")]
            if text.get("tooltip", None):
                additional_tooltips = [
                    ("{}: ".format(text["tooltip"]["energy_type"]["label"]), text["tooltip"]["energy_type"]["value"]),
                    ("{}: ".format(text["tooltip"]["building"]["label"]), text["tooltip"]["building"]["value"])]
                for t in additional_tooltips:
                    tooltips.insert(0, t)
            hover = HoverTool(tooltips=tooltips, renderers=[r])
            plot.add_tools(hover)
    elif chart_type == 'Line':
        r = plot.multi_line([[month for month in months] for item in energy_types],
                            [data[type_energy] for type_energy in energy_types], color=colors, line_width=4)
        if text.get('legend', None):
            legend = Legend(
                items=[LegendItem(label=item, renderers=[r], index=index) for index, item in enumerate(text['legend'])])
            plot.add_layout(legend)
    xaxis = LinearAxis()
    yaxis = LinearAxis()
    plot.add_layout(Grid(dimension=0, ticker=xaxis.ticker))
    plot.add_layout(Grid(dimension=1, ticker=yaxis.ticker))
    plot.title.text_font_size = '12pt'
    plot.toolbar.logo = None
    plot.min_border_top = 0
    plot.xgrid.grid_line_color = None
    plot.ygrid.grid_line_color = "#999999"
    plot.yaxis.axis_label = text['title']
    plot.ygrid.grid_line_alpha = 0.1
    plot.xaxis.axis_label = "Miesiąc"
    plot.xaxis.axis_label_text_font_size = "11pt"
    plot.xaxis.major_label_orientation = 1
    return plot
def get_translations():
    """Return the UI translation table parsed from TRANSLATIONS_FILE."""
    with open(TRANSLATIONS_FILE) as stream:
        return json.load(stream)
|
{"/app/__init__.py": ["/app/analyse/__init__.py", "/app/data_crud/__init__.py", "/app/models.py"], "/run.py": ["/app/__init__.py"], "/app/analyse/views.py": ["/app/analyse/__init__.py", "/app/analyse/forms.py", "/app/analyse/chart_tool.py"], "/app/analyse/plots_generation/column_line_chart.py": ["/app/tools/global_paths.py"], "/app/data_crud/prepare_data.py": ["/app/models.py", "/app/tools/global_paths.py"], "/app/analyse/chart_tool.py": ["/app/analyse/plots_generation/surface_chart.py", "/app/models.py", "/app/tools/global_paths.py", "/app/analyse/plots_generation/column_line_chart.py"], "/app/data_crud/views.py": ["/app/data_crud/__init__.py", "/app/data_crud/forms.py", "/app/data_crud/prepare_data.py", "/app/models.py"]}
|
6,354
|
Naatoo/energy-visualization
|
refs/heads/master
|
/app/data_crud/prepare_data.py
|
import json
from app.models import Energy, Gas
from app.tools.global_paths import MONTHS_NAMES_FILE, BUILDINGS_NAMES_POLISH_FILE
def get_data(type_choice):
    """Return the 10 most recent bill rows of the requested medium.

    :param type_choice: either ``'energy'`` or ``'gas'``.
    :return: list of display-ready rows: [year, month name, building
        name, quantity, price(s)...], newest first.
    :raises ValueError: if ``type_choice`` is not a known medium.
    """
    with open(MONTHS_NAMES_FILE) as f:
        months_names_mapping = json.loads(f.read())
    with open(BUILDINGS_NAMES_POLISH_FILE) as f:
        buildings_names = json.loads(f.read())
    if type_choice == 'energy':
        rows = Energy.query.order_by(Energy.year.desc(), Energy.month.desc()).limit(10).all()
        data = [[row.year, months_names_mapping[str(row.month)], buildings_names[row.building],
                 row.quantity, row.consumption_price, row.transmission_price] for row in rows]
    elif type_choice == 'gas':
        rows = Gas.query.order_by(Gas.year.desc(), Gas.month.desc()).limit(10).all()
        data = [[row.year, months_names_mapping[str(row.month)], buildings_names[row.building],
                 row.quantity, row.price] for row in rows]
    else:
        # Bug fix: an unknown medium used to crash with NameError on
        # the unbound 'data'; fail with an explicit error instead.
        raise ValueError("Unknown medium '{}'".format(type_choice))
    return data
|
{"/app/__init__.py": ["/app/analyse/__init__.py", "/app/data_crud/__init__.py", "/app/models.py"], "/run.py": ["/app/__init__.py"], "/app/analyse/views.py": ["/app/analyse/__init__.py", "/app/analyse/forms.py", "/app/analyse/chart_tool.py"], "/app/analyse/plots_generation/column_line_chart.py": ["/app/tools/global_paths.py"], "/app/data_crud/prepare_data.py": ["/app/models.py", "/app/tools/global_paths.py"], "/app/analyse/chart_tool.py": ["/app/analyse/plots_generation/surface_chart.py", "/app/models.py", "/app/tools/global_paths.py", "/app/analyse/plots_generation/column_line_chart.py"], "/app/data_crud/views.py": ["/app/data_crud/__init__.py", "/app/data_crud/forms.py", "/app/data_crud/prepare_data.py", "/app/models.py"]}
|
6,355
|
Naatoo/energy-visualization
|
refs/heads/master
|
/app/tools/global_paths.py
|
import os
from pathlib import Path
# Root of the 'app' package (the parent of this 'tools' directory).
MAIN_DIR = str(Path(__file__).parent.parent)

# Directory holding the JSON mapping/translation files.
MAPPING_DIR = os.path.join(MAIN_DIR, "mapping/")


def _mapping_file(filename):
    # Absolute path of one JSON file inside MAPPING_DIR.
    return os.path.join(MAPPING_DIR, filename)


MONTHS_NAMES_FILE = _mapping_file("months_names.json")
BUILDINGS_NAMES_POLISH_FILE = _mapping_file("buildings_names_polish.json")
BUILDINGS_CODE_FILE = _mapping_file("buildings_codes.json")
TRANSLATIONS_FILE = _mapping_file("translations.json")
|
{"/app/__init__.py": ["/app/analyse/__init__.py", "/app/data_crud/__init__.py", "/app/models.py"], "/run.py": ["/app/__init__.py"], "/app/analyse/views.py": ["/app/analyse/__init__.py", "/app/analyse/forms.py", "/app/analyse/chart_tool.py"], "/app/analyse/plots_generation/column_line_chart.py": ["/app/tools/global_paths.py"], "/app/data_crud/prepare_data.py": ["/app/models.py", "/app/tools/global_paths.py"], "/app/analyse/chart_tool.py": ["/app/analyse/plots_generation/surface_chart.py", "/app/models.py", "/app/tools/global_paths.py", "/app/analyse/plots_generation/column_line_chart.py"], "/app/data_crud/views.py": ["/app/data_crud/__init__.py", "/app/data_crud/forms.py", "/app/data_crud/prepare_data.py", "/app/models.py"]}
|
6,356
|
Naatoo/energy-visualization
|
refs/heads/master
|
/app/analyse/forms.py
|
from flask_wtf import FlaskForm
from wtforms import SubmitField, SelectField
from wtforms.validators import DataRequired
class ChartForm(FlaskForm):
    """
    Form letting the user select the chart to generate: period,
    building, energy medium and chart style.
    """
    # NOTE(review): the 'Avarage' misspelling below is load-bearing --
    # ChartTool compares against this exact value, so do not fix it
    # here without updating chart_tool.py as well.
    year = SelectField('Okres', validators=[DataRequired()], choices=[("2015", "2015"), ("2016", "2016"),
                                                                      ("2017", "2017"), ("2018", "2018"),
                                                                      ("2019", "2019"), ("All", "Wszystkie"),
                                                                      ('Avarage', 'Średnia')])
    building = SelectField('Budynek', validators=[DataRequired()], choices=[("SCH", "Szkoła"), ("WOR", "Warsztat"),
                                                                            ("All", "Wszystkie")])
    energy_type = SelectField('Rodzaj energii', validators=[DataRequired()],
                              choices=[("energy", "Energia elektryczna"), ("gas", "Gaz"),
                                       ("All", "Wszystkie")])
    chart_type = SelectField('Typ wykresu', validators=[DataRequired()],
                             choices=[("Column", "Kolumnowy"), ("Line", "Liniowy"),
                                      ("Surface", "Powierzchniowy")])
    submit = SubmitField('Wygeneruj wykres')
    # TODO: validators
|
{"/app/__init__.py": ["/app/analyse/__init__.py", "/app/data_crud/__init__.py", "/app/models.py"], "/run.py": ["/app/__init__.py"], "/app/analyse/views.py": ["/app/analyse/__init__.py", "/app/analyse/forms.py", "/app/analyse/chart_tool.py"], "/app/analyse/plots_generation/column_line_chart.py": ["/app/tools/global_paths.py"], "/app/data_crud/prepare_data.py": ["/app/models.py", "/app/tools/global_paths.py"], "/app/analyse/chart_tool.py": ["/app/analyse/plots_generation/surface_chart.py", "/app/models.py", "/app/tools/global_paths.py", "/app/analyse/plots_generation/column_line_chart.py"], "/app/data_crud/views.py": ["/app/data_crud/__init__.py", "/app/data_crud/forms.py", "/app/data_crud/prepare_data.py", "/app/models.py"]}
|
6,357
|
Naatoo/energy-visualization
|
refs/heads/master
|
/app/analyse/chart_tool.py
|
import json
from collections import defaultdict, OrderedDict
from statistics import mean
from app.analyse.plots_generation.surface_chart import generate_surface_chart
from app.models import Energy, Gas
from app.tools.global_paths import MONTHS_NAMES_FILE
from app.analyse.plots_generation.column_line_chart import generate_stacked_chart
from app.tools.global_paths import TRANSLATIONS_FILE
class ChartTool:
    def __init__(self, building: str, interval: str, energy_type: str, chart_type: str):
        """Build the chart described by the form selection.

        :param building: 'SCH', 'WOR' or 'All'.
        :param interval: a year (as string), 'All', or 'Avarage' (sic --
            the misspelled value is what the form submits).
        :param energy_type: 'energy', 'gas' or 'All'.
        :param chart_type: 'Column', 'Line' or 'Surface'.
        """
        self.building = building
        self.interval = interval
        self.energy_type = energy_type
        self.chart_type = chart_type
        # Translation table used for titles, legends and tooltips.
        self.tra = self.get_translations()
        # The bokeh figure, ready to be embedded by the view.
        self.plot = self.handle_input()
        # TODO data for 2018
    def handle_input(self):
        """Dispatch to the right data getter and build the bokeh plot.

        The (chart_type, energy_type, building) combination selects one
        of the data-collection methods; for the 'Avarage' interval the
        title is rewritten afterwards.
        """
        if self.chart_type == 'Surface':
            title, data = self.get_data_surface()
            plot = generate_surface_chart(data, title)
        else:
            if self.energy_type != "All":
                if self.building != "All":
                    text, data = self.get_single_data()
                else:
                    text, data = self.get_all_building_types_data()
            else:
                if self.building != "All":
                    text, data = self.get_all_energy_types_data()
                else:
                    text, data = self.get_all_energy_types_all_building_types_data()
            if self.interval == 'Avarage':
                # Prefix the title with the 'average' wording and replace
                # the raw 'Avarage' token with the 'all years' label.
                initial_title = text['title']
                final_title = self.tra['names_title']['avg'] + initial_title.replace("Avarage", self.tra["names_title"][
                    "All_years"]).lower()
                text['title'] = final_title
            plot = generate_stacked_chart(data, text,
                                          chart_type=self.chart_type)
        return plot
def get_data_surface(self):
data = OrderedDict((self.months_names_mapping[str(month)], []) for month in reversed(range(1, 13)))
data['Year'] = ['2015', '2016', '2017', '2018']
if self.energy_type != "All":
energy_types = [self.models_mapping(self.energy_type)]
if self.building != "All":
title = self.tra["title"]["single"].format(
energy_type=self.tra["names_title"][self.energy_type],
building=self.tra["names_title"][self.building],
interval=self.tra["names_title"]["All_years"])
buildings = [self.building]
else:
title = self.tra["title"]["both_buildings"].format(
energy_type=self.tra["names_title"][self.energy_type],
building_1=self.tra["names_title"]["SCH"],
building_2=self.tra["names_title"]["WOR"],
interval=self.tra["names_title"]["All_years"])
buildings = ['SCH', 'WOR']
else:
energy_types = [Energy, Gas]
if self.building != "All":
title = self.tra["title"]["both_mediums"].format(
energy_type_1=self.tra["names_title"]["energy"],
energy_type_2=self.tra["names_title"]["gas"],
building=self.tra["names_title"][self.building],
interval=self.tra["names_title"]["All_years"])
buildings = [self.building]
else:
title = self.tra["title"]["both_mediums_and_buildings"].format(
energy_type_1=self.tra["names_title"]["energy"],
energy_type_2=self.tra["names_title"]["gas"],
building_1=self.tra["names_title"]["SCH"],
building_2=self.tra["names_title"]["WOR"],
interval=self.tra["names_title"]["All_years"])
buildings = ['SCH', 'WOR']
for model in energy_types:
for building in buildings:
for row in model.query.filter_by(building=building).all():
if model == Energy:
price = row.consumption_price if row.consumption_price is not None else 0 \
+ row.transmission_price if row.transmission_price is not None else 0
else:
price = row.price
if len(data[self.months_names_mapping[str(row.month)]]) != 4:
data[self.months_names_mapping[str(row.month)]].append(price)
else:
data[self.months_names_mapping[str(row.month)]][data['Year'].index(str(row.year))] \
+= price
for k, quan in data.items():
if len(quan) != len(data['Styczeń']):
quan.append(0)
data[k] = [r for r in reversed(data[k])]
return title, data
def get_data_column_line(self, filters: dict, energy_type: str = None):
model = self.models_mapping(energy_type)
months, prices = [], []
if self.interval in ["All", "Avarage"] and 'year' in filters.keys():
del filters['year']
if self.interval == 'Avarage':
price_def = defaultdict(list)
for row in model.query.filter_by(**filters).all():
if model == Energy:
price = row.consumption_price if row.consumption_price is not None else 0 + row.transmission_price \
if row.transmission_price is not None else 0
else:
price = row.price
if self.months_names_mapping[str(row.month)] not in months:
months.append(self.months_names_mapping[str(row.month)])
price_def[self.months_names_mapping[str(row.month)]].append(price)
for month in months:
prices.append(round(mean(price_def[month]), 1))
else:
for row in model.query.filter_by(**filters).all():
if model == Energy:
price = row.consumption_price if row.consumption_price is not None else 0 + row.transmission_price \
if row.transmission_price is not None else 0
else:
price = row.price
months.append("{} {}".format(self.months_names_mapping[str(row.month)], str(row.year)))
prices.append(price)
return months, prices
def get_single_data(self):
filters = {'year': self.interval, 'building': self.building}
months, data = self.get_data_column_line(filters=filters, energy_type=self.energy_type)
text = {
"title": self.tra["title"]["single"].format(energy_type=self.tra["names_title"][self.energy_type],
building=self.tra["names_title"][self.building],
interval=self.interval if self.interval != "All" else
self.tra["names_title"]["All_years"]),
"tooltip": {
"energy_type": {
"label": self.tra["tooltip_labels"][self.energy_type],
"value": self.tra["tooltip_values"][self.energy_type]
},
"building":
{
"label": self.tra["tooltip_labels"][self.building],
"value": self.tra["tooltip_values"][self.building]
}
}
}
return text, {
'months': months,
'data': data,
}
def get_all_building_types_data(self):
school_data_months, school_data = self.get_data_column_line(filters={'year': self.interval, 'building': 'SCH'},
energy_type=self.energy_type)
workshop_data_months, workshop_data = self.get_data_column_line(
filters={'year': self.interval, 'building': 'WOR'},
energy_type=self.energy_type)
self.assert_intervals_correct(school_data_months, workshop_data_months)
text = {
"title": self.tra["title"]["both_buildings"].format(energy_type=self.tra["names_title"][self.energy_type],
building_1=self.tra["names_title"]["SCH"],
building_2=self.tra["names_title"]["WOR"],
interval=self.interval if self.interval != "All" else
self.tra["names_title"]["All_years"]),
"legend": self.tra["legend"]["both_buildings"]
}
return text, {
'months': school_data_months,
'school_data': school_data,
'workshop_data': workshop_data
}
def get_all_energy_types_data(self, building: str = None):
if not building:
building = self.building
filters = {'year': self.interval, 'building': self.building if building is None else building}
energy_data_months, energy_data = self.get_data_column_line(filters=filters, energy_type='energy')
gas_data_months, gas_data = self.get_data_column_line(filters=filters, energy_type='gas')
self.assert_intervals_correct(energy_data_months, gas_data_months)
text = {
"title": self.tra["title"]["both_mediums"].format(energy_type_1=self.tra["names_title"]["energy"],
energy_type_2=self.tra["names_title"]["gas"],
building=self.tra["names_title"][building],
interval=self.interval if self.interval != "All" else
self.tra["names_title"]["All_years"]),
"legend": self.tra["legend"]["both_mediums"]
}
return text, {
'months': energy_data_months,
'{}energy_data'.format("" if building is None else building + "_"): energy_data,
'{}gas_data'.format("" if building is None else building + "_"): gas_data
}
def get_all_energy_types_all_building_types_data(self):
all_school_data = self.get_all_energy_types_data(building='SCH')[1]
all_workshop_data = self.get_all_energy_types_data(building='WOR')[1]
all_energy_types_all_building_types_data = {**all_school_data, **all_workshop_data}
text = {
"title": self.tra["title"]["both_mediums_and_buildings"].format(
energy_type_1=self.tra["names_title"]["energy"],
energy_type_2=self.tra["names_title"]["gas"],
building_1=self.tra["names_title"]["SCH"],
building_2=self.tra["names_title"]["WOR"],
interval=self.interval if self.interval != "All" else
self.tra["names_title"]["All_years"]),
"legend": self.tra["legend"]["both_mediums_and_buildings"]
}
return text, all_energy_types_all_building_types_data
@staticmethod
def assert_intervals_correct(first_interval: list, second_interval: list) -> None:
assert first_interval == second_interval, '{} is not equal to {}'.format(first_interval, second_interval)
@property
def months_names_mapping(self):
with open(MONTHS_NAMES_FILE) as f:
return json.loads(f.read())
def models_mapping(self, energy_type=None):
mapping = {'energy': Energy, 'gas': Gas}
return mapping[self.energy_type] if energy_type is None else mapping[energy_type]
def get_translations(self):
with open(TRANSLATIONS_FILE) as f:
return json.loads(f.read())
|
{"/app/__init__.py": ["/app/analyse/__init__.py", "/app/data_crud/__init__.py", "/app/models.py"], "/run.py": ["/app/__init__.py"], "/app/analyse/views.py": ["/app/analyse/__init__.py", "/app/analyse/forms.py", "/app/analyse/chart_tool.py"], "/app/analyse/plots_generation/column_line_chart.py": ["/app/tools/global_paths.py"], "/app/data_crud/prepare_data.py": ["/app/models.py", "/app/tools/global_paths.py"], "/app/analyse/chart_tool.py": ["/app/analyse/plots_generation/surface_chart.py", "/app/models.py", "/app/tools/global_paths.py", "/app/analyse/plots_generation/column_line_chart.py"], "/app/data_crud/views.py": ["/app/data_crud/__init__.py", "/app/data_crud/forms.py", "/app/data_crud/prepare_data.py", "/app/models.py"]}
|
6,358
|
Naatoo/energy-visualization
|
refs/heads/master
|
/app/data_crud/views.py
|
from flask import render_template, redirect, url_for
from app.data_crud import data
from app.data_crud.forms import EnergyAdditionForm, GasAdditionForm
from app.data_crud.prepare_data import get_data
from app.models import Energy, Gas
from database import db
@data.route('/add/energy', methods=['GET', 'POST'])
def add_energy():
    """
    Handle requests to the /add/energy route.

    Persist an electricity bill submitted through the form, then redirect
    back to the same page (POST/redirect/GET) so a refresh does not
    resubmit. On GET or validation failure, render the form plus the
    existing rows.
    """
    form = EnergyAdditionForm()
    if form.validate_on_submit():
        billing_date = form.date.data
        new_bill = Energy(year=billing_date.year,
                          month=billing_date.month,
                          quantity=form.quantity.data,
                          consumption_price=form.consumption_price.data,
                          transmission_price=form.transmission_price.data,
                          building=form.building.data)
        db.session.add(new_bill)
        db.session.commit()
        return redirect(url_for("data_crud.add_energy"))
    return render_template("add_energy.html", rows=get_data(type_choice="energy"), form=form)
@data.route('/add/gas', methods=['GET', 'POST'])
def add_gas():
    """
    Handle requests to the /add/gas route.

    Persist a gas bill submitted through the form, then redirect back to
    the same page (POST/redirect/GET) so a refresh does not resubmit. On
    GET or validation failure, render the form plus the existing rows.
    """
    form = GasAdditionForm()
    if form.validate_on_submit():
        billing_date = form.date.data
        new_bill = Gas(year=billing_date.year,
                       month=billing_date.month,
                       quantity=form.quantity.data,
                       price=form.price.data,
                       building=form.building.data)
        db.session.add(new_bill)
        db.session.commit()
        return redirect(url_for("data_crud.add_gas"))
    return render_template("add_gas.html", rows=get_data(type_choice="gas"), form=form)
|
{"/app/__init__.py": ["/app/analyse/__init__.py", "/app/data_crud/__init__.py", "/app/models.py"], "/run.py": ["/app/__init__.py"], "/app/analyse/views.py": ["/app/analyse/__init__.py", "/app/analyse/forms.py", "/app/analyse/chart_tool.py"], "/app/analyse/plots_generation/column_line_chart.py": ["/app/tools/global_paths.py"], "/app/data_crud/prepare_data.py": ["/app/models.py", "/app/tools/global_paths.py"], "/app/analyse/chart_tool.py": ["/app/analyse/plots_generation/surface_chart.py", "/app/models.py", "/app/tools/global_paths.py", "/app/analyse/plots_generation/column_line_chart.py"], "/app/data_crud/views.py": ["/app/data_crud/__init__.py", "/app/data_crud/forms.py", "/app/data_crud/prepare_data.py", "/app/models.py"]}
|
6,359
|
Naatoo/energy-visualization
|
refs/heads/master
|
/app/data_crud/forms.py
|
from flask_wtf import FlaskForm
from wtforms import SubmitField, FloatField, SelectField
from wtforms.fields.html5 import DateField
from wtforms.validators import DataRequired
class EnergyAdditionForm(FlaskForm):
    """
    Form for adding a new electricity bill.

    Field labels are user-facing and intentionally in Polish.
    """
    # Billing date; the view stores only its year and month.
    date = DateField('Data', validators=[DataRequired()])
    # Consumed electricity [kWh].
    quantity = FloatField('Ilość zużytej energii elektrycznej [kWh]', validators=[DataRequired()])
    # Price paid for the consumption part of the bill [PLN].
    consumption_price = FloatField('Cena za zużycie energii elektrycznej [zł]', validators=[DataRequired()])
    # Price paid for the transmission part of the bill [PLN].
    transmission_price = FloatField('Cena za przesył energii elektrycznej [zł]', validators=[DataRequired()], )
    # Target building: school (SCH) or workshop (WOR).
    building = SelectField('Budynek', validators=[DataRequired()], choices=[("SCH", "Szkoła"), ("WOR", "Warsztat")])
    submit = SubmitField('Dodaj')
    # TODO: validators
class GasAdditionForm(FlaskForm):
    """
    Form for adding a new gas bill.

    Field labels are user-facing and intentionally in Polish.
    """
    # Billing date; the view stores only its year and month.
    date = DateField('Data', validators=[DataRequired()])
    # Consumed gas [m3].
    quantity = FloatField('Ilość zużytego gazu [m3]', validators=[DataRequired()])
    # Total price of the bill [PLN].
    price = FloatField('Cena za zużyty gaz [zł]', validators=[DataRequired()])
    # Target building: school (SCH) or workshop (WOR).
    building = SelectField('Budynek', validators=[DataRequired()], choices=[("SCH", "Szkoła"), ("WOR", "Warsztat")])
    submit = SubmitField('Dodaj')
    # TODO: validators
|
{"/app/__init__.py": ["/app/analyse/__init__.py", "/app/data_crud/__init__.py", "/app/models.py"], "/run.py": ["/app/__init__.py"], "/app/analyse/views.py": ["/app/analyse/__init__.py", "/app/analyse/forms.py", "/app/analyse/chart_tool.py"], "/app/analyse/plots_generation/column_line_chart.py": ["/app/tools/global_paths.py"], "/app/data_crud/prepare_data.py": ["/app/models.py", "/app/tools/global_paths.py"], "/app/analyse/chart_tool.py": ["/app/analyse/plots_generation/surface_chart.py", "/app/models.py", "/app/tools/global_paths.py", "/app/analyse/plots_generation/column_line_chart.py"], "/app/data_crud/views.py": ["/app/data_crud/__init__.py", "/app/data_crud/forms.py", "/app/data_crud/prepare_data.py", "/app/models.py"]}
|
6,367
|
robotriot/smart_reminders
|
refs/heads/master
|
/custom_components/smart_reminders/__init__.py
|
from datetime import datetime, timedelta
import logging
import re
import traceback
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_USERNAME
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.util import Throttle
import psycopg2
from .const import DOMAIN
# Module-level logger, Home Assistant convention.
_LOGGER = logging.getLogger(__name__)
# Throttle interval for entity updates (not referenced in the visible code
# of this file -- TODO confirm where it is used).
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=30)
# Keys used in reminder service-call payloads.
ATTR_TITLE = 'title'
ATTR_DESC = 'description'
ATTR_DUE = 'due_date'
ATTR_PRIORITY = 'priority'
# Config key for the PostgreSQL database name.
CONF_DATABASE = 'database'
# Prefix for generated reminder entity names, e.g. "reminder_42".
CONST_LEADING_ENTITY_NAME = 'reminder_'
def setup(hass, config):
    """Set up is called when Home Assistant is loading our component."""
    domain_config = config[DOMAIN]
    hass.data[DOMAIN] = {}
    # Instantiating the service registers the reminder entities and the
    # add/complete/delete services as a side effect.
    SmartReminders(hass, domain_config)
    # Return boolean to indicate that initialization was successful.
    return True
class SmartReminders:
    """Main Smart Reminders service.

    Loads persisted reminders from PostgreSQL, exposes each one as an
    entity, and registers the add_task/complete_task/delete_task services.
    """

    def __init__(self, hass, config):
        self.hass = hass
        self.conf = config
        self.db = SmartReminderDB(config)
        try:
            self.items = self.db.get_all_reminders()
        except Exception:
            # The DB may be unreachable during startup; start with no
            # reminders instead of failing component setup, but log it so
            # the failure is visible (the previous bare `except:` hid it).
            _LOGGER.exception("Could not load reminders from the database")
            self.items = []
        self.component = EntityComponent(_LOGGER, DOMAIN, hass)
        entities = [SmartReminderItem(hass, item, self.db) for item in self.items]
        if entities:
            self.component.add_entities(entities)
        hass.services.register(DOMAIN, "add_task", self.handle_add_task)
        hass.services.register(DOMAIN, "complete_task", self.handle_complete_task)
        hass.services.register(DOMAIN, "delete_task", self.handle_delete_task)

    async def handle_add_task(self, call):
        """Handle the add_task service call."""
        await self.add_task(call.data)

    async def handle_delete_task(self, call):
        """Delete a reminder row and remove its entity."""
        try:
            entity_id = call.data.get('id')
            ent = self.component.get_entity(entity_id)
            idx = ent._id
            await self.db.delete_reminder(idx)
            await self.component.async_remove_entity(entity_id)
        except Exception:
            logging.error(traceback.format_exc())

    async def handle_complete_task(self, call):
        """Mark a reminder completed; re-schedule it when it repeats."""
        try:
            entity_id = call.data.get('id')
            ent = self.component.get_entity(entity_id)
            idx = ent._id
            self.db.complete_reminder(idx)
            if ent.is_repeatable:
                # e.g. {'days': 7} -> timedelta(days=7) from the repeat config.
                offset = {ent._repeat_type: ent._repeat_number}
                due_date = ent._original_due_date + timedelta(**offset)
                data = {
                    ATTR_TITLE: ent._title,
                    "user": ent._username,
                    ATTR_DUE: due_date,
                    "repeat_type": ent._repeat_type,
                    "repeat_number": ent._repeat_number,
                    "repeatable": True
                }
                await self.add_task(data)
            await self.component.async_remove_entity(entity_id)
        except Exception:
            logging.error(traceback.format_exc())

    async def add_task(self, data):
        """Insert a reminder row and add the matching entity."""
        try:
            new_item = await self.db.add_reminder(data)
            ent = SmartReminderItem(self.hass, new_item, self.db)
            await self.component.async_add_entities([ent])
        except Exception:
            logging.error(traceback.format_exc())
class SmartReminderItem(Entity):
    """A single reminder exposed as a Home Assistant entity."""

    def __init__(self, hass, data, db):
        self.hass = hass
        # `data` is a positional DB row:
        # (title, due, priority, completed, id, username,
        #  ignore_count, repeat_type, repeat_number, original_due_date)
        (self._title, self._due, self._priority,
         self._completed, self._id, self._username) = data[:6]
        self._ignore_count = data[6] if data[6] is not None else 0
        self._repeat_type = data[7]
        self._repeat_number = data[8]
        self._original_due_date = data[9]
        self._db = db
        self._overdue = self.is_overdue()

    def is_overdue(self, _overdue=False):
        """Return the overdue flag, announcing and snoozing on transition.

        When the reminder just became overdue, announce it over TTS and
        push the due time back an hour in the DB (counting the ignore).
        """
        now = datetime.now()
        result = _overdue
        if not _overdue and now >= self._due:
            result = True
            announcement = f"{self._username}, I'm reminding you to {self._title}"
            self.hass.services.call("tts", "google_translate_say", {
                'entity_id': 'all',
                'message': announcement
            })
            snoozed_until = datetime.now() + timedelta(hours=1)
            self._db.set_due_time(self._id, snoozed_until, self._ignore_count)
            self._due = snoozed_until
        elif _overdue and now <= self._due:
            result = False
        return result

    @property
    def is_repeatable(self):
        """True when a repeat interval is configured."""
        return self._repeat_type is not None and self._repeat_number > 0

    @property
    def name(self):
        return f"{CONST_LEADING_ENTITY_NAME}{self._id}"

    @property
    def state_attributes(self):
        """Expose the reminder's fields as entity attributes."""
        return {
            "title": self._title,
            "due": self._due,
            "completed": self._completed,
            "user": self._username,
            "ignore_count": self._ignore_count,
            "repeatable": self.is_repeatable,
            "repeats": f"Repeats every {self._repeat_number}{self._repeat_type}",
        }

    @property
    def state(self):
        return self._overdue

    def update(self):
        """Periodic update hook; refresh the overdue flag."""
        try:
            self._overdue = self.is_overdue(self._overdue)
        except Exception:
            logging.error(traceback.format_exc())
class SmartReminderDB:
    """Thin PostgreSQL access layer for the reminders table.

    Cursors are opened per call via ``with connection.cursor()`` so they
    are closed even when a query raises (the previous code leaked the
    cursor on any exception).
    """

    def __init__(self, config):
        self.psql = psycopg2.connect(
            user=config.get(CONF_USERNAME),
            password=config.get(CONF_PASSWORD),
            host=config.get(CONF_HOST),
            port=config.get(CONF_PORT),
            database=config.get(CONF_DATABASE))

    def get_all_reminders(self):
        """Return all rows of reminders that are not completed."""
        with self.psql.cursor() as cursor:
            cursor.execute("""select * from reminders where completed = false""")
            return cursor.fetchall()

    async def add_reminder(self, data):
        """Insert a reminder and return the inserted row.

        NOTE(review): declared async for the async service handlers, but
        the psycopg2 call itself is blocking -- confirm acceptable.
        """
        is_repeatable = data.get('repeatable')
        repeat_type = data.get('repeat_type') if is_repeatable else ''
        repeat_number = data.get('repeat_number') if is_repeatable else 0
        with self.psql.cursor() as cursor:
            cursor.execute("""INSERT INTO reminders (title, due_date, username, repeat_type, repeat_number, original_due_date) VALUES (%s, %s, %s, %s, %s, %s) RETURNING *""",
                           (data.get(ATTR_TITLE), data.get(ATTR_DUE), data.get('user'), repeat_type, repeat_number, data.get(ATTR_DUE)))
            item = cursor.fetchone()
        self.psql.commit()
        return item

    def complete_reminder(self, idx):
        """Flag the reminder with id `idx` as completed."""
        with self.psql.cursor() as cursor:
            cursor.execute("""UPDATE reminders SET completed=true WHERE id=%s""", [idx])
        self.psql.commit()

    async def delete_reminder(self, idx):
        """Delete the reminder with id `idx`; return True on success."""
        with self.psql.cursor() as cursor:
            cursor.execute("""DELETE FROM reminders WHERE id=%s""", [idx])
        self.psql.commit()
        return True

    def set_due_time(self, idx, due_date, ct=0):
        """Snooze: move the due date and bump the ignore counter to ct + 1."""
        with self.psql.cursor() as cursor:
            cursor.execute("""UPDATE reminders SET due_date=%s, ignore_count=%s WHERE id=%s""",
                           [due_date, ct + 1, idx])
        self.psql.commit()
|
{"/custom_components/smart_reminders/__init__.py": ["/custom_components/smart_reminders/const.py"]}
|
6,368
|
robotriot/smart_reminders
|
refs/heads/master
|
/custom_components/smart_reminders/const.py
|
DOMAIN = "smart_reminders"
DB = "db"
|
{"/custom_components/smart_reminders/__init__.py": ["/custom_components/smart_reminders/const.py"]}
|
6,408
|
logarithm27/Object_Store_Versioning
|
refs/heads/main
|
/client.py
|
import socket
from menu import *
import pickle
import os
''' I write some few comments here
because I have already mentioned it in the server file
since the latter contains some symmetric functions '''
# Server address: this host's own IP, port 1234 (must match server.py).
ADDR_PORT = (socket.gethostbyname(socket.gethostname()),1234)
# Encoding used when building the fixed-width length header.
FORMAT = 'utf-8'
# Width of the length prefix sent before every pickled payload.
HEADER_SIZE = 16
# Control/status messages exchanged with the server (must match server.py).
QUIT_MESSAGE = 'quit'
FILE_TRANSFER_ACTIVATED_MSG = "File transfer Activated"
ST_MODE_W_T = "Storage mode with transfer activated "
ST_MODE_WITHOUT_T = "Storage mode without transfer activated "
SEND_CLIENT_TO_SERVER = "PUT FILE : TRANSFER FROM CLIENT TO SERVER"
SEND_SERVER_TO_CLIENT = "GET FILE : TRANSFER FROM SERVER TO CLIENT"
CONTINUE = "CONTINUE"
class Client:
    """Console client for the object-store/versioning server.

    Wire protocol (mirrors server.py): every message is pickled and
    preceded by its length, left-aligned in a HEADER_SIZE-byte header.
    NOTE(review): pickle.loads on bytes received from the network is
    unsafe with untrusted peers; acceptable here only because the peer is
    our own server on a trusted network.
    """
    def __init__(self):
        # create client
        self.client_socket = socket.create_connection(ADDR_PORT)
        self.start_client()
    def start_client(self):
        """Run one command round-trip with the server.

        NOTE(review): "repeat" re-enters this method recursively, so a very
        long session grows the call stack -- confirm acceptable.
        """
        self.menu = Menu()
        # select the storage mode
        self.storage_mode = self.menu.storage_mode()
        # to store the response got from the server
        self.resp = None
        # send the storage mode chosen by the user
        self.send_msg_to_server(self.storage_mode)
        self.resp = self.receive_ack_from_server()
        # print for debug
        print(self.resp)
        # checking paths if the given command is PUT or GET
        self.command_operation = self.path_checker(self.menu.printing())
        # send the command to the server
        self.send_msg_to_server(self.command_operation)
        # get response from server to know further steps to take
        self.resp = self.receive_ack_from_server()
        # if the server requested a file (in case of put command with transfer mode)
        if self.resp == SEND_CLIENT_TO_SERVER:
            self.send_file_through_network(self.command_operation)
            print(self.receive_ack_from_server())
        # if the server will send a file (in case of get command with transfer mode)
        if self.resp == SEND_SERVER_TO_CLIENT:
            self.send_msg_to_server('')
            self.receive_file_from_server(self.command_operation.split(' ')[1])
        # if the input contains the command list
        elif self.command_operation.split(' ')[0] == "list":
            # since listing depends on whether the user have mentioned the name of the object or not
            # we call the listing method to do so
            self.listing(self.resp)
        # if the input contains the delete operation
        elif self.command_operation.split(' ')[0] == "delete":
            print(self.resp)
        else:
            print(self.resp)
        # asking the user whether to quit or continue after the command completed
        quit_or_repeat = self.menu.quit()
        # if chose to quit
        if quit_or_repeat:
            # send quit message to server
            self.send_msg_to_server(QUIT_MESSAGE)
            # close connection
            self.client_socket.close()
        # else, repeat
        elif not quit_or_repeat:
            self.send_msg_to_server(CONTINUE)
            self.start_client()
    # check if a given path is valid depending on the given command
    def path_checker(self,command):
        """Re-prompt the user until the path argument of get/put exists.

        NOTE(review): assumes the command has at least 3 space-separated
        tokens; malformed input would raise IndexError -- confirm the Menu
        guarantees the shape.
        """
        if command.split(' ')[0].lower() == "get":
            while (not os.path.exists(command.split(' ')[2]) or
                not os.path.isdir(command.split(' ')[2])):
                print("Invalid path, try again : ")
                command = input()
        if command.split(' ')[0].lower() == "put":
            while (not os.path.exists(command.split(' ')[2])):
                print("Invalid path, try again : ")
                command = input()
        return command
    # listing objects or version of objects
    def listing(self, listing_response):
        """Pretty-print the server's reply to a `list` command."""
        if listing_response == "Object not found":
            print("Object not found")
        else :
            for element in listing_response:
                # if we have retrieved versions of objects
                if len(element) > 1:
                    version = element[0]
                    # element[1] is the content name and element[2] is the path
                    content = list(
                        map(lambda x, y: os.path.join(x, y), element[2].split(";")[0:-1], element[1].split(";")[0:-1]))
                    print(f"Version {str(version)} contains :")
                    for c in content:
                        print("\t" + c)
                # if we have retrieved objects
                else:
                    print(f"Object {element[0]}")
    def receive_ack_from_server(self):
        """Receive one framed, pickled message from the server.

        NOTE(review): assumes the header and then the full payload each
        arrive in a single recv() call -- large messages could be
        truncated; confirm payload sizes stay below the socket buffer.
        """
        receiving_response = True
        response_length = 0
        i = 0
        while receiving_response:
            response_from_server = self.client_socket.recv(HEADER_SIZE)
            if i == 0 and response_from_server:
                response_length = int(response_from_server)
                i += 1
            full_response = pickle.loads(self.client_socket.recv(response_length))
            return full_response
    def send_msg_to_server(self, message):
        """Pickle `message` and send it with the fixed-width length header."""
        message_to_send = pickle.dumps(message)
        message_to_send = bytes(f'{len(message_to_send):<{HEADER_SIZE}}', FORMAT) + message_to_send
        self.client_socket.send(message_to_send)
    def receive_file_from_server(self, file_name):
        """Receive a pickled list of binary chunks and write it as
        `<file_name>.txt` under the path given in the get command."""
        cwr = os.getcwd()
        receiving_response = True
        response_length = 0
        i = 0
        while receiving_response:
            receive_file_from_server = self.client_socket.recv(HEADER_SIZE)
            if i == 0 and receive_file_from_server:
                response_length = int(receive_file_from_server)
                i += 1
            full_response = pickle.loads(self.client_socket.recv(response_length))
            receiving_response = False
        path_given_in_get_command = self.command_operation.split(' ')[2]
        with open(os.path.join(str(path_given_in_get_command), file_name+".txt"), "wb") as f:
            print('writing...')
            for chunk in full_response:
                f.write(chunk)
        print('writing finished')
        print(f"File received successfully via GET command on {str(os.path.join(str(path_given_in_get_command), file_name))}")
        return str(os.path.join(str(cwr), file_name))
    def send_file_through_network(self,command):
        """Read the file at the command's path in 1024-byte chunks and send
        the pickled chunk list with the fixed-width length header."""
        path = command.split(' ')[2]
        file_data = []
        with open(path, "rb") as f:
            print('reading ...')
            while True:
                binary_data_read = f.read(1024)
                file_data.append(binary_data_read)
                if not binary_data_read:
                    break
        file_data_to_send = pickle.dumps(file_data)
        file_data_to_send = bytes(f'{len(file_data_to_send):<{HEADER_SIZE}}', FORMAT) + file_data_to_send
        print('reading finished')
        self.client_socket.send(file_data_to_send)
# Script entry point: connects to the server immediately when run.
c = Client()
|
{"/server.py": ["/operations.py"], "/main.py": ["/operations.py"], "/operations.py": ["/database.py"]}
|
6,409
|
logarithm27/Object_Store_Versioning
|
refs/heads/main
|
/server.py
|
import socket
from operations import *
import threading
import pickle
# get the host address by its hostname and set a port
# get the host address by its hostname and set a port (must match client.py)
ADDR_PORT = (socket.gethostbyname(socket.gethostname()), 1234)
# used as flag to keep listening for clients
LISTENING = True
# the format of encoding when we receive/retrieve data with client
FORMAT = 'utf-8'
# the header size by which we know how many chunks of bytes we will send/receive
HEADER_SIZE = 16
# the quit message if the user decided to quit
QUIT_MESSAGE = 'quit'
# message to send to client if the client is about to send a file to the server
SEND_CLIENT_TO_SERVER = "PUT FILE : TRANSFER FROM CLIENT TO SERVER"
# same as the previous, in the opposite direction
SEND_SERVER_TO_CLIENT = "GET FILE : TRANSFER FROM SERVER TO CLIENT"
# acknowledge messages sent to the client about which storage mode was selected
ST_MODE_W_T = "Storage mode with transfer activated "
ST_MODE_WITHOUT_T = "Storage mode without transfer activated "
# continue message if the user wants to maintain the connection with the server
CONTINUE = "CONTINUE"
class Server:
def __init__(self):
# init server
self.server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.server.bind(ADDR_PORT)
self.server.listen()
# flag to test if we are already connected with a client
self.cnt_with_client = False
while LISTENING:
# if not connected with a client, accept further connection with clients
if not self.cnt_with_client:
print("Waiting for a client to connect ...")
self.client_socket, self.client_address = self.server.accept()
# to store the command received from the client user
self.command = ""
# initialise the operations engine
self.op = Operations()
# to store response received from client during the connection
self.resp = None
# to store the storage mode chosen by user
self.transfer_mode = ""
# to store the content/file name given in the put command (from the client)
# will be used only in storage mode with transfer
self.put_file_name = ""
# to store the path where the file is saved in the server's local system
# also it's used only in storage mode with transfer
self.new_put_path = ""
# to store the path where the user have chosen to save its get file
# used in case of storage mode with transfer is activated
self.client_get_path = ""
# receive the first message from the client
# it will be the storage mode chose by the user in the client console
self.resp = self.receive_response_from_client()
# for debug (printing the storage mode)
print(self.resp)
# if the storage mode is 2
if self.resp.__eq__("2"):
# set the transfer mode to 'storage with transfer'
self.transfer_mode = "2"
# set the transfer mode in the operation's engine to 2
# because its output will depends in this variable for some commands
self.op.transfer_mode = 2
# send to client that the server is acknowledged about the transfer mode
self.send_acknowledge(ST_MODE_W_T)
# if the storage mode is 1 ( do the same as we have done with storage mode 2)
elif self.resp.__eq__("1"):
self.transfer_mode = "1"
# tell the operations engine that we will operate remotely
# in order to tell to the server that it have access to the clients' contents
self.op.remote_or_local = "from_server"
self.send_acknowledge(ST_MODE_WITHOUT_T)
# get back the command from the client
self.resp = self.receive_response_from_client()
# convert the command to a list to distinguish the command itself and its arguments
self.command = self.resp.split(' ')
# perform the operation requested by the client
self.perform_operation_with_transfer_mode()
self.resp = self.receive_response_from_client()
if self.resp.__eq__(QUIT_MESSAGE):
print('Disconnected from the client ')
self.cnt_with_client = False
self.client_socket.close()
if self.resp.__eq__(CONTINUE):
self.cnt_with_client = True
# self.send_receive(client_socket,client_address)
def perform_operation_with_transfer_mode(self):
# if the command is a "put" and the storage is with transfer
if self.command[0] == "put":
if self.transfer_mode == "2":
# get the put file name given by the client in the input
# the path is always the third element in the after splitting the command into a list
self.put_file_name = ntpath.split(self.command[2].rstrip('/'))[1]
# send to the client that the server is about to send a file to it
# so the client will be prepared
self.send_acknowledge(SEND_CLIENT_TO_SERVER)
# after receiving file from the client, it will be stored in the server
# so we get its path inside the server
self.new_put_path = self.receive_file_from_client(self.put_file_name)
# we still have the old file's path (of the client, which the server can't access to it)
# so we set the path inside the server as the new path of the file
self.command[2] = self.new_put_path
# we perform the put, and we store our object in the database (which is in the server)
ack = self.server_put(self.command, self.op)
# we send to the client information about the performed command put
self.send_acknowledge(ack)
# if the transfer mode is 1, then the server have access to the clients files
# so we perform the put operation normally
elif self.transfer_mode == "1":
ack = self.server_put(self.command, self.op)
self.send_acknowledge(ack)
# if the command is get and transfer mode is 2
if self.command[0] == "get":
if self.transfer_mode == "2":
# we store the path (where to store the object) given by client
self.client_get_path = self.command[2]
# we modify the path and we set it as the current directory (where the server is being executed)
self.command[2] = os.getcwd()
# perform the get operation inside the engine
# we get the path where the object is stored in the server
path_of_get_file_in_server = self.server_get(self.command, self.op)
# if the object name is found and the file was created
if path_of_get_file_in_server != "Object not Found":
# let the client to know
self.send_acknowledge(SEND_SERVER_TO_CLIENT)
self.receive_response_from_client()
# send that file to the client
self.send_file_through_network(path_of_get_file_in_server)
# if the object requested by the the client was not found by the engine
else:
self.send_acknowledge("Object not Found")
# perform get command normally if the storage is without transfer
if self.transfer_mode == "1":
ack = self.server_get(self.command, self.op)
self.send_acknowledge(ack)
# perform delete and list commands whatever the storage mode is
if self.command[0] == "delete":
ack = self.server_delete(self.command, self.op)
self.send_acknowledge(ack)
if self.command[0] == "list":
ack = self.server_list(self.command, self.op)
self.send_acknowledge(ack)
# delete command
def server_delete(self, received_command, operation):
acknowledge = ''
# test if the input contains 2 elements (the command and its first optional argument)
# we do the same with all the operations and depending to each one
if len(received_command[1:]) == 1:
object_name = received_command[1]
acknowledge = operation.delete(object_name)
# test if the input contains 3 elements (the command and its two optional arguments)
if len(received_command[1:]) == 2:
object_name, version = received_command[1:]
acknowledge = operation.delete(object_name, int(version))
return acknowledge
# get command
def server_get(self, received_command, operation):
if len(received_command[1:]) == 3:
object_name, path, version = received_command[1:]
acknowledge = operation.get(object_name, path, int(version))
else:
object_name, path = received_command[1:]
acknowledge = operation.get(object_name, path)
return acknowledge
# put command
def server_put(self, received_command, operation):
acknowledge = ''
# get the 2 necessary arguments (object_name and path)
object_name, path = received_command[1:3]
# if the storing is with transfer :
# if the put command got 4 arguments from the user
if len(received_command[1:]) == 4:
# take the 2 last arguments
max_versions, policy = received_command[3:]
acknowledge = operation.put(object_name, path, int(max_versions), int(policy))
# if the put command got 3 arguments
if len(received_command[1:]) == 3:
# take the third optional argument
max_versions = received_command[3]
acknowledge = operation.put(object_name, path, int(max_versions))
if len(received_command[1:]) == 2:
object_name, path = received_command[1:]
acknowledge = operation.put(object_name, path)
return acknowledge
# listing
def server_list(self, received_command, operation):
if len(received_command[1:]) == 1:
object_name = received_command[1]
return operation.list(object_name)
else:
return operation.list()
# send a message through network
# takes the message as argument
def send_acknowledge(self, acknowledge):
# using pickle, we can transform anything (object, string, dict...) to a byte stream
# we use dumps to serialize the message and transform it to a format where it can be easily reconstructed in the client
acknowledge_to_send = pickle.dumps(acknowledge)
# we insert with the serialized message its length,
# followed by a large space alignment to the right that have the size of the header_size, then followed
# by the message to send
# so the client will know at the first reception the length of the message because its stored at the first
# and then can quickly loads it
acknowledge_to_send = bytes(f'{len(acknowledge_to_send):<{HEADER_SIZE}}', FORMAT) + acknowledge_to_send
# send the message to the client
self.client_socket.send(acknowledge_to_send)
# to receive a message from the client
def receive_response_from_client(self):
receiving_response = True
response_length = 0
i = 0
while receiving_response:
# receive the first HEADER_SIZE bytes chunk of data
# the first chunk contains surely the length of the message and spaces
# example 84______________
response_from_server = self.client_socket.recv(HEADER_SIZE)
if i == 0 and response_from_server:
# we convert the length of the message to int
response_length = int(response_from_server)
i += 1
# we de-serialize and get the entire message by giving the full message length to the recv method
full_response = pickle.loads(self.client_socket.recv(response_length))
# we return the message
return full_response
# receive file (used in the case of get operation performed)
def receive_file_from_client(self, file_name):
# store the file got from the client in the current directory
cwr = os.getcwd()
receiving_response = True
response_length = 0
i = 0
# pickle is powerful, so we can receive the file data as a list consisting of binary data elements
while receiving_response:
receive_file_from_client = self.client_socket.recv(HEADER_SIZE)
if i == 0 and receive_file_from_client:
response_length = int(receive_file_from_client)
i += 1
full_response = pickle.loads(self.client_socket.recv(response_length))
receiving_response = False
# open the file in write binary mode after receiving all the file data
with open(os.path.join(str(cwr), file_name), "wb") as f:
print('writing...')
# for each binary data element in the list of that stores all files data
for chunk in full_response:
# write that data into the file
f.write(chunk)
print(f'File received to server and placed on {str(os.path.join(str(cwr), file_name))}')
return str(os.path.join(str(cwr), file_name))
# send file to the client
def send_file_through_network(self, path):
    """Send the file at `path` to the client as a pickled list of chunks.

    The payload is prefixed with a HEADER_SIZE-byte ASCII length header so
    the receiver knows how many bytes to expect.
    """
    file_data = []
    with open(path, "rb") as f:
        print('reading file data ...')
        while True:
            # Read the file in 1024-byte chunks; each chunk becomes one
            # element of the pickled list.
            binary_data_read = f.read(1024)
            file_data.append(binary_data_read)
            if not binary_data_read:
                break
    payload = pickle.dumps(file_data)
    payload = bytes(f'{len(payload):<{HEADER_SIZE}}', FORMAT) + payload
    print('File data sent to client')
    # BUGFIX: sendall() guarantees the entire buffer is transmitted;
    # send() may transmit only a prefix and silently drop the rest.
    self.client_socket.sendall(payload)
# Instantiate the server at import time; presumably the Server constructor
# binds the socket and starts serving — TODO confirm against the class above.
Server()
|
{"/server.py": ["/operations.py"], "/main.py": ["/operations.py"], "/operations.py": ["/database.py"]}
|
6,410
|
logarithm27/Object_Store_Versioning
|
refs/heads/main
|
/main.py
|
from operations import *
from menu import *
'''
TO TEST THE OPERATIONS ENGINE IN LOCAL SYSTEM, USE THIS FILE
TO TEST THE ENGINE WITH CLIENT SERVER, RUN THE server.py then client.py
'''
def run():
    """Read commands from the Menu and dispatch them to the Operations engine.

    Loops until Menu.quit() reports the user wants to stop. (The original
    implementation recursed once per command, which could exhaust the
    recursion limit during a long interactive session.)
    """
    while True:
        op = Operations()
        m = Menu()
        input_command = m.printing().split(' ')
        print(len(input_command[1:]))
        # "1" = put: create an object / a new version of it
        if m.val == "1":
            object_name, path = input_command[1:3]
            if len(input_command[1:]) == 4:
                max_versions, policy = input_command[3:]
                print(op.put(object_name, path, int(max_versions), int(policy)))
            if len(input_command[1:]) == 3:
                max_versions = input_command[3]
                print(op.put(object_name, path, int(max_versions)))
            if len(input_command[1:]) == 2:
                object_name, path = input_command[1:]
                print(op.put(object_name, path))
        # "2" = get: materialize a version of the object as a text file
        if m.val == "2":
            if len(input_command[1:]) == 3:
                object_name, path, version = input_command[1:]
                print(op.get(object_name, path, int(version)))
            else:
                object_name, path = input_command[1:]
                print(op.get(object_name, path))
        # "3" = delete: remove one version or the whole object
        if m.val == "3":
            if len(input_command[1:]) == 1:
                object_name = input_command[1]
                print(object_name)
                print(op.delete(object_name))
            if len(input_command[1:]) == 2:
                object_name, version = input_command[1:]
                print(op.delete(object_name, int(version)))
        # "4" = list: show all objects, or all versions of one object
        if m.val == "4":
            if len(input_command[1:]) == 1:
                listed = op.list(input_command[1])
            else:
                listed = op.list()
            if listed == "Object not found":
                print("Object not found")
            else:
                for element in listed:
                    # Rows with several columns are versions of an object.
                    if len(element) > 1:
                        version = element[0]
                        # element[1] holds content names, element[2] paths,
                        # both ';'-terminated lists.
                        content = list(map(lambda x, y: os.path.join(x, y),
                                           element[2].split(";")[0:-1],
                                           element[1].split(";")[0:-1]))
                        print(f"Version {str(version)} contains :")
                        for c in content:
                            print("\t" + c)
                    # Single-column rows are plain object names.
                    else:
                        print(f"Object {element[0]}")
        if m.quit():
            break
run()
|
{"/server.py": ["/operations.py"], "/main.py": ["/operations.py"], "/operations.py": ["/database.py"]}
|
6,411
|
logarithm27/Object_Store_Versioning
|
refs/heads/main
|
/operations.py
|
from database import *
import os
import ntpath
import platform
import posixpath
import macpath
# Default cap on how many versions of an object are retained.
MAX_ver = 100
# Version-retention policies persisted in the Objects table
# (interpreted by Database.create_object when the version cap is reached).
class Policy():
    # Global: evict only the single oldest version.
    Global = 1
    # Dynamic: evict the oldest 25% of versions.
    Dynamic = 2
class Operations:
    """High-level object-store API (put/get/delete/list) on top of Database."""

    def __init__(self):
        self.db = Database()
        self.db.connect_db()
        # "from_local": paths are validated here; "from_server": the remote
        # client is assumed to have validated them already.
        self.remote_or_local = "from_local"
        # 1 = report success only; 2 = return the path of the generated file.
        self.transfer_mode = 1

    def put(self, object_name, path, max_obj=MAX_ver, policy=Policy.Global):
        """Create an object (or a new version of it) from the content at `path`.

        Returns the Database status string, "Wrong Path" for a bad local
        path, or None when the mode is unrecognized.
        """
        if (os.path.exists(path) and self.remote_or_local == "from_local") or \
                self.remote_or_local == "from_server":
            # os.path already resolves to ntpath on Windows and posixpath on
            # POSIX systems (including macOS), so one split covers every
            # platform. BUGFIX: the old Darwin branch used `macpath`, which
            # splits on ':' (classic Mac OS) and was removed in Python 3.8.
            # rstrip('/') so a trailing slash doesn't yield an empty name.
            path_to_content, content_name = os.path.split(path.rstrip('/'))
            return self.db.create_object(object_name, self.db.conn,
                                         path_to_content, content_name,
                                         self.db.object_name_exists(object_name, self.db.conn),
                                         max_obj, policy)
        if self.remote_or_local == "from_local":
            # Local request whose path failed the existence check above.
            return "Wrong Path"
        return None

    def get(self, object_name, path, version=None):
        """Write a version of `object_name` as a .txt file in directory `path`.

        When `version` is None the latest version is used. Returns a status
        string, the generated file's path in transfer mode 2, or None when
        the version is missing / the mode is unrecognized.
        """
        if (not os.path.exists(path) or os.path.isfile(path)) and \
                self.remote_or_local == "from_local":
            # Destination must be an existing directory for local requests.
            return "Wrong Path"
        if self.remote_or_local == "from_server":
            # Server mode trusts the client's own path validation.
            condition = True
        elif self.remote_or_local == "from_local":
            condition = os.path.isdir(path)
        else:
            condition = None
        if not condition:
            return None
        if not self.db.object_name_exists(object_name, self.db.conn):
            return "Object not Found"
        get_data = self.db.get_version(object_name, self.db.conn, version)
        if get_data is None:
            # Requested version does not exist.
            return None
        # Last column of the fetched row is the object name -> file name.
        out_path = os.path.join(path, str(get_data[-1]) + ".txt")
        version_, content_names, content_paths = get_data[:-1]
        # Both columns are ';'-terminated lists; pair each path with its name.
        contents = [os.path.join(p, n)
                    for p, n in zip(content_paths.split(";"),
                                    content_names.split(";")[:-1])]
        with open(out_path, "w") as obj_file:
            obj_file.write("{:<15}{:>15}".format("Version", "Contents\n"))
            obj_file.write(f"{version_}\n")
            for content in contents:
                obj_file.write("{:<20}{:>18}".format("", content + "\n"))
        if self.transfer_mode == 2:
            return out_path
        return "The GET file is made and ready!"

    def delete(self, object_name, version=None):
        """Delete one version of the object, or the whole object when
        `version` is None."""
        if self.db.object_name_exists(object_name, self.db.conn):
            return self.db.delete_obj(object_name, self.db.conn, version)
        return f"Object {object_name} not found"

    def list(self, object_name=None):
        """List all objects, or every version of `object_name` when given."""
        return self.db.get_objects_versions(self.db.conn, object_name)
|
{"/server.py": ["/operations.py"], "/main.py": ["/operations.py"], "/operations.py": ["/database.py"]}
|
6,412
|
logarithm27/Object_Store_Versioning
|
refs/heads/main
|
/database.py
|
import os
import sqlite3
from sqlite3 import Error
# Directory where the database file lives: the process cwd at import time.
current_working_directory = os.getcwd()
# sqlite3 database file name, appended directly to the directory path.
db_file_name = "/drive.db"
# database creation and connection
class Database:
    """sqlite3-backed store for objects and their versions.

    Schema (see create_db_tables): Objects(o_name, Max_versions, Policy) and
    Versions(ID, version, content_name, content_path, o_name), where
    content_name / content_path hold ';'-terminated lists.
    """

    def __init__(self):
        # Connection is opened lazily by connect_db().
        self.conn = None

    def connect_db(self):
        """Open the database file in the working directory, creating the
        schema on first use."""
        db_file = str(current_working_directory) + db_file_name
        if not os.path.exists(db_file):
            print(db_file)
            # Touch the file (sqlite3.connect would create it anyway).
            open(db_file, 'w').close()
            self.conn = self.create_connection(db_file)
            self.create_db_tables(self.conn)
        else:
            self.conn = self.create_connection(db_file)

    def create_connection(self, db_file):
        """Return a sqlite3 connection to db_file, or None on failure."""
        connection = None
        try:
            connection = sqlite3.connect(db_file)
            print(f"connected with {sqlite3.version}")
            return connection
        except Error as error:
            print(error)
        return connection

    def create_db_tables(self, connection):
        """Create the Objects and Versions tables.

        Idempotent except for the ALTER TABLE, whose "duplicate column"
        error on re-runs is caught and printed.
        """
        objects_table = """ CREATE TABLE IF NOT EXISTS Objects (
                                        o_name text NOT NULL PRIMARY KEY,
                                        Max_versions INTEGER,
                                        Policy INTEGER); """
        # version: per-object counter; content_name/content_path:
        # ';'-terminated lists of the contents belonging to that version.
        versions_table = """ CREATE TABLE IF NOT EXISTS Versions (
                                        ID integer PRIMARY KEY,
                                        version integer ,
                                        content_name text NOT NULL,
                                        content_path text NOT NULL); """
        # Foreign key linking each version row to its object.
        object_versions_fk = """ ALTER TABLE Versions ADD COLUMN o_name text REFERENCES Objects(o_name); """
        if connection is not None:
            try:
                c = connection.cursor()
                c.execute(objects_table)
                c.execute(versions_table)
                c.execute(object_versions_fk)
                print("database created")
            except Error as error:
                print(error)

    def object_name_exists(self, object_name, c):
        """Return True if object_name has a row in Objects."""
        cursor = c.cursor()
        cursor.execute(""" SELECT * FROM Objects WHERE o_name=? """, (object_name,))
        if (data := cursor.fetchone()) is not None:
            print(f"The Object '{data[0]}' exists")
            return True
        print("Object not found")
        return False

    def get_version(self, object_name, c, version=None):
        """Return (version, content_name, content_path, o_name) for the
        requested version — the latest one when `version` is None — or
        None when that version does not exist."""
        cursor = c.cursor()
        if version is None:
            # BUGFIX: the MAX(version) subquery must be scoped to this
            # object; unscoped, it could pick another object's version
            # number and wrongly return no row.
            get_last_version = """ SELECT version, content_name, content_path, o_name
                                    FROM Versions
                                    WHERE o_name=? and version = (SELECT MAX(version) FROM Versions WHERE o_name=?)"""
            cursor.execute(get_last_version, (object_name, object_name))
            return cursor.fetchone()
        get_requested_version = """ SELECT version, content_name, content_path, o_name
                                        FROM Versions
                                        WHERE o_name=? and version =?"""
        cursor.execute(get_requested_version, (object_name, version,))
        fetched_data = cursor.fetchone()
        if fetched_data is None:
            print(f"This version : {str(version)} don't exist")
            return None
        return fetched_data

    def create_object(self, object_name, c, path, content_name, exists, MAX_Ver, policy):
        """Insert a new object, or a new version of an existing one,
        applying the object's retention policy when the version cap is hit.

        `exists` tells whether the object is already in the Objects table.
        """
        cursor = c.cursor()
        create_version = """ INSERT INTO Versions (version,content_name, content_path, o_name)
                                 VALUES(?,?,?,?)"""
        if not exists:
            cursor.execute("""INSERT INTO Objects Values (?,?,?)""",
                           (object_name, MAX_Ver, policy))
            # First version; ';' terminates every list entry so later
            # versions can append further names/paths.
            cursor.execute(create_version, (1, content_name + ";", path + ";", object_name))
            c.commit()
            return "New Object created"
        # How many versions exist, and how many are allowed?
        cursor.execute('''SELECT count(o_name) FROM Versions WHERE o_name=?''', (object_name,))
        number_of_versions = cursor.fetchone()[0]
        cursor.execute('''SELECT Max_versions FROM Objects WHERE o_name=?''', (object_name,))
        max_versions = cursor.fetchone()[0]
        if number_of_versions >= max_versions:
            cursor.execute('''SELECT policy FROM Objects WHERE o_name=?''', (object_name,))
            policy = cursor.fetchone()[0]
            if policy == 1:
                # Global policy: evict only the oldest version.
                cursor.execute(''' Select min(version) from versions where o_name=?''',
                               (object_name,))
                oldest_version = cursor.fetchone()[0]
                self.delete_obj(object_name, c, oldest_version)
            elif policy == 2:
                # Dynamic policy: evict the oldest 25% of versions.
                delete_quarter = ''' DELETE FROM Versions
                                        WHERE version
                                        IN (
                                        SELECT version
                                        FROM Versions
                                        WHERE o_name=?
                                        ORDER BY version ASC LIMIT ?);'''
                # BUGFIX: SQLite's LIMIT requires an integer; the original
                # passed a float (n / 4), which errors for counts that are
                # not multiples of 4.
                quarter_of_versions = number_of_versions // 4
                cursor.execute(delete_quarter, (object_name, quarter_of_versions))
                c.commit()
        # Next version number = previous maximum + 1.
        cursor.execute(''' Select max(version) from versions where o_name=?''', (object_name,))
        last_version = cursor.fetchone()[0] + 1
        # Merge this content name/path with those of the previous version.
        # BUGFIX: use the caller-supplied connection `c`, not self.conn, so
        # the method works on whatever connection it was handed.
        content_n, paths = self.add_or_replace_paths_for_new_version(object_name, c, path, content_name)
        cursor.execute(create_version, (last_version, content_n, paths, object_name))
        c.commit()
        return "New Version of the object created"

    def delete_obj(self, object_name, c, version=None):
        """Delete one version (when specified and others remain), or the
        whole object with all of its versions."""
        cursor = c.cursor()
        cursor.execute('''SELECT count(o_name) from Versions where o_name =?''', (object_name,))
        version_count = cursor.fetchone()[0]
        v_exist = self.get_version(object_name, c, version)
        if version is not None and v_exist is not None and version_count > 1:
            cursor.execute("""DELETE FROM Versions WHERE o_name=? and version=?""",
                           (object_name, version))
            c.commit()
            return f"Version {str(version)} deleted"
        if v_exist is None:
            return "Version not found"
        # No version given, or it is the last remaining one: remove the
        # object entirely.
        cursor.execute("""Delete From Versions Where o_name=?""", (object_name,))
        cursor.execute("""Delete From Objects Where o_name=?""", (object_name,))
        c.commit()
        return "Object Deleted"

    def get_objects_versions(self, c, object_name=None):
        """List every object, or — when object_name is given — every version
        of that object (or "Object not found")."""
        cursor = c.cursor()
        if object_name is not None:
            if self.object_name_exists(object_name, c):
                cursor.execute('''SELECT version,content_name,content_path FROM Versions where o_name=?''',
                               (object_name,))
                return cursor.fetchall()
            return "Object not found"
        cursor.execute('''SELECT o_name FROM Objects''')
        return cursor.fetchall()

    def add_or_replace_paths_for_new_version(self, object_name, c, path, content_name):
        """Return [names, paths] (';'-joined) for the next version: the
        previous version's contents with `content_name` either overwritten
        (same name, new path) or appended."""
        cursor = c.cursor()
        # BUGFIX: scope MAX(version) to this object (see get_version).
        last_version = '''SELECT content_name, content_path
                            From Versions
                            WHERE
                            o_name=?
                            AND
                            version=
                            (SELECT MAX(version)
                            FROM Versions
                            WHERE o_name=?)'''
        cursor.execute(last_version, (object_name, object_name))
        fetched_data = list(cursor.fetchone())
        # Entries are ';'-terminated; drop the trailing empty element.
        content_names = fetched_data[0].split(";")[0:-1]
        content_paths = fetched_data[1].split(";")[0:-1]
        for index, c_name in enumerate(content_names):
            if content_name.lower() == c_name.lower():
                # Same content name: overwrite its stored path.
                content_paths[index] = path
                break
        # NOTE(review): a case-insensitive match above still falls through
        # to this case-sensitive append when only the case differs —
        # preserved from the original; confirm intended.
        if content_name not in content_names:
            content_names.append(content_name)
            content_paths.append(path)
        return [";".join(content_names) + ";", ";".join(content_paths) + ";"]
|
{"/server.py": ["/operations.py"], "/main.py": ["/operations.py"], "/operations.py": ["/database.py"]}
|
6,421
|
RedaMansy/ProjectV_3
|
refs/heads/master
|
/player_project.py
|
from item_project import *
from map_project import rooms
# Item ids currently carried by the player (see item_project).
inventory = []
# Start the game at the reception.
# BUGFIX: the rooms table in map_project keys this room as "The Reception";
# the old key "Reception" raised KeyError at import time.
current_room = rooms["The Reception"]
# ====================================
# Player status
energy_min = 0             # minimum energy of the player
energy_max = 100           # maximum energy of the player
project_process = 0        # project progress at the start of the game
project_process_max = 100  # progress needed to complete the project
|
{"/player_project.py": ["/map_project.py"]}
|
6,422
|
RedaMansy/ProjectV_3
|
refs/heads/master
|
/map_project.py
|
from item_project import *
# Room table for the game map. Each room dict carries:
#   "name":        display name
#   "description": text shown when the player enters
#   "exits":       direction -> room name (keys of the `rooms` table below)
#   "items":       items (from item_project) present in the room
room_reception = {
    "name": "The Reception",
    "description":
    """You have just entered the Reception. The interior is very modern, with sleek furnishings and glass walls.
To your North, you can see the cafeteria.
To your west, you can see the cinema.
To your east, you can see the shopping centre. In front of you, sits a short, moustached receptionist, reading a newspaper.
""",
    "exits": {"east": "The Shopping Centre", "west": "The Cinema", "north": "The Cafeteria"},
    "items": [item_keycard]
}
room_lab = {
    "name": "The Lab",
    "description":
    """You enter the lab. It isn't too crowded. A few people are around, working and seeing that stresses you
out becuase you feel that you should be working too. You look around, not knowing who you're looking for.""",
    "exits": {"west": "The Library", "east": "The Cafeteria", "south": "The Cinema"},
    "items": [item_github, item_thebae]
}
room_library = {
    "name": "The Library",
    "description":
    """You are standing in the library, Everyone is working or reading quietly.
As everyone is focusing on their own work, nobody notice that there is a notepad on the floor.
The exit is to the east""",
    "exits": {"east": "The Lab"},
    "items": [item_notes, item_voucher, item_profoak]
}
room_cinema = {
    "name": "The Cinema",
    "description":
    """You wander around the city aimlessly and you find a cinema that you've never noticed before.
It is oddly empty and you are perplexed as you've been in this area many times in the past and have never noticed it.
The only movie you find playing is David Fincher's The Social Network.""",
    "exits": {"east": "The Reception", "north": "The Lab"},
    "items": [item_zucc]
}
room_cafeteria = {
    "name": "The Cafeteria",
    "description":
    """You walk in the cafeteria and immediately notice the lingering waft of coffee in the air.
You look at the black board by the cashier and notice that they're serving hotdogs and tiramisu. """,
    "exits": {"west": "The Lab", "north": "Home", "east": "The Closet", "south": "The Reception"},
    "items": [item_food, item_water, item_bluebear]
}
room_shoppingcentre = {
    "name": "The Shopping Centre",
    # TODO: placeholder description.
    "description":
    """INSERT DESCRIPTION""",
    "exits": {"west": "The Reception", "north": "The Closet"},
    "items": [item_laptop, item_hideokojima]
}
room_home = {
    "name": "Home",
    # TODO: placeholder description.
    "description":
    """INSERT DESCRIPTION""",
    "exits": {"east": "The Closet", "south": "The Cafeteria"},
    "items": [item_phone]
}
room_bar = {
    "name": "The Closet",
    "description":
    """You walk into The Closet to the sounds of loud music and people shouting.
There is a constant flashing of disco lights and on the board you see that all drinks are £1.13""",
    "exits": {"west": "The Cafeteria", "north": "Home", "south": "The Shopping Centre"},
    "items": [item_NICoffee, item_turing]
}
room_petshop = {
    "name": "The Petshop",
    # TODO: placeholder description.
    "description":
    """INSERT DESCRIPTION""",
    # NOTE(review): this "exits" is a SET holding a placeholder string, not a
    # direction -> room dict like every other room — fix before use.
    "exits": {"INSERT EXITS"},
    "items": [item_food, item_water, item_bluebear, item_pythonguy]
}
# NOTE(review): variable name has a "cofffee" typo and this room is not
# registered in the `rooms` table below, so it is unreachable in-game.
room_cofffeeshop = {
    "name": "The Coffee Shop",
    "description":
    """You walk into the coffee shop and the strong aroma of coffee beans lingers around the room.
You can hear the constant grinding of beans and in the corner of your eye you notice a man with a grey beard wearing glasses.
""",
    "exits": {"east": "The Shopping Centre", "west": "The Cinema", "north": "The Cafeteria"},
    "items": [item_keycard]
}
# Lookup table: room display name -> room dict.
rooms = {
    "The Reception": room_reception,
    "The Lab": room_lab,
    "The Library": room_library,
    "The Cinema": room_cinema,
    "The Cafeteria": room_cafeteria,
    "The Shopping Centre": room_shoppingcentre,
    "The Closet": room_bar,
    "Home": room_home,
    "The Petshop": room_petshop,
}
|
{"/player_project.py": ["/map_project.py"]}
|
6,423
|
tcb72/motif-mark
|
refs/heads/master
|
/motif_mark.py
|
import cairocffi as cairo
from gene import Gene
from itertools import product
import random
import seaborn as sns
import argparse
def multiline_to_dict_fasta(file):
    """Parse a (possibly multi-line) FASTA file into {header: sequence}.

    Lines starting with '>' open a new record; subsequent lines are
    concatenated onto that record's sequence.
    """
    fasta_dict = {}
    curr_header = None  # None until the first '>' header line is seen
    with open(file) as f:
        for line in f:
            line = line.strip()
            if line.startswith('>'):
                curr_header = line
                fasta_dict[curr_header] = ''
            else:
                # A sequence line before any header means malformed input.
                fasta_dict[curr_header] += line
    return fasta_dict
def get_motifs(file):
    """Read one motif per line and expand ambiguous bases.

    'y' expands to c/t and 'u' to u/t (case handled below); every other
    character stands for itself. Returns {raw_motif: [expanded variants]},
    with variants uppercased when the raw motif is fully uppercase.
    """
    with open(file) as f:
        raw_motifs = [entry.strip() for entry in f.readlines()]
    ambiguous_bases = {'y': ['c', 't'], 'u': ['u', 't']}
    motif_dict = {}
    for motif in raw_motifs:
        # Per-character expansion options, preserving position order.
        per_char_options = [ambiguous_bases.get(ch.lower(), [ch]) for ch in motif]
        variants = [''.join(combo) for combo in product(*per_char_options)]
        if motif.isupper():
            variants = [variant.upper() for variant in variants]
        motif_dict[motif] = variants
    return motif_dict
def draw_surface(fasta_dict, num_motifs):
    """Create the SVG surface ("plot.svg") sized to fit all genes plus the
    legend, and return a cairo drawing context for it."""
    num_genes = len(fasta_dict)
    # Width: longest sequence plus a 100px margin for labels/whitespace.
    # (max with key=len replaces the original sort-then-take-last.)
    WIDTH = len(max(fasta_dict.values(), key=len)) + 100
    # Height: one 100px row per gene plus 20px per legend entry.
    HEIGHT = 100 * num_genes + 20 * num_motifs
    surface = cairo.SVGSurface("plot.svg", WIDTH, HEIGHT)
    context = cairo.Context(surface)
    return context
def draw_introns(introns, X_OFFSET, y_offset, Y_INITIAL):
    """Draw each intron as a horizontal black line on the gene's baseline.

    `introns` is a list of (start, end) positions. Relies on the
    module-level cairo `context`.
    """
    for start, end in introns:
        context.set_source_rgb(0, 0, 0)
        context.move_to(X_OFFSET + start, Y_INITIAL + y_offset)
        context.line_to(X_OFFSET + end, Y_INITIAL + y_offset)
        context.stroke()
def draw_exons(exons, X_OFFSET, y_offset, Y_INITIAL, RECTANGLE_HEIGHT):
    """Draw each exon as a black rectangle centered on the gene baseline.

    `exons` is a list of (start, end) positions. Relies on the module-level
    cairo `context`.
    """
    for start, end in exons:
        context.set_source_rgb(0, 0, 0)
        top = Y_INITIAL - (RECTANGLE_HEIGHT / 2) + y_offset
        context.rectangle(X_OFFSET + start, top, end - start, RECTANGLE_HEIGHT)
        context.stroke()
def draw_motifs(palette, motif_locations, X_OFFSET, y_offset, Y_INITIAL):
    """Draw a colored vertical tick at every motif occurrence.

    Each motif gets its own palette color; ticks extend 10px above and
    below the gene baseline. Relies on the module-level cairo `context`.
    """
    for color_index, positions in enumerate(motif_locations.values()):
        context.set_source_rgb(*palette[color_index])
        for pos in positions:
            context.move_to(X_OFFSET + pos, Y_INITIAL + y_offset + 10)
            context.line_to(X_OFFSET + pos, Y_INITIAL + y_offset - 10)
            context.stroke()
def draw_legend(palette, X_OFFSET, y_offset, motifs):
    """Draw the legend: one colored line plus motif label per palette entry,
    stacked 20px apart. Relies on the module-level cairo `context`."""
    for index, rgb in enumerate(palette):
        entry_y = y_offset + 20 * index
        # Label in black, offset to the right of the color swatch.
        context.move_to(X_OFFSET + 15, entry_y + 3)
        context.set_source_rgb(0, 0, 0)
        context.show_text(motifs[index])
        # Short colored line acting as the swatch.
        context.set_source_rgb(*rgb)
        context.move_to(X_OFFSET, entry_y)
        context.line_to(X_OFFSET + 10, entry_y)
        context.stroke()
if __name__ == "__main__":
    # CLI: --fasta is the sequence file, --motifs lists one motif per line.
    parser = argparse.ArgumentParser()
    parser.add_argument('--fasta')
    parser.add_argument('--motifs')
    args = parser.parse_args()
    fasta = multiline_to_dict_fasta(args.fasta)
    motifs = get_motifs(args.motifs)
    # Layout constants: left margin, first row's baseline, exon box height.
    X_OFFSET = 75
    Y_INITIAL = 25
    RECTANGLE_HEIGHT = 30
    # One distinct color per motif.
    palette = sns.color_palette(None, len(motifs))
    context = draw_surface(fasta, len(motifs))
    context.set_line_width(1)
    # Paint a white background, then restore the default drawing state.
    context.save()
    context.set_source_rgb(1, 1, 1)
    context.paint()
    context.restore()
    y_offset = 0
    for header,sequence in fasta.items():
        gene = Gene(header,sequence)
        gene_name = gene.get_gene_name()
        # NOTE(review): gene_length is computed but never used below.
        gene_length = gene.get_gene_length()
        introns, exons = gene.get_intron_exon_locations()
        motif_locations = gene.find_motif_locations(motifs)
        # Gene label to the left of its row.
        context.set_source_rgb(0, 0, 0)
        context.move_to(20,Y_INITIAL+y_offset+5)
        context.show_text(gene_name)
        draw_introns(introns,X_OFFSET, y_offset, Y_INITIAL)
        draw_exons(exons, X_OFFSET, y_offset, Y_INITIAL, RECTANGLE_HEIGHT)
        draw_motifs(palette,motif_locations, X_OFFSET, y_offset, Y_INITIAL)
        # Each gene occupies a 100px-tall row.
        y_offset += 100
    # Legend goes below the last gene row.
    draw_legend(palette,X_OFFSET,y_offset,list(motifs.keys()))
|
{"/motif_mark.py": ["/gene.py"]}
|
6,424
|
tcb72/motif-mark
|
refs/heads/master
|
/gene.py
|
import re
from operator import itemgetter
from itertools import *
class Gene:
    """One FASTA record: a header line and its sequence.

    Case encodes structure in the sequence as used below: lowercase
    characters are treated as intron positions, everything else as exon.
    """

    def __init__(self, header, sequence):
        self.header = header
        self.sequence = sequence

    def get_gene_name(self):
        """Return the gene name: the first whitespace-delimited token of
        the header, with the leading '>' removed."""
        return self.header.split(' ')[0][1:]

    @staticmethod
    def _runs(indexes):
        """Collapse a sorted list of indexes into (start, end) tuples of
        maximal consecutive runs."""
        runs = []
        for idx in indexes:
            if runs and idx == runs[-1][1] + 1:
                # Extends the current run.
                runs[-1] = (runs[-1][0], idx)
            else:
                runs.append((idx, idx))
        return runs

    def get_intron_exon_locations(self):
        """Return (introns, exons): two lists of (start, end) index pairs.

        Lowercase positions are introns; all other positions are exons.
        """
        intron_indexes = [i for i, ch in enumerate(self.sequence) if ch.islower()]
        exon_indexes = [i for i, ch in enumerate(self.sequence) if not ch.islower()]
        return self._runs(intron_indexes), self._runs(exon_indexes)

    def find_motif_locations(self, motifs):
        """Return {motif: [start positions]} of case-insensitive matches.

        `motifs` maps each raw motif to its list of expanded variants (all
        of length len(motif)); a position matches when the k-mer starting
        there equals any variant, ignoring case.
        """
        motif_pos_info = {}
        # Lowercase once instead of per k-mer (removed the unused
        # motif_count variable from the original).
        seq_lower = self.sequence.lower()
        for motif in motifs:
            k = len(motif)
            variants = {variant.lower() for variant in motifs[motif]}
            motif_pos_info[motif] = [
                i for i in range(len(seq_lower) - k + 1)
                if seq_lower[i:i + k] in variants
            ]
        return motif_pos_info

    def get_gene_length(self):
        """Return the sequence length in characters."""
        return len(self.sequence)
|
{"/motif_mark.py": ["/gene.py"]}
|
6,439
|
PradeepNalluri/Prefix-Tuning-Bert
|
refs/heads/master
|
/distributed_training.py
|
import numpy as np
import pandas as pd
import torch
from transformers import BertTokenizer
import numpy as np
import multiprocessing as mp
import time
from tqdm import tqdm
import os
os.environ["MASTER_ADDR"] = "127.0.0.1"
os.environ['MASTER_PORT'] = '8888'
from transformers import BertModel
from transformers.modeling_outputs import SequenceClassifierOutput
from torch.nn import CrossEntropyLoss
import torch.nn as nn
from transformers import BertForSequenceClassification, AdamW, BertConfig
import pickle
from torch.utils.data import TensorDataset, random_split
from torch.utils.data import DataLoader, RandomSampler, SequentialSampler
from torch.nn.parallel import DistributedDataParallel as DDP
import torch.distributed as dist
from SARCBertClassifier import SARCBertClassifier
import torch
import torch.distributed as dist
import torch.multiprocessing as mp
import torch.nn as nn
import torch.optim as optim
from torch.nn.parallel import DistributedDataParallel as DDP
from transformers import get_linear_schedule_with_warmup
import random
import numpy as np
import numpy as np
import time
import datetime
import tempfile
# Function to calculate the accuracy of our predictions vs labels
def flat_accuracy(preds, labels):
    """Return the fraction of rows whose argmax prediction matches the label.

    `preds` is a (batch, num_classes) score array; `labels` holds the true
    class indices.
    """
    predicted = np.argmax(preds, axis=1).flatten()
    actual = labels.flatten()
    return (predicted == actual).mean()
def format_time(elapsed):
    """Render a duration given in seconds as an hh:mm:ss string."""
    # Round to the nearest whole second before formatting.
    whole_seconds = int(round(elapsed))
    return str(datetime.timedelta(seconds=whole_seconds))
def train(rank, world_size):
    """Run one DDP training worker (spawned per rank by ``mp.spawn``).

    Loads pre-tokenized inputs from ``processed_data.pckl``, fine-tunes the
    classifier for a fixed number of epochs, validates and records stats on
    rank 0, and saves the final model + tokenizer from rank 0.

    Parameters:
        rank: this worker's process/device index (also used as CUDA device).
        world_size: total number of spawned workers.
    """
    # Load the pre-tokenized tensors produced by the data-preparation step.
    with open('processed_data.pckl', 'rb') as f:
        test = pickle.load(f)
    input_ids = test[0]
    attention_masks = test[1]
    labels = test[2]
    # Combine the training inputs into a TensorDataset.
    dataset = TensorDataset(input_ids, attention_masks, labels)
    # 90-10 train/validation split.
    train_size = int(0.9 * len(dataset))
    val_size = len(dataset) - train_size
    train_dataset, val_dataset = random_split(dataset, [train_size, val_size])
    print('{:>5,} training samples'.format(train_size))
    print('{:>5,} validation samples'.format(val_size))
    # The BERT authors recommend a fine-tuning batch size of 16 or 32.
    batch_size = 32
    # Training samples in random order; validation order doesn't matter.
    train_dataloader = DataLoader(
        train_dataset,
        sampler = RandomSampler(train_dataset),
        batch_size = batch_size
    )
    validation_dataloader = DataLoader(
        val_dataset,
        sampler = SequentialSampler(val_dataset),
        batch_size = batch_size
    )
    # Join the process group before wrapping the model in DDP.
    dist.init_process_group("gloo", rank=rank, world_size=world_size)
    custom = True
    if custom:
        model = SARCBertClassifier.from_pretrained(
            "bert-base-uncased",
            num_labels = 2,              # binary classification
            output_attentions = False,
            output_hidden_states = False,
        )
        model.update_network_sarc(0, rank, freeze_bert_layers=False)
    else:
        model = BertForSequenceClassification.from_pretrained(
            "bert-base-uncased",
            num_labels = 2,
            output_attentions = False,
            output_hidden_states = False,
        )
    model.to(rank)
    # Construct the DDP wrapper pinned to this rank's device.
    model = DDP(model, device_ids=[rank])
    # AdamW is the huggingface variant (weight-decay fix).
    optimizer = AdamW(model.parameters(),
                      lr = 2e-5,
                      eps = 1e-8
                      )
    # The BERT authors recommend 2-4 fine-tuning epochs.
    epochs = 4
    # Total steps = batches per epoch x epochs (for the LR schedule).
    total_steps = len(train_dataloader) * epochs
    scheduler = get_linear_schedule_with_warmup(optimizer,
                                                num_warmup_steps = 0,
                                                num_training_steps = total_steps)
    # Seed everything for reproducibility.
    seed_val = 42
    random.seed(seed_val)
    np.random.seed(seed_val)
    torch.manual_seed(seed_val)
    torch.cuda.manual_seed_all(seed_val)
    training_stats_custom = []
    total_t0 = time.time()
    for epoch_i in range(0, epochs):
        # ========================================
        #               Training
        # ========================================
        print("")
        print('======== Epoch {:} / {:} ========'.format(epoch_i + 1, epochs))
        print('Training...')
        t0 = time.time()
        total_train_loss = 0
        # train() only switches the *mode* (dropout/batchnorm behavior);
        # it does not itself perform any training.
        model.train()
        total_correct_predictions, total_predictions = 0, 0
        generator_tqdm = tqdm(train_dataloader)
        for step, batch in enumerate(generator_tqdm):
            b_input_ids = batch[0].to(rank)
            b_input_mask = batch[1].to(rank)
            b_labels = batch[2].to(rank)
            model.zero_grad()
            result = model(b_input_ids,
                           token_type_ids=None,
                           attention_mask=b_input_mask,
                           labels=b_labels,
                           return_dict=True)
            loss = result.loss
            logits = result.logits
            # .item() detaches the scalar so autograd graphs are not
            # retained across iterations while accumulating the loss.
            total_train_loss += loss.mean().item()
            loss.mean().backward()
            # Clip gradient norms to 1.0 to mitigate exploding gradients.
            torch.nn.utils.clip_grad_norm_(model.parameters(), 1.0)
            optimizer.step()
            scheduler.step()
            batch_predictions = np.argmax(nn.Softmax(dim=1)(logits).detach().cpu().numpy(), axis=-1)
            total_correct_predictions += (batch_predictions == b_labels.detach().cpu().numpy()).sum()
            total_predictions += b_labels.shape[0]
            description = ("Average training loss: %.2f Accuracy: %.2f Lable sum: %2f"
                           % (total_train_loss/(step+1), total_correct_predictions/total_predictions, batch_predictions.sum()))
            generator_tqdm.set_description(description, refresh=False)
            # BUGFIX: a stray ``break`` here (debug leftover) stopped every
            # epoch after a single batch, so the model never actually trained.
        # Average loss over all batches of this epoch.
        avg_train_loss = total_train_loss / len(train_dataloader)
        training_time = format_time(time.time() - t0)
        CHECKPOINT_PATH = "./model.checkpoint"
        if rank == 0:
            # Parameters are synchronized across ranks by DDP's backward
            # pass, so saving from one process is sufficient.
            torch.save(model.state_dict(), CHECKPOINT_PATH)
        print("")
        print(" Average training loss: {0:.2f}".format(avg_train_loss))
        print(" Training epoch took: {:}".format(training_time))
        if(rank==0):
            # ========================================
            #               Validation
            # ========================================
            # Validate on rank 0 only; all ranks hold identical weights.
            print("")
            print("Running Validation...")
            t0 = time.time()
            # eval() switches dropout layers to inference behavior.
            model.eval()
            total_eval_accuracy = 0
            total_eval_loss = 0
            print("Evaluation In Progress")
            for batch in tqdm(validation_dataloader):
                b_input_ids = batch[0].to(rank)
                b_input_mask = batch[1].to(rank)
                b_labels = batch[2].to(rank)
                with torch.no_grad():
                    result = model(b_input_ids,
                                   token_type_ids=None,
                                   attention_mask=b_input_mask,
                                   labels=b_labels,
                                   return_dict=True)
                loss = result.loss
                logits = result.logits
                total_eval_loss += loss.mean().item()
                logits = logits.detach().cpu().numpy()
                label_ids = b_labels.to('cpu').numpy()
                total_eval_accuracy += flat_accuracy(logits, label_ids)
            avg_val_accuracy = total_eval_accuracy / len(validation_dataloader)
            print(" Accuracy: {0:.2f}".format(avg_val_accuracy))
            avg_val_loss = total_eval_loss / len(validation_dataloader)
            validation_time = format_time(time.time() - t0)
            print(" Validation Loss: {0:.2f}".format(avg_val_loss))
            print(" Validation took: {:}".format(validation_time))
            # Record all statistics from this epoch.
            training_stats_custom.append(
                {
                    'epoch': epoch_i + 1,
                    'Training Loss': avg_train_loss,
                    'Valid. Loss': avg_val_loss,
                    'Valid. Accur.': avg_val_accuracy,
                    'Training Time': training_time,
                    'Validation Time': validation_time
                }
            )
    if(rank==0):
        print("")
        print("Training complete!")
        print("Total training took {:} (h:mm:ss)".format(format_time(time.time()-total_t0)))
        # Saved with default names so from_pretrained() can reload it.
        output_dir = './model_save_DDP/'
        if not os.path.exists(output_dir):
            os.makedirs(output_dir)
        print("Saving model to %s" % output_dir)
        # Unwrap the DDP container before saving.
        model_to_save = model.module if hasattr(model, 'module') else model
        model_to_save.save_pretrained(output_dir)
        # BUGFIX: ``tokenizer`` was never defined in this function, so the
        # original save call raised NameError. Recreate the tokenizer the
        # data was encoded with and save it alongside the model.
        tokenizer = BertTokenizer.from_pretrained("bert-base-uncased")
        tokenizer.save_pretrained(output_dir)
    # Tear down the process group so spawned workers exit cleanly.
    dist.destroy_process_group()
def main(world_size=8):
    """Launch ``world_size`` DDP training workers, one per rank.

    ``world_size`` was previously hard-coded; it is now a parameter with the
    original value as its default, so callers can match their device count.
    """
    mp.spawn(train, args=(world_size,), nprocs=world_size, join=True)

if __name__ == "__main__":
    main()
|
{"/distributed_training.py": ["/SARCBertClassifier.py"], "/baselines/baseline_code.py": ["/SARCBertClassifier.py"], "/train.py": ["/SARCBertClassifier.py"]}
|
6,440
|
PradeepNalluri/Prefix-Tuning-Bert
|
refs/heads/master
|
/SARCBertClassifier.py
|
import transformers
import torch
from transformers.modeling_outputs import SequenceClassifierOutput
import torch.nn as nn
from transformers import BertTokenizer,BertForSequenceClassification,BertModel
from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss
class SARCBertClassifier(BertForSequenceClassification):
    """
    BERT sequence classifier for the SARC (sarcasm) dataset with
    prefix-tuning: a small table of learned "prefix" embeddings is passed
    through an MLP and handed to the BERT backbone for every input.

    NOTE(review): ``forward`` passes ``prefix_embeddings=...`` to
    ``self.bert``, which requires a locally modified ``transformers``
    BertModel that accepts that keyword — stock transformers does not.
    """
    def __init__(self, config):
        super(SARCBertClassifier, self).__init__(config)
        # Device the prefix tensors are created on; set by update_network_sarc().
        self.run_device = None

    def update_network_sarc(self, num_layers, device, freeze_bert_layers=False,
                            custom_embedding=False, custom_embedding_vector=None,
                            add_user_information=False):
        """
        Attach the prefix-tuning parameters to the network.

        Parameters:
            num_layers: unused here (kept for interface compatibility).
            device: device the prefix tensors should live on.
            freeze_bert_layers: freeze all BERT weights except the prefix table.
            custom_embedding: initialize the prefix table from
                ``custom_embedding_vector`` instead of randomly.
            custom_embedding_vector: 1-D embedding used when ``custom_embedding``.
            add_user_information: widen the classifier head by 2 features.

        NOTE(review): ``self.init_weights()`` is called after
        ``from_pretrained`` has loaded weights; with some transformers
        versions this re-initializes the whole model — confirm intended.
        """
        config = self.config
        if freeze_bert_layers:
            # Freeze everything except the prefix embedding table.
            for name, param in self.bert.named_parameters():
                if name != "embeddings.prefix_embeddings.weight":
                    param.requires_grad = False
            self.prefix_embeddings = nn.Embedding(config.prefix_length, config.hidden_size)
            self.prefix_length = config.prefix_length
            self.mlp_layer = nn.Sequential(nn.Linear(config.hidden_size, config.hidden_size),
                                           nn.Tanh(),
                                           nn.Linear(config.hidden_size, config.hidden_size))
        if add_user_information:
            # Two extra user-feature columns are concatenated before the head.
            self.classifier = nn.Linear(config.hidden_size + 2, config.num_labels)
        if custom_embedding:
            self.prefix_length = config.prefix_length
            self.mlp_layer = nn.Sequential(nn.Linear(config.hidden_size, config.hidden_size),
                                           nn.Tanh(),
                                           nn.Linear(config.hidden_size, config.hidden_size))
            # init_weights() runs BEFORE installing the pretrained prefix
            # table so the custom vectors are not overwritten.
            self.init_weights()
            custom_embedding_vector = custom_embedding_vector.expand(config.prefix_length, custom_embedding_vector.shape[0])
            self.prefix_embeddings = nn.Embedding.from_pretrained(custom_embedding_vector)
        else:
            self.prefix_embeddings = nn.Embedding(config.prefix_length, config.hidden_size)
            self.prefix_length = config.prefix_length
            self.mlp_layer = nn.Sequential(nn.Linear(config.hidden_size, config.hidden_size),
                                           nn.Tanh(),
                                           nn.Linear(config.hidden_size, config.hidden_size))
            self.init_weights()
        self.run_device = device

    def _closest_words(self, num_prefix_tokens):
        """Map each of the first ``num_prefix_tokens`` prefix embeddings to its
        5 nearest BERT vocabulary tokens (smallest L2 distance)."""
        prefix_tokens = self.prefix_embeddings(torch.arange(num_prefix_tokens).to(self.run_device)).detach()
        # 30522 = bert-base-uncased vocabulary size.
        bert_base = self.bert.embeddings.word_embeddings(torch.arange(30522).to(self.run_device)).detach()
        closest_words_ids = []
        for embd in prefix_tokens:
            # BUGFIX: topk() defaults to largest=True, which returned the
            # FARTHEST tokens; nearest neighbors need largest=False.
            distances = torch.norm(bert_base - embd.unsqueeze(0), dim=1)
            closest_words_ids.append(distances.topk(5, largest=False).indices)
        tokenizer = BertTokenizer.from_pretrained("./prefix_tuning_model_random_initializations_prefix_tuninglr_2e-5/")
        closest_words_ids = torch.stack(closest_words_ids)
        closest = {}
        for idx, t in enumerate(closest_words_ids):
            closest[idx] = [tokenizer._convert_id_to_token(int(tok)) for tok in t]
        return closest

    def check_closest_matching_bert_model(self):
        """Closest vocabulary words for the FIRST prefix embedding only."""
        return self._closest_words(1)

    def closest_matching_bert_model(self):
        """Closest vocabulary words for every prefix embedding."""
        return self._closest_words(self.prefix_embeddings.weight.shape[0])

    def forward(self, input_ids=None, attention_mask=None, token_type_ids=None, position_ids=None,
                head_mask=None, inputs_embeds=None, labels=None, output_attentions=None, output_hidden_states=None,
                return_dict=None, user_information=False):
        r"""
        FROM CORE HUGGINGFACE MODULE
        labels (:obj:`torch.LongTensor` of shape :obj:`(batch_size,)`, `optional`):
            Labels for computing the sequence classification/regression loss. Indices should be in :obj:`[0, ...,
            config.num_labels - 1]`. If :obj:`config.num_labels == 1` a regression loss is computed (Mean-Square loss),
            If :obj:`config.num_labels > 1` a classification loss is computed (Cross-Entropy).

        ``user_information`` is currently unused (a user-feature code path was
        removed); the parameter is kept for caller compatibility.
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
        # Build the per-batch prefix: embed, transform through the MLP, and
        # broadcast to the batch dimension.
        prefix_embds = self.prefix_embeddings(torch.arange(0, self.prefix_length).to(self.run_device))
        prefix_embds = self.mlp_layer(prefix_embds)
        prefix_embds = prefix_embds.expand(len(input_ids), prefix_embds.shape[0], prefix_embds.shape[1])
        # Extend the attention mask with ones so the prefix positions are
        # always attended to.
        attention_mask = torch.cat((torch.ones(self.prefix_length).to(self.run_device).expand(attention_mask.shape[0], self.prefix_length), attention_mask), 1)
        outputs = self.bert(
            input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
            prefix_embeddings=prefix_embds,
        )
        pooled_output = outputs[1]
        pooled_output = self.dropout(pooled_output)
        logits = self.classifier(pooled_output)
        loss = None
        if labels is not None:
            # Infer the problem type once from num_labels / label dtype.
            if self.config.problem_type is None:
                if self.num_labels == 1:
                    self.config.problem_type = "regression"
                elif self.num_labels > 1 and (labels.dtype == torch.long or labels.dtype == torch.int):
                    self.config.problem_type = "single_label_classification"
                else:
                    self.config.problem_type = "multi_label_classification"
            if self.config.problem_type == "regression":
                loss_fct = MSELoss()
                if self.num_labels == 1:
                    loss = loss_fct(logits.squeeze(), labels.squeeze())
                else:
                    loss = loss_fct(logits, labels)
            elif self.config.problem_type == "single_label_classification":
                loss_fct = CrossEntropyLoss()
                loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1))
            elif self.config.problem_type == "multi_label_classification":
                loss_fct = BCEWithLogitsLoss()
                loss = loss_fct(logits, labels)
        if not return_dict:
            output = (logits,) + outputs[2:]
            return ((loss,) + output) if loss is not None else output
        return SequenceClassifierOutput(
            loss=loss,
            logits=logits,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )
|
{"/distributed_training.py": ["/SARCBertClassifier.py"], "/baselines/baseline_code.py": ["/SARCBertClassifier.py"], "/train.py": ["/SARCBertClassifier.py"]}
|
6,441
|
PradeepNalluri/Prefix-Tuning-Bert
|
refs/heads/master
|
/baselines/baseline_code.py
|
import numpy as np
import pandas as pd
from pandas import DataFrame
import torch
import torch.nn as nn
import multiprocessing as mp
from torch.utils.data import TensorDataset, random_split
from torch.utils.data import DataLoader, RandomSampler, SequentialSampler
from transformers import BertTokenizer
from transformers import BertForSequenceClassification, AdamW, BertConfig
from transformers import get_linear_schedule_with_warmup
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score
from sklearn.metrics import f1_score
from sklearn.metrics import confusion_matrix
from SARCBertClassifier import SARCBertClassifier
from keras.preprocessing.sequence import pad_sequences
from string import ascii_uppercase
from tqdm import tqdm
import seaborn as sn
import time
import datetime
import pickle
import os
import random
import itertools
import json
import argparse
def parallelize(function_pointer,list_to_parallelize,NUM_CORE=2*mp.cpu_count()):
    '''
    Apply ``function_pointer`` to chunks of ``list_to_parallelize`` in
    parallel and return the list of per-chunk results.

    The input is split into ``NUM_CORE * 10`` chunks and mapped over a pool
    of ``NUM_CORE`` worker processes (default: twice the CPU count,
    evaluated once at import time). Wall-clock time is printed.
    '''
    started = time.time()
    chunks = np.array_split(list_to_parallelize, NUM_CORE * 10)
    worker_pool = mp.Pool(NUM_CORE)
    mapped_results = worker_pool.map(function_pointer, chunks)
    worker_pool.close()
    worker_pool.join()
    finished = time.time()
    print("Executed in:", finished - started)
    return mapped_results
def find_max_length(sentences, tokenizer=None):
    """
    Return the token length (including special tokens) of the longest
    "comment" in ``sentences``.

    Parameters:
        sentences: pandas DataFrame with a "comment" column.
        tokenizer: optional object with an ``encode(text, add_special_tokens=)``
            method. BUGFIX/generalization: the original read an undefined
            module-global ``tokenizer`` and raised NameError; a tokenizer can
            now be passed in, with a fresh bert-base-uncased one as fallback.

    Rows whose comment cannot be encoded (e.g. NaN) count as an empty string.
    """
    if tokenizer is None:
        tokenizer = BertTokenizer.from_pretrained('bert-base-uncased', do_lower_case=True)
    max_len = 0
    for _, row in sentences.iterrows():
        sent = row["comment"]
        try:
            train_inputs_ids = tokenizer.encode(sent, add_special_tokens=True)
        except Exception:
            # Narrowed from a bare ``except:``; NaN / non-string comments
            # fall back to the empty encoding instead of crashing.
            train_inputs_ids = tokenizer.encode("", add_special_tokens=True)
        max_len = max(max_len, len(train_inputs_ids))
    return max_len
def compute_accuracy(preds, labels):
    """Return the share of rows in ``preds`` whose argmax matches ``labels``."""
    hard_predictions = np.argmax(preds, axis=1).ravel()
    gold = labels.ravel()
    correct = np.sum(hard_predictions == gold)
    return correct / len(gold)
def format_time(elapsed):
    '''
    Takes a time in seconds and returns a string hh:mm:ss
    (rounded to the nearest whole second).
    '''
    seconds = int(round(elapsed))
    delta = datetime.timedelta(seconds=seconds)
    return str(delta)
def main(args):
    """End-to-end train/evaluate pipeline for the SARC baseline.

    Driven by the CLI flags parsed in ``__main__``: optionally tokenizes the
    raw CSV (``--prepare_data``), trains for ``--epochs`` epochs, saves the
    model (``--save_model``), and — when running from pre-processed data —
    evaluates on the held-out test split and writes metrics as JSON.
    """
    prepare_data = args.prepare_data
    save_processed_data = args.save_processed_data
    batch_size = args.batch_size
    custom = args.custom
    epochs = args.epochs
    learning_rate = args.learning_rate
    save_model = args.save_model
    tuning_mode = args.tuning_mode
    # BUGFIX: the original read ``args.tuning_mode`` here, so the model was
    # always saved under the tuning-mode name instead of the requested dir.
    model_save_directory = args.model_save_directory
    # BUGFIX: ``output_dir`` was only defined inside the --save_model branch,
    # so the test-metrics write below raised NameError when saving was off.
    # All artifacts (model, stats, test metrics) now share this directory.
    output_dir = model_save_directory
    if torch.cuda.is_available():
        device = torch.device("cuda")
    else:
        device = torch.device("cpu")
    print("Using:", device)
    if prepare_data:
        data = pd.read_csv("train-balanced-sarcasm.csv")
        training_set, test_set = train_test_split(data, stratify=data[["label"]], test_size=0.1)
        del data
        # Persist the held-out split for reuse across experiments.
        test_set.to_csv("test_set.csv", index=False)
        training_set.dropna(subset=["comment"], inplace=True)
        training_set.reset_index(drop=False, inplace=True)
        training_set.rename(columns={"index": "id"}, inplace=True)
        tokenizer = BertTokenizer.from_pretrained('bert-base-uncased', do_lower_case=True)
        # BUGFIX: the original ran ``max(parallelize(tokenize, sentences))``
        # here, but ``tokenize`` was undefined (NameError) and the result was
        # never used — sequences are padded/truncated to a fixed 64 below —
        # so the call (and its DataFrame staging) is dropped.
        sentences = training_set.comment.values
        labels = training_set.label.values
        train_inputs_ids = []
        training_attention_masks = []
        # Tokenize every comment to fixed-length input ids + attention masks.
        for sent in tqdm(sentences):
            encoded_dict = tokenizer.encode_plus(sent, add_special_tokens=True, max_length=64, pad_to_max_length=True,
                                                 return_attention_mask=True, return_tensors='pt',)
            # BUGFIX: the original indexed an undefined ``encoded_sentences``
            # variable with a nonexistent 'train_inputs_ids' key; encode_plus
            # returns 'input_ids' / 'attention_mask' on ``encoded_dict``.
            train_inputs_ids.append(encoded_dict['input_ids'])
            training_attention_masks.append(encoded_dict['attention_mask'])
        train_inputs_ids = torch.cat(train_inputs_ids, dim=0)
        training_attention_masks = torch.cat(training_attention_masks, dim=0)
        labels = torch.tensor(labels)
        # Save the processed tensors for future runs.
        if save_processed_data:
            with open('processed_data.pckl', 'wb') as f:
                pickle.dump([train_inputs_ids, training_attention_masks, labels], f)
    else:
        # Load previously processed tensors.
        with open('processed_data.pckl', 'rb') as f:
            input_processed_data = pickle.load(f)
        train_inputs_ids = input_processed_data[0]
        training_attention_masks = input_processed_data[1]
        labels = input_processed_data[2]
    print("Data Preperation Done")
    main_dataset = TensorDataset(train_inputs_ids, training_attention_masks, labels)
    # 90-10 train/validation split.
    train_size = int(0.9 * len(main_dataset))
    val_size = len(main_dataset) - train_size
    train_dataset, validation_data = random_split(main_dataset, [train_size, val_size])
    train_dataloader = DataLoader(train_dataset, sampler=RandomSampler(train_dataset), batch_size=batch_size,)
    validation_dataloader = DataLoader(validation_data, sampler=SequentialSampler(validation_data), batch_size=batch_size,)
    if custom:
        model = SARCBertClassifier.from_pretrained("bert-base-uncased", num_labels=2, output_attentions=False, output_hidden_states=False,)
        # "light_weight" freezes BERT and trains only the added layers.
        model.update_network_sarc(2, device, freeze_bert_layers=tuning_mode == "light_weight")
    else:
        model = BertForSequenceClassification.from_pretrained("bert-base-uncased", num_labels=2, output_attentions=False, output_hidden_states=False,)
    if torch.cuda.device_count() > 1:
        print("Parallelizing Model")
        model = nn.DataParallel(model)
    # BUGFIX: the original followed ``model.to(device)`` with an
    # unconditional ``model.cuda()``, which crashes on CPU-only hosts;
    # ``to(device)`` already selects the GPU when one is available.
    model.to(device)
    print("Model Initialization Done")
    optimizer = AdamW(model.parameters(), lr=learning_rate, eps=1e-8)
    total_steps = len(train_dataloader) * epochs
    scheduler = get_linear_schedule_with_warmup(optimizer, num_warmup_steps=0, num_training_steps=total_steps)
    print("Optimizer setup done")
    # Seed everything for reproducibility.
    seed_val = 42
    random.seed(seed_val)
    np.random.seed(seed_val)
    torch.manual_seed(seed_val)
    torch.cuda.manual_seed_all(seed_val)
    training_stats = []
    total_t0 = time.time()
    for epoch_i in range(0, epochs):
        # -------- training --------
        t0 = time.time()
        batch_train_loss = 0
        model.train()
        correct_preds, total_predictions = 0, 0
        generator_tqdm = tqdm(train_dataloader)
        for step, batch in enumerate(generator_tqdm):
            b_input_ids = batch[0].to(device)
            b_input_mask = batch[1].to(device)
            b_labels = batch[2].to(device)
            model.zero_grad()
            result = model(b_input_ids,
                           token_type_ids=None,
                           attention_mask=b_input_mask,
                           labels=b_labels,
                           return_dict=True)
            loss = result.loss
            logits = result.logits
            # .item() detaches the scalar so graphs are not retained while
            # accumulating the running loss.
            batch_train_loss += loss.mean().item()
            loss.mean().backward()
            # Clip gradient norms to mitigate exploding gradients.
            torch.nn.utils.clip_grad_norm_(model.parameters(), 1.0)
            optimizer.step()
            optimizer.zero_grad()
            scheduler.step()
            batch_predictions = np.argmax(nn.Softmax(dim=1)(logits).detach().cpu().numpy(), axis=-1)
            correct_preds += (batch_predictions == b_labels.detach().cpu().numpy()).sum()
            total_predictions += b_labels.shape[0]
            description = ("Average training loss: %.2f Accuracy: %.2f Lable sum: %2f"
                           % (batch_train_loss / (step + 1), correct_preds / total_predictions, batch_predictions.sum()))
            generator_tqdm.set_description(description, refresh=False)
        train_loss = batch_train_loss / len(train_dataloader)
        training_time = format_time(time.time() - t0)
        # -------- validation --------
        t0 = time.time()
        model.eval()
        total_eval_accuracy = 0
        total_eval_loss = 0
        for batch in tqdm(validation_dataloader):
            b_input_ids = batch[0].to(device)
            b_input_mask = batch[1].to(device)
            b_labels = batch[2].to(device)
            with torch.no_grad():
                result = model(b_input_ids,
                               token_type_ids=None,
                               attention_mask=b_input_mask,
                               labels=b_labels,
                               return_dict=True)
            total_eval_loss += result.loss.mean().item()
            logits = result.logits.detach().cpu().numpy()
            label_ids = b_labels.to('cpu').numpy()
            total_eval_accuracy += compute_accuracy(logits, label_ids)
        avg_val_accuracy = total_eval_accuracy / len(validation_dataloader)
        avg_val_loss = total_eval_loss / len(validation_dataloader)
        validation_time = format_time(time.time() - t0)
        training_stats.append(
            {
                'epoch': epoch_i + 1,
                'Training Loss': train_loss,
                'Valid. Loss': avg_val_loss,
                'Valid. Accur.': avg_val_accuracy,
                'Training Time': training_time,
                'Validation Time': validation_time
            }
        )
    # BUGFIX: the original format string had no placeholder, so the elapsed
    # time was silently dropped from this message.
    print("Training Time: {}".format(format_time(time.time() - total_t0)))
    if not prepare_data:
        # Reuse the tokenizer saved by an earlier preparation run.
        tokenizer = BertTokenizer.from_pretrained("./model_save_new/")
    if save_model:
        if not os.path.exists(output_dir):
            os.makedirs(output_dir)
        print("Saving model to %s" % output_dir)
        # Unwrap DataParallel before saving so weights reload cleanly.
        model_to_save = model.module if hasattr(model, 'module') else model
        model_to_save.save_pretrained(output_dir)
        tokenizer.save_pretrained(output_dir)
        pd.set_option('precision', 2)
        df_stats = pd.DataFrame(data=training_stats)
        df_stats = df_stats.set_index('epoch')
        df_stats.to_csv(output_dir + "/perfomance_stats.csv", index=False)
        print("Models Saved")
    if not prepare_data:
        # -------- held-out test evaluation --------
        test_set = pd.read_csv("test_set.csv")
        print("Started Testing")
        sentences = test_set.dropna(subset=["comment"]).comment.values
        labels = test_set.dropna(subset=["comment"]).label.values
        test_inputs_ids = []
        for sent in sentences:
            encoded_sent = tokenizer.encode(sent, add_special_tokens=True,)
            test_inputs_ids.append(encoded_sent)
        test_inputs_ids = pad_sequences(test_inputs_ids, maxlen=64,
                                        dtype="long", truncating="post", padding="post")
        test_attention_masks = []
        for seq in tqdm(test_inputs_ids):
            # Mask is 1.0 for real tokens (id > 0), 0.0 for padding.
            seq_mask = [float(i > 0) for i in seq]
            test_attention_masks.append(seq_mask)
        prediction_inputs = torch.tensor(test_inputs_ids)
        prediction_masks = torch.tensor(test_attention_masks)
        prediction_labels = torch.tensor(labels)
        batch_size = 32
        prediction_data = TensorDataset(prediction_inputs, prediction_masks, prediction_labels)
        prediction_sampler = SequentialSampler(prediction_data)
        prediction_dataloader = DataLoader(prediction_data, sampler=prediction_sampler, batch_size=batch_size)
        model.eval()
        predictions, true_labels = [], []
        for batch in tqdm(prediction_dataloader):
            batch = tuple(t.to(device) for t in batch)
            b_input_ids, b_input_mask, b_labels = batch
            with torch.no_grad():
                outputs = model(b_input_ids, token_type_ids=None, attention_mask=b_input_mask, labels=b_labels, return_dict=True)
            logits = outputs.logits.detach().cpu().numpy()
            label_ids = b_labels.to('cpu').numpy()
            predictions.append(logits)
            true_labels.append(label_ids)
        preds = []
        actuals = []
        for i in range(len(true_labels)):
            preds.append(list(np.argmax(predictions[i], axis=1).flatten()))
            actuals.append(list(true_labels[i]))
        preds = list(itertools.chain(*preds))
        actuals = list(itertools.chain(*actuals))
        test_metrics = {}
        test_metrics['accuracy_score'] = accuracy_score(actuals, preds) * 100
        test_metrics['f1_score'] = f1_score(actuals, preds, average='macro')
        confm = confusion_matrix(actuals, preds, normalize='true')
        columns = ["Sarcastic", "Normal"]
        df_cm = DataFrame(confm, index=columns, columns=columns)
        ax = sn.heatmap(df_cm, cmap='Oranges', annot=True)
        test_metrics["confusion_matrix"] = df_cm.to_dict('list')
        with open(output_dir + '/test_metrics.json', 'w') as fp:
            json.dump(test_metrics, fp)
if __name__ == '__main__':
    # Command-line entry point: parse flags and hand off to main().
    # NOTE(review): the description says "Dependency Parsing" but this script
    # trains the SARC sarcasm classifier — likely copied from another project.
    parser = argparse.ArgumentParser(description='Train Dependency Parsing Model')
    # General training arguments
    parser.add_argument('--prepare_data', action="store_true", default=False,
                        help='if passed, will prepare data.')
    parser.add_argument('--save_processed_data', action="store_true", default=False,
                        help='if passed, save the processed data.')
    parser.add_argument('--batch_size', type=int, help='batch_size ', default=128)
    # NOTE(review): store_true with default=True means this flag can never be
    # turned off from the CLI (passing it is a no-op) — confirm intent.
    parser.add_argument('--custom', action="store_true", default=True,
                        help='if passed, use no custom.')
    parser.add_argument('--epochs', type=int, help='epochs ', default=4)
    parser.add_argument('--learning_rate', type=float, help='learning_rate ', default=0.005)
    # NOTE(review): same store_true/default=True no-op issue as --custom.
    parser.add_argument('--save_model', action="store_true", default=True,
                        help='if passed, save model.')
    parser.add_argument('--tuning_mode', type=str, choices=("light_weight", "fine_tune"),
                        help='tuning_mode', default="light_weight")
    # NOTE(review): help text says 'tuning_mode' but this is the output dir.
    parser.add_argument('--model_save_directory', type=str,
                        help='tuning_mode', default="temper")
    args = parser.parse_args()
    main(args)
|
{"/distributed_training.py": ["/SARCBertClassifier.py"], "/baselines/baseline_code.py": ["/SARCBertClassifier.py"], "/train.py": ["/SARCBertClassifier.py"]}
|
6,442
|
PradeepNalluri/Prefix-Tuning-Bert
|
refs/heads/master
|
/baselines/SARCBertClassifier.py
|
import torch
import torch.nn as nn
from torch.nn import CrossEntropyLoss
from torch.nn import BCEWithLogitsLoss, MSELoss

from transformers import BertModel
from transformers import BertForSequenceClassification, AdamW, BertConfig
from transformers.modeling_outputs import SequenceClassifierOutput
class SARCBertClassifier(BertForSequenceClassification):
    """
    Classifier to handle classification task on SARC dataset.

    Baseline variant: a stock BertForSequenceClassification whose single
    linear head can be replaced by a deeper MLP via update_network_sarc().
    """
    def __init__(self,config):
        super(SARCBertClassifier, self).__init__(config)
    def update_network_sarc(self,num_layers,device,freeze_bert_layers=False):
        """
        Update the network architecture all the variable are class variables from source code of BerforSequenceClassification
        transformer module

        Replaces ``self.classifier`` with ``num_layers - 1`` Linear+ReLU
        blocks followed by an output Linear layer; optionally freezes all
        BERT backbone parameters so only the new head trains.

        NOTE(review): ``self.init_weights()`` is called after the pretrained
        weights were loaded by ``from_pretrained``; with some transformers
        versions this re-initializes the backbone — confirm intended.
        """
        config=self.config
        if(freeze_bert_layers):
            # Train only the classifier head; freeze the BERT encoder.
            for param in self.bert.parameters():
                param.requires_grad = False
        self.classifier = nn.Sequential()
        for layer in range(num_layers-1):
            self.classifier.add_module("classification_layer_"+str(layer+1),nn.Linear(config.hidden_size, config.hidden_size))
            self.classifier.add_module("activation_layer_"+str(layer+1),nn.ReLU())
        self.classifier.add_module("output_layer",nn.Linear(config.hidden_size, config.num_labels))
        self.classifier.to(device)
        self.init_weights()
    def forward(self,input_ids=None,attention_mask=None,token_type_ids=None,position_ids=None,
        head_mask=None,inputs_embeds=None,labels=None,output_attentions=None,output_hidden_states=None,return_dict=None,):
        r"""
        FROM CORE HUGGINGFACE MODULE
        labels (:obj:`torch.LongTensor` of shape :obj:`(batch_size,)`, `optional`):
            Labels for computing the sequence classification/regression loss. Indices should be in :obj:`[0, ...,
            config.num_labels - 1]`. If :obj:`config.num_labels == 1` a regression loss is computed (Mean-Square loss),
            If :obj:`config.num_labels > 1` a classification loss is computed (Cross-Entropy).

        NOTE(review): this module's import block does not bring ``torch``,
        ``MSELoss`` or ``BCEWithLogitsLoss`` into scope, so the dtype check
        and the regression/multi-label branches below raise NameError unless
        the file's imports are extended.
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
        outputs = self.bert(
            input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        # outputs[1] is BERT's pooled [CLS] representation.
        pooled_output = outputs[1]
        pooled_output = self.dropout(pooled_output)
        logits = self.classifier(pooled_output)
        loss = None
        if labels is not None:
            # Infer the problem type once from num_labels / label dtype.
            if self.config.problem_type is None:
                if self.num_labels == 1:
                    self.config.problem_type = "regression"
                elif self.num_labels > 1 and (labels.dtype == torch.long or labels.dtype == torch.int):
                    self.config.problem_type = "single_label_classification"
                else:
                    self.config.problem_type = "multi_label_classification"
            if self.config.problem_type == "regression":
                loss_fct = MSELoss()
                if self.num_labels == 1:
                    loss = loss_fct(logits.squeeze(), labels.squeeze())
                else:
                    loss = loss_fct(logits, labels)
            elif self.config.problem_type == "single_label_classification":
                loss_fct = CrossEntropyLoss()
                loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1))
            elif self.config.problem_type == "multi_label_classification":
                loss_fct = BCEWithLogitsLoss()
                loss = loss_fct(logits, labels)
        if not return_dict:
            output = (logits,) + outputs[2:]
            return ((loss,) + output) if loss is not None else output
        return SequenceClassifierOutput(
            loss=loss,
            logits=logits,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )
|
{"/distributed_training.py": ["/SARCBertClassifier.py"], "/baselines/baseline_code.py": ["/SARCBertClassifier.py"], "/train.py": ["/SARCBertClassifier.py"]}
|
6,443
|
PradeepNalluri/Prefix-Tuning-Bert
|
refs/heads/master
|
/train.py
|
from urllib import parse
import numpy as np
import pandas as pd
from pandas import DataFrame
import torch
import torch.nn as nn
import multiprocessing as mp
from torch.utils.data import TensorDataset, random_split
from torch.utils.data import DataLoader, RandomSampler, SequentialSampler
from transformers import BertTokenizer
from transformers import BertForSequenceClassification, AdamW, BertConfig,BertModel
from transformers import get_linear_schedule_with_warmup
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score
from sklearn.metrics import f1_score
from sklearn.metrics import confusion_matrix
from SARCBertClassifier import SARCBertClassifier
from keras.preprocessing.sequence import pad_sequences
from string import ascii_uppercase
from tqdm import tqdm
import seaborn as sn
import time
import datetime
import pickle
import os
import random
import itertools
import json
import argparse
def get_bert_embedding(word):
    """Return a mean-pooled BERT embedding for *word* (a word or short phrase).

    Encodes the text with bert-base-uncased (padded/truncated to 5 tokens),
    runs it through a frozen BertModel, and averages the last-hidden-state
    vectors of the non-padding positions.  Used to seed prefix embeddings
    from a natural-language phrase.
    """
    tokenizer = BertTokenizer.from_pretrained('bert-base-uncased', do_lower_case=True)
    encoded = tokenizer.encode_plus(
        word,                          # sentence to encode
        add_special_tokens = True,     # add '[CLS]' and '[SEP]'
        max_length = 5,                # pad & truncate all sentences
        pad_to_max_length = True,
        return_attention_mask = True,  # construct attention masks
        return_tensors = 'pt',         # return pytorch tensors
    )
    bert = BertModel.from_pretrained('bert-base-uncased')
    bert.eval()
    hidden = bert(**encoded).last_hidden_state.detach()
    # Keep only real (non-padding) token positions, then average them.
    real_tokens = encoded['attention_mask'][0] == 1
    pooled = hidden[0][real_tokens, :].mean(dim=0)
    del bert
    del hidden
    return pooled
def parallelize(function_pointer,list_to_parallelize,NUM_CORE=2*mp.cpu_count()):
    """Apply *function_pointer* to chunks of *list_to_parallelize* in parallel.

    The list is split into NUM_CORE*10 roughly equal numpy chunks and mapped
    over a process pool (NUM_CORE workers, default twice the CPU count).
    Returns the per-chunk results as a list and prints the elapsed time.
    """
    t_start = time.time()
    chunks = np.array_split(list_to_parallelize, NUM_CORE * 10)
    pool = mp.Pool(NUM_CORE)
    results = pool.map(function_pointer, chunks)
    pool.close()
    pool.join()
    print("Executed in:", time.time() - t_start)
    return results
def find_max_length(sentences,tokenizer):
"""
Find the max length of the senteces
"""
max_len = 0
for _,row in sentences.iterrows():
sent=row["comment"]
try:
train_inputs_ids = tokenizer.encode(sent, add_special_tokens=True)
except:
train_inputs_ids = tokenizer.encode("", add_special_tokens=True)
max_len = max(max_len, len(train_inputs_ids))
return max_len
def compute_accuracy(preds, labels):
    """Fraction of rows where argmax over *preds* matches *labels*."""
    predicted = np.argmax(preds, axis=1).flatten()
    gold = labels.flatten()
    return np.mean(predicted == gold)
def format_time(elapsed):
    """Round *elapsed* (seconds) to the nearest int and render as h:mm:ss."""
    return str(datetime.timedelta(seconds=int(round(elapsed))))
def main(args):
    """End-to-end driver: train (or load) a BERT sarcasm classifier, then test it.

    All behaviour is controlled by *args* (see the argparse block at the bottom
    of this file).  Side effects: reads/writes CSV and pickle files in the CWD,
    saves model checkpoints to a mode-specific directory, writes
    test_metrics.json after evaluation.
    """
    # ---- unpack CLI options -------------------------------------------------
    prepare_data = args.prepare_data
    save_processed_data = args.save_processed_data
    batch_size = args.batch_size
    epochs = args.epochs
    learning_rate = args.learning_rate
    save_model = args.save_model
    tuning_mode = args.tuning_mode
    model_save_directory = "model_experiment_"+tuning_mode+"_batch_"+str(batch_size)+"_lr_"+str(learning_rate)+"_epoch_"+str(epochs)+"/"
    prefix_tuning = True if "prefix" in tuning_mode else False
    use_multi_gpu = args.use_multi_gpu
    phrase_for_init = args.phrase_for_init
    checkpoint = args.checkpoint
    analyze_tokens = args.analyze_tokens
    test_file = args.test_file
    train_model = not args.evaluate
    train_data = args.train_data
    model_directory_to_use = args.saved_model_location
    if(prefix_tuning):
        # NOTE(review): prefix_length is only bound when prefix_tuning is True,
        # but it is read later (training loop and evaluation path) regardless
        # -> NameError for the "noprefix"/"baseline" modes.  Confirm/fix.
        prefix_length = args.prefix_length
    if torch.cuda.is_available():
        device = torch.device("cuda")
    else:
        device = torch.device("cpu")
    print("Using:",device)
    if(train_model):
        # ---- data preparation: tokenize from CSV, or reload a cached pickle -
        if(prepare_data):
            try:
                data = pd.read_csv(train_data)
            except:
                raise Exception("File not found: Make sure you download the dataset from https://www.kaggle.com/danofer/sarcasm/ The data should be kept in main folder")
            training_set,test_set = train_test_split(data,stratify=data[["label"]], test_size=0.1)
            del data
            #Storing for future use across experiments
            test_set.to_csv("test_set.csv",index=False)
            training_set.dropna(subset=["comment"],inplace=True)
            training_set.reset_index(drop=False,inplace=True)
            training_set.rename(columns={"index":"id"},inplace=True)
            sentences = training_set[["id","comment"]]
            labels = training_set[["id","label"]]
            tokenizer = BertTokenizer.from_pretrained('bert-base-uncased', do_lower_case=True)
            # The two-column frames above are immediately overwritten with the
            # raw value arrays actually used below.
            sentences=training_set.comment.values
            labels = training_set.label.values
            train_inputs_ids = []
            training_attention_masks = []
            #Tokenizing the sentences
            for sent in tqdm(sentences):
                encoded_sentences = tokenizer.encode_plus(sent,add_special_tokens = True,max_length = 64,pad_to_max_length = True,
                                            return_attention_mask = True,return_tensors = 'pt',)
                train_inputs_ids.append(encoded_sentences['input_ids'])
                training_attention_masks.append(encoded_sentences['attention_mask'])
            train_inputs_ids = torch.cat(train_inputs_ids, dim=0)
            training_attention_masks = torch.cat(training_attention_masks, dim=0)
            labels = torch.tensor(labels)
            #save data for future use
            if(save_processed_data):
                f = open('processed_data.pckl', 'wb')
                pickle.dump([train_inputs_ids,training_attention_masks,labels], f)
                f.close()
        else:
            #lOAD THE DATA
            f = open('processed_data.pckl', 'rb')
            input_processed_data = pickle.load(f)
            f.close()
            train_inputs_ids = input_processed_data[0]
            training_attention_masks = input_processed_data[1]
            labels = input_processed_data[2]
        print("Data Preperation Done")
        # ---- dataloaders: 90/10 train/validation split ----------------------
        main_dataset = TensorDataset(train_inputs_ids, training_attention_masks, labels)
        train_size = int(0.9 * len(main_dataset))
        val_size = len(main_dataset) - train_size
        train_dataset, validation_data = random_split(main_dataset, [train_size, val_size])
        train_dataloader = DataLoader(train_dataset,sampler = RandomSampler(train_dataset),batch_size = batch_size,)
        validation_dataloader = DataLoader(validation_data,sampler = SequentialSampler(validation_data), batch_size = batch_size,)
        config = BertConfig.from_pretrained("bert-base-uncased", # Use the 12-layer BERT model, with an uncased vocab.
                                            num_labels = 2, # The number of output labels--2 for binary classification.
                                            output_attentions = False, # Whether the model returns attentions weights.
                                            output_hidden_states = False, # Whether the model returns all hidden-states.
                                            )
        if(prefix_tuning):
            config.prefix_length = prefix_length
            # NOTE(review): self-assignment below is a no-op — confirm intent.
            phrase_for_init = phrase_for_init
        # ---- model construction + per-mode parameter freezing ---------------
        if prefix_tuning:
            model = SARCBertClassifier(config)
            model.update_network_sarc(2,device,freeze_bert_layers=True)
            model.to(device)
            if(tuning_mode == "prefix_bottom_two_layers"):
                for n,p in model.named_parameters():
                    if(n=="prefix_embeddings.weight" or "bert.encoder.layer.0." in n or "bert.encoder.layer.1." in n or n=="classifier.weight" or n=="classifier.bias"):
                        p.requires_grad = True
                    else:
                        p.requires_grad = False
                    if p.requires_grad:
                        print("Tuning:",n)
            elif(tuning_mode == "prefix_top_two_layers"):
                for n,p in model.named_parameters():
                    if(n=="prefix_embeddings.weight" or "bert.encoder.layer.10." in n or "bert.encoder.layer.11." in n or n=="classifier.weight" or n=="classifier.bias"):
                        p.requires_grad = True
                    else:
                        p.requires_grad = False
                    if p.requires_grad:
                        print("Tuning:",n)
            elif(tuning_mode == "prefix_bert_embedding_layer"):
                for n,p in model.named_parameters():
                    if(n=="prefix_embeddings.weight" or "bert.embeddings.word_embeddings.weight" in n or n=="classifier.weight" or n=="classifier.bias"):
                        p.requires_grad = True
                    else:
                        p.requires_grad = False
                    if p.requires_grad:
                        print("Tuning:",n)
            elif(tuning_mode == "prefix_custom_initializaition"):
                # Rebuild the model so the prefix rows start from the phrase
                # embedding instead of random init.
                del model
                custom_embedding = get_bert_embedding(phrase_for_init)
                model = SARCBertClassifier(config)
                model.update_network_sarc(2,device,freeze_bert_layers=True,custom_embedding=True,custom_embedding_vector=custom_embedding)
                for n,p in model.named_parameters():
                    if(n=="prefix_embeddings.weight" or n=="classifier.weight" or n=="classifier.bias"):
                        p.requires_grad = True
                    else:
                        p.requires_grad = False
                    if p.requires_grad:
                        print("Tuning:",n)
            elif(tuning_mode == "prefix_random_initializaition"):
                for n,p in model.named_parameters():
                    # NOTE(review): `n=="prefix_embeddings.weight" in n` compares a
                    # bool to `in n` — works only by accident; confirm intent.
                    if(n=="prefix_embeddings.weight" in n or n=="classifier.weight" or n=="classifier.bias"):
                        p.requires_grad = True
                    else:
                        p.requires_grad = False
                    if p.requires_grad:
                        print("Tuning:",n)
            else:
                raise Exception("Exception: Unknow Experiment")
        else:
            model = BertForSequenceClassification(config)
            if(tuning_mode == "noprefix_top_two_layers"):
                for n,p in model.named_parameters():
                    if("bert.encoder.layer.10." in n or "bert.encoder.layer.11." in n or n=="classifier.weight" or n=="classifier.bias"):
                        p.requires_grad = True
                    else:
                        p.requires_grad = False
                    if p.requires_grad:
                        print("Tuning:",n)
            elif(tuning_mode == "noprefix_bottom_two_layers"):
                for n,p in model.named_parameters():
                    if("bert.encoder.layer.0." in n or "bert.encoder.layer.1." in n or n=="classifier.weight" or n=="classifier.bias"):
                        p.requires_grad = True
                    else:
                        p.requires_grad = False
                    if p.requires_grad:
                        print("Tuning:",n)
            elif(tuning_mode == "noprefix_embedding_layer_update"):
                for n,p in model.named_parameters():
                    if("bert.embeddings.word_embeddings.weight" in n or n=="classifier.weight" or n=="classifier.bias"):
                        p.requires_grad = True
                    else:
                        p.requires_grad = False
                    if p.requires_grad:
                        print("Tuning:",n)
            elif(tuning_mode=="baseline_finetune"):
                for n,p in model.named_parameters():
                    p.requires_grad = True
                    if p.requires_grad:
                        print("Tuning:",n)
            elif(tuning_mode=="baseline_lightweight_finetune"):
                for n,p in model.named_parameters():
                    if(n=="classifier.weight" or n=="classifier.bias"):
                        p.requires_grad = True
                    if p.requires_grad:
                        print("Tuning:",n)
            else:
                raise Exception("Exception: Unknow Experiment")
        if(use_multi_gpu and torch.cuda.device_count()>1):
            print("Parallelizing Model")
            model = nn.DataParallel(model)
        model.to(device)
        # NOTE(review): unconditional .cuda() will raise on CPU-only machines
        # even though `device` may be "cpu" — confirm intended.
        model = model.cuda()
        print("Model Initialization Done")
        optimizer = AdamW(model.parameters(),lr = learning_rate,eps = 1e-8)
        total_steps = len(train_dataloader) * epochs
        scheduler = get_linear_schedule_with_warmup(optimizer,num_warmup_steps = 0,num_training_steps = total_steps)
        print("Optimizer setup done")
        seed_val = 42
        random.seed(seed_val)
        np.random.seed(seed_val)
        torch.manual_seed(seed_val)
        torch.cuda.manual_seed_all(seed_val)
        training_stats = []
        total_t0 = time.time()
        # ---- training loop --------------------------------------------------
        for epoch_i in range(0, epochs):
            t0 = time.time()
            batch_train_loss = 0
            model.train()
            total_training_loss = 0
            correct_preds, total_predictions = 0, 0
            generator_tqdm = tqdm(train_dataloader)
            # Checkpointing happens at the START of each epoch, i.e. before
            # this epoch's updates are applied.
            if(checkpoint):
                output_dir = model_save_directory+"_checkpoint/"
                if not os.path.exists(output_dir):
                    os.makedirs(output_dir)
                print("Saving model to %s" % output_dir)
                model_to_save = model.module if hasattr(model, 'module') else model  # Take care of distributed/parallel training
                model_to_save.save_pretrained(output_dir)
                if(analyze_tokens):
                    with open(output_dir+"prefix_embed_matching_words_epoch_"+str(epoch_i)+".json", 'w') as fp:
                        json.dump(model.closest_matching_bert_model(), fp)
            for step, batch in enumerate(generator_tqdm):
                # NOTE(review): guards on prefix_length (not prefix_tuning) —
                # see note near the top; unbound in non-prefix modes.
                if(prefix_length):
                    # Prepend prefix token ids [0..prefix_length) and all-ones mask.
                    b_input_ids = torch.cat((torch.arange(0, config.prefix_length).expand(batch[0].shape[0], config.prefix_length),batch[0]),1).to(device)
                    b_input_mask = torch.cat((torch.ones(config.prefix_length).expand(batch[1].shape[0], config.prefix_length),batch[1]),1).to(device)
                else:
                    b_input_ids = batch[0].to(device)
                    b_input_mask = batch[1].to(device)
                b_labels = batch[2].to(device)
                model.zero_grad()
                result = model(b_input_ids,
                               token_type_ids=None,
                               attention_mask=b_input_mask,
                               labels=b_labels,
                               return_dict=True)
                loss = result.loss
                logits = result.logits
                batch_train_loss += loss.mean()
                loss.mean().backward()
                torch.nn.utils.clip_grad_norm_(model.parameters(), 1.0)
                optimizer.step()
                optimizer.zero_grad()
                scheduler.step()
                batch_predictions = np.argmax(nn.Softmax(dim=1)(logits).detach().cpu().numpy(), axis=-1)
                correct_preds += (batch_predictions == b_labels.detach().cpu().numpy()).sum()
                total_predictions += b_labels.shape[0]
                description = ("Average training loss: %.2f Accuracy: %.2f Lable sum: %2f"
                               % (batch_train_loss/(step+1), correct_preds/total_predictions,batch_predictions.sum()))
                generator_tqdm.set_description(description, refresh=False)
            train_loss = batch_train_loss / len(train_dataloader)
            training_time = format_time(time.time() - t0)
            # ---- per-epoch validation ---------------------------------------
            t0 = time.time()
            model.eval()
            total_eval_accuracy = 0
            total_eval_loss = 0
            nb_eval_steps = 0
            for batch in tqdm(validation_dataloader):
                if(prefix_tuning):
                    b_input_ids = torch.cat((torch.arange(0, config.prefix_length).expand(batch[0].shape[0], config.prefix_length),batch[0]),1).to(device)
                    b_input_mask = torch.cat((torch.ones(config.prefix_length).expand(batch[1].shape[0], config.prefix_length),batch[1]),1).to(device)
                else:
                    b_input_ids=batch[0].to(device)
                    b_input_mask=batch[1].to(device)
                b_labels = batch[2].to(device)
                with torch.no_grad():
                    result = model(b_input_ids,
                                   token_type_ids=None,
                                   attention_mask=b_input_mask,
                                   labels=b_labels,
                                   return_dict=True)
                loss = result.loss
                logits = result.logits
                total_eval_loss += loss.mean()
                logits = logits.detach().cpu().numpy()
                label_ids = b_labels.to('cpu').numpy()
                total_eval_accuracy += compute_accuracy(logits, label_ids)
            avg_val_accuracy = total_eval_accuracy / len(validation_dataloader)
            avg_val_loss = total_eval_loss / len(validation_dataloader)
            validation_time = format_time(time.time() - t0)
            training_stats.append(
                {
                    'epoch': epoch_i + 1,
                    'Training Loss': train_loss,
                    'Valid. Loss': avg_val_loss,
                    'Valid. Accur.': avg_val_accuracy,
                    'Training Time': training_time,
                    'Validation Time': validation_time
                }
            )
        # NOTE(review): .format() arg is dropped — the string has no {} slot.
        print("Training Time:".format(format_time(time.time()-total_t0)))
        if(not prepare_data):
            tokenizer = BertTokenizer.from_pretrained('bert-base-uncased', do_lower_case=True)
        if(save_model):
            output_dir = model_save_directory
            if not os.path.exists(output_dir):
                os.makedirs(output_dir)
            print("Saving model to %s" % output_dir)
            model_to_save = model.module if hasattr(model, 'module') else model
            model_to_save.save_pretrained(output_dir)
            tokenizer.save_pretrained(output_dir)
            pd.set_option('precision', 2)
            df_stats = pd.DataFrame(data=training_stats)
            df_stats = df_stats.set_index('epoch')
            df_stats.to_csv(output_dir+"/perfomance_stats.csv",index=False)
            print("Models Saved")
    else:
        # ---- evaluation-only path: load a previously saved model ------------
        if(not model_directory_to_use):
            raise Exception("Must give the folder location of the pretrained model during evaluation")
        if(prefix_tuning):
            print("Loading Model from: " , model_directory_to_use)
            output_dir = model_directory_to_use
            config = BertConfig.from_pretrained(output_dir,output_hidden_states=True, output_attentions=True)
            config.user_embeddings=False
            config.prefix_length = prefix_length
            model = SARCBertClassifier.from_pretrained(output_dir,config=config,)
        else:
            model = BertForSequenceClassification.from_pretrained(model_directory_to_use)
        model.to(device)
        # NOTE(review): prefix_length is unbound here when prefix_tuning is off.
        model.prefix_length = prefix_length
        model.run_device = device
        tokenizer = BertTokenizer.from_pretrained('bert-base-uncased', do_lower_case=True)
    # ---- test-set evaluation (both paths) -----------------------------------
    if(not prepare_data):
        test_set = pd.read_csv(test_file)
    print("Started Testing")
    sentences = test_set.dropna(subset=["comment"]).comment.values
    labels = test_set.dropna(subset=["comment"]).label.values
    test_inputs_ids = []
    for sent in sentences:
        encoded_sent = tokenizer.encode(sent,add_special_tokens = True,)
        test_inputs_ids.append(encoded_sent)
    test_inputs_ids = pad_sequences(test_inputs_ids, maxlen=64,
                              dtype="long", truncating="post", padding="post")
    test_attention_masks = []
    for seq in tqdm(test_inputs_ids):
        seq_mask = [float(i>0) for i in seq]
        test_attention_masks.append(seq_mask)
    prediction_inputs = torch.tensor(test_inputs_ids)
    prediction_masks = torch.tensor(test_attention_masks)
    prediction_labels = torch.tensor(labels)
    batch_size = 32
    prediction_data = TensorDataset(prediction_inputs, prediction_masks, prediction_labels)
    prediction_sampler = SequentialSampler(prediction_data)
    prediction_dataloader = DataLoader(prediction_data, sampler=prediction_sampler, batch_size=batch_size)
    model.eval()
    predictions , true_labels = [], []
    for batch in tqdm(prediction_dataloader):
        batch = tuple(t.to(device) for t in batch)
        b_input_ids, b_input_mask, b_labels = batch
        if(prefix_tuning):
            # NOTE(review): prefix length hard-coded to 5 here, unlike the
            # configurable value used during training — confirm.
            b_input_ids = torch.cat((torch.arange(0, 5).expand(b_input_ids.shape[0], 5).to(device),b_input_ids),1).to(device)
            b_input_mask = torch.cat((torch.ones(5).expand(b_input_mask.shape[0], 5).to(device),b_input_mask),1).to(device)
        with torch.no_grad():
            outputs = model(b_input_ids,token_type_ids=None,attention_mask=b_input_mask,labels=b_labels,return_dict=True)
        logits = outputs.logits
        logits = logits.detach().cpu().numpy()
        label_ids = b_labels.to('cpu').numpy()
        predictions.append(logits)
        true_labels.append(label_ids)
    preds=[]
    actuals=[]
    for i in range(len(true_labels)):
        preds.append(list(np.argmax(predictions[i], axis=1).flatten()))
        actuals.append(list(true_labels[i]))
    preds = list(itertools.chain(*preds))
    actuals = list(itertools.chain(*actuals))
    test_metrics = {}
    test_metrics['accuracy_score'] = accuracy_score(actuals,preds)*100
    test_metrics['f1_score'] = f1_score(actuals, preds, average='macro')
    confm = confusion_matrix(actuals, preds,normalize='true')
    columns = ["Sarcastic","Normal"]
    df_cm = DataFrame(confm, index=columns, columns=columns)
    ax = sn.heatmap(df_cm, cmap='Oranges', annot=True)
    test_metrics["confusion_matrix"] = df_cm.to_dict('list')
    # if(save_processed_data):
    # NOTE(review): output_dir may be unbound on the evaluate+noprefix path
    # (and on train runs with save_model/checkpoint off) — confirm.
    with open(output_dir+'/test_metrics.json', 'w') as fp:
        json.dump(test_metrics, fp)
if __name__ == '__main__':
    # CLI entry point: every knob used by main() is declared here.
    parser = argparse.ArgumentParser(description='Train Bert Model - Prefix Tuning')
    # General training arguments
    parser.add_argument("--train_data",type=str,help="training dataset file that have to be used",default="sample_train.csv")
    # NOTE(review): store_true with default=True can never become False from
    # the command line — confirm intended.
    parser.add_argument('--prepare_data', action="store_true", default=True,
                        help='if passed, will prepare data.')
    parser.add_argument('--save_processed_data', action="store_true", default=False,
                        help='if passed, save the processed data.')
    parser.add_argument('--batch_size', type=int, help='batch_size ', default=64)
    parser.add_argument('--custom', action="store_true", default=True,
                        help='if passed, use no custom.')
    parser.add_argument('--epochs', type=int, help='epochs ', default=4)
    parser.add_argument('--learning_rate', type=float, help='learning_rate ', default=2e-5)
    parser.add_argument('--save_model', action="store_true", default=True,
                        help='if passed, save model.')
    parser.add_argument('--prefix_length', type=int, help='number of prefix tokens ', default=5)
    parser.add_argument('--model_save_directory', type=str,
                        help='save the model to', default="model_store/")
    parser.add_argument("--tuning_mode", type=str,
                        help='Name of the tuning_mode', default="prefix_random_initializaition",choices=["prefix_bottom_two_layers","prefix_top_two_layers",
                        "prefix_bert_embedding_layer","prefix_custom_initializaition","prefix_random_initializaition",
                        "noprefix_top_two_layers","noprefix_bottom_two_layers","baseline_finetune",
                        "baseline_lightweight_finetune","noprefix_embedding_layer_update"])
    # NOTE(review): type=bool treats ANY non-empty string (incl. "False") as
    # True — the usual argparse bool pitfall; confirm before relying on it.
    parser.add_argument("--use_multi_gpu",type=bool,help="Use Multiple GPUs",default=False)
    # NOTE(review): type=str with default=False mixes types — confirm.
    parser.add_argument("--phrase_for_init",type=str,help="If using custom initialization this will be used to initialize the prefix tokens",default=False)
    # NOTE(review): declared type=str but default=True (bool) — confirm.
    parser.add_argument("--checkpoint",type=str,help="to checkpoint the model at each epoch",default=True)
    parser.add_argument("--analyze_tokens",type=bool,help="Closest words in bert vocab in each epoch are extracted",default=False)
    parser.add_argument("--test_file",type=str,help="test file that have to be used",default="sample_test.csv")
    parser.add_argument("--evaluate",action="store_true",help="To run the script in Evaluation mode",default=False)
    parser.add_argument("--saved_model_location",type=str,help="Loaction of the stored model, must be used when only evaluation is called")
    args = parser.parse_args()
    main(args)
|
{"/distributed_training.py": ["/SARCBertClassifier.py"], "/baselines/baseline_code.py": ["/SARCBertClassifier.py"], "/train.py": ["/SARCBertClassifier.py"]}
|
6,444
|
ZhangYet/vulcan
|
refs/heads/master
|
/vulcan/data/jp_word.py
|
STAR = '☆'  # decoration marker stripped from lesson numbers in Word.clean_lesson
class Word:
    """A single Japanese vocabulary entry loaded from the lesson TSV file.

    Attributes: lesson (lesson label, may carry a STAR marker), word (kanji
    form), gana (kana reading), tone (pitch accent), attr (part of speech),
    chinese (Chinese translation).
    """
    def __str__(self):
        return '{0.lesson}, {0.word}, {0.gana}, {0.tone}, {0.attr}, {0.chinese}'.format(self)
    def __init__(self, lesson: str, word: str, gana: str, tone: str, attr: str, chinese):
        self.lesson = lesson
        self.word = word
        self.gana = gana
        self.tone = tone
        self.attr = attr
        self.chinese = chinese
    def to_chinese(self):
        """Render the Chinese translation as a labelled card field."""
        return '汉语: {}'.format(self.chinese)
    def to_word(self):
        """Render the Japanese word as a labelled card field."""
        return '日文: {}'.format(self.word)
    def to_gana(self):
        """Render the kana reading as a labelled card field."""
        return '假名: {}'.format(self.gana)
    def clean_lesson(self) -> str:
        """Zero-pad single-digit lesson labels so they sort lexicographically.

        The star marker and whitespace are stripped only for the numeric
        check; the padded value keeps the ORIGINAL label (including any star).
        Narrowed from a bare `except:` to ValueError so only non-numeric
        labels take the fallback path (which prints the label and implicitly
        returns None — callers must tolerate that; TODO confirm).
        """
        lesson_num = self.lesson.strip().replace(STAR, '')
        try:
            if int(lesson_num) < 10:
                return '0' + self.lesson
            return self.lesson
        except ValueError:
            print(self.lesson)
def load_from_file(file_path: str) -> [Word]:
    """Parse a tab-separated vocabulary file into a list of Word entries.

    Rows with fewer than six tab-separated fields are skipped; each field is
    whitespace-stripped.  When run as a script, each parsed entry is printed.
    """
    words = []
    with open(file_path) as data:
        for line in data:
            fields = line.split('\t')
            if len(fields) < 6:
                continue
            entry = Word(fields[0].strip(),
                         fields[1].strip(),
                         fields[2].strip(),
                         fields[3].strip(),
                         fields[4].strip(),
                         fields[5].strip())
            if __name__ == '__main__':
                print(entry)
            words.append(entry)
    return words
if __name__ == '__main__':
    # Smoke test: parse the cleaned CSV and print every entry (see the
    # __name__ guard inside load_from_file).
    load_from_file('./clean_jp.csv')
|
{"/main.py": ["/vulcan/data/jp_word.py", "/vulcan/anki.py"], "/vulcan/anki.py": ["/vulcan/data/jp_word.py"]}
|
6,445
|
ZhangYet/vulcan
|
refs/heads/master
|
/main.py
|
from vulcan.data.jp_word import load_from_file
from vulcan.anki import Vulcan
if __name__ == '__main__':
    # Build an Anki deck from the bundled vocabulary CSV: parse each row into
    # a Word, add both study directions to the deck, then write the .apkg
    # package (named after the textbook) to disk.
    word_list = load_from_file('vulcan/data/new_japanese_1.csv')
    v = Vulcan('新编日语(上海教育出版社)第一册')
    for word in word_list:
        v.add(word)
    v.save()
|
{"/main.py": ["/vulcan/data/jp_word.py", "/vulcan/anki.py"], "/vulcan/anki.py": ["/vulcan/data/jp_word.py"]}
|
6,446
|
ZhangYet/vulcan
|
refs/heads/master
|
/vulcan/anki.py
|
import genanki
import datetime
from vulcan.data.jp_word import Word
CSS = '''
div.front, div.back {
text-align:center;
font-family: Courier;
font-size: 30px;
}
span.small {font-size: 15px;}
span.normal {font-size: 30px;}
span.large {font-size: 60px;}
span.italic {font-style:italic;}
'''
def gen_id() -> int:
    """Unique-ish integer id derived from the current Unix time (seconds)."""
    now = datetime.datetime.now()
    return int(now.timestamp())
class Vulcan:
    """Assembles a genanki deck of Japanese vocabulary cards.

    Each Word produces two notes: word -> reading+translation, and
    reading -> word+translation.  save() writes '<name>.apkg' to the CWD.

    Fixes vs. previous revision: the qfmt template had a truncated '<br/'
    tag and the afmt template carried an orphan '</span>' with no opening
    tag — both repaired so the rendered card HTML is well-formed.
    """
    def __init__(self, name: str):
        self.name = name  # used as the output .apkg file stem
        # NOTE(review): gen_id() has one-second resolution, so the model and
        # deck created back-to-back will usually share an id — confirm that
        # genanki tolerates this.
        self.model = genanki.Model(gen_id(),
                                   '新编日语(上海外语教育出版社)第一册',
                                   fields=[
                                       {'name': 'Question'},
                                       {'name': 'Answer1'},
                                       {'name': 'Answer2'},
                                       {'name': 'Tone'},
                                       {'name': 'Attr'},
                                       {'name': 'Lesson'},
                                   ],
                                   templates=[
                                       {
                                           'name': 'new_japanese',
                                           'qfmt': '''
                                           <div class="front">
                                            <span class="large japanese">{{Question}}</span>
                                            <br/>
                                           </div>
                                           ''',
                                           'afmt': '''
                                           <div class="back">
                                            <span class="large">{{Answer1}}</span>
                                            <span class="large">{{Answer2}}</span>
                                            <hr/>
                                            声调:{{Tone}}, 词性:{{Attr}}, 课文:{{Lesson}}
                                            <br/>
                                           </div>
                                           ''',
                                       },
                                   ],
                                   css=CSS)
        self.deck = genanki.Deck(gen_id(),
                                 name='新编日语')
    def add(self, word: Word):
        """Add both study directions for *word* to the deck."""
        word_front = genanki.Note(model=self.model,
                                  fields=[
                                      word.to_word(), word.to_gana(), word.to_chinese(),
                                      word.tone, word.attr, word.clean_lesson(),
                                  ])
        self.deck.add_note(word_front)
        gana_front = genanki.Note(model=self.model,
                                  fields=[
                                      word.to_gana(), word.to_word(), word.to_chinese(),
                                      word.tone, word.attr, word.clean_lesson(),
                                  ])
        self.deck.add_note(gana_front)
    def save(self):
        """Write the accumulated deck to '<name>.apkg'."""
        genanki.Package(self.deck).write_to_file(self.name + '.apkg')
|
{"/main.py": ["/vulcan/data/jp_word.py", "/vulcan/anki.py"], "/vulcan/anki.py": ["/vulcan/data/jp_word.py"]}
|
6,450
|
AKkkAha/API_WITH_EXCEL
|
refs/heads/master
|
/Execute.py
|
# -*- coding:utf-8 -*-
import xlrd
import os
import json
import glob
import config
import re
import HTTP_API
import time
from parse_dict import *
import logger
import sys
from HTML import *
reload(sys)
sys.setdefaultencoding("utf-8")
# Cross-function mutable state shared by exec_test/api_run/deal_var; reset
# per test sheet inside exec_test.
pre_case_list = []  # row numbers of prerequisite cases already executed
pre_recv = None  # decoded response of the most recent prerequisite case
pre_var = config.custom_var  # NOTE(review): binds (not copies) config.custom_var — later writes mutate the config object; confirm intended
titledict = {}  # column title -> column index for the current sheet
logr = None  # per-sheet result logger
logl = None  # per-sheet debug logger
def exec_test(times=1):
    """Run every configured test sheet *times* rounds.

    Opens the single .xls/.xlsx workbook next to this script, and for each
    sheet listed in config.test_module resets the shared globals, creates the
    per-sheet loggers, expands the case spec via get_case(), and executes each
    case row with api_run().
    """
    global pre_case_list, pre_recv, pre_var, titledict, logr, logl
    #add by zx---begin
    # global my_token
    # my_token = None
    # add by zx---end
    # Assumes exactly one workbook sits beside the script; IndexError if none.
    filename = glob.glob(sys.path[0] + os.sep + '*.xls*')[0]
    wb = xlrd.open_workbook(filename)
    for num in range(times):
        for testsheet in config.test_module.keys():
            # Fresh shared state for each sheet.
            pre_case_list = []
            pre_recv = None
            pre_var = config.custom_var
            titledict = {}
            logr = logger.rstcls.initial(testsheet + "_result")
            logl = logger.logcls(testsheet)
            logl.log("Case module : " + testsheet)
            logl.log("Test Round : Round " + str(num + 1))
            logr.log("Case module : " + testsheet)
            logr.log("Test Round : Round " + str(num + 1))
            table = wb.sheet_by_name(testsheet)
            caselist = get_case(config.test_module[testsheet], table)
            for case_num in caselist:
                api_run(table, int(case_num))
            logl.log("Round " + str(num + 1) + " finished")
            logr.log("Round " + str(num + 1) + " finished")
def get_title_index(title_list):
    """Populate the module-level titledict with UTF-8-encoded column title ->
    index of its FIRST occurrence in *title_list*, and return it."""
    global titledict
    # .index() (not enumerate) keeps the first-occurrence semantics for
    # duplicate titles.
    titledict.update({title.encode("utf-8"): title_list.index(title)
                      for title in title_list})
    return titledict
def get_case(sheet_list, table):
    """Expand a case spec into a flat list of sheet row numbers.

    *sheet_list* mixes plain ints and "lo-hi" range strings (inclusive).
    An empty/None spec selects every data row of *table* (rows 1..nrows).
    """
    if not sheet_list:
        return [row + 1 for row in range(table.nrows)]
    case_list = []
    for case_srl in sheet_list:
        if type(case_srl) is int:
            case_list.append(case_srl)
        else:
            lo, hi = case_srl.split('-')
            case_list.extend(range(int(lo), int(hi) + 1))
    return case_list
def make_headers_json(header_str):
    """Convert headers copied from the Chrome devtools console into a JSON string.

    :param header_str: newline-separated "Name: value" lines (as copied from
        Chrome), or empty/None for the default form-encoded headers.
    :return: JSON object string mapping header names to values.

    Previous revision built a dict literal as a string and eval()'d it; the
    trailing slice was off by one (``all_str[:-3]`` also removed the closing
    quote), so eval raised SyntaxError on any non-empty input.  Building the
    dict directly fixes that and drops the eval entirely.
    """
    if header_str:
        headers_dict = {}
        for each in header_str.split('\n'):
            # Drop the first space in the line (the one after the colon).
            each = each.replace(' ', '', 1)
            # Split on the first colon only: values may themselves contain ':'.
            key, value = each.split(':', 1)
            headers_dict[key] = value
        headers = json.dumps(headers_dict)
    else:
        headers = '{"Content-Type": "application/x-www-form-urlencoded", "fronttype": "scp-admin-ui"}'
    return headers
def api_run(table, case_num):
    """Execute one test case row: build the request, send it, check the result.

    Reads the row *case_num* of *table*, substitutes ${...} variables (which
    may recursively execute prerequisite cases via deal_var), performs the
    GET/POST, verifies the response with check_result, and stashes any
    REMAIN_PARAM values into the shared pre_var dict for later cases.
    Returns the raw response string.
    """
    print "run case " + str(case_num)
    global pre_case_list, pre_recv, pre_var, logr, logl
    global titledict
    # NOTE(review): my_token is never assigned while the zx blocks below stay
    # commented out — this global declaration is currently inert.
    global my_token
    if not titledict:
        # Lazily index the header row (row 0) on first use.
        titledict = get_title_index(table.row_values(0))
    caseinfo = table.row_values(case_num)
    url_addr = caseinfo[titledict["URL_ADDR"]]
    url_addr = deal_var(url_addr, caseinfo, table)
    url = caseinfo[titledict["域名IP及端口"]] + url_addr
    msg = caseinfo[titledict["REQUEST_MESSAGE"]]
    try:
        msg_loads = json.loads(msg)
    except:
        msg_loads = None
    # NOTE(review): both branches are identical — the dict/non-dict
    # distinction has no effect here; confirm whether it was meant to differ.
    if type(msg_loads) is dict:
        msg = deal_var(str(msg), caseinfo, table)
    else:
        msg = deal_var(str(msg), caseinfo, table)
    http_test = HTTP_API.HTTP_Cls(table.name)
    # add by zx---begin
    # if my_token:
    #     http_test.headers["authorization"] = my_token
    #     print("header is set by token={}".format(http_test.headers))
    # if case_num in (4,5):
    #     http_test.headers["Content-Type"] = "application/json;charset=UTF-8"
    #     print("header is set by case {0}={1}".format(case_num, http_test.headers))
    # add by zx---end
    headers = caseinfo[titledict["HEADERS"]]
    headers = make_headers_json(headers.encode('utf-8'))
    headers = json.loads(deal_var(headers, caseinfo, table))
    if caseinfo[titledict["请求方法"]].upper() == "GET":
        recv_msg = http_test.get_msg(url, msg, headers)
    else:
        recv_msg = http_test.post_msg(url, msg, headers)
    #add by zx---begin
    # if "token" in recv_msg:
    #     dict_tmp = eval(recv_msg)
    #     if "data" in dict_tmp and "token" in dict_tmp["data"]:
    #         my_token = dict_tmp["data"]["token"]
    #         print("my_token set to:{}".format(my_token))
    #add by zx---end
    # Record this case so prerequisite resolution won't re-run it.
    if int(case_num) not in pre_case_list:
        pre_case_list.append(int(case_num))
    # check_result returns None on success, or a description of the mismatch.
    check_flag = check_result(recv_msg, caseinfo)
    if check_flag is None:
        print "用例 PASS %s" % caseinfo[titledict["用例标题"]]
        logr.log("用例 PASS %s %s" % (table.name, caseinfo[titledict["用例标题"]]))
        logl.debug("用例 PASS %s" % caseinfo[titledict["用例标题"]])
    else:
        print "用例 FAIL %s fail_result: %s" % (caseinfo[titledict["用例标题"]], str(check_flag))
        logr.log("用例 FAIL %s %s fail_result: %s" % (table.name, caseinfo[titledict["用例标题"]], str(check_flag)))
        logl.debug("用例 FAIL %s fail_result: %s" % (caseinfo[titledict["用例标题"]], str(check_flag)))
        # print "check_failed: " + str(check_flag)
    try:
        recv_msg = json.loads(recv_msg)
        pre_recv = recv_msg
    except:
        pass
    # Persist whitespace-separated REMAIN_PARAM values for later ${var} use.
    if caseinfo[titledict["REMAIN_PARAM"]]:
        remain_param_list = caseinfo[titledict["REMAIN_PARAM"]].split()
        for remain_param in remain_param_list:
            remain_param = remain_param.strip()
            remain_value = find_from_dict(remain_param, recv_msg)
            pre_var[remain_param+'_'+str(case_num)] = remain_value
    print "pre_var"
    print pre_var
    print "pre_case_list"
    print pre_case_list
    return recv_msg
# def deal_var_dict(msg, msg_loads, caseinfo, table):
# global pre_recv
# var_list = re.findall(r'".*?":\s+?"\${.*?}"', msg)
# key_list = []
# value_list = []
# if var_list:
# for item in var_list:
# value_list.append(item.split("${")[-1].strip('}"'))
# key_list.append(item.split("${")[0].strip('"').strip().strip(':').strip('"'))
# if caseinfo[titledict["前置条件"]]: # 表格内多个前置条件用空格隔开
# for pre_case in str(caseinfo[titledict["前置条件"]]).split():
# pre_case = int(float(pre_case))
# if pre_case in pre_case_list:
# pass
# else:
# pre_case_list.append(pre_case)
# pre_recv = api_run(table, pre_case)
# for pre_condition in value_list:
# if pre_condition not in pre_var.keys():
# # pre_var[pre_condition] = Check(pre_condition, msg_loads)
# pre_var[pre_condition] = eval("pre_recv" + search_dict(pre_condition, pre_recv))
# for var in value_list:
# var_key = key_list[value_list.index(var)]
# if var == "timestamp":
# exec ("msg_loads" + search_dict(var_key, msg_loads) + "=" + time.time())
# else:
# exec ("msg_loads" + search_dict(var_key, msg_loads) + "='" + str(pre_var[var]) + "'")
# else:
# if caseinfo[titledict["前置条件"]]:
# for pre_case in str(caseinfo[titledict["前置条件"]]).split():
# pre_case = int(float(pre_case))
# if pre_case in pre_case_list:
# pass
# else:
# pre_case_list.append(pre_case)
# pre_recv = api_run(table, pre_case)
# return msg_loads
def deal_var(msg, caseinfo, table):
    """Resolve ${name} placeholders in *msg*, running prerequisite cases first.

    The 前置条件 ("precondition") cell lists space-separated case numbers to
    execute (via api_run) before substitution; already-run cases are skipped.
    Unknown placeholder values are pulled out of the latest prerequisite
    response (pre_recv) by path lookup.  Returns the substituted string.
    """
    global pre_recv
    var_list = re.findall(r'\${(.*?)}', msg)
    if var_list:
        if caseinfo[titledict["前置条件"]]:
            for pre_case in str(caseinfo[titledict["前置条件"]]).split():
                # Cell values come back as floats from xlrd, hence int(float()).
                pre_case = int(float(pre_case))
                if pre_case in pre_case_list:
                    pass
                else:
                    pre_recv = api_run(table, pre_case)
        for pre_condition in var_list:
            if pre_condition not in pre_var.keys():
                # pre_var[pre_condition] = Check(pre_condition, msg_loads)
                # NOTE(review): eval over a constructed access path — only safe
                # while spreadsheet content is trusted.
                pre_var[pre_condition] = eval("pre_recv" + search_dict(pre_condition, pre_recv))
        for var in var_list:
            msg = msg.replace('${' + str(var) + '}', str(pre_var[var]))
    else:
        # No placeholders: still honour preconditions so side effects happen.
        if caseinfo[titledict["前置条件"]]:
            for pre_case in str(caseinfo[titledict["前置条件"]]).split():
                pre_case = int(float(pre_case))
                print("pre_case={}".format(pre_case))
                print("pre_case_list={}".format(pre_case_list))
                if pre_case in pre_case_list:
                    pass
                else:
                    # pre_case_list.append(pre_case)
                    pre_recv = api_run(table, pre_case)
                    print("pre_recv={}".format(pre_recv))
    return str(msg)
def check_result(recv_msg, caseinfo):
    """Compare a response against the row's expected code/results.

    Returns None on success; otherwise a description of the mismatch (the
    wrong code, the list of missing fields, or the raw response).  The nested
    try/except blocks deliberately branch on whether the response and the
    expected-results cell are JSON-decodable.
    """
    try:
        # JSON response path.
        recv_msg = json.loads(recv_msg)
        exp_code = caseinfo[titledict["EXPECTED_CODE"]]
        if exp_code:
            get_code = find_from_dict("code", recv_msg)
            if exp_code != get_code:
                return "code = " + str(get_code)
        if caseinfo[titledict["EXPECTED_RESULTS"]]:
            try:
                # Expected results given as JSON: field-by-field comparison.
                result_dict = json.loads(caseinfo[titledict["EXPECTED_RESULTS"]])
                miss_list = compare_dict(result_dict, recv_msg)
                if miss_list:
                    return miss_list # return the missing/mismatching values
            except:
                result = json.loads(caseinfo[titledict["EXPECTED_RESULTS"]])
                if recv_msg != result:
                    return recv_msg
    except:
        # Non-JSON response path: fall back to plain string comparison.
        if caseinfo[titledict["EXPECTED_RESULTS"]]:
            try:
                json.loads(caseinfo[titledict["EXPECTED_RESULTS"]])
                return recv_msg
            except:
                result = str(caseinfo[titledict["EXPECTED_RESULTS"]])
                if recv_msg != result:
                    return recv_msg
    return None
if __name__ == "__main__":
exec_test()
obj = Html(logger.now + "result")
loglist = []
for module in config.test_module:
logfile = obj.find_new_file(os.path.join(os.getcwd(), "log", module))
loglist.append(logfile)
obj.parse_logfile(loglist)
|
{"/logger.py": ["/config.py"]}
|
6,451
|
AKkkAha/API_WITH_EXCEL
|
refs/heads/master
|
/HTML.py
|
#coding=utf-8
import os
import sys
import re
import collections
import shutil
from os.path import join
import time
from pyh import *
import cPickle as pickle
import config
from platform import system
# from ExcelAccessor import *
reload(sys)
sys.setdefaultencoding('utf8')
sys.path.append('log')
RECORD_TITLE = '测试'
class Html:
    """Builds an HTML test report out of the per-module log files.

    The summary table is persisted via pickle ("cpickle.db") between
    write_page/read_page calls so that rows can be appended across
    invocations.  Python 2 code (print statements, cPickle).
    """
    def __init__(self, recordFileName):
        #print type(recordFileName)
        # Base name of the generated report file (extension added later).
        self.recordFileName = recordFileName
        timestr = time.strftime('%Y-%m-%d %H:%M')
        self.page = PyH(timestr+RECORD_TITLE)
        self.page << h1(timestr+RECORD_TITLE, align='center')
        # Local JS/CSS are inlined later by add_cssjs_to_html().
        self.page.addJS('mode.js')
        self.page.addCSS("http://libs.baidu.com/bootstrap/3.0.3/css/bootstrap.min.css")
        self.page.addJS('http://libs.baidu.com/jquery/2.0.0/jquery.min.js')
        self.page.addJS('http://libs.baidu.com/bootstrap/3.0.3/js/bootstrap.min.js')
        self.page.addCSS('mode.css')
        self.versionNumber=0
        # Overall pass/fail counters across every parsed log file.
        self.passcase_num, self.failcase_num = 0, 0
        self.r = self.page << div(cl="panel panel-default")
        self.r << div(h2("测试概况", align='left'),cl="panel - heading")
        self.r = self.r << div(cl="panel-body")
        # Header table for the per-module summary rows.
        self.t = table(cl="table",body={"width": "80%", "margin": "auto"})
        self.t << tr(td('执行轮数',align="right",width="5%")+td('测试用例组名称',align="left",width="35%")+td('测试总数',align="left",width="10%")+td('成功',align="left",width="10%")+td('失败',align="left",width="10%")+td('执行记录',align="left",width="10%"),id="header_row")
        self.write_page()
        self.page << self.t
        #print type(self.page)
    def write_page(self):
        """Persist the summary table to disk so later runs can extend it."""
        f = open("cpickle.db", "wb")
        pickle.dump(self.t, f)
        f.close()
    def read_page(self):
        """Restore the summary table previously saved by write_page()."""
        f = open("cpickle.db", "rb")
        self.t = pickle.load(f)
    def add_result(self, resultlist):
        """Append the overall totals paragraph; resultlist = [passed, failed]."""
        self.r << p("总共执行用例: %d | 通过:%d | 失败(测试预期结果不对): %d" %(resultlist[0]+resultlist[1],resultlist[0],resultlist[1]))
        self.r << p("用例通过率:{:.2f}%".format(float(resultlist[0])/float((resultlist[0]+resultlist[1]))*100))
    def add_table(self, name, passValue, failValue):
        """Start a result table for one case group; red style when any failure."""
        #self.r=self.page << table(caption="chen",border="1",cl="table1",cellpadding="0",cellspacing="0",align='center',width=1200)
        self.i = self.page << table(cl="table table-bordered",body={"width": "80%", "margin": "auto"})
        #self.r << colgroup()
        #self.r<<colgroup(col(align="left",width="50%")+col(align="right",width="10%")+col(align="right",width="10%")+col(align="right",width="10%")+col(align="right",width="10%"))
        if failValue:
            self.i << tr(td("",align="right",width="5%") + td(name,align="left",width="35%") + td(str(passValue+failValue),align="left",width="10%") + td(str(passValue),align="left",width="10%") + td(str(failValue),align="left",width="10%") + td(a("查看详情",href="javascript:void(0)",onclick="showClassDetail(this.parentNode.parentNode)"),align="left",width="10%"),cl="testclass failClass")
        else:
            self.i << tr(td("",align="right",width="5%") + td(name, align="left", width="35%") + td(str(passValue+failValue),align="left",width="10%") + td(str(passValue),align="left",width="10%") + td(str(failValue),align="left",width="10%") + td(a("查看详情",href="javascript:void(0)",onclick="showClassDetail(this.parentNode.parentNode)"),align="left",width="10%"),cl="testclass passClass")
    def add_tr(self, isPass, name, round, row_list):
        """Append one case row (with a hidden detail popup) to the current table.

        isPass is the "PASS"/"FAIL" string, round the execution round, and
        row_list the pre-built log detail markup.
        """
        self.read_page()
        if isPass == "FAIL":
            self.i << tr(td(round) + td(name, cl="failCase") + td(a("FAIL", cl="popup_link", onfocus="this.blur();", href="javascript:showTestDetail('div_caseRun.%s')" % (name))+div(div(a("[x]",onfocus="this.blur();",onclick="document.getElementById('div_caseRun.%s').style.display = 'none'" %(name)),style="text-align: right; color:red; cursor:pointer;")+p(row_list), id="div_caseRun.%s" %(name), cl='popup_window',style="display: none;")), cl="testcase", id="caseRun.%s" %(name))
        else:
            self.i << tr(td(round) + td(name, cl="passCase") + td(a("PASS", cl="popup_link", onfocus="this.blur();", href="javascript:showTestDetail('div_caseRun.%s')" % (name))+div(div(a("[x]",onfocus="this.blur();",onclick="document.getElementById('div_caseRun.%s').style.display = 'none'" %(name)),style="text-align: right; color:red; cursor:pointer;")+p(row_list), id="div_caseRun.%s" %(name), cl='popup_window',style="display: none;")), cl="testcase", id="caseRun.%s" %(name))
        #self.page << self.r
    def createhtml(self):
        """Write the accumulated page to disk and reset self.page.

        NOTE(review): relies on self.record_path, which is only set inside
        parse_logfile() — calling this first would raise AttributeError.
        """
        #self.page<<self.t
        print "html path:[%s]" % (self.record_path + self.recordFileName+'.html')
        # On Linux the (gbk) file name must be re-encoded to utf-8 first.
        self.recordFileName = self.recordFileName.decode('gbk').encode('utf-8')+".html" if system() == 'Linux' else self.recordFileName + ".html"
        print self.record_path+self.recordFileName
        self.page.printOut(self.record_path+self.recordFileName)
        self.page = None
        self.page = PyH(RECORD_TITLE)
        self.page << h1(RECORD_TITLE, align='center')
    def timespace(self,start,end):
        """Return the elapsed seconds between two "day-HH_MM_SS" stamps.

        When the day part differs, one full day (86400 s) is added —
        assumes at most one midnight rollover.
        """
        day_s=start.split("-")[0]
        time_s=start.split("-")[1]
        day_e=end.split("-")[0]
        time_e=end.split("-")[1]
        #print day_s,time_s,day_e,time_e
        [hour_s, minu_s, sec_s] = time_s.split('_')
        [hour_e, minu_e, sec_e] = time_e.split('_')
        if day_s == day_e:
            spacetime=int(hour_e)*3600+int(minu_e)*60+int(sec_e)-(int(hour_s)*3600+int(minu_s)*60+int(sec_s))
            # print spacetime
        else:
            day_s=day_s.split('-')[-1]
            day_e = day_e.split('-')[-1]
            spacetime = int(hour_e) * 3600 + int(minu_e) * 60 + int(sec_e) + 3600*24 - (int(hour_s) * 3600 + int(minu_s) * 60 + int(sec_s))
        return spacetime
    def parse_entitylog(self, entitylog):
        """Extract (pass_flag, duration_string) from one case's log text.

        Increments the instance pass/fail counters as a side effect.
        """
        result = 1 if entitylog.find(":Pass") != -1 else 0
        if result:
            self.passcase_num += 1
        else:
            self.failcase_num += 1
        # Duration appears after the literal "this case spend" marker.
        index = entitylog.find("this case spend")
        index_end = entitylog[index+17:].find('s')
        time = '%.3f' % (float(entitylog[index+17:][:index_end]))
        time = str(float(time)*1000)+"毫秒"
        #print 'time=',time.decode('gbk').encode('utf-8')
        return result, time
    def add_cssjs_to_html(self):
        """Inline mode.js / mode.css into the generated report.

        Rewrites the file through a .bak copy so the report is
        self-contained (no external local asset references).
        """
        htmlfile = self.record_path + self.recordFileName
        jsfile = 'mode.js'
        cssfile = 'mode.css'
        csstext = open(cssfile, 'r').read()
        jstext = open(jsfile, 'r').read()
        #print type(jstext)
        #print type(csstext)
        line = True
        #with open(htmlfile, "r", encoding="utf-8") as f1, open("%s.bk" % htmlfile, "w", encoding="utf-8") as f2:
        with open(htmlfile, "r") as f1, open("%s.bak" % htmlfile, "w") as f2:
            f2.write(r'<meta http-equiv="Content-Type" content="text/html;charset=utf-8">')
            for line in f1.readlines():
                if 'src="mode.js"' in line:
                    line = line.replace('src="mode.js"', '')
                    line = line.replace('type="text/javascript">', 'type="text/javascript">'+jstext)
                    #print line
                elif 'href="mode.css"' in line:
                    line = '<style type="text/css" media="screen">' + csstext + '</style>'
                    #print line
                f2.write(line)
            f1.close()
            f2.close()
        os.remove(htmlfile)
        os.rename("%s.bak" % htmlfile, htmlfile)
    def parse_logfile(self, logcol):
        """Parse every log file in *logcol* and emit the full report.

        Groups lines per module, counts PASS/FAIL per case, renders the
        failing groups first, then writes and post-processes the HTML.
        """
        __FORMAT = '%Y-%m-%d %H:%M:%S'
        casedict = collections.OrderedDict()
        result_dict = {}
        for logfile in logcol:
            print logfile
            round = []
            postmsg, recvmsg = "", ""
            caseflag, postflag, recvflag = 0, 0, 0
            entitydict = None
            symbol = os.sep
            self.record_path = os.path.join(sys.path[0], "log") + symbol
            # Module name is the parent directory of the log file.
            casemodule = logfile.split(symbol)[-2]
            with open(logfile) as lf:
                for line in lf:
                    if casemodule in casedict:
                        entitydict = casedict[casemodule]
                    else:
                        entitydict = collections.OrderedDict()
                    if "Test Round" in line:
                        round.append(line.strip('\r\n').split(':')[-1])
                    if "post to" in line or "get from" in line:
                        postmsg = line.strip('\r\n').split('- INFO -')[-1]
                        postflag = 1
                    elif "recv :" in line:
                        recvmsg = line.strip('\r\n').split('- INFO -')[-1]
                    elif "DEBUG" in line and postflag == 1:
                        # DEBUG line carries the verdict and case name; the
                        # detail popup is request + response + verdict.
                        conc = line.strip('\r\n').split("- DEBUG -")[-1]
                        result, casename = tuple(conc.split()[1:3])
                        logdetail = postmsg + '<p>' + recvmsg + '<p>' + conc
                        entitylist = [round[-1], result, logdetail]
                        entitydict[casename + round[-1]] = entitylist
                        casedict[casemodule] = entitydict
            pass_num = 0
            fail_num = 0
            for casename, entitylist in casedict[casemodule].items():
                if entitylist[1] == "PASS":
                    pass_num += 1
                    self.passcase_num += 1
                else:
                    fail_num += 1
                    self.failcase_num += 1
            result = 1 if fail_num == 0 else 0
            result_dict[casemodule] = {"result": result, "info": [pass_num, fail_num]}
        self.add_result([self.passcase_num, self.failcase_num])
        # Failing groups are rendered first (their passing rows before
        # failing ones), then the fully-passing groups.
        for resultcase, resultitem in result_dict.items():
            if not resultitem["result"]:
                self.add_table(resultcase, resultitem["info"][0], resultitem["info"][1])
                for entitynameitem, entityrstitem in casedict[resultcase].items():
                    if "FAIL" not in entityrstitem:
                        self.add_tr(entityrstitem[1], entitynameitem, entityrstitem[0], entityrstitem[2])
                for entitynameitem, entityrstitem in casedict[resultcase].items():
                    if "FAIL" in entityrstitem:
                        self.add_tr(entityrstitem[1], entitynameitem, entityrstitem[0], entityrstitem[2])
        for resultcase, resultitem in result_dict.items():
            if resultitem["result"]:
                self.add_table(resultcase, resultitem["info"][0], resultitem["info"][1])
                for entitynameitem, entityrstitem in casedict[resultcase].items():
                    self.add_tr(entityrstitem[1], entitynameitem, entityrstitem[0], entityrstitem[2])
        self.createhtml()
        self.add_cssjs_to_html()
    def find_new_file(self, dir):
        """Return the most recently modified regular file inside *dir*.

        NOTE(review): hard-codes the Windows "\\" separator — on POSIX
        getmtime() receives a bogus path; verify before cross-platform use.
        """
        file_lists = os.listdir(dir)
        file_lists.sort(key=lambda fn: os.path.getmtime(dir + "\\" + fn)
        if not os.path.isdir(dir + "\\" + fn) else 0)
        file = os.path.join(dir, file_lists[-1])
        return file
def make_html():
    """Build the aggregated HTML report from the newest log of every module."""
    report = Html("result")
    newest_logs = [
        report.find_new_file(os.path.join(sys.path[0], "log", module_name))
        for module_name in config.test_module
    ]
    report.parse_logfile(newest_logs)


if __name__ == "__main__":
    make_html()
|
{"/logger.py": ["/config.py"]}
|
6,452
|
AKkkAha/API_WITH_EXCEL
|
refs/heads/master
|
/HTTP_API.py
|
#!/usr/bin/python
# -*- coding:utf-8 -*-
import requests
import json
import logger
# import time
class HTTP_Cls(object):
    """Thin wrapper around requests with per-call file logging (Python 2)."""
    def __init__(self, arg):
        # One log file per module name *arg*.
        self.log = logger.logcls(arg)
        # Last requests.Response, kept for inspection after a call.
        self.r = None
        # application/json;charset=UTF-8
        # application/x-www-form-urlencoded
        # self.headers = {'Content-Type': 'application/x-www-form-urlencoded', 'authorization': 'eyJhbGciOiJSUzUxMiJ9.eyJzdWIiOiJhZG1pbiIsImV4cCI6MTU5Mjg3NzI5NX0.NaFI2zH5ESVdvGJgrNzE63qCWxeWY3ZGnVQJmf7alZjpvPhnITrRFNTv4E6riWLhcWQwHwf_v_p891b1OqM9BcXf3KTkVemDRGPjVxC8zxjiyRc6fEV1ZJ2_aVuHVd2bEzU3wBAiNkLUaEu-DmLsIfPczBPrGJiQ1tT504IgIkA', 'fronttype': 'scp-admin-ui'}
        # login example: data='username=admin&password=YWRtaW4%3D'
    def post_msg(self, url, post_data="", headers=None):
        """POST *post_data* (utf-8 encoded) to *url*; return the response text.

        Echoes request and response to stdout (truncated over 2000 chars)
        and to the log file.
        """
        self.r = requests.post(url=url, data=post_data.encode("utf-8"), headers=headers)
        print "------ post to %s ------: data = %s, headers = %s" % (url, json.dumps(post_data), headers)
        self.log.log("post to %s : json_data = %s, headers = %s" % (url, json.dumps(post_data), headers))
        if len(self.r.text) < 2000:
            print "-------- recv ---------: %s" % self.r.text
        else:
            print "-------- recv ---------: %s" % "get messege successfully but it's too long to show you !"
        self.log.log("recv : %s" % self.r.text)
        # try:
        #     return self.r.json(), self.r.headers
        # except Exception as e:
        #     return e, self.r.headers
        return self.r.text
    def get_msg(self, url, param=None, headers=None):
        """GET *url* with query *param*; return the response text.

        Same stdout/log echoing behaviour as post_msg.
        """
        self.r = requests.get(url=url, params=param, headers=headers)
        print "------- get from %s ------: param = %s, headers = %s" % (url, json.dumps(param), headers)
        # self.r = requests.get(url=url, params=param, headers=headers)
        # print "get from %s ------: param = %s" % (url, json.dumps(param))
        self.log.log("get from %s : param = %s, headers = %s" % (url, json.dumps(param), headers))
        if len(self.r.text) < 2000:
            print "-------- recv ---------: %s" % self.r.text
        else:
            print "-------- recv ---------: %s" % "get messege successfully but it's too long to show you !"
        self.log.log("recv : %s" % self.r.text)
        return self.r.text
        # try:
        #     return self.r.json(), self.r.headers
        # except Exception as e:
        #     if e is ValueError:
        #         return {"code": 200}, self.r.headers
        #     else:
        #         return e, self.r.headers
|
{"/logger.py": ["/config.py"]}
|
6,453
|
AKkkAha/API_WITH_EXCEL
|
refs/heads/master
|
/logger.py
|
import logging
import config
import time
import os
now = time.strftime("%Y-%m-%d-%H_%M_%S", time.localtime(time.time()))
class logcls:
    """Per-module file logger.

    Creates ``log/<arg>/<timestamp>_log.txt`` (directories included) and
    attaches a single DEBUG-level FileHandler to the logger named *arg*.
    """

    def __init__(self, arg):
        # Build the path portably: the original hard-coded "log\\..."
        # Windows separators, which made os.path.split return an empty
        # directory on POSIX and os.makedirs('') raise.
        filename = os.path.join("log", str(arg), now + "_log.txt")
        file_dir = os.path.dirname(filename)
        if not os.path.isdir(file_dir):
            os.makedirs(file_dir)
        logger = logging.getLogger(arg)
        # Only attach a handler the first time this logger name is used,
        # otherwise every line would be written once per instantiation.
        if not logger.handlers:
            logger.setLevel(level=logging.DEBUG)
            handler = logging.FileHandler(filename)
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
            handler.setFormatter(formatter)
            logger.addHandler(handler)
        self.logger = logger

    def log(self, msg):
        """Write *msg* at INFO level (trailing blank line kept for readability)."""
        self.logger.info(str(msg) + '\n')

    def debug(self, msg):
        """Write *msg* at DEBUG level."""
        self.logger.debug(str(msg) + '\n')
class rstcls:
    """Process-wide result logger (crude singleton, INFO level)."""
    _singleton = None
    logger1 = None

    def __init__(self):
        pass

    @staticmethod
    def initial(arg):
        """Initialise the shared result logger, creating log/ if needed.

        Returns the singleton instance; repeated calls reuse the existing
        handler so result lines are not duplicated.
        """
        # Portable path instead of the original hard-coded "log\\" Windows
        # separator (which produced a literal 'log\\...' file on POSIX).
        filename1 = os.path.join("log", now + "_result.txt")
        if 'log' not in os.listdir(os.getcwd()):
            try:
                os.mkdir('log')
            except OSError:
                # Lost a race with another process creating it — fine.
                pass
        if rstcls._singleton is None:
            rstcls._singleton = rstcls()
            logger1 = logging.getLogger(str(arg))
            if not logger1.handlers:
                logger1.setLevel(level=logging.INFO)
                handler = logging.FileHandler(filename1)
                handler.setLevel(logging.INFO)
                formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
                handler.setFormatter(formatter)
                logger1.addHandler(handler)
            rstcls.logger1 = logger1
        return rstcls._singleton

    @staticmethod
    def log(msg):
        """Write *msg* at INFO level through the shared logger.

        Requires initial() to have been called first.
        """
        rstcls.logger1.info(str(msg))
|
{"/logger.py": ["/config.py"]}
|
6,454
|
AKkkAha/API_WITH_EXCEL
|
refs/heads/master
|
/parse_dict.py
|
# -*- coding:utf-8 -*-
def search_dict(target, temp_dict):  # 返回target在temp_dict中的索引
    """Return a subscript-path string (e.g. "['a']['b']") locating key
    *target* inside the nested dict *temp_dict*, or "" when not found.

    Non-empty list values are collapsed to their first element IN PLACE —
    callers rely on this mutation so the returned path can be eval'ed
    against the very same object afterwards.
    """
    if target in temp_dict.keys():
        return "['" + str(target) + "']"
    path = ""
    for key in temp_dict.keys():
        node = temp_dict[key]
        if type(node) is list and node:
            node = node[0]
            temp_dict[key] = node  # deliberate side effect: [x, ...] -> x
        if type(node) is dict:
            # Last nested dict scanned wins, matching the original order.
            path = "['" + str(key) + "']" + search_dict(target, node)
    return path
# 从输入的key = target, 获取temp_dict中对应值
# 从输入的key = target, 获取temp_dict中对应值
def find_from_dict(target, temp_dict):
    """Return the value stored under key *target* anywhere inside the
    nested dict *temp_dict* ("" when absent).

    Non-empty list values are collapsed to their first element in place
    (callers depend on this mutation).  If *temp_dict* is not dict-like
    at all, its string representation is returned instead.
    """
    value = ""
    try:
        if target in temp_dict.keys():
            value = temp_dict[target]
        else:
            for key in temp_dict.keys():
                if type(temp_dict[key]) is list and temp_dict[key]:
                    temp_dict[key] = temp_dict[key][0]
                if type(temp_dict[key]) is dict:
                    value = find_from_dict(target, temp_dict[key])
    except Exception:
        # Narrowed from a bare `except:` (which also swallowed SystemExit /
        # KeyboardInterrupt); non-dict input falls back to its string form.
        value = str(temp_dict)
    return value
def compare_dict(dict1, dict2):
    """Return the keys of *dict1* (the expectation) that are missing from
    *dict2* (the response), recursing into nested dicts.

    Fix: the original silently ignored keys absent from *dict2*, so truly
    missing fields were never reported (its callers use the returned list
    as "缺少的值" / missing values).  Values themselves are still not
    compared, and non-empty list values in *dict2* are collapsed to their
    first element in place (callers rely on that mutation).
    """
    miss_list = []
    for key, value in dict1.items():
        if key not in dict2:
            # Expected key entirely absent from the response.
            miss_list.append(key)
            continue
        if type(dict2[key]) is list and dict2[key]:
            dict2[key] = dict2[key][0]
        if type(dict2[key]) is dict:
            if type(dict1[key]) is dict:
                miss_list += compare_dict(dict1[key], dict2[key])
            else:
                # Shape mismatch: response nests a dict where a scalar
                # was expected.
                miss_list.append(key)
    return miss_list
# target = "b"
# mydict = {'a': 1, 'b': {"xy": 4, "xx": 8}, 'c': 3}
# path = search_dict(target, mydict)
# print "mydict" + path + "=24"
# exec("mydict" + path + "=24")
# print mydict
# target = "xx"
# test_dict = {'a': 1, 'b': 2, 'c': {'d': 4, 'e': 5}, 'f': {'g': 6, 'xx': 7, 'h': 8}}
# value = find_from_dict(target, test_dict)
# print value
|
{"/logger.py": ["/config.py"]}
|
6,455
|
AKkkAha/API_WITH_EXCEL
|
refs/heads/master
|
/config.py
|
# -*- coding:utf-8 -*-
test_module = {
"testcase": ["3-13"], # 要执行的用例sheet和具体用例,单个用例用整形表示如 1 ,多个连续用例用字符串表示如 "2-6" ,都储存在列表钟
}
# 预设的参数,字典格式表示。
custom_var = {
}
|
{"/logger.py": ["/config.py"]}
|
6,459
|
DmitrievaNatalia/-PP_Homework_5
|
refs/heads/master
|
/main.py
|
import requests
from bs4 import BeautifulSoup
from logger_and_log_path import set_log_path
KEYWORDS = {'дизайн', 'фото', 'web', 'python'}
URL = 'https://habr.com'
def get_articles():
    """Fetch the habr 'all posts' page and return its <article> tags."""
    response = requests.get('https://habr.com/ru/all/')
    page = BeautifulSoup(response.text, features='html.parser')
    return page.find_all('article')
def get_words(article):
    """Return the lower-cased set of words in the article's preview snippet.

    Returns an empty *set* when the snippet is absent.  (The original
    returned an empty dict, which made ``preview & KEYWORDS`` in scan()
    raise TypeError for any article without a preview.)
    """
    preview = article.find(class_="tm-article-snippet")
    if preview:
        return {word.lower() for word in preview.text.split()}
    return set()
def get_link(article):
    """Return the absolute URL of the article (site root + relative href)."""
    href = article.find('h2').find('a').attrs.get('href')
    return URL + href
def get_date(article):
    """Return the date part of the article's <time> title attribute."""
    title_attr = article.find('time').attrs.get('title')
    date_part, _, _ = title_attr.partition(',')
    return date_part
def get_title(article):
    """Return the text of the article's <h2><span> heading."""
    heading = article.find('h2')
    return heading.find('span').text
@set_log_path('log.txt')
def scan():
    """Collect 'date - title - link' lines for articles matching KEYWORDS."""
    matches = []
    for article in get_articles():
        # Keep only articles whose preview shares at least one keyword.
        if get_words(article) & KEYWORDS:
            matches.append(
                f"{get_date(article)} - {get_title(article)} - {get_link(article)}"
            )
    return matches
print(scan())
|
{"/main.py": ["/logger_and_log_path.py"]}
|
6,460
|
DmitrievaNatalia/-PP_Homework_5
|
refs/heads/master
|
/logger_and_log_path.py
|
import datetime
def set_log_path(log_path):
    """Decorator factory: append one log line per call of the wrapped
    function to *log_path* (timestamp, name, arguments, return value).

    Fixes: the original accumulated the line in a `nonlocal` buffer shared
    by every call of the wrapper (not thread-safe and needlessly stateful)
    and did not preserve the wrapped function's metadata.
    """
    from functools import wraps  # local import keeps the module surface unchanged

    def logger(some_func):
        @wraps(some_func)
        def new_some_func(*args, **kwargs):
            with open(log_path, 'a', encoding='utf-8') as logfile:
                entry = f'{datetime.datetime.now()} : '
                entry += f'функция <{some_func.__name__}>'
                entry += f', аргументы *args: <{args}>, **kwargs: <{kwargs}>'
                result = some_func(*args, **kwargs)
                entry += f'- возвращаемое значение <{result}>'
                logfile.write(f'{entry}\n')
            return result
        return new_some_func
    return logger
|
{"/main.py": ["/logger_and_log_path.py"]}
|
6,461
|
jeannas/jet_tracking
|
refs/heads/master
|
/jet_tracking/tests/test_cam_utils.py
|
import pytest
import numpy as np
from .. import cam_utils
from . import conftest
@pytest.fixture()
def onaxis_image():
    # TODO: an actual image would be nice...
    # Stand-in for an on-axis camera frame: 100x100 random floats in [0, 1).
    return np.random.random((100, 100))
# Smoke tests: each one only checks that the cam_utils entry point runs
# without raising for plausible scalar arguments — no output is asserted.
def test_smoke_jet_detect(onaxis_image):
    print(cam_utils.jet_detect(onaxis_image))
def test_smoke_get_jet_z():
    cam_utils.get_jet_z(rho=0.0, theta=0.0, roi_y=1, roi_z=1, pxsize=0.001,
                        cam_y=1, cam_z=1, beam_y=1, beam_z=1, cam_pitch=1)
def test_smoke_get_jet_x():
    cam_utils.get_jet_x(rho=0.0, theta=0.0, roi_x=1, roi_y=1, pxsize=0.001,
                        cam_x=1, cam_y=1, beam_x=1, beam_y=1, cam_roll=1)
def test_smoke_get_jet_pitch():
    cam_utils.get_jet_pitch(theta=0.0, cam_pitch=1)
def test_smoke_get_jet_roll():
    cam_utils.get_jet_roll(theta=0.0, cam_roll=1)
def test_smoke_get_jet_width(onaxis_image):
    cam_utils.get_jet_width(im=onaxis_image, rho=0.0, theta=1.0)
def test_smoke_get_offaxis_coords():
    cam_utils.get_offaxis_coords(cam_beam_y=0.0, cam_beam_z=0.0,
                                 cam_pitch=1, pxsize=0.001)
def test_smoke_get_cam_coords():
    cam_utils.get_cam_coords(cam_beam_x=0.0, cam_beam_y=0.0,
                             cam_roll=1, pxsize=0.001)
# The calibration helpers below take a pair of frames.
def test_smoke_get_cam_pitch(onaxis_image):
    cam_utils.get_cam_pitch([onaxis_image,
                             np.random.random(onaxis_image.shape)])
def test_smoke_get_cam_roll(onaxis_image):
    cam_utils.get_cam_roll([onaxis_image,
                            np.random.random(onaxis_image.shape)])
def test_smoke_get_cam_pitch_pxsize(onaxis_image):
    cam_utils.get_cam_pitch_pxsize([onaxis_image,
                                    np.random.random(onaxis_image.shape)],
                                   positions=[0, 1])
def test_smoke_get_cam_roll_pxsize(onaxis_image):
    cam_utils.get_cam_roll_pxsize([onaxis_image,
                                   np.random.random(onaxis_image.shape)],
                                  positions=[0, 1])
def test_smoke_get_nozzle_shift(onaxis_image):
    cam_utils.get_nozzle_shift(
        onaxis_image, np.random.random(onaxis_image.shape),
        cam_roll=1, pxsize=0.001)
def test_smoke_get_burst_avg(jet_control):
    # Uses the jet_control fixture (conftest) and a randomized ROI image.
    roi_image = jet_control.camera.ROI_image
    conftest.set_random_image(roi_image)
    cam_utils.get_burst_avg(2, roi_image)
|
{"/jet_tracking/tests/test_sim.py": ["/jet_tracking/sim.py"], "/jet_tracking/tests/conftest.py": ["/jet_tracking/devices.py", "/jet_tracking/jet_control.py"], "/jet_tracking/tests/test_control.py": ["/jet_tracking/jet_control.py"]}
|
6,462
|
jeannas/jet_tracking
|
refs/heads/master
|
/jet_tracking/jettracking.py
|
'''
Calls the GUI for jet tracking. Ultimately only this file should need to be run, and the GUI will
control when the jet tracking methods e.g. calibrate(), jet_detect(), etc should be run
'''
from qtpy.QtCore import QThread
from pydm import Display
import jt_utils
import jet_control
from time import sleep
class TrackThread(QThread):
    """Background thread that would run the jet-tracking loop.

    The device checks and tracking logic are currently stubbed out: the
    run() body is a no-op loop, with the intended algorithm preserved in
    the triple-quoted block below for reference.
    """
    def __init__(self):
        # def __init__(self, injector, camera, cspad, stopper, pulse_picker, wave8, params):
        super().__init__()
        # The string below is a disabled sketch of the device wiring —
        # it is evaluated as a bare expression and has no effect.
        '''
        self.stopper = stopper
        self.pulse_picker = pulse_picker
        self.wave8 = wave8
        self.cspad = cspad
        self.camera = camera
        self.injector = injector
        self.params = params
        '''
    def run(self):
        # Spins until requestInterruption() is called; the real tracking
        # body is commented out below.
        while not self.isInterruptionRequested():
            '''
            # check devices first
            # check if stopper is in
            if (jt_utils.get_stopper(self.stopper) == 1):
                # if stopper is in, stop jet tracking
                print('Stopper in - TRACKING STOPPED')
                self.requestInterruption()
                continue
            # check if pulse picker is closed
            if (jt_utils.get_pulse_picker(self.pulse_picker) == 1):
                # if pulse picker is closed, stop jet tracking
                print('Pulse picker closed - TRACKING STOPPED')
                self.requestInterruption()
                continue
            # check wave8
            if (jt_utils.get_wave8(self.wave8) < self.params.thresh_w8):
                # if wave8 is below threshold, continue running jet tracking but do not move
                print('Wave8 below threshold - NOT TRACKING')
                continue
            # check CSPAD
            # get azimuthal average from CSPAD & Wave8 data
            if (jt_utils.get_cspad(azav, params.radius.get(), gas_det) <
                    self.params.intensity.get() * self.params.thresh_lo.get()):
                # if CSPAD is below lower threshold, move jet
                if (not self.params.bypass_camera()):
                    # if camera is not bypassed, check if there is a jet and location of jet
                    try:
                        jet_control.jet_calculate_inline(self.camera, self.params)
                        # if jet is more than 10 microns away from x-rays, move jet using camera feedback
                        # threshold for this can be changed if needed
                        if (self.params.jet_x.get() > 0.01):
                            jet_control.jet_move_inline(self.injector, self.camera, self.params)
                            continue
                    except Exception:
                        # if jet is not detected, continue running jet tracking but do not move
                        print('Cannot find jet - NOT TRACKING')
                        continue
                # if camera is bypassed or if jet is less than 10 microns away from x-rays, scan jet across x-rays to find new maximum
                jet_control.scan(self.injector, self.cspad)
                # get azimuthal average from CSPAD & Wave8 data
                intensity = jt_utils.get_cspad(azav, self.params.radius.get(), gas_det)
                self.params.intensity.put(intensity)
                # if CSPAD is still below upper threshold, stop jet tracking
                if (jt_utils.get_cspad(azav, self.params.radius.get(), gas_det) <
                        self.params.intensity.get() * self.params.thresh_hi.get()):
                    print('CSPAD below threshold - TRACKING STOPPED')
                    self.requestInterruption()
            '''
class JetTrack(Display):
    """pydm Display wiring the jet-tracking GUI buttons to a TrackThread."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # TrackThread to run jet tracking in
        self.track_thread = TrackThread()
        # self.track_thread = TrackThread(injector, camera, cspad, stopper, pulse_picker, wave8, params)
        # connect GUI buttons to appropriate methods
        self.ui.calibrate_btn.clicked.connect(self.calibrate_clicked)
        self.ui.start_btn.clicked.connect(self.start_clicked)
        self.ui.stop_btn.clicked.connect(self.stop_clicked)
        # set initial availability of buttons
        # Only calibration is allowed until a calibration has been run.
        self.ui.calibrate_btn.setEnabled(True)
        self.ui.start_btn.setEnabled(False)
        self.ui.stop_btn.setEnabled(False)
    def ui_filename(self):
        '''
        Load ui file for GUI
        '''
        return 'jettracking.ui'
    def calibrate_clicked(self):
        '''
        Runs calibration method when calibrate button is clicked
        '''
        self.ui.logger.write('Calibrating')
        self.ui.calibrate_btn.setEnabled(False)
        #jet_control.calibrate(injector, camera, cspad, params)
        self.ui.logger.write('Calibration complete - can now run jet tracking')
        self.ui.calibrate_btn.setEnabled(True)
        # activate start button
        self.ui.start_btn.setEnabled(True)
        return
    def start_clicked(self):
        '''
        Starts new thread to run jet tracking in when start button is clicked
        '''
        self.ui.logger.write('Running jet tracking')
        # While tracking runs, only "stop" remains clickable.
        self.ui.start_btn.setEnabled(False)
        self.ui.stop_btn.setEnabled(True)
        self.ui.calibrate_btn.setEnabled(False)
        # start TrackThread
        self.track_thread.start()
    def stop_clicked(self):
        '''
        Stops jet tracking when stop button is clicked
        '''
        # Cooperative stop: TrackThread.run polls isInterruptionRequested().
        self.track_thread.requestInterruption()
        self.ui.logger.write('Jet tracking stopped')
        self.ui.stop_btn.setEnabled(False)
        self.ui.start_btn.setEnabled(True)
        self.ui.calibrate_btn.setEnabled(True)
|
{"/jet_tracking/tests/test_sim.py": ["/jet_tracking/sim.py"], "/jet_tracking/tests/conftest.py": ["/jet_tracking/devices.py", "/jet_tracking/jet_control.py"], "/jet_tracking/tests/test_control.py": ["/jet_tracking/jet_control.py"]}
|
6,463
|
jeannas/jet_tracking
|
refs/heads/master
|
/jet_tracking/jt_utils.py
|
# methods for jet tracking that do not involve the camera
import numpy as np
from scipy.optimize import curve_fit
def gaussianslope(x, a, mean, std, m, b):
    '''
    Define the function for a Gaussian on a slope (Gaussian + linear)
    Parameters
    ----------
    x : float
        x-coordinate
    a : float
        amplitude of Gaussian
    mean : float
        mean of Gaussian
    std : float
        standard deviation of Gaussian
    m : float
        slope of linear baseline
    b : float
        y-intercept of linear baseline
    Returns
    -------
    y : float
        the y-coordinate for the given x-coordinate as defined by the
        parameters given for the Gaussian on a slope
    '''
    return (a * np.exp(-((x-mean) / 2 / std) ** 2)) + (m * x + b)


def fit_cspad(azav, norm, gas_det):
    '''
    Fit the Azimuthal average of the CSPAD to a Gaussian on a slope
    Parameters
    ----------
    azav : ndarray
        Azimuthal average for CSPAD
    norm : ndarray
        number of pixels in each qbin
    gas_det : float
        gas detector
    Returns
    -------
    center : int
        radius of the diffraction ring
    intensity : float
        sum of qbins 5 above and below the center, normalized by gas detector
    '''
    # determine number of pixels in each qbin, only use qbins where pixels > 150
    # **can change 150 to different value if needed
    start = 0
    end = len(norm)
    # Integer division (bug fix): `end / 2` is a float in Python 3 and made
    # range(begin) raise TypeError.
    begin = end // 2
    for i in range(begin):
        a = begin - i
        b = begin + i
        if (norm[a] < 150) and (a > start):
            start = a
        if (norm[b] < 150) and (b < end):
            end = b
    # NOTE(review): start/end are computed but never applied to the fit
    # below — preserved as-is; confirm whether the window was meant to
    # restrict the fitted range.
    x = np.arange(len(azav))
    # estimate mean and standard deviation for Gaussian
    n = len(x)
    mean = sum(x*azav) / sum(azav)
    std = np.sqrt(sum((x-mean)**2)/n)
    # estimate slope and y-intercept for linear baseline by taking first & last
    # 50 points and fitting a line between them
    # **can change 50 to different value if needed
    x0 = 50/2
    n_bins = len(azav)
    x1 = n_bins - (50/2)
    y0 = np.mean(azav[0:50])
    y1 = np.mean(azav[n_bins-50:])
    m, b = np.polyfit((x0, x1), (y0, y1), 1)
    # fit Gaussian + linear to Azimuthal average; provide initial parameters
    popt, pcov = curve_fit(gaussianslope, x, azav, p0=[max(azav), mean, std, m, b])
    # calculate radius of ring and intensity of center 10 qbins
    center = int(round(popt[1]))
    intensity = sum(azav[center-5:center+5]) / gas_det
    return center, intensity
def get_cspad(azav, r, gas_det):
    '''
    Get the intensity of the diffraction ring on the CSPAD
    Parameters
    ----------
    azav : ndarray
        Azimuthal average calculated from CSPAD
    r : int
        radius of diffraction ring
    gas_det : float
        gas detector
    Returns
    -------
    intensity : float
        sum of qbins 5 above and below the center, normalized by gas detector
    '''
    # Sum the 10 qbins centred on the ring radius, then normalise.
    ring_sum = sum(azav[r - 5:r + 5])
    return ring_sum / gas_det
# unfinished methods for checking stopper, pulse picker, and Wave8
# can make Ophyd devices or load specific PV needed directly into beamline.py
def get_stopper(stopper):
    # Placeholder: passes the raw stopper value straight through.
    return stopper
def get_pulse_picker(pulse_picker):
    # Placeholder: passes the raw pulse-picker value straight through.
    return pulse_picker
def get_wave8(wave8):
    # Placeholder: passes the raw Wave8 value straight through.
    return wave8
|
{"/jet_tracking/tests/test_sim.py": ["/jet_tracking/sim.py"], "/jet_tracking/tests/conftest.py": ["/jet_tracking/devices.py", "/jet_tracking/jet_control.py"], "/jet_tracking/tests/test_control.py": ["/jet_tracking/jet_control.py"]}
|
6,464
|
jeannas/jet_tracking
|
refs/heads/master
|
/jet_tracking/tests/test_sim.py
|
import os.path
import numpy as np
import pandas as pd
import pytest
from jet_tracking.sim import generate_simulation
@pytest.fixture(scope='session')
def simulated_data():
    # Canned (x, y) samples shipped next to this test module.
    return pd.read_csv(os.path.join(os.path.dirname(__file__), 'sim.csv'))
def test_generate_simulation(simulated_data):
    # Build a sim namespace whose motor drives 'x' and whose signal reads 'y'.
    ns = generate_simulation('x', 'y', simulated_data,
                             motor_precision=0,
                             random_state=np.random.RandomState(0))
    assert ns.motor.precision == 0
    # Set our motor
    ns.motor.set(4)
    # Grab ten readings
    values = list()
    for i in range(10):
        ns.signal.trigger()
        values.append(ns.signal.get())
    # Every reading must be drawn from the y-values recorded at x == 4,
    # and with a fixed seed all of those values should be visited.
    possible_values = ns.data[ns.data['x'] == 4]['y'].unique()
    assert len(set(values)) == len(possible_values)
    assert all(val in possible_values for val in values)
|
{"/jet_tracking/tests/test_sim.py": ["/jet_tracking/sim.py"], "/jet_tracking/tests/conftest.py": ["/jet_tracking/devices.py", "/jet_tracking/jet_control.py"], "/jet_tracking/tests/test_control.py": ["/jet_tracking/jet_control.py"]}
|
6,465
|
jeannas/jet_tracking
|
refs/heads/master
|
/jet_tracking/tests/conftest.py
|
import numpy as np
import pytest
import types
import inspect
from ..devices import (Injector, Selector, CoolerShaker, HPLC,
PressureController, FlowIntegrator, Offaxis, Questar,
Parameters, OffaxisParams, Control, Diffract,
SDS)
from ophyd.areadetector.plugins import PluginBase
all_devices = (Injector, Selector, CoolerShaker, HPLC, PressureController,
FlowIntegrator, Offaxis, Questar, Parameters, OffaxisParams,
Control, Diffract, SDS)
@pytest.fixture(scope='function')
def devices(monkeypatch):
    '''A namespace containing faked versions of all devices
    Separately, this monkeypatches jet_tracking.devices so that all access
    to those devices returns the faked versions.
    '''
    from .. import devices as _devices
    from ophyd.areadetector import EpicsSignalWithRBV
    import ophyd.sim
    ns = types.SimpleNamespace()
    # Teach ophyd.sim how to fake the areadetector RBV signal class.
    ophyd.sim.fake_device_cache[EpicsSignalWithRBV] = ophyd.sim.FakeEpicsSignal
    for cls in all_devices:
        name = cls.__name__
        # SDS is left as-is; every other device gets a fake twin.
        if cls is not SDS:
            cls = ophyd.sim.make_fake_device(cls)
        setattr(ns, name, cls)
        monkeypatch.setattr(_devices, name, cls)
    # Short-circuit all plugin type checks, array data
    for dev in (ns.Questar, ns.Offaxis):
        components = [
            cpt.cls
            for name, cpt in dev._sig_attrs.items()
            if hasattr(cpt, 'cls') and issubclass(cpt.cls, PluginBase)
        ]
        for component_cls in components:
            # Clearing _plugin_type disables the plugin-type validation
            # that a fake IOC cannot satisfy.
            if hasattr(component_cls, '_plugin_type'):
                monkeypatch.setattr(component_cls, '_plugin_type', None)
    return ns
@pytest.fixture(scope='function')
def injector(devices):
    '''Fake Injector with all six axes seeded and instant-move setpoints.'''
    injector = _instantiate_fake_device(
        devices.Injector,
        name='fake_PI1_injector',
        coarseX='fake_CXI:PI1:MMS:01',
        coarseY='fake_CXI:PI1:MMS:02',
        coarseZ='fake_CXI:PI1:MMS:03',
        fineX='fake_CXI:USR:MMS:01',
        fineY='fake_CXI:USR:MMS:02',
        fineZ='fake_CXI:USR:MMS:03'
    )
    for i, attr in enumerate(['coarseX', 'coarseY', 'coarseZ',
                              'fineX', 'fineY', 'fineZ']):
        motor = getattr(injector, attr)
        # Distinct readback per axis so tests can tell them apart.
        motor.user_readback.sim_put(0.1 * i)
        motor.user_setpoint.sim_put(0.0)
        motor.motor_spg.sim_put('Go')
        # Makes set() complete immediately (see _patch_user_setpoint).
        _patch_user_setpoint(motor)
    return injector
def _patch_array_data(plugin_inst):
def get_array_data(*args, count=None, **kwargs):
# eat the count argument, unsupported by fakeepicssignal.get()
return orig_get(*args, **kwargs)
array_data = plugin_inst.array_data
orig_get = array_data.get
array_data.get = get_array_data
def _patch_user_setpoint(motor):
    '''Make the fake motor complete moves instantly.

    Installs a putter on user_setpoint that mirrors the commanded position
    into user_readback and immediately flags the move as done.
    '''
    def putter(pos, *args, **kwargs):
        motor.user_setpoint.sim_put(pos, *args, **kwargs)
        motor.user_readback.sim_put(pos)
        motor._done_moving(success=True)
    motor.user_setpoint.sim_set_putter(putter)
@pytest.fixture(scope='function')
def questar(devices):
    # Fake inline (Questar) camera; ROI/stats/image ports mirror the
    # production plugin wiring.
    questar = _instantiate_fake_device(
        devices.Questar,
        prefix='fake_CXI:SC1:INLINE',
        name='fake_SC1_questar',
        ROI_port='ROI1',
        ROI_stats_port='Stats1',
        ROI_image_port='IMAGE1',
    )
    # Strip the unsupported `count` kwarg from array reads.
    _patch_array_data(questar.image)
    _patch_array_data(questar.ROI_image)
    return questar
@pytest.fixture(scope='function')
def offaxis_camera(devices):
    # Fake off-axis camera, configured identically to the questar fixture.
    offaxis = _instantiate_fake_device(
        devices.Offaxis,
        prefix='fake_CXI:SC1:OFFAXIS',
        name='fake_SC1_offaxis',
        ROI_port='ROI1',
        ROI_stats_port='Stats1',
        ROI_image_port='IMAGE1',
    )
    _patch_array_data(offaxis.image)
    _patch_array_data(offaxis.ROI_image)
    return offaxis
@pytest.fixture(scope='function')
def offaxis_parameters(devices):
    # OffaxisParams seeded with nonzero beam/camera geometry so that
    # coordinate-transform code has something to chew on.
    params = _instantiate_fake_device(
        devices.OffaxisParams,
        prefix='fake_CXI:SC1:INLINE',
        name='fake_SC1_params'
    )
    params.beam_y.put(1.0)
    params.beam_z.put(1.0)
    params.beam_y_px.put(1)
    params.beam_z_px.put(1)
    params.cam_y.put(1.0)
    params.cam_z.put(1.0)
    params.pxsize.put(0.001)
    params.cam_pitch.put(1.0)
    return params
@pytest.fixture(scope='function')
def parameters(devices):
    # Inline Parameters, seeded the same way (x/y instead of y/z).
    params = _instantiate_fake_device(
        devices.Parameters,
        prefix='fake_CXI:SC1:INLINE',
        name='fake_SC1_params'
    )
    params.beam_x.put(1.0)
    params.beam_y.put(1.0)
    params.beam_x_px.put(1)
    params.beam_y_px.put(1)
    params.cam_x.put(1.0)
    params.cam_y.put(1.0)
    params.pxsize.put(0.001)
    params.cam_roll.put(1.0)
    return params
@pytest.fixture(scope='function')
def diffract(devices):
    """Fake Diffract device for the SC1 diffraction PVs."""
    return _instantiate_fake_device(
        devices.Diffract,
        prefix='fake_CXI:SC1:DIFFRACT',
        name='fake_SC1_diffract',
    )
@pytest.fixture(scope='function')
def jet_control(injector, questar, parameters, diffract):
    """JetControl wired to the fake injector, camera, params and diffract."""
    from ..jet_control import JetControl
    control = JetControl(
        name='test_control',
        injector=injector,
        camera=questar,
        params=parameters,
        diffract=diffract,
    )
    return control
def _instantiate_fake_device(dev_cls, name=None, prefix='_prefix',
**specified_kw):
'''Instantiate a FakeDevice, optionally specifying some initializer kwargs
If unspecified, all initializer keyword arguments will default to
the string f"_{argument_name}_".
All signals on the device (and its subdevices) are initialized to either 0
or ''.
'''
sig = inspect.signature(dev_cls)
ignore_kw = {'kind', 'read_attrs', 'configuration_attrs', 'parent',
'args', 'name', 'prefix'}
kwargs = {name: specified_kw.get(name, f'_{param.name}_')
for name, param in sig.parameters.items()
if param.kind != param.VAR_KEYWORD and
name not in ignore_kw
}
kwargs['name'] = (name if name is not None else dev_cls.__name__)
kwargs['prefix'] = prefix
dev = dev_cls(**kwargs)
devs = [dev]
while devs:
sub_dev = devs.pop(0)
devs.extend([getattr(sub_dev, name)
for name in sub_dev._sub_devices])
for name, cpt in sub_dev._sig_attrs.items():
sig = getattr(sub_dev, name)
try:
if cpt.kwargs.get('string', False):
sig.sim_put('')
else:
sig.sim_put(0)
except Exception:
...
return dev
@pytest.fixture(scope='function')
def device_instances(injector, questar, offaxis_camera, parameters,
                     offaxis_parameters, diffract, devices):
    """Namespace collecting one instance of every fake device.

    Devices that have their own fixture (injector, cameras, parameters,
    diffract) are reused; the rest are instantiated fresh with default
    fake PV names.
    """
    ns = types.SimpleNamespace()
    ns.Control = _instantiate_fake_device(devices.Control)
    ns.CoolerShaker = _instantiate_fake_device(devices.CoolerShaker)
    # Fix: previously a throwaway Diffract was instantiated here and then
    # immediately overwritten by the fixture-provided instance.
    ns.Diffract = diffract
    ns.FlowIntegrator = _instantiate_fake_device(devices.FlowIntegrator)
    ns.HPLC = _instantiate_fake_device(devices.HPLC)
    ns.Injector = injector
    ns.Offaxis = offaxis_camera
    ns.OffaxisParams = offaxis_parameters
    ns.Parameters = parameters
    ns.PressureController = _instantiate_fake_device(devices.PressureController)
    ns.Questar = questar
    ns.Selector = _instantiate_fake_device(devices.Selector)
    ns.SDS = SDS({})
    ns.SDS.SDS_devices.extend([ns.Selector, ns.CoolerShaker, ns.HPLC,
                               ns.PressureController, ns.FlowIntegrator])
    return ns
def set_random_image(plugin, dimx=100, dimy=100):
    """Load a random (dimx, dimy) image into *plugin* and set its metadata.

    The array data is uniform random noise; width/height/depth and the
    number of dimensions are updated to match a 2-D image.
    """
    plugin.array_data.put(np.random.random((dimx, dimy)))
    metadata = ((plugin.array_size.width, dimx),
                (plugin.array_size.height, dimy),
                (plugin.array_size.depth, 0),
                (plugin.ndimensions, 2))
    for signal, value in metadata:
        signal.sim_put(value)
|
{"/jet_tracking/tests/test_sim.py": ["/jet_tracking/sim.py"], "/jet_tracking/tests/conftest.py": ["/jet_tracking/devices.py", "/jet_tracking/jet_control.py"], "/jet_tracking/tests/test_control.py": ["/jet_tracking/jet_control.py"]}
|
6,466
|
jeannas/jet_tracking
|
refs/heads/master
|
/jet_tracking/devices.py
|
import pandas as pd
import epics
from ophyd.device import Device, FormattedComponent as FCpt, Component as Cpt
from ophyd.signal import EpicsSignal
from ophyd.areadetector.plugins import ROIPlugin, StatsPlugin, ImagePlugin
from pcdsdevices.areadetector.detectors import PCDSDetector
from pcdsdevices.epics_motor import IMS
class _TableMixin:
_table_attrs = ('value', 'units', 'desc')
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._descriptions = None
def _update_descriptions(self):
adesc = {}
for name, signal in self._signals.items():
pvname = getattr(signal, 'pvname', None)
adesc[name] = (epics.caget(pvname + '.DESC')
if pvname else '')
self._descriptions = adesc
@property
def table(self):
"""
Return table of Device settings
"""
if self._descriptions is None:
self._update_descriptions()
atable = {}
for name, signal in sorted(self._signals.items()):
try:
value = signal.read()[signal.name]['value']
except Exception:
value = None
try:
units = signal.describe()[signal.name].get('units', '')
except Exception:
units = None
atable[name] = {
'value': value,
'units': units,
'desc': self._descriptions.get(name),
}
return pd.DataFrame(atable).T.loc[:, self._table_attrs]
class Injector(Device, _TableMixin):
    '''Sample injector built from three coarse and three fine IMS motors.

    Parameters
    ----------
    name : str
        Device name
    coarseX, coarseY, coarseZ : str
        PV names of the coarse control motors for each axis
    fineX, fineY, fineZ : str
        PV names of the fine control motors for each axis
    '''
    coarseX = FCpt(IMS, '{self._coarseX}')
    coarseY = FCpt(IMS, '{self._coarseY}')
    coarseZ = FCpt(IMS, '{self._coarseZ}')
    fineX = FCpt(IMS, '{self._fineX}')
    fineY = FCpt(IMS, '{self._fineY}')
    fineZ = FCpt(IMS, '{self._fineZ}')

    def __init__(self, name,
                 coarseX, coarseY, coarseZ,
                 fineX, fineY, fineZ, **kwargs):
        # Record each PV name before Device.__init__ formats the components.
        motor_pvs = {'coarseX': coarseX, 'coarseY': coarseY,
                     'coarseZ': coarseZ, 'fineX': fineX,
                     'fineY': fineY, 'fineZ': fineZ}
        for axis, pv in motor_pvs.items():
            setattr(self, f'_{axis}', pv)
        super().__init__(name=name, **kwargs)
class Selector(Device, _TableMixin):
    '''A Selector for the sample delivery system.

    Each parameter is the PV name of the corresponding EpicsSignal
    component (remote control, connection status, flow readings, flow
    meter controls, naming/coupling buttons, and the four shakers).

    Parameters
    ----------
    name : str
        Device name
    remote_control, status, flow, flowstate, flowtype : str
        PV names for control and flow readings
    FM_rb, FM_reset, FM : str
        PV names for the flow meter
    names_button, couple_button, names1, names2 : str
        PV names for naming/coupling controls
    shaker1, shaker2, shaker3, shaker4 : str
        PV names for shakers 1-4
    '''
    # also appears on pressure controller screen?
    remote_control = FCpt(EpicsSignal, '{self._remote_control}')
    status = FCpt(EpicsSignal, '{self._status}')
    flow = FCpt(EpicsSignal, '{self._flow}')
    flowstate = FCpt(EpicsSignal, '{self._flowstate}')
    flowtype = FCpt(EpicsSignal, '{self._flowtype}')
    FM_rb = FCpt(EpicsSignal, '{self._FM_rb}')
    FM_reset = FCpt(EpicsSignal, '{self._FM_reset}')
    FM = FCpt(EpicsSignal, '{self._FM}')
    names_button = FCpt(EpicsSignal, '{self._names_button}')
    couple_button = FCpt(EpicsSignal, '{self._couple_button}')
    names1 = FCpt(EpicsSignal, '{self._names1}')
    names2 = FCpt(EpicsSignal, '{self._names2}')
    shaker1 = FCpt(EpicsSignal, '{self._shaker1}')
    shaker2 = FCpt(EpicsSignal, '{self._shaker2}')
    shaker3 = FCpt(EpicsSignal, '{self._shaker3}')
    shaker4 = FCpt(EpicsSignal, '{self._shaker4}')

    def __init__(self, name,
                 remote_control, status,
                 flow, flowstate, flowtype,
                 FM_rb, FM_reset, FM,
                 names_button, couple_button, names1, names2,
                 shaker1, shaker2, shaker3, shaker4, **kwargs):
        # Record each PV name before Device.__init__ formats the components.
        pvs = dict(remote_control=remote_control, status=status, flow=flow,
                   flowstate=flowstate, flowtype=flowtype, FM_rb=FM_rb,
                   FM_reset=FM_reset, FM=FM, names_button=names_button,
                   couple_button=couple_button, names1=names1, names2=names2,
                   shaker1=shaker1, shaker2=shaker2, shaker3=shaker3,
                   shaker4=shaker4)
        for attr, pv in pvs.items():
            setattr(self, f'_{attr}', pv)
        super().__init__(name=name, **kwargs)
class CoolerShaker(Device, _TableMixin):
    '''A Cooler/Shaker for the sample delivery system.

    Parameters
    ----------
    name : str
        Device name
    temperature1, SP1, set_SP1, current1 : str
        PV names for channel 1 temperature, set point, set-point control
        and current
    temperature2, SP2, set_SP2, current2 : str
        PV names for channel 2 temperature, set point, set-point control
        and current
    reboot : str
        PV name of the reboot control
    '''
    temperature1 = FCpt(EpicsSignal, '{self._temperature1}')
    SP1 = FCpt(EpicsSignal, '{self._SP1}')
    set_SP1 = FCpt(EpicsSignal, '{self._set_SP1}')
    current1 = FCpt(EpicsSignal, '{self._current1}')
    temperature2 = FCpt(EpicsSignal, '{self._temperature2}')
    SP2 = FCpt(EpicsSignal, '{self._SP2}')
    set_SP2 = FCpt(EpicsSignal, '{self._set_SP2}')
    current2 = FCpt(EpicsSignal, '{self._current2}')
    reboot = FCpt(EpicsSignal, '{self._reboot}')

    def __init__(self, name,
                 temperature1, SP1, set_SP1, current1,
                 temperature2, SP2, set_SP2, current2,
                 reboot, **kwargs):
        # Record each PV name before Device.__init__ formats the components.
        pvs = dict(temperature1=temperature1, SP1=SP1, set_SP1=set_SP1,
                   current1=current1, temperature2=temperature2, SP2=SP2,
                   set_SP2=set_SP2, current2=current2, reboot=reboot)
        for attr, pv in pvs.items():
            setattr(self, f'_{attr}', pv)
        super().__init__(name=name, **kwargs)
class HPLC(Device, _TableMixin):
    '''An HPLC for the sample delivery system.

    Parameters
    ----------
    name : str
        Device name
    status, run : str
        PV names for HPLC status and run control
    flowrate, set_flowrate, flowrate_SP : str
        PV names for the flow rate readback, control, and set point
    pressure, pressure_units, set_max_pressure, max_pressure : str
        PV names for pressure readback, units, and maximum-pressure
        control/readback
    clear_error : str
        PV name of the clear-errors control
    '''
    status = FCpt(EpicsSignal, '{self._status}')
    run = FCpt(EpicsSignal, '{self._run}')
    flowrate = FCpt(EpicsSignal, '{self._flowrate}')
    set_flowrate = FCpt(EpicsSignal, '{self._set_flowrate}')
    flowrate_SP = FCpt(EpicsSignal, '{self._flowrate_SP}')
    pressure = FCpt(EpicsSignal, '{self._pressure}')
    pressure_units = FCpt(EpicsSignal, '{self._pressure_units}')
    set_max_pressure = FCpt(EpicsSignal, '{self._set_max_pressure}')
    max_pressure = FCpt(EpicsSignal, '{self._max_pressure}')
    clear_error = FCpt(EpicsSignal, '{self._clear_error}')

    def __init__(self, name,
                 status, run,
                 flowrate, set_flowrate, flowrate_SP,
                 pressure, pressure_units, set_max_pressure, max_pressure,
                 clear_error, **kwargs):
        # Record each PV name before Device.__init__ formats the components.
        pvs = dict(status=status, run=run, flowrate=flowrate,
                   set_flowrate=set_flowrate, flowrate_SP=flowrate_SP,
                   pressure=pressure, pressure_units=pressure_units,
                   set_max_pressure=set_max_pressure,
                   max_pressure=max_pressure, clear_error=clear_error)
        for attr, pv in pvs.items():
            setattr(self, f'_{attr}', pv)
        super().__init__(name=name, **kwargs)
class PressureController(Device, _TableMixin):
    '''A Pressure Controller for the sample delivery system.

    Parameters
    ----------
    name : str
        Device name
    status : str
        PV name of the controller connection status
    pressure1, enabled1, limit1, SP1 : str
        PV names for channel 1 pressure, enable, high limit and set point
    pressure2, enabled2, limit2, SP2 : str
        PV names for channel 2 pressure, enable, high limit and set point
    '''
    status = FCpt(EpicsSignal, '{self._status}')
    pressure1 = FCpt(EpicsSignal, '{self._pressure1}')
    enabled1 = FCpt(EpicsSignal, '{self._enabled1}')
    limit1 = FCpt(EpicsSignal, '{self._limit1}')
    SP1 = FCpt(EpicsSignal, '{self._SP1}')
    pressure2 = FCpt(EpicsSignal, '{self._pressure2}')
    enabled2 = FCpt(EpicsSignal, '{self._enabled2}')
    limit2 = FCpt(EpicsSignal, '{self._limit2}')
    SP2 = FCpt(EpicsSignal, '{self._SP2}')

    def __init__(self, name,
                 status,
                 pressure1, enabled1, limit1, SP1,
                 pressure2, enabled2, limit2, SP2, **kwargs):
        # Record each PV name before Device.__init__ formats the components.
        pvs = dict(status=status,
                   pressure1=pressure1, enabled1=enabled1,
                   limit1=limit1, SP1=SP1,
                   pressure2=pressure2, enabled2=enabled2,
                   limit2=limit2, SP2=SP2)
        for attr, pv in pvs.items():
            setattr(self, f'_{attr}', pv)
        super().__init__(name=name, **kwargs)
class FlowIntegrator(Device, _TableMixin):
    '''A FlowIntegrator for the sample delivery system.

    Tracks ten flow channels; each channel N has a starting volume
    (``startN``), a used/flow reading (``usedN``) and an estimated
    depletion time (``timeN``).

    Parameters
    ----------
    name : str
        Device name
    integrator_source, flow_source, names : str
        PV names for the integrator source, flow source and channel names
    start1..start10 : str
        PV names of the starting volume for channels 1-10
    used1..used10 : str
        PV names of the flow/used volume for channels 1-10
    time1..time10 : str
        PV names of the estimated depletion time for channels 1-10
    '''
    integrator_source = FCpt(EpicsSignal, '{self._integrator_source}')
    flow_source = FCpt(EpicsSignal, '{self._flow_source}')
    names = FCpt(EpicsSignal, '{self._names}')
    start1 = FCpt(EpicsSignal, '{self._start1}')
    used1 = FCpt(EpicsSignal, '{self._used1}')
    time1 = FCpt(EpicsSignal, '{self._time1}')
    start2 = FCpt(EpicsSignal, '{self._start2}')
    used2 = FCpt(EpicsSignal, '{self._used2}')
    time2 = FCpt(EpicsSignal, '{self._time2}')
    start3 = FCpt(EpicsSignal, '{self._start3}')
    used3 = FCpt(EpicsSignal, '{self._used3}')
    time3 = FCpt(EpicsSignal, '{self._time3}')
    start4 = FCpt(EpicsSignal, '{self._start4}')
    used4 = FCpt(EpicsSignal, '{self._used4}')
    time4 = FCpt(EpicsSignal, '{self._time4}')
    start5 = FCpt(EpicsSignal, '{self._start5}')
    used5 = FCpt(EpicsSignal, '{self._used5}')
    time5 = FCpt(EpicsSignal, '{self._time5}')
    start6 = FCpt(EpicsSignal, '{self._start6}')
    used6 = FCpt(EpicsSignal, '{self._used6}')
    time6 = FCpt(EpicsSignal, '{self._time6}')
    start7 = FCpt(EpicsSignal, '{self._start7}')
    used7 = FCpt(EpicsSignal, '{self._used7}')
    time7 = FCpt(EpicsSignal, '{self._time7}')
    start8 = FCpt(EpicsSignal, '{self._start8}')
    used8 = FCpt(EpicsSignal, '{self._used8}')
    time8 = FCpt(EpicsSignal, '{self._time8}')
    start9 = FCpt(EpicsSignal, '{self._start9}')
    used9 = FCpt(EpicsSignal, '{self._used9}')
    time9 = FCpt(EpicsSignal, '{self._time9}')
    start10 = FCpt(EpicsSignal, '{self._start10}')
    used10 = FCpt(EpicsSignal, '{self._used10}')
    time10 = FCpt(EpicsSignal, '{self._time10}')

    def __init__(self, name,
                 integrator_source, flow_source, names,
                 start1, used1, time1,
                 start2, used2, time2,
                 start3, used3, time3,
                 start4, used4, time4,
                 start5, used5, time5,
                 start6, used6, time6,
                 start7, used7, time7,
                 start8, used8, time8,
                 start9, used9, time9,
                 start10, used10, time10, **kwargs):
        self._integrator_source = integrator_source
        self._flow_source = flow_source
        self._names = names
        # Record the per-channel PV names before Device.__init__ formats
        # the components.
        starts = (start1, start2, start3, start4, start5,
                  start6, start7, start8, start9, start10)
        useds = (used1, used2, used3, used4, used5,
                 used6, used7, used8, used9, used10)
        times = (time1, time2, time3, time4, time5,
                 time6, time7, time8, time9, time10)
        for channel, (s, u, t) in enumerate(zip(starts, useds, times),
                                            start=1):
            setattr(self, f'_start{channel}', s)
            setattr(self, f'_used{channel}', u)
            setattr(self, f'_time{channel}', t)
        super().__init__(name=name, **kwargs)
class SDS:
    '''
    Sample delivery system

    Parameters
    ----------
    devices : dict
        Maps a device-type key -- one of {'selector', 'cooler_shaker',
        'hplc', 'pressure_controller', 'flow_integrator'} -- to the
        keyword-argument dict (PV names, etc.) used to construct that
        device.  Unknown keys are skipped with a printed warning.

    Attributes
    ----------
    SDS_devices : list
        The constructed devices, in the order given.
    '''
    device_types = {
        'selector': Selector,
        'cooler_shaker': CoolerShaker,
        'hplc': HPLC,
        'pressure_controller': PressureController,
        'flow_integrator': FlowIntegrator,
    }

    def __init__(self, devices):
        self.SDS_devices = []
        unknown = []
        for key, dev_kwargs in devices.items():
            if key in self.device_types:
                self.SDS_devices.append(self.device_types[key](**dev_kwargs))
            else:
                unknown.append(key)
        # Warn after construction so output order matches the original API.
        for key in unknown:
            print(f'WARNING: {key} is not a valid device type')
class Offaxis(PCDSDetector):
    '''Area detector for the off-axis camera in CXI.

    Parameters
    ----------
    ROI_port : str
        Plugin port name for the ROI
    ROI_stats_port : str
        Plugin port name for the ROI statistics
    ROI_image_port : str
        Plugin port name for the ROI image
    prefix : str
        PV prefix of the camera

    Attributes
    ----------
    ROI : ROIPlugin
        ROI on original rate image
    ROI_stats : StatsPlugin
        Stats on ROI of original rate image
    ROI_image : ImagePlugin
        Image of the ROI
    '''
    ROI = FCpt(ROIPlugin, '{self.prefix}:{self._ROI_port}:')
    ROI_stats = FCpt(StatsPlugin, '{self.prefix}:{self._ROI_stats_port}:')
    ROI_image = FCpt(ImagePlugin, '{self.prefix}:{self._ROI_image_port}:')

    def __init__(self, ROI_port,
                 ROI_stats_port,
                 ROI_image_port,
                 prefix, *args, **kwargs):
        self._ROI_port = ROI_port
        self._ROI_stats_port = ROI_stats_port
        self._ROI_image_port = ROI_image_port
        super().__init__(prefix, *args, **kwargs)
        # Route the stats and image plugins to read from the ROI plugin.
        for plugin in (self.ROI_stats, self.ROI_image):
            plugin.nd_array_port.put(ROI_port)
        # Enable the whole ROI chain.
        for plugin in (self.ROI, self.ROI_stats, self.ROI_image):
            plugin.enable.put('Enabled')
class Questar(PCDSDetector):
    '''Area detector for the inline Questar camera in CXI.

    Parameters
    ----------
    ROI_port : str
        Plugin port name for the ROI
    ROI_stats_port : str
        Plugin port name for the ROI statistics
    ROI_image_port : str
        Plugin port name for the ROI image
    prefix : str
        PV prefix of the camera

    Attributes
    ----------
    ROI : ROIPlugin
        ROI on original rate image
    ROI_stats : StatsPlugin
        Stats on ROI of original rate image
    ROI_image : ImagePlugin
        Image of the ROI
    '''
    ROI = FCpt(ROIPlugin, '{self.prefix}:{self._ROI_port}:')
    ROI_stats = FCpt(StatsPlugin, '{self.prefix}:{self._ROI_stats_port}:')
    ROI_image = FCpt(ImagePlugin, '{self.prefix}:{self._ROI_image_port}:')

    def __init__(self, ROI_port,
                 ROI_stats_port,
                 ROI_image_port,
                 prefix, *args, **kwargs):
        self._ROI_port = ROI_port
        self._ROI_stats_port = ROI_stats_port
        self._ROI_image_port = ROI_image_port
        super().__init__(prefix, *args, **kwargs)
        # Route the stats and image plugins to read from the ROI plugin.
        for plugin in (self.ROI_stats, self.ROI_image):
            plugin.nd_array_port.put(ROI_port)
        # Enable the whole ROI chain.
        for plugin in (self.ROI, self.ROI_stats, self.ROI_image):
            plugin.enable.put('Enabled')
class Parameters(Device, _TableMixin):
    '''
    Contains EPICS PVs used for jet tracking

    Each component is an EpicsSignal whose PV suffix (relative to the
    device prefix) is given as the second argument; the ``doc`` keyword
    describes its meaning and, where applicable, its units.
    '''
    # --- camera geometry ---
    cam_x = Cpt(EpicsSignal, ':CAM_X',
                doc='x-coordinate of camera position in mm')
    cam_y = Cpt(EpicsSignal, ':CAM_Y',
                doc='y-coordinate of camera position in mm')
    pxsize = Cpt(EpicsSignal, ':PXSIZE',
                 doc='size of pixel in mm')
    cam_roll = Cpt(EpicsSignal, ':CAM_ROLL',
                   doc='rotation of camera about z axis in radians')
    # --- x-ray beam position (mm and camera pixels) ---
    beam_x = Cpt(EpicsSignal, ':BEAM_X',
                 doc='x-coordinate of x-ray beam in mm (usually 0)')
    beam_y = Cpt(EpicsSignal, ':BEAM_Y',
                 doc='y-coordinate of x-ray beam in mm (usually 0)')
    beam_x_px = Cpt(EpicsSignal, ':BEAM_X_PX',
                    doc='x-coordinate of x-ray beam in camera image in pixels')
    beam_y_px = Cpt(EpicsSignal, ':BEAM_Y_PX',
                    doc='y-coordinate of x-ray beam in camera image in pixels')
    # --- nozzle and jet geometry ---
    nozzle_x = Cpt(EpicsSignal, ':NOZZLE_X',
                   doc='x-coordinate of nozzle in mm')
    nozzle_y = Cpt(EpicsSignal, ':NOZZLE_Y',
                   doc='y-coordinate of nozzle in mm')
    nozzle_xwidth = Cpt(EpicsSignal, ':NOZZLE_XWIDTH',
                        doc='width of nozzle in mm')
    jet_x = Cpt(EpicsSignal, ':JET_X',
                doc='distance from sample jet to x-ray beam in mm')
    jet_roll = Cpt(EpicsSignal, ':JET_ROLL',
                   doc='rotation of sample jet about z axis in radians')
    state = Cpt(EpicsSignal, ':STATE',
                doc='dictionary of strings')
    # --- counters and repetition rates ---
    jet_counter = Cpt(EpicsSignal, ':JET_Counter',
                      doc='Jet counter')
    jet_reprate = Cpt(EpicsSignal, ':JET_RepRate',
                      doc='Jet repetition rate')
    nozzle_counter = Cpt(EpicsSignal, ':NOZZLE_Counter',
                         doc='Nozzle counter')
    nozzle_reprate = Cpt(EpicsSignal, ':NOZZLE_RepRate',
                         doc='Nozzle repetition rate')
    # --- calibration values ---
    mean = Cpt(EpicsSignal, ':ROI_mean',
               doc='mean of calibration ROI image with jet')
    std = Cpt(EpicsSignal, ':ROI_std',
              doc='standard devation of calibration ROI image with jet')
    radius = Cpt(EpicsSignal, ':RADIUS',
                 doc='radius of calibration diffraction ring')
    intensity = Cpt(EpicsSignal, ':INTENSITY',
                    doc='intensity of calibration diffraction ring')
    # --- thresholds and acquisition settings ---
    thresh_hi = Cpt(EpicsSignal, ':THRESH_hi',
                    doc='upper threshold for CSPAD ring intensity')
    thresh_lo = Cpt(EpicsSignal, ':THRESH_lo',
                    doc='lower threshold for CSPAD ring intensity')
    thresh_w8 = Cpt(EpicsSignal, ':THRESH_w8',
                    doc='threshold for wave8')
    bypass_cam = Cpt(EpicsSignal, ':BYPASS_cam',
                     doc='bypass camera during jet tracking')
    frames_cam = Cpt(EpicsSignal, ':FRAMES_cam',
                     doc='number of frames for integration for camera')
    frames_cspad = Cpt(EpicsSignal, ':FRAMES_cspad',
                       doc='number of frames for integration for cspad')
class OffaxisParams(Device, _TableMixin):
    '''
    Contains EPICS PVs used with Offaxis camera for jet tracking

    Mirrors ``Parameters`` but for the off-axis camera orientation: z and
    pitch take the place of x and roll.  Each component is an EpicsSignal
    whose PV suffix (relative to the device prefix) is given as the second
    argument; the ``doc`` keyword describes its meaning and units.
    '''
    # --- camera geometry ---
    cam_z = Cpt(EpicsSignal, ':CAM_Z',
                doc='z-coordinate of camera position in mm')
    cam_y = Cpt(EpicsSignal, ':CAM_Y',
                doc='y-coordinate of camera position in mm')
    pxsize = Cpt(EpicsSignal, ':PXSIZE',
                 doc='size of pixel in mm')
    cam_pitch = Cpt(EpicsSignal, ':CAM_PITCH',
                    doc='rotation of camera about x axis in radians')
    # --- x-ray beam position (mm and camera pixels) ---
    beam_z = Cpt(EpicsSignal, ':BEAM_Z',
                 doc='z-coordinate of x-ray beam in mm (usually 0)')
    beam_y = Cpt(EpicsSignal, ':BEAM_Y',
                 doc='y-coordinate of x-ray beam in mm (usually 0)')
    beam_z_px = Cpt(EpicsSignal, ':BEAM_Z_PX',
                    doc='z-coordinate of x-ray beam in camera image in pixels')
    beam_y_px = Cpt(EpicsSignal, ':BEAM_Y_PX',
                    doc='y-coordinate of x-ray beam in camera image in pixels')
    # --- nozzle and jet geometry ---
    nozzle_z = Cpt(EpicsSignal, ':NOZZLE_Z',
                   doc='z-coordinate of nozzle in mm')
    nozzle_y = Cpt(EpicsSignal, ':NOZZLE_Y',
                   doc='y-coordinate of nozzle in mm')
    nozzle_zwidth = Cpt(EpicsSignal, ':NOZZLE_ZWIDTH',
                        doc='width of nozzle in mm')
    jet_z = Cpt(EpicsSignal, ':JET_Z',
                doc='distance from sample jet to x-ray beam in mm')
    jet_pitch = Cpt(EpicsSignal, ':JET_PITCH',
                    doc='rotation of sample jet about z axis in radians')
    state = Cpt(EpicsSignal, ':STATE',
                doc='dictionary of strings')
    # --- counters and repetition rates ---
    jet_counter = Cpt(EpicsSignal, ':JET_Counter',
                      doc='Jet counter')
    jet_reprate = Cpt(EpicsSignal, ':JET_RepRate',
                      doc='Jet repetition rate')
    nozzle_counter = Cpt(EpicsSignal, ':NOZZLE_Counter',
                         doc='Nozzle counter')
    nozzle_reprate = Cpt(EpicsSignal, ':NOZZLE_RepRate',
                         doc='Nozzle repetition rate')
    # --- calibration values ---
    mean = Cpt(EpicsSignal, ':ROI_mean',
               doc='mean of calibration ROI image with jet')
    std = Cpt(EpicsSignal, ':ROI_std',
              doc='standard devation of calibration ROI image with jet')
    radius = Cpt(EpicsSignal, ':RADIUS',
                 doc='radius of calibration diffraction ring')
    intensity = Cpt(EpicsSignal, ':INTENSITY',
                    doc='intensity of calibration diffraction ring')
    # --- thresholds and acquisition settings ---
    thresh_hi = Cpt(EpicsSignal, ':THRESH_hi',
                    doc='upper threshold for CSPAD ring intensity')
    thresh_lo = Cpt(EpicsSignal, ':THRESH_lo',
                    doc='lower threshold for CSPAD ring intensity')
    thresh_w8 = Cpt(EpicsSignal, ':THRESH_w8',
                    doc='threshold for wave8')
    bypass_cam = Cpt(EpicsSignal, ':BYPASS_cam',
                     doc='bypass camera during jet tracking')
    frames_cam = Cpt(EpicsSignal, ':FRAMES_cam',
                     doc='number of frames for integration for camera')
    frames_cspad = Cpt(EpicsSignal, ':FRAMES_cspad',
                       doc='number of frames for integration for cspad')
class Control(Device, _TableMixin):
    '''
    Contains EPICS PVs used for jet tracking control

    Each component is an EpicsSignal whose PV suffix (relative to the
    device prefix) is given as the second argument.
    '''
    # State PVs (RE, BEAM, INJECTOR namespaces).
    re_state = Cpt(EpicsSignal, ':RE:STATE')
    beam_state = Cpt(EpicsSignal, ':BEAM:STATE')
    injector_state = Cpt(EpicsSignal, ':INJECTOR:STATE')
    # Beam-related PVs (transmission, pulse energy, energy threshold).
    beam_trans = Cpt(EpicsSignal, ':BEAM:TRANS')
    beam_pulse_energy = Cpt(EpicsSignal, ':BEAM:PULSE_ENERGY')
    beam_e_thresh = Cpt(EpicsSignal, ':BEAM:E_THRESH')
    # Injector x-scan configuration PVs (step size, scan range, limits).
    xstep_size = Cpt(EpicsSignal, ':INJECTOR:XSTEP_SIZE')
    xscan_min = Cpt(EpicsSignal, ':INJECTOR:XSCAN_MIN')
    xscan_max = Cpt(EpicsSignal, ':INJECTOR:XSCAN_MAX')
    bounce_width = Cpt(EpicsSignal, ':INJECTOR:BOUNCE_WIDTH')
    xmin = Cpt(EpicsSignal, ':INJECTOR:XMIN')
    xmax = Cpt(EpicsSignal, ':INJECTOR:XMAX')
class Diffract(Device, _TableMixin):
    '''
    Contains EPICS PVs used for shared memory X-ray Diffraction detector
    used in jet tracking.

    Each component is an EpicsSignal whose PV suffix (relative to the
    device prefix) is given as the second argument; the ``doc`` keyword
    describes its meaning.
    '''
    # --- event counters and repetition rates ---
    total_counter = Cpt(EpicsSignal, ':TOTAL_Counter',
                        doc='Total counter')
    total_reprate = Cpt(EpicsSignal, ':TOTAL_RepRate',
                        doc='Diffraction total intensity calc rate')
    ring_counter = Cpt(EpicsSignal, ':RING_Counter',
                       doc='Diffraction ring intensity event counter')
    ring_reprate = Cpt(EpicsSignal, ':RING_RepRate',
                       doc='Diffraction ring intensity event counter')
    psd_counter = Cpt(EpicsSignal, ':PSD_Counter',
                      doc='Diffraction periodogram event counter')
    psd_reprate = Cpt(EpicsSignal, ':PSD_RepRate',
                      doc='Diffraction periodogram event counter')
    stats_counter = Cpt(EpicsSignal, ':STATS_Counter',
                        doc='Diffraction stats event counter')
    stats_reprate = Cpt(EpicsSignal, ':STATS_RepRate',
                        doc='Diffraction stats event counter')
    streak_counter = Cpt(EpicsSignal, ':STREAK_Counter',
                         doc='Diffraction streak event counter')
    streak_reprate = Cpt(EpicsSignal, ':STREAK_RepRate',
                         doc='Diffraction streak event counter')
    # --- intensity and streak statistics ---
    cspad_sum = Cpt(EpicsSignal, ':TOTAL_ADU',
                    doc='Total detector ADU')
    streak_fraction = Cpt(EpicsSignal, ':STREAK_FRACTION',
                          doc='Fraction of events with diffraction streak')
    stats_mean = Cpt(EpicsSignal, ':STATS_MEAN',
                     doc='Mean Diffraction Statistic')
    stats_std = Cpt(EpicsSignal, ':STATS_STD',
                    doc='Std Diffraction Statistic')
    stats_min = Cpt(EpicsSignal, ':STATS_MIN',
                    doc='Min Diffraction Statistic')
    stats_max = Cpt(EpicsSignal, ':STATS_MAX',
                    doc='Max Diffraction Statistic')
    # --- periodogram (PSD) frequency analysis ---
    psd_frequency = Cpt(EpicsSignal, ':PSD_FREQUENCY',
                        doc='Diffraction periodogram fundamental frequency')
    psd_amplitude = Cpt(EpicsSignal, ':PSD_AMPLITUDE',
                        doc='Diffraction periodogram Frequency analysis amplitude')
    psd_rate = Cpt(EpicsSignal, ':PSD_RATE',
                   doc='Event frequency for periodogram')
    psd_events = Cpt(EpicsSignal, ':PSD_EVENTS',
                     doc='Diffraction periodogram')
    psd_resolution = Cpt(EpicsSignal, ':PSD_RESOLUTION',
                         doc='Resultion to smooth over for periodogra')
    psd_freq_min = Cpt(EpicsSignal, ':PSD_FREQ_MIN',
                       doc='Minimum frequency for periodogram calcs')
    psd_amp_wf = Cpt(EpicsSignal, ':PSD_AMP_WF',
                     doc='Diffraction periodogram Frequency analysis waveform array')
    psd_freq_wf = Cpt(EpicsSignal, ':PSD_FREQ_WF',
                      doc='Diffraction periodogram frequency waveform')
    psd_amp_array = Cpt(EpicsSignal, ':PSD_AMP_ARRAY',
                        doc='Diffraction periodogram Frequency analysis amplitude array')
    state = Cpt(EpicsSignal, ':STATE',
                doc='State of diffraction analysis')
|
{"/jet_tracking/tests/test_sim.py": ["/jet_tracking/sim.py"], "/jet_tracking/tests/conftest.py": ["/jet_tracking/devices.py", "/jet_tracking/jet_control.py"], "/jet_tracking/tests/test_control.py": ["/jet_tracking/jet_control.py"]}
|
6,467
|
jeannas/jet_tracking
|
refs/heads/master
|
/jet_tracking/sim.py
|
import types
import numpy as np
from ophyd.sim import SynAxis, SynSignal
def generate_simulation(motor_column, signal_column, dataframe,
                        motor_precision=3, random_state=None):
    """
    Generate a simulation based on a provided DataFrame

    Builds a ``SynAxis`` motor (the independent variable) and a
    ``SynSignal`` (the dependent variable).  Reading the signal finds the
    recorded motor position closest to the motor's current position and
    returns one of the readings taken there, chosen at random.

    Parameters
    ----------
    motor_column: str
        Column used as the independent variable; also the motor's name
    signal_column: str
        Column used as the dependent variable; also the signal's name
    dataframe: pandas.DataFrame
        Data to use in simulation
    motor_precision: int, optional
        Limit the accuracy of the simulated motor
    random_state: np.random.RandomState, optional
        Seed the simulation

    Returns
    -------
    namespace: types.SimpleNamespace
        A namespace with attributes ``motor``, ``signal``, and ``data``.
    """
    if random_state is None:
        random_state = np.random.RandomState(0)
    # The motor serves as the independent variable.
    axis = SynAxis(name=motor_column, precision=motor_precision)
    ns = types.SimpleNamespace(data=dataframe, motor=axis)

    def sample_signal():
        # Pick the recorded position nearest the current one and return a
        # random reading taken at that position.
        positions = ns.data[motor_column].unique()
        grouped = dict(iter(ns.data.groupby(motor_column)))
        current = ns.motor.position
        nearest = positions[np.abs(positions - current).argmin()]
        return random_state.choice(grouped[nearest][signal_column])

    ns.signal = SynSignal(name=signal_column, func=sample_signal)
    return ns
|
{"/jet_tracking/tests/test_sim.py": ["/jet_tracking/sim.py"], "/jet_tracking/tests/conftest.py": ["/jet_tracking/devices.py", "/jet_tracking/jet_control.py"], "/jet_tracking/tests/test_control.py": ["/jet_tracking/jet_control.py"]}
|
6,468
|
jeannas/jet_tracking
|
refs/heads/master
|
/jet_tracking/cam_utils.py
|
import numpy as np
import cv2
from scipy.signal import peak_widths
from skimage.feature import register_translation
from skimage.feature import register_translation
from skimage.feature import canny, peak_local_max
from skimage.transform import hough_line, hough_line_peaks, rotate
def image_stats(img):
    '''Return the mean and standard deviation of an image.

    Parameters
    ----------
    img : ndarray
        image

    Returns
    -------
    mean : float
        mean of given image
    std : float
        standard deviation of image
    '''
    return np.mean(img), np.std(img)
def jet_detect(img, calibratemean, calibratestd):
    '''Finds the jet from the online camera roi using Canny edge detection and Hough line transform

    This method first compares the mean of the ROI image to the mean of the calibrated ROI.
    Then Canny edge detection is used to detect the edges of the jet in the ROI and convert
    the original image to a binary image.
    Hough line transform is performed on the binary image to determine the approximate position
    of the jet.
    Peak-finding is performed on several horizontal slices of the image, and a line is fitted
    to these points to determine the actual position of the jet.
    If a peak is found that is not in the approximate position of the jet determined by the
    Hough line transform, that point is not considered in the line fitting.

    Parameters
    ----------
    img : ndarray
        ROI of the on-axis image
    calibratemean : float
        mean of calibration ROI image with jet (see calibrate())
    calibratestd : float
        standard deviation calibration ROI image with jet (see calibrate())
        NOTE(review): currently unused -- only the mean check below is applied

    Returns
    -------
    rho : float
        Distance from (0,0) to the line in pixels
    theta : float
        Angle of the shortest vector from (0,0) to the line in radians

    Raises
    ------
    ValueError
        If no jet is found: the image mean is outside +/-20% of the
        calibration mean, the Hough transform step fails, or the final
        line fit fails.
    '''
    # compare mean & std of current image to mean & std of calibrate image
    # NOTE(review): std is computed but never checked against calibratestd.
    mean, std = image_stats(img)
    if (mean < calibratemean * 0.8) or (mean > calibratemean * 1.2):
        raise ValueError('ERROR mean: no jet')
    try:
        # use canny edge detection to convert image to binary
        binary = canny(img, sigma=2, use_quantiles=True, low_threshold=0.9, high_threshold=0.99)
        # perform Hough Line Transform on binary image
        h, angles, d = hough_line(binary)
        res = hough_line_peaks(h, angles, d, min_distance=1, threshold=int(img.shape[0]/3))
        # keep only valid lines
        valid = []
        for _, theta, dist in zip(*res):
            jetValid = True
            # jet must be within 45 degrees of vertical
            if (theta < np.radians(-45)) or (theta > np.radians(45)):
                jetValid = False
            # jet must start from top edge of image
            yint = dist / np.sin(theta)
            xint = np.tan(theta) * yint
            if (dist < 0) or (xint > binary.shape[1]):
                jetValid = False
            # jet must be within [x] pixels width
            #if (cam_utils.get_jet_width(img, rho, theta) * pxsize > 0.01):
            # jetValid = false
            # print('ERROR width: not a jet')
            if (jetValid):
                valid.append([theta, dist])
    except Exception:
        raise ValueError('ERROR hough: no jet')
    # use local maxes to determine exact jet position
    # line-fitting cannot be performed on vertical line (which is highly likely due to
    # nature of jet) so rotate image first
    imgr = rotate(img, 90, resize=True, preserve_range=True)
    jet_xcoords = []
    jet_ycoords = []
    for x in range(10):
        # try to find local maxes (corresponds to jet) in 10 rows along height of image)
        col = int(imgr.shape[1] / 10 * x)
        ymax = peak_local_max(imgr[:,col], threshold_rel=0.9, num_peaks=1)[0][0]
        # check if point found for max is close to jet lines found with Hough transform
        miny = imgr.shape[0]
        maxy = 0
        for theta, dist in valid:
            xint = dist / np.sin(theta)
            y = imgr.shape[0] - ((xint - col) * np.tan(theta))
            if (y < miny):
                miny = y
            if (y > maxy):
                maxy = y
        # if x found using local max is close to lines found with Hough transform, keep it
        # (within a 5-pixel tolerance band around the Hough line positions)
        if (ymax >= (miny - 5)) and (ymax <= (maxy + 5)):
            jet_xcoords.append(col)
            jet_ycoords.append(ymax)
    try:
        # fit a line to the points found using local max
        m, b = np.polyfit(jet_xcoords, jet_ycoords, 1)
        theta = -np.arctan(m)
        rho = np.cos(theta) * (imgr.shape[0] - b)
    except Exception:
        raise ValueError('ERROR polyfit: no jet')
    return rho, theta
def get_jet_z(rho, theta, roi_y, roi_z, *, pxsize, cam_y, cam_z, beam_y,
              beam_z, cam_pitch):
    '''Calculate the jet position at beam height for the off-axis camera.

    Works in the main coordinate system; z and pitch replace x and roll
    because of the camera orientation.

    Parameters
    ----------
    rho : float
        Distance from (0,0) to the jet line in pixels
    theta : float
        Angle of the shortest vector from (0,0) to the line in radians
    roi_y : int
        Y-coordinate of the ROI origin on the camera image in pixels
    roi_z : int
        Z-coordinate of the ROI origin on the camera image in pixels
    pxsize : float
        size of pixel in mm
    cam_y : float
        y-coordinate of camera position in mm
    cam_z : float
        z-coordinate of camera position in mm
    beam_y : float
        y-coordinate of x-ray beam in mm (usually 0)
    beam_z : float
        z-coordinate of x-ray beam in mm (usually 0)
    cam_pitch : float
        rotation of camera about x axis in radians

    Returns
    -------
    zj : float
        Jet position at the beam height in millimeters
    '''
    px_per_mm = 1.0 / pxsize
    cos_p = np.cos(-cam_pitch)
    sin_p = np.sin(-cam_pitch)
    # y-coordinate of the beam inside the ROI, in pixels
    beam_y_roi = px_per_mm * ((cam_y - beam_y) * cos_p
                              + (cam_z - beam_z) * sin_p) - roi_y
    # z-coordinate of the jet line at that height, still in ROI pixels
    jet_z_roi = (rho - beam_y_roi * np.sin(theta)) / np.cos(theta)
    # z-coordinate of the main-coordinate origin in ROI pixels
    origin_z_roi = px_per_mm * (cam_z * np.cos(cam_pitch)
                                - cam_y * sin_p) - roi_z
    # convert the pixel offset between origin and jet back to millimeters
    return pxsize * (origin_z_roi - jet_z_roi)
def get_jet_x(rho, theta, roi_x, roi_y, *, pxsize, cam_x, cam_y, beam_x,
              beam_y, cam_roll):
    '''Calculate the jet position at beam height in the main coordinate system.

    Parameters
    ----------
    rho : float
        Distance from (0,0) to the jet line in pixels
    theta : float
        Angle of the shortest vector from (0,0) to the line in radians
    roi_x : int
        X-coordinate of the ROI origin on the camera image in pixels
    roi_y : int
        Y-coordinate of the ROI origin on the camera image in pixels
    pxsize : float
        size of pixel in mm
    cam_x : float
        x-coordinate of camera position in mm
    cam_y : float
        y-coordinate of camera position in mm
    beam_x : float
        x-coordinate of x-ray beam in mm (usually 0)
    beam_y : float
        y-coordinate of x-ray beam in mm (usually 0)
    cam_roll : float
        rotation of camera about z axis in radians

    Returns
    -------
    xj : float
        Jet position at the beam height in millimeters
    '''
    px_per_mm = 1.0 / pxsize
    cos_r = np.cos(cam_roll)
    sin_r = np.sin(cam_roll)
    # y-coordinate of the beam inside the ROI, in pixels
    beam_y_roi = px_per_mm * ((cam_y - beam_y) * cos_r
                              + (cam_x - beam_x) * sin_r) - roi_y
    # x-coordinate of the jet line at beam height, still in ROI pixels
    jet_x_roi = (rho - beam_y_roi * np.sin(theta)) / np.cos(theta)
    # x-coordinate of the main-coordinate origin in ROI pixels
    origin_x_roi = px_per_mm * (cam_x * cos_r - cam_y * sin_r) - roi_x
    # convert the pixel offset between origin and jet back to millimeters
    return pxsize * (origin_x_roi - jet_x_roi)
def get_jet_pitch(theta, cam_pitch):
    '''Calculate the jet angle in the main coordinate system.

    Parameters
    ----------
    theta : float
        Angle of the shortest vector from (0,0) to the line in radians
    cam_pitch : float
        rotation of camera about x axis in radians

    Returns
    -------
    jet_pitch : float
        Jet angle in radians, wrapped into [-pi/2, pi/2)
    '''
    half_pi = np.pi / 2
    # shift by the camera pitch, then wrap the result into [-pi/2, pi/2)
    return (theta - half_pi - cam_pitch) % np.pi - half_pi
def get_jet_roll(theta, cam_roll):
    '''Calculate the jet angle in the main coordinate system.

    Parameters
    ----------
    theta : float
        Angle of the shortest vector from (0,0) to the line in radians
    cam_roll : float
        rotation of camera about z axis in radians

    Returns
    -------
    jet_roll : float
        Jet angle in radians, wrapped into [-pi/2, pi/2)
    '''
    half_pi = np.pi / 2
    # shift by the camera roll, then wrap the result into [-pi/2, pi/2)
    return (theta - half_pi - cam_roll) % np.pi - half_pi
def get_jet_width(im, rho, theta):
    '''Calculates the jet width
    Parameters
    ----------
    im : ndarray
        ROI of the on-axis image
    rho : float
        Distance from (0,0) to the line in pixels
    theta : float
        Angle of the shortest vector from (0,0) to the line in radians
    Returns
    -------
    w : float
        Jet width in pixels
    '''
    # Row and column index grids used for the fancy indexing below.
    rows, column_indices = np.ogrid[:im.shape[0], :im.shape[1]]
    # For each row y, the column where the (rho, theta) jet line crosses it.
    r = np.asarray([int((rho + y * np.sin(theta)) / np.cos(theta))
                    for y in range(im.shape[0])])
    # Wrap the offsets so every shifted index stays inside the image width.
    r = r % im.shape[1]
    # Shift each row left by its jet offset so the jet lines up in one column.
    column_indices = column_indices - r[:, np.newaxis]
    # Column-wise sum of the aligned image: the jet becomes a single peak.
    s = im[rows, column_indices].sum(axis=0)
    # Width of that strongest peak, in pixels.
    return peak_widths(s, [s.argmax()])[0]
def get_offaxis_coords(cam_beam_y, cam_beam_z, *, cam_pitch, pxsize):
    '''Find cam_y and cam_z from the pixel coordinates of the origin.

    Parameters
    ----------
    cam_beam_y : float
        y coordinate for the beam (= main coordinate origin) on the camera in pixels
    cam_beam_z : float
        z coordinate for the beam (= main coordinate origin) on the camera in pixels
    cam_pitch : float
        rotation of camera about x axis in radians
    pxsize : float
        size of pixel in mm

    Returns
    -------
    cam_y : float
        Y-coordinate of the camera origin in the main coordinate system in millimeters
    cam_z : float
        Z-coordinate of the camera origin in the main coordinate system in millimeters
    '''
    cos_p = np.cos(cam_pitch)
    sin_p = np.sin(cam_pitch)
    # rotate the beam pixel coordinates by the camera pitch, then scale to mm
    cam_y = pxsize * (cam_beam_z * sin_p + cam_beam_y * cos_p)
    cam_z = pxsize * (cam_beam_z * cos_p - cam_beam_y * sin_p)
    return cam_y, cam_z
def get_cam_coords(cam_beam_x, cam_beam_y, *, cam_roll, pxsize):
    '''Find cam_x and cam_y from the pixel coordinates of the origin.

    Parameters
    ----------
    cam_beam_x : float
        x coordinate for the beam (= main coordinate origin) on the camera in pixels
    cam_beam_y : float
        y coordinate for the beam (= main coordinate origin) on the camera in pixels
    cam_roll : float
        rotation of camera about z axis in radians
    pxsize : float
        size of pixel in mm

    Returns
    -------
    cam_x : float
        X-coordinate of the camera origin in the main coordinate system in millimeters
    cam_y : float
        Y-coordinate of the camera origin in the main coordinate system in millimeters
    '''
    cos_r = np.cos(cam_roll)
    sin_r = np.sin(cam_roll)
    # rotate the beam pixel coordinates by the camera roll, then scale to mm
    cam_x = pxsize * (cam_beam_y * sin_r + cam_beam_x * cos_r)
    cam_y = pxsize * (cam_beam_y * cos_r - cam_beam_x * sin_r)
    return cam_x, cam_y
def get_cam_pitch(imgs):
    '''Find the off-axis camera pitch angle.

    Parameters
    ----------
    imgs : list(ndarray)
        List of images where nozzle has been moved in x-direction

    Returns
    -------
    cam_pitch : float
        Offaxis camera pitch angle in radians
    '''
    total_y = 0.0
    total_z = 0.0
    for im1, im2 in zip(imgs, imgs[1:]):
        # sub-pixel shift between consecutive frames (upsampling factor 100)
        (dy, dz), _error, _diffphase = register_translation(im1, im2, 100)
        # flip the pair so every shift points the same way before summing
        if dy < 0:
            dy, dz = -dy, -dz
        total_y += dy
        total_z += dz
    return np.arctan(total_y / total_z)
def get_cam_roll(imgs):
    '''Find the camera roll angle.

    Parameters
    ----------
    imgs : list(ndarray)
        List of images where nozzle has been moved in x-direction

    Returns
    -------
    cam_roll : float
        Camera angle in radians
    '''
    total_y = 0.0
    total_x = 0.0
    for im1, im2 in zip(imgs, imgs[1:]):
        # sub-pixel shift between consecutive frames (upsampling factor 100)
        (dy, dx), _error, _diffphase = register_translation(im1, im2, 100)
        # flip the pair so every shift points the same way before summing
        if dy < 0:
            dy, dx = -dy, -dx
        total_y += dy
        total_x += dx
    return -np.arctan(total_y / total_x)
def get_cam_pitch_pxsize(imgs, positions):
    '''Find the off-axis camera pitch and pixel size.

    Parameters
    ----------
    imgs : list(ndarray)
        List of images where nozzle has been moved in x-direction
    positions : list(float)
        List of motor positions in millimeters

    Returns
    -------
    cam_pitch : float
        Camera angle in radians
    pxsize : float
        Pixel size in millimeters
    '''
    total_y = 0.0
    total_z = 0.0
    travel_mm = 0.0
    for i in range(len(positions) - 1):
        # sub-pixel shift between consecutive frames (upsampling factor 100)
        (dy, dz), _error, _diffphase = register_translation(imgs[i],
                                                            imgs[i + 1], 100)
        # flip the pair so every shift points the same way before summing
        if dy < 0:
            dy, dz = -dy, -dz
        total_y += dy
        total_z += dz
        travel_mm += abs(positions[i + 1] - positions[i])
    cam_pitch = np.arctan(total_y / total_z)
    # pixel size = total motor travel divided by total image shift in pixels
    pxsize = travel_mm / np.sqrt(total_y ** 2 + total_z ** 2)
    return cam_pitch, pxsize
def get_cam_roll_pxsize(imgs, positions):
    '''Find the camera roll angle and pixel size.

    Parameters
    ----------
    imgs : list(ndarray)
        List of images where nozzle has been moved in x-direction
    positions : list(float)
        List of motor positions in millimeters

    Returns
    -------
    cam_roll : float
        Camera angle in radians
    pxsize : float
        Pixel size in millimeters
    '''
    total_y = 0.0
    total_x = 0.0
    travel_mm = 0.0
    for i in range(len(positions) - 1):
        # sub-pixel shift between consecutive frames (upsampling factor 100)
        (dy, dx), _error, _diffphase = register_translation(imgs[i],
                                                            imgs[i + 1], 100)
        # flip the pair so every shift points the same way before summing
        if dy < 0:
            dy, dx = -dy, -dx
        total_y += dy
        total_x += dx
        travel_mm += abs(positions[i + 1] - positions[i])
    cam_roll = -np.arctan(total_y / total_x)
    # pixel size = total motor travel divided by total image shift in pixels
    pxsize = travel_mm / np.sqrt(total_y ** 2 + total_x ** 2)
    return cam_roll, pxsize
def get_nozzle_shift(im1, im2, *, cam_roll, pxsize):
    '''Find the distance the nozzle has shifted between two images.

    Parameters
    ----------
    im1 : ndarray
        On-axis camera image 1
    im2 : ndarray
        On-axis camera image 2
    cam_roll : float
        rotation of camera about z axis in radians
    pxsize : float
        size of pixel in mm

    Returns
    -------
    dy : float
        Distance in y
    dx : float
        Distance in x
    '''
    # sub-pixel image shift (upsampling factor 100)
    (shift_y, shift_x), _error, _diffphase = register_translation(im1, im2,
                                                                  100)
    cos_r = np.cos(cam_roll)
    sin_r = np.sin(cam_roll)
    # rotate the pixel shift into the main coordinate system, scale to mm
    dx = (shift_x * cos_r - shift_y * sin_r) * pxsize
    dy = (shift_y * cos_r + shift_x * sin_r) * pxsize
    return dy, dx
|
{"/jet_tracking/tests/test_sim.py": ["/jet_tracking/sim.py"], "/jet_tracking/tests/conftest.py": ["/jet_tracking/devices.py", "/jet_tracking/jet_control.py"], "/jet_tracking/tests/test_control.py": ["/jet_tracking/jet_control.py"]}
|
6,469
|
jeannas/jet_tracking
|
refs/heads/master
|
/jet_tracking/jet_control.py
|
from time import sleep
from . import cam_utils
from . import jt_utils
from .move_motor import movex
class JetControl:
    '''
    Jet tracking control class using jet_tracking methods
    '''

    def __init__(self, name, injector, camera, params, diffract, *,
                 offaxis=False, **kwargs):
        # Devices and PV bundle this controller coordinates.
        self.name = name
        self.injector = injector
        self.camera = camera
        self.params = params
        self.diffract = diffract
        # offaxis=True selects the y-z plane (off-axis camera) code paths.
        self.offaxis = offaxis

    def set_beam(self, beam_x_px, beam_y_px):
        '''
        Set the coordinates for the x-ray beam

        Parameters
        ----------
        beam_x_px : int
            x-coordinate of x-ray beam in the camera image in pixels
        beam_y_px : int
            y-coordinate of x-ray beam in the camera image in pixels
        '''
        set_beam(beam_x_px, beam_y_px, self.params)

    def calibrate(self, **kwargs):
        '''
        Calibrate the camera (off-axis or inline, per ``self.offaxis``)
        '''
        run = calibrate_off_axis if self.offaxis else calibrate_inline
        return run(self.injector, self.camera, self.params, **kwargs)

    def jet_calculate(self):
        '''
        Track the sample jet and calculate the distance to the x-ray beam
        '''
        run = jet_calculate_off_axis if self.offaxis else jet_calculate_inline
        return run(self.camera, self.params)

    def jet_move(self):
        '''
        Move the sample jet to the x-ray beam (inline camera only)
        '''
        if self.offaxis:
            raise NotImplementedError()
        jet_move_inline(self.injector, self.camera, self.params)
def get_burst_avg(n, image_plugin):
    '''
    Get the average of n consecutive images from a camera

    Parameters
    ----------
    n : int
        number of consecutive images to be averaged
    image_plugin : ImagePlugin
        camera ImagePlugin from which the images will be taken

    Returns
    -------
    burst_avg : ndarray
        average image
    '''
    # BUG FIX: this module never imports numpy, so the original `np`
    # reference raised NameError at call time.  Import locally to keep the
    # module's top-level import block unchanged.
    import numpy as np
    image_x, image_y = image_plugin.image.shape
    burst_imgs = np.empty((n, image_x, image_y))
    for i in range(n):
        # each read of .image grabs the current frame from the plugin
        burst_imgs[i] = image_plugin.image
    burst_avg = burst_imgs.mean(axis=0)
    return burst_avg
def set_beam(beam_x_px, beam_y_px, params):
    '''
    Set the coordinates for the x-ray beam

    Parameters
    ----------
    beam_x_px : int
        x-coordinate of x-ray beam in the camera image in pixels
    beam_y_px : int
        y-coordinate of x-ray beam in the camera image in pixels
    params : Parameters
        EPICS PVs used for recording jet tracking data
    '''
    # write the beam pixel position into the jet-tracking PVs (x then y)
    for pv, value in ((params.beam_x_px, beam_x_px),
                      (params.beam_y_px, beam_y_px)):
        pv.put(value)
def get_calibration_images(axis, camera, *, settle_time=1.0,
                           burst_images=20):
    '''Collect motor positions and burst-averaged images for calibration.

    Captures an image/position pair, steps *axis* back 0.1 from its current
    setpoint, captures again, then restores the starting position.

    Parameters
    ----------
    axis : motor
        injector axis to step (units presumably mm — confirm against caller)
    camera : Questar
        camera whose ``image`` plugin is sampled
    settle_time : float, optional
        Additional settle time after moving the motor
    burst_images : int, optional
        Number of burst images to average per capture

    Returns
    -------
    (positions, imgs) : (list(float), list(ndarray))
        Motor readbacks and the matching averaged images
    '''
    # collect images and motor positions to calculate pxsize and cam_roll
    imgs = []
    positions = []
    start_pos = axis.user_readback.get()
    for i in range(2):
        image = cam_utils.get_burst_avg(burst_images, camera.image)
        imgs.append(image)
        positions.append(axis.user_readback.get())
        # step back 0.1 from the current setpoint before the next capture
        next_position = axis.user_setpoint.get() - 0.1
        axis.set(next_position, wait=True)
        sleep(settle_time)
    # restore the motor to where it started
    axis.set(start_pos, wait=True)
    sleep(settle_time)
    return positions, imgs
def calibrate_off_axis(injector, camera, params, *, settle_time=1.0,
                       burst_images=20):
    '''
    Calibrate the off-axis camera

    Parameters
    ----------
    injector : Injector
        sample injector
    camera : Questar
        camera looking at sample jet and x-rays
    params : Parameters
        EPICS PVs used for recording jet tracking data
    settle_time : float, optional
        Additional settle time after moving the motor
    burst_images : int, optional
        Number of burst images to average from the camera

    Returns
    -------
    dict
        jet_pitch, pxsize and cam_pitch as stored in the params PVs
    '''
    # TODO (koglin) check sign for off-axis calculations
    injector_axis = injector.coarseX
    # BUG FIX: burst_images was accepted but never forwarded (unlike
    # calibrate_inline), so the default of 20 was always used here.
    positions, imgs = get_calibration_images(injector_axis, camera,
                                             settle_time=settle_time,
                                             burst_images=burst_images)
    # cam_pitch: rotation of camera about x axis in radians
    # pxsize: size of pixel in mm
    cam_pitch, pxsize = cam_utils.get_cam_pitch_pxsize(imgs, positions)
    params.pxsize.put(pxsize)
    params.cam_pitch.put(cam_pitch)
    # pixel coordinates of the x-ray beam in the camera image
    beam_y_px = params.beam_y_px.get()
    beam_z_px = params.beam_z_px.get()
    cam_y, cam_z = cam_utils.get_offaxis_coords(beam_y_px, beam_z_px,
                                                cam_pitch=cam_pitch,
                                                pxsize=pxsize)
    params.cam_y.put(cam_y)
    params.cam_z.put(cam_z)
    # jet_pitch: rotation of the sample jet about the x axis in radians
    jet_pitch = cam_utils.get_jet_pitch(params.theta.get(), cam_pitch=cam_pitch)
    params.jet_pitch.put(jet_pitch)
    return dict(jet_pitch=jet_pitch, pxsize=pxsize,
                cam_pitch=cam_pitch)
def calibrate_inline(injector, camera, params, *, settle_time=1.0,
                     burst_images=20):
    '''
    Calibrate the inline camera

    Parameters
    ----------
    injector : Injector
        sample injector
    camera : Questar
        camera looking at sample jet and x-rays
    params : Parameters
        EPICS PVs used for recording jet tracking data
    settle_time : float, optional
        Additional settle time after moving the motor
    burst_images : int, optional
        Number of burst images to average from the camera

    Returns
    -------
    dict
        jet_roll, pxsize and cam_roll as stored in the params PVs
    '''
    injector_axis = injector.coarseZ
    # collect images and motor positions needed for calibration
    positions, imgs = get_calibration_images(injector_axis, camera,
                                             settle_time=settle_time,
                                             burst_images=burst_images)
    # cam_roll: rotation of camera about z axis in radians
    # pxsize: size of pixel in mm
    cam_roll, pxsize = cam_utils.get_cam_roll_pxsize(imgs, positions)
    params.pxsize.put(pxsize)
    params.cam_roll.put(cam_roll)
    # beam_x_px: x-coordinate of x-ray beam in camera image in pixels
    beam_x_px = params.beam_x_px.get()
    # beam_y_px: y-coordinate of x-ray beam in camera image in pixels
    beam_y_px = params.beam_y_px.get()
    # cam_x: x-coordinate of camera position in mm
    # cam_y: y-coordinate of camera position in mm
    cam_x, cam_y = cam_utils.get_cam_coords(beam_x_px, beam_y_px,
                                            cam_roll=cam_roll, pxsize=pxsize)
    params.cam_x.put(cam_x)
    params.cam_y.put(cam_y)
    # jet_roll: rotation of sample jet about z axis in radians
    jet_roll = cam_utils.get_jet_roll(params.theta.get(), cam_roll=cam_roll)
    params.jet_roll.put(jet_roll)
    return dict(jet_roll=jet_roll, pxsize=pxsize,
                cam_roll=cam_roll)
def calibrate(injector, camera, cspad, wave8, params, *, offaxis=False, settle_time=0.1):
    '''
    Calibrate the camera and CSPAD and determine parameters needed for
    jet tracking

    NEED TO CHECK offaxis calculation sign

    First set the ROI of the camera to show the proper jet and illumination.
    Determines the mean, standard deviation, radius, intensity, jet position and
    tilt, pixel size, camera position and tilt

    Params determined if onaxis camera used: mean, std, radius, intensity, pxsize,
    camX, camY, cam_roll, jet_roll
    Params determined if offaxis camera used: mean, std, radius, intensity, pxsize,
    camY, camZ, cam_pitch, jet_pitch

    Parameters
    ----------
    injector : Injector
        sample injector
    camera : Questar
        camera looking at sample jet and x-rays
    cspad : CSPAD
        CSPAD for data
    wave8 : Wave8
        Wave8 to normalize data from CSPAD
    params : Parameters
        EPICS PVs used for recording jet tracking data
    settle_time : float, optional
        Additional settle time after moving the motor
    offaxis : bool, optional
        Camera is off-axis in y-z plane
    '''
    # find jet in camera ROI
    ROI_image = get_burst_avg(params.frames_cam.get(), camera.ROI_image)
    mean, std = cam_utils.image_stats(ROI_image)
    # NOTE(review): jet_calculate_* call jet_detect with only the image while
    # this passes (image, mean, std) — confirm cam_utils.jet_detect's actual
    # signature; one of the two call sites is presumably wrong.
    rho, theta = cam_utils.jet_detect(ROI_image, mean, std)
    params.mean.put(mean)
    params.std.put(std)
    # take calibration CSPAD data
    # get CSPAD and wave8 data
    # NOTE(review): get_azav and gas_det are not defined anywhere in this
    # module — this block raises NameError as written; confirm where these
    # helpers are meant to come from.
    azav, norm = get_azav(cspad) # call azimuthal average function
    r, i = jt_utils.fit_CSPAD(azav, norm, gas_det)
    params.radius.put(r)
    params.intensity.put(i)
    # call appropriate camera calibration method
    if offaxis:
        return calibrate_off_axis(injector, camera, params,
                                  settle_time=settle_time)
    else:
        return calibrate_inline(injector, camera, params,
                                settle_time=settle_time)
def jet_calculate_off_axis(camera, params):
    '''
    Detect the sample jet and calculate the distance to the x-ray beam using
    the off-axis camera

    Parameters
    ----------
    camera : Questar
        camera looking at the sample jet and x-ray beam
    params : Parameters
        EPICS PVs used for recording jet tracking data

    Returns
    -------
    dict
        rho, theta, cam_y, cam_z and jet_z for this step
    '''
    # detect the jet in the camera ROI
    ROI_image = cam_utils.get_burst_avg(20, camera.ROI_image)
    # NOTE(review): calibrate() calls jet_detect with (image, mean, std);
    # confirm which arity cam_utils.jet_detect actually accepts.
    rho, theta = cam_utils.jet_detect(ROI_image)
    # check x-ray beam position
    beam_y_px = params.beam_y_px.get()
    beam_z_px = params.beam_z_px.get()
    # refresh the camera origin from the current beam pixel position
    cam_y, cam_z = cam_utils.get_offaxis_coords(
        beam_y_px, beam_z_px, cam_pitch=params.cam_pitch.get(),
        pxsize=params.pxsize.get())
    params.cam_y.put(cam_y)
    params.cam_z.put(cam_z)
    # find distance from jet to x-rays
    roi_z = camera.ROI.min_xyz.min_x.get()
    roi_y = camera.ROI.min_xyz.min_y.get()
    jet_z = cam_utils.get_jet_z(rho, theta, roi_y=roi_y, roi_z=roi_z,
                                pxsize=params.pxsize.get(), cam_y=cam_y,
                                cam_z=cam_z, beam_y=params.beam_y.get(),
                                beam_z=params.beam_z.get(),
                                cam_pitch=params.cam_pitch.get())
    params.jet_z.put(jet_z)
    return dict(rho=rho, theta=theta, cam_y=cam_y, cam_z=cam_z, jet_z=jet_z)
def jet_calculate_inline(camera, params):
    '''
    Detect the sample jet and calculate the distance to the x-ray beam using
    the inline camera

    Parameters
    ----------
    camera : Questar
        camera looking at the sample jet and x-ray beam
    params : Parameters
        EPICS PVs used for recording jet tracking data

    Returns
    -------
    dict
        rho, theta, cam_x, cam_y and jet_x for this step
    '''
    # detect the jet in the camera ROI
    ROI_image = cam_utils.get_burst_avg(20, camera.ROI_image)
    # NOTE(review): calibrate() calls jet_detect with (image, mean, std);
    # confirm which arity cam_utils.jet_detect actually accepts.
    rho, theta = cam_utils.jet_detect(ROI_image)
    # check x-ray beam position
    beam_x_px = params.beam_x_px.get()
    beam_y_px = params.beam_y_px.get()
    # refresh the camera origin from the current beam pixel position
    cam_x, cam_y = cam_utils.get_cam_coords(
        beam_x_px, beam_y_px, cam_roll=params.cam_roll.get(),
        pxsize=params.pxsize.get())
    params.cam_x.put(cam_x)
    params.cam_y.put(cam_y)
    # find distance from jet to x-rays
    ROIx = camera.ROI.min_xyz.min_x.get()
    roi_y = camera.ROI.min_xyz.min_y.get()
    jet_x = cam_utils.get_jet_x(rho, theta, ROIx, roi_y,
                                pxsize=params.pxsize.get(), cam_x=cam_x,
                                cam_y=cam_y, beam_x=params.beam_x.get(),
                                beam_y=params.beam_y.get(),
                                cam_roll=params.cam_roll.get())
    params.jet_x.put(jet_x)
    return dict(rho=rho, theta=theta, cam_y=cam_y, cam_x=cam_x, jet_x=jet_x)
def jet_move_inline(injector, camera, params):
    'A single step of the infinite-loop jet_move'
    roi_min_x = camera.ROI.min_xyz.min_x.get()
    # roi_y = camera.ROI.min_xyz.min_y.get()
    if not (abs(params.jet_x.get()) > 0.01):
        # jet already within tolerance of the beam: nothing to do this step
        return
    # nudge the jet onto the x-rays with the injector motor
    print(f'Moving {params.jet_x.get()} mm')
    movex(injector.coarseX, -params.jet_x.get())
    # shift the camera ROI so it keeps looking at the jet
    camera.ROI.min_xyz.min_x.put(
        roi_min_x + params.jet_x.get() / params.pxsize.get())
    # Future state-machine sketch (kept from the original):
    # if params.state == [some state]:
    #     [use [x] for jet tracking]
    # elif params.state == [some other state]:
    #     [use [y] for jet tracking]
    # else:
    #     [revert to manual injector controls]
    # if jet is clear in image:
    #     if jet_x != beam_x: move injector.coarseX
    #     (walk_to_pixel(detector, motor, target) ??)
    # elif nozzle is clear in image:
    #     if nozzleX != beam_x: move injector.coarseX
    # else:
    #     if injector.coarseX.get() != beam_x: move injector.coarseX
def jet_scan(injector, cspad):
    '''
    Scans jet across x-rays twice to determine highest intensity, then moves jet
    to that position

    Parameters
    ----------
    injector : Injector
        sample injector
    cspad : CSPAD
        CSPAD for data
    '''
    # step number & sizes from Mark's code
    x_min = 0.0012
    steps = 50
    # start half the total sweep length to the negative side of the beam
    x_step = (-1) * steps * x_min / 2
    hi_intensities = []
    best_pos = []
    for i in range(2):
        # move motor to first position
        injector.coarseX.mv(x_step, wait=True)
        intensities = []
        positions = []
        for j in range(steps):
            # NOTE(review): the motor is never stepped inside this loop, so
            # all `steps` samples are taken at the same position — a per-step
            # move of x_min is presumably missing here.
            positions.append(injector.coarseX.user_readback.get())
            # get azav from CSPAD
            # get CSPAD and wave8
            # NOTE(review): get_azav, gas_det, params and np are not defined
            # in this module, and CSPAD (uppercase) is used where the cspad
            # argument was presumably intended — this function raises
            # NameError as written.
            azav, norm = get_azav(CSPAD) #call azimuthal average function
            intensities.append(jt_utils.get_cspad(azav, params.radius.get(), gas_det))
        hi_intensities.append(max(intensities))
        best_pos.append(positions[intensities.index(max(intensities))])
    # move motor to average of best positions from two sweeps
    injector.coarseX.mv(np.average(best_pos))
|
{"/jet_tracking/tests/test_sim.py": ["/jet_tracking/sim.py"], "/jet_tracking/tests/conftest.py": ["/jet_tracking/devices.py", "/jet_tracking/jet_control.py"], "/jet_tracking/tests/test_control.py": ["/jet_tracking/jet_control.py"]}
|
6,470
|
jeannas/jet_tracking
|
refs/heads/master
|
/jet_tracking/tests/test_control.py
|
import pytest
from . import conftest
def test_smoke_set_beam(jet_control):
    # Smoke test: set_beam must write both beam pixel PVs on the params.
    from ..jet_control import set_beam
    set_beam(1, 2, jet_control.params)
    assert jet_control.params.beam_x_px.get() == 1
    assert jet_control.params.beam_y_px.get() == 2
@pytest.mark.parametrize("use_offaxis", [False, True])
def test_smoke_calibrate(jet_control, injector, questar, parameters,
                         offaxis_parameters, use_offaxis):
    # Smoke test: calibrate() runs end-to-end for both camera orientations
    # against random images.
    from ..jet_control import calibrate
    # pick the PV bundle matching the camera orientation under test
    params = (offaxis_parameters
              if use_offaxis
              else parameters)
    conftest.set_random_image(questar.image)
    conftest.set_random_image(questar.ROI_image)
    # NOTE(review): jet_control.calibrate also takes cspad and wave8
    # positionally — confirm this call matches the current signature.
    calibrate(injector=injector, camera=questar, params=params,
              offaxis=use_offaxis)
@pytest.mark.parametrize("use_offaxis", [False, True])
def test_smoke_jet_calculate(questar, parameters,
                             offaxis_parameters, use_offaxis):
    # Smoke test: jet position calculation runs for both camera orientations.
    from ..jet_control import jet_calculate_off_axis, jet_calculate_inline
    conftest.set_random_image(questar.image)
    conftest.set_random_image(questar.ROI_image)
    # give the ROI a nonzero origin so the coordinate math is exercised
    questar.ROI.min_xyz.min_x.sim_put(1)
    questar.ROI.min_xyz.min_y.sim_put(1)
    questar.ROI.min_xyz.min_z.sim_put(1)
    if use_offaxis:
        jet_calculate_off_axis(camera=questar, params=offaxis_parameters)
    else:
        jet_calculate_inline(camera=questar, params=parameters)
@pytest.mark.parametrize("jet_x", [0.0, 0.1])
def test_smoke_jet_move(injector, questar, parameters,
                        jet_x):
    # Smoke test: one jet_move step, both below (0.0) and above (0.1) the
    # 0.01 movement threshold in jet_move_inline.
    from ..jet_control import jet_move_inline
    questar.ROI.min_xyz.min_x.put(1)
    parameters.jet_x.sim_put(jet_x)
    jet_move_inline(injector=injector, camera=questar, params=parameters)
# Device classes that do not provide a `.table` attribute and are skipped.
devices_without_table = {'Questar', 'Offaxis', 'SDS'}
def test_table(device_instances):
    # Every public device on the fixture should render its table without
    # raising.
    for dev_name in dir(device_instances):
        if dev_name.startswith('_') or dev_name in devices_without_table:
            continue
        dev = getattr(device_instances, dev_name)
        print()
        print(f'-- {dev_name} --')
        print(dev.table)
|
{"/jet_tracking/tests/test_sim.py": ["/jet_tracking/sim.py"], "/jet_tracking/tests/conftest.py": ["/jet_tracking/devices.py", "/jet_tracking/jet_control.py"], "/jet_tracking/tests/test_control.py": ["/jet_tracking/jet_control.py"]}
|
6,491
|
amaan2398/EcommerceWebsiteDjango
|
refs/heads/master
|
/src/cart/models.py
|
from django.db import models
from datetime import datetime
from django.utils import timezone
# Create your models here.
class Cart(models.Model):
    # A cart line item: one row per (customer, product).  Rows double as
    # order history — shipment=False marks the live cart, shipment=True a
    # purchased item tied to a Shipment row via bill_id.
    # NOTE(review): the *_id integers reference rows of other apps' models
    # by hand; ForeignKey fields would add referential integrity — confirm
    # before changing the schema.
    customer_id = models.IntegerField(blank=False,null=False)
    product_id = models.IntegerField(blank=False,null=False)
    address_id = models.IntegerField(blank=False,null=False,default=0)
    bill_id = models.IntegerField(blank=False,null=False,default=0)
    product_quantity = models.IntegerField(blank=False,null=False)
    shipment = models.BooleanField(blank=False,null=False,default=False)
class Shipment(models.Model):
    # A completed checkout: creation time, purchasing customer and order
    # total.  Cart rows reference this via Cart.bill_id.
    date_time = models.DateTimeField(default=datetime.now, blank=True)
    customer_id = models.IntegerField(blank=False,null=False)
    total_amount = models.IntegerField(blank=False,null=False)
|
{"/src/cart/views.py": ["/src/cart/models.py"], "/src/account/views.py": ["/src/account/models.py"], "/src/product/views.py": ["/src/product/models.py"]}
|
6,492
|
amaan2398/EcommerceWebsiteDjango
|
refs/heads/master
|
/src/account/urls.py
|
from django.urls import path
from . import views
# Account app routes: authentication plus profile and address management.
urlpatterns = [
    path("register/",views.register,name="register"),
    path("login/",views.login,name="login"),
    path("logout/",views.logout,name="logout"),
    path("profile/",views.profile,name="profile"),
    path("edit_address/",views.edit_address,name="edit_address"),
    path("remove_address/<int:id>",views.remove_address,name="remove_address"),
]
|
{"/src/cart/views.py": ["/src/cart/models.py"], "/src/account/views.py": ["/src/account/models.py"], "/src/product/views.py": ["/src/product/models.py"]}
|
6,493
|
amaan2398/EcommerceWebsiteDjango
|
refs/heads/master
|
/src/cart/views.py
|
from django.shortcuts import render,redirect
from django.http import HttpResponseNotFound
from .models import Cart,Shipment
from product.models import Product
from account.models import Address
from django.contrib.auth.models import User,auth
def cart_data_add(cid):
    '''Return ``{"count": n}`` — the number of unshipped cart rows for
    customer *cid* (used for the cart badge in templates).
    '''
    # count at the database instead of fetching every row just for len()
    count = Cart.objects.filter(customer_id=cid, shipment=False).count()
    return {"count": count}
# Create your views here.
def cart(request):
    '''Render the current user's cart with per-line and grand totals;
    redirect anonymous users to login.
    '''
    if request.user.id != None:
        # navbar state: the middle (cart) tab is active
        a = ['','active','']
        cdata = Cart.objects.filter(customer_id=request.user.id,shipment=False)
        data = []
        tamount = 0
        # NOTE(review): one Product query per cart row (N+1 pattern), and
        # t_d[0] raises IndexError if the product row no longer exists.
        for i in cdata:
            t_d = Product.objects.filter(id=i.product_id)
            data.append(dict({"p_id":i.product_id,"p_name":t_d[0].name,"price":t_d[0].price,"quantity":i.product_quantity,"amount_t":(t_d[0].price * i.product_quantity)}))
            tamount += t_d[0].price * i.product_quantity
        fdata = dict({"total":tamount})
        cid = request.user.id
        # cdata is reused here as the badge payload for the template
        cdata = dict({"count":len(data)})
        address = Address.objects.filter(customer_id=cid)
        return render(request,"product/cart.html",{"a":a,"data":data,"fdata":fdata,"address":address,"cdata":cdata})
    else:
        return redirect('login')
def addtocart(request,id):
    '''Add one unit of product *id* to the current user's active cart,
    creating the row if it does not exist yet, then go back home.
    '''
    cid = request.user.id
    exists = Cart.objects.filter(customer_id=cid, product_id=id,
                                 shipment=False).exists()
    if exists:
        # bump the quantity on the existing cart row
        entry = Cart.objects.get(customer_id=cid, product_id=id,
                                 shipment=False)
        entry.product_quantity += 1
        entry.save()
    else:
        # first unit of this product for this customer
        entry = Cart(customer_id=cid, product_id=id, product_quantity=1,
                     shipment=False)
        entry.save()
    return redirect("/")
def addrm_pro_qut(request,id,v):
    '''Adjust the quantity of product *id* in the current user's active cart.

    Parameters
    ----------
    request : HttpRequest
    id : int
        product id
    v : int
        1 adds one unit (creating the row if missing), 0 removes one unit
        and deletes the row once its quantity reaches zero
    '''
    cid = request.user.id
    active = dict(customer_id=cid, product_id=id, shipment=False)
    if v == 1:
        if Cart.objects.filter(**active).exists():
            entry = Cart.objects.get(**active)
            entry.product_quantity += 1
            entry.save()
        else:
            Cart(product_quantity=1, **active).save()
    elif v == 0:
        # BUG FIX: the zero-quantity cleanup used to run even when no active
        # cart row existed (IndexError on filter()[0]) and its lookup dropped
        # shipment=False, so it could inspect an already-shipped row instead
        # of the decremented one.
        if Cart.objects.filter(**active).exists():
            entry = Cart.objects.get(**active)
            entry.product_quantity -= 1
            entry.save()
            if entry.product_quantity == 0:
                Cart.objects.filter(**active).delete()
    return redirect("cart")
def pro_remove(request, id):
    '''Remove product *id* entirely from the current user's active cart.'''
    Cart.objects.filter(customer_id=request.user.id, product_id=id,
                        shipment=False).delete()
    return redirect("cart")
def checkout_products(request):
    '''POST: attach the chosen address to every active cart row, then go to
    checkout.  GET: render the checkout summary for the active cart.
    '''
    if request.method == "POST":
        add_id = request.POST['address']
        Cart.objects.filter(customer_id=request.user.id,shipment=False).update(address_id =add_id)
        return redirect("checkout")
    else:
        cid = request.user.id
        #Cart.objects.filter(customer_id= cid,shipment=False).update(shipment = True)
        cdata = Cart.objects.filter(customer_id=request.user.id,shipment=False)
        data = []
        tamount = 0
        # build per-line summaries and the grand total for the template
        for i in cdata:
            t_d = Product.objects.filter(id=i.product_id)
            data.append(dict({"p_id":i.product_id,"p_name":t_d[0].name,"price":t_d[0].price,"quantity":i.product_quantity,"amount_t":(t_d[0].price * i.product_quantity)}))
            tamount += t_d[0].price * i.product_quantity
        fdata = dict({"total":tamount})
        cid = request.user.id
        # NOTE(review): cdata[0] raises IndexError when the cart is empty —
        # confirm templates never link here with an empty cart.
        address = Address.objects.filter(customer_id=cid,id = cdata[0].address_id)
        return render(request,"product/checkout.html",{"data":data,"fdata":fdata,"address":address})
def checkout_shipment(request):
    '''Turn the user's active cart into a Shipment: total it, create the
    Shipment row, mark the cart rows shipped, and show the new shipment.
    Returns 404 when the cart is empty.
    '''
    cid = request.user.id
    cart_rows = Cart.objects.filter(customer_id=cid, shipment=False)
    if len(cart_rows) == 0:
        return HttpResponseNotFound()
    # grand total = sum of price * quantity over every cart row
    total = 0
    for row in cart_rows:
        product = Product.objects.filter(id=row.product_id)
        total += product[0].price * row.product_quantity
    bill = Shipment(customer_id=cid, total_amount=total)
    bill.save()
    # flip the cart rows to shipped and link them to the new bill
    Cart.objects.filter(customer_id=cid, shipment=False).update(
        shipment=True, bill_id=bill.id)
    return redirect('shipment_id', bill.id)
def shipment(request):
    '''List every past shipment of the current user with its product names
    and total; redirect anonymous users to login.
    '''
    if request.user.id != None:
        sdata = Shipment.objects.filter(customer_id= request.user.id)
        fdata = []
        # one summary entry per shipment: product names plus recomputed total
        for i in sdata:
            c_data = Cart.objects.filter(bill_id=i.id)
            #cdata = Cart.objects.filter(customer_id=request.user.id,shipment=False)
            data = []
            tamount = 0
            for j in c_data:
                t_d = Product.objects.filter(id=j.product_id)
                data.append(t_d[0].name)
                tamount += t_d[0].price * j.product_quantity
            fdata.append({'id':i.id,'data':data,'tamount':tamount})
            del data
        print(fdata)
        # cart badge count for the navbar
        cdata = cart_data_add(request.user.id)
        return render(request,'product/shipment.html',{'sdata':sdata,'fdata':fdata,'cdata':cdata})
    else:
        return redirect('login')
def shipment_id(request,id):
    '''Show one shipment (bill) in detail; 404 unless it belongs to the
    requesting user.
    '''
    # NOTE(review): sdata[0] raises IndexError for an unknown shipment id —
    # a get_object_or_404-style lookup would 404 instead; confirm intent.
    sdata = Shipment.objects.filter(id=id)
    user = request.user
    if user.id == sdata[0].customer_id:
        cdata = Cart.objects.filter(bill_id=id)
        #cdata = Cart.objects.filter(customer_id=request.user.id,shipment=False)
        data = []
        tamount = 0
        # per-line summaries and recomputed total for the template
        for i in cdata:
            t_d = Product.objects.filter(id=i.product_id)
            data.append(dict({"p_id":i.product_id,"p_name":t_d[0].name,"price":t_d[0].price,"quantity":i.product_quantity,"amount_t":(t_d[0].price * i.product_quantity)}))
            tamount += t_d[0].price * i.product_quantity
        address = Address.objects.filter(customer_id=user.id,id=cdata[0].address_id)
        # cdata is reused here as the navbar cart badge payload
        cdata = cart_data_add(request.user.id)
        return render(request,'product/shipment_view.html',{'data':data,'sdata':sdata,'cdata':cdata,'user':user,'tamount':tamount,'address':address})
    else:
        return HttpResponseNotFound()
|
{"/src/cart/views.py": ["/src/cart/models.py"], "/src/account/views.py": ["/src/account/models.py"], "/src/product/views.py": ["/src/product/models.py"]}
|
6,494
|
amaan2398/EcommerceWebsiteDjango
|
refs/heads/master
|
/src/account/views.py
|
from django.shortcuts import render,redirect
from django.http import HttpResponseNotFound
from django.contrib import messages
from django.contrib.auth.models import User,auth
from .models import Address
from cart.models import Cart, Shipment
from product.models import Product
def cart_data_add(cid):
    '''Return ``{"count": n}`` — the number of unshipped cart rows for
    customer *cid* (used for the cart badge in templates).
    '''
    # count at the database instead of fetching every row just for len()
    count = Cart.objects.filter(customer_id=cid, shipment=False).count()
    return {"count": count}
# Create your views here.
def register(request):
    """Create a new user account plus a default address.

    POST: validates that the two passwords match and that the username and
    email are unused, then creates the User (password is hashed by
    ``create_user``) and an Address row keyed by the new user's id.
    GET: shows the registration form to anonymous visitors only.
    Any other method -- or a logged-in GET -- gets a 404.

    Fixes vs. original: ``create_user`` already saves and returns the
    instance, so the redundant ``user.save()`` / ``del`` / re-fetch are
    removed, and unsupported HTTP methods no longer fall through returning
    ``None`` (which makes Django raise an internal error).
    """
    if request.method == "POST":
        uname = request.POST['uname']
        fname = request.POST['fname']
        lname = request.POST['lname']
        street = request.POST['address']
        city = request.POST['city']
        state = request.POST['state']
        country = request.POST['country']
        postcode = request.POST['postcode']
        email = request.POST['email']
        pass1 = request.POST['pass1']
        pass2 = request.POST['pass2']
        # Guard clauses: bail out early on each validation failure.
        if pass1 != pass2:
            messages.info(request, "Password not matching ...")
            return redirect("register")
        if User.objects.filter(username=uname).exists():
            messages.info(request, "Username taken ...")
            return redirect("register")
        if User.objects.filter(email=email).exists():
            messages.info(request, "Email taken ...")
            return redirect("register")
        # create_user hashes the password, saves, and returns the instance.
        user = User.objects.create_user(
            username=uname, password=pass1, email=email,
            first_name=fname, last_name=lname,
        )
        Address(
            customer_id=user.id, street_address=street, city=city,
            state=state, country=country, postcode=postcode,
        ).save()
        messages.info(request, "Account Created")
        return redirect("login")
    elif request.method == "GET":
        if request.user.id is None:
            return render(request, "accounts/register.html", {})
        return HttpResponseNotFound()
    # Other HTTP methods are not supported.
    return HttpResponseNotFound()
def login(request):
    """Authenticate a visitor from the posted credentials.

    POST: on success, start a session and redirect to the home page; on
    failure, flash an error and return to the login form.
    GET: show the form to anonymous visitors, 404 for logged-in users.
    """
    if request.method == "POST":
        uname = request.POST['uname']
        pass1 = request.POST['pass1']
        user = auth.authenticate(username=uname, password=pass1)
        if user is None:
            # Bad credentials: flash a message and re-show the form.
            messages.info(request, "Wrong username or password...")
            return redirect("login")
        auth.login(request, user)
        return redirect("/")
    elif request.method == "GET":
        if request.user.id is not None:
            # Already authenticated users have no business on the login page.
            return HttpResponseNotFound()
        return render(request, "accounts/login.html", {})
def logout(request):
    # End the current session and send the visitor back to the home page.
    auth.logout(request)
    return redirect("/")
def profile(request):
    """Show or update the logged-in user's profile.

    POST: save the submitted first/last name onto the current User.
    GET: render the profile page with the user's addresses, cart badge
    data, and each shipment paired with its total amount.
    """
    if request.method == "POST":
        current = User.objects.get(id=request.user.id)
        current.first_name = request.POST['fname']
        current.last_name = request.POST['lname']
        current.save()
        return redirect("profile")
    elif request.method == "GET":
        data = request.user
        address = Address.objects.filter(customer_id=data.id)
        cdata = cart_data_add(request.user.id)
        sdata = Shipment.objects.filter(customer_id=request.user.id)
        # Compute the total amount of every shipment for the history list.
        fdata = []
        for shipment in sdata:
            total = 0
            for row in Cart.objects.filter(bill_id=shipment.id):
                product = Product.objects.filter(id=row.product_id)
                total += product[0].price * row.product_quantity
            fdata.append({'id': shipment.id, 'tamount': total})
        return render(request, "accounts/profile.html",
                      {"data": data, "address": address, "cdata": cdata,
                       'sdata': sdata, 'fdata': fdata})
def edit_address(request):
    """Add a new (non-default) address, or show the address-management page.

    POST: persist the submitted address for the current user with
    ``default_add=False`` and return to the profile.
    GET: render the edit page with the user's addresses and cart badge.
    """
    if request.method == "POST":
        new_address = Address(
            customer_id=request.user.id,
            street_address=request.POST['address'],
            city=request.POST['city'],
            state=request.POST['state'],
            country=request.POST['country'],
            postcode=request.POST['postcode'],
            default_add=False,
        )
        new_address.save()
        return redirect("profile")
    elif request.method == "GET":
        data = request.user
        address = Address.objects.filter(customer_id=data.id)
        cnt = len(address)
        cdata = cart_data_add(request.user.id)
        return render(request, "accounts/edit_address.html",
                      {"data": data, "address": address,
                       "cnt": cnt, "cdata": cdata})
def remove_address(request, id):
    """Delete address *id*; if it was the default, promote another one.

    Bug fixes vs. original:
    - ``except MultipleObjectsReturned`` referenced a name that was never
      imported, so the handler itself raised NameError.
    - ``Address.objects.get(...)[0]`` is invalid -- ``get()`` returns a single
      object, not a sequence.
    - An unhandled ``DoesNotExist`` crashed the view when the user had no
      remaining address. ``filter().first()`` handles zero, one, or many
      remaining rows uniformly.
    """
    data = Address.objects.filter(id=id)
    # Remember whether we are deleting the default address before deletion.
    was_default = bool(data) and data[0].default_add
    data.delete()
    if was_default:
        # Promote the first remaining address (if any) to be the new default.
        replacement = Address.objects.filter(customer_id=request.user.id).first()
        if replacement is not None:
            replacement.default_add = True
            replacement.save()
    return redirect("edit_address")
|
{"/src/cart/views.py": ["/src/cart/models.py"], "/src/account/views.py": ["/src/account/models.py"], "/src/product/views.py": ["/src/product/models.py"]}
|
6,495
|
amaan2398/EcommerceWebsiteDjango
|
refs/heads/master
|
/src/product/urls.py
|
from django.urls import path
from . import views
# URL routes for the product app: catalogue index, product detail, search.
urlpatterns = [
    path('', views.index, name='index'),                               # storefront / catalogue
    path('product/<int:id>', views.product_view, name="product_view"),  # single-product page
    path('search/', views.search, name="search"),                       # keyword search (?s=...)
]
|
{"/src/cart/views.py": ["/src/cart/models.py"], "/src/account/views.py": ["/src/account/models.py"], "/src/product/views.py": ["/src/product/models.py"]}
|
6,496
|
amaan2398/EcommerceWebsiteDjango
|
refs/heads/master
|
/src/cart/urls.py
|
from django.urls import path
from . import views
# URL routes for the cart app: cart management, checkout, and shipments.
urlpatterns = [
    path('', views.cart, name="cart"),                                              # view current cart
    path('addtocart/<int:id>', views.addtocart, name="addtocart"),                  # add product id
    path('addrm_pro_qnt/<int:id>/<int:v>', views.addrm_pro_qut, name='addrm_pro_qut'),  # +/- quantity (v selects direction)
    path('pro_remove/<int:id>', views.pro_remove, name='pro_remove'),               # drop a cart row
    path('checkout/', views.checkout_products, name='checkout'),                    # review order
    path('checkout_shipment/', views.checkout_shipment, name='checkout_shipment'),  # confirm shipment
    path('shipment/', views.shipment, name='shipment'),                             # shipment history
    path('shipment/<int:id>', views.shipment_id, name='shipment_id'),               # one shipment's detail
]
|
{"/src/cart/views.py": ["/src/cart/models.py"], "/src/account/views.py": ["/src/account/models.py"], "/src/product/views.py": ["/src/product/models.py"]}
|
6,497
|
amaan2398/EcommerceWebsiteDjango
|
refs/heads/master
|
/src/product/models.py
|
from django.db import models
# Create your models here.
class Product(models.Model):
    """A sellable item in the catalogue."""
    name = models.CharField(max_length=120)
    # Uploaded product photo, stored under MEDIA_ROOT/pics.
    image = models.ImageField(upload_to='pics')
    description = models.TextField(blank=False, null=False)
    # NOTE(review): max_digits=1000 looks unintended -- a realistic bound
    # (e.g. 10) is advisable, but changing it requires a schema migration,
    # so it is only flagged here.
    price = models.DecimalField(decimal_places=2, max_digits=1000)
|
{"/src/cart/views.py": ["/src/cart/models.py"], "/src/account/views.py": ["/src/account/models.py"], "/src/product/views.py": ["/src/product/models.py"]}
|
6,498
|
amaan2398/EcommerceWebsiteDjango
|
refs/heads/master
|
/src/cart/migrations/0002_auto_20200824_2040.py
|
# Generated by Django 3.1 on 2020-08-24 20:40
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: introduces the Shipment table and
    # links Cart rows to an address and a bill (shipment) by bare integer
    # ids. Migration files should not be hand-edited once applied.

    dependencies = [
        ('cart', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Shipment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Callable default: evaluated per-row at insert time.
                ('date_time', models.DateTimeField(blank=True, default=datetime.datetime.now)),
                ('customer_id', models.IntegerField()),
                ('total_amount', models.IntegerField()),
            ],
        ),
        migrations.AddField(
            model_name='cart',
            name='address_id',
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name='cart',
            name='bill_id',
            field=models.IntegerField(default=0),
        ),
    ]
|
{"/src/cart/views.py": ["/src/cart/models.py"], "/src/account/views.py": ["/src/account/models.py"], "/src/product/views.py": ["/src/product/models.py"]}
|
6,499
|
amaan2398/EcommerceWebsiteDjango
|
refs/heads/master
|
/src/product/views.py
|
from django.shortcuts import render
from .models import Product
from cart.models import Cart
#from django.http import HttpResponse
def cart_data_add(cid):
    """Return ``{"count": n}`` where *n* is the number of un-shipped cart rows
    belonging to customer *cid* (used as the cart badge in templates).

    Performance fix: ``len(queryset)`` materialized every row just to count
    them; ``QuerySet.count()`` issues a single SQL ``COUNT(*)`` instead.
    """
    pending = Cart.objects.filter(customer_id=cid, shipment=False)
    return {"count": pending.count()}
# Create your views here.
def index(request):
    """Render the storefront listing every product.

    Descriptions longer than 20 characters are truncated in-memory (the
    database rows are untouched) before handing the queryset to the template.
    """
    data = Product.objects.all()
    for pos, product in enumerate(data):
        if len(product.description) > 20:
            # Queryset is cached after iteration starts, so indexed
            # assignment mutates the same in-memory instances.
            data[pos].description = product.description[:20] + '...'
    a = ['active', '', '']
    cdata = cart_data_add(request.user.id)
    return render(request, "index.html", {'data': data, "a": a, "cdata": cdata})
def product_view(request, id):
    """Render the detail page for product *id*."""
    product = Product.objects.get(id=id)
    nav_state = ['', '', '']
    badge = cart_data_add(request.user.id)
    context = {"data": product, "a": nav_state, "cdata": badge}
    return render(request, "product/show_product.html", context)
def search(request):
    """List products whose description contains any of the query words.

    Security fix: the original concatenated the raw ``?s=`` input straight
    into the SQL WHERE clause, allowing SQL injection. The query is now
    parameterized via ``Product.objects.raw(sql, params)``.

    Correctness fix: ``RawQuerySet`` re-executes the query on each indexing,
    so the original's in-place description truncation was silently lost;
    the result is materialized into a list first.
    """
    s = request.GET['s']
    lst = s.split(' ')
    a = ['', '', '']
    cdata = cart_data_add(request.user.id)
    if len(lst) > 0:
        # One LIKE placeholder per search word, OR-ed together.
        where = " or ".join(["description LIKE %s"] * len(lst))
        q = "SELECT * FROM product_product WHERE " + where
        params = ['%{}%'.format(word) for word in lst]
        data = list(Product.objects.raw(q, params))
        for i, j in enumerate(data):
            if len(j.description) > 20:
                data[i].description = j.description[:20] + '...'
        return render(request, "index.html",
                      {'data': data, "a": a, "cdata": cdata, "tag": lst})
    else:
        data = Product.objects.all()
        for i, j in enumerate(data):
            if len(j.description) > 20:
                data[i].description = j.description[:20] + '...'
        return render(request, "index.html",
                      {'data': data, "a": a, "cdata": cdata})
|
{"/src/cart/views.py": ["/src/cart/models.py"], "/src/account/views.py": ["/src/account/models.py"], "/src/product/views.py": ["/src/product/models.py"]}
|
6,500
|
amaan2398/EcommerceWebsiteDjango
|
refs/heads/master
|
/src/account/models.py
|
from django.db import models
# Create your models here.
class Address(models.Model):
    """A postal address belonging to a customer."""
    # Owning User's id, stored as a bare integer rather than a ForeignKey.
    customer_id = models.IntegerField(blank=False, null=False)
    street_address = models.CharField(max_length=125)
    city = models.CharField(max_length=30)
    state = models.CharField(max_length=30)
    country = models.CharField(max_length=30)
    postcode = models.CharField(max_length=10)
    # True for the customer's default shipping address.
    default_add = models.BooleanField(default=True)
|
{"/src/cart/views.py": ["/src/cart/models.py"], "/src/account/views.py": ["/src/account/models.py"], "/src/product/views.py": ["/src/product/models.py"]}
|
6,506
|
TimurAbdymazhinov/adeliya-backend
|
refs/heads/main
|
/apps/info/views.py
|
from django.db.models import Prefetch
from rest_framework.response import Response
from rest_framework.viewsets import generics
from rest_framework.permissions import AllowAny
from django_filters.rest_framework import DjangoFilterBackend
from drf_yasg.utils import swagger_auto_schema
from apps.brand.pagination import SmallListPagination
from core.constants import PROMOTION
from .models import (
Banner,
ProgramCondition,
Contact,
PromotionAndNews, PromotionAndNewsImage,
)
from .serializers import (
ProgramConditionSerializer,
ContactListSerializer,
BannerDetailSerializer,
BannerAndPromotionSerializer,
PromotionAndNewsSerializer,
PromotionAndNewsDetailSerializer,
)
from apps.notifications.service import SendPushNotification
class BannerRetrieveAPIView(generics.RetrieveAPIView):
    """Serve the first Banner row as a singleton-style resource."""
    queryset = Banner.objects.all()
    serializer_class = BannerDetailSerializer

    def get_object(self):
        # No pk in the URL: this endpoint always returns the first banner
        # (or None if the table is empty).
        return self.get_queryset().first()
class BannerAndPromotionAPIView(generics.RetrieveAPIView):
    """
    API view for banner and promotion.

    Combines the first banner with the three newest active promotions into
    a single response payload.
    """
    serializer_class = BannerAndPromotionSerializer

    def retrieve(self, request, *args, **kwargs):
        banner = Banner.objects.first()
        # Up to three active promotions; only each promotion's main image
        # is pre-loaded (Prefetch) to avoid N+1 image queries.
        promotion = (
            PromotionAndNews.objects.filter(
                is_active=True,
                information_type=PROMOTION
            ).prefetch_related(
                Prefetch('images', PromotionAndNewsImage.objects.filter(
                    is_main=True
                ))
            )[:3]
        )
        # Serialized as one composite object, not a model instance.
        banner_and_promotion = dict(
            banner=banner,
            promotion=promotion,
        )
        serializer = self.get_serializer(banner_and_promotion)
        return Response(serializer.data)
class ProgramConditionAPIView(generics.RetrieveAPIView):
    """
    API view for ProgramCondition (singleton-style: always the first row).
    """
    queryset = ProgramCondition.objects.all()
    serializer_class = ProgramConditionSerializer

    def get_object(self):
        # No pk in the URL: serve the first row (or None if table is empty).
        return self.get_queryset().first()
class ContactListAPIView(generics.ListAPIView):
    """
    API list view for contact entries (all rows, default DRF behavior).
    """
    queryset = Contact.objects.all()
    serializer_class = ContactListSerializer
class PromotionAndNewsListAPIView(generics.ListAPIView):
    """
    API list view for promotions and news.

    Publicly accessible, paginated, and filterable by ``information_type``
    via query parameter; only active items are listed, each with its main
    image pre-loaded to avoid N+1 queries.
    """
    queryset = (
        PromotionAndNews.objects.filter(is_active=True).prefetch_related(
            Prefetch(
                'images', PromotionAndNewsImage.objects.filter(is_main=True)
            )
        )
    )
    serializer_class = PromotionAndNewsSerializer
    pagination_class = SmallListPagination
    filter_backends = [DjangoFilterBackend]
    filterset_fields = ['information_type']
    permission_classes = [AllowAny]
class PromotionAndNewsRetrieveAPIView(generics.RetrieveAPIView):
    """
    API detail view for promotions and news (public access).
    """
    queryset = PromotionAndNews.objects.all()
    serializer_class = PromotionAndNewsDetailSerializer
    permission_classes = [AllowAny]

    @swagger_auto_schema(
        responses={
            404: '{"detail": "Страница не найдена."}',
        }
    )
    def get(self, request, *args, **kwargs):
        # Side effect: mark the requesting user's notification for this
        # article as viewed before returning the standard detail response.
        SendPushNotification.set_notification_viewed_for_article(
            request, self.get_object()
        )
        return super(PromotionAndNewsRetrieveAPIView, self).get(request, *args, **kwargs)
{"/apps/info/views.py": ["/apps/brand/pagination.py", "/core/constants.py", "/apps/info/models.py", "/apps/info/serializers.py", "/apps/notifications/service.py"], "/apps/notifications/serializers.py": ["/apps/notifications/models.py"], "/apps/brand/urls.py": ["/apps/brand/views.py"], "/apps/check/urls.py": ["/apps/check/views.py"], "/apps/account/urls.py": ["/apps/account/views.py"], "/apps/check/admin.py": ["/apps/check/models.py"], "/apps/info/migrations/0001_initial.py": ["/core/utils.py"], "/apps/setting/urls.py": ["/apps/setting/views.py"], "/apps/info/tests/factories.py": ["/apps/info/models.py", "/core/constants.py"], "/apps/brand/service.py": ["/apps/brand/models.py", "/core/constants.py"], "/apps/info/urls.py": ["/apps/info/views.py"], "/apps/brand/tests/test_views.py": ["/apps/brand/models.py"], "/apps/setting/tests/test_middleware.py": ["/apps/setting/tests/factories.py"], "/apps/brand/admin.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/check/models.py": ["/apps/account/models.py", "/apps/brand/models.py", "/core/constants.py"], "/apps/brand/models.py": ["/core/utils.py", "/core/constants.py"], "/apps/brand/migrations/0005_auto_20210405_1740.py": ["/core/utils.py"], "/apps/integration/views.py": ["/apps/check/models.py", "/apps/integration/serializers.py", "/apps/integration/service.py", "/apps/check/service.py"], "/apps/brand/tests/factories.py": ["/apps/brand/models.py"], "/apps/account/tests/test_views.py": ["/apps/account/service.py", "/apps/account/tests/factories.py", "/apps/account/tests/mymock.py", "/core/constants.py"], "/apps/info/admin.py": ["/apps/brand/admin.py", "/apps/info/models.py", "/apps/notifications/tasks.py"], "/apps/notifications/service.py": ["/core/constants.py", "/apps/check/models.py", "/apps/notifications/models.py", "/apps/notifications/tasks.py"], "/apps/check/tests/test_views.py": ["/apps/account/tests/factories.py"], "/apps/setting/middleware.py": ["/apps/setting/service.py"], 
"/apps/notifications/tasks.py": ["/apps/notifications/models.py", "/apps/notifications/service.py", "/core/constants.py", "/apps/account/models.py"], "/apps/check/filters.py": ["/apps/check/models.py"], "/apps/notifications/pagination.py": ["/apps/notifications/service.py"], "/apps/account/service.py": ["/apps/integration/service.py", "/apps/notifications/tasks.py", "/apps/account/serializers.py"], "/apps/setting/tests/factories.py": ["/apps/setting/models.py"], "/apps/check/views.py": ["/apps/account/custom_openapi.py", "/apps/account/serializers.py", "/apps/brand/pagination.py", "/apps/check/models.py", "/apps/check/serializers.py", "/apps/check/service.py", "/apps/check/filters.py", "/apps/notifications/service.py"], "/apps/info/serializers.py": ["/apps/info/models.py", "/apps/notifications/models.py"], "/apps/setting/admin.py": ["/apps/setting/models.py"], "/apps/setting/service.py": ["/apps/setting/models.py"], "/apps/notifications/urls.py": ["/apps/notifications/views.py"], "/apps/brand/views.py": ["/apps/brand/__init__.py", "/apps/brand/models.py", "/apps/brand/pagination.py", "/apps/brand/serializers.py", "/apps/brand/service.py"], "/apps/integration/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/check/models.py"], "/apps/notifications/models.py": ["/apps/account/models.py", "/core/constants.py", "/apps/setting/models.py"], "/apps/info/migrations/0006_auto_20210408_1352.py": ["/core/utils.py"], "/apps/account/views.py": ["/apps/account/custom_openapi.py", "/apps/account/models.py", "/apps/account/serializers.py", "/apps/account/service.py", "/apps/integration/service.py"], "/apps/notifications/admin.py": ["/apps/notifications/models.py"], "/apps/account/serializers.py": ["/apps/account/models.py", "/apps/account/service.py", "/apps/integration/service.py", "/apps/notifications/models.py", "/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/forms.py": ["/apps/brand/models.py"], 
"/apps/account/custom_openapi.py": ["/apps/account/serializers.py"], "/apps/account/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/integration/urls.py": ["/apps/integration/views.py"], "/apps/setting/views.py": ["/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/migrations/0001_initial.py": ["/core/utils.py"], "/apps/brand/serializers.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/integration/service.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/integration/serializers.py", "/apps/setting/models.py"], "/apps/check/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/brand/serializers.py", "/apps/check/models.py", "/apps/setting/models.py", "/apps/notifications/models.py"], "/apps/account/tests/factories.py": ["/apps/account/models.py"], "/apps/info/tests/test_views.py": ["/apps/info/models.py", "/apps/info/tests/factories.py"], "/apps/notifications/views.py": ["/apps/notifications/models.py", "/apps/notifications/pagination.py", "/apps/notifications/serializers.py"], "/apps/check/service.py": ["/apps/notifications/tasks.py"], "/apps/setting/serializers.py": ["/apps/setting/models.py"], "/apps/info/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/setting/tests/test_views.py": ["/apps/setting/models.py", "/apps/setting/tests/factories.py"]}
|
6,507
|
TimurAbdymazhinov/adeliya-backend
|
refs/heads/main
|
/apps/notifications/serializers.py
|
import json
from rest_framework import serializers
from apps.notifications.models import Notification
class NotificationSerializer(serializers.ModelSerializer):
    """Read-mostly serializer for Notification; only ``is_viewed`` is writable."""
    # The model stores body as a JSON string; expose it decoded.
    body = serializers.SerializerMethodField()

    class Meta:
        model = Notification
        fields = [
            'id', 'notice_type', 'is_on_credit',
            'is_viewed', 'body', 'created_at'
        ]
        extra_kwargs = {
            'notice_type': {'read_only': True},
            'is_on_credit': {'read_only': True},
            'body': {'read_only': True},
            'is_viewed': {'required': True},
        }

    def get_body(self, obj):
        # Decode the JSON-encoded body field into native Python data.
        return json.loads(obj.body)
|
{"/apps/info/views.py": ["/apps/brand/pagination.py", "/core/constants.py", "/apps/info/models.py", "/apps/info/serializers.py", "/apps/notifications/service.py"], "/apps/notifications/serializers.py": ["/apps/notifications/models.py"], "/apps/brand/urls.py": ["/apps/brand/views.py"], "/apps/check/urls.py": ["/apps/check/views.py"], "/apps/account/urls.py": ["/apps/account/views.py"], "/apps/check/admin.py": ["/apps/check/models.py"], "/apps/info/migrations/0001_initial.py": ["/core/utils.py"], "/apps/setting/urls.py": ["/apps/setting/views.py"], "/apps/info/tests/factories.py": ["/apps/info/models.py", "/core/constants.py"], "/apps/brand/service.py": ["/apps/brand/models.py", "/core/constants.py"], "/apps/info/urls.py": ["/apps/info/views.py"], "/apps/brand/tests/test_views.py": ["/apps/brand/models.py"], "/apps/setting/tests/test_middleware.py": ["/apps/setting/tests/factories.py"], "/apps/brand/admin.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/check/models.py": ["/apps/account/models.py", "/apps/brand/models.py", "/core/constants.py"], "/apps/brand/models.py": ["/core/utils.py", "/core/constants.py"], "/apps/brand/migrations/0005_auto_20210405_1740.py": ["/core/utils.py"], "/apps/integration/views.py": ["/apps/check/models.py", "/apps/integration/serializers.py", "/apps/integration/service.py", "/apps/check/service.py"], "/apps/brand/tests/factories.py": ["/apps/brand/models.py"], "/apps/account/tests/test_views.py": ["/apps/account/service.py", "/apps/account/tests/factories.py", "/apps/account/tests/mymock.py", "/core/constants.py"], "/apps/info/admin.py": ["/apps/brand/admin.py", "/apps/info/models.py", "/apps/notifications/tasks.py"], "/apps/notifications/service.py": ["/core/constants.py", "/apps/check/models.py", "/apps/notifications/models.py", "/apps/notifications/tasks.py"], "/apps/check/tests/test_views.py": ["/apps/account/tests/factories.py"], "/apps/setting/middleware.py": ["/apps/setting/service.py"], 
"/apps/notifications/tasks.py": ["/apps/notifications/models.py", "/apps/notifications/service.py", "/core/constants.py", "/apps/account/models.py"], "/apps/check/filters.py": ["/apps/check/models.py"], "/apps/notifications/pagination.py": ["/apps/notifications/service.py"], "/apps/account/service.py": ["/apps/integration/service.py", "/apps/notifications/tasks.py", "/apps/account/serializers.py"], "/apps/setting/tests/factories.py": ["/apps/setting/models.py"], "/apps/check/views.py": ["/apps/account/custom_openapi.py", "/apps/account/serializers.py", "/apps/brand/pagination.py", "/apps/check/models.py", "/apps/check/serializers.py", "/apps/check/service.py", "/apps/check/filters.py", "/apps/notifications/service.py"], "/apps/info/serializers.py": ["/apps/info/models.py", "/apps/notifications/models.py"], "/apps/setting/admin.py": ["/apps/setting/models.py"], "/apps/setting/service.py": ["/apps/setting/models.py"], "/apps/notifications/urls.py": ["/apps/notifications/views.py"], "/apps/brand/views.py": ["/apps/brand/__init__.py", "/apps/brand/models.py", "/apps/brand/pagination.py", "/apps/brand/serializers.py", "/apps/brand/service.py"], "/apps/integration/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/check/models.py"], "/apps/notifications/models.py": ["/apps/account/models.py", "/core/constants.py", "/apps/setting/models.py"], "/apps/info/migrations/0006_auto_20210408_1352.py": ["/core/utils.py"], "/apps/account/views.py": ["/apps/account/custom_openapi.py", "/apps/account/models.py", "/apps/account/serializers.py", "/apps/account/service.py", "/apps/integration/service.py"], "/apps/notifications/admin.py": ["/apps/notifications/models.py"], "/apps/account/serializers.py": ["/apps/account/models.py", "/apps/account/service.py", "/apps/integration/service.py", "/apps/notifications/models.py", "/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/forms.py": ["/apps/brand/models.py"], 
"/apps/account/custom_openapi.py": ["/apps/account/serializers.py"], "/apps/account/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/integration/urls.py": ["/apps/integration/views.py"], "/apps/setting/views.py": ["/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/migrations/0001_initial.py": ["/core/utils.py"], "/apps/brand/serializers.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/integration/service.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/integration/serializers.py", "/apps/setting/models.py"], "/apps/check/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/brand/serializers.py", "/apps/check/models.py", "/apps/setting/models.py", "/apps/notifications/models.py"], "/apps/account/tests/factories.py": ["/apps/account/models.py"], "/apps/info/tests/test_views.py": ["/apps/info/models.py", "/apps/info/tests/factories.py"], "/apps/notifications/views.py": ["/apps/notifications/models.py", "/apps/notifications/pagination.py", "/apps/notifications/serializers.py"], "/apps/check/service.py": ["/apps/notifications/tasks.py"], "/apps/setting/serializers.py": ["/apps/setting/models.py"], "/apps/info/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/setting/tests/test_views.py": ["/apps/setting/models.py", "/apps/setting/tests/factories.py"]}
|
6,508
|
TimurAbdymazhinov/adeliya-backend
|
refs/heads/main
|
/apps/check/migrations/0003_auto_20210426_1106.py
|
# Generated by Django 2.2.16 on 2021-04-26 05:06
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: widens five money/bonus columns of the
    # "check" model to DecimalField(max_digits=8, decimal_places=2),
    # all nullable. Migration files should not be hand-edited once applied.

    dependencies = [
        ('check', '0002_auto_20210415_1834'),
    ]

    operations = [
        migrations.AlterField(
            model_name='check',
            name='accrued_point',
            field=models.DecimalField(blank=True, decimal_places=2, max_digits=8, null=True, verbose_name='Начислено бонусов'),
        ),
        migrations.AlterField(
            model_name='check',
            name='bonus_paid',
            field=models.DecimalField(blank=True, decimal_places=2, max_digits=8, null=True, verbose_name='Оплачено бонусами'),
        ),
        migrations.AlterField(
            model_name='check',
            name='money_paid',
            field=models.DecimalField(blank=True, decimal_places=2, max_digits=8, null=True, verbose_name='Оплачено деньгами'),
        ),
        migrations.AlterField(
            model_name='check',
            name='total_paid',
            field=models.DecimalField(blank=True, decimal_places=2, max_digits=8, null=True, verbose_name='Сумма оплаты'),
        ),
        migrations.AlterField(
            model_name='check',
            name='withdrawn_point',
            field=models.DecimalField(blank=True, decimal_places=2, max_digits=8, null=True, verbose_name='Снято бонусов (Возврат)'),
        ),
    ]
|
{"/apps/info/views.py": ["/apps/brand/pagination.py", "/core/constants.py", "/apps/info/models.py", "/apps/info/serializers.py", "/apps/notifications/service.py"], "/apps/notifications/serializers.py": ["/apps/notifications/models.py"], "/apps/brand/urls.py": ["/apps/brand/views.py"], "/apps/check/urls.py": ["/apps/check/views.py"], "/apps/account/urls.py": ["/apps/account/views.py"], "/apps/check/admin.py": ["/apps/check/models.py"], "/apps/info/migrations/0001_initial.py": ["/core/utils.py"], "/apps/setting/urls.py": ["/apps/setting/views.py"], "/apps/info/tests/factories.py": ["/apps/info/models.py", "/core/constants.py"], "/apps/brand/service.py": ["/apps/brand/models.py", "/core/constants.py"], "/apps/info/urls.py": ["/apps/info/views.py"], "/apps/brand/tests/test_views.py": ["/apps/brand/models.py"], "/apps/setting/tests/test_middleware.py": ["/apps/setting/tests/factories.py"], "/apps/brand/admin.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/check/models.py": ["/apps/account/models.py", "/apps/brand/models.py", "/core/constants.py"], "/apps/brand/models.py": ["/core/utils.py", "/core/constants.py"], "/apps/brand/migrations/0005_auto_20210405_1740.py": ["/core/utils.py"], "/apps/integration/views.py": ["/apps/check/models.py", "/apps/integration/serializers.py", "/apps/integration/service.py", "/apps/check/service.py"], "/apps/brand/tests/factories.py": ["/apps/brand/models.py"], "/apps/account/tests/test_views.py": ["/apps/account/service.py", "/apps/account/tests/factories.py", "/apps/account/tests/mymock.py", "/core/constants.py"], "/apps/info/admin.py": ["/apps/brand/admin.py", "/apps/info/models.py", "/apps/notifications/tasks.py"], "/apps/notifications/service.py": ["/core/constants.py", "/apps/check/models.py", "/apps/notifications/models.py", "/apps/notifications/tasks.py"], "/apps/check/tests/test_views.py": ["/apps/account/tests/factories.py"], "/apps/setting/middleware.py": ["/apps/setting/service.py"], 
"/apps/notifications/tasks.py": ["/apps/notifications/models.py", "/apps/notifications/service.py", "/core/constants.py", "/apps/account/models.py"], "/apps/check/filters.py": ["/apps/check/models.py"], "/apps/notifications/pagination.py": ["/apps/notifications/service.py"], "/apps/account/service.py": ["/apps/integration/service.py", "/apps/notifications/tasks.py", "/apps/account/serializers.py"], "/apps/setting/tests/factories.py": ["/apps/setting/models.py"], "/apps/check/views.py": ["/apps/account/custom_openapi.py", "/apps/account/serializers.py", "/apps/brand/pagination.py", "/apps/check/models.py", "/apps/check/serializers.py", "/apps/check/service.py", "/apps/check/filters.py", "/apps/notifications/service.py"], "/apps/info/serializers.py": ["/apps/info/models.py", "/apps/notifications/models.py"], "/apps/setting/admin.py": ["/apps/setting/models.py"], "/apps/setting/service.py": ["/apps/setting/models.py"], "/apps/notifications/urls.py": ["/apps/notifications/views.py"], "/apps/brand/views.py": ["/apps/brand/__init__.py", "/apps/brand/models.py", "/apps/brand/pagination.py", "/apps/brand/serializers.py", "/apps/brand/service.py"], "/apps/integration/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/check/models.py"], "/apps/notifications/models.py": ["/apps/account/models.py", "/core/constants.py", "/apps/setting/models.py"], "/apps/info/migrations/0006_auto_20210408_1352.py": ["/core/utils.py"], "/apps/account/views.py": ["/apps/account/custom_openapi.py", "/apps/account/models.py", "/apps/account/serializers.py", "/apps/account/service.py", "/apps/integration/service.py"], "/apps/notifications/admin.py": ["/apps/notifications/models.py"], "/apps/account/serializers.py": ["/apps/account/models.py", "/apps/account/service.py", "/apps/integration/service.py", "/apps/notifications/models.py", "/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/forms.py": ["/apps/brand/models.py"], 
"/apps/account/custom_openapi.py": ["/apps/account/serializers.py"], "/apps/account/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/integration/urls.py": ["/apps/integration/views.py"], "/apps/setting/views.py": ["/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/migrations/0001_initial.py": ["/core/utils.py"], "/apps/brand/serializers.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/integration/service.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/integration/serializers.py", "/apps/setting/models.py"], "/apps/check/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/brand/serializers.py", "/apps/check/models.py", "/apps/setting/models.py", "/apps/notifications/models.py"], "/apps/account/tests/factories.py": ["/apps/account/models.py"], "/apps/info/tests/test_views.py": ["/apps/info/models.py", "/apps/info/tests/factories.py"], "/apps/notifications/views.py": ["/apps/notifications/models.py", "/apps/notifications/pagination.py", "/apps/notifications/serializers.py"], "/apps/check/service.py": ["/apps/notifications/tasks.py"], "/apps/setting/serializers.py": ["/apps/setting/models.py"], "/apps/info/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/setting/tests/test_views.py": ["/apps/setting/models.py", "/apps/setting/tests/factories.py"]}
|
6,509
|
TimurAbdymazhinov/adeliya-backend
|
refs/heads/main
|
/apps/brand/urls.py
|
from django.urls import path, include
from apps.brand.views import (
BrandRetrieveAPIView, BrandListAPIView, FilialListAPIView,
FilialRetrieveAPIView
)
# URL routes for the brand app: brand list/detail and nested filial routes.
urlpatterns = [
    path('', BrandListAPIView.as_view(), name='brand_list'),              # all brands
    path('<int:id>/', BrandRetrieveAPIView.as_view(), name='brand_detail'),  # one brand
    path('filial/', include([
        path('', FilialListAPIView.as_view(), name='filial_list'),           # all filials
        path('<int:id>/', FilialRetrieveAPIView.as_view(), name='filial_detail')  # one filial
    ])),
]
|
{"/apps/info/views.py": ["/apps/brand/pagination.py", "/core/constants.py", "/apps/info/models.py", "/apps/info/serializers.py", "/apps/notifications/service.py"], "/apps/notifications/serializers.py": ["/apps/notifications/models.py"], "/apps/brand/urls.py": ["/apps/brand/views.py"], "/apps/check/urls.py": ["/apps/check/views.py"], "/apps/account/urls.py": ["/apps/account/views.py"], "/apps/check/admin.py": ["/apps/check/models.py"], "/apps/info/migrations/0001_initial.py": ["/core/utils.py"], "/apps/setting/urls.py": ["/apps/setting/views.py"], "/apps/info/tests/factories.py": ["/apps/info/models.py", "/core/constants.py"], "/apps/brand/service.py": ["/apps/brand/models.py", "/core/constants.py"], "/apps/info/urls.py": ["/apps/info/views.py"], "/apps/brand/tests/test_views.py": ["/apps/brand/models.py"], "/apps/setting/tests/test_middleware.py": ["/apps/setting/tests/factories.py"], "/apps/brand/admin.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/check/models.py": ["/apps/account/models.py", "/apps/brand/models.py", "/core/constants.py"], "/apps/brand/models.py": ["/core/utils.py", "/core/constants.py"], "/apps/brand/migrations/0005_auto_20210405_1740.py": ["/core/utils.py"], "/apps/integration/views.py": ["/apps/check/models.py", "/apps/integration/serializers.py", "/apps/integration/service.py", "/apps/check/service.py"], "/apps/brand/tests/factories.py": ["/apps/brand/models.py"], "/apps/account/tests/test_views.py": ["/apps/account/service.py", "/apps/account/tests/factories.py", "/apps/account/tests/mymock.py", "/core/constants.py"], "/apps/info/admin.py": ["/apps/brand/admin.py", "/apps/info/models.py", "/apps/notifications/tasks.py"], "/apps/notifications/service.py": ["/core/constants.py", "/apps/check/models.py", "/apps/notifications/models.py", "/apps/notifications/tasks.py"], "/apps/check/tests/test_views.py": ["/apps/account/tests/factories.py"], "/apps/setting/middleware.py": ["/apps/setting/service.py"], 
"/apps/notifications/tasks.py": ["/apps/notifications/models.py", "/apps/notifications/service.py", "/core/constants.py", "/apps/account/models.py"], "/apps/check/filters.py": ["/apps/check/models.py"], "/apps/notifications/pagination.py": ["/apps/notifications/service.py"], "/apps/account/service.py": ["/apps/integration/service.py", "/apps/notifications/tasks.py", "/apps/account/serializers.py"], "/apps/setting/tests/factories.py": ["/apps/setting/models.py"], "/apps/check/views.py": ["/apps/account/custom_openapi.py", "/apps/account/serializers.py", "/apps/brand/pagination.py", "/apps/check/models.py", "/apps/check/serializers.py", "/apps/check/service.py", "/apps/check/filters.py", "/apps/notifications/service.py"], "/apps/info/serializers.py": ["/apps/info/models.py", "/apps/notifications/models.py"], "/apps/setting/admin.py": ["/apps/setting/models.py"], "/apps/setting/service.py": ["/apps/setting/models.py"], "/apps/notifications/urls.py": ["/apps/notifications/views.py"], "/apps/brand/views.py": ["/apps/brand/__init__.py", "/apps/brand/models.py", "/apps/brand/pagination.py", "/apps/brand/serializers.py", "/apps/brand/service.py"], "/apps/integration/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/check/models.py"], "/apps/notifications/models.py": ["/apps/account/models.py", "/core/constants.py", "/apps/setting/models.py"], "/apps/info/migrations/0006_auto_20210408_1352.py": ["/core/utils.py"], "/apps/account/views.py": ["/apps/account/custom_openapi.py", "/apps/account/models.py", "/apps/account/serializers.py", "/apps/account/service.py", "/apps/integration/service.py"], "/apps/notifications/admin.py": ["/apps/notifications/models.py"], "/apps/account/serializers.py": ["/apps/account/models.py", "/apps/account/service.py", "/apps/integration/service.py", "/apps/notifications/models.py", "/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/forms.py": ["/apps/brand/models.py"], 
"/apps/account/custom_openapi.py": ["/apps/account/serializers.py"], "/apps/account/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/integration/urls.py": ["/apps/integration/views.py"], "/apps/setting/views.py": ["/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/migrations/0001_initial.py": ["/core/utils.py"], "/apps/brand/serializers.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/integration/service.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/integration/serializers.py", "/apps/setting/models.py"], "/apps/check/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/brand/serializers.py", "/apps/check/models.py", "/apps/setting/models.py", "/apps/notifications/models.py"], "/apps/account/tests/factories.py": ["/apps/account/models.py"], "/apps/info/tests/test_views.py": ["/apps/info/models.py", "/apps/info/tests/factories.py"], "/apps/notifications/views.py": ["/apps/notifications/models.py", "/apps/notifications/pagination.py", "/apps/notifications/serializers.py"], "/apps/check/service.py": ["/apps/notifications/tasks.py"], "/apps/setting/serializers.py": ["/apps/setting/models.py"], "/apps/info/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/setting/tests/test_views.py": ["/apps/setting/models.py", "/apps/setting/tests/factories.py"]}
|
6,510
|
TimurAbdymazhinov/adeliya-backend
|
refs/heads/main
|
/apps/check/urls.py
|
from django.urls import path

from apps.check.views import (
    CheckListAPIView,
    CheckRetrieveAPIView,
    QRCodeAPIView,
)

# URL routes for the "check" app.
urlpatterns = [
    # List endpoint for checks.
    path(route='', view=CheckListAPIView.as_view(), name='check_list'),
    # Single check, looked up by primary key.
    path(route='<int:pk>/', view=CheckRetrieveAPIView.as_view(), name='check_detail'),
    # QR-code endpoint.
    path(route='qr/', view=QRCodeAPIView.as_view(), name='qr_code'),
]
|
{"/apps/info/views.py": ["/apps/brand/pagination.py", "/core/constants.py", "/apps/info/models.py", "/apps/info/serializers.py", "/apps/notifications/service.py"], "/apps/notifications/serializers.py": ["/apps/notifications/models.py"], "/apps/brand/urls.py": ["/apps/brand/views.py"], "/apps/check/urls.py": ["/apps/check/views.py"], "/apps/account/urls.py": ["/apps/account/views.py"], "/apps/check/admin.py": ["/apps/check/models.py"], "/apps/info/migrations/0001_initial.py": ["/core/utils.py"], "/apps/setting/urls.py": ["/apps/setting/views.py"], "/apps/info/tests/factories.py": ["/apps/info/models.py", "/core/constants.py"], "/apps/brand/service.py": ["/apps/brand/models.py", "/core/constants.py"], "/apps/info/urls.py": ["/apps/info/views.py"], "/apps/brand/tests/test_views.py": ["/apps/brand/models.py"], "/apps/setting/tests/test_middleware.py": ["/apps/setting/tests/factories.py"], "/apps/brand/admin.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/check/models.py": ["/apps/account/models.py", "/apps/brand/models.py", "/core/constants.py"], "/apps/brand/models.py": ["/core/utils.py", "/core/constants.py"], "/apps/brand/migrations/0005_auto_20210405_1740.py": ["/core/utils.py"], "/apps/integration/views.py": ["/apps/check/models.py", "/apps/integration/serializers.py", "/apps/integration/service.py", "/apps/check/service.py"], "/apps/brand/tests/factories.py": ["/apps/brand/models.py"], "/apps/account/tests/test_views.py": ["/apps/account/service.py", "/apps/account/tests/factories.py", "/apps/account/tests/mymock.py", "/core/constants.py"], "/apps/info/admin.py": ["/apps/brand/admin.py", "/apps/info/models.py", "/apps/notifications/tasks.py"], "/apps/notifications/service.py": ["/core/constants.py", "/apps/check/models.py", "/apps/notifications/models.py", "/apps/notifications/tasks.py"], "/apps/check/tests/test_views.py": ["/apps/account/tests/factories.py"], "/apps/setting/middleware.py": ["/apps/setting/service.py"], 
"/apps/notifications/tasks.py": ["/apps/notifications/models.py", "/apps/notifications/service.py", "/core/constants.py", "/apps/account/models.py"], "/apps/check/filters.py": ["/apps/check/models.py"], "/apps/notifications/pagination.py": ["/apps/notifications/service.py"], "/apps/account/service.py": ["/apps/integration/service.py", "/apps/notifications/tasks.py", "/apps/account/serializers.py"], "/apps/setting/tests/factories.py": ["/apps/setting/models.py"], "/apps/check/views.py": ["/apps/account/custom_openapi.py", "/apps/account/serializers.py", "/apps/brand/pagination.py", "/apps/check/models.py", "/apps/check/serializers.py", "/apps/check/service.py", "/apps/check/filters.py", "/apps/notifications/service.py"], "/apps/info/serializers.py": ["/apps/info/models.py", "/apps/notifications/models.py"], "/apps/setting/admin.py": ["/apps/setting/models.py"], "/apps/setting/service.py": ["/apps/setting/models.py"], "/apps/notifications/urls.py": ["/apps/notifications/views.py"], "/apps/brand/views.py": ["/apps/brand/__init__.py", "/apps/brand/models.py", "/apps/brand/pagination.py", "/apps/brand/serializers.py", "/apps/brand/service.py"], "/apps/integration/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/check/models.py"], "/apps/notifications/models.py": ["/apps/account/models.py", "/core/constants.py", "/apps/setting/models.py"], "/apps/info/migrations/0006_auto_20210408_1352.py": ["/core/utils.py"], "/apps/account/views.py": ["/apps/account/custom_openapi.py", "/apps/account/models.py", "/apps/account/serializers.py", "/apps/account/service.py", "/apps/integration/service.py"], "/apps/notifications/admin.py": ["/apps/notifications/models.py"], "/apps/account/serializers.py": ["/apps/account/models.py", "/apps/account/service.py", "/apps/integration/service.py", "/apps/notifications/models.py", "/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/forms.py": ["/apps/brand/models.py"], 
"/apps/account/custom_openapi.py": ["/apps/account/serializers.py"], "/apps/account/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/integration/urls.py": ["/apps/integration/views.py"], "/apps/setting/views.py": ["/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/migrations/0001_initial.py": ["/core/utils.py"], "/apps/brand/serializers.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/integration/service.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/integration/serializers.py", "/apps/setting/models.py"], "/apps/check/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/brand/serializers.py", "/apps/check/models.py", "/apps/setting/models.py", "/apps/notifications/models.py"], "/apps/account/tests/factories.py": ["/apps/account/models.py"], "/apps/info/tests/test_views.py": ["/apps/info/models.py", "/apps/info/tests/factories.py"], "/apps/notifications/views.py": ["/apps/notifications/models.py", "/apps/notifications/pagination.py", "/apps/notifications/serializers.py"], "/apps/check/service.py": ["/apps/notifications/tasks.py"], "/apps/setting/serializers.py": ["/apps/setting/models.py"], "/apps/info/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/setting/tests/test_views.py": ["/apps/setting/models.py", "/apps/setting/tests/factories.py"]}
|
6,511
|
TimurAbdymazhinov/adeliya-backend
|
refs/heads/main
|
/apps/account/urls.py
|
from django.urls import path
from fcm_django.api.rest_framework import FCMDeviceAuthorizedViewSet

from apps.account.views import (
    AuthAPIView,
    ChangeOldPhoneAPIView,
    CityListAPIView,
    LoginConfirmAPIView,
    NewPhoneConfirmAPIView,
    OldPhoneConfirmAPIView,
    SendSmsToOldPhoneAPIView,
    UserAvatarRetrieveUpdateAPIView,
    UserRetrieveAPIView,
    UserUpdateAPIView,
)

# Routes for the "account" app: auth, phone-change flow, profile data,
# and FCM device registration (handled by fcm_django's viewset).
urlpatterns = [
    # Authentication flow.
    path(route='auth/', view=AuthAPIView.as_view(), name='auth'),
    path(route='login-confirm/', view=LoginConfirmAPIView.as_view(), name='login-confirm'),
    # Phone-number change flow: confirm old phone, then set and confirm new one.
    path(route='send-sms-to-old-phone/', view=SendSmsToOldPhoneAPIView.as_view(), name='send_sms_to_old_phone'),
    path(route='old-phone-confirm/', view=OldPhoneConfirmAPIView.as_view(), name='old_phone_confirm'),
    path(route='change-old-phone/', view=ChangeOldPhoneAPIView.as_view(), name='change_old_phone'),
    path(route='new-phone-confirm/', view=NewPhoneConfirmAPIView.as_view(), name='new_phone_confirm'),
    # Reference data and profile endpoints.
    path(route='cities/', view=CityListAPIView.as_view(), name='cities'),
    path(route='', view=UserUpdateAPIView.as_view(), name='update'),
    path(route='data/', view=UserRetrieveAPIView.as_view(), name='retrieve'),
    path(route='avatar/', view=UserAvatarRetrieveUpdateAPIView.as_view(), name='avatar'),
    # FCM device create.
    path(
        route='device/',
        view=FCMDeviceAuthorizedViewSet.as_view({'post': 'create'}),
        name='create_fcm_device',
    ),
    # FCM device update/delete by registration id.
    path(
        route='device/<str:registration_id>/',
        view=FCMDeviceAuthorizedViewSet.as_view({'delete': 'destroy',
                                                 'put': 'update'}),
        name='delete_fcm_device',
    ),
]
|
{"/apps/info/views.py": ["/apps/brand/pagination.py", "/core/constants.py", "/apps/info/models.py", "/apps/info/serializers.py", "/apps/notifications/service.py"], "/apps/notifications/serializers.py": ["/apps/notifications/models.py"], "/apps/brand/urls.py": ["/apps/brand/views.py"], "/apps/check/urls.py": ["/apps/check/views.py"], "/apps/account/urls.py": ["/apps/account/views.py"], "/apps/check/admin.py": ["/apps/check/models.py"], "/apps/info/migrations/0001_initial.py": ["/core/utils.py"], "/apps/setting/urls.py": ["/apps/setting/views.py"], "/apps/info/tests/factories.py": ["/apps/info/models.py", "/core/constants.py"], "/apps/brand/service.py": ["/apps/brand/models.py", "/core/constants.py"], "/apps/info/urls.py": ["/apps/info/views.py"], "/apps/brand/tests/test_views.py": ["/apps/brand/models.py"], "/apps/setting/tests/test_middleware.py": ["/apps/setting/tests/factories.py"], "/apps/brand/admin.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/check/models.py": ["/apps/account/models.py", "/apps/brand/models.py", "/core/constants.py"], "/apps/brand/models.py": ["/core/utils.py", "/core/constants.py"], "/apps/brand/migrations/0005_auto_20210405_1740.py": ["/core/utils.py"], "/apps/integration/views.py": ["/apps/check/models.py", "/apps/integration/serializers.py", "/apps/integration/service.py", "/apps/check/service.py"], "/apps/brand/tests/factories.py": ["/apps/brand/models.py"], "/apps/account/tests/test_views.py": ["/apps/account/service.py", "/apps/account/tests/factories.py", "/apps/account/tests/mymock.py", "/core/constants.py"], "/apps/info/admin.py": ["/apps/brand/admin.py", "/apps/info/models.py", "/apps/notifications/tasks.py"], "/apps/notifications/service.py": ["/core/constants.py", "/apps/check/models.py", "/apps/notifications/models.py", "/apps/notifications/tasks.py"], "/apps/check/tests/test_views.py": ["/apps/account/tests/factories.py"], "/apps/setting/middleware.py": ["/apps/setting/service.py"], 
"/apps/notifications/tasks.py": ["/apps/notifications/models.py", "/apps/notifications/service.py", "/core/constants.py", "/apps/account/models.py"], "/apps/check/filters.py": ["/apps/check/models.py"], "/apps/notifications/pagination.py": ["/apps/notifications/service.py"], "/apps/account/service.py": ["/apps/integration/service.py", "/apps/notifications/tasks.py", "/apps/account/serializers.py"], "/apps/setting/tests/factories.py": ["/apps/setting/models.py"], "/apps/check/views.py": ["/apps/account/custom_openapi.py", "/apps/account/serializers.py", "/apps/brand/pagination.py", "/apps/check/models.py", "/apps/check/serializers.py", "/apps/check/service.py", "/apps/check/filters.py", "/apps/notifications/service.py"], "/apps/info/serializers.py": ["/apps/info/models.py", "/apps/notifications/models.py"], "/apps/setting/admin.py": ["/apps/setting/models.py"], "/apps/setting/service.py": ["/apps/setting/models.py"], "/apps/notifications/urls.py": ["/apps/notifications/views.py"], "/apps/brand/views.py": ["/apps/brand/__init__.py", "/apps/brand/models.py", "/apps/brand/pagination.py", "/apps/brand/serializers.py", "/apps/brand/service.py"], "/apps/integration/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/check/models.py"], "/apps/notifications/models.py": ["/apps/account/models.py", "/core/constants.py", "/apps/setting/models.py"], "/apps/info/migrations/0006_auto_20210408_1352.py": ["/core/utils.py"], "/apps/account/views.py": ["/apps/account/custom_openapi.py", "/apps/account/models.py", "/apps/account/serializers.py", "/apps/account/service.py", "/apps/integration/service.py"], "/apps/notifications/admin.py": ["/apps/notifications/models.py"], "/apps/account/serializers.py": ["/apps/account/models.py", "/apps/account/service.py", "/apps/integration/service.py", "/apps/notifications/models.py", "/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/forms.py": ["/apps/brand/models.py"], 
"/apps/account/custom_openapi.py": ["/apps/account/serializers.py"], "/apps/account/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/integration/urls.py": ["/apps/integration/views.py"], "/apps/setting/views.py": ["/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/migrations/0001_initial.py": ["/core/utils.py"], "/apps/brand/serializers.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/integration/service.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/integration/serializers.py", "/apps/setting/models.py"], "/apps/check/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/brand/serializers.py", "/apps/check/models.py", "/apps/setting/models.py", "/apps/notifications/models.py"], "/apps/account/tests/factories.py": ["/apps/account/models.py"], "/apps/info/tests/test_views.py": ["/apps/info/models.py", "/apps/info/tests/factories.py"], "/apps/notifications/views.py": ["/apps/notifications/models.py", "/apps/notifications/pagination.py", "/apps/notifications/serializers.py"], "/apps/check/service.py": ["/apps/notifications/tasks.py"], "/apps/setting/serializers.py": ["/apps/setting/models.py"], "/apps/info/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/setting/tests/test_views.py": ["/apps/setting/models.py", "/apps/setting/tests/factories.py"]}
|
6,512
|
TimurAbdymazhinov/adeliya-backend
|
refs/heads/main
|
/apps/setting/migrations/0002_appversion.py
|
# Generated by Django 2.2.16 on 2021-04-07 07:25
from django.db import migrations, models
# Auto-generated migration: adds the AppVersion model, which stores the
# Android/iOS app version strings and a force-update flag per platform.
# Do not hand-edit generated migration code.
class Migration(migrations.Migration):
    # Builds on the setting app's initial schema.
    dependencies = [
        ('setting', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='AppVersion',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('android_version', models.CharField(max_length=15, verbose_name='Версия для android приложения')),
                ('android_force_update', models.BooleanField(default=False, verbose_name='Принудительное обновление(Вкл/Выкл)')),
                ('ios_version', models.CharField(max_length=15, verbose_name='Версия для ios приложения')),
                ('ios_force_update', models.BooleanField(default=False, verbose_name='Принудительное обновление(Вкл/Выкл)')),
            ],
            options={
                'verbose_name': 'Версия мобильного приложения',
                'verbose_name_plural': 'Версии мобильного приложения',
            },
        ),
    ]
|
{"/apps/info/views.py": ["/apps/brand/pagination.py", "/core/constants.py", "/apps/info/models.py", "/apps/info/serializers.py", "/apps/notifications/service.py"], "/apps/notifications/serializers.py": ["/apps/notifications/models.py"], "/apps/brand/urls.py": ["/apps/brand/views.py"], "/apps/check/urls.py": ["/apps/check/views.py"], "/apps/account/urls.py": ["/apps/account/views.py"], "/apps/check/admin.py": ["/apps/check/models.py"], "/apps/info/migrations/0001_initial.py": ["/core/utils.py"], "/apps/setting/urls.py": ["/apps/setting/views.py"], "/apps/info/tests/factories.py": ["/apps/info/models.py", "/core/constants.py"], "/apps/brand/service.py": ["/apps/brand/models.py", "/core/constants.py"], "/apps/info/urls.py": ["/apps/info/views.py"], "/apps/brand/tests/test_views.py": ["/apps/brand/models.py"], "/apps/setting/tests/test_middleware.py": ["/apps/setting/tests/factories.py"], "/apps/brand/admin.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/check/models.py": ["/apps/account/models.py", "/apps/brand/models.py", "/core/constants.py"], "/apps/brand/models.py": ["/core/utils.py", "/core/constants.py"], "/apps/brand/migrations/0005_auto_20210405_1740.py": ["/core/utils.py"], "/apps/integration/views.py": ["/apps/check/models.py", "/apps/integration/serializers.py", "/apps/integration/service.py", "/apps/check/service.py"], "/apps/brand/tests/factories.py": ["/apps/brand/models.py"], "/apps/account/tests/test_views.py": ["/apps/account/service.py", "/apps/account/tests/factories.py", "/apps/account/tests/mymock.py", "/core/constants.py"], "/apps/info/admin.py": ["/apps/brand/admin.py", "/apps/info/models.py", "/apps/notifications/tasks.py"], "/apps/notifications/service.py": ["/core/constants.py", "/apps/check/models.py", "/apps/notifications/models.py", "/apps/notifications/tasks.py"], "/apps/check/tests/test_views.py": ["/apps/account/tests/factories.py"], "/apps/setting/middleware.py": ["/apps/setting/service.py"], 
"/apps/notifications/tasks.py": ["/apps/notifications/models.py", "/apps/notifications/service.py", "/core/constants.py", "/apps/account/models.py"], "/apps/check/filters.py": ["/apps/check/models.py"], "/apps/notifications/pagination.py": ["/apps/notifications/service.py"], "/apps/account/service.py": ["/apps/integration/service.py", "/apps/notifications/tasks.py", "/apps/account/serializers.py"], "/apps/setting/tests/factories.py": ["/apps/setting/models.py"], "/apps/check/views.py": ["/apps/account/custom_openapi.py", "/apps/account/serializers.py", "/apps/brand/pagination.py", "/apps/check/models.py", "/apps/check/serializers.py", "/apps/check/service.py", "/apps/check/filters.py", "/apps/notifications/service.py"], "/apps/info/serializers.py": ["/apps/info/models.py", "/apps/notifications/models.py"], "/apps/setting/admin.py": ["/apps/setting/models.py"], "/apps/setting/service.py": ["/apps/setting/models.py"], "/apps/notifications/urls.py": ["/apps/notifications/views.py"], "/apps/brand/views.py": ["/apps/brand/__init__.py", "/apps/brand/models.py", "/apps/brand/pagination.py", "/apps/brand/serializers.py", "/apps/brand/service.py"], "/apps/integration/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/check/models.py"], "/apps/notifications/models.py": ["/apps/account/models.py", "/core/constants.py", "/apps/setting/models.py"], "/apps/info/migrations/0006_auto_20210408_1352.py": ["/core/utils.py"], "/apps/account/views.py": ["/apps/account/custom_openapi.py", "/apps/account/models.py", "/apps/account/serializers.py", "/apps/account/service.py", "/apps/integration/service.py"], "/apps/notifications/admin.py": ["/apps/notifications/models.py"], "/apps/account/serializers.py": ["/apps/account/models.py", "/apps/account/service.py", "/apps/integration/service.py", "/apps/notifications/models.py", "/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/forms.py": ["/apps/brand/models.py"], 
"/apps/account/custom_openapi.py": ["/apps/account/serializers.py"], "/apps/account/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/integration/urls.py": ["/apps/integration/views.py"], "/apps/setting/views.py": ["/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/migrations/0001_initial.py": ["/core/utils.py"], "/apps/brand/serializers.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/integration/service.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/integration/serializers.py", "/apps/setting/models.py"], "/apps/check/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/brand/serializers.py", "/apps/check/models.py", "/apps/setting/models.py", "/apps/notifications/models.py"], "/apps/account/tests/factories.py": ["/apps/account/models.py"], "/apps/info/tests/test_views.py": ["/apps/info/models.py", "/apps/info/tests/factories.py"], "/apps/notifications/views.py": ["/apps/notifications/models.py", "/apps/notifications/pagination.py", "/apps/notifications/serializers.py"], "/apps/check/service.py": ["/apps/notifications/tasks.py"], "/apps/setting/serializers.py": ["/apps/setting/models.py"], "/apps/info/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/setting/tests/test_views.py": ["/apps/setting/models.py", "/apps/setting/tests/factories.py"]}
|
6,513
|
TimurAbdymazhinov/adeliya-backend
|
refs/heads/main
|
/apps/check/admin.py
|
from django.contrib import admin

from apps.check.models import Check


@admin.register(Check)
class CheckAdmin(admin.ModelAdmin):
    """Read-only admin for Check records.

    Every field is read-only and adding/deleting is disabled, so checks can
    only be inspected here, never created or modified through the admin.
    """

    list_display = ('unique_1c_check_code', 'user', 'filial',)
    search_fields = ('unique_1c_check_code',)
    # All model fields are displayed but locked against edits.
    readonly_fields = (
        'unique_1c_check_code', 'money_paid', 'bonus_paid', 'total_paid',
        'accrued_point', 'accrued_point_date', 'withdrawn_point',
        'withdrawn_point_date', 'is_active', 'user', 'filial', 'status',
        'is_on_credit', 'balance_owed', 'due_date',
    )
    # Raw 1C linkage codes are hidden from the form entirely.
    exclude = (
        'user_1c_code', 'filial_1c_code'
    )

    def has_add_permission(self, request):
        """Disallow creating checks from the admin."""
        return False

    def has_delete_permission(self, request, obj=None):
        """Disallow deleting checks from the admin."""
        return False
|
{"/apps/info/views.py": ["/apps/brand/pagination.py", "/core/constants.py", "/apps/info/models.py", "/apps/info/serializers.py", "/apps/notifications/service.py"], "/apps/notifications/serializers.py": ["/apps/notifications/models.py"], "/apps/brand/urls.py": ["/apps/brand/views.py"], "/apps/check/urls.py": ["/apps/check/views.py"], "/apps/account/urls.py": ["/apps/account/views.py"], "/apps/check/admin.py": ["/apps/check/models.py"], "/apps/info/migrations/0001_initial.py": ["/core/utils.py"], "/apps/setting/urls.py": ["/apps/setting/views.py"], "/apps/info/tests/factories.py": ["/apps/info/models.py", "/core/constants.py"], "/apps/brand/service.py": ["/apps/brand/models.py", "/core/constants.py"], "/apps/info/urls.py": ["/apps/info/views.py"], "/apps/brand/tests/test_views.py": ["/apps/brand/models.py"], "/apps/setting/tests/test_middleware.py": ["/apps/setting/tests/factories.py"], "/apps/brand/admin.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/check/models.py": ["/apps/account/models.py", "/apps/brand/models.py", "/core/constants.py"], "/apps/brand/models.py": ["/core/utils.py", "/core/constants.py"], "/apps/brand/migrations/0005_auto_20210405_1740.py": ["/core/utils.py"], "/apps/integration/views.py": ["/apps/check/models.py", "/apps/integration/serializers.py", "/apps/integration/service.py", "/apps/check/service.py"], "/apps/brand/tests/factories.py": ["/apps/brand/models.py"], "/apps/account/tests/test_views.py": ["/apps/account/service.py", "/apps/account/tests/factories.py", "/apps/account/tests/mymock.py", "/core/constants.py"], "/apps/info/admin.py": ["/apps/brand/admin.py", "/apps/info/models.py", "/apps/notifications/tasks.py"], "/apps/notifications/service.py": ["/core/constants.py", "/apps/check/models.py", "/apps/notifications/models.py", "/apps/notifications/tasks.py"], "/apps/check/tests/test_views.py": ["/apps/account/tests/factories.py"], "/apps/setting/middleware.py": ["/apps/setting/service.py"], 
"/apps/notifications/tasks.py": ["/apps/notifications/models.py", "/apps/notifications/service.py", "/core/constants.py", "/apps/account/models.py"], "/apps/check/filters.py": ["/apps/check/models.py"], "/apps/notifications/pagination.py": ["/apps/notifications/service.py"], "/apps/account/service.py": ["/apps/integration/service.py", "/apps/notifications/tasks.py", "/apps/account/serializers.py"], "/apps/setting/tests/factories.py": ["/apps/setting/models.py"], "/apps/check/views.py": ["/apps/account/custom_openapi.py", "/apps/account/serializers.py", "/apps/brand/pagination.py", "/apps/check/models.py", "/apps/check/serializers.py", "/apps/check/service.py", "/apps/check/filters.py", "/apps/notifications/service.py"], "/apps/info/serializers.py": ["/apps/info/models.py", "/apps/notifications/models.py"], "/apps/setting/admin.py": ["/apps/setting/models.py"], "/apps/setting/service.py": ["/apps/setting/models.py"], "/apps/notifications/urls.py": ["/apps/notifications/views.py"], "/apps/brand/views.py": ["/apps/brand/__init__.py", "/apps/brand/models.py", "/apps/brand/pagination.py", "/apps/brand/serializers.py", "/apps/brand/service.py"], "/apps/integration/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/check/models.py"], "/apps/notifications/models.py": ["/apps/account/models.py", "/core/constants.py", "/apps/setting/models.py"], "/apps/info/migrations/0006_auto_20210408_1352.py": ["/core/utils.py"], "/apps/account/views.py": ["/apps/account/custom_openapi.py", "/apps/account/models.py", "/apps/account/serializers.py", "/apps/account/service.py", "/apps/integration/service.py"], "/apps/notifications/admin.py": ["/apps/notifications/models.py"], "/apps/account/serializers.py": ["/apps/account/models.py", "/apps/account/service.py", "/apps/integration/service.py", "/apps/notifications/models.py", "/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/forms.py": ["/apps/brand/models.py"], 
"/apps/account/custom_openapi.py": ["/apps/account/serializers.py"], "/apps/account/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/integration/urls.py": ["/apps/integration/views.py"], "/apps/setting/views.py": ["/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/migrations/0001_initial.py": ["/core/utils.py"], "/apps/brand/serializers.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/integration/service.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/integration/serializers.py", "/apps/setting/models.py"], "/apps/check/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/brand/serializers.py", "/apps/check/models.py", "/apps/setting/models.py", "/apps/notifications/models.py"], "/apps/account/tests/factories.py": ["/apps/account/models.py"], "/apps/info/tests/test_views.py": ["/apps/info/models.py", "/apps/info/tests/factories.py"], "/apps/notifications/views.py": ["/apps/notifications/models.py", "/apps/notifications/pagination.py", "/apps/notifications/serializers.py"], "/apps/check/service.py": ["/apps/notifications/tasks.py"], "/apps/setting/serializers.py": ["/apps/setting/models.py"], "/apps/info/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/setting/tests/test_views.py": ["/apps/setting/models.py", "/apps/setting/tests/factories.py"]}
|
6,514
|
TimurAbdymazhinov/adeliya-backend
|
refs/heads/main
|
/apps/info/migrations/0001_initial.py
|
# Generated by Django 2.2 on 2021-03-29 07:58
import core.utils
from django.db import migrations, models
import django.db.models.deletion
# Auto-generated initial migration for the "info" app. Creates the Banner,
# Contact, ProgramCondition, PromotionAndNews and PromotionAndNewsImage
# tables. Do not hand-edit generated migration code.
class Migration(migrations.Migration):
    initial = True
    dependencies = [
    ]
    operations = [
        # Banner: a title plus an image (upload path via core.utils.generate_filename).
        migrations.CreateModel(
            name='Banner',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=25, verbose_name='Заголовок баннера')),
                ('image', models.ImageField(upload_to=core.utils.generate_filename, verbose_name='Фото баннера')),
            ],
            options={
                'verbose_name': 'Баннер',
                'verbose_name_plural': 'Баннера',
            },
        ),
        # Contact: a link with a fixed choice of icon types (email/instagram/facebook/website).
        migrations.CreateModel(
            name='Contact',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('icon_type', models.CharField(choices=[('email-icon', 'Иконка почты'), ('instagram-icon', 'Иконка instagram'), ('facebook-icon', 'Иконка facebook'), ('website-icon', 'Иконка веб-сайта')], default='website-icon', max_length=20, verbose_name='Тип иконки')),
                ('title', models.CharField(max_length=100, verbose_name='Заголовок ссылки')),
                ('link', models.CharField(max_length=150, verbose_name='Ссылка')),
            ],
            options={
                'verbose_name': 'Контакт',
                'verbose_name_plural': 'Контакты',
            },
        ),
        # ProgramCondition: loyalty-program title and description text.
        migrations.CreateModel(
            name='ProgramCondition',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=50, verbose_name='Заголовок программы лояльности')),
                ('description', models.TextField(verbose_name='Текст программы лояльности')),
            ],
            options={
                'verbose_name': 'Программа лояльности',
                'verbose_name_plural': 'Программы лояльности',
            },
        ),
        # PromotionAndNews: a news item or promotion ('information_type' choice),
        # with an is_active publication flag.
        migrations.CreateModel(
            name='PromotionAndNews',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_at', models.DateField(auto_now_add=True)),
                ('information_type', models.CharField(choices=[('news', 'Новость'), ('promotion', 'Акция')], default='news', max_length=15)),
                ('title', models.CharField(max_length=100, verbose_name='Заголовок')),
                ('description', models.TextField(verbose_name='Текст программы лояльности')),
                ('is_active', models.BooleanField(default=False, verbose_name='Активный(Вкл/Выкл)')),
            ],
            options={
                'verbose_name': 'Акция и новость',
                'verbose_name_plural': 'Акции и новости',
            },
        ),
        # PromotionAndNewsImage: image attached to a PromotionAndNews item
        # (FK with related_name='images'); is_main marks the preview image.
        migrations.CreateModel(
            name='PromotionAndNewsImage',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('image', models.ImageField(upload_to=core.utils.generate_filename, verbose_name='Фото новости или акции')),
                ('is_main', models.BooleanField(default=False, verbose_name='Превью(Вкл/Выкл)')),
                ('information', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='images', to='info.PromotionAndNews')),
            ],
            options={
                'verbose_name': 'Фотография для акции/новости',
                'verbose_name_plural': 'Фотографии для акций/новости',
            },
        ),
    ]
|
{"/apps/info/views.py": ["/apps/brand/pagination.py", "/core/constants.py", "/apps/info/models.py", "/apps/info/serializers.py", "/apps/notifications/service.py"], "/apps/notifications/serializers.py": ["/apps/notifications/models.py"], "/apps/brand/urls.py": ["/apps/brand/views.py"], "/apps/check/urls.py": ["/apps/check/views.py"], "/apps/account/urls.py": ["/apps/account/views.py"], "/apps/check/admin.py": ["/apps/check/models.py"], "/apps/info/migrations/0001_initial.py": ["/core/utils.py"], "/apps/setting/urls.py": ["/apps/setting/views.py"], "/apps/info/tests/factories.py": ["/apps/info/models.py", "/core/constants.py"], "/apps/brand/service.py": ["/apps/brand/models.py", "/core/constants.py"], "/apps/info/urls.py": ["/apps/info/views.py"], "/apps/brand/tests/test_views.py": ["/apps/brand/models.py"], "/apps/setting/tests/test_middleware.py": ["/apps/setting/tests/factories.py"], "/apps/brand/admin.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/check/models.py": ["/apps/account/models.py", "/apps/brand/models.py", "/core/constants.py"], "/apps/brand/models.py": ["/core/utils.py", "/core/constants.py"], "/apps/brand/migrations/0005_auto_20210405_1740.py": ["/core/utils.py"], "/apps/integration/views.py": ["/apps/check/models.py", "/apps/integration/serializers.py", "/apps/integration/service.py", "/apps/check/service.py"], "/apps/brand/tests/factories.py": ["/apps/brand/models.py"], "/apps/account/tests/test_views.py": ["/apps/account/service.py", "/apps/account/tests/factories.py", "/apps/account/tests/mymock.py", "/core/constants.py"], "/apps/info/admin.py": ["/apps/brand/admin.py", "/apps/info/models.py", "/apps/notifications/tasks.py"], "/apps/notifications/service.py": ["/core/constants.py", "/apps/check/models.py", "/apps/notifications/models.py", "/apps/notifications/tasks.py"], "/apps/check/tests/test_views.py": ["/apps/account/tests/factories.py"], "/apps/setting/middleware.py": ["/apps/setting/service.py"], 
"/apps/notifications/tasks.py": ["/apps/notifications/models.py", "/apps/notifications/service.py", "/core/constants.py", "/apps/account/models.py"], "/apps/check/filters.py": ["/apps/check/models.py"], "/apps/notifications/pagination.py": ["/apps/notifications/service.py"], "/apps/account/service.py": ["/apps/integration/service.py", "/apps/notifications/tasks.py", "/apps/account/serializers.py"], "/apps/setting/tests/factories.py": ["/apps/setting/models.py"], "/apps/check/views.py": ["/apps/account/custom_openapi.py", "/apps/account/serializers.py", "/apps/brand/pagination.py", "/apps/check/models.py", "/apps/check/serializers.py", "/apps/check/service.py", "/apps/check/filters.py", "/apps/notifications/service.py"], "/apps/info/serializers.py": ["/apps/info/models.py", "/apps/notifications/models.py"], "/apps/setting/admin.py": ["/apps/setting/models.py"], "/apps/setting/service.py": ["/apps/setting/models.py"], "/apps/notifications/urls.py": ["/apps/notifications/views.py"], "/apps/brand/views.py": ["/apps/brand/__init__.py", "/apps/brand/models.py", "/apps/brand/pagination.py", "/apps/brand/serializers.py", "/apps/brand/service.py"], "/apps/integration/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/check/models.py"], "/apps/notifications/models.py": ["/apps/account/models.py", "/core/constants.py", "/apps/setting/models.py"], "/apps/info/migrations/0006_auto_20210408_1352.py": ["/core/utils.py"], "/apps/account/views.py": ["/apps/account/custom_openapi.py", "/apps/account/models.py", "/apps/account/serializers.py", "/apps/account/service.py", "/apps/integration/service.py"], "/apps/notifications/admin.py": ["/apps/notifications/models.py"], "/apps/account/serializers.py": ["/apps/account/models.py", "/apps/account/service.py", "/apps/integration/service.py", "/apps/notifications/models.py", "/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/forms.py": ["/apps/brand/models.py"], 
"/apps/account/custom_openapi.py": ["/apps/account/serializers.py"], "/apps/account/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/integration/urls.py": ["/apps/integration/views.py"], "/apps/setting/views.py": ["/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/migrations/0001_initial.py": ["/core/utils.py"], "/apps/brand/serializers.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/integration/service.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/integration/serializers.py", "/apps/setting/models.py"], "/apps/check/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/brand/serializers.py", "/apps/check/models.py", "/apps/setting/models.py", "/apps/notifications/models.py"], "/apps/account/tests/factories.py": ["/apps/account/models.py"], "/apps/info/tests/test_views.py": ["/apps/info/models.py", "/apps/info/tests/factories.py"], "/apps/notifications/views.py": ["/apps/notifications/models.py", "/apps/notifications/pagination.py", "/apps/notifications/serializers.py"], "/apps/check/service.py": ["/apps/notifications/tasks.py"], "/apps/setting/serializers.py": ["/apps/setting/models.py"], "/apps/info/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/setting/tests/test_views.py": ["/apps/setting/models.py", "/apps/setting/tests/factories.py"]}
|
6,515
|
TimurAbdymazhinov/adeliya-backend
|
refs/heads/main
|
/apps/setting/urls.py
|
from django.urls import path

from .views import AppVersionAPIView

# URL routes exposed by the `setting` app.
# GET /version/ -> AppVersionAPIView (named 'app-version' for reverse()).
_setting_routes = [
    path('version/', AppVersionAPIView.as_view(), name='app-version'),
]

urlpatterns = _setting_routes
|
{"/apps/info/views.py": ["/apps/brand/pagination.py", "/core/constants.py", "/apps/info/models.py", "/apps/info/serializers.py", "/apps/notifications/service.py"], "/apps/notifications/serializers.py": ["/apps/notifications/models.py"], "/apps/brand/urls.py": ["/apps/brand/views.py"], "/apps/check/urls.py": ["/apps/check/views.py"], "/apps/account/urls.py": ["/apps/account/views.py"], "/apps/check/admin.py": ["/apps/check/models.py"], "/apps/info/migrations/0001_initial.py": ["/core/utils.py"], "/apps/setting/urls.py": ["/apps/setting/views.py"], "/apps/info/tests/factories.py": ["/apps/info/models.py", "/core/constants.py"], "/apps/brand/service.py": ["/apps/brand/models.py", "/core/constants.py"], "/apps/info/urls.py": ["/apps/info/views.py"], "/apps/brand/tests/test_views.py": ["/apps/brand/models.py"], "/apps/setting/tests/test_middleware.py": ["/apps/setting/tests/factories.py"], "/apps/brand/admin.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/check/models.py": ["/apps/account/models.py", "/apps/brand/models.py", "/core/constants.py"], "/apps/brand/models.py": ["/core/utils.py", "/core/constants.py"], "/apps/brand/migrations/0005_auto_20210405_1740.py": ["/core/utils.py"], "/apps/integration/views.py": ["/apps/check/models.py", "/apps/integration/serializers.py", "/apps/integration/service.py", "/apps/check/service.py"], "/apps/brand/tests/factories.py": ["/apps/brand/models.py"], "/apps/account/tests/test_views.py": ["/apps/account/service.py", "/apps/account/tests/factories.py", "/apps/account/tests/mymock.py", "/core/constants.py"], "/apps/info/admin.py": ["/apps/brand/admin.py", "/apps/info/models.py", "/apps/notifications/tasks.py"], "/apps/notifications/service.py": ["/core/constants.py", "/apps/check/models.py", "/apps/notifications/models.py", "/apps/notifications/tasks.py"], "/apps/check/tests/test_views.py": ["/apps/account/tests/factories.py"], "/apps/setting/middleware.py": ["/apps/setting/service.py"], 
"/apps/notifications/tasks.py": ["/apps/notifications/models.py", "/apps/notifications/service.py", "/core/constants.py", "/apps/account/models.py"], "/apps/check/filters.py": ["/apps/check/models.py"], "/apps/notifications/pagination.py": ["/apps/notifications/service.py"], "/apps/account/service.py": ["/apps/integration/service.py", "/apps/notifications/tasks.py", "/apps/account/serializers.py"], "/apps/setting/tests/factories.py": ["/apps/setting/models.py"], "/apps/check/views.py": ["/apps/account/custom_openapi.py", "/apps/account/serializers.py", "/apps/brand/pagination.py", "/apps/check/models.py", "/apps/check/serializers.py", "/apps/check/service.py", "/apps/check/filters.py", "/apps/notifications/service.py"], "/apps/info/serializers.py": ["/apps/info/models.py", "/apps/notifications/models.py"], "/apps/setting/admin.py": ["/apps/setting/models.py"], "/apps/setting/service.py": ["/apps/setting/models.py"], "/apps/notifications/urls.py": ["/apps/notifications/views.py"], "/apps/brand/views.py": ["/apps/brand/__init__.py", "/apps/brand/models.py", "/apps/brand/pagination.py", "/apps/brand/serializers.py", "/apps/brand/service.py"], "/apps/integration/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/check/models.py"], "/apps/notifications/models.py": ["/apps/account/models.py", "/core/constants.py", "/apps/setting/models.py"], "/apps/info/migrations/0006_auto_20210408_1352.py": ["/core/utils.py"], "/apps/account/views.py": ["/apps/account/custom_openapi.py", "/apps/account/models.py", "/apps/account/serializers.py", "/apps/account/service.py", "/apps/integration/service.py"], "/apps/notifications/admin.py": ["/apps/notifications/models.py"], "/apps/account/serializers.py": ["/apps/account/models.py", "/apps/account/service.py", "/apps/integration/service.py", "/apps/notifications/models.py", "/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/forms.py": ["/apps/brand/models.py"], 
"/apps/account/custom_openapi.py": ["/apps/account/serializers.py"], "/apps/account/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/integration/urls.py": ["/apps/integration/views.py"], "/apps/setting/views.py": ["/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/migrations/0001_initial.py": ["/core/utils.py"], "/apps/brand/serializers.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/integration/service.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/integration/serializers.py", "/apps/setting/models.py"], "/apps/check/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/brand/serializers.py", "/apps/check/models.py", "/apps/setting/models.py", "/apps/notifications/models.py"], "/apps/account/tests/factories.py": ["/apps/account/models.py"], "/apps/info/tests/test_views.py": ["/apps/info/models.py", "/apps/info/tests/factories.py"], "/apps/notifications/views.py": ["/apps/notifications/models.py", "/apps/notifications/pagination.py", "/apps/notifications/serializers.py"], "/apps/check/service.py": ["/apps/notifications/tasks.py"], "/apps/setting/serializers.py": ["/apps/setting/models.py"], "/apps/info/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/setting/tests/test_views.py": ["/apps/setting/models.py", "/apps/setting/tests/factories.py"]}
|
6,516
|
TimurAbdymazhinov/adeliya-backend
|
refs/heads/main
|
/core/settings/base.py
|
# Django base settings shared by every environment.
# Environment-specific overrides are imported from core/settings/local.py
# (development) or core/settings/prod.py (production) — see the bottom of
# this file.
import os
import logging
import environ

# Project root: three directory levels above this file
# (<root>/core/settings/base.py -> <root>).
BASE_DIR = os.path.dirname(
    os.path.dirname(os.path.dirname((os.path.abspath(__file__)))))

# django-environ reader; settings below pull secrets/config from the
# process environment via env.str()/env.int() (raises if a required
# variable is missing and no default is given).
env = environ.Env()

INSTALLED_APPS = [
    'huey.contrib.djhuey',   # background task queue (configured in HUEY below)
    'jet',                   # admin interface theme
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'rest_framework.authtoken',
    'rest_framework',
    'solo',                  # NOTE(review): presumably django-solo singleton models
    'ckeditor',              # rich-text fields (CKEDITOR_UPLOAD_PATH below)
    'django_2gis_maps',
    'fcm_django',            # Firebase push notifications (FCM_DJANGO_SETTINGS below)
    'drf_yasg',              # Swagger/OpenAPI generation (SWAGGER_SETTINGS below)
    'django_filters',
    'adminsortable2',
    # Project applications.
    'apps.account',
    'apps.brand',
    'apps.check',
    'apps.info',
    'apps.setting',
    'apps.notifications',
]

# fcm-django (Firebase Cloud Messaging) configuration.
FCM_DJANGO_SETTINGS = {
    # Your firebase API KEY
    "FCM_SERVER_KEY": env.str('FCM_SERVER_KEY'),
    # true if you want to have only one active device per registered user at a time
    # default: False
    "ONE_DEVICE_PER_USER": False,
    # devices to which notifications cannot be sent,
    # are deleted upon receiving error response from FCM
    # default: False
    "DELETE_INACTIVE_DEVICES": True,
}

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.locale.LocaleMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    # Project middleware; runs last so auth/session data is available.
    'apps.setting.middleware.ApplicationStatusMiddleware',
]

ROOT_URLCONF = 'core.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR, 'templates')],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'core.wsgi.application'

# Custom user model lives in apps/account.
AUTH_USER_MODEL = 'account.User'

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Localisation: Russian UI, Kyrgyzstan timezone, timezone-aware datetimes.
LANGUAGE_CODE = 'ru'
TIME_ZONE = 'Asia/Bishkek'
USE_I18N = True
USE_L10N = True
USE_TZ = True

# Static and media files are served from directories under the project root.
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)

JET_SIDE_MENU_COMPACT = True

# ckeditor
CKEDITOR_UPLOAD_PATH = "uploads/"

# drf-yasg: expose both Basic and Token auth in the Swagger UI.
SWAGGER_SETTINGS = {
    'SECURITY_DEFINITIONS': {
        'Basic': {
            'type': 'basic'
        },
        'Token': {
            'type': 'apiKey',
            'name': 'Authorization',
            'in': 'header'
        }
    }
}

REST_FRAMEWORK = {
    'DEFAULT_AUTHENTICATION_CLASSES': (
        'rest_framework.authentication.TokenAuthentication',
        'rest_framework.authentication.SessionAuthentication',
    ),
}

# Credentials for the "Nikita" service.
# NOTE(review): presumably an SMS gateway (login/password/sender/test-mode
# flag) — confirm against the code that consumes these settings.
NIKITA_LOGIN = env.str('NIKITA_LOGIN')
NIKITA_PASSWORD = env.str('NIKITA_PASSWORD')
NIKITA_SENDER = env.str('NIKITA_SENDER')
NIKITA_TEST = env.int('NIKITA_TEST', default=1)

# Integration endpoints for the external 1C accounting server.
BASE_1C_SERVER_DOMAIN = env.str(
    'BASE_1C_SERVER_DOMAIN', default='http://185.29.184.147:12344'
)
LINKS_1C = {
    'SYNC_USER_URL': BASE_1C_SERVER_DOMAIN + '/sendUser',
    'GET_USER_WALLET_DATA_URL':
        BASE_1C_SERVER_DOMAIN + '/userPoint/',
    'CHANGE_USER_NUMBER': BASE_1C_SERVER_DOMAIN + '/change_number'
}
# HTTP credentials used when calling the 1C endpoints above.
USER_1C = {
    'username': env.str('USER_1C_USERNAME'),
    'password': env.str('USER_1C_PASSWORD')
}

# Console-only logging; request/DB loggers are restricted to ERROR.
LOGGING = {
    "version": 1,
    "formatters": {"simple": {"format": "{levelname} {message}", "style": "{"}},
    "handlers": {
        "console": {
            "level": "DEBUG",
            "class": "logging.StreamHandler",
            "formatter": "simple",
        }
    },
    "loggers": {
        "django": {"handlers": ["console"], "propagate": True},
        "django.request": {
            "handlers": ["console"],
            "level": "ERROR",
            "propagate": False,
        },
        "django.db.backends": {
            "handlers": ["console"],
            "level": "ERROR",
            "propagate": False,
        },
    },
}

# Max request body buffered in memory: 5242880 bytes = 5 MiB.
DATA_UPLOAD_MAX_MEMORY_SIZE = 5242880

# Huey task queue: Redis on localhost, 5 worker threads, periodic tasks on.
HUEY = {
    'huey_class': 'huey.RedisHuey',  # Huey implementation to use.
    'immediate': False,
    'connection': {
        'host': 'localhost',
        'port': 6379,
        'db': 0,
        'connection_pool': None,  # Definitely you should use pooling!
        # ... tons of other options, see redis-py for details.
        # huey-specific connection parameters.
        'read_timeout': 1,  # If not polling (blocking pop), use timeout.
        'url': None,  # Allow Redis config via a DSN.
    },
    'consumer': {
        'workers': 5,
        'worker_type': 'thread',
        'initial_delay': 0.1,  # Smallest polling interval, same as -d.
        'backoff': 1.15,  # Exponential backoff using this rate, -b.
        'max_delay': 10.0,  # Max possible polling interval, -m.
        'scheduler_interval': 1,  # Check schedule every second, -s.
        'periodic': True,  # Enable crontab feature.
        'check_worker_health': True,  # Enable worker health checks.
        'health_check_interval': 1,  # Check worker health every second.
    },
}

# Environment selection: prefer local.py (adds debug toolbar); fall back to
# prod.py; log an error if neither exists.
# NOTE(review): if local.py imports cleanly but debug_toolbar is not
# installed, the ImportError from the += lines silently falls through to
# the prod branch — confirm this is intended.
try:
    from .local import *
    INSTALLED_APPS += ['debug_toolbar']
    MIDDLEWARE += ['debug_toolbar.middleware.DebugToolbarMiddleware']
except ImportError:
    try:
        from .prod import *
    except ImportError:
        logging.error('core.settings.prod.py file not found !')
|
{"/apps/info/views.py": ["/apps/brand/pagination.py", "/core/constants.py", "/apps/info/models.py", "/apps/info/serializers.py", "/apps/notifications/service.py"], "/apps/notifications/serializers.py": ["/apps/notifications/models.py"], "/apps/brand/urls.py": ["/apps/brand/views.py"], "/apps/check/urls.py": ["/apps/check/views.py"], "/apps/account/urls.py": ["/apps/account/views.py"], "/apps/check/admin.py": ["/apps/check/models.py"], "/apps/info/migrations/0001_initial.py": ["/core/utils.py"], "/apps/setting/urls.py": ["/apps/setting/views.py"], "/apps/info/tests/factories.py": ["/apps/info/models.py", "/core/constants.py"], "/apps/brand/service.py": ["/apps/brand/models.py", "/core/constants.py"], "/apps/info/urls.py": ["/apps/info/views.py"], "/apps/brand/tests/test_views.py": ["/apps/brand/models.py"], "/apps/setting/tests/test_middleware.py": ["/apps/setting/tests/factories.py"], "/apps/brand/admin.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/check/models.py": ["/apps/account/models.py", "/apps/brand/models.py", "/core/constants.py"], "/apps/brand/models.py": ["/core/utils.py", "/core/constants.py"], "/apps/brand/migrations/0005_auto_20210405_1740.py": ["/core/utils.py"], "/apps/integration/views.py": ["/apps/check/models.py", "/apps/integration/serializers.py", "/apps/integration/service.py", "/apps/check/service.py"], "/apps/brand/tests/factories.py": ["/apps/brand/models.py"], "/apps/account/tests/test_views.py": ["/apps/account/service.py", "/apps/account/tests/factories.py", "/apps/account/tests/mymock.py", "/core/constants.py"], "/apps/info/admin.py": ["/apps/brand/admin.py", "/apps/info/models.py", "/apps/notifications/tasks.py"], "/apps/notifications/service.py": ["/core/constants.py", "/apps/check/models.py", "/apps/notifications/models.py", "/apps/notifications/tasks.py"], "/apps/check/tests/test_views.py": ["/apps/account/tests/factories.py"], "/apps/setting/middleware.py": ["/apps/setting/service.py"], 
"/apps/notifications/tasks.py": ["/apps/notifications/models.py", "/apps/notifications/service.py", "/core/constants.py", "/apps/account/models.py"], "/apps/check/filters.py": ["/apps/check/models.py"], "/apps/notifications/pagination.py": ["/apps/notifications/service.py"], "/apps/account/service.py": ["/apps/integration/service.py", "/apps/notifications/tasks.py", "/apps/account/serializers.py"], "/apps/setting/tests/factories.py": ["/apps/setting/models.py"], "/apps/check/views.py": ["/apps/account/custom_openapi.py", "/apps/account/serializers.py", "/apps/brand/pagination.py", "/apps/check/models.py", "/apps/check/serializers.py", "/apps/check/service.py", "/apps/check/filters.py", "/apps/notifications/service.py"], "/apps/info/serializers.py": ["/apps/info/models.py", "/apps/notifications/models.py"], "/apps/setting/admin.py": ["/apps/setting/models.py"], "/apps/setting/service.py": ["/apps/setting/models.py"], "/apps/notifications/urls.py": ["/apps/notifications/views.py"], "/apps/brand/views.py": ["/apps/brand/__init__.py", "/apps/brand/models.py", "/apps/brand/pagination.py", "/apps/brand/serializers.py", "/apps/brand/service.py"], "/apps/integration/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/check/models.py"], "/apps/notifications/models.py": ["/apps/account/models.py", "/core/constants.py", "/apps/setting/models.py"], "/apps/info/migrations/0006_auto_20210408_1352.py": ["/core/utils.py"], "/apps/account/views.py": ["/apps/account/custom_openapi.py", "/apps/account/models.py", "/apps/account/serializers.py", "/apps/account/service.py", "/apps/integration/service.py"], "/apps/notifications/admin.py": ["/apps/notifications/models.py"], "/apps/account/serializers.py": ["/apps/account/models.py", "/apps/account/service.py", "/apps/integration/service.py", "/apps/notifications/models.py", "/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/forms.py": ["/apps/brand/models.py"], 
"/apps/account/custom_openapi.py": ["/apps/account/serializers.py"], "/apps/account/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/integration/urls.py": ["/apps/integration/views.py"], "/apps/setting/views.py": ["/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/migrations/0001_initial.py": ["/core/utils.py"], "/apps/brand/serializers.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/integration/service.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/integration/serializers.py", "/apps/setting/models.py"], "/apps/check/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/brand/serializers.py", "/apps/check/models.py", "/apps/setting/models.py", "/apps/notifications/models.py"], "/apps/account/tests/factories.py": ["/apps/account/models.py"], "/apps/info/tests/test_views.py": ["/apps/info/models.py", "/apps/info/tests/factories.py"], "/apps/notifications/views.py": ["/apps/notifications/models.py", "/apps/notifications/pagination.py", "/apps/notifications/serializers.py"], "/apps/check/service.py": ["/apps/notifications/tasks.py"], "/apps/setting/serializers.py": ["/apps/setting/models.py"], "/apps/info/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/setting/tests/test_views.py": ["/apps/setting/models.py", "/apps/setting/tests/factories.py"]}
|
6,517
|
TimurAbdymazhinov/adeliya-backend
|
refs/heads/main
|
/apps/notifications/migrations/0002_auto_20210428_1228.py
|
# Generated by Django 2.2.16 on 2021-04-28 06:28
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):
    """Replace Notification.linked_object_id with two explicit one-to-one links.

    Adds nullable ``linked_article`` (-> info.PromotionAndNews) and
    ``linked_check`` (-> check.Check), both exposed on the target model as
    ``notice``.

    NOTE(review): RemoveField runs before the AddFields, so any data held in
    ``linked_object_id`` is dropped, not migrated — confirm this was intended.
    """

    dependencies = [
        ('info', '0008_auto_20210428_1228'),
        ('check', '0003_auto_20210426_1106'),
        ('notifications', '0001_initial'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='notification',
            name='linked_object_id',
        ),
        migrations.AddField(
            model_name='notification',
            name='linked_article',
            # CASCADE: deleting the article also deletes its notification.
            field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='notice', to='info.PromotionAndNews', verbose_name='Новость или Акция'),
        ),
        migrations.AddField(
            model_name='notification',
            name='linked_check',
            field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='notice', to='check.Check', verbose_name='Чек'),
        ),
    ]
|
{"/apps/info/views.py": ["/apps/brand/pagination.py", "/core/constants.py", "/apps/info/models.py", "/apps/info/serializers.py", "/apps/notifications/service.py"], "/apps/notifications/serializers.py": ["/apps/notifications/models.py"], "/apps/brand/urls.py": ["/apps/brand/views.py"], "/apps/check/urls.py": ["/apps/check/views.py"], "/apps/account/urls.py": ["/apps/account/views.py"], "/apps/check/admin.py": ["/apps/check/models.py"], "/apps/info/migrations/0001_initial.py": ["/core/utils.py"], "/apps/setting/urls.py": ["/apps/setting/views.py"], "/apps/info/tests/factories.py": ["/apps/info/models.py", "/core/constants.py"], "/apps/brand/service.py": ["/apps/brand/models.py", "/core/constants.py"], "/apps/info/urls.py": ["/apps/info/views.py"], "/apps/brand/tests/test_views.py": ["/apps/brand/models.py"], "/apps/setting/tests/test_middleware.py": ["/apps/setting/tests/factories.py"], "/apps/brand/admin.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/check/models.py": ["/apps/account/models.py", "/apps/brand/models.py", "/core/constants.py"], "/apps/brand/models.py": ["/core/utils.py", "/core/constants.py"], "/apps/brand/migrations/0005_auto_20210405_1740.py": ["/core/utils.py"], "/apps/integration/views.py": ["/apps/check/models.py", "/apps/integration/serializers.py", "/apps/integration/service.py", "/apps/check/service.py"], "/apps/brand/tests/factories.py": ["/apps/brand/models.py"], "/apps/account/tests/test_views.py": ["/apps/account/service.py", "/apps/account/tests/factories.py", "/apps/account/tests/mymock.py", "/core/constants.py"], "/apps/info/admin.py": ["/apps/brand/admin.py", "/apps/info/models.py", "/apps/notifications/tasks.py"], "/apps/notifications/service.py": ["/core/constants.py", "/apps/check/models.py", "/apps/notifications/models.py", "/apps/notifications/tasks.py"], "/apps/check/tests/test_views.py": ["/apps/account/tests/factories.py"], "/apps/setting/middleware.py": ["/apps/setting/service.py"], 
"/apps/notifications/tasks.py": ["/apps/notifications/models.py", "/apps/notifications/service.py", "/core/constants.py", "/apps/account/models.py"], "/apps/check/filters.py": ["/apps/check/models.py"], "/apps/notifications/pagination.py": ["/apps/notifications/service.py"], "/apps/account/service.py": ["/apps/integration/service.py", "/apps/notifications/tasks.py", "/apps/account/serializers.py"], "/apps/setting/tests/factories.py": ["/apps/setting/models.py"], "/apps/check/views.py": ["/apps/account/custom_openapi.py", "/apps/account/serializers.py", "/apps/brand/pagination.py", "/apps/check/models.py", "/apps/check/serializers.py", "/apps/check/service.py", "/apps/check/filters.py", "/apps/notifications/service.py"], "/apps/info/serializers.py": ["/apps/info/models.py", "/apps/notifications/models.py"], "/apps/setting/admin.py": ["/apps/setting/models.py"], "/apps/setting/service.py": ["/apps/setting/models.py"], "/apps/notifications/urls.py": ["/apps/notifications/views.py"], "/apps/brand/views.py": ["/apps/brand/__init__.py", "/apps/brand/models.py", "/apps/brand/pagination.py", "/apps/brand/serializers.py", "/apps/brand/service.py"], "/apps/integration/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/check/models.py"], "/apps/notifications/models.py": ["/apps/account/models.py", "/core/constants.py", "/apps/setting/models.py"], "/apps/info/migrations/0006_auto_20210408_1352.py": ["/core/utils.py"], "/apps/account/views.py": ["/apps/account/custom_openapi.py", "/apps/account/models.py", "/apps/account/serializers.py", "/apps/account/service.py", "/apps/integration/service.py"], "/apps/notifications/admin.py": ["/apps/notifications/models.py"], "/apps/account/serializers.py": ["/apps/account/models.py", "/apps/account/service.py", "/apps/integration/service.py", "/apps/notifications/models.py", "/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/forms.py": ["/apps/brand/models.py"], 
"/apps/account/custom_openapi.py": ["/apps/account/serializers.py"], "/apps/account/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/integration/urls.py": ["/apps/integration/views.py"], "/apps/setting/views.py": ["/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/migrations/0001_initial.py": ["/core/utils.py"], "/apps/brand/serializers.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/integration/service.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/integration/serializers.py", "/apps/setting/models.py"], "/apps/check/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/brand/serializers.py", "/apps/check/models.py", "/apps/setting/models.py", "/apps/notifications/models.py"], "/apps/account/tests/factories.py": ["/apps/account/models.py"], "/apps/info/tests/test_views.py": ["/apps/info/models.py", "/apps/info/tests/factories.py"], "/apps/notifications/views.py": ["/apps/notifications/models.py", "/apps/notifications/pagination.py", "/apps/notifications/serializers.py"], "/apps/check/service.py": ["/apps/notifications/tasks.py"], "/apps/setting/serializers.py": ["/apps/setting/models.py"], "/apps/info/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/setting/tests/test_views.py": ["/apps/setting/models.py", "/apps/setting/tests/factories.py"]}
|
6,518
|
TimurAbdymazhinov/adeliya-backend
|
refs/heads/main
|
/apps/info/migrations/0002_auto_20210329_1451.py
|
# Generated by Django 2.2 on 2021-03-29 08:51
import ckeditor_uploader.fields
from django.db import migrations, models


class Migration(migrations.Migration):
    """Switch description fields to rich-text uploads; pin information_type choices.

    Converts ``programcondition.description`` and
    ``promotionandnews.description`` to ``RichTextUploadingField`` and fixes
    the news/promotion choice set on ``promotionandnews.information_type``.

    NOTE(review): the verbose_name on promotionandnews.description ("Текст
    программы лояльности" / loyalty-program text) looks copy-pasted from
    programcondition; it is a historical migration, so it is left untouched.
    """

    dependencies = [
        ('info', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='programcondition',
            name='description',
            field=ckeditor_uploader.fields.RichTextUploadingField(verbose_name='Текст программы лояльности'),
        ),
        migrations.AlterField(
            model_name='promotionandnews',
            name='description',
            field=ckeditor_uploader.fields.RichTextUploadingField(verbose_name='Текст программы лояльности'),
        ),
        migrations.AlterField(
            model_name='promotionandnews',
            name='information_type',
            field=models.CharField(choices=[('news', 'Новость'), ('promotion', 'Акция')], default='news', max_length=15, verbose_name='Тип записи'),
        ),
    ]
|
{"/apps/info/views.py": ["/apps/brand/pagination.py", "/core/constants.py", "/apps/info/models.py", "/apps/info/serializers.py", "/apps/notifications/service.py"], "/apps/notifications/serializers.py": ["/apps/notifications/models.py"], "/apps/brand/urls.py": ["/apps/brand/views.py"], "/apps/check/urls.py": ["/apps/check/views.py"], "/apps/account/urls.py": ["/apps/account/views.py"], "/apps/check/admin.py": ["/apps/check/models.py"], "/apps/info/migrations/0001_initial.py": ["/core/utils.py"], "/apps/setting/urls.py": ["/apps/setting/views.py"], "/apps/info/tests/factories.py": ["/apps/info/models.py", "/core/constants.py"], "/apps/brand/service.py": ["/apps/brand/models.py", "/core/constants.py"], "/apps/info/urls.py": ["/apps/info/views.py"], "/apps/brand/tests/test_views.py": ["/apps/brand/models.py"], "/apps/setting/tests/test_middleware.py": ["/apps/setting/tests/factories.py"], "/apps/brand/admin.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/check/models.py": ["/apps/account/models.py", "/apps/brand/models.py", "/core/constants.py"], "/apps/brand/models.py": ["/core/utils.py", "/core/constants.py"], "/apps/brand/migrations/0005_auto_20210405_1740.py": ["/core/utils.py"], "/apps/integration/views.py": ["/apps/check/models.py", "/apps/integration/serializers.py", "/apps/integration/service.py", "/apps/check/service.py"], "/apps/brand/tests/factories.py": ["/apps/brand/models.py"], "/apps/account/tests/test_views.py": ["/apps/account/service.py", "/apps/account/tests/factories.py", "/apps/account/tests/mymock.py", "/core/constants.py"], "/apps/info/admin.py": ["/apps/brand/admin.py", "/apps/info/models.py", "/apps/notifications/tasks.py"], "/apps/notifications/service.py": ["/core/constants.py", "/apps/check/models.py", "/apps/notifications/models.py", "/apps/notifications/tasks.py"], "/apps/check/tests/test_views.py": ["/apps/account/tests/factories.py"], "/apps/setting/middleware.py": ["/apps/setting/service.py"], 
"/apps/notifications/tasks.py": ["/apps/notifications/models.py", "/apps/notifications/service.py", "/core/constants.py", "/apps/account/models.py"], "/apps/check/filters.py": ["/apps/check/models.py"], "/apps/notifications/pagination.py": ["/apps/notifications/service.py"], "/apps/account/service.py": ["/apps/integration/service.py", "/apps/notifications/tasks.py", "/apps/account/serializers.py"], "/apps/setting/tests/factories.py": ["/apps/setting/models.py"], "/apps/check/views.py": ["/apps/account/custom_openapi.py", "/apps/account/serializers.py", "/apps/brand/pagination.py", "/apps/check/models.py", "/apps/check/serializers.py", "/apps/check/service.py", "/apps/check/filters.py", "/apps/notifications/service.py"], "/apps/info/serializers.py": ["/apps/info/models.py", "/apps/notifications/models.py"], "/apps/setting/admin.py": ["/apps/setting/models.py"], "/apps/setting/service.py": ["/apps/setting/models.py"], "/apps/notifications/urls.py": ["/apps/notifications/views.py"], "/apps/brand/views.py": ["/apps/brand/__init__.py", "/apps/brand/models.py", "/apps/brand/pagination.py", "/apps/brand/serializers.py", "/apps/brand/service.py"], "/apps/integration/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/check/models.py"], "/apps/notifications/models.py": ["/apps/account/models.py", "/core/constants.py", "/apps/setting/models.py"], "/apps/info/migrations/0006_auto_20210408_1352.py": ["/core/utils.py"], "/apps/account/views.py": ["/apps/account/custom_openapi.py", "/apps/account/models.py", "/apps/account/serializers.py", "/apps/account/service.py", "/apps/integration/service.py"], "/apps/notifications/admin.py": ["/apps/notifications/models.py"], "/apps/account/serializers.py": ["/apps/account/models.py", "/apps/account/service.py", "/apps/integration/service.py", "/apps/notifications/models.py", "/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/forms.py": ["/apps/brand/models.py"], 
"/apps/account/custom_openapi.py": ["/apps/account/serializers.py"], "/apps/account/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/integration/urls.py": ["/apps/integration/views.py"], "/apps/setting/views.py": ["/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/migrations/0001_initial.py": ["/core/utils.py"], "/apps/brand/serializers.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/integration/service.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/integration/serializers.py", "/apps/setting/models.py"], "/apps/check/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/brand/serializers.py", "/apps/check/models.py", "/apps/setting/models.py", "/apps/notifications/models.py"], "/apps/account/tests/factories.py": ["/apps/account/models.py"], "/apps/info/tests/test_views.py": ["/apps/info/models.py", "/apps/info/tests/factories.py"], "/apps/notifications/views.py": ["/apps/notifications/models.py", "/apps/notifications/pagination.py", "/apps/notifications/serializers.py"], "/apps/check/service.py": ["/apps/notifications/tasks.py"], "/apps/setting/serializers.py": ["/apps/setting/models.py"], "/apps/info/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/setting/tests/test_views.py": ["/apps/setting/models.py", "/apps/setting/tests/factories.py"]}
|
6,519
|
TimurAbdymazhinov/adeliya-backend
|
refs/heads/main
|
/apps/setting/migrations/0004_auto_20210505_1513.py
|
# Generated by Django 2.2.16 on 2021-05-05 09:13
from django.db import migrations, models
class Migration(migrations.Migration):
    """Allow NULL for AppVersion.ios_build_number.

    Auto-generated migration: relaxes the field so an app-version row can
    exist before an iOS build number is known.
    """
    dependencies = [
        ('setting', '0003_appversion_ios_build_number'),
    ]
    operations = [
        migrations.AlterField(
            model_name='appversion',
            name='ios_build_number',
            # null=True is the only change relative to 0003; verbose_name is
            # the Russian admin label and must stay as-is.
            field=models.PositiveIntegerField(null=True, verbose_name='Build версия для ios приложения'),
        ),
    ]
|
{"/apps/info/views.py": ["/apps/brand/pagination.py", "/core/constants.py", "/apps/info/models.py", "/apps/info/serializers.py", "/apps/notifications/service.py"], "/apps/notifications/serializers.py": ["/apps/notifications/models.py"], "/apps/brand/urls.py": ["/apps/brand/views.py"], "/apps/check/urls.py": ["/apps/check/views.py"], "/apps/account/urls.py": ["/apps/account/views.py"], "/apps/check/admin.py": ["/apps/check/models.py"], "/apps/info/migrations/0001_initial.py": ["/core/utils.py"], "/apps/setting/urls.py": ["/apps/setting/views.py"], "/apps/info/tests/factories.py": ["/apps/info/models.py", "/core/constants.py"], "/apps/brand/service.py": ["/apps/brand/models.py", "/core/constants.py"], "/apps/info/urls.py": ["/apps/info/views.py"], "/apps/brand/tests/test_views.py": ["/apps/brand/models.py"], "/apps/setting/tests/test_middleware.py": ["/apps/setting/tests/factories.py"], "/apps/brand/admin.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/check/models.py": ["/apps/account/models.py", "/apps/brand/models.py", "/core/constants.py"], "/apps/brand/models.py": ["/core/utils.py", "/core/constants.py"], "/apps/brand/migrations/0005_auto_20210405_1740.py": ["/core/utils.py"], "/apps/integration/views.py": ["/apps/check/models.py", "/apps/integration/serializers.py", "/apps/integration/service.py", "/apps/check/service.py"], "/apps/brand/tests/factories.py": ["/apps/brand/models.py"], "/apps/account/tests/test_views.py": ["/apps/account/service.py", "/apps/account/tests/factories.py", "/apps/account/tests/mymock.py", "/core/constants.py"], "/apps/info/admin.py": ["/apps/brand/admin.py", "/apps/info/models.py", "/apps/notifications/tasks.py"], "/apps/notifications/service.py": ["/core/constants.py", "/apps/check/models.py", "/apps/notifications/models.py", "/apps/notifications/tasks.py"], "/apps/check/tests/test_views.py": ["/apps/account/tests/factories.py"], "/apps/setting/middleware.py": ["/apps/setting/service.py"], 
"/apps/notifications/tasks.py": ["/apps/notifications/models.py", "/apps/notifications/service.py", "/core/constants.py", "/apps/account/models.py"], "/apps/check/filters.py": ["/apps/check/models.py"], "/apps/notifications/pagination.py": ["/apps/notifications/service.py"], "/apps/account/service.py": ["/apps/integration/service.py", "/apps/notifications/tasks.py", "/apps/account/serializers.py"], "/apps/setting/tests/factories.py": ["/apps/setting/models.py"], "/apps/check/views.py": ["/apps/account/custom_openapi.py", "/apps/account/serializers.py", "/apps/brand/pagination.py", "/apps/check/models.py", "/apps/check/serializers.py", "/apps/check/service.py", "/apps/check/filters.py", "/apps/notifications/service.py"], "/apps/info/serializers.py": ["/apps/info/models.py", "/apps/notifications/models.py"], "/apps/setting/admin.py": ["/apps/setting/models.py"], "/apps/setting/service.py": ["/apps/setting/models.py"], "/apps/notifications/urls.py": ["/apps/notifications/views.py"], "/apps/brand/views.py": ["/apps/brand/__init__.py", "/apps/brand/models.py", "/apps/brand/pagination.py", "/apps/brand/serializers.py", "/apps/brand/service.py"], "/apps/integration/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/check/models.py"], "/apps/notifications/models.py": ["/apps/account/models.py", "/core/constants.py", "/apps/setting/models.py"], "/apps/info/migrations/0006_auto_20210408_1352.py": ["/core/utils.py"], "/apps/account/views.py": ["/apps/account/custom_openapi.py", "/apps/account/models.py", "/apps/account/serializers.py", "/apps/account/service.py", "/apps/integration/service.py"], "/apps/notifications/admin.py": ["/apps/notifications/models.py"], "/apps/account/serializers.py": ["/apps/account/models.py", "/apps/account/service.py", "/apps/integration/service.py", "/apps/notifications/models.py", "/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/forms.py": ["/apps/brand/models.py"], 
"/apps/account/custom_openapi.py": ["/apps/account/serializers.py"], "/apps/account/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/integration/urls.py": ["/apps/integration/views.py"], "/apps/setting/views.py": ["/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/migrations/0001_initial.py": ["/core/utils.py"], "/apps/brand/serializers.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/integration/service.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/integration/serializers.py", "/apps/setting/models.py"], "/apps/check/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/brand/serializers.py", "/apps/check/models.py", "/apps/setting/models.py", "/apps/notifications/models.py"], "/apps/account/tests/factories.py": ["/apps/account/models.py"], "/apps/info/tests/test_views.py": ["/apps/info/models.py", "/apps/info/tests/factories.py"], "/apps/notifications/views.py": ["/apps/notifications/models.py", "/apps/notifications/pagination.py", "/apps/notifications/serializers.py"], "/apps/check/service.py": ["/apps/notifications/tasks.py"], "/apps/setting/serializers.py": ["/apps/setting/models.py"], "/apps/info/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/setting/tests/test_views.py": ["/apps/setting/models.py", "/apps/setting/tests/factories.py"]}
|
6,520
|
TimurAbdymazhinov/adeliya-backend
|
refs/heads/main
|
/apps/brand/__init__.py
|
# Explicitly select this app's AppConfig so custom ready() hooks are used.
# NOTE(review): default_app_config is deprecated since Django 3.2 — a single
# AppConfig subclass in apps.py is auto-detected, so this line can be removed
# once the project is on 3.2+; confirm the project's Django version first.
default_app_config = 'apps.brand.apps.BrandConfig'
|
{"/apps/info/views.py": ["/apps/brand/pagination.py", "/core/constants.py", "/apps/info/models.py", "/apps/info/serializers.py", "/apps/notifications/service.py"], "/apps/notifications/serializers.py": ["/apps/notifications/models.py"], "/apps/brand/urls.py": ["/apps/brand/views.py"], "/apps/check/urls.py": ["/apps/check/views.py"], "/apps/account/urls.py": ["/apps/account/views.py"], "/apps/check/admin.py": ["/apps/check/models.py"], "/apps/info/migrations/0001_initial.py": ["/core/utils.py"], "/apps/setting/urls.py": ["/apps/setting/views.py"], "/apps/info/tests/factories.py": ["/apps/info/models.py", "/core/constants.py"], "/apps/brand/service.py": ["/apps/brand/models.py", "/core/constants.py"], "/apps/info/urls.py": ["/apps/info/views.py"], "/apps/brand/tests/test_views.py": ["/apps/brand/models.py"], "/apps/setting/tests/test_middleware.py": ["/apps/setting/tests/factories.py"], "/apps/brand/admin.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/check/models.py": ["/apps/account/models.py", "/apps/brand/models.py", "/core/constants.py"], "/apps/brand/models.py": ["/core/utils.py", "/core/constants.py"], "/apps/brand/migrations/0005_auto_20210405_1740.py": ["/core/utils.py"], "/apps/integration/views.py": ["/apps/check/models.py", "/apps/integration/serializers.py", "/apps/integration/service.py", "/apps/check/service.py"], "/apps/brand/tests/factories.py": ["/apps/brand/models.py"], "/apps/account/tests/test_views.py": ["/apps/account/service.py", "/apps/account/tests/factories.py", "/apps/account/tests/mymock.py", "/core/constants.py"], "/apps/info/admin.py": ["/apps/brand/admin.py", "/apps/info/models.py", "/apps/notifications/tasks.py"], "/apps/notifications/service.py": ["/core/constants.py", "/apps/check/models.py", "/apps/notifications/models.py", "/apps/notifications/tasks.py"], "/apps/check/tests/test_views.py": ["/apps/account/tests/factories.py"], "/apps/setting/middleware.py": ["/apps/setting/service.py"], 
"/apps/notifications/tasks.py": ["/apps/notifications/models.py", "/apps/notifications/service.py", "/core/constants.py", "/apps/account/models.py"], "/apps/check/filters.py": ["/apps/check/models.py"], "/apps/notifications/pagination.py": ["/apps/notifications/service.py"], "/apps/account/service.py": ["/apps/integration/service.py", "/apps/notifications/tasks.py", "/apps/account/serializers.py"], "/apps/setting/tests/factories.py": ["/apps/setting/models.py"], "/apps/check/views.py": ["/apps/account/custom_openapi.py", "/apps/account/serializers.py", "/apps/brand/pagination.py", "/apps/check/models.py", "/apps/check/serializers.py", "/apps/check/service.py", "/apps/check/filters.py", "/apps/notifications/service.py"], "/apps/info/serializers.py": ["/apps/info/models.py", "/apps/notifications/models.py"], "/apps/setting/admin.py": ["/apps/setting/models.py"], "/apps/setting/service.py": ["/apps/setting/models.py"], "/apps/notifications/urls.py": ["/apps/notifications/views.py"], "/apps/brand/views.py": ["/apps/brand/__init__.py", "/apps/brand/models.py", "/apps/brand/pagination.py", "/apps/brand/serializers.py", "/apps/brand/service.py"], "/apps/integration/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/check/models.py"], "/apps/notifications/models.py": ["/apps/account/models.py", "/core/constants.py", "/apps/setting/models.py"], "/apps/info/migrations/0006_auto_20210408_1352.py": ["/core/utils.py"], "/apps/account/views.py": ["/apps/account/custom_openapi.py", "/apps/account/models.py", "/apps/account/serializers.py", "/apps/account/service.py", "/apps/integration/service.py"], "/apps/notifications/admin.py": ["/apps/notifications/models.py"], "/apps/account/serializers.py": ["/apps/account/models.py", "/apps/account/service.py", "/apps/integration/service.py", "/apps/notifications/models.py", "/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/forms.py": ["/apps/brand/models.py"], 
"/apps/account/custom_openapi.py": ["/apps/account/serializers.py"], "/apps/account/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/integration/urls.py": ["/apps/integration/views.py"], "/apps/setting/views.py": ["/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/migrations/0001_initial.py": ["/core/utils.py"], "/apps/brand/serializers.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/integration/service.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/integration/serializers.py", "/apps/setting/models.py"], "/apps/check/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/brand/serializers.py", "/apps/check/models.py", "/apps/setting/models.py", "/apps/notifications/models.py"], "/apps/account/tests/factories.py": ["/apps/account/models.py"], "/apps/info/tests/test_views.py": ["/apps/info/models.py", "/apps/info/tests/factories.py"], "/apps/notifications/views.py": ["/apps/notifications/models.py", "/apps/notifications/pagination.py", "/apps/notifications/serializers.py"], "/apps/check/service.py": ["/apps/notifications/tasks.py"], "/apps/setting/serializers.py": ["/apps/setting/models.py"], "/apps/info/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/setting/tests/test_views.py": ["/apps/setting/models.py", "/apps/setting/tests/factories.py"]}
|
6,521
|
TimurAbdymazhinov/adeliya-backend
|
refs/heads/main
|
/apps/info/tests/factories.py
|
from random import choice
import factory
from apps.info.models import (
Banner, ProgramCondition,
Contact, PromotionAndNews,
PromotionAndNewsImage, ContactIcon,
)
from core.constants import PROMOTION, NEWS
class ContactIconFactory(factory.django.DjangoModelFactory):
    """Test factory producing ContactIcon rows with a generated image."""
    class Meta:
        model = ContactIcon
    title = factory.Faker('name')
    # In-memory generated image — avoids shipping fixture files on disk.
    image = factory.django.ImageField(width=1024, height=768)
class ContactFactory(factory.django.DjangoModelFactory):
    """Test factory producing Contact rows linked to a generated icon."""
    class Meta:
        model = Contact
    title = factory.Faker('name')
    link = factory.Faker('url')
    # Builds the related ContactIcon automatically for each Contact.
    icon_image = factory.SubFactory(ContactIconFactory)
class BannerFactory(factory.django.DjangoModelFactory):
    """Test factory producing Banner rows with a generated image."""
    class Meta:
        model = Banner
    title = factory.Faker('name')
    image = factory.django.ImageField(width=1024, height=768)
    description = factory.Faker('text')
class ProgramConditionFactory(factory.django.DjangoModelFactory):
    """Test factory producing ProgramCondition rows."""
    class Meta:
        model = ProgramCondition
    title = factory.Faker('name')
    description = factory.Faker('text')
class PromotionAndNewsFactory(factory.django.DjangoModelFactory):
    """Test factory producing active PromotionAndNews rows.

    ``information_type`` is drawn at random from the two supported content
    kinds (PROMOTION / NEWS) for every generated instance.
    """
    class Meta:
        model = PromotionAndNews
    # LazyFunction is the correct declaration for a per-instance random
    # value; the previous Sequence(lambda x: ...) ignored its counter
    # argument, which is Sequence's whole purpose.
    information_type = factory.LazyFunction(lambda: choice([PROMOTION, NEWS]))
    created_at = factory.Faker('date_time')
    title = factory.Faker('city')
    description = factory.Faker('text')
    is_active = True
class PromotionAndNewsImageFactory(factory.django.DjangoModelFactory):
    """Test factory producing images attached to a PromotionAndNews row."""
    class Meta:
        model = PromotionAndNewsImage
    image = factory.django.ImageField(width=1024, height=768)
    # LazyFunction gives an independent random flag per instance; Sequence's
    # counter argument was ignored by the previous declaration.
    is_main = factory.LazyFunction(lambda: choice([True, False]))
    # Builds the parent PromotionAndNews row automatically.
    information = factory.SubFactory(PromotionAndNewsFactory)
|
{"/apps/info/views.py": ["/apps/brand/pagination.py", "/core/constants.py", "/apps/info/models.py", "/apps/info/serializers.py", "/apps/notifications/service.py"], "/apps/notifications/serializers.py": ["/apps/notifications/models.py"], "/apps/brand/urls.py": ["/apps/brand/views.py"], "/apps/check/urls.py": ["/apps/check/views.py"], "/apps/account/urls.py": ["/apps/account/views.py"], "/apps/check/admin.py": ["/apps/check/models.py"], "/apps/info/migrations/0001_initial.py": ["/core/utils.py"], "/apps/setting/urls.py": ["/apps/setting/views.py"], "/apps/info/tests/factories.py": ["/apps/info/models.py", "/core/constants.py"], "/apps/brand/service.py": ["/apps/brand/models.py", "/core/constants.py"], "/apps/info/urls.py": ["/apps/info/views.py"], "/apps/brand/tests/test_views.py": ["/apps/brand/models.py"], "/apps/setting/tests/test_middleware.py": ["/apps/setting/tests/factories.py"], "/apps/brand/admin.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/check/models.py": ["/apps/account/models.py", "/apps/brand/models.py", "/core/constants.py"], "/apps/brand/models.py": ["/core/utils.py", "/core/constants.py"], "/apps/brand/migrations/0005_auto_20210405_1740.py": ["/core/utils.py"], "/apps/integration/views.py": ["/apps/check/models.py", "/apps/integration/serializers.py", "/apps/integration/service.py", "/apps/check/service.py"], "/apps/brand/tests/factories.py": ["/apps/brand/models.py"], "/apps/account/tests/test_views.py": ["/apps/account/service.py", "/apps/account/tests/factories.py", "/apps/account/tests/mymock.py", "/core/constants.py"], "/apps/info/admin.py": ["/apps/brand/admin.py", "/apps/info/models.py", "/apps/notifications/tasks.py"], "/apps/notifications/service.py": ["/core/constants.py", "/apps/check/models.py", "/apps/notifications/models.py", "/apps/notifications/tasks.py"], "/apps/check/tests/test_views.py": ["/apps/account/tests/factories.py"], "/apps/setting/middleware.py": ["/apps/setting/service.py"], 
"/apps/notifications/tasks.py": ["/apps/notifications/models.py", "/apps/notifications/service.py", "/core/constants.py", "/apps/account/models.py"], "/apps/check/filters.py": ["/apps/check/models.py"], "/apps/notifications/pagination.py": ["/apps/notifications/service.py"], "/apps/account/service.py": ["/apps/integration/service.py", "/apps/notifications/tasks.py", "/apps/account/serializers.py"], "/apps/setting/tests/factories.py": ["/apps/setting/models.py"], "/apps/check/views.py": ["/apps/account/custom_openapi.py", "/apps/account/serializers.py", "/apps/brand/pagination.py", "/apps/check/models.py", "/apps/check/serializers.py", "/apps/check/service.py", "/apps/check/filters.py", "/apps/notifications/service.py"], "/apps/info/serializers.py": ["/apps/info/models.py", "/apps/notifications/models.py"], "/apps/setting/admin.py": ["/apps/setting/models.py"], "/apps/setting/service.py": ["/apps/setting/models.py"], "/apps/notifications/urls.py": ["/apps/notifications/views.py"], "/apps/brand/views.py": ["/apps/brand/__init__.py", "/apps/brand/models.py", "/apps/brand/pagination.py", "/apps/brand/serializers.py", "/apps/brand/service.py"], "/apps/integration/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/check/models.py"], "/apps/notifications/models.py": ["/apps/account/models.py", "/core/constants.py", "/apps/setting/models.py"], "/apps/info/migrations/0006_auto_20210408_1352.py": ["/core/utils.py"], "/apps/account/views.py": ["/apps/account/custom_openapi.py", "/apps/account/models.py", "/apps/account/serializers.py", "/apps/account/service.py", "/apps/integration/service.py"], "/apps/notifications/admin.py": ["/apps/notifications/models.py"], "/apps/account/serializers.py": ["/apps/account/models.py", "/apps/account/service.py", "/apps/integration/service.py", "/apps/notifications/models.py", "/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/forms.py": ["/apps/brand/models.py"], 
"/apps/account/custom_openapi.py": ["/apps/account/serializers.py"], "/apps/account/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/integration/urls.py": ["/apps/integration/views.py"], "/apps/setting/views.py": ["/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/migrations/0001_initial.py": ["/core/utils.py"], "/apps/brand/serializers.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/integration/service.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/integration/serializers.py", "/apps/setting/models.py"], "/apps/check/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/brand/serializers.py", "/apps/check/models.py", "/apps/setting/models.py", "/apps/notifications/models.py"], "/apps/account/tests/factories.py": ["/apps/account/models.py"], "/apps/info/tests/test_views.py": ["/apps/info/models.py", "/apps/info/tests/factories.py"], "/apps/notifications/views.py": ["/apps/notifications/models.py", "/apps/notifications/pagination.py", "/apps/notifications/serializers.py"], "/apps/check/service.py": ["/apps/notifications/tasks.py"], "/apps/setting/serializers.py": ["/apps/setting/models.py"], "/apps/info/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/setting/tests/test_views.py": ["/apps/setting/models.py", "/apps/setting/tests/factories.py"]}
|
6,522
|
TimurAbdymazhinov/adeliya-backend
|
refs/heads/main
|
/apps/brand/migrations/0009_auto_20210426_1104.py
|
# Generated by Django 2.2.16 on 2021-04-26 05:04
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    """Make Filial.filial_1c_code a unique, required CharField.

    Auto-generated migration.
    """
    dependencies = [
        ('brand', '0008_auto_20210415_1834'),
    ]
    operations = [
        migrations.AlterField(
            model_name='filial',
            name='filial_1c_code',
            # default=django.utils.timezone.now is the one-off default Django
            # recorded when the interactive migration prompt asked for a value
            # to backfill existing rows; preserve_default=False drops it from
            # the model state after the migration runs.
            field=models.CharField(default=django.utils.timezone.now, max_length=255, unique=True, verbose_name='Уникальный 1C код филиала'),
            preserve_default=False,
        ),
    ]
|
{"/apps/info/views.py": ["/apps/brand/pagination.py", "/core/constants.py", "/apps/info/models.py", "/apps/info/serializers.py", "/apps/notifications/service.py"], "/apps/notifications/serializers.py": ["/apps/notifications/models.py"], "/apps/brand/urls.py": ["/apps/brand/views.py"], "/apps/check/urls.py": ["/apps/check/views.py"], "/apps/account/urls.py": ["/apps/account/views.py"], "/apps/check/admin.py": ["/apps/check/models.py"], "/apps/info/migrations/0001_initial.py": ["/core/utils.py"], "/apps/setting/urls.py": ["/apps/setting/views.py"], "/apps/info/tests/factories.py": ["/apps/info/models.py", "/core/constants.py"], "/apps/brand/service.py": ["/apps/brand/models.py", "/core/constants.py"], "/apps/info/urls.py": ["/apps/info/views.py"], "/apps/brand/tests/test_views.py": ["/apps/brand/models.py"], "/apps/setting/tests/test_middleware.py": ["/apps/setting/tests/factories.py"], "/apps/brand/admin.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/check/models.py": ["/apps/account/models.py", "/apps/brand/models.py", "/core/constants.py"], "/apps/brand/models.py": ["/core/utils.py", "/core/constants.py"], "/apps/brand/migrations/0005_auto_20210405_1740.py": ["/core/utils.py"], "/apps/integration/views.py": ["/apps/check/models.py", "/apps/integration/serializers.py", "/apps/integration/service.py", "/apps/check/service.py"], "/apps/brand/tests/factories.py": ["/apps/brand/models.py"], "/apps/account/tests/test_views.py": ["/apps/account/service.py", "/apps/account/tests/factories.py", "/apps/account/tests/mymock.py", "/core/constants.py"], "/apps/info/admin.py": ["/apps/brand/admin.py", "/apps/info/models.py", "/apps/notifications/tasks.py"], "/apps/notifications/service.py": ["/core/constants.py", "/apps/check/models.py", "/apps/notifications/models.py", "/apps/notifications/tasks.py"], "/apps/check/tests/test_views.py": ["/apps/account/tests/factories.py"], "/apps/setting/middleware.py": ["/apps/setting/service.py"], 
"/apps/notifications/tasks.py": ["/apps/notifications/models.py", "/apps/notifications/service.py", "/core/constants.py", "/apps/account/models.py"], "/apps/check/filters.py": ["/apps/check/models.py"], "/apps/notifications/pagination.py": ["/apps/notifications/service.py"], "/apps/account/service.py": ["/apps/integration/service.py", "/apps/notifications/tasks.py", "/apps/account/serializers.py"], "/apps/setting/tests/factories.py": ["/apps/setting/models.py"], "/apps/check/views.py": ["/apps/account/custom_openapi.py", "/apps/account/serializers.py", "/apps/brand/pagination.py", "/apps/check/models.py", "/apps/check/serializers.py", "/apps/check/service.py", "/apps/check/filters.py", "/apps/notifications/service.py"], "/apps/info/serializers.py": ["/apps/info/models.py", "/apps/notifications/models.py"], "/apps/setting/admin.py": ["/apps/setting/models.py"], "/apps/setting/service.py": ["/apps/setting/models.py"], "/apps/notifications/urls.py": ["/apps/notifications/views.py"], "/apps/brand/views.py": ["/apps/brand/__init__.py", "/apps/brand/models.py", "/apps/brand/pagination.py", "/apps/brand/serializers.py", "/apps/brand/service.py"], "/apps/integration/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/check/models.py"], "/apps/notifications/models.py": ["/apps/account/models.py", "/core/constants.py", "/apps/setting/models.py"], "/apps/info/migrations/0006_auto_20210408_1352.py": ["/core/utils.py"], "/apps/account/views.py": ["/apps/account/custom_openapi.py", "/apps/account/models.py", "/apps/account/serializers.py", "/apps/account/service.py", "/apps/integration/service.py"], "/apps/notifications/admin.py": ["/apps/notifications/models.py"], "/apps/account/serializers.py": ["/apps/account/models.py", "/apps/account/service.py", "/apps/integration/service.py", "/apps/notifications/models.py", "/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/forms.py": ["/apps/brand/models.py"], 
"/apps/account/custom_openapi.py": ["/apps/account/serializers.py"], "/apps/account/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/integration/urls.py": ["/apps/integration/views.py"], "/apps/setting/views.py": ["/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/migrations/0001_initial.py": ["/core/utils.py"], "/apps/brand/serializers.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/integration/service.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/integration/serializers.py", "/apps/setting/models.py"], "/apps/check/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/brand/serializers.py", "/apps/check/models.py", "/apps/setting/models.py", "/apps/notifications/models.py"], "/apps/account/tests/factories.py": ["/apps/account/models.py"], "/apps/info/tests/test_views.py": ["/apps/info/models.py", "/apps/info/tests/factories.py"], "/apps/notifications/views.py": ["/apps/notifications/models.py", "/apps/notifications/pagination.py", "/apps/notifications/serializers.py"], "/apps/check/service.py": ["/apps/notifications/tasks.py"], "/apps/setting/serializers.py": ["/apps/setting/models.py"], "/apps/info/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/setting/tests/test_views.py": ["/apps/setting/models.py", "/apps/setting/tests/factories.py"]}
|
6,523
|
TimurAbdymazhinov/adeliya-backend
|
refs/heads/main
|
/apps/brand/service.py
|
from datetime import datetime
from rest_framework import exceptions
from haversine import haversine
from apps.brand.models import WorkTime
from core.constants import WEEKDAY
class FilialService:
    """
    Service for check filial status
    """
    @staticmethod
    def check_filial_status(filial_obj) -> bool:
        """Return True if the filial is open right now.

        A filial with no WorkTime row for today, or a row whose start/end
        times are unset (a day off), is reported as closed.
        NOTE(review): uses server-local time — confirm this matches the
        timezone the schedules are entered in.
        """
        time_now = datetime.now().time()
        # WorkTime.day is 1-based (Monday == 1); datetime.weekday() is 0-based.
        weekday_now = datetime.today().weekday() + 1
        work_time = WorkTime.objects.filter(filial=filial_obj, day=weekday_now).first()
        # Explicit None checks instead of catching TypeError/AttributeError:
        # covers both "no schedule row" and "day off" (unset times) without
        # masking unrelated errors.
        if work_time is None or work_time.start_work is None or work_time.end_work is None:
            return False
        return work_time.start_work <= time_now <= work_time.end_work

    @staticmethod
    def get_geolocation(request):
        """Extract the client's (lat, long) floats from query params.

        Returns None when either parameter is missing or empty; raises a
        DRF ValidationError when the values cannot be parsed as floats.
        """
        geo_lat = request.GET.get('lat')
        geo_long = request.GET.get('long')
        if geo_lat and geo_long:
            try:
                return float(geo_lat), float(geo_long)
            except (TypeError, ValueError):
                # Narrowed from a bare `except Exception`: only conversion
                # failures should map to a validation error.
                raise exceptions.ValidationError(
                    {'geolocation': 'Неправильный формат query param: lat long'}
                )
        return None

    @staticmethod
    def calculate_distance(filial_geolocation, client_geolocation):
        """Distance (km) between a filial ("lat,long" string) and the client.

        Returns None when the client's geolocation is unknown.
        """
        if not client_geolocation:
            return None
        filial_geolocation = tuple(map(float, filial_geolocation.split(',')))
        return haversine(filial_geolocation, client_geolocation)
class WorkDayService:
    """Create and serialize per-weekday WorkTime schedules for a filial."""

    @staticmethod
    def create_weekday(filial_obj):
        """Create the seven empty WorkTime rows (day 1..7) for a filial."""
        work_days = [WorkTime(day=day, filial=filial_obj) for day in range(1, 8)]
        WorkTime.objects.bulk_create(work_days)

    @classmethod
    def get_weekday(cls, filial_obj):
        """Return the filial's schedule formatted for the JSON response."""
        workdays = WorkTime.objects.filter(filial=filial_obj)
        return cls.get_work_time(workdays)

    @classmethod
    def get_work_time(cls, workdays):
        """Map WorkTime rows to display dicts and merge same-time days."""
        raw_data = cls.get_raw_data(workdays)
        return cls.sort_raw_data(raw_data)

    @staticmethod
    def get_raw_data(output_data):
        """Build one display dict per WorkTime row.

        Rows with an unset start or end time are rendered as a day off.
        """
        raw_data = []
        for item in output_data:
            is_day_off = item.start_work is None or item.end_work is None
            raw_data.append({
                'days': WEEKDAY[item.day],
                # [:5] keeps the HH:MM prefix of the time's str() form.
                'time': 'ВЫХ' if is_day_off
                        else f'{str(item.start_work)[:5]} - {str(item.end_work)[:5]}',
                'isWeekends': is_day_off,
            })
        return raw_data

    @staticmethod
    def sort_raw_data(raw_data):
        """Merge entries that share the same time string.

        The first entry seen for a given time accumulates the day labels of
        later entries with that time; merged entries are dropped. Entries
        whose day label already equals the anchor's are kept, matching the
        previous implementation. Replaces the old remove-while-iterating
        O(n^2) loop with a single linear pass.
        """
        merged = []
        first_by_time = {}
        for entry in raw_data:
            anchor = first_by_time.get(entry['time'])
            if anchor is None:
                first_by_time[entry['time']] = entry
                merged.append(entry)
            elif anchor['days'] != entry['days']:
                anchor['days'] += f" {entry['days']}"
            else:
                # Identical day label and time: kept, as before.
                merged.append(entry)
        return merged
|
{"/apps/info/views.py": ["/apps/brand/pagination.py", "/core/constants.py", "/apps/info/models.py", "/apps/info/serializers.py", "/apps/notifications/service.py"], "/apps/notifications/serializers.py": ["/apps/notifications/models.py"], "/apps/brand/urls.py": ["/apps/brand/views.py"], "/apps/check/urls.py": ["/apps/check/views.py"], "/apps/account/urls.py": ["/apps/account/views.py"], "/apps/check/admin.py": ["/apps/check/models.py"], "/apps/info/migrations/0001_initial.py": ["/core/utils.py"], "/apps/setting/urls.py": ["/apps/setting/views.py"], "/apps/info/tests/factories.py": ["/apps/info/models.py", "/core/constants.py"], "/apps/brand/service.py": ["/apps/brand/models.py", "/core/constants.py"], "/apps/info/urls.py": ["/apps/info/views.py"], "/apps/brand/tests/test_views.py": ["/apps/brand/models.py"], "/apps/setting/tests/test_middleware.py": ["/apps/setting/tests/factories.py"], "/apps/brand/admin.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/check/models.py": ["/apps/account/models.py", "/apps/brand/models.py", "/core/constants.py"], "/apps/brand/models.py": ["/core/utils.py", "/core/constants.py"], "/apps/brand/migrations/0005_auto_20210405_1740.py": ["/core/utils.py"], "/apps/integration/views.py": ["/apps/check/models.py", "/apps/integration/serializers.py", "/apps/integration/service.py", "/apps/check/service.py"], "/apps/brand/tests/factories.py": ["/apps/brand/models.py"], "/apps/account/tests/test_views.py": ["/apps/account/service.py", "/apps/account/tests/factories.py", "/apps/account/tests/mymock.py", "/core/constants.py"], "/apps/info/admin.py": ["/apps/brand/admin.py", "/apps/info/models.py", "/apps/notifications/tasks.py"], "/apps/notifications/service.py": ["/core/constants.py", "/apps/check/models.py", "/apps/notifications/models.py", "/apps/notifications/tasks.py"], "/apps/check/tests/test_views.py": ["/apps/account/tests/factories.py"], "/apps/setting/middleware.py": ["/apps/setting/service.py"], 
"/apps/notifications/tasks.py": ["/apps/notifications/models.py", "/apps/notifications/service.py", "/core/constants.py", "/apps/account/models.py"], "/apps/check/filters.py": ["/apps/check/models.py"], "/apps/notifications/pagination.py": ["/apps/notifications/service.py"], "/apps/account/service.py": ["/apps/integration/service.py", "/apps/notifications/tasks.py", "/apps/account/serializers.py"], "/apps/setting/tests/factories.py": ["/apps/setting/models.py"], "/apps/check/views.py": ["/apps/account/custom_openapi.py", "/apps/account/serializers.py", "/apps/brand/pagination.py", "/apps/check/models.py", "/apps/check/serializers.py", "/apps/check/service.py", "/apps/check/filters.py", "/apps/notifications/service.py"], "/apps/info/serializers.py": ["/apps/info/models.py", "/apps/notifications/models.py"], "/apps/setting/admin.py": ["/apps/setting/models.py"], "/apps/setting/service.py": ["/apps/setting/models.py"], "/apps/notifications/urls.py": ["/apps/notifications/views.py"], "/apps/brand/views.py": ["/apps/brand/__init__.py", "/apps/brand/models.py", "/apps/brand/pagination.py", "/apps/brand/serializers.py", "/apps/brand/service.py"], "/apps/integration/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/check/models.py"], "/apps/notifications/models.py": ["/apps/account/models.py", "/core/constants.py", "/apps/setting/models.py"], "/apps/info/migrations/0006_auto_20210408_1352.py": ["/core/utils.py"], "/apps/account/views.py": ["/apps/account/custom_openapi.py", "/apps/account/models.py", "/apps/account/serializers.py", "/apps/account/service.py", "/apps/integration/service.py"], "/apps/notifications/admin.py": ["/apps/notifications/models.py"], "/apps/account/serializers.py": ["/apps/account/models.py", "/apps/account/service.py", "/apps/integration/service.py", "/apps/notifications/models.py", "/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/forms.py": ["/apps/brand/models.py"], 
"/apps/account/custom_openapi.py": ["/apps/account/serializers.py"], "/apps/account/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/integration/urls.py": ["/apps/integration/views.py"], "/apps/setting/views.py": ["/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/migrations/0001_initial.py": ["/core/utils.py"], "/apps/brand/serializers.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/integration/service.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/integration/serializers.py", "/apps/setting/models.py"], "/apps/check/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/brand/serializers.py", "/apps/check/models.py", "/apps/setting/models.py", "/apps/notifications/models.py"], "/apps/account/tests/factories.py": ["/apps/account/models.py"], "/apps/info/tests/test_views.py": ["/apps/info/models.py", "/apps/info/tests/factories.py"], "/apps/notifications/views.py": ["/apps/notifications/models.py", "/apps/notifications/pagination.py", "/apps/notifications/serializers.py"], "/apps/check/service.py": ["/apps/notifications/tasks.py"], "/apps/setting/serializers.py": ["/apps/setting/models.py"], "/apps/info/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/setting/tests/test_views.py": ["/apps/setting/models.py", "/apps/setting/tests/factories.py"]}
|
6,524
|
TimurAbdymazhinov/adeliya-backend
|
refs/heads/main
|
/apps/info/urls.py
|
from django.urls import path
from .views import (
BannerAndPromotionAPIView,
BannerRetrieveAPIView,
ProgramConditionAPIView,
ContactListAPIView,
PromotionAndNewsListAPIView,
PromotionAndNewsRetrieveAPIView,
)
# Route table for the info app: promotion/news listing and detail,
# banner endpoints, the loyalty-program condition page, and contacts.
urlpatterns = [
    path('promotions-news/', PromotionAndNewsListAPIView.as_view(),
         name='promotions-news'),
    path('promotions-news/<int:pk>/', PromotionAndNewsRetrieveAPIView.as_view(),
         name='promotions-news-detail'),
    path('banner-promotions/', BannerAndPromotionAPIView.as_view(),
         name='banner-promotions'),
    path('banner/', BannerRetrieveAPIView.as_view(), name='banner-detail'),
    path('program-condition/', ProgramConditionAPIView.as_view(),
         name='program_condition'),
    path('contacts/', ContactListAPIView.as_view(), name='contacts'),
]
|
{"/apps/info/views.py": ["/apps/brand/pagination.py", "/core/constants.py", "/apps/info/models.py", "/apps/info/serializers.py", "/apps/notifications/service.py"], "/apps/notifications/serializers.py": ["/apps/notifications/models.py"], "/apps/brand/urls.py": ["/apps/brand/views.py"], "/apps/check/urls.py": ["/apps/check/views.py"], "/apps/account/urls.py": ["/apps/account/views.py"], "/apps/check/admin.py": ["/apps/check/models.py"], "/apps/info/migrations/0001_initial.py": ["/core/utils.py"], "/apps/setting/urls.py": ["/apps/setting/views.py"], "/apps/info/tests/factories.py": ["/apps/info/models.py", "/core/constants.py"], "/apps/brand/service.py": ["/apps/brand/models.py", "/core/constants.py"], "/apps/info/urls.py": ["/apps/info/views.py"], "/apps/brand/tests/test_views.py": ["/apps/brand/models.py"], "/apps/setting/tests/test_middleware.py": ["/apps/setting/tests/factories.py"], "/apps/brand/admin.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/check/models.py": ["/apps/account/models.py", "/apps/brand/models.py", "/core/constants.py"], "/apps/brand/models.py": ["/core/utils.py", "/core/constants.py"], "/apps/brand/migrations/0005_auto_20210405_1740.py": ["/core/utils.py"], "/apps/integration/views.py": ["/apps/check/models.py", "/apps/integration/serializers.py", "/apps/integration/service.py", "/apps/check/service.py"], "/apps/brand/tests/factories.py": ["/apps/brand/models.py"], "/apps/account/tests/test_views.py": ["/apps/account/service.py", "/apps/account/tests/factories.py", "/apps/account/tests/mymock.py", "/core/constants.py"], "/apps/info/admin.py": ["/apps/brand/admin.py", "/apps/info/models.py", "/apps/notifications/tasks.py"], "/apps/notifications/service.py": ["/core/constants.py", "/apps/check/models.py", "/apps/notifications/models.py", "/apps/notifications/tasks.py"], "/apps/check/tests/test_views.py": ["/apps/account/tests/factories.py"], "/apps/setting/middleware.py": ["/apps/setting/service.py"], 
"/apps/notifications/tasks.py": ["/apps/notifications/models.py", "/apps/notifications/service.py", "/core/constants.py", "/apps/account/models.py"], "/apps/check/filters.py": ["/apps/check/models.py"], "/apps/notifications/pagination.py": ["/apps/notifications/service.py"], "/apps/account/service.py": ["/apps/integration/service.py", "/apps/notifications/tasks.py", "/apps/account/serializers.py"], "/apps/setting/tests/factories.py": ["/apps/setting/models.py"], "/apps/check/views.py": ["/apps/account/custom_openapi.py", "/apps/account/serializers.py", "/apps/brand/pagination.py", "/apps/check/models.py", "/apps/check/serializers.py", "/apps/check/service.py", "/apps/check/filters.py", "/apps/notifications/service.py"], "/apps/info/serializers.py": ["/apps/info/models.py", "/apps/notifications/models.py"], "/apps/setting/admin.py": ["/apps/setting/models.py"], "/apps/setting/service.py": ["/apps/setting/models.py"], "/apps/notifications/urls.py": ["/apps/notifications/views.py"], "/apps/brand/views.py": ["/apps/brand/__init__.py", "/apps/brand/models.py", "/apps/brand/pagination.py", "/apps/brand/serializers.py", "/apps/brand/service.py"], "/apps/integration/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/check/models.py"], "/apps/notifications/models.py": ["/apps/account/models.py", "/core/constants.py", "/apps/setting/models.py"], "/apps/info/migrations/0006_auto_20210408_1352.py": ["/core/utils.py"], "/apps/account/views.py": ["/apps/account/custom_openapi.py", "/apps/account/models.py", "/apps/account/serializers.py", "/apps/account/service.py", "/apps/integration/service.py"], "/apps/notifications/admin.py": ["/apps/notifications/models.py"], "/apps/account/serializers.py": ["/apps/account/models.py", "/apps/account/service.py", "/apps/integration/service.py", "/apps/notifications/models.py", "/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/forms.py": ["/apps/brand/models.py"], 
"/apps/account/custom_openapi.py": ["/apps/account/serializers.py"], "/apps/account/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/integration/urls.py": ["/apps/integration/views.py"], "/apps/setting/views.py": ["/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/migrations/0001_initial.py": ["/core/utils.py"], "/apps/brand/serializers.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/integration/service.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/integration/serializers.py", "/apps/setting/models.py"], "/apps/check/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/brand/serializers.py", "/apps/check/models.py", "/apps/setting/models.py", "/apps/notifications/models.py"], "/apps/account/tests/factories.py": ["/apps/account/models.py"], "/apps/info/tests/test_views.py": ["/apps/info/models.py", "/apps/info/tests/factories.py"], "/apps/notifications/views.py": ["/apps/notifications/models.py", "/apps/notifications/pagination.py", "/apps/notifications/serializers.py"], "/apps/check/service.py": ["/apps/notifications/tasks.py"], "/apps/setting/serializers.py": ["/apps/setting/models.py"], "/apps/info/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/setting/tests/test_views.py": ["/apps/setting/models.py", "/apps/setting/tests/factories.py"]}
|
6,525
|
TimurAbdymazhinov/adeliya-backend
|
refs/heads/main
|
/apps/brand/migrations/0011_auto_20210601_1549.py
|
# Generated by Django 2.2.16 on 2021-06-01 09:49
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Replace the Filial model's flat working-hours fields with a
    per-weekday WorkTime model.

    Drops ``start_work``/``end_work``/``is_around_the_clock`` from
    ``Filial`` and introduces ``WorkTime`` rows (one per weekday per
    filial, enforced by the unique_together constraint).
    """
    dependencies = [
        ('brand', '0010_auto_20210512_1145'),
    ]
    operations = [
        # Remove the old single start/end times stored directly on Filial.
        migrations.RemoveField(
            model_name='filial',
            name='end_work',
        ),
        migrations.RemoveField(
            model_name='filial',
            name='is_around_the_clock',
        ),
        migrations.RemoveField(
            model_name='filial',
            name='start_work',
        ),
        # New per-day schedule: day-of-week choices 1..7 (Mon..Sun,
        # Russian abbreviations), nullable start/end times, and a
        # CASCADE FK back to Filial exposed as ``works_time``.
        migrations.CreateModel(
            name='WorkTime',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('day', models.SmallIntegerField(choices=[(1, 'ПН'), (2, 'ВТ'), (3, 'СР'), (4, 'ЧТ'), (5, 'ПТ'), (6, 'СБ'), (7, 'ВС')], verbose_name='День недели')),
                ('start_work', models.TimeField(blank=True, null=True, verbose_name='Начало рабочего времени')),
                ('end_work', models.TimeField(blank=True, null=True, verbose_name='Конец рабочего времени')),
                ('filial', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='works_time', to='brand.Filial', verbose_name='Филиал')),
            ],
            options={
                'verbose_name': 'Рабочий день',
                'verbose_name_plural': 'Рабочие дни',
                'ordering': ['day'],
                # At most one WorkTime row per weekday per filial.
                'unique_together': {('day', 'filial')},
            },
        ),
    ]
|
{"/apps/info/views.py": ["/apps/brand/pagination.py", "/core/constants.py", "/apps/info/models.py", "/apps/info/serializers.py", "/apps/notifications/service.py"], "/apps/notifications/serializers.py": ["/apps/notifications/models.py"], "/apps/brand/urls.py": ["/apps/brand/views.py"], "/apps/check/urls.py": ["/apps/check/views.py"], "/apps/account/urls.py": ["/apps/account/views.py"], "/apps/check/admin.py": ["/apps/check/models.py"], "/apps/info/migrations/0001_initial.py": ["/core/utils.py"], "/apps/setting/urls.py": ["/apps/setting/views.py"], "/apps/info/tests/factories.py": ["/apps/info/models.py", "/core/constants.py"], "/apps/brand/service.py": ["/apps/brand/models.py", "/core/constants.py"], "/apps/info/urls.py": ["/apps/info/views.py"], "/apps/brand/tests/test_views.py": ["/apps/brand/models.py"], "/apps/setting/tests/test_middleware.py": ["/apps/setting/tests/factories.py"], "/apps/brand/admin.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/check/models.py": ["/apps/account/models.py", "/apps/brand/models.py", "/core/constants.py"], "/apps/brand/models.py": ["/core/utils.py", "/core/constants.py"], "/apps/brand/migrations/0005_auto_20210405_1740.py": ["/core/utils.py"], "/apps/integration/views.py": ["/apps/check/models.py", "/apps/integration/serializers.py", "/apps/integration/service.py", "/apps/check/service.py"], "/apps/brand/tests/factories.py": ["/apps/brand/models.py"], "/apps/account/tests/test_views.py": ["/apps/account/service.py", "/apps/account/tests/factories.py", "/apps/account/tests/mymock.py", "/core/constants.py"], "/apps/info/admin.py": ["/apps/brand/admin.py", "/apps/info/models.py", "/apps/notifications/tasks.py"], "/apps/notifications/service.py": ["/core/constants.py", "/apps/check/models.py", "/apps/notifications/models.py", "/apps/notifications/tasks.py"], "/apps/check/tests/test_views.py": ["/apps/account/tests/factories.py"], "/apps/setting/middleware.py": ["/apps/setting/service.py"], 
"/apps/notifications/tasks.py": ["/apps/notifications/models.py", "/apps/notifications/service.py", "/core/constants.py", "/apps/account/models.py"], "/apps/check/filters.py": ["/apps/check/models.py"], "/apps/notifications/pagination.py": ["/apps/notifications/service.py"], "/apps/account/service.py": ["/apps/integration/service.py", "/apps/notifications/tasks.py", "/apps/account/serializers.py"], "/apps/setting/tests/factories.py": ["/apps/setting/models.py"], "/apps/check/views.py": ["/apps/account/custom_openapi.py", "/apps/account/serializers.py", "/apps/brand/pagination.py", "/apps/check/models.py", "/apps/check/serializers.py", "/apps/check/service.py", "/apps/check/filters.py", "/apps/notifications/service.py"], "/apps/info/serializers.py": ["/apps/info/models.py", "/apps/notifications/models.py"], "/apps/setting/admin.py": ["/apps/setting/models.py"], "/apps/setting/service.py": ["/apps/setting/models.py"], "/apps/notifications/urls.py": ["/apps/notifications/views.py"], "/apps/brand/views.py": ["/apps/brand/__init__.py", "/apps/brand/models.py", "/apps/brand/pagination.py", "/apps/brand/serializers.py", "/apps/brand/service.py"], "/apps/integration/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/check/models.py"], "/apps/notifications/models.py": ["/apps/account/models.py", "/core/constants.py", "/apps/setting/models.py"], "/apps/info/migrations/0006_auto_20210408_1352.py": ["/core/utils.py"], "/apps/account/views.py": ["/apps/account/custom_openapi.py", "/apps/account/models.py", "/apps/account/serializers.py", "/apps/account/service.py", "/apps/integration/service.py"], "/apps/notifications/admin.py": ["/apps/notifications/models.py"], "/apps/account/serializers.py": ["/apps/account/models.py", "/apps/account/service.py", "/apps/integration/service.py", "/apps/notifications/models.py", "/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/forms.py": ["/apps/brand/models.py"], 
"/apps/account/custom_openapi.py": ["/apps/account/serializers.py"], "/apps/account/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/integration/urls.py": ["/apps/integration/views.py"], "/apps/setting/views.py": ["/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/migrations/0001_initial.py": ["/core/utils.py"], "/apps/brand/serializers.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/integration/service.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/integration/serializers.py", "/apps/setting/models.py"], "/apps/check/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/brand/serializers.py", "/apps/check/models.py", "/apps/setting/models.py", "/apps/notifications/models.py"], "/apps/account/tests/factories.py": ["/apps/account/models.py"], "/apps/info/tests/test_views.py": ["/apps/info/models.py", "/apps/info/tests/factories.py"], "/apps/notifications/views.py": ["/apps/notifications/models.py", "/apps/notifications/pagination.py", "/apps/notifications/serializers.py"], "/apps/check/service.py": ["/apps/notifications/tasks.py"], "/apps/setting/serializers.py": ["/apps/setting/models.py"], "/apps/info/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/setting/tests/test_views.py": ["/apps/setting/models.py", "/apps/setting/tests/factories.py"]}
|
6,526
|
TimurAbdymazhinov/adeliya-backend
|
refs/heads/main
|
/core/constants.py
|
# Application-wide constants: choice tuples for model fields and
# shared lookup tables.

# Gender choices.
MALE = 'male'
FEMALE = 'female'
GENDER_TYPE = (
    (MALE, 'Мужской'),
    (FEMALE, 'Женский')
)

# Information-feed entry kinds (news vs. promotion).
NEWS = 'news'
PROMOTION = 'promotion'
INFORMATION_TYPE = (
    (NEWS, 'Новость'),
    (PROMOTION, 'Акция'),
)

# Bonus-operation kinds.  NOTE: the WITHDRAW constant's *value* is the
# past-tense 'withdrawn' — kept as-is because it is persisted data.
ACCRUED = 'accrued'
WITHDRAW = 'withdrawn'
ACCRUED_AND_WITHDRAW = 'accrued_and_withdrawn'

# Notification kinds cover both bonus operations and feed entries.
NOTIFICATION_TYPE = (
    (ACCRUED, 'Начислено бонусов'),
    (WITHDRAW, 'Снято бонусов'),
    (ACCRUED_AND_WITHDRAW, 'Начислено и снято'),
    (PROMOTION, 'Акции'),
    (NEWS, 'Новости')
)

CHECK_TYPE = (
    (ACCRUED, 'Начислено'),
    (WITHDRAW, 'Снято'),
    (ACCRUED_AND_WITHDRAW, 'Начислено и снято'),
)

# User-facing message shown for an unpaid debt.
DUE_DATE_CHECK_MESSAGE = 'У вас есть неоплаченный долг'

# Russian month names keyed by zero-padded month number ('01'..'12').
MONTH_NAMES = {
    '01': 'Январь',
    '02': 'Февраль',
    '03': 'Март',
    '04': 'Апрель',
    '05': 'Май',
    '06': 'Июнь',
    '07': 'Июль',
    '08': 'Август',
    '09': 'Сентябрь',
    '10': 'Октябрь',
    '11': 'Ноябрь',
    '12': 'Декабрь',
}

# Weekday numbers, Monday == 1 .. Sunday == 7 (ISO order).
MONDAY = 1
TUESDAY = 2
WEDNESDAY = 3
THURSDAY = 4
FRIDAY = 5
SATURDAY = 6
SUNDAY = 7

# Choices tuple for model fields: (weekday number, Russian abbreviation).
WORK_DAYS = (
    (MONDAY, 'ПН'),
    (TUESDAY, 'ВТ'),
    (WEDNESDAY, 'СР'),
    (THURSDAY, 'ЧТ'),
    (FRIDAY, 'ПТ'),
    (SATURDAY, 'СБ'),
    (SUNDAY, 'ВС'),
)

# Same mapping as a dict for direct number->name lookups.  Derived from
# WORK_DAYS so the two can never drift out of sync.
WEEKDAY = dict(WORK_DAYS)
|
{"/apps/info/views.py": ["/apps/brand/pagination.py", "/core/constants.py", "/apps/info/models.py", "/apps/info/serializers.py", "/apps/notifications/service.py"], "/apps/notifications/serializers.py": ["/apps/notifications/models.py"], "/apps/brand/urls.py": ["/apps/brand/views.py"], "/apps/check/urls.py": ["/apps/check/views.py"], "/apps/account/urls.py": ["/apps/account/views.py"], "/apps/check/admin.py": ["/apps/check/models.py"], "/apps/info/migrations/0001_initial.py": ["/core/utils.py"], "/apps/setting/urls.py": ["/apps/setting/views.py"], "/apps/info/tests/factories.py": ["/apps/info/models.py", "/core/constants.py"], "/apps/brand/service.py": ["/apps/brand/models.py", "/core/constants.py"], "/apps/info/urls.py": ["/apps/info/views.py"], "/apps/brand/tests/test_views.py": ["/apps/brand/models.py"], "/apps/setting/tests/test_middleware.py": ["/apps/setting/tests/factories.py"], "/apps/brand/admin.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/check/models.py": ["/apps/account/models.py", "/apps/brand/models.py", "/core/constants.py"], "/apps/brand/models.py": ["/core/utils.py", "/core/constants.py"], "/apps/brand/migrations/0005_auto_20210405_1740.py": ["/core/utils.py"], "/apps/integration/views.py": ["/apps/check/models.py", "/apps/integration/serializers.py", "/apps/integration/service.py", "/apps/check/service.py"], "/apps/brand/tests/factories.py": ["/apps/brand/models.py"], "/apps/account/tests/test_views.py": ["/apps/account/service.py", "/apps/account/tests/factories.py", "/apps/account/tests/mymock.py", "/core/constants.py"], "/apps/info/admin.py": ["/apps/brand/admin.py", "/apps/info/models.py", "/apps/notifications/tasks.py"], "/apps/notifications/service.py": ["/core/constants.py", "/apps/check/models.py", "/apps/notifications/models.py", "/apps/notifications/tasks.py"], "/apps/check/tests/test_views.py": ["/apps/account/tests/factories.py"], "/apps/setting/middleware.py": ["/apps/setting/service.py"], 
"/apps/notifications/tasks.py": ["/apps/notifications/models.py", "/apps/notifications/service.py", "/core/constants.py", "/apps/account/models.py"], "/apps/check/filters.py": ["/apps/check/models.py"], "/apps/notifications/pagination.py": ["/apps/notifications/service.py"], "/apps/account/service.py": ["/apps/integration/service.py", "/apps/notifications/tasks.py", "/apps/account/serializers.py"], "/apps/setting/tests/factories.py": ["/apps/setting/models.py"], "/apps/check/views.py": ["/apps/account/custom_openapi.py", "/apps/account/serializers.py", "/apps/brand/pagination.py", "/apps/check/models.py", "/apps/check/serializers.py", "/apps/check/service.py", "/apps/check/filters.py", "/apps/notifications/service.py"], "/apps/info/serializers.py": ["/apps/info/models.py", "/apps/notifications/models.py"], "/apps/setting/admin.py": ["/apps/setting/models.py"], "/apps/setting/service.py": ["/apps/setting/models.py"], "/apps/notifications/urls.py": ["/apps/notifications/views.py"], "/apps/brand/views.py": ["/apps/brand/__init__.py", "/apps/brand/models.py", "/apps/brand/pagination.py", "/apps/brand/serializers.py", "/apps/brand/service.py"], "/apps/integration/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/check/models.py"], "/apps/notifications/models.py": ["/apps/account/models.py", "/core/constants.py", "/apps/setting/models.py"], "/apps/info/migrations/0006_auto_20210408_1352.py": ["/core/utils.py"], "/apps/account/views.py": ["/apps/account/custom_openapi.py", "/apps/account/models.py", "/apps/account/serializers.py", "/apps/account/service.py", "/apps/integration/service.py"], "/apps/notifications/admin.py": ["/apps/notifications/models.py"], "/apps/account/serializers.py": ["/apps/account/models.py", "/apps/account/service.py", "/apps/integration/service.py", "/apps/notifications/models.py", "/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/forms.py": ["/apps/brand/models.py"], 
"/apps/account/custom_openapi.py": ["/apps/account/serializers.py"], "/apps/account/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/integration/urls.py": ["/apps/integration/views.py"], "/apps/setting/views.py": ["/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/migrations/0001_initial.py": ["/core/utils.py"], "/apps/brand/serializers.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/integration/service.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/integration/serializers.py", "/apps/setting/models.py"], "/apps/check/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/brand/serializers.py", "/apps/check/models.py", "/apps/setting/models.py", "/apps/notifications/models.py"], "/apps/account/tests/factories.py": ["/apps/account/models.py"], "/apps/info/tests/test_views.py": ["/apps/info/models.py", "/apps/info/tests/factories.py"], "/apps/notifications/views.py": ["/apps/notifications/models.py", "/apps/notifications/pagination.py", "/apps/notifications/serializers.py"], "/apps/check/service.py": ["/apps/notifications/tasks.py"], "/apps/setting/serializers.py": ["/apps/setting/models.py"], "/apps/info/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/setting/tests/test_views.py": ["/apps/setting/models.py", "/apps/setting/tests/factories.py"]}
|
6,527
|
TimurAbdymazhinov/adeliya-backend
|
refs/heads/main
|
/apps/account/migrations/0006_user_user_1c_code.py
|
# Generated by Django 2.2.16 on 2021-04-14 05:03
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the optional ``user_1C_code`` field to the User model.

    The field stores the user's unique identifier in the external 1C
    system; it is nullable/blank because existing users have no code.
    """
    dependencies = [
        ('account', '0005_user_is_old_phone_confirmed'),
    ]
    operations = [
        migrations.AddField(
            model_name='user',
            name='user_1C_code',
            field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Уникальный 1С код пользователя'),
        ),
    ]
|
{"/apps/info/views.py": ["/apps/brand/pagination.py", "/core/constants.py", "/apps/info/models.py", "/apps/info/serializers.py", "/apps/notifications/service.py"], "/apps/notifications/serializers.py": ["/apps/notifications/models.py"], "/apps/brand/urls.py": ["/apps/brand/views.py"], "/apps/check/urls.py": ["/apps/check/views.py"], "/apps/account/urls.py": ["/apps/account/views.py"], "/apps/check/admin.py": ["/apps/check/models.py"], "/apps/info/migrations/0001_initial.py": ["/core/utils.py"], "/apps/setting/urls.py": ["/apps/setting/views.py"], "/apps/info/tests/factories.py": ["/apps/info/models.py", "/core/constants.py"], "/apps/brand/service.py": ["/apps/brand/models.py", "/core/constants.py"], "/apps/info/urls.py": ["/apps/info/views.py"], "/apps/brand/tests/test_views.py": ["/apps/brand/models.py"], "/apps/setting/tests/test_middleware.py": ["/apps/setting/tests/factories.py"], "/apps/brand/admin.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/check/models.py": ["/apps/account/models.py", "/apps/brand/models.py", "/core/constants.py"], "/apps/brand/models.py": ["/core/utils.py", "/core/constants.py"], "/apps/brand/migrations/0005_auto_20210405_1740.py": ["/core/utils.py"], "/apps/integration/views.py": ["/apps/check/models.py", "/apps/integration/serializers.py", "/apps/integration/service.py", "/apps/check/service.py"], "/apps/brand/tests/factories.py": ["/apps/brand/models.py"], "/apps/account/tests/test_views.py": ["/apps/account/service.py", "/apps/account/tests/factories.py", "/apps/account/tests/mymock.py", "/core/constants.py"], "/apps/info/admin.py": ["/apps/brand/admin.py", "/apps/info/models.py", "/apps/notifications/tasks.py"], "/apps/notifications/service.py": ["/core/constants.py", "/apps/check/models.py", "/apps/notifications/models.py", "/apps/notifications/tasks.py"], "/apps/check/tests/test_views.py": ["/apps/account/tests/factories.py"], "/apps/setting/middleware.py": ["/apps/setting/service.py"], 
"/apps/notifications/tasks.py": ["/apps/notifications/models.py", "/apps/notifications/service.py", "/core/constants.py", "/apps/account/models.py"], "/apps/check/filters.py": ["/apps/check/models.py"], "/apps/notifications/pagination.py": ["/apps/notifications/service.py"], "/apps/account/service.py": ["/apps/integration/service.py", "/apps/notifications/tasks.py", "/apps/account/serializers.py"], "/apps/setting/tests/factories.py": ["/apps/setting/models.py"], "/apps/check/views.py": ["/apps/account/custom_openapi.py", "/apps/account/serializers.py", "/apps/brand/pagination.py", "/apps/check/models.py", "/apps/check/serializers.py", "/apps/check/service.py", "/apps/check/filters.py", "/apps/notifications/service.py"], "/apps/info/serializers.py": ["/apps/info/models.py", "/apps/notifications/models.py"], "/apps/setting/admin.py": ["/apps/setting/models.py"], "/apps/setting/service.py": ["/apps/setting/models.py"], "/apps/notifications/urls.py": ["/apps/notifications/views.py"], "/apps/brand/views.py": ["/apps/brand/__init__.py", "/apps/brand/models.py", "/apps/brand/pagination.py", "/apps/brand/serializers.py", "/apps/brand/service.py"], "/apps/integration/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/check/models.py"], "/apps/notifications/models.py": ["/apps/account/models.py", "/core/constants.py", "/apps/setting/models.py"], "/apps/info/migrations/0006_auto_20210408_1352.py": ["/core/utils.py"], "/apps/account/views.py": ["/apps/account/custom_openapi.py", "/apps/account/models.py", "/apps/account/serializers.py", "/apps/account/service.py", "/apps/integration/service.py"], "/apps/notifications/admin.py": ["/apps/notifications/models.py"], "/apps/account/serializers.py": ["/apps/account/models.py", "/apps/account/service.py", "/apps/integration/service.py", "/apps/notifications/models.py", "/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/forms.py": ["/apps/brand/models.py"], 
"/apps/account/custom_openapi.py": ["/apps/account/serializers.py"], "/apps/account/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/integration/urls.py": ["/apps/integration/views.py"], "/apps/setting/views.py": ["/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/migrations/0001_initial.py": ["/core/utils.py"], "/apps/brand/serializers.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/integration/service.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/integration/serializers.py", "/apps/setting/models.py"], "/apps/check/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/brand/serializers.py", "/apps/check/models.py", "/apps/setting/models.py", "/apps/notifications/models.py"], "/apps/account/tests/factories.py": ["/apps/account/models.py"], "/apps/info/tests/test_views.py": ["/apps/info/models.py", "/apps/info/tests/factories.py"], "/apps/notifications/views.py": ["/apps/notifications/models.py", "/apps/notifications/pagination.py", "/apps/notifications/serializers.py"], "/apps/check/service.py": ["/apps/notifications/tasks.py"], "/apps/setting/serializers.py": ["/apps/setting/models.py"], "/apps/info/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/setting/tests/test_views.py": ["/apps/setting/models.py", "/apps/setting/tests/factories.py"]}
|
6,528
|
TimurAbdymazhinov/adeliya-backend
|
refs/heads/main
|
/apps/info/__init__.py
|
# Tell Django which AppConfig class to load for this app
# (default_app_config convention; superseded in Django 3.2+).
default_app_config = 'apps.info.apps.InfoConfig'
|
{"/apps/info/views.py": ["/apps/brand/pagination.py", "/core/constants.py", "/apps/info/models.py", "/apps/info/serializers.py", "/apps/notifications/service.py"], "/apps/notifications/serializers.py": ["/apps/notifications/models.py"], "/apps/brand/urls.py": ["/apps/brand/views.py"], "/apps/check/urls.py": ["/apps/check/views.py"], "/apps/account/urls.py": ["/apps/account/views.py"], "/apps/check/admin.py": ["/apps/check/models.py"], "/apps/info/migrations/0001_initial.py": ["/core/utils.py"], "/apps/setting/urls.py": ["/apps/setting/views.py"], "/apps/info/tests/factories.py": ["/apps/info/models.py", "/core/constants.py"], "/apps/brand/service.py": ["/apps/brand/models.py", "/core/constants.py"], "/apps/info/urls.py": ["/apps/info/views.py"], "/apps/brand/tests/test_views.py": ["/apps/brand/models.py"], "/apps/setting/tests/test_middleware.py": ["/apps/setting/tests/factories.py"], "/apps/brand/admin.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/check/models.py": ["/apps/account/models.py", "/apps/brand/models.py", "/core/constants.py"], "/apps/brand/models.py": ["/core/utils.py", "/core/constants.py"], "/apps/brand/migrations/0005_auto_20210405_1740.py": ["/core/utils.py"], "/apps/integration/views.py": ["/apps/check/models.py", "/apps/integration/serializers.py", "/apps/integration/service.py", "/apps/check/service.py"], "/apps/brand/tests/factories.py": ["/apps/brand/models.py"], "/apps/account/tests/test_views.py": ["/apps/account/service.py", "/apps/account/tests/factories.py", "/apps/account/tests/mymock.py", "/core/constants.py"], "/apps/info/admin.py": ["/apps/brand/admin.py", "/apps/info/models.py", "/apps/notifications/tasks.py"], "/apps/notifications/service.py": ["/core/constants.py", "/apps/check/models.py", "/apps/notifications/models.py", "/apps/notifications/tasks.py"], "/apps/check/tests/test_views.py": ["/apps/account/tests/factories.py"], "/apps/setting/middleware.py": ["/apps/setting/service.py"], 
"/apps/notifications/tasks.py": ["/apps/notifications/models.py", "/apps/notifications/service.py", "/core/constants.py", "/apps/account/models.py"], "/apps/check/filters.py": ["/apps/check/models.py"], "/apps/notifications/pagination.py": ["/apps/notifications/service.py"], "/apps/account/service.py": ["/apps/integration/service.py", "/apps/notifications/tasks.py", "/apps/account/serializers.py"], "/apps/setting/tests/factories.py": ["/apps/setting/models.py"], "/apps/check/views.py": ["/apps/account/custom_openapi.py", "/apps/account/serializers.py", "/apps/brand/pagination.py", "/apps/check/models.py", "/apps/check/serializers.py", "/apps/check/service.py", "/apps/check/filters.py", "/apps/notifications/service.py"], "/apps/info/serializers.py": ["/apps/info/models.py", "/apps/notifications/models.py"], "/apps/setting/admin.py": ["/apps/setting/models.py"], "/apps/setting/service.py": ["/apps/setting/models.py"], "/apps/notifications/urls.py": ["/apps/notifications/views.py"], "/apps/brand/views.py": ["/apps/brand/__init__.py", "/apps/brand/models.py", "/apps/brand/pagination.py", "/apps/brand/serializers.py", "/apps/brand/service.py"], "/apps/integration/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/check/models.py"], "/apps/notifications/models.py": ["/apps/account/models.py", "/core/constants.py", "/apps/setting/models.py"], "/apps/info/migrations/0006_auto_20210408_1352.py": ["/core/utils.py"], "/apps/account/views.py": ["/apps/account/custom_openapi.py", "/apps/account/models.py", "/apps/account/serializers.py", "/apps/account/service.py", "/apps/integration/service.py"], "/apps/notifications/admin.py": ["/apps/notifications/models.py"], "/apps/account/serializers.py": ["/apps/account/models.py", "/apps/account/service.py", "/apps/integration/service.py", "/apps/notifications/models.py", "/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/forms.py": ["/apps/brand/models.py"], 
"/apps/account/custom_openapi.py": ["/apps/account/serializers.py"], "/apps/account/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/integration/urls.py": ["/apps/integration/views.py"], "/apps/setting/views.py": ["/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/migrations/0001_initial.py": ["/core/utils.py"], "/apps/brand/serializers.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/integration/service.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/integration/serializers.py", "/apps/setting/models.py"], "/apps/check/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/brand/serializers.py", "/apps/check/models.py", "/apps/setting/models.py", "/apps/notifications/models.py"], "/apps/account/tests/factories.py": ["/apps/account/models.py"], "/apps/info/tests/test_views.py": ["/apps/info/models.py", "/apps/info/tests/factories.py"], "/apps/notifications/views.py": ["/apps/notifications/models.py", "/apps/notifications/pagination.py", "/apps/notifications/serializers.py"], "/apps/check/service.py": ["/apps/notifications/tasks.py"], "/apps/setting/serializers.py": ["/apps/setting/models.py"], "/apps/info/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/setting/tests/test_views.py": ["/apps/setting/models.py", "/apps/setting/tests/factories.py"]}
|
6,529
|
TimurAbdymazhinov/adeliya-backend
|
refs/heads/main
|
/apps/brand/pagination.py
|
from rest_framework.response import Response
from rest_framework.pagination import PageNumberPagination
class LargeListPagination(PageNumberPagination):
    """Page-number pagination, 20 items per page.

    Unlike the DRF default, ``next``/``previous`` carry plain page
    numbers (or ``None``) rather than full URLs.
    """

    page_size = 20

    def get_paginated_response(self, data):
        """Wrap *data* with count and next/previous page numbers."""
        page = self.page
        next_number = page.next_page_number() if page.has_next() else None
        previous_number = (
            page.previous_page_number() if page.has_previous() else None
        )
        return Response({
            'count': page.paginator.count,
            'next': next_number,
            'previous': previous_number,
            'results': data,
        })
class SmallListPagination(PageNumberPagination):
    """Page-number pagination, 10 items per page.

    Response format mirrors LargeListPagination: ``next``/``previous``
    are plain page numbers (or ``None``), not URLs.
    """

    page_size = 10

    def get_paginated_response(self, data):
        """Wrap *data* with count and next/previous page numbers."""
        page = self.page
        next_number = page.next_page_number() if page.has_next() else None
        previous_number = (
            page.previous_page_number() if page.has_previous() else None
        )
        return Response({
            'count': page.paginator.count,
            'next': next_number,
            'previous': previous_number,
            'results': data,
        })
|
{"/apps/info/views.py": ["/apps/brand/pagination.py", "/core/constants.py", "/apps/info/models.py", "/apps/info/serializers.py", "/apps/notifications/service.py"], "/apps/notifications/serializers.py": ["/apps/notifications/models.py"], "/apps/brand/urls.py": ["/apps/brand/views.py"], "/apps/check/urls.py": ["/apps/check/views.py"], "/apps/account/urls.py": ["/apps/account/views.py"], "/apps/check/admin.py": ["/apps/check/models.py"], "/apps/info/migrations/0001_initial.py": ["/core/utils.py"], "/apps/setting/urls.py": ["/apps/setting/views.py"], "/apps/info/tests/factories.py": ["/apps/info/models.py", "/core/constants.py"], "/apps/brand/service.py": ["/apps/brand/models.py", "/core/constants.py"], "/apps/info/urls.py": ["/apps/info/views.py"], "/apps/brand/tests/test_views.py": ["/apps/brand/models.py"], "/apps/setting/tests/test_middleware.py": ["/apps/setting/tests/factories.py"], "/apps/brand/admin.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/check/models.py": ["/apps/account/models.py", "/apps/brand/models.py", "/core/constants.py"], "/apps/brand/models.py": ["/core/utils.py", "/core/constants.py"], "/apps/brand/migrations/0005_auto_20210405_1740.py": ["/core/utils.py"], "/apps/integration/views.py": ["/apps/check/models.py", "/apps/integration/serializers.py", "/apps/integration/service.py", "/apps/check/service.py"], "/apps/brand/tests/factories.py": ["/apps/brand/models.py"], "/apps/account/tests/test_views.py": ["/apps/account/service.py", "/apps/account/tests/factories.py", "/apps/account/tests/mymock.py", "/core/constants.py"], "/apps/info/admin.py": ["/apps/brand/admin.py", "/apps/info/models.py", "/apps/notifications/tasks.py"], "/apps/notifications/service.py": ["/core/constants.py", "/apps/check/models.py", "/apps/notifications/models.py", "/apps/notifications/tasks.py"], "/apps/check/tests/test_views.py": ["/apps/account/tests/factories.py"], "/apps/setting/middleware.py": ["/apps/setting/service.py"], 
"/apps/notifications/tasks.py": ["/apps/notifications/models.py", "/apps/notifications/service.py", "/core/constants.py", "/apps/account/models.py"], "/apps/check/filters.py": ["/apps/check/models.py"], "/apps/notifications/pagination.py": ["/apps/notifications/service.py"], "/apps/account/service.py": ["/apps/integration/service.py", "/apps/notifications/tasks.py", "/apps/account/serializers.py"], "/apps/setting/tests/factories.py": ["/apps/setting/models.py"], "/apps/check/views.py": ["/apps/account/custom_openapi.py", "/apps/account/serializers.py", "/apps/brand/pagination.py", "/apps/check/models.py", "/apps/check/serializers.py", "/apps/check/service.py", "/apps/check/filters.py", "/apps/notifications/service.py"], "/apps/info/serializers.py": ["/apps/info/models.py", "/apps/notifications/models.py"], "/apps/setting/admin.py": ["/apps/setting/models.py"], "/apps/setting/service.py": ["/apps/setting/models.py"], "/apps/notifications/urls.py": ["/apps/notifications/views.py"], "/apps/brand/views.py": ["/apps/brand/__init__.py", "/apps/brand/models.py", "/apps/brand/pagination.py", "/apps/brand/serializers.py", "/apps/brand/service.py"], "/apps/integration/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/check/models.py"], "/apps/notifications/models.py": ["/apps/account/models.py", "/core/constants.py", "/apps/setting/models.py"], "/apps/info/migrations/0006_auto_20210408_1352.py": ["/core/utils.py"], "/apps/account/views.py": ["/apps/account/custom_openapi.py", "/apps/account/models.py", "/apps/account/serializers.py", "/apps/account/service.py", "/apps/integration/service.py"], "/apps/notifications/admin.py": ["/apps/notifications/models.py"], "/apps/account/serializers.py": ["/apps/account/models.py", "/apps/account/service.py", "/apps/integration/service.py", "/apps/notifications/models.py", "/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/forms.py": ["/apps/brand/models.py"], 
"/apps/account/custom_openapi.py": ["/apps/account/serializers.py"], "/apps/account/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/integration/urls.py": ["/apps/integration/views.py"], "/apps/setting/views.py": ["/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/migrations/0001_initial.py": ["/core/utils.py"], "/apps/brand/serializers.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/integration/service.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/integration/serializers.py", "/apps/setting/models.py"], "/apps/check/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/brand/serializers.py", "/apps/check/models.py", "/apps/setting/models.py", "/apps/notifications/models.py"], "/apps/account/tests/factories.py": ["/apps/account/models.py"], "/apps/info/tests/test_views.py": ["/apps/info/models.py", "/apps/info/tests/factories.py"], "/apps/notifications/views.py": ["/apps/notifications/models.py", "/apps/notifications/pagination.py", "/apps/notifications/serializers.py"], "/apps/check/service.py": ["/apps/notifications/tasks.py"], "/apps/setting/serializers.py": ["/apps/setting/models.py"], "/apps/info/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/setting/tests/test_views.py": ["/apps/setting/models.py", "/apps/setting/tests/factories.py"]}
|
6,530
|
TimurAbdymazhinov/adeliya-backend
|
refs/heads/main
|
/apps/brand/tests/test_views.py
|
from collections import OrderedDict
from django.urls import reverse
from rest_framework import status
from rest_framework.exceptions import ErrorDetail
from rest_framework.test import APITestCase
from apps.brand.models import Brand
class TestBrandListAPIView(APITestCase):
    """Tests for the brand list endpoint (``v1:brand_list``)."""

    def setUp(self) -> None:
        # Two fixture brands, ids 1 and 2, with parallel field values.
        for pk in (1, 2):
            Brand.objects.create(
                id=pk,
                title=f"brand{pk}",
                logo=f"brand_logo{pk}.img",
                description=f"brand{pk} description",
                address=f"brand{pk} address",
                link='http://localhost:8000',
            )
        self.url = reverse('v1:brand_list')

    def test_get_brand_list(self):
        """Both brands are returned, logos as absolute test-server URLs."""
        response = self.client.get(self.url)
        expected_data = {
            'count': 2,
            'next': None,
            'previous': None,
            'results': [
                OrderedDict([
                    ('id', 1), ('logo', 'http://testserver/media/brand_logo1.img'),
                ]),
                OrderedDict([
                    ('id', 2), ('logo', 'http://testserver/media/brand_logo2.img'),
                ]),
            ],
        }
        self.assertEqual(response.data, expected_data)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_get_data_if_model_is_empty(self):
        """An empty table still yields a well-formed, empty page."""
        Brand.objects.all().delete()
        response = self.client.get(self.url)
        expected_data = {'count': 0, 'next': None, 'previous': None, 'results': []}
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data, expected_data)
class TestBrandRetrieveAPIView(APITestCase):
    """Tests for the brand detail endpoint (``v1:brand_detail``)."""

    def setUp(self) -> None:
        # Two fixture brands, ids 1 and 2, with parallel field values.
        for pk in (1, 2):
            Brand.objects.create(
                id=pk,
                title=f"brand{pk}",
                logo=f"brand_logo{pk}.img",
                description=f"brand{pk} description",
                address=f"brand{pk} address",
                link='http://localhost:8000',
            )

    def _get_detail(self, pk):
        # Issue a GET against the detail endpoint for the given brand id.
        return self.client.get(reverse('v1:brand_detail', kwargs={'id': pk}))

    def test_get_brand_detail(self):
        """An existing brand is serialized with its text fields and images."""
        response = self._get_detail(1)
        expected_data = {
            'title': 'brand1', 'description': 'brand1 description',
            'address': 'brand1 address', 'link': 'http://localhost:8000',
            'images': [],
        }
        self.assertEqual(response.data, expected_data)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_get_brand_data_if_model_is_empty(self):
        """Any id against an empty table answers 404 with the DRF detail."""
        Brand.objects.all().delete()
        response = self._get_detail(100)
        expected_data = {
            'detail': ErrorDetail(string='Страница не найдена.', code='not_found'),
        }
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
        self.assertEqual(response.data, expected_data)

    def test_get_brand_data_if_incorrect_key(self):
        """A nonexistent id answers 404 even when other brands exist."""
        response = self._get_detail(10)
        expected_data = {
            'detail': ErrorDetail(string='Страница не найдена.', code='not_found'),
        }
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
        self.assertEqual(response.data, expected_data)
|
{"/apps/info/views.py": ["/apps/brand/pagination.py", "/core/constants.py", "/apps/info/models.py", "/apps/info/serializers.py", "/apps/notifications/service.py"], "/apps/notifications/serializers.py": ["/apps/notifications/models.py"], "/apps/brand/urls.py": ["/apps/brand/views.py"], "/apps/check/urls.py": ["/apps/check/views.py"], "/apps/account/urls.py": ["/apps/account/views.py"], "/apps/check/admin.py": ["/apps/check/models.py"], "/apps/info/migrations/0001_initial.py": ["/core/utils.py"], "/apps/setting/urls.py": ["/apps/setting/views.py"], "/apps/info/tests/factories.py": ["/apps/info/models.py", "/core/constants.py"], "/apps/brand/service.py": ["/apps/brand/models.py", "/core/constants.py"], "/apps/info/urls.py": ["/apps/info/views.py"], "/apps/brand/tests/test_views.py": ["/apps/brand/models.py"], "/apps/setting/tests/test_middleware.py": ["/apps/setting/tests/factories.py"], "/apps/brand/admin.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/check/models.py": ["/apps/account/models.py", "/apps/brand/models.py", "/core/constants.py"], "/apps/brand/models.py": ["/core/utils.py", "/core/constants.py"], "/apps/brand/migrations/0005_auto_20210405_1740.py": ["/core/utils.py"], "/apps/integration/views.py": ["/apps/check/models.py", "/apps/integration/serializers.py", "/apps/integration/service.py", "/apps/check/service.py"], "/apps/brand/tests/factories.py": ["/apps/brand/models.py"], "/apps/account/tests/test_views.py": ["/apps/account/service.py", "/apps/account/tests/factories.py", "/apps/account/tests/mymock.py", "/core/constants.py"], "/apps/info/admin.py": ["/apps/brand/admin.py", "/apps/info/models.py", "/apps/notifications/tasks.py"], "/apps/notifications/service.py": ["/core/constants.py", "/apps/check/models.py", "/apps/notifications/models.py", "/apps/notifications/tasks.py"], "/apps/check/tests/test_views.py": ["/apps/account/tests/factories.py"], "/apps/setting/middleware.py": ["/apps/setting/service.py"], 
"/apps/notifications/tasks.py": ["/apps/notifications/models.py", "/apps/notifications/service.py", "/core/constants.py", "/apps/account/models.py"], "/apps/check/filters.py": ["/apps/check/models.py"], "/apps/notifications/pagination.py": ["/apps/notifications/service.py"], "/apps/account/service.py": ["/apps/integration/service.py", "/apps/notifications/tasks.py", "/apps/account/serializers.py"], "/apps/setting/tests/factories.py": ["/apps/setting/models.py"], "/apps/check/views.py": ["/apps/account/custom_openapi.py", "/apps/account/serializers.py", "/apps/brand/pagination.py", "/apps/check/models.py", "/apps/check/serializers.py", "/apps/check/service.py", "/apps/check/filters.py", "/apps/notifications/service.py"], "/apps/info/serializers.py": ["/apps/info/models.py", "/apps/notifications/models.py"], "/apps/setting/admin.py": ["/apps/setting/models.py"], "/apps/setting/service.py": ["/apps/setting/models.py"], "/apps/notifications/urls.py": ["/apps/notifications/views.py"], "/apps/brand/views.py": ["/apps/brand/__init__.py", "/apps/brand/models.py", "/apps/brand/pagination.py", "/apps/brand/serializers.py", "/apps/brand/service.py"], "/apps/integration/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/check/models.py"], "/apps/notifications/models.py": ["/apps/account/models.py", "/core/constants.py", "/apps/setting/models.py"], "/apps/info/migrations/0006_auto_20210408_1352.py": ["/core/utils.py"], "/apps/account/views.py": ["/apps/account/custom_openapi.py", "/apps/account/models.py", "/apps/account/serializers.py", "/apps/account/service.py", "/apps/integration/service.py"], "/apps/notifications/admin.py": ["/apps/notifications/models.py"], "/apps/account/serializers.py": ["/apps/account/models.py", "/apps/account/service.py", "/apps/integration/service.py", "/apps/notifications/models.py", "/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/forms.py": ["/apps/brand/models.py"], 
"/apps/account/custom_openapi.py": ["/apps/account/serializers.py"], "/apps/account/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/integration/urls.py": ["/apps/integration/views.py"], "/apps/setting/views.py": ["/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/migrations/0001_initial.py": ["/core/utils.py"], "/apps/brand/serializers.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/integration/service.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/integration/serializers.py", "/apps/setting/models.py"], "/apps/check/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/brand/serializers.py", "/apps/check/models.py", "/apps/setting/models.py", "/apps/notifications/models.py"], "/apps/account/tests/factories.py": ["/apps/account/models.py"], "/apps/info/tests/test_views.py": ["/apps/info/models.py", "/apps/info/tests/factories.py"], "/apps/notifications/views.py": ["/apps/notifications/models.py", "/apps/notifications/pagination.py", "/apps/notifications/serializers.py"], "/apps/check/service.py": ["/apps/notifications/tasks.py"], "/apps/setting/serializers.py": ["/apps/setting/models.py"], "/apps/info/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/setting/tests/test_views.py": ["/apps/setting/models.py", "/apps/setting/tests/factories.py"]}
|
6,531
|
TimurAbdymazhinov/adeliya-backend
|
refs/heads/main
|
/apps/account/migrations/0005_user_is_old_phone_confirmed.py
|
# Generated by Django 2.2.16 on 2021-04-07 07:29
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: adds the `is_old_phone_confirmed` boolean
    # flag (default False) to the `account.User` model.
    # NOTE(review): presumably used by a phone-number change/confirmation
    # flow — confirm against apps/account/service.py.

    dependencies = [
        ('account', '0004_auto_20210402_1044'),
    ]

    operations = [
        migrations.AddField(
            model_name='user',
            name='is_old_phone_confirmed',
            field=models.BooleanField(default=False, verbose_name='is old phone confirmed'),
        ),
    ]
|
{"/apps/info/views.py": ["/apps/brand/pagination.py", "/core/constants.py", "/apps/info/models.py", "/apps/info/serializers.py", "/apps/notifications/service.py"], "/apps/notifications/serializers.py": ["/apps/notifications/models.py"], "/apps/brand/urls.py": ["/apps/brand/views.py"], "/apps/check/urls.py": ["/apps/check/views.py"], "/apps/account/urls.py": ["/apps/account/views.py"], "/apps/check/admin.py": ["/apps/check/models.py"], "/apps/info/migrations/0001_initial.py": ["/core/utils.py"], "/apps/setting/urls.py": ["/apps/setting/views.py"], "/apps/info/tests/factories.py": ["/apps/info/models.py", "/core/constants.py"], "/apps/brand/service.py": ["/apps/brand/models.py", "/core/constants.py"], "/apps/info/urls.py": ["/apps/info/views.py"], "/apps/brand/tests/test_views.py": ["/apps/brand/models.py"], "/apps/setting/tests/test_middleware.py": ["/apps/setting/tests/factories.py"], "/apps/brand/admin.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/check/models.py": ["/apps/account/models.py", "/apps/brand/models.py", "/core/constants.py"], "/apps/brand/models.py": ["/core/utils.py", "/core/constants.py"], "/apps/brand/migrations/0005_auto_20210405_1740.py": ["/core/utils.py"], "/apps/integration/views.py": ["/apps/check/models.py", "/apps/integration/serializers.py", "/apps/integration/service.py", "/apps/check/service.py"], "/apps/brand/tests/factories.py": ["/apps/brand/models.py"], "/apps/account/tests/test_views.py": ["/apps/account/service.py", "/apps/account/tests/factories.py", "/apps/account/tests/mymock.py", "/core/constants.py"], "/apps/info/admin.py": ["/apps/brand/admin.py", "/apps/info/models.py", "/apps/notifications/tasks.py"], "/apps/notifications/service.py": ["/core/constants.py", "/apps/check/models.py", "/apps/notifications/models.py", "/apps/notifications/tasks.py"], "/apps/check/tests/test_views.py": ["/apps/account/tests/factories.py"], "/apps/setting/middleware.py": ["/apps/setting/service.py"], 
"/apps/notifications/tasks.py": ["/apps/notifications/models.py", "/apps/notifications/service.py", "/core/constants.py", "/apps/account/models.py"], "/apps/check/filters.py": ["/apps/check/models.py"], "/apps/notifications/pagination.py": ["/apps/notifications/service.py"], "/apps/account/service.py": ["/apps/integration/service.py", "/apps/notifications/tasks.py", "/apps/account/serializers.py"], "/apps/setting/tests/factories.py": ["/apps/setting/models.py"], "/apps/check/views.py": ["/apps/account/custom_openapi.py", "/apps/account/serializers.py", "/apps/brand/pagination.py", "/apps/check/models.py", "/apps/check/serializers.py", "/apps/check/service.py", "/apps/check/filters.py", "/apps/notifications/service.py"], "/apps/info/serializers.py": ["/apps/info/models.py", "/apps/notifications/models.py"], "/apps/setting/admin.py": ["/apps/setting/models.py"], "/apps/setting/service.py": ["/apps/setting/models.py"], "/apps/notifications/urls.py": ["/apps/notifications/views.py"], "/apps/brand/views.py": ["/apps/brand/__init__.py", "/apps/brand/models.py", "/apps/brand/pagination.py", "/apps/brand/serializers.py", "/apps/brand/service.py"], "/apps/integration/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/check/models.py"], "/apps/notifications/models.py": ["/apps/account/models.py", "/core/constants.py", "/apps/setting/models.py"], "/apps/info/migrations/0006_auto_20210408_1352.py": ["/core/utils.py"], "/apps/account/views.py": ["/apps/account/custom_openapi.py", "/apps/account/models.py", "/apps/account/serializers.py", "/apps/account/service.py", "/apps/integration/service.py"], "/apps/notifications/admin.py": ["/apps/notifications/models.py"], "/apps/account/serializers.py": ["/apps/account/models.py", "/apps/account/service.py", "/apps/integration/service.py", "/apps/notifications/models.py", "/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/forms.py": ["/apps/brand/models.py"], 
"/apps/account/custom_openapi.py": ["/apps/account/serializers.py"], "/apps/account/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/integration/urls.py": ["/apps/integration/views.py"], "/apps/setting/views.py": ["/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/migrations/0001_initial.py": ["/core/utils.py"], "/apps/brand/serializers.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/integration/service.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/integration/serializers.py", "/apps/setting/models.py"], "/apps/check/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/brand/serializers.py", "/apps/check/models.py", "/apps/setting/models.py", "/apps/notifications/models.py"], "/apps/account/tests/factories.py": ["/apps/account/models.py"], "/apps/info/tests/test_views.py": ["/apps/info/models.py", "/apps/info/tests/factories.py"], "/apps/notifications/views.py": ["/apps/notifications/models.py", "/apps/notifications/pagination.py", "/apps/notifications/serializers.py"], "/apps/check/service.py": ["/apps/notifications/tasks.py"], "/apps/setting/serializers.py": ["/apps/setting/models.py"], "/apps/info/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/setting/tests/test_views.py": ["/apps/setting/models.py", "/apps/setting/tests/factories.py"]}
|
6,532
|
TimurAbdymazhinov/adeliya-backend
|
refs/heads/main
|
/apps/setting/tests/test_middleware.py
|
from django.test import TestCase
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APIClient
from apps.setting.tests.factories import SettingFactory
class SettingMiddlewareTest(TestCase):
    """Middleware tests: the admin stays reachable while the public API
    answers 503.
    """

    @classmethod
    def setUpTestData(cls):
        cls.admin_url = reverse('admin:index')
        cls.default_url = reverse('v1:brand_list')
        # Create the Setting record the middleware consults.
        SettingFactory()

    def setUp(self) -> None:
        self.client = APIClient()

    def test_success_get_admin_page_302(self):
        """The admin index is not blocked — it redirects as usual."""
        status_code = self.client.get(self.admin_url).status_code
        self.assertEqual(status.HTTP_302_FOUND, status_code)

    def test_fail_get_non_admin_page_503(self):
        """A regular API endpoint is answered with 503 Service Unavailable."""
        status_code = self.client.get(self.default_url).status_code
        self.assertEqual(status.HTTP_503_SERVICE_UNAVAILABLE, status_code)
|
{"/apps/info/views.py": ["/apps/brand/pagination.py", "/core/constants.py", "/apps/info/models.py", "/apps/info/serializers.py", "/apps/notifications/service.py"], "/apps/notifications/serializers.py": ["/apps/notifications/models.py"], "/apps/brand/urls.py": ["/apps/brand/views.py"], "/apps/check/urls.py": ["/apps/check/views.py"], "/apps/account/urls.py": ["/apps/account/views.py"], "/apps/check/admin.py": ["/apps/check/models.py"], "/apps/info/migrations/0001_initial.py": ["/core/utils.py"], "/apps/setting/urls.py": ["/apps/setting/views.py"], "/apps/info/tests/factories.py": ["/apps/info/models.py", "/core/constants.py"], "/apps/brand/service.py": ["/apps/brand/models.py", "/core/constants.py"], "/apps/info/urls.py": ["/apps/info/views.py"], "/apps/brand/tests/test_views.py": ["/apps/brand/models.py"], "/apps/setting/tests/test_middleware.py": ["/apps/setting/tests/factories.py"], "/apps/brand/admin.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/check/models.py": ["/apps/account/models.py", "/apps/brand/models.py", "/core/constants.py"], "/apps/brand/models.py": ["/core/utils.py", "/core/constants.py"], "/apps/brand/migrations/0005_auto_20210405_1740.py": ["/core/utils.py"], "/apps/integration/views.py": ["/apps/check/models.py", "/apps/integration/serializers.py", "/apps/integration/service.py", "/apps/check/service.py"], "/apps/brand/tests/factories.py": ["/apps/brand/models.py"], "/apps/account/tests/test_views.py": ["/apps/account/service.py", "/apps/account/tests/factories.py", "/apps/account/tests/mymock.py", "/core/constants.py"], "/apps/info/admin.py": ["/apps/brand/admin.py", "/apps/info/models.py", "/apps/notifications/tasks.py"], "/apps/notifications/service.py": ["/core/constants.py", "/apps/check/models.py", "/apps/notifications/models.py", "/apps/notifications/tasks.py"], "/apps/check/tests/test_views.py": ["/apps/account/tests/factories.py"], "/apps/setting/middleware.py": ["/apps/setting/service.py"], 
"/apps/notifications/tasks.py": ["/apps/notifications/models.py", "/apps/notifications/service.py", "/core/constants.py", "/apps/account/models.py"], "/apps/check/filters.py": ["/apps/check/models.py"], "/apps/notifications/pagination.py": ["/apps/notifications/service.py"], "/apps/account/service.py": ["/apps/integration/service.py", "/apps/notifications/tasks.py", "/apps/account/serializers.py"], "/apps/setting/tests/factories.py": ["/apps/setting/models.py"], "/apps/check/views.py": ["/apps/account/custom_openapi.py", "/apps/account/serializers.py", "/apps/brand/pagination.py", "/apps/check/models.py", "/apps/check/serializers.py", "/apps/check/service.py", "/apps/check/filters.py", "/apps/notifications/service.py"], "/apps/info/serializers.py": ["/apps/info/models.py", "/apps/notifications/models.py"], "/apps/setting/admin.py": ["/apps/setting/models.py"], "/apps/setting/service.py": ["/apps/setting/models.py"], "/apps/notifications/urls.py": ["/apps/notifications/views.py"], "/apps/brand/views.py": ["/apps/brand/__init__.py", "/apps/brand/models.py", "/apps/brand/pagination.py", "/apps/brand/serializers.py", "/apps/brand/service.py"], "/apps/integration/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/check/models.py"], "/apps/notifications/models.py": ["/apps/account/models.py", "/core/constants.py", "/apps/setting/models.py"], "/apps/info/migrations/0006_auto_20210408_1352.py": ["/core/utils.py"], "/apps/account/views.py": ["/apps/account/custom_openapi.py", "/apps/account/models.py", "/apps/account/serializers.py", "/apps/account/service.py", "/apps/integration/service.py"], "/apps/notifications/admin.py": ["/apps/notifications/models.py"], "/apps/account/serializers.py": ["/apps/account/models.py", "/apps/account/service.py", "/apps/integration/service.py", "/apps/notifications/models.py", "/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/forms.py": ["/apps/brand/models.py"], 
"/apps/account/custom_openapi.py": ["/apps/account/serializers.py"], "/apps/account/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/integration/urls.py": ["/apps/integration/views.py"], "/apps/setting/views.py": ["/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/migrations/0001_initial.py": ["/core/utils.py"], "/apps/brand/serializers.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/integration/service.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/integration/serializers.py", "/apps/setting/models.py"], "/apps/check/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/brand/serializers.py", "/apps/check/models.py", "/apps/setting/models.py", "/apps/notifications/models.py"], "/apps/account/tests/factories.py": ["/apps/account/models.py"], "/apps/info/tests/test_views.py": ["/apps/info/models.py", "/apps/info/tests/factories.py"], "/apps/notifications/views.py": ["/apps/notifications/models.py", "/apps/notifications/pagination.py", "/apps/notifications/serializers.py"], "/apps/check/service.py": ["/apps/notifications/tasks.py"], "/apps/setting/serializers.py": ["/apps/setting/models.py"], "/apps/info/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/setting/tests/test_views.py": ["/apps/setting/models.py", "/apps/setting/tests/factories.py"]}
|
6,533
|
TimurAbdymazhinov/adeliya-backend
|
refs/heads/main
|
/apps/info/migrations/0009_auto_20210506_1458.py
|
# Generated by Django 2.2.16 on 2021-05-06 08:58
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration for `info.PromotionAndNews`:
    #   * `created_at` defaults to datetime.datetime.now (callable, so it is
    #     evaluated per-row at insert time, naive local time);
    #   * `is_active` defaults to True.

    dependencies = [
        ('info', '0008_auto_20210428_1228'),
    ]

    operations = [
        migrations.AlterField(
            model_name='promotionandnews',
            name='created_at',
            field=models.DateTimeField(default=datetime.datetime.now, verbose_name='Дата публикации'),
        ),
        migrations.AlterField(
            model_name='promotionandnews',
            name='is_active',
            field=models.BooleanField(default=True, verbose_name='Активный(Вкл/Выкл)'),
        ),
    ]
|
{"/apps/info/views.py": ["/apps/brand/pagination.py", "/core/constants.py", "/apps/info/models.py", "/apps/info/serializers.py", "/apps/notifications/service.py"], "/apps/notifications/serializers.py": ["/apps/notifications/models.py"], "/apps/brand/urls.py": ["/apps/brand/views.py"], "/apps/check/urls.py": ["/apps/check/views.py"], "/apps/account/urls.py": ["/apps/account/views.py"], "/apps/check/admin.py": ["/apps/check/models.py"], "/apps/info/migrations/0001_initial.py": ["/core/utils.py"], "/apps/setting/urls.py": ["/apps/setting/views.py"], "/apps/info/tests/factories.py": ["/apps/info/models.py", "/core/constants.py"], "/apps/brand/service.py": ["/apps/brand/models.py", "/core/constants.py"], "/apps/info/urls.py": ["/apps/info/views.py"], "/apps/brand/tests/test_views.py": ["/apps/brand/models.py"], "/apps/setting/tests/test_middleware.py": ["/apps/setting/tests/factories.py"], "/apps/brand/admin.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/check/models.py": ["/apps/account/models.py", "/apps/brand/models.py", "/core/constants.py"], "/apps/brand/models.py": ["/core/utils.py", "/core/constants.py"], "/apps/brand/migrations/0005_auto_20210405_1740.py": ["/core/utils.py"], "/apps/integration/views.py": ["/apps/check/models.py", "/apps/integration/serializers.py", "/apps/integration/service.py", "/apps/check/service.py"], "/apps/brand/tests/factories.py": ["/apps/brand/models.py"], "/apps/account/tests/test_views.py": ["/apps/account/service.py", "/apps/account/tests/factories.py", "/apps/account/tests/mymock.py", "/core/constants.py"], "/apps/info/admin.py": ["/apps/brand/admin.py", "/apps/info/models.py", "/apps/notifications/tasks.py"], "/apps/notifications/service.py": ["/core/constants.py", "/apps/check/models.py", "/apps/notifications/models.py", "/apps/notifications/tasks.py"], "/apps/check/tests/test_views.py": ["/apps/account/tests/factories.py"], "/apps/setting/middleware.py": ["/apps/setting/service.py"], 
"/apps/notifications/tasks.py": ["/apps/notifications/models.py", "/apps/notifications/service.py", "/core/constants.py", "/apps/account/models.py"], "/apps/check/filters.py": ["/apps/check/models.py"], "/apps/notifications/pagination.py": ["/apps/notifications/service.py"], "/apps/account/service.py": ["/apps/integration/service.py", "/apps/notifications/tasks.py", "/apps/account/serializers.py"], "/apps/setting/tests/factories.py": ["/apps/setting/models.py"], "/apps/check/views.py": ["/apps/account/custom_openapi.py", "/apps/account/serializers.py", "/apps/brand/pagination.py", "/apps/check/models.py", "/apps/check/serializers.py", "/apps/check/service.py", "/apps/check/filters.py", "/apps/notifications/service.py"], "/apps/info/serializers.py": ["/apps/info/models.py", "/apps/notifications/models.py"], "/apps/setting/admin.py": ["/apps/setting/models.py"], "/apps/setting/service.py": ["/apps/setting/models.py"], "/apps/notifications/urls.py": ["/apps/notifications/views.py"], "/apps/brand/views.py": ["/apps/brand/__init__.py", "/apps/brand/models.py", "/apps/brand/pagination.py", "/apps/brand/serializers.py", "/apps/brand/service.py"], "/apps/integration/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/check/models.py"], "/apps/notifications/models.py": ["/apps/account/models.py", "/core/constants.py", "/apps/setting/models.py"], "/apps/info/migrations/0006_auto_20210408_1352.py": ["/core/utils.py"], "/apps/account/views.py": ["/apps/account/custom_openapi.py", "/apps/account/models.py", "/apps/account/serializers.py", "/apps/account/service.py", "/apps/integration/service.py"], "/apps/notifications/admin.py": ["/apps/notifications/models.py"], "/apps/account/serializers.py": ["/apps/account/models.py", "/apps/account/service.py", "/apps/integration/service.py", "/apps/notifications/models.py", "/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/forms.py": ["/apps/brand/models.py"], 
"/apps/account/custom_openapi.py": ["/apps/account/serializers.py"], "/apps/account/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/integration/urls.py": ["/apps/integration/views.py"], "/apps/setting/views.py": ["/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/migrations/0001_initial.py": ["/core/utils.py"], "/apps/brand/serializers.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/integration/service.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/integration/serializers.py", "/apps/setting/models.py"], "/apps/check/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/brand/serializers.py", "/apps/check/models.py", "/apps/setting/models.py", "/apps/notifications/models.py"], "/apps/account/tests/factories.py": ["/apps/account/models.py"], "/apps/info/tests/test_views.py": ["/apps/info/models.py", "/apps/info/tests/factories.py"], "/apps/notifications/views.py": ["/apps/notifications/models.py", "/apps/notifications/pagination.py", "/apps/notifications/serializers.py"], "/apps/check/service.py": ["/apps/notifications/tasks.py"], "/apps/setting/serializers.py": ["/apps/setting/models.py"], "/apps/info/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/setting/tests/test_views.py": ["/apps/setting/models.py", "/apps/setting/tests/factories.py"]}
|
6,534
|
TimurAbdymazhinov/adeliya-backend
|
refs/heads/main
|
/apps/brand/admin.py
|
from django.contrib import admin
from django.forms.models import BaseInlineFormSet
from adminsortable2.admin import SortableAdminMixin
from django_2gis_maps.admin import DoubleGisAdmin
from apps.brand.models import (
Brand, BrandImage, Filial, FilialImage, FilialPhone, WorkTime
)
from apps.brand.service import WorkDayService
class ImageInlineFormSet(BaseInlineFormSet):
    """Inline formset enforcing exactly one image flagged as main.

    Rules applied across all non-deleted, filled forms:
      * at most one form may have ``is_main`` set — each extra main
        image gets a field error;
      * if any forms exist, at least one must have ``is_main`` set.
    """

    def clean(self):
        super(ImageInlineFormSet, self).clean()
        main_photo_count = 0
        for form in self.forms:
            # Unfilled extra forms have empty cleaned_data; deleted forms
            # carry DELETE=True — neither counts toward the main image.
            is_main_image = (
                form.cleaned_data and not form.cleaned_data.get('DELETE') and
                form.cleaned_data['is_main']
            )
            if is_main_image:
                main_photo_count += 1
                # Fix: the original re-read form.cleaned_data['is_main']
                # outside this guard, which raised KeyError for blank extra
                # forms once a second main image was present. Checking only
                # flagged forms is equivalent and safe.
                if main_photo_count > 1:
                    form.add_error(
                        'is_main',
                        'Допускается только одно изображение, как основное'
                    )
        if self.forms and not main_photo_count:
            self.forms[0].add_error(
                'is_main',
                'Хотя бы одно изображение должно быть, как основное'
            )
class NumberInlineFormSet(BaseInlineFormSet):
    """Inline formset requiring each phone number to serve a purpose.

    Every filled, non-deleted form must enable at least one of
    ``is_phone`` / ``is_whatsapp``; otherwise both fields get an error.
    """

    def clean(self):
        super(NumberInlineFormSet, self).clean()
        for form in self.forms:
            # Fix: the original folded "form is filled and not deleted"
            # into the validity test, so blank extra rows and rows marked
            # for deletion also received errors, blocking the formset from
            # saving even when the admin never touched them. Such rows are
            # exempt from validation.
            if not form.cleaned_data or form.cleaned_data.get('DELETE'):
                continue
            if not (form.cleaned_data['is_phone'] or
                    form.cleaned_data['is_whatsapp']):
                form.add_error(
                    'is_phone',
                    'У номера должна быть включена хотя бы одна функция'
                )
                form.add_error(
                    'is_whatsapp',
                    'У номера должна быть включена хотя бы одна функция'
                )
class WorkTimeInline(admin.TabularInline):
    """Inline for a filial's weekly schedule.

    Rows may be edited (opening/closing hours) but never added or
    deleted, and the weekday itself is shown read-only.
    """
    model = WorkTime
    can_delete = False
    readonly_fields = ['day']
    fields = ['day', 'start_work', 'end_work']

    def has_add_permission(self, request, obj=None):
        # Weekday rows are created programmatically elsewhere in this
        # module; admins must not add extras by hand.
        return False
class BrandImageAdmin(admin.TabularInline):
    """Tabular inline for brand gallery images; no blank extra rows."""
    extra = 0
    model = BrandImage
@admin.register(Brand)
class BrandAdmin(SortableAdminMixin, admin.ModelAdmin):
    """Drag-sortable admin for brands with inline image management."""
    search_fields = ['title']
    list_display_links = ['title']
    list_display = ('position', 'title', 'address', 'link',)
    inlines = (BrandImageAdmin,)
class FilialImageAdmin(admin.TabularInline):
    """Tabular inline for filial images.

    Uses ImageInlineFormSet to enforce the single-main-image rule.
    """
    extra = 0
    model = FilialImage
    formset = ImageInlineFormSet
class FilialPhoneAdmin(admin.TabularInline):
    """Tabular inline for filial phone numbers.

    Uses NumberInlineFormSet so every number has at least one function
    (phone call or WhatsApp) enabled.
    """
    extra = 0
    model = FilialPhone
    formset = NumberInlineFormSet
@admin.register(Filial)
class FilialAdmin(SortableAdminMixin, DoubleGisAdmin):
    """Sortable, map-enabled admin for filials (branches).

    Opening an existing filial's change page lazily creates its weekday
    WorkTime rows (via WorkDayService) if none exist yet, so the
    WorkTimeInline always has the seven rows to display.
    """
    inlines = (FilialImageAdmin, FilialPhoneAdmin, WorkTimeInline)
    list_display = ('position', 'title', 'address',)
    list_display_links = ['title']
    search_fields = ['title']

    def get_inline_instances(self, request, obj=None):
        """Instantiate inlines, seeding weekday rows for saved filials.

        Fix: the original wrapped the seeding in ``try/except Exception:
        pass`` (silently hiding any failure inside ``create_weekday``)
        only to survive ``obj`` being ``None`` on the add page, and used
        an ``if qs: pass / else: create`` inversion. An explicit None
        guard plus ``exists()`` expresses the intent directly.
        """
        if obj is not None and not obj.works_time.exists():
            WorkDayService.create_weekday(obj)

        inline_instances = []
        for inline_class in self.inlines:
            inline = inline_class(self.model, self.admin_site)
            if request:
                # NOTE(review): ``_has_add_permission`` is Django's private
                # compatibility shim; kept as-is to match the installed
                # Django 2.2 behavior — confirm before upgrading Django.
                inline_has_add_permission = inline._has_add_permission(
                    request, obj
                )
                if not (inline.has_view_or_change_permission(request, obj) or
                        inline_has_add_permission or
                        inline.has_delete_permission(request, obj)):
                    continue
                if not inline_has_add_permission:
                    # Visible but read-only: forbid adding new rows.
                    inline.max_num = 0
            inline_instances.append(inline)
        return inline_instances
|
{"/apps/info/views.py": ["/apps/brand/pagination.py", "/core/constants.py", "/apps/info/models.py", "/apps/info/serializers.py", "/apps/notifications/service.py"], "/apps/notifications/serializers.py": ["/apps/notifications/models.py"], "/apps/brand/urls.py": ["/apps/brand/views.py"], "/apps/check/urls.py": ["/apps/check/views.py"], "/apps/account/urls.py": ["/apps/account/views.py"], "/apps/check/admin.py": ["/apps/check/models.py"], "/apps/info/migrations/0001_initial.py": ["/core/utils.py"], "/apps/setting/urls.py": ["/apps/setting/views.py"], "/apps/info/tests/factories.py": ["/apps/info/models.py", "/core/constants.py"], "/apps/brand/service.py": ["/apps/brand/models.py", "/core/constants.py"], "/apps/info/urls.py": ["/apps/info/views.py"], "/apps/brand/tests/test_views.py": ["/apps/brand/models.py"], "/apps/setting/tests/test_middleware.py": ["/apps/setting/tests/factories.py"], "/apps/brand/admin.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/check/models.py": ["/apps/account/models.py", "/apps/brand/models.py", "/core/constants.py"], "/apps/brand/models.py": ["/core/utils.py", "/core/constants.py"], "/apps/brand/migrations/0005_auto_20210405_1740.py": ["/core/utils.py"], "/apps/integration/views.py": ["/apps/check/models.py", "/apps/integration/serializers.py", "/apps/integration/service.py", "/apps/check/service.py"], "/apps/brand/tests/factories.py": ["/apps/brand/models.py"], "/apps/account/tests/test_views.py": ["/apps/account/service.py", "/apps/account/tests/factories.py", "/apps/account/tests/mymock.py", "/core/constants.py"], "/apps/info/admin.py": ["/apps/brand/admin.py", "/apps/info/models.py", "/apps/notifications/tasks.py"], "/apps/notifications/service.py": ["/core/constants.py", "/apps/check/models.py", "/apps/notifications/models.py", "/apps/notifications/tasks.py"], "/apps/check/tests/test_views.py": ["/apps/account/tests/factories.py"], "/apps/setting/middleware.py": ["/apps/setting/service.py"], 
"/apps/notifications/tasks.py": ["/apps/notifications/models.py", "/apps/notifications/service.py", "/core/constants.py", "/apps/account/models.py"], "/apps/check/filters.py": ["/apps/check/models.py"], "/apps/notifications/pagination.py": ["/apps/notifications/service.py"], "/apps/account/service.py": ["/apps/integration/service.py", "/apps/notifications/tasks.py", "/apps/account/serializers.py"], "/apps/setting/tests/factories.py": ["/apps/setting/models.py"], "/apps/check/views.py": ["/apps/account/custom_openapi.py", "/apps/account/serializers.py", "/apps/brand/pagination.py", "/apps/check/models.py", "/apps/check/serializers.py", "/apps/check/service.py", "/apps/check/filters.py", "/apps/notifications/service.py"], "/apps/info/serializers.py": ["/apps/info/models.py", "/apps/notifications/models.py"], "/apps/setting/admin.py": ["/apps/setting/models.py"], "/apps/setting/service.py": ["/apps/setting/models.py"], "/apps/notifications/urls.py": ["/apps/notifications/views.py"], "/apps/brand/views.py": ["/apps/brand/__init__.py", "/apps/brand/models.py", "/apps/brand/pagination.py", "/apps/brand/serializers.py", "/apps/brand/service.py"], "/apps/integration/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/check/models.py"], "/apps/notifications/models.py": ["/apps/account/models.py", "/core/constants.py", "/apps/setting/models.py"], "/apps/info/migrations/0006_auto_20210408_1352.py": ["/core/utils.py"], "/apps/account/views.py": ["/apps/account/custom_openapi.py", "/apps/account/models.py", "/apps/account/serializers.py", "/apps/account/service.py", "/apps/integration/service.py"], "/apps/notifications/admin.py": ["/apps/notifications/models.py"], "/apps/account/serializers.py": ["/apps/account/models.py", "/apps/account/service.py", "/apps/integration/service.py", "/apps/notifications/models.py", "/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/forms.py": ["/apps/brand/models.py"], 
"/apps/account/custom_openapi.py": ["/apps/account/serializers.py"], "/apps/account/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/integration/urls.py": ["/apps/integration/views.py"], "/apps/setting/views.py": ["/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/migrations/0001_initial.py": ["/core/utils.py"], "/apps/brand/serializers.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/integration/service.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/integration/serializers.py", "/apps/setting/models.py"], "/apps/check/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/brand/serializers.py", "/apps/check/models.py", "/apps/setting/models.py", "/apps/notifications/models.py"], "/apps/account/tests/factories.py": ["/apps/account/models.py"], "/apps/info/tests/test_views.py": ["/apps/info/models.py", "/apps/info/tests/factories.py"], "/apps/notifications/views.py": ["/apps/notifications/models.py", "/apps/notifications/pagination.py", "/apps/notifications/serializers.py"], "/apps/check/service.py": ["/apps/notifications/tasks.py"], "/apps/setting/serializers.py": ["/apps/setting/models.py"], "/apps/info/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/setting/tests/test_views.py": ["/apps/setting/models.py", "/apps/setting/tests/factories.py"]}
|
6,535
|
TimurAbdymazhinov/adeliya-backend
|
refs/heads/main
|
/apps/brand/migrations/0003_remove_filial_description.py
|
# Generated by Django 2.2.16 on 2021-03-30 08:10
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated migration: drops the unused ``description`` column
    # from the Filial model. Kept byte-stable — do not hand-edit applied
    # migrations.

    dependencies = [
        ('brand', '0002_auto_20210329_2332'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='filial',
            name='description',
        ),
    ]
|
{"/apps/info/views.py": ["/apps/brand/pagination.py", "/core/constants.py", "/apps/info/models.py", "/apps/info/serializers.py", "/apps/notifications/service.py"], "/apps/notifications/serializers.py": ["/apps/notifications/models.py"], "/apps/brand/urls.py": ["/apps/brand/views.py"], "/apps/check/urls.py": ["/apps/check/views.py"], "/apps/account/urls.py": ["/apps/account/views.py"], "/apps/check/admin.py": ["/apps/check/models.py"], "/apps/info/migrations/0001_initial.py": ["/core/utils.py"], "/apps/setting/urls.py": ["/apps/setting/views.py"], "/apps/info/tests/factories.py": ["/apps/info/models.py", "/core/constants.py"], "/apps/brand/service.py": ["/apps/brand/models.py", "/core/constants.py"], "/apps/info/urls.py": ["/apps/info/views.py"], "/apps/brand/tests/test_views.py": ["/apps/brand/models.py"], "/apps/setting/tests/test_middleware.py": ["/apps/setting/tests/factories.py"], "/apps/brand/admin.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/check/models.py": ["/apps/account/models.py", "/apps/brand/models.py", "/core/constants.py"], "/apps/brand/models.py": ["/core/utils.py", "/core/constants.py"], "/apps/brand/migrations/0005_auto_20210405_1740.py": ["/core/utils.py"], "/apps/integration/views.py": ["/apps/check/models.py", "/apps/integration/serializers.py", "/apps/integration/service.py", "/apps/check/service.py"], "/apps/brand/tests/factories.py": ["/apps/brand/models.py"], "/apps/account/tests/test_views.py": ["/apps/account/service.py", "/apps/account/tests/factories.py", "/apps/account/tests/mymock.py", "/core/constants.py"], "/apps/info/admin.py": ["/apps/brand/admin.py", "/apps/info/models.py", "/apps/notifications/tasks.py"], "/apps/notifications/service.py": ["/core/constants.py", "/apps/check/models.py", "/apps/notifications/models.py", "/apps/notifications/tasks.py"], "/apps/check/tests/test_views.py": ["/apps/account/tests/factories.py"], "/apps/setting/middleware.py": ["/apps/setting/service.py"], 
"/apps/notifications/tasks.py": ["/apps/notifications/models.py", "/apps/notifications/service.py", "/core/constants.py", "/apps/account/models.py"], "/apps/check/filters.py": ["/apps/check/models.py"], "/apps/notifications/pagination.py": ["/apps/notifications/service.py"], "/apps/account/service.py": ["/apps/integration/service.py", "/apps/notifications/tasks.py", "/apps/account/serializers.py"], "/apps/setting/tests/factories.py": ["/apps/setting/models.py"], "/apps/check/views.py": ["/apps/account/custom_openapi.py", "/apps/account/serializers.py", "/apps/brand/pagination.py", "/apps/check/models.py", "/apps/check/serializers.py", "/apps/check/service.py", "/apps/check/filters.py", "/apps/notifications/service.py"], "/apps/info/serializers.py": ["/apps/info/models.py", "/apps/notifications/models.py"], "/apps/setting/admin.py": ["/apps/setting/models.py"], "/apps/setting/service.py": ["/apps/setting/models.py"], "/apps/notifications/urls.py": ["/apps/notifications/views.py"], "/apps/brand/views.py": ["/apps/brand/__init__.py", "/apps/brand/models.py", "/apps/brand/pagination.py", "/apps/brand/serializers.py", "/apps/brand/service.py"], "/apps/integration/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/check/models.py"], "/apps/notifications/models.py": ["/apps/account/models.py", "/core/constants.py", "/apps/setting/models.py"], "/apps/info/migrations/0006_auto_20210408_1352.py": ["/core/utils.py"], "/apps/account/views.py": ["/apps/account/custom_openapi.py", "/apps/account/models.py", "/apps/account/serializers.py", "/apps/account/service.py", "/apps/integration/service.py"], "/apps/notifications/admin.py": ["/apps/notifications/models.py"], "/apps/account/serializers.py": ["/apps/account/models.py", "/apps/account/service.py", "/apps/integration/service.py", "/apps/notifications/models.py", "/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/forms.py": ["/apps/brand/models.py"], 
"/apps/account/custom_openapi.py": ["/apps/account/serializers.py"], "/apps/account/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/integration/urls.py": ["/apps/integration/views.py"], "/apps/setting/views.py": ["/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/migrations/0001_initial.py": ["/core/utils.py"], "/apps/brand/serializers.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/integration/service.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/integration/serializers.py", "/apps/setting/models.py"], "/apps/check/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/brand/serializers.py", "/apps/check/models.py", "/apps/setting/models.py", "/apps/notifications/models.py"], "/apps/account/tests/factories.py": ["/apps/account/models.py"], "/apps/info/tests/test_views.py": ["/apps/info/models.py", "/apps/info/tests/factories.py"], "/apps/notifications/views.py": ["/apps/notifications/models.py", "/apps/notifications/pagination.py", "/apps/notifications/serializers.py"], "/apps/check/service.py": ["/apps/notifications/tasks.py"], "/apps/setting/serializers.py": ["/apps/setting/models.py"], "/apps/info/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/setting/tests/test_views.py": ["/apps/setting/models.py", "/apps/setting/tests/factories.py"]}
|
6,536
|
TimurAbdymazhinov/adeliya-backend
|
refs/heads/main
|
/apps/check/migrations/0006_auto_20210504_1222.py
|
# Generated by Django 2.2.16 on 2021-05-04 06:22
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: widens the ``status`` field choices on the
    # Check model to three states (accrued / withdrawn / both). Kept
    # byte-stable — do not hand-edit applied migrations.

    dependencies = [
        ('check', '0005_auto_20210503_1341'),
    ]
    operations = [
        migrations.AlterField(
            model_name='check',
            name='status',
            field=models.CharField(choices=[('accrued', 'Начислено'), ('withdrawn', 'Снято'), ('accrued_and_withdrawn', 'Начислено и снято')], max_length=25, verbose_name='Статус чека'),
        ),
    ]
|
{"/apps/info/views.py": ["/apps/brand/pagination.py", "/core/constants.py", "/apps/info/models.py", "/apps/info/serializers.py", "/apps/notifications/service.py"], "/apps/notifications/serializers.py": ["/apps/notifications/models.py"], "/apps/brand/urls.py": ["/apps/brand/views.py"], "/apps/check/urls.py": ["/apps/check/views.py"], "/apps/account/urls.py": ["/apps/account/views.py"], "/apps/check/admin.py": ["/apps/check/models.py"], "/apps/info/migrations/0001_initial.py": ["/core/utils.py"], "/apps/setting/urls.py": ["/apps/setting/views.py"], "/apps/info/tests/factories.py": ["/apps/info/models.py", "/core/constants.py"], "/apps/brand/service.py": ["/apps/brand/models.py", "/core/constants.py"], "/apps/info/urls.py": ["/apps/info/views.py"], "/apps/brand/tests/test_views.py": ["/apps/brand/models.py"], "/apps/setting/tests/test_middleware.py": ["/apps/setting/tests/factories.py"], "/apps/brand/admin.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/check/models.py": ["/apps/account/models.py", "/apps/brand/models.py", "/core/constants.py"], "/apps/brand/models.py": ["/core/utils.py", "/core/constants.py"], "/apps/brand/migrations/0005_auto_20210405_1740.py": ["/core/utils.py"], "/apps/integration/views.py": ["/apps/check/models.py", "/apps/integration/serializers.py", "/apps/integration/service.py", "/apps/check/service.py"], "/apps/brand/tests/factories.py": ["/apps/brand/models.py"], "/apps/account/tests/test_views.py": ["/apps/account/service.py", "/apps/account/tests/factories.py", "/apps/account/tests/mymock.py", "/core/constants.py"], "/apps/info/admin.py": ["/apps/brand/admin.py", "/apps/info/models.py", "/apps/notifications/tasks.py"], "/apps/notifications/service.py": ["/core/constants.py", "/apps/check/models.py", "/apps/notifications/models.py", "/apps/notifications/tasks.py"], "/apps/check/tests/test_views.py": ["/apps/account/tests/factories.py"], "/apps/setting/middleware.py": ["/apps/setting/service.py"], 
"/apps/notifications/tasks.py": ["/apps/notifications/models.py", "/apps/notifications/service.py", "/core/constants.py", "/apps/account/models.py"], "/apps/check/filters.py": ["/apps/check/models.py"], "/apps/notifications/pagination.py": ["/apps/notifications/service.py"], "/apps/account/service.py": ["/apps/integration/service.py", "/apps/notifications/tasks.py", "/apps/account/serializers.py"], "/apps/setting/tests/factories.py": ["/apps/setting/models.py"], "/apps/check/views.py": ["/apps/account/custom_openapi.py", "/apps/account/serializers.py", "/apps/brand/pagination.py", "/apps/check/models.py", "/apps/check/serializers.py", "/apps/check/service.py", "/apps/check/filters.py", "/apps/notifications/service.py"], "/apps/info/serializers.py": ["/apps/info/models.py", "/apps/notifications/models.py"], "/apps/setting/admin.py": ["/apps/setting/models.py"], "/apps/setting/service.py": ["/apps/setting/models.py"], "/apps/notifications/urls.py": ["/apps/notifications/views.py"], "/apps/brand/views.py": ["/apps/brand/__init__.py", "/apps/brand/models.py", "/apps/brand/pagination.py", "/apps/brand/serializers.py", "/apps/brand/service.py"], "/apps/integration/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/check/models.py"], "/apps/notifications/models.py": ["/apps/account/models.py", "/core/constants.py", "/apps/setting/models.py"], "/apps/info/migrations/0006_auto_20210408_1352.py": ["/core/utils.py"], "/apps/account/views.py": ["/apps/account/custom_openapi.py", "/apps/account/models.py", "/apps/account/serializers.py", "/apps/account/service.py", "/apps/integration/service.py"], "/apps/notifications/admin.py": ["/apps/notifications/models.py"], "/apps/account/serializers.py": ["/apps/account/models.py", "/apps/account/service.py", "/apps/integration/service.py", "/apps/notifications/models.py", "/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/forms.py": ["/apps/brand/models.py"], 
"/apps/account/custom_openapi.py": ["/apps/account/serializers.py"], "/apps/account/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/integration/urls.py": ["/apps/integration/views.py"], "/apps/setting/views.py": ["/apps/setting/models.py", "/apps/setting/serializers.py"], "/apps/brand/migrations/0001_initial.py": ["/core/utils.py"], "/apps/brand/serializers.py": ["/apps/brand/models.py", "/apps/brand/service.py"], "/apps/integration/service.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/integration/serializers.py", "/apps/setting/models.py"], "/apps/check/serializers.py": ["/apps/account/models.py", "/apps/brand/models.py", "/apps/brand/serializers.py", "/apps/check/models.py", "/apps/setting/models.py", "/apps/notifications/models.py"], "/apps/account/tests/factories.py": ["/apps/account/models.py"], "/apps/info/tests/test_views.py": ["/apps/info/models.py", "/apps/info/tests/factories.py"], "/apps/notifications/views.py": ["/apps/notifications/models.py", "/apps/notifications/pagination.py", "/apps/notifications/serializers.py"], "/apps/check/service.py": ["/apps/notifications/tasks.py"], "/apps/setting/serializers.py": ["/apps/setting/models.py"], "/apps/info/models.py": ["/core/constants.py", "/core/utils.py"], "/apps/setting/tests/test_views.py": ["/apps/setting/models.py", "/apps/setting/tests/factories.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.