index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
14,616
|
camptocamp/tilecloud-chain
|
refs/heads/master
|
/tilecloud_chain/tests/test_expiretiles.py
|
import os
from typing import List, Tuple

import psycopg2
import pytest
from testfixtures import LogCapture

from tilecloud_chain import expiretiles
from tilecloud_chain.tests import CompareCase, MatchRegex
class TestExpireTiles(CompareCase):
    """End-to-end tests of the ``import_expiretiles`` command line tool."""

    def setUp(self) -> None:  # noqa
        # Show full diffs on assertion failures.
        self.maxDiff = None

    @classmethod
    def setUpClass(cls):  # noqa
        # Six expired tiles (z/x/y) forming a small L-shaped area at zoom 18.
        with open("/tmp/expired", "w", encoding="utf-8") as f:
            f.write("18/135900/92720\n")
            f.write("18/135900/92721\n")
            f.write("18/135900/92722\n")
            f.write("18/135901/92721\n")
            f.write("18/135901/92722\n")
            f.write("18/135902/92722\n")
        # An empty expire file, used to test the "no coords" code path.
        with open("/tmp/expired-empty", "w", encoding="utf-8"):
            pass

    @classmethod
    def tearDownClass(cls):  # noqa
        os.remove("/tmp/expired")
        os.remove("/tmp/expired-empty")

    def test_expire_tiles(self) -> None:
        """Import the expired tiles and check the stored geometry (with and without --simplify)."""
        with LogCapture("tilecloud_chain", level=30) as log_capture:
            # The imported area is stored as a single multipolygon; capture its coordinates.
            geom_re = MatchRegex(r"MULTIPOLYGON\(\(\(([0-9\. ,]+)\)\)\)")
            # Expected outline of the expired area, reprojected to EPSG:21781.
            geom_coords = [
                pytest.approx([538274.006497397, 151463.940954133], abs=1e-6),
                pytest.approx([538272.927475664, 151358.882137848], abs=1e-6),
                pytest.approx([538167.532395446, 151359.965536437], abs=1e-6),
                pytest.approx([538062.137334338, 151361.050781072], abs=1e-6),
                pytest.approx([537956.742292377, 151362.137871759], abs=1e-6),
                pytest.approx([537957.826834589, 151467.19663084], abs=1e-6),
                pytest.approx([537958.911357866, 151572.253567259], abs=1e-6),
                pytest.approx([537959.995862209, 151677.308681051], abs=1e-6),
                pytest.approx([538065.385383791, 151676.221647663], abs=1e-6),
                pytest.approx([538064.302719542, 151571.166514773], abs=1e-6),
                pytest.approx([538169.694100363, 151570.08130827], abs=1e-6),
                pytest.approx([538168.61325734, 151465.024333685], abs=1e-6),
                pytest.approx([538274.006497397, 151463.940954133], abs=1e-6),
            ]
            self.assert_cmd_equals(
                cmd=[
                    ".build/venv/bin/import_expiretiles",
                    "--create",
                    "--delete",
                    "--srid",
                    "21781",
                    "/tmp/expired",
                    "user=postgres password=postgres dbname=tests host=db",
                    "expired",
                    "the_geom",
                ],
                main_func=expiretiles.main,
                expected="""Import successful
""",
            )
            connection = psycopg2.connect("user=postgres password=postgres dbname=tests host=db")
            cursor = connection.cursor()
            cursor.execute("SELECT ST_AsText(the_geom) FROM expired")
            geoms = [str(r[0]) for r in cursor.fetchall()]
            assert [geom_re] == geoms

            def parse_coord(coord: str) -> List[float]:
                """Parse a WKT "x y" coordinate pair into a [x, y] float list."""
                # Fixed: the previous annotation claimed Tuple[float, float] but a list is returned.
                coord_split = coord.split(" ")
                return [float(c) for c in coord_split]

            assert [parse_coord(e) for e in geom_re.match(geoms[0]).group(1).split(",")] == geom_coords
            # Re-running the import must be idempotent (--delete drops the previous rows).
            self.assert_cmd_equals(
                cmd=[
                    ".build/venv/bin/import_expiretiles",
                    "--create",
                    "--delete",
                    "--srid",
                    "21781",
                    "/tmp/expired",
                    "user=postgres password=postgres dbname=tests host=db",
                    "expired",
                    "the_geom",
                ],
                main_func=expiretiles.main,
                expected="""Import successful
""",
            )
            connection = psycopg2.connect("user=postgres password=postgres dbname=tests host=db")
            cursor = connection.cursor()
            cursor.execute("SELECT ST_AsText(the_geom) FROM expired")
            geoms = [str(r[0]) for r in cursor.fetchall()]
            assert [geom_re] == geoms
            assert [parse_coord(e) for e in geom_re.match(geoms[0]).group(1).split(",")] == geom_coords
            # Third run: default SRID (web mercator), default geometry column, with simplification.
            self.assert_cmd_equals(
                cmd=[
                    ".build/venv/bin/import_expiretiles",
                    "--simplify",
                    "1000",
                    "--create",
                    "--delete",
                    "/tmp/expired",
                    "user=postgres password=postgres dbname=tests host=db",
                    "expired2",
                ],
                main_func=expiretiles.main,
                expected="""Import successful
""",
            )
            connection = psycopg2.connect("user=postgres password=postgres dbname=tests host=db")
            cursor = connection.cursor()
            cursor.execute("SELECT ST_AsText(geom) FROM expired2")
            geoms = [str(r[0]) for r in cursor.fetchall()]
            # Same area, this time in the tiles' native EPSG:3857 coordinates.
            geom_coords = [
                pytest.approx([738534.567188568, 5862720.06865692], abs=1e-6),
                pytest.approx([738534.567188568, 5862567.19460037], abs=1e-6),
                pytest.approx([738381.693132021, 5862567.19460037], abs=1e-6),
                pytest.approx([738228.819075469, 5862567.19460037], abs=1e-6),
                pytest.approx([738075.945018921, 5862567.19460037], abs=1e-6),
                pytest.approx([738075.945018921, 5862720.06865692], abs=1e-6),
                pytest.approx([738075.945018921, 5862872.94271347], abs=1e-6),
                pytest.approx([738075.945018921, 5863025.81677002], abs=1e-6),
                pytest.approx([738228.819075469, 5863025.81677002], abs=1e-6),
                pytest.approx([738228.819075469, 5862872.94271347], abs=1e-6),
                pytest.approx([738381.693132021, 5862872.94271347], abs=1e-6),
                pytest.approx([738381.693132021, 5862720.06865692], abs=1e-6),
                pytest.approx([738534.567188568, 5862720.06865692], abs=1e-6),
            ]
            assert [geom_re] == geoms
            assert [parse_coord(e) for e in geom_re.match(geoms[0]).group(1).split(",")] == geom_coords
            # No warning or error should have been logged (level=30 captures WARNING+).
            log_capture.check()

    def test_expire_tiles_empty(self) -> None:
        """An empty expire file should report "No coords found" and leave the table empty."""
        with LogCapture("tilecloud_chain", level=30) as log_capture:
            self.assert_cmd_equals(
                cmd=[
                    ".build/venv/bin/import_expiretiles",
                    "--create",
                    "--delete",
                    "--srid",
                    "21781",
                    "/tmp/expired-empty",
                    "user=postgres password=postgres dbname=tests host=db",
                    "expired",
                    "the_geom",
                ],
                main_func=expiretiles.main,
                expected="""No coords found
""",
            )
            connection = psycopg2.connect("user=postgres password=postgres dbname=tests host=db")
            cursor = connection.cursor()
            cursor.execute("SELECT the_geom FROM expired")
            geoms = cursor.fetchall()
            self.assertEqual(len(geoms), 0)
|
{"/tilecloud_chain/copy_.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/format.py"], "/tilecloud_chain/controller.py": ["/tilecloud_chain/__init__.py"], "/tilecloud_chain/views/admin.py": ["/tilecloud_chain/server.py", "/tilecloud_chain/__init__.py", "/tilecloud_chain/controller.py"], "/tilecloud_chain/server.py": ["/tilecloud_chain/security.py", "/tilecloud_chain/__init__.py", "/tilecloud_chain/controller.py"], "/tilecloud_chain/expiretiles.py": ["/tilecloud_chain/__init__.py"], "/tilecloud_chain/tests/test_controller.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_config.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_expiretiles.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/__init__.py": ["/tilecloud_chain/security.py", "/tilecloud_chain/multitilestore.py", "/tilecloud_chain/timedtilestore.py"], "/tilecloud_chain/tests/test_generate.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_serve.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/server.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/internal_mapcache.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/generate.py", "/tilecloud_chain/server.py"], "/tilecloud_chain/tests/test_cost.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/cost.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/format.py"], "/tilecloud_chain/tests/test_copy.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/generate.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/database_logger.py", "/tilecloud_chain/format.py", "/tilecloud_chain/multitilestore.py", "/tilecloud_chain/timedtilestore.py", "/tilecloud_chain/mapnik_.py"], "/tilecloud_chain/tests/test_error.py": 
["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"]}
|
14,617
|
camptocamp/tilecloud-chain
|
refs/heads/master
|
/tilecloud_chain/__init__.py
|
import collections
import json
import logging
import logging.config
import math
import os
import pathlib
import pkgutil
import queue
import re
import sqlite3
import subprocess # nosec
import sys
import tempfile
import threading
import time
from argparse import ArgumentParser, Namespace
from concurrent.futures import ThreadPoolExecutor
from datetime import datetime, timedelta
from fractions import Fraction
from hashlib import sha1
from io import BytesIO
from itertools import product
from math import ceil, sqrt
from typing import (
IO,
TYPE_CHECKING,
Any,
Callable,
Dict,
Iterable,
Iterator,
List,
Optional,
TextIO,
Tuple,
TypedDict,
Union,
cast,
)
import boto3
import botocore.client
import c2cwsgiutils.pyramid_logging
import c2cwsgiutils.setup_process
import jsonschema_validator
import psycopg2
from c2cwsgiutils import sentry
from PIL import Image
from prometheus_client import Counter, Summary
from ruamel.yaml import YAML
from shapely.geometry.base import BaseGeometry
from shapely.geometry.polygon import Polygon
from shapely.ops import unary_union
from shapely.wkb import loads as loads_wkb
import tilecloud.filter.error
import tilecloud_chain.configuration
import tilecloud_chain.security
from tilecloud import BoundingPyramid, Tile, TileCoord, TileGrid, TileStore, consume
from tilecloud.filter.error import LogErrors, MaximumConsecutiveErrors
from tilecloud.filter.logger import Logger
from tilecloud.grid.free import FreeTileGrid
from tilecloud.layout.wmts import WMTSTileLayout
from tilecloud.store.azure_storage_blob import AzureStorageBlobTileStore
from tilecloud.store.filesystem import FilesystemTileStore
from tilecloud.store.mbtiles import MBTilesTileStore
from tilecloud.store.metatile import MetaTileSplitterTileStore
from tilecloud.store.redis import RedisTileStore
from tilecloud.store.s3 import S3TileStore
from tilecloud.store.sqs import SQSTileStore, maybe_stop
from tilecloud_chain.multitilestore import MultiTileStore
from tilecloud_chain.timedtilestore import TimedTileStoreWrapper
# Module-level logger for the whole tilecloud_chain package.
_LOGGER = logging.getLogger(__name__)
# Prometheus counter, incremented for every tile that ends up in error.
_ERROR_COUNTER = Counter("tilecloud_chain_error_counter", "Number of errors", ["layer", "host"])
# Prometheus summary timing the geometry-filter lookups.
_GEOMS_GET_SUMMARY = Summary("tilecloud_chain_geoms_get", "Geoms filter get", ["layer", "host"])
def formated_metadata(tile: Tile) -> str:
    """Return the tile metadata as a sorted, human readable "key=value" string."""
    meta = dict(tile.metadata)
    # The "tiles" entry maps tile coords to Tile objects; only the keys are readable.
    if "tiles" in meta:
        meta["tiles"] = meta["tiles"].keys()  # type: ignore
    return " ".join(f"{key}={meta[key]}" for key in sorted(meta))


# Expose the helper as a read-only property on every Tile instance.
setattr(Tile, "formated_metadata", property(formated_metadata))
def add_common_options(
    parser: ArgumentParser,
    tile_pyramid: bool = True,
    no_geom: bool = True,
    near: bool = True,
    time: bool = True,  # pylint: disable=redefined-outer-name
    dimensions: bool = False,
    cache: bool = True,
    default_config_file: bool = False,
) -> None:
    """
    Get the options used by some commands.

    Arguments:
        parser: the argument parser to fill
        tile_pyramid: add the tile pyramid options (--bbox, --zoom, --test)
        no_geom: add the --no-geom option
        near: add the --near option
        time: add the --time option
        dimensions: add the --dimensions option
        cache: add the --cache option
        default_config_file: use the default configuration file name as the --config default
    """
    # Standard c2cwsgiutils process options (logging, etc.).
    c2cwsgiutils.setup_process.fill_arguments(parser)
    parser.add_argument(
        "-c",
        "--config",
        default=os.environ.get("TILEGENERATION_CONFIGFILE", "tilegeneration/config.yaml")
        if default_config_file
        else None,
        help="path to the configuration file",
        metavar="FILE",
    )
    parser.add_argument(
        "--host", help="the host name used in JSON logs and in the Prometheus stats", default="localhost"
    )
    parser.add_argument(
        "--ignore-error",
        action="store_true",
        help="continue if there is an error in the configuration",
    )
    parser.add_argument("-l", "--layer", metavar="NAME", help="the layer to generate")
    if tile_pyramid:
        parser.add_argument(
            "-b",
            "--bbox",
            nargs=4,
            type=float,
            metavar=("MINX", "MINY", "MAXX", "MAXY"),
            help="restrict to specified bounding box",
        )
        parser.add_argument(
            "-z",
            "--zoom",
            help="restrict to specified zoom level, or a zooms range (2-5), or a zooms list (2,4,5)",
        )
        parser.add_argument(
            "-t", "--test", type=int, help="test with generating N tiles, and add log messages", metavar="N"
        )
    if near:
        parser.add_argument(
            "--near",
            type=float,
            nargs=2,
            metavar=("X", "Y"),
            help="This option is a good replacement of --bbox, to used with "
            "--time or --test and --zoom, implies --no-geom. "
            "It automatically measure a bbox around the X Y position that corresponds to the metatiles.",
        )
    if time:
        parser.add_argument(
            "--time",
            "--measure-generation-time",
            dest="time",
            metavar="N",
            type=int,
            help="Measure the generation time by creating N tiles to warm-up, "
            "N tile to do the measure and N tiles to slow-down",
        )
    if no_geom:
        # Note: store_false with default=True -> options.geom is True unless --no-geom is given.
        parser.add_argument(
            "--no-geom",
            default=True,
            action="store_false",
            dest="geom",
            help="Don't the geometry available in the SQL",
        )
    if dimensions:
        parser.add_argument(
            "--dimensions",
            nargs="+",
            metavar="DIMENSION=VALUE",
            default=[],
            help="overwrite the dimensions values specified in the config file",
        )
    if cache:
        parser.add_argument("--cache", dest="cache", metavar="NAME", help="The cache name to use")
    parser.add_argument("-q", "--quiet", default=False, action="store_true", help="Display only errors.")
    parser.add_argument("-v", "--verbose", default=False, action="store_true", help="Display info message.")
    parser.add_argument(
        "-d",
        "--debug",
        default=False,
        action="store_true",
        help="Display debug message, and stop on first error.",
    )
def get_tile_matrix_identifier(
    grid: tilecloud_chain.configuration.Grid, resolution: Optional[float] = None, zoom: Optional[int] = None
) -> str:
    """
    Get the WMTS tile matrix identifier for a zoom level of the grid.

    Depending on the grid's "matrix_identifier" setting, the identifier is either
    the zoom level itself or the resolution (with "." replaced by "_").
    """
    if grid is None or grid["matrix_identifier"] == "zoom":
        return str(zoom)
    assert zoom is not None
    if resolution is None:
        resolution = grid["resolutions"][zoom]
    whole = int(resolution)
    # Integral resolutions are rendered without a decimal part.
    return str(whole) if whole == resolution else str(resolution).replace(".", "_")
class Run:
    """
    Run the tile generation.

    Add some logs.
    Manage the max_consecutive_errors.
    """

    # Strips XML tags and newlines from WMS error payloads.
    _re_rm_xml_tag = re.compile("(<[^>]*>|\n)")

    def __init__(
        self,
        gene: "TileGeneration",
        functions: List[Callable[[Tile], Tile]],
    ) -> None:
        self.gene = gene
        # The pipeline of tile-processing functions, applied in order.
        self.functions = functions
        # In safe mode (no --debug), exceptions are captured on the tile instead of raised.
        self.safe = gene.options is None or not gene.options.debug
        daemon = gene.options is not None and getattr(gene.options, "daemon", False)
        # A daemon must never stop on consecutive errors, hence no limit in that case.
        self.max_consecutive_errors = (
            MaximumConsecutiveErrors(gene.get_main_config().config["generation"]["maxconsecutive_errors"])
            if not daemon and gene.maxconsecutive_errors
            else None
        )
        # Total number of tiles in error, protected by error_lock for multi-thread use.
        self.error = 0
        self.error_lock = threading.Lock()
        self.error_logger = LogErrors(
            _LOGGER, logging.ERROR, "Error in tile: %(tilecoord)s, %(formated_metadata)s, %(error)r"
        )

    def __call__(self, tile: Optional[Tile]) -> Optional[Tile]:
        """Run the whole pipeline on one tile; return the tile, or None when it is dropped."""
        if tile is None:
            return None
        # Register the tile in its metatile's "tiles" mapping when present.
        if "tiles" in tile.metadata:
            tile.metadata["tiles"][tile.tilecoord] = tile  # type: ignore
        tilecoord = tile.tilecoord
        _LOGGER.debug("[%s] Metadata: %s", tilecoord, tile.formated_metadata)
        for func in self.functions:
            try:
                _LOGGER.debug("[%s] Run: %s", tilecoord, func)
                n = datetime.now()
                if self.safe:
                    try:
                        tile = func(tile)
                    except Exception as e:
                        # Capture the exception on the tile so the error path below handles it.
                        _LOGGER.exception("[%s] Fail to process function %s", tilecoord, func)
                        tile.error = e
                else:
                    tile = func(tile)
                _LOGGER.debug("[%s] %s in %s", tilecoord, func.time_message if getattr(func, "time_message", None) is not None else func, str(datetime.now() - n))  # type: ignore
                if tile is None:
                    # A function may drop the tile (e.g. a filter).
                    _LOGGER.debug("[%s] Drop", tilecoord)
                    return None
                if tile.error:
                    # WMS XML error payloads are flattened to a readable one-line message.
                    if tile.content_type and tile.content_type.startswith("application/vnd.ogc.se_xml"):
                        assert isinstance(tile.error, str)
                        tile.error = f"WMS server error: {self._re_rm_xml_tag.sub('', tile.error)}"
                    _LOGGER.warning("Error with tile %s:\n%s", tile.tilecoord, tile.error)
                    _ERROR_COUNTER.labels(
                        tile.metadata.get("layer", "none"), tile.metadata.get("host", "none")
                    ).inc()
                    if "error_file" in self.gene.get_main_config().config["generation"]:
                        self.gene.log_tiles_error(tile=tile, message=repr(tile.error))
                    if self.max_consecutive_errors is not None:
                        self.max_consecutive_errors(tile)
                    # Acknowledge the tile so the queue does not redeliver it.
                    if self.gene.queue_store is not None:
                        self.gene.queue_store.delete_one(tile)
                    with self.error_lock:
                        self.error += 1
                    # Stop the pipeline for this tile but return it (not dropped).
                    return tile
            except Exception:
                _LOGGER.debug("Run error", stack_info=True)
                raise
        # Successful tile: reset the consecutive-errors counter.
        if self.max_consecutive_errors is not None:
            self.max_consecutive_errors(tile)
        return tile
class Close:
    """Deferred closer: wraps an object and closes it when called."""

    def __init__(self, db: Any) -> None:
        # Keep a reference so the connection can be closed later (e.g. on shutdown).
        self.db = db

    def __call__(self) -> None:
        """Close the wrapped object."""
        self.db.close()
class Legend(TypedDict, total=False):
    """Legend fields."""

    # MIME type of the legend image (e.g. "image/png").
    mime_type: str
    # URL of the legend image.
    href: str
    # Upper bound of the resolution range where this legend applies.
    max_resolution: float
    # Lower bound of the resolution range where this legend applies.
    min_resolution: float
    # Legend image width, in pixels.
    width: int
    # Legend image height, in pixels.
    height: int
class DatedConfig:
    """Loaded config with timestamps to be able to invalidate it on configuration file change."""

    def __init__(self, config: tilecloud_chain.configuration.Configuration, mtime: float, file: str) -> None:
        # The parsed and validated configuration content.
        self.config = config
        # Modification time of the configuration file when it was loaded.
        self.mtime = mtime
        # Path of the configuration file.
        self.file = file
class DatedGeoms:
    """Geoms with timestamps to be able to invalidate it on configuration change."""

    def __init__(self, geoms: Dict[Union[str, int], BaseGeometry], mtime: float) -> None:
        # Geometry per zoom level (or per key).
        self.geoms = geoms
        # Modification time of the source when the geometries were built.
        self.mtime = mtime
class DatedTileGrid:
    """TileGrid with timestamps to be able to invalidate it on configuration change."""

    def __init__(self, grid: TileGrid, mtime: float) -> None:
        # The built tile grid.
        self.grid = grid
        # Modification time of the configuration when the grid was built.
        self.mtime = mtime
class DatedHosts:
    """Hosts mapping with the source file's mtime, to invalidate it when the file changes."""

    def __init__(self, hosts: Dict[str, str], mtime: float) -> None:
        # Mapping host name -> configuration file name.
        self.hosts = hosts
        # Modification time of the hosts file when it was loaded.
        self.mtime = mtime
class MissingErrorFileException(Exception):
    """Missing error file exception."""
class LoggingInformation(TypedDict):
    """Logging information."""

    # Host name the current request/generation is for.
    host: Optional[str]
    # Layer name being generated.
    layer: Optional[str]
    # String representation of the metatile coordinate.
    meta_tilecoord: str


# Per-process, per-thread logging context, consumed by TileFilter to enrich log records.
LOGGING_CONTEXT: Dict[int, Dict[int, LoggingInformation]] = {}
class JsonLogHandler(c2cwsgiutils.pyramid_logging.JsonLogHandler):
    """Log to stdout in JSON."""

    def __init__(self, stream: Optional[TextIO] = None):
        super().__init__(stream)
        # Enrich every record with the tile generation context (host/layer/metatile).
        self.addFilter(TileFilter())
class TileFilter(logging.Filter):
    """A logging filter that adds request information to CEE logs."""

    def filter(self, record: Any) -> bool:
        """Attach the running thread's tile context to the record; never drop the record."""
        current_thread_id = threading.current_thread().native_id
        assert current_thread_id is not None
        per_process = LOGGING_CONTEXT.get(os.getpid(), {})
        log_info = per_process.get(current_thread_id)
        if log_info is not None:
            record.tcc_host = log_info["host"]
            record.tcc_layer = log_info["layer"]
            record.tcc_meta_tilecoord = log_info["meta_tilecoord"]
        return True
class TileGeneration:
    """Base class of all the tile generation."""

    # Stream of tiles to be processed, built by the init helpers.
    tilestream: Optional[Iterator[Tile]] = None
    # Accumulated generation duration (used by the --time measurements).
    duration: timedelta = timedelta()
    # Number of tiles in error.
    error = 0
    # Optional queue store (e.g. SQS or Redis) the tiles are consumed from.
    queue_store: Optional[TileStore] = None
    # True when running as a daemon (endless consume loop).
    daemon = False
    def __init__(
        self,
        config_file: Optional[str] = None,
        options: Optional[Namespace] = None,
        layer_name: Optional[str] = None,
        base_config: Optional[tilecloud_chain.configuration.Configuration] = None,
        configure_logging: bool = True,
        multi_thread: bool = True,
        maxconsecutive_errors: bool = True,
    ):
        """
        Initialize the tile generation.

        Arguments:
            config_file: path to the main configuration file, if any
            options: the parsed command line options (a default all-off namespace is built when absent)
            layer_name: the layer to generate, used to open the error file
            base_config: configuration values merged under the file content
            configure_logging: configure the Python logging from the environment variables
            multi_thread: allow multi-threaded generation
            maxconsecutive_errors: enable the maximum-consecutive-errors protection
        """
        # Per-config-file caches, invalidated through the Dated* wrappers.
        self.geoms_cache: Dict[str, Dict[str, DatedGeoms]] = {}
        self._close_actions: List["Close"] = []
        self.error_lock = threading.Lock()
        self.error_files_: Dict[str, TextIO] = {}
        # Two separate pipelines: one applied per tile, one applied per metatile.
        self.functions_tiles: List[Callable[[Tile], Tile]] = []
        self.functions_metatiles: List[Callable[[Tile], Tile]] = []
        self.functions = self.functions_metatiles
        self.metatilesplitter_thread_pool: Optional[ThreadPoolExecutor] = None
        self.multi_thread = multi_thread
        self.maxconsecutive_errors = maxconsecutive_errors
        self.grid_cache: Dict[str, Dict[str, DatedTileGrid]] = {}
        self.layer_legends: Dict[str, List[Legend]] = {}
        self.config_file = config_file
        self.base_config = base_config
        self.configs: Dict[str, DatedConfig] = {}
        self.hosts_cache: Optional[DatedHosts] = None
        # When no options are given, build a namedtuple with every option disabled.
        self.options: Namespace = options or collections.namedtuple(  # type: ignore
            "Options",
            ["verbose", "debug", "quiet", "bbox", "zoom", "test", "near", "time", "geom", "ignore_error"],
        )(
            False, False, False, None, None, None, None, None, True, False  # type: ignore
        )
        del options
        # Fill the options that may be missing from a partially-filled namespace.
        if not hasattr(self.options, "bbox"):
            self.options.bbox = None
        if not hasattr(self.options, "zoom"):
            self.options.zoom = None
        if not hasattr(self.options, "test"):
            self.options.test = None
        if not hasattr(self.options, "near"):
            self.options.near = None
        if not hasattr(self.options, "time"):
            self.options.time = None
        if not hasattr(self.options, "geom"):
            self.options.geom = True
        if not hasattr(self.options, "ignore_error"):
            self.options.ignore_error = False
        if configure_logging:
            # On CI the *_LOG_LEVEL environment variables are not set, so skip dictConfig.
            if os.environ.get("CI", "false").lower() != "true":
                ###
                # logging configuration
                # https://docs.python.org/3/library/logging.config.html#logging-config-dictschema
                ###
                logging.config.dictConfig(
                    {
                        "version": 1,
                        "root": {
                            "level": os.environ["OTHER_LOG_LEVEL"],
                            "handlers": [os.environ["LOG_TYPE"]],
                        },
                        "loggers": {
                            "gunicorn.error": {"level": os.environ["GUNICORN_LOG_LEVEL"]},
                            # "level = INFO" logs SQL queries.
                            # "level = DEBUG" logs SQL queries and results.
                            # "level = WARN" logs neither. (Recommended for production systems.)
                            "sqlalchemy.engine": {"level": os.environ["SQL_LOG_LEVEL"]},
                            "c2cwsgiutils": {"level": os.environ["C2CWSGIUTILS_LOG_LEVEL"]},
                            "tilecloud": {"level": os.environ["TILECLOUD_LOG_LEVEL"]},
                            "tilecloud_chain": {"level": os.environ["TILECLOUD_CHAIN_LOG_LEVEL"]},
                        },
                        "handlers": {
                            "console": {
                                "class": "logging.StreamHandler",
                                "formatter": "generic",
                                "stream": "ext://sys.stdout",
                            },
                            "json": {
                                "class": "c2cwsgiutils.pyramid_logging.JsonLogHandler",
                                "formatter": "generic",
                                "stream": "ext://sys.stdout",
                            },
                        },
                        "formatters": {
                            "generic": {
                                "format": "%(asctime)s [%(process)d] [%(levelname)-5.5s] %(message)s",
                                "datefmt": "[%Y-%m-%d %H:%M:%S %z]",
                                "class": "logging.Formatter",
                            }
                        },
                    }
                )
            # NOTE(review): placed under `configure_logging` (outside the CI guard) — confirm
            # against upstream, the dump lost the original indentation.
            sentry.includeme()
        assert "generation" in self.get_main_config().config, self.get_main_config().config
        error = False
        # Parse the --zoom option: single value, range ("3-9") or list ("2,5,7").
        if self.options is not None and self.options.zoom is not None:
            error_message = (
                f"The zoom argument '{self.options.zoom}' has incorrect format, "
                "it can be a single value, a range (3-9), a list of values (2,5,7)."
            )
            if self.options.zoom.find("-") >= 0:
                splitted_zoom: List[str] = self.options.zoom.split("-")
                if len(splitted_zoom) != 2:
                    _LOGGER.error(error_message)
                    error = True
                try:
                    self.options.zoom = range(int(splitted_zoom[0]), int(splitted_zoom[1]) + 1)
                except ValueError:
                    _LOGGER.exception(error_message)
                    error = True
            elif self.options.zoom.find(",") >= 0:
                try:
                    self.options.zoom = [int(z) for z in self.options.zoom.split(",")]
                except ValueError:
                    _LOGGER.exception(error_message)
                    error = True
            else:
                try:
                    self.options.zoom = [int(self.options.zoom)]
                except ValueError:
                    _LOGGER.exception(error_message)
                    error = True
        if error:
            sys.exit(1)
        if layer_name and self.config_file:
            assert layer_name is not None
            self.create_log_tiles_error(layer_name)
def get_host_config_file(self, host: Optional[str]) -> Optional[str]:
"""Get the configuration file name for the given host."""
if self.config_file:
return self.config_file
assert host
if host not in self.get_hosts():
_LOGGER.error("Missing host '%s' in global config", host)
return None
config_file = self.get_hosts().get(host, os.environ.get("TILEGENERATION_CONFIGFILE"))
_LOGGER.debug("For the host %s, use config file: %s", host, config_file)
return config_file
def get_host_config(self, host: Optional[str]) -> DatedConfig:
"""Get the configuration for the given host."""
config_file = self.get_host_config_file(host)
if not config_file:
_LOGGER.error("No config file for host %s", host)
return (
self.get_config(config_file)
if config_file
else DatedConfig(cast(tilecloud_chain.configuration.Configuration, {}), 0, "")
)
    def get_tile_config(self, tile: Tile) -> DatedConfig:
        """Get the configuration used to generate the given tile."""
        return self.get_config(tile.metadata["config_file"])
    def get_config(
        self,
        config_file: str,
        ignore_error: bool = True,
        base_config: Optional[tilecloud_chain.configuration.Configuration] = None,
    ) -> DatedConfig:
        """
        Get the validated configuration for the file name, with cache management.

        The cache entry is invalidated when the file's mtime changes. When the file
        is missing or invalid, either an empty configuration is returned
        (ignore_error=True) or the process exits.
        """
        assert config_file
        config_path = pathlib.Path(config_file)
        if not config_path.exists():
            _LOGGER.error("Missing config file %s", config_file)
            if ignore_error:
                return DatedConfig(cast(tilecloud_chain.configuration.Configuration, {}), 0, "")
            else:
                sys.exit(1)
        # Cache hit only when the file was not modified since it was loaded.
        config: Optional[DatedConfig] = self.configs.get(config_file)
        if config is not None and config.mtime == config_path.stat().st_mtime:
            return config
        config, success = self._get_config(config_file, ignore_error, base_config)
        if not success or config is None:
            if ignore_error:
                config = DatedConfig(cast(tilecloud_chain.configuration.Configuration, {}), 0, "")
            else:
                sys.exit(1)
        self.configs[config_file] = config
        return config
    def get_main_config(self) -> DatedConfig:
        """Get the main configuration (from the env var, or from the configured file)."""
        if "TILEGENERATION_MAIN_CONFIGFILE" in os.environ and os.environ["TILEGENERATION_MAIN_CONFIGFILE"]:
            # The main config file must be valid: errors are fatal here.
            return self.get_config(os.environ["TILEGENERATION_MAIN_CONFIGFILE"], False)
        elif self.config_file:
            return self.get_config(self.config_file, self.options.ignore_error, self.base_config)
        else:
            _LOGGER.error("No provided configuration file")
            return DatedConfig({}, 0, "")
    def get_hosts(self, silent: bool = False) -> Dict[str, str]:
        """
        Get the hosts from the hosts file (TILEGENERATION_HOSTSFILE), with mtime-based caching.

        Returns a mapping host name -> configuration file name; empty when the file is missing.
        """
        file_path = pathlib.Path(os.environ["TILEGENERATION_HOSTSFILE"])
        if not file_path.exists():
            if not silent:
                _LOGGER.error("Missing hosts file %s", file_path)
            return {}
        # Reuse the cached mapping while the file is unchanged.
        if self.hosts_cache is not None and self.hosts_cache.mtime == file_path.stat().st_mtime:
            return self.hosts_cache.hosts
        with file_path.open(encoding="utf-8") as hosts_file:
            ruamel = YAML(typ="safe")
            hosts = {}
            hosts_raw = ruamel.load(hosts_file)
            # Two supported layouts: {"sources": {key: "file" | {host: file, ...}}} or a flat mapping.
            if "sources" in hosts_raw:
                for key, value in hosts_raw["sources"].items():
                    if isinstance(value, str):
                        hosts[key] = value
                    else:
                        hosts.update(value)
            else:
                hosts = hosts_raw
        self.hosts_cache = DatedHosts(hosts, file_path.stat().st_mtime)
        return hosts
    def _get_config(
        self,
        config_file: str,
        ignore_error: bool,
        base_config: Optional[tilecloud_chain.configuration.Configuration] = None,
    ) -> Tuple[DatedConfig, bool]:
        """Get the validated configuration for the file name; return (config, success)."""
        with open(config_file, encoding="utf-8") as f:
            # The file content is merged over the optional base configuration.
            config: Dict[str, Any] = {}
            config.update({} if base_config is None else base_config)
            ruamel = YAML()
            config.update(ruamel.load(f))
        dated_config = DatedConfig(
            cast(tilecloud_chain.configuration.Configuration, config),
            pathlib.Path(config_file).stat().st_mtime,
            config_file,
        )
        success = self.validate_config(dated_config, ignore_error)
        return dated_config, success
def validate_config(self, config: DatedConfig, ignore_error: bool) -> bool:
"""Validate the configuration."""
# Generate base structure
if "defaults" in config.config:
del config.config["defaults"]
if "generation" not in config.config:
config.config["generation"] = {}
if "cost" in config.config:
if "s3" not in config.config["cost"]:
config.config["cost"]["s3"] = {}
if "cloudfront" not in config.config["cost"]:
config.config["cost"]["cloudfront"] = {}
if "sqs" not in config.config["cost"]:
config.config["cost"]["sqs"] = {}
schema_data = pkgutil.get_data("tilecloud_chain", "schema.json")
assert schema_data
errors, _ = jsonschema_validator.validate(
config.file, cast(Dict[str, Any], config.config), json.loads(schema_data), default=True
)
if errors:
_LOGGER.error("The config file is invalid:\n%s", "\n".join(errors))
if not (
ignore_error
or os.environ.get("TILEGENERATION_IGNORE_CONFIG_ERROR", "FALSE").lower() == "true"
):
sys.exit(1)
error = False
grids = config.config.get("grids", {})
for grid in grids.values():
if "resolution_scale" in grid:
scale = grid["resolution_scale"]
for resolution in grid["resolutions"]:
if resolution * scale % 1 != 0.0:
_LOGGER.error(
"The resolution %s * resolution_scale %s is not an integer.", resolution, scale
)
error = True
else:
grid["resolution_scale"] = self._resolution_scale(grid["resolutions"])
srs = int(grid["srs"].split(":")[1])
if "proj4_literal" not in grid:
if srs == 3857:
grid["proj4_literal"] = (
"+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 "
"+x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over"
)
elif srs == 21781:
grid["proj4_literal"] = (
"+proj=somerc +lat_0=46.95240555555556 +lon_0=7.439583333333333 +k_0=1 "
"+x_0=600000 +y_0=200000 +ellps=bessel +towgs84=674.374,15.056,405.346,0,0,0,0 "
"+units=m +no_defs"
)
elif srs == 2056:
grid["proj4_literal"] = (
"+proj=somerc +lat_0=46.95240555555556 +lon_0=7.439583333333333 +k_0=1 "
"+x_0=2600000 +y_0=1200000 +ellps=bessel +towgs84=674.374,15.056,405.346,0,0,0,0 "
"+units=m +no_defs"
)
else:
grid["proj4_literal"] = f"+init={grid['srs']}"
layers = config.config.get("layers", {})
for lname, layer in sorted(layers.items()):
if "headers" not in layer and layer["type"] == "wms":
layer["headers"] = {
"Cache-Control": "no-cache, no-store",
"Pragma": "no-cache",
}
if layer["type"] == "mapnik" and layer.get("output_format", "png") == "grid" and layer["meta"]:
_LOGGER.error("The layer '%s' is of type Mapnik/Grid, that can't support matatiles.", lname)
error = True
if error:
if not (
ignore_error
or os.environ.get("TILEGENERATION_IGNORE_CONFIG_ERROR", "FALSE").lower() == "true"
):
sys.exit(1)
return not (error or errors)
    def init(self, queue_store: Optional[TileStore] = None, daemon: bool = False) -> None:
        """Initialize the generation: the source queue store and the daemon flag."""
        self.queue_store = queue_store
        self.daemon = daemon
@staticmethod
def _primefactors(x: int) -> List[int]:
factorlist = []
loop = 2
while loop <= x:
if x % loop == 0:
x = round(x / loop)
factorlist.append(loop)
else:
loop += 1
return factorlist
def _resolution_scale(self, resolutions: Union[List[float], List[int]]) -> int:
prime_fact = {}
for resolution in resolutions:
denominator = Fraction(str(resolution)).denominator
prime_factors = self._primefactors(denominator)
for factor in set(prime_factors):
if factor not in prime_fact:
prime_fact[factor] = 0
prime_fact[factor] = max(prime_fact[factor], len([f for f in prime_factors if f == factor]))
result = 1
for fact, nb in prime_fact.items():
result *= fact**nb
return result
def get_all_dimensions(self, layer: tilecloud_chain.configuration.Layer) -> List[Dict[str, str]]:
assert layer is not None
options_dimensions = {}
for opt_dim in self.options.dimensions:
opt_dim = opt_dim.split("=")
if len(opt_dim) != 2:
sys.exit("the DIMENSIONS option should be like this DATE=2013 VERSION=13.")
options_dimensions[opt_dim[0]] = opt_dim[1]
all_dimensions = [
[(dim["name"], d) for d in dim["generate"]]
for dim in layer.get("dimensions", [])
if dim["name"] not in options_dimensions
]
all_dimensions += [[p] for p in options_dimensions.items()]
return [{}] if len(all_dimensions) == 0 else [dict(d) for d in product(*all_dimensions)]
    def get_store(
        self,
        config: DatedConfig,
        cache: tilecloud_chain.configuration.Cache,
        layer_name: str,
        read_only: bool = False,
    ) -> TileStore:
        """
        Build the tile store for the given cache and layer.

        Supported cache types: "s3", "azure", "mbtiles", "bsddb" and "filesystem";
        any other type exits the process. ``read_only`` only affects the bsddb
        open mode.
        """
        layer = config.config["layers"][layer_name]
        grid = config.config["grids"][layer["grid"]]
        # WMTS REST layout used by all the stores to address the tiles.
        layout = WMTSTileLayout(
            layer=layer_name,
            url=cache["folder"],
            style=layer["wmts_style"],
            format="." + layer["extension"],
            dimensions_name=[dimension["name"] for dimension in layer.get("dimensions", [])],
            tile_matrix_set=layer["grid"],
            tile_matrix=lambda z: get_tile_matrix_identifier(grid, zoom=z),
            request_encoding="REST",
        )
        # store
        if cache["type"] == "s3":
            cache_s3 = cast(tilecloud_chain.configuration.CacheS3, cache)
            # on s3
            cache_tilestore: TileStore = S3TileStore(
                cache_s3["bucket"],
                layout,
                s3_host=cache.get("host", "s3-eu-west-1.amazonaws.com"),
                cache_control=cache.get("cache_control"),
            )
        elif cache["type"] == "azure":
            cache_azure = cast(tilecloud_chain.configuration.CacheAzureTyped, cache)
            # on Azure
            cache_tilestore = AzureStorageBlobTileStore(
                container=cache_azure["container"],
                tilelayout=layout,
                cache_control=cache_azure.get("cache_control"),
            )
        elif cache["type"] == "mbtiles":
            # Default dimensions values become part of the file name.
            metadata = {}
            for dimension in layer["dimensions"]:
                metadata["dimension_" + dimension["name"]] = dimension["default"]
            # on mbtiles file
            filename = (
                layout.filename(TileCoord(0, 0, 0), metadata=metadata).replace("/0/0/0", "") + ".mbtiles"
            )
            if not os.path.exists(os.path.dirname(filename)):
                os.makedirs(os.path.dirname(filename))
            cache_tilestore = MBTilesTileStore(
                sqlite3.connect(filename),
                content_type=layer["mime_type"],
                tilecoord_in_topleft=True,
            )
        elif cache["type"] == "bsddb":
            metadata = {}
            for dimension in layer["dimensions"]:
                metadata["dimension_" + dimension["name"]] = dimension["default"]
            # bsddb3 is an optional dependency, imported only when this cache type is used.
            import bsddb3 as bsddb  # pylint: disable=import-outside-toplevel,import-error
            from tilecloud.store.bsddb import BSDDBTileStore  # pylint: disable=import-outside-toplevel

            # on bsddb file
            filename = layout.filename(TileCoord(0, 0, 0), metadata=metadata).replace("/0/0/0", "") + ".bsddb"
            if not os.path.exists(os.path.dirname(filename)):
                os.makedirs(os.path.dirname(filename))
            db = bsddb.hashopen(
                filename,
                # and os.path.exists(filename) to avoid error on non existing file
                "r" if read_only and os.path.exists(filename) else "c",
            )
            # Ensure the database is closed when the generation finishes.
            self._close_actions.append(Close(db))
            cache_tilestore = BSDDBTileStore(
                db,
                content_type=layer["mime_type"],
            )
        elif cache["type"] == "filesystem":
            # on filesystem
            cache_tilestore = FilesystemTileStore(
                layout,
                content_type=layer["mime_type"],
            )
        else:
            sys.exit("unknown cache type: " + cache["type"])
        return cache_tilestore
@staticmethod
def get_grid_name(
config: DatedConfig, layer: tilecloud_chain.configuration.Layer, name: Optional[Any] = None
) -> tilecloud_chain.configuration.Grid:
if name is None:
name = layer["grid"]
return config.config["grids"][name]
def get_tilesstore(self, cache: Optional[str] = None) -> TimedTileStoreWrapper:
gene = self
def get_store(config_file: str, layer_name: str) -> TileStore:
config = gene.get_config(config_file)
cache_name = cache or config.config["generation"]["default_cache"]
cache_obj = config.config["caches"][cache_name]
return self.get_store(config, cache_obj, layer_name)
cache_tilestore = TimedTileStoreWrapper(
MultiTileStore(get_store),
store_name="store",
)
return cache_tilestore
def add_geom_filter(self) -> None:
self.imap(IntersectGeometryFilter(gene=self), "Intersect with geom")
def add_logger(self) -> None:
if (
not self.options.quiet
and not self.options.verbose
and not self.options.debug
and os.environ.get("FRONTEND") != "noninteractive"
):
def log_tiles(tile: Tile) -> Tile:
sys.stdout.write(f"{tile.tilecoord} {tile.formated_metadata} \r")
sys.stdout.flush()
return tile
self.imap(log_tiles)
elif not self.options.quiet and getattr(self.options, "role", None) != "server":
self.imap(Logger(_LOGGER, logging.INFO, "%(tilecoord)s, %(formated_metadata)s"))
    def add_metatile_splitter(self, store: Optional[TileStore] = None) -> None:
        """
        Insert the step that splits generated metatiles into individual tiles.

        store: optional pre-built splitter store; when None a per-layer
            MetaTileSplitterTileStore is resolved from the configuration.

        After this call ``self.functions`` points to the tile-level pipeline
        (``functions_tiles``), so steps added later apply to single tiles.
        """
        assert self.functions != self.functions_tiles, "add_metatile_splitter should not be called twice"
        if store is None:
            gene = self

            def get_splitter(config_file: str, layer_name: str) -> Optional[MetaTileSplitterTileStore]:
                # Only layers configured with "meta" get a splitter.
                config = gene.get_config(config_file)
                layer = config.config["layers"][layer_name]
                if layer.get("meta"):
                    return MetaTileSplitterTileStore(
                        layer["mime_type"],
                        config.config["grids"][layer["grid"]]["tile_size"],
                        layer["meta_buffer"],
                    )
                return None

            store = TimedTileStoreWrapper(MultiTileStore(get_splitter), store_name="splitter")

        # Runner for the tile-level pipeline, applied to every split-out tile.
        run = Run(self, self.functions_tiles)
        nb_thread = int(os.environ.get("TILE_NB_THREAD", "1"))
        if nb_thread == 1 or not self.multi_thread:

            def meta_get(metatile: Tile) -> Tile:
                assert store is not None
                substream = store.get((metatile,))

                if getattr(self.options, "role", "") == "hash":
                    # Hash mode only needs a single sample tile from the metatile.
                    tile = next(substream)
                    assert tile is not None
                    run(tile)
                else:
                    for tile in substream:
                        assert tile is not None
                        # Propagate the metatile's metadata (layer, host, ...) to each tile.
                        tile.metadata.update(metatile.metadata)
                        run(tile)
                with self.error_lock:
                    self.error += run.error
                return metatile

        else:

            def meta_get(metatile: Tile) -> Tile:
                assert store is not None
                # The thread pool is created lazily on first use.
                if self.metatilesplitter_thread_pool is None:
                    self.metatilesplitter_thread_pool = ThreadPoolExecutor(nb_thread)
                substream = store.get((metatile,))
                # map() is drained for its side effects only.
                for _ in self.metatilesplitter_thread_pool.map(
                    run, substream, chunksize=int(os.environ.get("TILE_CHUNK_SIZE", "1"))
                ):
                    pass
                with self.error_lock:
                    self.error += run.error
                return metatile

        self.imap(meta_get)
        # From now on, added functions apply to individual tiles.
        self.functions = self.functions_tiles
def create_log_tiles_error(self, layer: str) -> Optional[TextIO]:
if "error_file" in self.get_main_config().config.get("generation", {}):
now = datetime.now()
time_ = now.strftime("%d-%m-%Y %H:%M:%S")
error_file = open( # pylint: disable=consider-using-with
self.get_main_config().config["generation"]["error_file"].format(layer=layer, datetime=now),
"a",
encoding="utf-8",
)
error_file.write(f"# [{time_}] Start the layer '{layer}' generation\n")
self.error_files_[layer] = error_file
return error_file
return None
def close(self) -> None:
for file_ in self.error_files_.values():
file_.close()
def get_log_tiles_error_file(self, layer: str) -> Optional[TextIO]:
return self.error_files_[layer] if layer in self.error_files_ else self.create_log_tiles_error(layer)
def log_tiles_error(self, tile: Optional[Tile] = None, message: Optional[str] = None) -> None:
if tile is None:
return
config = self.get_tile_config(tile)
if "error_file" in config.config["generation"]:
assert tile is not None
time_ = datetime.now().strftime("%d-%m-%Y %H:%M:%S")
if self.get_log_tiles_error_file(tile.metadata["layer"]) is None:
raise MissingErrorFileException("Missing error file")
tilecoord = "" if tile.tilecoord is None else f"{tile.tilecoord} {tile.formated_metadata} "
message = "" if message is None else f" {message}"
io = self.get_log_tiles_error_file(tile.metadata["layer"])
assert io is not None
out_message = message.replace("\n", " ")
io.write(f"{tilecoord}# [{time_}]{out_message}\n")
    def get_grid(self, config: DatedConfig, grid_name: str) -> TileGrid:
        """
        Return the TileGrid for ``grid_name``, cached per config file.

        The cache entry is invalidated when the configuration file changes
        (different ``config.mtime``).
        """
        dated_grid = self.grid_cache.get(config.file, {}).get(grid_name)
        if dated_grid is not None and config.mtime == dated_grid.mtime:
            return dated_grid.grid

        grid = config.config["grids"][grid_name]
        scale = grid["resolution_scale"]
        # Resolutions are stored pre-multiplied by the scale so FreeTileGrid
        # can work with integer values.
        tilegrid = FreeTileGrid(
            resolutions=cast(List[int], [r * scale for r in grid["resolutions"]]),
            scale=scale,
            max_extent=cast(Tuple[int, int, int, int], grid["bbox"]),
            tile_size=grid["tile_size"],
        )
        self.grid_cache.setdefault(config.file, {})[grid_name] = DatedTileGrid(tilegrid, config.mtime)
        return tilegrid
    def get_geoms(
        self, config: DatedConfig, layer_name: str, host: Optional[str] = None
    ) -> Dict[Union[str, int], BaseGeometry]:
        """
        Return the per-zoom geometries used to restrict generation for a layer.

        The result maps zoom level to a Shapely geometry and is cached per
        config file and mtime.  The base geometry is a rectangle computed from
        --near/--time, --bbox, the layer bbox or the grid bbox; when --geom is
        enabled it is replaced per zoom range by the layer's SQL geometries
        intersected with that rectangle.
        """
        dated_geoms = self.geoms_cache.get(config.file, {}).get(layer_name)
        if dated_geoms is not None and config.mtime == dated_geoms.mtime:
            return dated_geoms.geoms

        layer = config.config["layers"][layer_name]

        if self.options.near is not None or (
            self.options.time is not None and "bbox" in layer and self.options.zoom is not None
        ):
            # Near/time mode: build a square extent of roughly the number of
            # metatiles needed around the requested (or bbox-center) position.
            if self.options.zoom is None or len(self.options.zoom) != 1:
                sys.exit("Option --near needs the option --zoom with one value.")
            if not (self.options.time is not None or self.options.test is not None):
                sys.exit("Option --near needs the option --time or --test.")
            position = (
                self.options.near
                if self.options.near is not None
                else [(layer["bbox"][0] + layer["bbox"][2]) / 2, (layer["bbox"][1] + layer["bbox"][3]) / 2]
            )
            bbox = config.config["grids"][layer["grid"]]["bbox"]
            diff = [position[0] - bbox[0], position[1] - bbox[1]]
            resolution = config.config["grids"][layer["grid"]]["resolutions"][self.options.zoom[0]]
            # Size of one metatile in map units at the selected zoom.
            mt_to_m = layer["meta_size"] * config.config["grids"][layer["grid"]]["tile_size"] * resolution
            mt = [float(d) / mt_to_m for d in diff]

            # With --time, generate 3x the requested number of tiles to get a
            # meaningful measurement window.
            nb_tile = self.options.time * 3 if self.options.time is not None else self.options.test
            nb_mt = nb_tile / (layer["meta_size"] ** 2)
            nb_sqrt_mt = ceil(sqrt(nb_mt))

            mt_origin = [round(m - nb_sqrt_mt / 2) for m in mt]
            extent = [
                bbox[0] + mt_origin[0] * mt_to_m,
                bbox[1] + mt_origin[1] * mt_to_m,
                bbox[0] + (mt_origin[0] + nb_sqrt_mt) * mt_to_m,
                bbox[1] + (mt_origin[1] + nb_sqrt_mt) * mt_to_m,
            ]
        elif self.options.bbox is not None:
            extent = self.options.bbox
        elif "bbox" in layer:
            extent = layer["bbox"]
        else:
            extent = config.config["grids"][layer["grid"]]["bbox"]

        geoms: Dict[Union[str, int], BaseGeometry] = {}
        if extent:
            # Default: the same rectangular extent at every zoom level.
            geom = Polygon(
                (
                    (extent[0], extent[1]),
                    (extent[0], extent[3]),
                    (extent[2], extent[3]),
                    (extent[2], extent[1]),
                )
            )
            for z, r in enumerate(config.config["grids"][layer["grid"]]["resolutions"]):
                geoms[z] = geom

        if self.options.near is None and self.options.geom:
            for g in layer.get("geoms", []):
                with _GEOMS_GET_SUMMARY.labels(layer_name, host if host else self.options.host).time():
                    connection = psycopg2.connect(g["connection"])
                    cursor = connection.cursor()
                    sql = f"SELECT ST_AsBinary(geom) FROM (SELECT {g['sql']}) AS g"  # nosec
                    _LOGGER.info("Execute SQL: %s.", sql)
                    cursor.execute(sql)
                    geom_list = [loads_wkb(bytes(r[0])) for r in cursor.fetchall()]
                    geom = unary_union(geom_list)
                    if extent:
                        geom = geom.intersection(
                            Polygon(
                                (
                                    (extent[0], extent[1]),
                                    (extent[0], extent[3]),
                                    (extent[2], extent[3]),
                                    (extent[2], extent[1]),
                                )
                            )
                        )
                    # Apply this geometry only to the zoom levels whose
                    # resolution is within the configured min/max range.
                    for z, r in enumerate(config.config["grids"][layer["grid"]]["resolutions"]):
                        if ("min_resolution" not in g or g["min_resolution"] <= r) and (
                            "max_resolution" not in g or g["max_resolution"] >= r
                        ):
                            geoms[z] = geom
                    # NOTE(review): connection/cursor are not closed when an
                    # exception occurs above — consider contextlib.closing.
                    cursor.close()
                    connection.close()

        self.geoms_cache.setdefault(config.file, {})[layer_name] = DatedGeoms(geoms, config.mtime)
        return geoms
    def init_tilecoords(self, config: DatedConfig, layer_name: str) -> None:
        """
        Compute the tile coordinates to generate and initialize the tile stream.

        Normalizes ``self.options.zoom`` against the layer's grid (maximum
        zoom, ``min_resolution_seed``), builds a bounding pyramid from the
        per-zoom geometries, then feeds either metatile or tile coordinates to
        ``set_tilecoords``.
        """
        layer = config.config["layers"][layer_name]
        resolutions = config.config["grids"][layer["grid"]]["resolutions"]

        if self.options.time is not None and self.options.zoom is None:
            # --time without --zoom: measure at the seed resolution
            # (or the deepest zoom when none is configured).
            if "min_resolution_seed" in layer:
                self.options.zoom = [resolutions.index(layer["min_resolution_seed"])]
            else:
                self.options.zoom = [len(resolutions) - 1]

        if self.options.zoom is not None:
            # Warn about and drop zoom levels the grid doesn't have.
            zoom_max = len(resolutions) - 1
            for zoom in self.options.zoom:
                if zoom > zoom_max:
                    _LOGGER.warning(
                        "zoom %i is greater than the maximum zoom %i of grid %s of layer %s, ignored.",
                        zoom,
                        zoom_max,
                        layer["grid"],
                        layer_name,
                    )
            self.options.zoom = [z for z in self.options.zoom if z <= zoom_max]

        if "min_resolution_seed" in layer:
            # Keep only the zoom levels at or above the minimum seed resolution.
            if self.options.zoom is None:
                self.options.zoom = []
                for z, resolution in enumerate(resolutions):
                    if resolution >= layer["min_resolution_seed"]:
                        self.options.zoom.append(z)
            else:
                for zoom in self.options.zoom:
                    resolution = resolutions[zoom]
                    if resolution < layer["min_resolution_seed"]:
                        _LOGGER.warning(
                            "zoom %i corresponds to resolution %s is smaller"
                            " than the 'min_resolution_seed' %s of layer %s, ignored.",
                            zoom,
                            resolution,
                            layer["min_resolution_seed"],
                            layer_name,
                        )
                self.options.zoom = [
                    z for z in self.options.zoom if resolutions[z] >= layer["min_resolution_seed"]
                ]

        if self.options.zoom is None:
            # Default: every zoom level of the grid.
            self.options.zoom = [z for z, r in enumerate(resolutions)]

        # Fill the bounding pyramid
        tilegrid = self.get_grid(config, layer["grid"])
        bounding_pyramid = BoundingPyramid(tilegrid=tilegrid)
        geoms = self.get_geoms(config, layer_name)
        for zoom in self.options.zoom:
            if zoom in geoms:
                extent = geoms[zoom].bounds

                if len([e for e in extent if not math.isnan(e)]) == 0:
                    _LOGGER.warning("bounds empty for zoom %s", zoom)
                else:
                    # Grow the extent by the configured pixel buffer, clamped
                    # to the grid's maximum extent.
                    minx, miny, maxx, maxy = extent
                    px_buffer = layer["px_buffer"]
                    m_buffer = px_buffer * resolutions[zoom]
                    minx -= m_buffer
                    miny -= m_buffer
                    maxx += m_buffer
                    maxy += m_buffer
                    bounding_pyramid.add(
                        tilegrid.tilecoord(
                            zoom,
                            max(minx, tilegrid.max_extent[0]),
                            max(miny, tilegrid.max_extent[1]),
                        )
                    )
                    bounding_pyramid.add(
                        tilegrid.tilecoord(
                            zoom,
                            min(maxx, tilegrid.max_extent[2]),
                            min(maxy, tilegrid.max_extent[3]),
                        )
                    )

        if layer["meta"]:
            self.set_tilecoords(config, bounding_pyramid.metatilecoords(layer["meta_size"]), layer_name)
        else:
            self.set_tilecoords(config, bounding_pyramid, layer_name)
@staticmethod
def _tilestream(
tilecoords: Iterable[TileCoord],
default_metadata: Dict[str, str],
all_dimensions: List[Dict[str, str]],
) -> Iterator[Tile]:
for tilecoord in tilecoords:
for dimensions in all_dimensions:
metadata = {}
if default_metadata is not None:
metadata.update(default_metadata)
for k, v in dimensions.items():
metadata["dimension_" + k] = v
yield Tile(tilecoord, metadata=metadata)
def set_tilecoords(self, config: DatedConfig, tilecoords: Iterable[TileCoord], layer_name: str) -> None:
assert tilecoords is not None
layer = config.config["layers"][layer_name]
metadata = {"layer": layer_name, "config_file": config.file}
if self.options.host is not None:
metadata["host"] = self.options.host
self.tilestream = self._tilestream(tilecoords, metadata, self.get_all_dimensions(layer))
def set_store(self, store: TileStore) -> None:
self.tilestream = cast(Iterator[Tile], store.list())
def counter(self) -> "Count":
count = Count()
self.imap(count)
return count
def counter_size(self) -> "CountSize":
count = CountSize()
self.imap(count)
return count
def process(self, name: Optional[str] = None, key: str = "post_process") -> None:
gene = self
def get_process(config_file: str, layer_name: str) -> Optional[Process]:
config = gene.get_config(config_file)
layer = config.config["layers"][layer_name]
name_ = name
if name_ is None:
name_ = layer.get(key) # type: ignore
if name_ is not None:
return Process(config.config["process"][name_], self.options)
return None
self.imap(MultiAction(get_process))
    def get(self, store: TileStore, time_message: Optional[str] = None) -> None:
        """Append a pipeline step that fetches each tile's data from ``store``."""
        assert store is not None
        self.imap(store.get_one, time_message)
def put(self, store: TileStore, time_message: Optional[str] = None) -> None:
assert store is not None
def put_internal(tile: Tile) -> Tile:
store.put_one(tile)
return tile
self.imap(put_internal, time_message)
def delete(self, store: TileStore, time_message: Optional[str] = None) -> None:
assert store is not None
def delete_internal(tile: Tile) -> Tile:
store.delete_one(tile)
return tile
self.imap(delete_internal, time_message)
    def imap(self, func: Any, time_message: Optional[str] = None) -> None:
        """
        Append ``func`` as the next step of the current pipeline.

        time_message: optional label used by the timing instrumentation.
        """
        assert func is not None

        class Func:
            """Function with an additional field used to names it in timing messages."""

            def __init__(self, func: Callable[[Tile], Tile], time_message: Optional[str]) -> None:
                self.func = func
                self.time_message = time_message

            def __call__(self, tile: Tile) -> Tile:
                return self.func(tile)

            def __str__(self) -> str:
                return f"Func: {self.func}"

        self.functions.append(Func(func, time_message))
    def consume(self, test: Optional[int] = None) -> None:
        """
        Drain the tile stream, running the metatile pipeline on every tile.

        test: when set (or when --test is set), only that many tiles are
            processed, sequentially; otherwise the whole stream is consumed,
            with METATILE_NB_THREAD worker threads when multi-threading is on.

        Accumulates errors in ``self.error``, records the run duration and
        invokes the registered close actions at the end.
        """
        assert self.tilestream is not None

        test = self.options.test if test is None else test

        start = datetime.now()

        run = Run(self, self.functions_metatiles)

        if test is None:
            if TYPE_CHECKING:
                buffer: queue.Queue[Tile] = queue.Queue(int(os.environ.get("TILE_QUEUE_SIZE", "2")))
            else:
                buffer = queue.Queue(int(os.environ.get("TILE_QUEUE_SIZE", "2")))
            end = False

            nb_thread = int(os.environ.get("METATILE_NB_THREAD", "1"))

            if nb_thread == 1 or not self.multi_thread:
                # Single-threaded: consume the mapped stream directly.
                consume(map(run, self.tilestream), None)
            else:
                should_exit_error = False

                def target() -> None:
                    # Worker: pull tiles from the buffer until the producer is
                    # done and the buffer is drained.
                    _LOGGER.debug("Start run")
                    nonlocal should_exit_error
                    while not end or not buffer.empty():
                        try:
                            run(buffer.get(timeout=1))
                        except tilecloud.filter.error.TooManyErrors:
                            _LOGGER.exception("Too many errors")
                            should_exit_error = True
                        except queue.Empty:
                            pass
                    _LOGGER.debug("End run")

                threads = [threading.Thread(target=target, name=f"Run {i}") for i in range(nb_thread)]
                for thread in threads:
                    thread.start()

                # Producer: feed the stream into the bounded buffer; abort the
                # process when a worker hit the error limit.
                for tile in self.tilestream:
                    while True:
                        try:
                            buffer.put(tile, timeout=1)
                            break
                        except queue.Full:
                            if should_exit_error:
                                sys.exit(1)

                end = True

                for thread in threads:
                    thread.join(30)

            if self.metatilesplitter_thread_pool is not None:
                self.metatilesplitter_thread_pool.shutdown()
                self.metatilesplitter_thread_pool = None

            assert buffer.empty(), buffer.qsize()

        else:
            # Test mode: process exactly `test` tiles, sequentially.
            for _ in range(test):
                run(next(self.tilestream))

            if self.metatilesplitter_thread_pool is not None:
                self.metatilesplitter_thread_pool.shutdown()
                self.metatilesplitter_thread_pool = None

            assert threading.active_count() == 1, ", ".join([str(t) for t in threading.enumerate()])

        self.error += run.error
        self.duration = datetime.now() - start
        for ca in self._close_actions:
            ca()
class Count:
    """Count the number of generated tile."""

    def __init__(self) -> None:
        # Counter is shared between worker threads, hence the lock.
        self.nb = 0
        self.lock = threading.Lock()

    def __call__(self, tile: Optional[Tile] = None) -> Optional[Tile]:
        """Increment the counter and pass the tile through unchanged."""
        with self.lock:
            self.nb += 1
        return tile
class CountSize:
    """Count the number of generated tile and measure the total generated size."""

    def __init__(self) -> None:
        # Counters are shared between worker threads, hence the lock.
        self.nb = 0
        self.size = 0
        self.lock = threading.Lock()

    def __call__(self, tile: Optional[Tile] = None) -> Optional[Tile]:
        """Accumulate count and byte size for tiles carrying data; pass the tile through."""
        if tile and tile.data:
            with self.lock:
                self.nb += 1
                self.size += len(tile.data)
        return tile
class HashDropper:
    """
    Create a filter to remove the tiles data where they have the specified size and hash.

    Used to drop the empty tiles.

    The ``store`` is used to delete the empty tiles.
    """

    def __init__(
        self,
        size: int,
        sha1code: str,
        store: Optional[TileStore] = None,
        queue_store: Optional[TileStore] = None,
        count: Optional[Count] = None,
    ) -> None:
        # Expected byte size of an "empty" tile.
        self.size = size
        # Expected SHA1 hex digest of an "empty" tile.
        self.sha1code = sha1code
        self.store = store
        self.queue_store = queue_store
        self.count = count

    def __call__(self, tile: Tile) -> Optional[Tile]:
        """Pass the tile through unless it matches the empty size+hash; then drop it."""
        assert tile.data
        # Cheap size comparison first, hash only on a size match.
        if len(tile.data) != self.size or sha1(tile.data).hexdigest() != self.sha1code:  # nosec
            return tile
        else:
            if self.store is not None:
                if tile.tilecoord.n != 1:
                    # Metatile: delete each contained tile individually.
                    for tilecoord in tile.tilecoord:
                        self.store.delete_one(Tile(tilecoord, metadata=tile.metadata))
                else:
                    self.store.delete_one(tile)
            _LOGGER.info("The tile %s %s is dropped", tile.tilecoord, tile.formated_metadata)
            if hasattr(tile, "metatile"):
                # Book-keeping on the parent metatile: remove its queue entry
                # once all of its tiles have been handled.
                metatile: Tile = tile.metatile
                metatile.elapsed_togenerate -= 1  # type: ignore
                if metatile.elapsed_togenerate == 0 and self.queue_store is not None:  # type: ignore
                    self.queue_store.delete_one(metatile)
            elif self.queue_store is not None:
                self.queue_store.delete_one(tile)
            if self.count:
                self.count()

            return None
class MultiAction:
    """
    Used to perform an action based on the tile's layer name.

    E.g a HashDropper or Process

    The resolved action is cached per (config file, layer) — including a
    resolved ``None`` ("no action") — so ``get_action`` is consulted at most
    once per key.
    """

    def __init__(
        self,
        get_action: "Callable[[str, str], Optional[Callable[[Tile], Optional[Tile]]]]",
    ) -> None:
        self.get_action = get_action
        # (config_file, layer) -> action, or None when the layer has no action.
        self.actions: "Dict[Tuple[str, str], Optional[Callable[[Tile], Optional[Tile]]]]" = {}

    def __call__(self, tile: "Tile") -> "Optional[Tile]":
        """Run the layer's action on the tile, or pass the tile through when there is none."""
        layer = tile.metadata["layer"]
        config_file = tile.metadata["config_file"]
        key = (config_file, layer)
        if key in self.actions:
            # Membership test (not .get) so a cached None is honored and
            # get_action is not re-invoked for every tile of such layers.
            action = self.actions[key]
        else:
            action = self.get_action(config_file, layer)
            self.actions[key] = action
        if action:
            _LOGGER.debug("[%s] Run action %s.", tile.tilecoord, action)
            return action(tile)
        return tile
class HashLogger:
    """Log the tile size and hash."""

    def __init__(self, block: str, out: Optional[IO[str]]) -> None:
        # Name of the config block the values are printed under
        # (e.g. "empty_tile_detection").
        self.block = block
        self.out = out

    def __call__(self, tile: Tile) -> Tile:
        """Print the tile's size and SHA1; exit the process if the image is not uniform."""
        ref = None
        try:
            assert tile.data
            image = Image.open(BytesIO(tile.data))
        except OSError as ex:
            assert tile.data
            _LOGGER.exception("%s: %s", str(ex), tile.data)
            raise

        # A tile is only a valid empty-tile candidate when every pixel has
        # the same value.
        for px in image.getdata():
            if ref is None:
                ref = px
            elif px != ref:
                _LOGGER.error("Error: image is not uniform.")
                sys.exit(1)

        assert tile.data
        print(
            f"""Tile: {tile.tilecoord} {tile.formated_metadata}
    {self.block}:
        size: {len(tile.data)}
        hash: {sha1(tile.data).hexdigest()}""",  # nosec
            file=self.out,
        )
        return tile
class LocalProcessFilter:
    """
    Drop the tiles (coordinate) that shouldn't be generated in this process.

    Process 1: process tiles 0 of 3
    Process 2: process tiles 1 of 3
    Process 3: process tiles 2 of 3
    """

    def __init__(self, nb_process: int, process_nb: int) -> None:
        self.nb_process = nb_process
        self.process_nb = int(process_nb)

    def filter(self, tilecoord: TileCoord) -> bool:
        """Tell whether this process is responsible for the given coordinate."""
        index = round(tilecoord.z + tilecoord.x / tilecoord.n + tilecoord.y / tilecoord.n)
        return index % self.nb_process == self.process_nb

    def __call__(self, tile: Tile) -> Optional[Tile]:
        if not self.filter(tile.tilecoord):
            return None
        return tile
class IntersectGeometryFilter:
    """Drop the tiles (coordinates) that do not intersect the configured geometry."""

    def __init__(
        self,
        gene: TileGeneration,
    ) -> None:
        self.gene = gene

    def filter_tilecoord(
        self, config: DatedConfig, tilecoord: TileCoord, layer_name: str, host: Optional[str] = None
    ) -> bool:
        """Tell whether the (buffered) tile extent intersects the layer geometry at its zoom."""
        layer = config.config["layers"][layer_name]
        grid_name = layer["grid"]
        grid = config.config["grids"][grid_name]
        tile_grid = self.gene.get_grid(config, grid_name)
        # NOTE(review): because of ternary precedence this evaluates as
        # `(px_buffer + meta_buffer) if meta else 0`, so non-meta layers get
        # no buffer at all — confirm that ignoring px_buffer is intended.
        px_buffer = layer["px_buffer"] + layer["meta_buffer"] if layer["meta"] else 0
        geoms = self.gene.get_geoms(config, layer_name, host=host)
        return self.bbox_polygon(  # type: ignore
            tile_grid.extent(tilecoord, grid["resolutions"][tilecoord.z] * px_buffer)
        ).intersects(geoms[tilecoord.z])

    def __call__(self, tile: Tile) -> Optional[Tile]:
        return (
            tile
            if self.filter_tilecoord(self.gene.get_tile_config(tile), tile.tilecoord, tile.metadata["layer"])
            else None
        )

    @staticmethod
    def bbox_polygon(bbox: Tuple[float, float, float, float]) -> Polygon:
        """Build a rectangular Polygon from a (minx, miny, maxx, maxy) tuple."""
        return Polygon(((bbox[0], bbox[1]), (bbox[0], bbox[3]), (bbox[2], bbox[3]), (bbox[2], bbox[1])))
class DropEmpty:
    """Create a filter for dropping all tiles with errors."""

    def __init__(self, gene: TileGeneration) -> None:
        self.gene = gene

    def __call__(self, tile: Tile) -> Optional[Tile]:
        """Pass through tiles that carry data; log, record and drop the empty ones."""
        config = self.gene.get_tile_config(tile)
        if tile and tile.data:
            return tile
        _LOGGER.error(
            "The tile: %s%s is empty",
            tile.tilecoord if tile else "not defined",
            " " + tile.formated_metadata if tile else "",
        )
        if "error_file" in config.config["generation"] and tile:
            self.gene.log_tiles_error(tile=tile, message="The tile is empty")
        return None
def quote(arg: str) -> str:
    """Quote and escape ``arg`` so it can be passed to an external command."""
    needs_quoting = " " in arg or "'" in arg or '"' in arg
    if needs_quoting:
        if "'" not in arg:
            return f"'{arg}'"
        if '"' not in arg:
            return f'"{arg}"'
        # Both quote kinds present: escape the single quotes, wrap in single quotes.
        escaped = arg.replace("'", "\\'")
        return f"'{escaped}'"
    if not arg:
        return "''"
    return arg
def parse_tilecoord(string_representation: str) -> TileCoord:
    """Parse tile coordinates ("z/x/y" or "z/x/y:+n/+n") into a TileCoord object."""
    parts = string_representation.split(":")
    coords = [int(value) for value in parts[0].split("/")]
    if len(coords) != 3:
        raise ValueError("Wrong number of coordinates")
    z, x, y = coords
    if len(parts) == 1:
        return TileCoord(z, x, y)
    if len(parts) == 2:
        meta = parts[1].split("/")
        if len(meta) != 2:
            raise ValueError("No one '/' in meta coordinates")
        return TileCoord(z, x, y, int(meta[0]))
    raise ValueError("More than on ':' in the tilecoord")
class Process:
    """Process a tile throw an external command."""

    def __init__(self, config: tilecloud_chain.configuration.ProcessCommand, options: Namespace) -> None:
        # List of command definitions ({"cmd", "arg", "need_out"}).
        self.config = config
        self.options = options

    def __call__(self, tile: Tile) -> Optional[Tile]:
        """Run the configured command chain on the tile data via temporary files."""
        if tile and tile.data:
            # Write the tile data to a temporary input file.
            fd_in, name_in = tempfile.mkstemp()
            with open(name_in, "wb") as file_in:
                file_in.write(tile.data)

            for cmd in self.config:
                # Extra arguments depending on the requested verbosity.
                args = []
                if (
                    not self.options.verbose and not self.options.debug and not self.options.quiet
                ) and "default" in cmd["arg"]:
                    args.append(cmd["arg"]["default"])
                if self.options.verbose and "verbose" in cmd["arg"]:
                    args.append(cmd["arg"]["verbose"])
                if self.options.debug and "debug" in cmd["arg"]:
                    args.append(cmd["arg"]["debug"])
                if self.options.quiet and "quiet" in cmd["arg"]:
                    args.append(cmd["arg"]["quiet"])

                if cmd["need_out"]:
                    # The command writes a separate output file; reserve a name.
                    fd_out, name_out = tempfile.mkstemp()
                    os.unlink(name_out)
                else:
                    # The command transforms the file in place.
                    name_out = name_in

                # The command template is filled with %(in)s, %(out)s, %(args)s
                # and the tile coordinates.
                command = cmd["cmd"] % {
                    "in": name_in,
                    "out": name_out,
                    "args": " ".join(args),
                    "x": tile.tilecoord.x,
                    "y": tile.tilecoord.y,
                    "z": tile.tilecoord.z,
                }
                _LOGGER.debug("[%s] process: %s", tile.tilecoord, command)
                result = subprocess.run(  # pylint: disable=subprocess-run-check
                    command, shell=True, capture_output=True  # nosec
                )
                if result.returncode != 0:
                    # Report the failure on the tile and stop processing it.
                    tile.error = (
                        f"Command '{command}' on tile {tile.tilecoord} "
                        f"return error code {result.returncode}:\n{result.stderr!s}\n{result.stdout!s}"
                    )
                    tile.data = None
                    return tile

                if cmd["need_out"]:
                    # Chain the commands: the output becomes the next input.
                    os.close(fd_in)
                    os.remove(name_in)
                    name_in = name_out
                    fd_in = fd_out

            with open(name_in, "rb") as file_out:
                tile.data = file_out.read()
            os.close(fd_in)
            os.remove(name_in)

        return tile
class TilesFileStore(TileStore):
    """Load tiles to be generate from a file."""

    def __init__(self, tiles_file: str):
        """
        tiles_file: path of a text file with one tile per line:
        "z/x/y[:+n/+n] [key=value ...]"; '#' starts a comment.
        """
        super().__init__()
        # NOTE(review): the handle is kept open for the store's lifetime and
        # is never closed explicitly.
        self.tiles_file = open(tiles_file, encoding="utf-8")  # pylint: disable=consider-using-with

    def list(self) -> Iterator[Tile]:
        """Yield a Tile per valid line; malformed lines are logged and skipped."""
        while True:
            line = self.tiles_file.readline()
            if not line:
                return
            # Strip comments and surrounding whitespace.
            line = line.split("#")[0].strip()
            if line != "":
                splitted_line = line.split(" ")
                try:
                    tilecoord = parse_tilecoord(splitted_line[0])
                except ValueError as e:
                    _LOGGER.exception(
                        "A tile '%s' is not in the format 'z/x/y' or z/x/y:+n/+n\n%s",
                        line,
                        repr(e),
                    )
                    continue

                # Remaining tokens are key=value metadata pairs.
                yield Tile(
                    tilecoord,
                    metadata=dict([cast(Tuple[str, str], e.split("=")) for e in splitted_line[1:]]),
                )
def _await_message(_: Any) -> bool:
    """
    "On empty" callback for the SQS store: pause, then let it poll again.

    Returns False so the SQSTileStore keeps retrying after the pause.
    """
    try:
        # Just sleep, the SQSTileStore will try again after that...
        time.sleep(10)
        return False
    except KeyboardInterrupt:
        # NOTE(review): StopIteration raised inside a generator frame becomes
        # RuntimeError under PEP 479 — confirm callers invoke this outside a
        # generator.
        raise StopIteration  # pylint: disable=raise-missing-from
def get_queue_store(config: DatedConfig, daemon: bool) -> TimedTileStoreWrapper:
    """
    Get the queue tile store (Redis or SQS).

    config: configuration selecting the backend (a "redis" section wins,
        otherwise SQS is used).
    daemon: when True, keep waiting on an empty queue instead of stopping.
    """
    if "redis" in config.config:
        # Create a Redis queue
        conf = config.config["redis"]
        tilestore_kwargs: Dict[str, Any] = {
            "name": conf["queue"],
            "stop_if_empty": not daemon,
            "timeout": conf["timeout"],
            "pending_timeout": conf["pending_timeout"],
            "max_retries": conf["max_retries"],
            "max_errors_age": conf["max_errors_age"],
            "max_errors_nb": conf["max_errors_nb"],
            "connection_kwargs": conf.get("connection_kwargs", {}),
            "sentinel_kwargs": conf.get("sentinel_kwargs"),
        }
        if "socket_timeout" in conf:
            tilestore_kwargs["connection_kwargs"]["socket_timeout"] = conf["socket_timeout"]
        if "db" in conf:
            tilestore_kwargs["connection_kwargs"]["db"] = conf["db"]
        # Either a direct URL or a Sentinel configuration.
        if "url" in conf:
            tilestore_kwargs["url"] = conf["url"]
        else:
            tilestore_kwargs["sentinels"] = conf["sentinels"]
            tilestore_kwargs["service_name"] = conf.get("service_name", "mymaster")
        if "pending_count" in conf:
            tilestore_kwargs["pending_count"] = conf["pending_count"]
        if "pending_max_count" in conf:
            tilestore_kwargs["pending_max_count"] = conf["pending_max_count"]
        return TimedTileStoreWrapper(RedisTileStore(**tilestore_kwargs), store_name="redis")
    else:
        # Create a SQS queue
        return TimedTileStoreWrapper(
            SQSTileStore(_get_sqs_queue(config), on_empty=_await_message if daemon else maybe_stop),
            store_name="SQS",
        )
def _get_sqs_queue(
    config: DatedConfig,
) -> "botocore.client.SQS":
    """Resolve the configured SQS queue; exit when no "sqs" section is configured."""
    if "sqs" not in config.config:
        sys.exit("The config hasn't any configured queue")
    sqs_config = config.config["sqs"]
    resource = boto3.resource("sqs", region_name=sqs_config.get("region", "eu-west-1"))
    return resource.get_queue_by_name(QueueName=sqs_config["queue"])
|
{"/tilecloud_chain/copy_.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/format.py"], "/tilecloud_chain/controller.py": ["/tilecloud_chain/__init__.py"], "/tilecloud_chain/views/admin.py": ["/tilecloud_chain/server.py", "/tilecloud_chain/__init__.py", "/tilecloud_chain/controller.py"], "/tilecloud_chain/server.py": ["/tilecloud_chain/security.py", "/tilecloud_chain/__init__.py", "/tilecloud_chain/controller.py"], "/tilecloud_chain/expiretiles.py": ["/tilecloud_chain/__init__.py"], "/tilecloud_chain/tests/test_controller.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_config.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_expiretiles.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/__init__.py": ["/tilecloud_chain/security.py", "/tilecloud_chain/multitilestore.py", "/tilecloud_chain/timedtilestore.py"], "/tilecloud_chain/tests/test_generate.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_serve.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/server.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/internal_mapcache.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/generate.py", "/tilecloud_chain/server.py"], "/tilecloud_chain/tests/test_cost.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/cost.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/format.py"], "/tilecloud_chain/tests/test_copy.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/generate.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/database_logger.py", "/tilecloud_chain/format.py", "/tilecloud_chain/multitilestore.py", "/tilecloud_chain/timedtilestore.py", "/tilecloud_chain/mapnik_.py"], "/tilecloud_chain/tests/test_error.py": 
["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"]}
|
14,618
|
camptocamp/tilecloud-chain
|
refs/heads/master
|
/tilecloud_chain/tests/test_generate.py
|
import os
import shutil
from itertools import product, repeat
from typing import List, Tuple
import pytest
from testfixtures import LogCapture
from tilecloud.store.redis import RedisTileStore
from tilecloud_chain import controller, generate
from tilecloud_chain.tests import CompareCase
class TestGenerate(CompareCase):
    def setUp(self) -> None:  # noqa
        # Show full diffs on assertion failures.
        self.maxDiff = None
    @classmethod
    def setUpClass(cls):  # noqa
        # Run from the tests directory and start from a clean tile cache.
        os.chdir(os.path.dirname(__file__))
        if os.path.exists("/tmp/tiles"):
            shutil.rmtree("/tmp/tiles")
    @classmethod
    def tearDownClass(cls):  # noqa
        # Restore the repository root as working directory and clean up tiles.
        os.chdir(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
        if os.path.exists("/tmp/tiles"):
            shutil.rmtree("/tmp/tiles")
    def test_get_hash(self) -> None:
        # --get-hash must print the empty-metatile and empty-tile size/hash
        # pairs for the requested tile, with and without debug (-d).
        with LogCapture("tilecloud_chain", level=30) as log_capture:
            for d in ("-d", ""):
                self.assert_cmd_equals(
                    cmd=".build/venv/bin/generate_tiles {} --get-hash 4/0/0 "
                    "-c tilegeneration/test.yaml -l point".format(d),
                    main_func=generate.main,
                    expected="""Tile: 4/0/0:+8/+8 config_file=tilegeneration/test.yaml dimension_DATE=2012 host=localhost layer=point
    empty_metatile_detection:
        size: 20743
        hash: 01062bb3b25dcead792d7824f9a7045f0dd92992
Tile: 4/0/0 config_file=tilegeneration/test.yaml dimension_DATE=2012 host=localhost layer=point
    empty_tile_detection:
        size: 334
        hash: dd6cb45962bccb3ad2450ab07011ef88f766eda8
""",
                )
            # No warnings or errors expected.
            log_capture.check()
    def test_get_wrong_hash(self) -> None:
        # A non-uniform tile must make --get-hash abort with an error.
        for d in ("-d", "-q"):
            with LogCapture("tilecloud_chain", level=30) as log_capture:
                self.assert_cmd_exit_equals(
                    cmd=".build/venv/bin/generate_tiles {} --get-hash 0/7/5 "
                    "-c tilegeneration/test.yaml -l all".format(d),
                    main_func=generate.main,
                )
            log_capture.check(
                (
                    "tilecloud_chain",
                    "ERROR",
                    "Error: image is not uniform.",
                )
            )
    def test_get_bbox(self) -> None:
        # --get-bbox must print the map-unit bounds of a tile and of metatiles
        # of size +1/+1 and +2/+2.
        for d in ("-d", ""):
            with LogCapture("tilecloud_chain", level=30) as log_capture:
                self.assert_cmd_equals(
                    cmd=".build/venv/bin/generate_tiles {} "
                    "-c tilegeneration/test.yaml --get-bbox 4/4/4 -l point".format(d),
                    main_func=generate.main,
                    expected="""Tile bounds: [425120,343600,426400,344880]
""",
                )
                self.assert_cmd_equals(
                    cmd=".build/venv/bin/generate_tiles {} "
                    "-c tilegeneration/test.yaml --get-bbox 4/4/4:+1/+1 -l point".format(d),
                    main_func=generate.main,
                    expected="""Tile bounds: [425120,343600,426400,344880]
""",
                )
                self.assert_cmd_equals(
                    cmd=".build/venv/bin/generate_tiles {} "
                    "-c tilegeneration/test.yaml --get-bbox 4/4/4:+2/+2 -l point".format(d),
                    main_func=generate.main,
                    expected="""Tile bounds: [425120,342320,427680,344880]
""",
                )
                log_capture.check()
    @pytest.mark.skip(reason="Don't test mapnik")
    def test_hash_mapnik(self):
        # --get-hash on a mapnik layer (currently skipped: no mapnik in CI).
        for d in ("-d", ""):
            with LogCapture("tilecloud_chain", level=30) as log_capture:
                self.assert_cmd_equals(
                    cmd=".build/venv/bin/generate_tiles {} "
                    "--get-hash 4/0/0 -c tilegeneration/test.yaml -l mapnik".format(d),
                    main_func=generate.main,
                    expected="""Tile: 4/0/0 config_file=tilegeneration/test.yaml
    empty_tile_detection:
        size: 334
        hash: dd6cb45962bccb3ad2450ab07011ef88f766eda8
""",
                )
                log_capture.check()
    def test_hash_mapnik_grid(self) -> None:
        # --get-hash on the 'all' layer: metatile and tile hashes are printed.
        for d in ("-d", ""):
            with LogCapture("tilecloud_chain", level=30) as log_capture:
                self.assert_cmd_equals(
                    cmd=".build/venv/bin/generate_tiles {} "
                    "--get-hash 4/0/0 -c tilegeneration/test.yaml -l all".format(d),
                    main_func=generate.main,
                    expected="""Tile: 4/0/0 config_file=tilegeneration/test.yaml dimension_DATE=2012 host=localhost layer=all
    empty_metatile_detection:
        size: 334
        hash: dd6cb45962bccb3ad2450ab07011ef88f766eda8
Tile: 4/0/0 config_file=tilegeneration/test.yaml dimension_DATE=2012 host=localhost layer=all
    empty_tile_detection:
        size: 334
        hash: dd6cb45962bccb3ad2450ab07011ef88f766eda8
""",
                )
                log_capture.check()
    def test_test_all(self) -> None:
        """Generate one metatile per layer (-t 1) and check the produced tiles and the report."""
        for d in ("-d", ""):
            with LogCapture("tilecloud_chain", level=30) as log_capture:
                self.assert_tiles_generated(
                    cmd=f".build/venv/bin/generate_tiles {d} -c tilegeneration/test-nosns.yaml -t 1",
                    main_func=generate.main,
                    directory="/tmp/tiles/",
                    tiles_pattern="1.0.0/%s/default/2012/swissgrid_5/%i/%i/%i.png",
                    tiles=[
                        ("line", 0, 5, 6),
                        ("line", 0, 5, 7),
                        ("line", 0, 6, 5),
                        ("line", 0, 6, 6),
                        ("line", 0, 7, 4),
                        ("line", 0, 7, 5),
                        ("polygon", 0, 5, 4),
                    ],
                    regex=True,
                    expected=r"""The tile generation of layer 'line \(DATE=2012\)' is finish
Nb generated metatiles: 1
Nb metatiles dropped: 0
Nb generated tiles: 64
Nb tiles dropped: 58
Nb tiles stored: 6
Nb tiles in error: 0
Total time: [0-9]+:[0-9][0-9]:[0-9][0-9]
Total size: 3.[0-9] Kio
Time per tile: [0-9]+ ms
Size per tile: 6[0-9][0-9] o
The tile generation of layer 'polygon \(DATE=2012\)' is finish
Nb generated tiles: 1
Nb tiles dropped: 0
Nb tiles stored: 1
Nb tiles in error: 0
Total time: [0-9]+:[0-9][0-9]:[0-9][0-9]
Total size: [45][0-9][0-9] o
Time per tile: [0-9]+ ms
Size per tile: [45][0-9][0-9] o
""",
                )
                log_capture.check()
    def test_test_dimensions(self) -> None:
        """Same as test_test_all but overriding the DATE dimension on the command line."""
        for d in ("-d", ""):
            with LogCapture("tilecloud_chain", level=30) as log_capture:
                self.assert_tiles_generated(
                    cmd=".build/venv/bin/generate_tiles %s -c tilegeneration/test-nosns.yaml -t 1 "
                    "--dimensions DATE=2013" % d,
                    main_func=generate.main,
                    directory="/tmp/tiles/",
                    # The dimension value appears in the tile path (2013 instead of 2012).
                    tiles_pattern="1.0.0/%s/default/2013/swissgrid_5/%i/%i/%i.png",
                    tiles=[
                        ("line", 0, 5, 6),
                        ("line", 0, 5, 7),
                        ("line", 0, 6, 5),
                        ("line", 0, 6, 6),
                        ("line", 0, 7, 4),
                        ("line", 0, 7, 5),
                        ("polygon", 0, 5, 4),
                    ],
                    regex=True,
                    expected=r"""The tile generation of layer 'line \(DATE=2013\)' is finish
Nb generated metatiles: 1
Nb metatiles dropped: 0
Nb generated tiles: 64
Nb tiles dropped: 58
Nb tiles stored: 6
Nb tiles in error: 0
Total time: [0-9]+:[0-9][0-9]:[0-9][0-9]
Total size: 3.[0-9] Kio
Time per tile: [0-9]+ ms
Size per tile: 6[0-9][0-9] o
The tile generation of layer 'polygon \(DATE=2013\)' is finish
Nb generated tiles: 1
Nb tiles dropped: 0
Nb tiles stored: 1
Nb tiles in error: 0
Total time: [0-9]+:[0-9][0-9]:[0-9][0-9]
Total size: [45][0-9][0-9] o
Time per tile: [0-9]+ ms
Size per tile: [45][0-9][0-9] o
""",
                )
                log_capture.check()
    def test_multigeom(self) -> None:
        """Generate the 'pp' layer whose geometry is a multi-geometry; all 51 tiles must be stored."""
        with LogCapture("tilecloud_chain", level=30) as log_capture:
            self.assert_tiles_generated(
                cmd=".build/venv/bin/generate_tiles -c tilegeneration/test-multigeom.yaml",
                main_func=generate.main,
                directory="/tmp/tiles/",
                tiles_pattern="1.0.0/pp/default/2012/swissgrid_5/%i/%i/%i.png",
                tiles=[
                    (0, 5, 4),
                    (0, 5, 5),
                    (0, 5, 6),
                    (0, 5, 7),
                    (0, 6, 4),
                    (0, 6, 5),
                    (0, 6, 6),
                    (0, 6, 7),
                    (0, 7, 4),
                    (0, 7, 5),
                    (0, 7, 6),
                    (0, 7, 7),
                    (1, 11, 8),
                    (1, 11, 9),
                    (1, 11, 10),
                    (1, 11, 11),
                    (1, 11, 12),
                    (1, 11, 13),
                    (1, 11, 14),
                    (1, 12, 8),
                    (1, 12, 9),
                    (1, 12, 10),
                    (1, 12, 11),
                    (1, 12, 12),
                    (1, 12, 13),
                    (1, 12, 14),
                    (1, 13, 8),
                    (1, 13, 9),
                    (1, 13, 10),
                    (1, 13, 11),
                    (1, 13, 12),
                    (1, 13, 13),
                    (1, 13, 14),
                    (1, 14, 8),
                    (1, 14, 9),
                    (1, 14, 10),
                    (1, 14, 11),
                    (1, 14, 12),
                    (1, 14, 13),
                    (1, 14, 14),
                    (1, 15, 8),
                    (1, 15, 9),
                    (1, 15, 10),
                    (1, 15, 11),
                    (1, 15, 12),
                    (1, 15, 13),
                    (1, 15, 14),
                    (2, 29, 35),
                    (2, 39, 21),
                    (3, 78, 42),
                    (3, 58, 70),
                ],
                regex=True,
                expected=r"""The tile generation of layer 'pp \(DATE=2012\)' is finish
Nb generated tiles: 51
Nb tiles dropped: 0
Nb tiles stored: 51
Nb tiles in error: 0
Total time: [0-9]+:[0-9][0-9]:[0-9][0-9]
Total size: [34][0-9] Kio
Time per tile: [0-9]+ ms
Size per tile: [79][0-9][0-9] o
""",
            )
            log_capture.check()
    def test_zoom_identifier(self) -> None:
        """Check that zoom levels 0-2 are published under the grid identifiers '1', '0_2' and '0_1'."""
        with LogCapture("tilecloud_chain", level=30) as log_capture:
            # Expected tile coordinate ranges for each of the three zoom levels.
            xy = list(product(range(585, 592), range(429, 432)))
            x = [e[0] for e in xy]
            y = [e[1] for e in xy]
            xy2 = list(product(range(2929, 2936), range(2148, 2152)))
            x2 = [e[0] for e in xy2]
            y2 = [e[1] for e in xy2]
            xy3 = list(product(range(5859, 5864), range(4296, 4304)))
            x3 = [e[0] for e in xy3]
            y3 = [e[1] for e in xy3]
            for d in ("-d", ""):
                # Zoom 0 -> identifier '1'.
                self.assert_tiles_generated(
                    cmd=".build/venv/bin/generate_tiles {} "
                    "-c tilegeneration/test-nosns.yaml -t 1 -l polygon2 -z 0".format(d),
                    main_func=generate.main,
                    directory="/tmp/tiles/",
                    tiles_pattern="1.0.0/%s/default/2012/swissgrid_01/%s/%i/%i.png",
                    tiles=list(zip(repeat("polygon2", len(x)), repeat("1", len(x)), x, y)),
                    regex=True,
                    expected=r"""The tile generation of layer 'polygon2 \(DATE=2012\)' is finish
Nb generated metatiles: 1
Nb metatiles dropped: 0
Nb generated tiles: 64
Nb tiles dropped: 43
Nb tiles stored: 21
Nb tiles in error: 0
Total time: [0-9]+:[0-9][0-9]:[0-9][0-9]
Total size: 16 Kio
Time per tile: [0-9]+ ms
Size per tile: 788 o
""",
                )
                # Zoom 1 -> identifier '0_2'.
                self.assert_tiles_generated(
                    cmd=".build/venv/bin/generate_tiles {} "
                    "-c tilegeneration/test-nosns.yaml -t 1 -l polygon2 -z 1".format(d),
                    main_func=generate.main,
                    directory="/tmp/tiles/",
                    tiles_pattern="1.0.0/%s/default/2012/swissgrid_01/%s/%i/%i.png",
                    tiles=list(zip(repeat("polygon2", len(x2)), repeat("0_2", len(x2)), x2, y2)),
                    regex=True,
                    expected=r"""The tile generation of layer 'polygon2 \(DATE=2012\)' is finish
Nb generated metatiles: 1
Nb metatiles dropped: 0
Nb generated tiles: 64
Nb tiles dropped: 36
Nb tiles stored: 28
Nb tiles in error: 0
Total time: [0-9]+:[0-9][0-9]:[0-9][0-9]
Total size: 22 Kio
Time per tile: [0-9]+ ms
Size per tile: 806 o
""",
                )
                # Zoom 2 -> identifier '0_1'.
                self.assert_tiles_generated(
                    cmd=".build/venv/bin/generate_tiles {} "
                    "-c tilegeneration/test-nosns.yaml -t 1 -l polygon2 -z 2".format(d),
                    main_func=generate.main,
                    directory="/tmp/tiles/",
                    tiles_pattern="1.0.0/%s/default/2012/swissgrid_01/%s/%i/%i.png",
                    tiles=list(zip(repeat("polygon2", len(x3)), repeat("0_1", len(x3)), x3, y3)),
                    regex=True,
                    expected=r"""The tile generation of layer 'polygon2 \(DATE=2012\)' is finish
Nb generated metatiles: 1
Nb metatiles dropped: 0
Nb generated tiles: 64
Nb tiles dropped: 24
Nb tiles stored: 40
Nb tiles in error: 0
Total time: [0-9]+:[0-9][0-9]:[0-9][0-9]
Total size: 32 Kio
Time per tile: [0-9]+ ms
Size per tile: 818 o
""",
                )
            log_capture.check()
def test_empty_bbox(self) -> None:
for d in ("-d", ""):
with LogCapture("tilecloud_chain", level=30) as log_capture:
self.assert_tiles_generated(
cmd=".build/venv/bin/generate_tiles %s -c tilegeneration/test-nosns.yaml "
"-l point_hash --bbox 700000 250000 800000 300000" % d,
main_func=generate.main,
directory="/tmp/tiles/",
tiles_pattern="1.0.0/%s",
tiles=[],
regex=True,
expected=r"""The tile generation of layer 'point_hash \(DATE=2012\)' is finish
Nb generated metatiles: 0
Nb metatiles dropped: 0
Nb generated tiles: 0
Nb tiles dropped: 0
Nb tiles stored: 0
Nb tiles in error: 0
Total time: [0-9]+:[0-9][0-9]:[0-9][0-9]
""",
)
# Second time for the debug mode
log_capture.check(
("tilecloud_chain", "WARNING", "bounds empty for zoom 0"),
("tilecloud_chain", "WARNING", "bounds empty for zoom 1"),
("tilecloud_chain", "WARNING", "bounds empty for zoom 2"),
("tilecloud_chain", "WARNING", "bounds empty for zoom 3"),
)
def test_zoom(self) -> None:
for d in ("-d", ""):
with LogCapture("tilecloud_chain", level=30) as log_capture:
self.assert_tiles_generated(
cmd=".build/venv/bin/generate_tiles {} "
"-c tilegeneration/test-nosns.yaml -l point_hash --zoom 1".format(d),
main_func=generate.main,
directory="/tmp/tiles/",
tiles_pattern="1.0.0/%s/default/2012/swissgrid_5/%i/%i/%i.png",
tiles=[("point_hash", 1, 11, 14), ("point_hash", 1, 15, 8)],
regex=True,
expected=r"""The tile generation of layer 'point_hash \(DATE=2012\)' is finish
Nb generated metatiles: 1
Nb metatiles dropped: 0
Nb generated tiles: 64
Nb tiles dropped: 62
Nb tiles stored: 2
Nb tiles in error: 0
Total time: [0-9]+:[0-9][0-9]:[0-9][0-9]
Total size: [89][0-9][0-9] o
Time per tile: [0-9]+ ms
Size per tile: 4[0-9][0-9] o
""",
)
log_capture.check()
def test_zoom_range(self) -> None:
for d in ("-d", ""):
with LogCapture("tilecloud_chain", level=30) as log_capture:
self.assert_tiles_generated(
cmd=".build/venv/bin/generate_tiles {} "
"-c tilegeneration/test-nosns.yaml -l point_hash --zoom 1-3".format(d),
main_func=generate.main,
directory="/tmp/tiles/",
tiles_pattern="1.0.0/%s/default/2012/swissgrid_5/%i/%i/%i.png",
tiles=[
("point_hash", 1, 11, 14),
("point_hash", 1, 15, 8),
("point_hash", 2, 29, 35),
("point_hash", 2, 39, 21),
("point_hash", 3, 58, 70),
("point_hash", 3, 78, 42),
],
regex=True,
expected=r"""The tile generation of layer 'point_hash \(DATE=2012\)' is finish
Nb generated metatiles: 9
Nb metatiles dropped: 4
Nb generated tiles: 320
Nb tiles dropped: 314
Nb tiles stored: 6
Nb tiles in error: 0
Total time: [0-9]+:[0-9][0-9]:[0-9][0-9]
Total size: 2.[0-9] Kio
Time per tile: [0-9]+ ms
Size per tile: 4[0-9][0-9] o
""",
)
log_capture.check()
def test_no_zoom(self) -> None:
for d in ("-d", ""):
with LogCapture("tilecloud_chain", level=30) as log_capture:
self.assert_tiles_generated(
cmd=(
".build/venv/bin/generate_tiles {} -c tilegeneration/test-nosns.yaml -l point_hash"
).format(d),
main_func=generate.main,
directory="/tmp/tiles/",
tiles_pattern="1.0.0/%s/default/2012/swissgrid_5/%i/%i/%i.png",
tiles=[
("point_hash", 0, 5, 7),
("point_hash", 0, 7, 4),
("point_hash", 1, 11, 14),
("point_hash", 1, 15, 8),
("point_hash", 2, 29, 35),
("point_hash", 2, 39, 21),
("point_hash", 3, 58, 70),
("point_hash", 3, 78, 42),
],
regex=True,
expected=r"""The tile generation of layer 'point_hash \(DATE=2012\)' is finish
Nb generated metatiles: 10
Nb metatiles dropped: 4
Nb generated tiles: 384
Nb tiles dropped: 376
Nb tiles stored: 8
Nb tiles in error: 0
Total time: [0-9]+:[0-9][0-9]:[0-9][0-9]
Total size: 3.[0-9] Kio
Time per tile: [0-9]+ ms
Size per tile: 4[0-9][0-9] o
""",
)
log_capture.check()
def test_py_buffer(self) -> None:
for d in ("-d", ""):
with LogCapture("tilecloud_chain", level=30) as log_capture:
self.assert_tiles_generated(
cmd=".build/venv/bin/generate_tiles %s -c tilegeneration/test-nosns.yaml "
"-l point_px_buffer --zoom 0-2" % d,
main_func=generate.main,
directory="/tmp/tiles/",
tiles_pattern="1.0.0/point_px_buffer/default/2012/swissgrid_5/%i/%i/%i.png",
tiles=[(0, 5, 7), (0, 7, 4), (1, 11, 14), (1, 15, 8), (2, 29, 35), (2, 39, 21)],
regex=True,
expected=r"""The tile generation of layer 'point_px_buffer \(DATE=2012\)' is finish
Nb generated metatiles: 10
Nb metatiles dropped: 4
Nb generated tiles: 384
Nb tiles dropped: 378
Nb tiles stored: 6
Nb tiles in error: 0
Total time: [0-9]+:[0-9][0-9]:[0-9][0-9]
Total size: 2.[0-9] Kio
Time per tile: [0-9]+ ms
Size per tile: 4[0-9][0-9] o
""",
)
log_capture.check()
def test_zoom_list(self) -> None:
for d in ("-d", ""):
with LogCapture("tilecloud_chain", level=30) as log_capture:
self.assert_tiles_generated(
cmd=(
".build/venv/bin/generate_tiles %s -c tilegeneration/test-nosns.yaml "
"-l point_hash --zoom 0,2,3" % d
),
main_func=generate.main,
directory="/tmp/tiles/",
tiles_pattern="1.0.0/%s/default/2012/swissgrid_5/%i/%i/%i.png",
tiles=[
("point_hash", 0, 5, 7),
("point_hash", 0, 7, 4),
("point_hash", 2, 29, 35),
("point_hash", 2, 39, 21),
("point_hash", 3, 58, 70),
("point_hash", 3, 78, 42),
],
regex=True,
expected=r"""The tile generation of layer 'point_hash \(DATE=2012\)' is finish
Nb generated metatiles: 9
Nb metatiles dropped: 4
Nb generated tiles: 320
Nb tiles dropped: 314
Nb tiles stored: 6
Nb tiles in error: 0
Total time: [0-9]+:[0-9][0-9]:[0-9][0-9]
Total size: 2.[0-9] Kio
Time per tile: [0-9]+ ms
Size per tile: 4[0-9][0-9] o
""",
)
log_capture.check()
    def test_layer_bbox(self) -> None:
        """Check bbox handling: layer default bbox, explicit -b (int and float), and the 'all' layer bbox."""
        for d in ("-d", ""):
            with LogCapture("tilecloud_chain", level=30) as log_capture:
                # Default layer bbox: the whole 3x4 tile extent is generated.
                self.assert_tiles_generated(
                    cmd=".build/venv/bin/generate_tiles {} "
                    "-c tilegeneration/test-nosns.yaml -l polygon -z 0".format(d),
                    main_func=generate.main,
                    directory="/tmp/tiles/",
                    tiles_pattern="1.0.0/polygon/default/2012/swissgrid_5/0/%i/%i.png",
                    tiles=list(product((5, 6, 7), (4, 5, 6, 7))),
                    regex=True,
                    expected=r"""The tile generation of layer 'polygon \(DATE=2012\)' is finish
Nb generated tiles: 12
Nb tiles dropped: 0
Nb tiles stored: 12
Nb tiles in error: 0
Total time: [0-9]+:[0-9][0-9]:[0-9][0-9]
Total size: [0-9.]+ Kio
Time per tile: [0-9.]+ ms
Size per tile: [69][0-9][0-9] o
""",
                )
                # Explicit -b bbox with integer coordinates restricts to two tiles.
                self.assert_tiles_generated(
                    cmd=".build/venv/bin/generate_tiles %s "
                    "-c tilegeneration/test-nosns.yaml -l polygon -z 0"
                    " -b 550000 170000 560000 180000" % d,
                    main_func=generate.main,
                    directory="/tmp/tiles/",
                    tiles_pattern="1.0.0/polygon/default/2012/swissgrid_5/0/%i/%i.png",
                    tiles=[(6, 5), (7, 5)],
                    regex=True,
                    expected=r"""The tile generation of layer 'polygon \(DATE=2012\)' is finish
Nb generated tiles: 2
Nb tiles dropped: 0
Nb tiles stored: 2
Nb tiles in error: 0
Total time: [0-9]+:[0-9][0-9]:[0-9][0-9]
Total size: 1.[6-9] Kio
Time per tile: [0-9]+ ms
Size per tile: [89][0-9][0-9] o
""",
                )
                # Same bbox given with float coordinates must behave identically.
                self.assert_tiles_generated(
                    cmd=".build/venv/bin/generate_tiles %s "
                    "-c tilegeneration/test-nosns.yaml -l polygon -z 0"
                    " -b 550000.0 170000.0 560000.0 180000.0" % d,
                    main_func=generate.main,
                    directory="/tmp/tiles/",
                    tiles_pattern="1.0.0/polygon/default/2012/swissgrid_5/0/%i/%i.png",
                    tiles=[(6, 5), (7, 5)],
                    regex=True,
                    expected=r"""The tile generation of layer 'polygon \(DATE=2012\)' is finish
Nb generated tiles: 2
Nb tiles dropped: 0
Nb tiles stored: 2
Nb tiles in error: 0
Total time: [0-9]+:[0-9][0-9]:[0-9][0-9]
Total size: 1.[6-9] Kio
Time per tile: [0-9]+ ms
Size per tile: [89][0-9][0-9] o
""",
                )
                # The 'all' layer has its own configured bbox giving the same two tiles.
                self.assert_tiles_generated(
                    cmd=".build/venv/bin/generate_tiles {} "
                    "-c tilegeneration/test-nosns.yaml -l all -z 0".format(d),
                    main_func=generate.main,
                    directory="/tmp/tiles/",
                    tiles_pattern="1.0.0/all/default/2012/swissgrid_5/0/%i/%i.png",
                    tiles=[(6, 5), (7, 5)],
                    regex=True,
                    expected=r"""The tile generation of layer 'all \(DATE=2012\)' is finish
Nb generated tiles: 2
Nb tiles dropped: 0
Nb tiles stored: 2
Nb tiles in error: 0
Total time: [0-9]+:[0-9][0-9]:[0-9][0-9]
Total size: 1.[6-9] Kio
Time per tile: [0-9]+ ms
Size per tile: [89][0-9][0-9] o
""",
                )
                log_capture.check()
def test_hash_generation(self) -> None:
for d in ("-d", ""):
with LogCapture("tilecloud_chain", level=30) as log_capture:
self.assert_tiles_generated(
cmd=".build/venv/bin/generate_tiles {} "
"-c tilegeneration/test-nosns.yaml -l point_hash -z 0".format(d),
main_func=generate.main,
directory="/tmp/tiles/",
tiles_pattern="1.0.0/point_hash/default/2012/swissgrid_5/0/%i/%i.png",
tiles=[(5, 7), (7, 4)],
regex=True,
expected=r"""The tile generation of layer 'point_hash \(DATE=2012\)' is finish
Nb generated metatiles: 1
Nb metatiles dropped: 0
Nb generated tiles: 64
Nb tiles dropped: 62
Nb tiles stored: 2
Nb tiles in error: 0
Total time: [0-9]+:[0-9][0-9]:[0-9][0-9]
Total size: 9[0-9][0-9] o
Time per tile: [0-9]+ ms
Size per tile: [45][0-9][0-9] o
""",
)
log_capture.check()
    @pytest.mark.skip(reason="Don't test mapnik")
    def test_mapnik(self) -> None:
        """Generate the mapnik layer on zoom 0 (currently skipped: mapnik is not tested)."""
        for d in ("-d", ""):
            with LogCapture("tilecloud_chain", level=30) as log_capture:
                self.assert_tiles_generated(
                    cmd=".build/venv/bin/generate_tiles {} "
                    "-c tilegeneration/test-nosns.yaml -l mapnik -z 0".format(d),
                    main_func=generate.main,
                    directory="/tmp/tiles/",
                    tiles_pattern="1.0.0/mapnik/default/2012/swissgrid_5/0/%i/%i.png",
                    tiles=list(product((5, 6, 7), (4, 5, 6, 7))),
                    regex=True,
                    expected=r"""The tile generation of layer 'mapnik' is finish
Nb generated tiles: 12
Nb tiles dropped: 0
Nb tiles stored: 12
Nb tiles in error: 0
Total time: [0-9]+:[0-9][0-9]:[0-9][0-9]
Total size: 9.7 Kio
Time per tile: [0-9]+ ms
Size per tile: 823 o
""",
                )
                log_capture.check()
    @pytest.mark.skip(reason="Don't test mapnik")
    def test_mapnik_grid(self) -> None:
        """Generate mapnik UTFGrid JSON tiles and check two grid contents (skipped: mapnik untested)."""
        for d in ("-d", ""):
            with LogCapture("tilecloud_chain", level=30) as log_capture:
                self.assert_tiles_generated(
                    cmd=".build/venv/bin/generate_tiles {} "
                    "-c tilegeneration/test-nosns.yaml -l mapnik_grid -z 0".format(d),
                    main_func=generate.main,
                    directory="/tmp/tiles/",
                    tiles_pattern="1.0.0/mapnik_grid/default/2012/swissgrid_5/0/%i/%i.json",
                    tiles=list(product((5, 6, 7), (4, 5, 6, 7))),
                    regex=True,
                    expected=r"""The tile generation of layer 'mapnik_grid' is finish
Nb generated tiles: 12
Nb tiles dropped: 0
Nb tiles stored: 12
Nb tiles in error: 0
Total time: [0-9]+:[0-9][0-9]:[0-9][0-9]
Total size: 4.5 Kio
Time per tile: [0-9]+ ms
Size per tile: 385 o
""",
                )
                # 16x16 grid: the polygon covers the two bottom rows of tile 5/5 ...
                with open("/tmp/tiles/1.0.0/mapnik_grid/default/2012/swissgrid_5/0/5/5.json") as f:
                    self.assert_result_equals(
                        f.read(),
                        '{"keys": ["", "1"], "data": {"1": {"name": "polygon1"}}, "grid": ["                "'
                        ', "                ", "                ", "                ", "                "'
                        ', "                ", "                ", "                ", "                "'
                        ', "                ", "                ", "                ", "                "'
                        ', "                ", "!!!!!!!!!!!!!!!!", "!!!!!!!!!!!!!!!!"]}',
                    )
                # ... and the whole of tile 6/5 (single key, empty grid glyphs).
                with open("/tmp/tiles/1.0.0/mapnik_grid/default/2012/swissgrid_5/0/6/5.json") as f:
                    self.assert_result_equals(
                        f.read(),
                        '{"keys": ["1"], "data": {"1": {"name": "polygon1"}}, "grid": ["                "'
                        ', "                ", "                ", "                ", "                "'
                        ', "                ", "                ", "                ", "                "'
                        ', "                ", "                ", "                ", "                "'
                        ', "                ", "                ", "                "]}',
                    )
                log_capture.check()
    @pytest.mark.skip(reason="Don't test mapnik")
    def test_mapnik_grid_drop(self) -> None:
        """Empty UTFGrid tiles must be dropped; only two are stored (skipped: mapnik untested)."""
        for d in ("-d", ""):
            with LogCapture("tilecloud_chain", level=30) as log_capture:
                self.assert_tiles_generated(
                    cmd=".build/venv/bin/generate_tiles {} "
                    "-c tilegeneration/test-nosns.yaml -l mapnik_grid_drop -z 0".format(d),
                    main_func=generate.main,
                    directory="/tmp/tiles/",
                    tiles_pattern="1.0.0/mapnik_grid_drop/default/2012/swissgrid_5/0/%i/%i.json",
                    tiles=((5, 7), (7, 4)),
                    regex=True,
                    expected=r"""The tile generation of layer 'mapnik_grid_drop' is finish
Nb generated tiles: 12
Nb tiles dropped: 10
Nb tiles stored: 2
Nb tiles in error: 0
Total time: [0-9]+:[0-9][0-9]:[0-9][0-9]
Total size: 768 o
Time per tile: [0-9]+ ms
Size per tile: 384 o
""",
                )
                log_capture.check()
def test_not_authorised_user(self) -> None:
for d in ("-d", "-q"):
with LogCapture("tilecloud_chain", level=30) as log_capture:
self.assert_cmd_exit_equals(
cmd=f".build/venv/bin/generate_tiles {d} -c tilegeneration/test-authorised.yaml",
main_func=generate.main,
)
log_capture.check(
(
"tilecloud_chain.generate",
"ERROR",
"not authorized, authorized user is: www-data.",
)
)
def test_verbose(self) -> None:
for d in ("-d", ""):
with LogCapture("tilecloud_chain", level=30) as log_capture:
self.run_cmd(
cmd=".build/venv/bin/generate_tiles {} "
"-c tilegeneration/test-nosns.yaml -t 2 -v -l polygon".format(d),
main_func=generate.main,
)
log_capture.check()
def test_time(self) -> None:
for d in ("-d", ""):
with LogCapture("tilecloud_chain", level=30) as log_capture:
self.assert_cmd_equals(
cmd=".build/venv/bin/generate_tiles {} "
"-c tilegeneration/test.yaml --time 2 -l polygon".format(d),
main_func=generate.main,
expected=r"""size: 770
size: 862
size: 862
size: 862
time: [0-9]*
size: 862
size: 862
""",
regex=True,
empty_err=True,
)
log_capture.check()
def test_time_layer_bbox(self) -> None:
for d in ("-d", ""):
with LogCapture("tilecloud_chain", level=30) as log_capture:
self.assert_cmd_equals(
cmd=".build/venv/bin/generate_tiles {} "
"-c tilegeneration/test.yaml --time 2 -l all".format(d),
main_func=generate.main,
expected=r"""size: 1010
size: 1010
size: 1010
size: 1010
time: [0-9]*
size: 1010
size: 1010
""",
regex=True,
empty_err=True,
)
log_capture.check()
# def test_daemonize(self):
# with LogCapture("tilecloud_chain", level=30) as log_capture:
# self.assert_cmd_equals(
# cmd='.build/venv/bin/generate_tiles %s -c tilegeneration/test.yaml -t 1 --daemonize' % d,
# main_func=generate.main,
# expected=r"""Daemonize with pid [0-9]*.""",
# regex=True)
# log_capture.check()
def _touch(self, tiles_pattern: str, tiles: List[Tuple[int, int]]) -> None:
for tile in tiles:
path = tiles_pattern % tile
directory = os.path.dirname(path)
if not os.path.exists(directory):
os.makedirs(directory)
with open(path, "w"):
pass
    def test_delete_meta(self) -> None:
        """Regeneration must delete pre-existing tiles that hash as empty.

        NOTE(review): this body is identical to test_delete_no_meta and also
        targets the point_hash_no_meta layer — presumably a copy/paste; confirm
        whether a meta layer was intended here.
        """
        for d in ("-d", ""):
            if os.path.exists("/tmp/tiles/"):
                shutil.rmtree("/tmp/tiles/")
            # Pre-create a full grid of empty placeholder tiles.
            self._touch(
                tiles_pattern="/tmp/tiles/1.0.0/point_hash_no_meta/default/2012/swissgrid_5/0/%i/%i.png",
                tiles=list(product(range(12), range(16))),
            )
            self.assert_tiles_generated_deleted(
                cmd=(
                    ".build/venv/bin/generate_tiles %s -c tilegeneration/test-nosns.yaml "
                    "-l point_hash_no_meta -z 0" % d
                ),
                main_func=generate.main,
                directory="/tmp/tiles/",
                tiles_pattern="1.0.0/point_hash_no_meta/default/2012/swissgrid_5/0/%i/%i.png",
                tiles=[(5, 7), (7, 4)],
                regex=True,
                expected=r"""The tile generation of layer 'point_hash_no_meta \(DATE=2012\)' is finish
Nb generated tiles: 247
Nb tiles dropped: 245
Nb tiles stored: 2
Nb tiles in error: 0
Total time: [0-9]+:[0-9][0-9]:[0-9][0-9]
Total size: [89][0-9][0-9] o
Time per tile: [0-9]+ ms
Size per tile: 4[0-9][0-9] o
""",
            )
    def test_delete_no_meta(self) -> None:
        """Regeneration of a non-metatile layer deletes pre-existing empty tiles.

        NOTE(review): identical to test_delete_meta — see that method.
        """
        for d in ("-d", ""):
            if os.path.exists("/tmp/tiles/"):
                shutil.rmtree("/tmp/tiles/")
            # Pre-create a full grid of empty placeholder tiles.
            self._touch(
                tiles_pattern="/tmp/tiles/1.0.0/point_hash_no_meta/default/2012/swissgrid_5/0/%i/%i.png",
                tiles=list(product(range(12), range(16))),
            )
            self.assert_tiles_generated_deleted(
                cmd=(
                    ".build/venv/bin/generate_tiles %s -c tilegeneration/test-nosns.yaml "
                    "-l point_hash_no_meta -z 0" % d
                ),
                main_func=generate.main,
                directory="/tmp/tiles/",
                tiles_pattern="1.0.0/point_hash_no_meta/default/2012/swissgrid_5/0/%i/%i.png",
                tiles=[(5, 7), (7, 4)],
                regex=True,
                expected=r"""The tile generation of layer 'point_hash_no_meta \(DATE=2012\)' is finish
Nb generated tiles: 247
Nb tiles dropped: 245
Nb tiles stored: 2
Nb tiles in error: 0
Total time: [0-9]+:[0-9][0-9]:[0-9][0-9]
Total size: [89][0-9][0-9] o
Time per tile: [0-9]+ ms
Size per tile: 4[0-9][0-9] o
""",
            )
def test_error_file_create(self) -> None:
tile_mbt = os.environ["TILE_NB_THREAD"]
metatile_mbt = os.environ["METATILE_NB_THREAD"]
os.environ["TILE_NB_THREAD"] = "1"
os.environ["METATILE_NB_THREAD"] = "1"
if os.path.exists("error.list"):
os.remove("error.list")
self.assert_main_except_equals(
cmd=".build/venv/bin/generate_tiles -q -c tilegeneration/test-nosns.yaml -l point_error",
main_func=generate.main,
regex=True,
get_error=True,
expected=[
[
"error.list",
"\n".join(
[
r"# \[[0-9][0-9]-[0-9][0-9]-20[0-9][0-9] [0-9][0-9]:[0-9][0-9]:[0-9][0-9]\] "
r"Start the layer 'point_error' generation",
r"0/0/0:\+8/\+8 config_file=tilegeneration/test-nosns.yaml dimension_DATE=2012 "
r"host=localhost layer=point_error # \[[0-9][0-9]-[0-9][0-9]-20[0-9][0-9] "
r"[0-9][0-9]:[0-9][0-9]:[0-9][0-9]\] 'WMS server error: msWMSLoadGetMapParams\(\): "
r"WMS server error\. Invalid layer\(s\) given in the LAYERS parameter\. "
r"A layer might be disabled for this request\. Check wms/ows_enable_request "
r"settings\.'",
r"0/0/8:\+8/\+8 config_file=tilegeneration/test-nosns.yaml dimension_DATE=2012 "
r"host=localhost layer=point_error # \[[0-9][0-9]-[0-9][0-9]-20[0-9][0-9] "
r"[0-9][0-9]:[0-9][0-9]:[0-9][0-9]\] 'WMS server error: msWMSLoadGetMapParams\(\): "
r"WMS server error\. Invalid layer\(s\) given in the LAYERS parameter\. "
r"A layer might be disabled for this request\. Check wms/ows_enable_request "
r"settings\.'",
r"0/8/0:\+8/\+8 config_file=tilegeneration/test-nosns.yaml dimension_DATE=2012 "
r"host=localhost layer=point_error # \[[0-9][0-9]-[0-9][0-9]-20[0-9][0-9] "
r"[0-9][0-9]:[0-9][0-9]:[0-9][0-9]\] 'WMS server error: msWMSLoadGetMapParams\(\): "
r"WMS server error\. Invalid layer\(s\) given in the LAYERS parameter\. "
r"A layer might be disabled for this request\. Check wms/ows_enable_request settings\.'",
"",
]
),
]
],
)
os.environ["TILE_NB_THREAD"] = tile_mbt
os.environ["METATILE_NB_THREAD"] = metatile_mbt
    def test_error_file_use(self) -> None:
        """Re-generate only the tiles listed in an error.list file (--tiles)."""
        tile_mbt = os.environ["TILE_NB_THREAD"]
        metatile_mbt = os.environ["METATILE_NB_THREAD"]
        # NOTE(review): "congifile" is a typo for "configfile" (local name only).
        main_congifile = os.environ["TILEGENERATION_MAIN_CONFIGFILE"]
        os.environ["TILE_NB_THREAD"] = "1"
        os.environ["METATILE_NB_THREAD"] = "1"
        os.environ["TILEGENERATION_MAIN_CONFIGFILE"] = "tilegeneration/test-nosns.yaml"
        try:
            if os.path.exists("error.list"):
                os.remove("error.list")
            # Hand-written error.list: comments must be ignored, the three
            # metatile lines must be re-queued.
            with open("error.list", "w") as error_file:
                error_file.write(
                    "# comment\n"
                    "0/0/0:+8/+8 config_file=tilegeneration/test-nosns.yaml dimension_DATE=2012 layer=point_hash "
                    "# comment\n"
                    "0/0/8:+8/+8 config_file=tilegeneration/test-nosns.yaml dimension_DATE=2012 layer=point_hash\n"
                    "0/8/0:+8/+8 config_file=tilegeneration/test-nosns.yaml dimension_DATE=2012 layer=point_hash\n"
                )
            self.assert_tiles_generated(
                cmd=".build/venv/bin/generate_tiles -d --tiles error.list",
                main_func=generate.main,
                directory="/tmp/tiles/",
                tiles_pattern="1.0.0/point_hash/default/2012/swissgrid_5/%i/%i/%i.png",
                tiles=[(0, 5, 7), (0, 7, 4)],
                regex=True,
                expected=r"""The tile generation is finish
Nb generated metatiles: 3
Nb metatiles dropped: 1
Nb generated tiles: 128
Nb tiles dropped: 126
Nb tiles stored: 2
Nb tiles in error: 0
Total time: [0-9]+:[0-9][0-9]:[0-9][0-9]
Total size: [89][0-9][0-9] o
Time per tile: [0-9]+ ms
Size per tile: [45][0-9][0-9] o
""",
            )
        finally:
            os.environ["TILE_NB_THREAD"] = tile_mbt
            os.environ["METATILE_NB_THREAD"] = metatile_mbt
            os.environ["TILEGENERATION_MAIN_CONFIGFILE"] = main_congifile
    def test_multy(self) -> None:
        """Generate the 'multi' layer over both POINT_NAME dimension values in one run."""
        for d in ("-v", ""):
            with LogCapture("tilecloud_chain", level=30) as log_capture:
                self.assert_tiles_generated(
                    cmd=f".build/venv/bin/generate_tiles {d} -c tilegeneration/test-multidim.yaml",
                    main_func=generate.main,
                    directory="/tmp/tiles/",
                    tiles_pattern="1.0.0/multi/default/%s/swissgrid/%i/%i/%i.png",
                    tiles=[
                        ("point1", 0, 5, 7),
                        ("point1", 1, 11, 14),
                        ("point1", 2, 29, 35),
                        ("point2", 0, 7, 4),
                        ("point2", 1, 15, 8),
                        ("point2", 2, 39, 21),
                    ],
                    regex=True,
                    expected=r"""The tile generation of layer 'multi \(POINT_NAME=point1 - POINT_NAME=point2\)' is finish
Nb generated metatiles: 16
Nb metatiles dropped: 10
Nb generated tiles: 384
Nb tiles dropped: 378
Nb tiles stored: 6
Nb tiles in error: 0
Total time: [0-9]+:[0-9][0-9]:[0-9][0-9]
Total size: 2.9 Kio
Time per tile: [0-9]+ ms
Size per tile: 498 o
""",
                )
                log_capture.check()
    def test_redis(self) -> None:
        """Master/slave generation through the Redis queue.

        The master enqueues 10 metatile jobs, the status command reports them,
        the slave consumes the queue, and the final status must be empty.
        """
        # Start from an empty queue.
        RedisTileStore(sentinels=[["redis_sentinel", 26379]]).delete_all()
        self.assert_cmd_equals(
            cmd=".build/venv/bin/generate_tiles -c tilegeneration/test-redis.yaml --role master -l point",
            main_func=generate.main,
            regex=False,
            expected="""The tile generation of layer 'point (DATE=2012)' is finish
Nb of generated jobs: 10
""",
        )
        # The queue now holds the 10 enqueued jobs.
        self.assert_cmd_equals(
            cmd=".build/venv/bin/generate_controller -c tilegeneration/test-redis.yaml --status",
            main_func=controller.main,
            regex=False,
            expected="""Approximate number of tiles to generate: 10
Approximate number of generating tiles: 0
Tiles in error:
""",
        )
        # The slave consumes the queue and stores the tiles.
        self.assert_cmd_equals(
            cmd=".build/venv/bin/generate_tiles -c tilegeneration/test-redis.yaml --role slave",
            main_func=generate.main,
            regex=True,
            expected=r"""The tile generation is finish
Nb generated metatiles: 10
Nb metatiles dropped: 0
Nb generated tiles: 640
Nb tiles dropped: 0
Nb tiles stored: 640
Nb tiles in error: 0
Total time: 0:\d\d:\d\d
Total size: \d+ Kio
Time per tile: \d+ ms
Size per tile: \d+ o
""",
        )
        # The queue must be empty afterwards.
        self.assert_cmd_equals(
            cmd=".build/venv/bin/generate_controller -c tilegeneration/test-redis.yaml --status",
            main_func=controller.main,
            regex=False,
            expected="""Approximate number of tiles to generate: 0
Approximate number of generating tiles: 0
Tiles in error:
""",
        )
    def test_redis_main_config(self) -> None:
        """Same master/slave Redis scenario as test_redis, but with the main
        configuration supplied via TILEGENERATION_MAIN_CONFIGFILE and a
        separate project configuration file."""
        # NOTE(review): "congifile" is a typo for "configfile" (local name only).
        main_congifile = os.environ["TILEGENERATION_MAIN_CONFIGFILE"]
        os.environ["TILEGENERATION_MAIN_CONFIGFILE"] = "tilegeneration/test-redis-main.yaml"
        try:
            # Start from an empty queue.
            RedisTileStore(sentinels=[["redis_sentinel", 26379]]).delete_all()
            self.assert_cmd_equals(
                cmd=".build/venv/bin/generate_tiles -c tilegeneration/test-redis-project.yaml --role master -l point",
                main_func=generate.main,
                regex=False,
                expected="""The tile generation of layer 'point (DATE=2012)' is finish
Nb of generated jobs: 10
""",
            )
            self.assert_cmd_equals(
                cmd=".build/venv/bin/generate_controller -c tilegeneration/test-redis-project.yaml --status",
                main_func=controller.main,
                regex=False,
                expected="""Approximate number of tiles to generate: 10
Approximate number of generating tiles: 0
Tiles in error:
""",
            )
            self.assert_cmd_equals(
                cmd=".build/venv/bin/generate_tiles -c tilegeneration/test-redis-project.yaml --role slave",
                main_func=generate.main,
                regex=True,
                expected=r"""The tile generation is finish
Nb generated metatiles: 10
Nb metatiles dropped: 0
Nb generated tiles: 640
Nb tiles dropped: 0
Nb tiles stored: 640
Nb tiles in error: 0
Total time: 0:\d\d:\d\d
Total size: \d+ Kio
Time per tile: \d+ ms
Size per tile: \d+ o
""",
            )
            self.assert_cmd_equals(
                cmd=".build/venv/bin/generate_controller -c tilegeneration/test-redis-project.yaml --status",
                main_func=controller.main,
                regex=False,
                expected="""Approximate number of tiles to generate: 0
Approximate number of generating tiles: 0
Tiles in error:
""",
            )
        finally:
            os.environ["TILEGENERATION_MAIN_CONFIGFILE"] = main_congifile
|
{"/tilecloud_chain/copy_.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/format.py"], "/tilecloud_chain/controller.py": ["/tilecloud_chain/__init__.py"], "/tilecloud_chain/views/admin.py": ["/tilecloud_chain/server.py", "/tilecloud_chain/__init__.py", "/tilecloud_chain/controller.py"], "/tilecloud_chain/server.py": ["/tilecloud_chain/security.py", "/tilecloud_chain/__init__.py", "/tilecloud_chain/controller.py"], "/tilecloud_chain/expiretiles.py": ["/tilecloud_chain/__init__.py"], "/tilecloud_chain/tests/test_controller.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_config.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_expiretiles.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/__init__.py": ["/tilecloud_chain/security.py", "/tilecloud_chain/multitilestore.py", "/tilecloud_chain/timedtilestore.py"], "/tilecloud_chain/tests/test_generate.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_serve.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/server.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/internal_mapcache.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/generate.py", "/tilecloud_chain/server.py"], "/tilecloud_chain/tests/test_cost.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/cost.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/format.py"], "/tilecloud_chain/tests/test_copy.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/generate.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/database_logger.py", "/tilecloud_chain/format.py", "/tilecloud_chain/multitilestore.py", "/tilecloud_chain/timedtilestore.py", "/tilecloud_chain/mapnik_.py"], "/tilecloud_chain/tests/test_error.py": 
["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"]}
|
14,619
|
camptocamp/tilecloud-chain
|
refs/heads/master
|
/tilecloud_chain/tests/test_serve.py
|
import os
import shutil
import pytest
from pyramid.httpexceptions import HTTPBadRequest, HTTPNoContent
from pyramid.testing import DummyRequest
from testfixtures import LogCapture
from tilecloud_chain import controller, generate, server
from tilecloud_chain.server import PyramidView, app_factory
from tilecloud_chain.tests import CompareCase
# Expected WMTS GetCapabilities document for the test-serve configuration.
# NOTE: this is a regular-expression pattern (used with regex=True in
# assert_result_equals), hence the escaped "?" characters and the
# "[0-9]*" tails on the scale denominators.
CAPABILITIES = (
    r"""<\?xml version="1.0" encoding="UTF-8"\?>
<Capabilities version="1.0.0"
xmlns="http://www.opengis.net/wmts/1.0"
xmlns:ows="http://www.opengis.net/ows/1.1"
xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:gml="http://www.opengis.net/gml"
xsi:schemaLocation="http://schemas.opengis.net/wmts/1.0/wmtsGetCapabilities_response.xsd">
<ows:OperationsMetadata>
<ows:Operation name="GetCapabilities">
<ows:DCP>
<ows:HTTP>
<ows:Get xlink:href="http://wmts1/tiles/wmts/1.0.0/WMTSCapabilities.xml">
<ows:Constraint name="GetEncoding">
<ows:AllowedValues>
<ows:Value>REST</ows:Value>
</ows:AllowedValues>
</ows:Constraint>
</ows:Get>
<ows:Get xlink:href="http://wmts1/tiles/wmts/">
<ows:Constraint name="GetEncoding">
<ows:AllowedValues>
<ows:Value>KVP</ows:Value>
</ows:AllowedValues>
</ows:Constraint>
</ows:Get>
</ows:HTTP>
</ows:DCP>
</ows:Operation>
<ows:Operation name="GetTile">
<ows:DCP>
<ows:HTTP>
<ows:Get xlink:href="http://wmts1/tiles/wmts/">
<ows:Constraint name="GetEncoding">
<ows:AllowedValues>
<ows:Value>REST</ows:Value>
<ows:Value>KVP</ows:Value>
</ows:AllowedValues>
</ows:Constraint>
</ows:Get>
</ows:HTTP>
</ows:DCP>
</ows:Operation>
</ows:OperationsMetadata>
<!-- <ServiceMetadataURL xlink:href="" /> -->
<Contents>
<Layer>
<ows:Title>point_hash</ows:Title>
<ows:Identifier>point_hash</ows:Identifier>
<Style isDefault="true">
<ows:Identifier>default</ows:Identifier>
</Style>
<Format>image/png</Format>
<InfoFormat></InfoFormat>
<Dimension>
<ows:Identifier>DATE</ows:Identifier>
<Default>2012</Default>
<Value>2005</Value>
<Value>2010</Value>
<Value>2012</Value>
</Dimension>
<ResourceURL format="image/png" resourceType="tile"
template="http://wmts1/tiles/wmts/1.0.0/point_hash/default/{DATE}/"""
    r"""{TileMatrixSet}/{TileMatrix}/{TileRow}/{TileCol}.png" />
<TileMatrixSetLink>
<TileMatrixSet>swissgrid_5</TileMatrixSet>
</TileMatrixSetLink>
</Layer>
<TileMatrixSet>
<ows:Identifier>swissgrid_5</ows:Identifier>
<ows:SupportedCRS>urn:ogc:def:crs:EPSG::21781</ows:SupportedCRS>
<TileMatrix>
<ows:Identifier>0</ows:Identifier>
<ScaleDenominator>357142.85714[0-9]*</ScaleDenominator>
<TopLeftCorner>420000 350000</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>19</MatrixWidth>
<MatrixHeight>13</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>1</ows:Identifier>
<ScaleDenominator>178571.42857[0-9]*</ScaleDenominator>
<TopLeftCorner>420000 350000</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>38</MatrixWidth>
<MatrixHeight>25</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>2</ows:Identifier>
<ScaleDenominator>71428.571428[0-9]*</ScaleDenominator>
<TopLeftCorner>420000 350000</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>94</MatrixWidth>
<MatrixHeight>63</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>3</ows:Identifier>
<ScaleDenominator>35714.285714[0-9]*</ScaleDenominator>
<TopLeftCorner>420000 350000</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>188</MatrixWidth>
<MatrixHeight>125</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>4</ows:Identifier>
<ScaleDenominator>17857.142857[0-9]*</ScaleDenominator>
<TopLeftCorner>420000 350000</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>375</MatrixWidth>
<MatrixHeight>250</MatrixHeight>
</TileMatrix>
</TileMatrixSet>
</Contents>
</Capabilities>"""
)
class TestServe(CompareCase):
    """Integration tests for the WMTS tile server (KVP, REST and plain WSGI entry points)."""

    def setUp(self) -> None:  # noqa
        # The expected outputs include a full capabilities document:
        # disable unittest's diff truncation so failures are readable.
        self.maxDiff = None
@classmethod
def setUpClass(cls):  # noqa
    """Run the tests from the test directory, starting with a clean tile cache."""
    test_directory = os.path.dirname(__file__)
    os.chdir(test_directory)
    tiles_directory = "/tmp/tiles"
    if os.path.exists(tiles_directory):
        shutil.rmtree(tiles_directory)
@classmethod
def tearDownClass(cls):  # noqa
    """Return to the repository root and remove the tiles generated by the tests."""
    repository_root = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
    os.chdir(repository_root)
    tiles_directory = "/tmp/tiles"
    if os.path.exists(tiles_directory):
        shutil.rmtree(tiles_directory)
def test_serve_kvp(self) -> None:
    """Exercise the WMTS KVP endpoint: GetTile, parameter validation and GetCapabilities."""
    with LogCapture("tilecloud_chain", level=30) as log_capture:
        # Pre-generate the two tiles that the GetTile requests below expect to find.
        self.assert_tiles_generated(
            cmd=".build/venv/bin/generate_tiles -d -c tilegeneration/test-nosns.yaml "
            "-l point_hash --zoom 1",
            main_func=generate.main,
            directory="/tmp/tiles/",
            tiles_pattern="1.0.0/%s",
            tiles=[
                ("point_hash/default/2012/swissgrid_5/1/11/14.png"),
                ("point_hash/default/2012/swissgrid_5/1/15/8.png"),
            ],
            regex=True,
            expected=r"""The tile generation of layer 'point_hash \(DATE=2012\)' is finish
Nb generated metatiles: 1
Nb metatiles dropped: 0
Nb generated tiles: 64
Nb tiles dropped: 62
Nb tiles stored: 2
Nb tiles in error: 0
Total time: [0-9]+:[0-9][0-9]:[0-9][0-9]
Total size: [89][0-9][0-9] o
Time per tile: [0-9]+ ms
Size per tile: 4[0-9][0-9] o
""",
        )
        # Reset the module-level server state so it reloads this test's configuration.
        server.pyramid_server = None
        server.tilegeneration = None
        request = DummyRequest()
        request.registry.settings = {
            "tilegeneration_configfile": "tilegeneration/test-nosns.yaml",
        }
        # A valid KVP GetTile request for one of the generated tiles.
        request.params = {
            "Service": "WMTS",
            "Version": "1.0.0",
            "Request": "GetTile",
            "Format": "image/png",
            "Layer": "point_hash",
            "Style": "default",
            "TileMatrixSet": "swissgrid_5",
            "TileMatrix": "1",
            "TileRow": "11",
            "TileCol": "14",
        }
        serve = PyramidView(request)
        serve()
        self.assertEqual(request.response.headers["Content-Type"], "image/png")
        self.assertEqual(request.response.headers["Cache-Control"], "max-age=28800")
        # A tile outside the generated set yields 204 No Content.
        request.params["TileRow"] = "12"
        assert isinstance(serve(), HTTPNoContent)
        # Each invalid parameter must raise HTTPBadRequest; every mutation
        # below is reverted before the next case so only one field is bad at a time.
        request.params["TileRow"] = "11"
        request.params["Service"] = "test"
        self.assertRaises(HTTPBadRequest, serve)
        request.params["Service"] = "WMTS"
        request.params["Request"] = "test"
        self.assertRaises(HTTPBadRequest, serve)
        request.params["Request"] = "GetTile"
        request.params["Version"] = "0.9"
        self.assertRaises(HTTPBadRequest, serve)
        request.params["Version"] = "1.0.0"
        request.params["Format"] = "image/jpeg"
        self.assertRaises(HTTPBadRequest, serve)
        request.params["Format"] = "image/png"
        request.params["Layer"] = "test"
        self.assertRaises(HTTPBadRequest, serve)
        request.params["Layer"] = "point_hash"
        request.params["Style"] = "test"
        self.assertRaises(HTTPBadRequest, serve)
        request.params["Style"] = "default"
        request.params["TileMatrixSet"] = "test"
        self.assertRaises(HTTPBadRequest, serve)
        request.params["TileMatrixSet"] = "swissgrid_5"
        # A missing mandatory parameter is also rejected.
        del request.params["Service"]
        self.assertRaises(HTTPBadRequest, serve)
        # KVP GetCapabilities returns the full capabilities document for all layers.
        request.params = {
            "Service": "WMTS",
            "Version": "1.0.0",
            "Request": "GetCapabilities",
        }
        PyramidView(request)()
        self.assertEqual(request.response.headers["Content-Type"], "application/xml")
        self.assert_result_equals(
            request.response.body.decode("utf-8"),
            regex=True,
            expected=r"""<\?xml version="1.0" encoding="UTF-8"\?>
<Capabilities version="1.0.0"
xmlns="http://www.opengis.net/wmts/1.0"
xmlns:ows="http://www.opengis.net/ows/1.1"
xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:gml="http://www.opengis.net/gml"
xsi:schemaLocation="http://schemas.opengis.net/wmts/1.0/wmtsGetCapabilities_response.xsd">
<ows:OperationsMetadata>
<ows:Operation name="GetCapabilities">
<ows:DCP>
<ows:HTTP>
<ows:Get xlink:href="http://wmts1/tiles/wmts/1.0.0/WMTSCapabilities.xml">
<ows:Constraint name="GetEncoding">
<ows:AllowedValues>
<ows:Value>REST</ows:Value>
</ows:AllowedValues>
</ows:Constraint>
</ows:Get>
<ows:Get xlink:href="http://wmts1/tiles/wmts/">
<ows:Constraint name="GetEncoding">
<ows:AllowedValues>
<ows:Value>KVP</ows:Value>
</ows:AllowedValues>
</ows:Constraint>
</ows:Get>
</ows:HTTP>
</ows:DCP>
</ows:Operation>
<ows:Operation name="GetTile">
<ows:DCP>
<ows:HTTP>
<ows:Get xlink:href="http://wmts1/tiles/wmts/">
<ows:Constraint name="GetEncoding">
<ows:AllowedValues>
<ows:Value>REST</ows:Value>
<ows:Value>KVP</ows:Value>
</ows:AllowedValues>
</ows:Constraint>
</ows:Get>
</ows:HTTP>
</ows:DCP>
</ows:Operation>
</ows:OperationsMetadata>
<!-- <ServiceMetadataURL xlink:href="" /> -->
<Contents>
<Layer>
<ows:Title>all</ows:Title>
<ows:Identifier>all</ows:Identifier>
<Style isDefault="true">
<ows:Identifier>default</ows:Identifier>
</Style>
<Format>image/png</Format>
<Dimension>
<ows:Identifier>DATE</ows:Identifier>
<Default>2012</Default>
<Value>2005</Value>
<Value>2010</Value>
<Value>2012</Value>
</Dimension>
<ResourceURL format="image/png" resourceType="tile"
template="http://wmts1/tiles/wmts/1.0.0/all/default/"""
"""{DATE}/{TileMatrixSet}/{TileMatrix}/{TileRow}/{TileCol}.png" />
<TileMatrixSetLink>
<TileMatrixSet>swissgrid_5</TileMatrixSet>
</TileMatrixSetLink>
</Layer>
<Layer>
<ows:Title>line</ows:Title>
<ows:Identifier>line</ows:Identifier>
<Style isDefault="true">
<ows:Identifier>default</ows:Identifier>
</Style>
<Format>image/png</Format>
<Dimension>
<ows:Identifier>DATE</ows:Identifier>
<Default>2012</Default>
<Value>2005</Value>
<Value>2010</Value>
<Value>2012</Value>
</Dimension>
<ResourceURL format="image/png" resourceType="tile"
template="http://wmts1/tiles/wmts/1.0.0/line/default/"""
"""{DATE}/{TileMatrixSet}/{TileMatrix}/{TileRow}/{TileCol}.png" />
<TileMatrixSetLink>
<TileMatrixSet>swissgrid_5</TileMatrixSet>
</TileMatrixSetLink>
</Layer>
<Layer>
<ows:Title>mapnik</ows:Title>
<ows:Identifier>mapnik</ows:Identifier>
<Style isDefault="true">
<ows:Identifier>default</ows:Identifier>
</Style>
<Format>image/png</Format>
<Dimension>
<ows:Identifier>DATE</ows:Identifier>
<Default>2012</Default>
<Value>2005</Value>
<Value>2010</Value>
<Value>2012</Value>
</Dimension>
<ResourceURL format="image/png" resourceType="tile"
template="http://wmts1/tiles/wmts/1.0.0/mapnik/default/"""
"""{DATE}/{TileMatrixSet}/{TileMatrix}/{TileRow}/{TileCol}.png" />
<TileMatrixSetLink>
<TileMatrixSet>swissgrid_5</TileMatrixSet>
</TileMatrixSetLink>
</Layer>
<Layer>
<ows:Title>mapnik_grid</ows:Title>
<ows:Identifier>mapnik_grid</ows:Identifier>
<Style isDefault="true">
<ows:Identifier>default</ows:Identifier>
</Style>
<Format>application/utfgrid</Format>
<Dimension>
<ows:Identifier>DATE</ows:Identifier>
<Default>2012</Default>
<Value>2005</Value>
<Value>2010</Value>
<Value>2012</Value>
</Dimension>
<ResourceURL format="application/utfgrid" resourceType="tile"
template="http://wmts1/tiles/wmts/1.0.0/mapnik_grid/default/"""
"""{DATE}/{TileMatrixSet}/{TileMatrix}/{TileRow}/{TileCol}.json" />
<TileMatrixSetLink>
<TileMatrixSet>swissgrid_5</TileMatrixSet>
</TileMatrixSetLink>
</Layer>
<Layer>
<ows:Title>mapnik_grid_drop</ows:Title>
<ows:Identifier>mapnik_grid_drop</ows:Identifier>
<Style isDefault="true">
<ows:Identifier>default</ows:Identifier>
</Style>
<Format>application/utfgrid</Format>
<Dimension>
<ows:Identifier>DATE</ows:Identifier>
<Default>2012</Default>
<Value>2005</Value>
<Value>2010</Value>
<Value>2012</Value>
</Dimension>
<ResourceURL format="application/utfgrid" resourceType="tile"
template="http://wmts1/tiles/wmts/1.0.0/mapnik_grid_drop/default/"""
"""{DATE}/{TileMatrixSet}/{TileMatrix}/{TileRow}/{TileCol}.json" />
<TileMatrixSetLink>
<TileMatrixSet>swissgrid_5</TileMatrixSet>
</TileMatrixSetLink>
</Layer>
<Layer>
<ows:Title>point</ows:Title>
<ows:Identifier>point</ows:Identifier>
<Style isDefault="true">
<ows:Identifier>default</ows:Identifier>
</Style>
<Format>image/png</Format>
<Dimension>
<ows:Identifier>DATE</ows:Identifier>
<Default>2012</Default>
<Value>2005</Value>
<Value>2010</Value>
<Value>2012</Value>
</Dimension>
<ResourceURL format="image/png" resourceType="tile"
template="http://wmts1/tiles/wmts/1.0.0/point/default/"""
"""{DATE}/{TileMatrixSet}/{TileMatrix}/{TileRow}/{TileCol}.png" />
<TileMatrixSetLink>
<TileMatrixSet>swissgrid_5</TileMatrixSet>
</TileMatrixSetLink>
</Layer>
<Layer>
<ows:Title>point_error</ows:Title>
<ows:Identifier>point_error</ows:Identifier>
<Style isDefault="true">
<ows:Identifier>default</ows:Identifier>
</Style>
<Format>image/png</Format>
<Dimension>
<ows:Identifier>DATE</ows:Identifier>
<Default>2012</Default>
<Value>2005</Value>
<Value>2010</Value>
<Value>2012</Value>
</Dimension>
<ResourceURL format="image/png" resourceType="tile"
template="http://wmts1/tiles/wmts/1.0.0/point_error/default/"""
"""{DATE}/{TileMatrixSet}/{TileMatrix}/{TileRow}/{TileCol}.png" />
<TileMatrixSetLink>
<TileMatrixSet>swissgrid_5</TileMatrixSet>
</TileMatrixSetLink>
</Layer>
<Layer>
<ows:Title>point_hash</ows:Title>
<ows:Identifier>point_hash</ows:Identifier>
<Style isDefault="true">
<ows:Identifier>default</ows:Identifier>
</Style>
<Format>image/png</Format>
<Dimension>
<ows:Identifier>DATE</ows:Identifier>
<Default>2012</Default>
<Value>2005</Value>
<Value>2010</Value>
<Value>2012</Value>
</Dimension>
<ResourceURL format="image/png" resourceType="tile"
template="http://wmts1/tiles/wmts/1.0.0/point_hash/default/"""
"""{DATE}/{TileMatrixSet}/{TileMatrix}/{TileRow}/{TileCol}.png" />
<TileMatrixSetLink>
<TileMatrixSet>swissgrid_5</TileMatrixSet>
</TileMatrixSetLink>
</Layer>
<Layer>
<ows:Title>point_hash_no_meta</ows:Title>
<ows:Identifier>point_hash_no_meta</ows:Identifier>
<Style isDefault="true">
<ows:Identifier>default</ows:Identifier>
</Style>
<Format>image/png</Format>
<Dimension>
<ows:Identifier>DATE</ows:Identifier>
<Default>2012</Default>
<Value>2005</Value>
<Value>2010</Value>
<Value>2012</Value>
</Dimension>
<ResourceURL format="image/png" resourceType="tile"
template="http://wmts1/tiles/wmts/1.0.0/point_hash_no_meta/default/"""
"""{DATE}/{TileMatrixSet}/{TileMatrix}/{TileRow}/{TileCol}.png" />
<TileMatrixSetLink>
<TileMatrixSet>swissgrid_5</TileMatrixSet>
</TileMatrixSetLink>
</Layer>
<Layer>
<ows:Title>point_px_buffer</ows:Title>
<ows:Identifier>point_px_buffer</ows:Identifier>
<Style isDefault="true">
<ows:Identifier>default</ows:Identifier>
</Style>
<Format>image/png</Format>
<Dimension>
<ows:Identifier>DATE</ows:Identifier>
<Default>2012</Default>
<Value>2005</Value>
<Value>2010</Value>
<Value>2012</Value>
</Dimension>
<ResourceURL format="image/png" resourceType="tile"
template="http://wmts1/tiles/wmts/1.0.0/point_px_buffer/default/"""
"""{DATE}/{TileMatrixSet}/{TileMatrix}/{TileRow}/{TileCol}.png" />
<TileMatrixSetLink>
<TileMatrixSet>swissgrid_5</TileMatrixSet>
</TileMatrixSetLink>
</Layer>
<Layer>
<ows:Title>polygon</ows:Title>
<ows:Identifier>polygon</ows:Identifier>
<Style isDefault="true">
<ows:Identifier>default</ows:Identifier>
</Style>
<Format>image/png</Format>
<Dimension>
<ows:Identifier>DATE</ows:Identifier>
<Default>2012</Default>
<Value>2005</Value>
<Value>2010</Value>
<Value>2012</Value>
</Dimension>
<ResourceURL format="image/png" resourceType="tile"
template="http://wmts1/tiles/wmts/1.0.0/polygon/default/"""
"""{DATE}/{TileMatrixSet}/{TileMatrix}/{TileRow}/{TileCol}.png" />
<TileMatrixSetLink>
<TileMatrixSet>swissgrid_5</TileMatrixSet>
</TileMatrixSetLink>
</Layer>
<Layer>
<ows:Title>polygon2</ows:Title>
<ows:Identifier>polygon2</ows:Identifier>
<Style isDefault="true">
<ows:Identifier>default</ows:Identifier>
</Style>
<Format>image/png</Format>
<Dimension>
<ows:Identifier>DATE</ows:Identifier>
<Default>2012</Default>
<Value>2005</Value>
<Value>2010</Value>
<Value>2012</Value>
</Dimension>
<ResourceURL format="image/png" resourceType="tile"
template="http://wmts1/tiles/wmts/1.0.0/polygon2/default/"""
"""{DATE}/{TileMatrixSet}/{TileMatrix}/{TileRow}/{TileCol}.png" />
<TileMatrixSetLink>
<TileMatrixSet>swissgrid_01</TileMatrixSet>
</TileMatrixSetLink>
</Layer>
<TileMatrixSet>
<ows:Identifier>swissgrid_01</ows:Identifier>
<ows:SupportedCRS>urn:ogc:def:crs:EPSG::21781</ows:SupportedCRS>
<TileMatrix>
<ows:Identifier>1</ows:Identifier>
<ScaleDenominator>3571.4285714[0-9]*</ScaleDenominator>
<TopLeftCorner>420000 350000</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>1875</MatrixWidth>
<MatrixHeight>1250</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>0_2</ows:Identifier>
<ScaleDenominator>714.28571428[0-9]*</ScaleDenominator>
<TopLeftCorner>420000 350000</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>9375</MatrixWidth>
<MatrixHeight>6250</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>0_1</ows:Identifier>
<ScaleDenominator>357.14285714[0-9]*</ScaleDenominator>
<TopLeftCorner>420000 350000</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>18750</MatrixWidth>
<MatrixHeight>12500</MatrixHeight>
</TileMatrix>
</TileMatrixSet>
<TileMatrixSet>
<ows:Identifier>swissgrid_025</ows:Identifier>
<ows:SupportedCRS>urn:ogc:def:crs:EPSG::21781</ows:SupportedCRS>
<TileMatrix>
<ows:Identifier>0_25</ows:Identifier>
<ScaleDenominator>892.85714285[0-9]*</ScaleDenominator>
<TopLeftCorner>420000 350000</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>7500</MatrixWidth>
<MatrixHeight>5000</MatrixHeight>
</TileMatrix>
</TileMatrixSet>
<TileMatrixSet>
<ows:Identifier>swissgrid_2_5</ows:Identifier>
<ows:SupportedCRS>urn:ogc:def:crs:EPSG::21781</ows:SupportedCRS>
<TileMatrix>
<ows:Identifier>2_5</ows:Identifier>
<ScaleDenominator>8928.5714285[0-9]*</ScaleDenominator>
<TopLeftCorner>420000 350000</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>750</MatrixWidth>
<MatrixHeight>500</MatrixHeight>
</TileMatrix>
</TileMatrixSet>
<TileMatrixSet>
<ows:Identifier>swissgrid_5</ows:Identifier>
<ows:SupportedCRS>urn:ogc:def:crs:EPSG::21781</ows:SupportedCRS>
<TileMatrix>
<ows:Identifier>0</ows:Identifier>
<ScaleDenominator>357142.85714[0-9]*</ScaleDenominator>
<TopLeftCorner>420000 350000</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>19</MatrixWidth>
<MatrixHeight>13</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>1</ows:Identifier>
<ScaleDenominator>178571.42857[0-9]*</ScaleDenominator>
<TopLeftCorner>420000 350000</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>38</MatrixWidth>
<MatrixHeight>25</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>2</ows:Identifier>
<ScaleDenominator>71428.571428[0-9]*</ScaleDenominator>
<TopLeftCorner>420000 350000</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>94</MatrixWidth>
<MatrixHeight>63</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>3</ows:Identifier>
<ScaleDenominator>35714.285714[0-9]*</ScaleDenominator>
<TopLeftCorner>420000 350000</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>188</MatrixWidth>
<MatrixHeight>125</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>4</ows:Identifier>
<ScaleDenominator>17857.142857[0-9]*</ScaleDenominator>
<TopLeftCorner>420000 350000</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>375</MatrixWidth>
<MatrixHeight>250</MatrixHeight>
</TileMatrix>
</TileMatrixSet>
</Contents>
</Capabilities>""",
        )
        # No warnings or errors may have been logged during the whole scenario.
        log_capture.check()
def test_mbtiles_rest(self) -> None:
    """Serve tiles stored in an MBTiles cache through the WMTS REST interface."""
    with LogCapture("tilecloud_chain", level=30) as log_capture:
        # Force single-threaded generation while writing the MBTiles file;
        # the original thread counts are restored at the end of the test.
        tile_mbt = os.environ["TILE_NB_THREAD"]
        metatile_mbt = os.environ["METATILE_NB_THREAD"]
        os.environ["TILE_NB_THREAD"] = "1"
        os.environ["METATILE_NB_THREAD"] = "1"
        self.assert_tiles_generated(
            cmd=".build/venv/bin/generate_tiles -d -c tilegeneration/test-serve.yaml"
            " -l point_hash --zoom 1",
            main_func=generate.main,
            directory="/tmp/tiles/mbtiles/",
            tiles_pattern="1.0.0/%s",
            tiles=[("point_hash/default/2012/swissgrid_5.png.mbtiles")],
            regex=True,
            expected=r"""The tile generation of layer 'point_hash \(DATE=2012\)' is finish
Nb generated metatiles: 1
Nb metatiles dropped: 0
Nb generated tiles: 64
Nb tiles dropped: 62
Nb tiles stored: 2
Nb tiles in error: 0
Total time: [0-9]+:[0-9][0-9]:[0-9][0-9]
Total size: [89][0-9][0-9] o
Time per tile: [0-9]+ ms
Size per tile: 4[0-9][0-9] o
""",
        )
        # Reset the module-level server state so it reloads this test's configuration.
        server.pyramid_server = None
        server.tilegeneration = None
        request = DummyRequest()
        request.registry.settings = {
            "tilegeneration_configfile": "tilegeneration/test-serve.yaml",
        }
        # REST tile path: wmts/<version>/<layer>/<style>/<DATE>/<matrix set>/<z>/<row>/<col>.<ext>
        request.matchdict = {
            "path": ["wmts", "1.0.0", "point_hash", "default", "2012", "swissgrid_5", "1", "11", "14.png"]
        }
        serve = PyramidView(request)
        serve()
        self.assertEqual(request.response.headers["Content-Type"], "image/png")
        self.assertEqual(request.response.headers["Cache-Control"], "max-age=28800")
        # A missing tile yields 204 No Content, still with cache headers.
        request.matchdict["path"][7] = "12"
        response = serve()
        assert isinstance(response, HTTPNoContent)
        assert response.headers["Cache-Control"] == "max-age=28800"
        # Invalid version / extension / layer / style / matrix set each raise
        # HTTPBadRequest; every mutation is reverted before the next case.
        request.matchdict["path"][7] = "11"
        request.matchdict["path"][1] = "0.9"
        self.assertRaises(HTTPBadRequest, serve)
        request.matchdict["path"][1] = "1.0.0"
        request.matchdict["path"][8] = "14.jpeg"
        self.assertRaises(HTTPBadRequest, serve)
        request.matchdict["path"][8] = "14.png"
        request.matchdict["path"][2] = "test"
        self.assertRaises(HTTPBadRequest, serve)
        request.matchdict["path"][2] = "point_hash"
        request.matchdict["path"][3] = "test"
        self.assertRaises(HTTPBadRequest, serve)
        request.matchdict["path"][3] = "default"
        request.matchdict["path"][5] = "test"
        self.assertRaises(HTTPBadRequest, serve)
        # A path with too few components is rejected as well.
        request.matchdict["path"] = ["wmts", "point_hash", "default", "swissgrid_5", "1", "14", "11.png"]
        self.assertRaises(HTTPBadRequest, serve)
        # The REST capabilities path returns the expected XML document.
        request.matchdict["path"] = ["wmts", "1.0.0", "WMTSCapabilities.xml"]
        PyramidView(request)()
        self.assertEqual(request.response.headers["Content-Type"], "application/xml")
        self.assert_result_equals(
            request.response.body.decode("utf-8"),
            CAPABILITIES,
            regex=True,
        )
        os.environ["TILE_NB_THREAD"] = tile_mbt
        os.environ["METATILE_NB_THREAD"] = metatile_mbt
        log_capture.check()
@pytest.mark.skip(reason="Don't test bsddb")
def test_bsddb_rest(self):
    """Serve tiles stored in a Berkeley DB cache through the WMTS REST interface.

    Same scenario as test_mbtiles_rest, with a bsddb-backed cache, plus a
    check of the "static" capabilities path.  Currently skipped.
    """
    with LogCapture("tilecloud_chain", level=30) as log_capture:
        self.assert_tiles_generated(
            cmd=".build/venv/bin/generate_tiles -d -c tilegeneration/test-bsddb.yaml"
            " -l point_hash --zoom 1",
            main_func=generate.main,
            directory="/tmp/tiles/bsddb/",
            tiles_pattern="1.0.0/%s",
            tiles=[("point_hash/default/2012/swissgrid_5.png.bsddb")],
            regex=True,
            expected=r"""The tile generation of layer 'point_hash \(DATE=2012\)' is finish
Nb generated metatiles: 1
Nb metatiles dropped: 0
Nb generated tiles: 64
Nb tiles dropped: 62
Nb tiles stored: 2
Nb tiles in error: 0
Total time: [0-9]+:[0-9][0-9]:[0-9][0-9]
Total size: [89][0-9][0-9] o
Time per tile: [0-9]+ ms
Size per tile: 4[0-9][0-9] o
""",
        )
        # Reset the module-level server state so it reloads this test's configuration.
        server.pyramid_server = None
        server.tilegeneration = None
        request = DummyRequest()
        request.registry.settings = {
            "tilegeneration_configfile": "tilegeneration/test-bsddb.yaml",
        }
        request.matchdict = {
            "path": ["wmts", "1.0.0", "point_hash", "default", "2012", "swissgrid_5", "1", "11", "14.png"]
        }
        serve = PyramidView(request)
        serve()
        self.assertEqual(request.response.headers["Content-Type"], "image/png")
        self.assertEqual(request.response.headers["Cache-Control"], "max-age=28800")
        # Missing tile -> 204 No Content.
        request.matchdict["path"][7] = "12"
        assert isinstance(serve(), HTTPNoContent)
        # Invalid path components each raise HTTPBadRequest (reverted one by one).
        request.matchdict["path"][7] = "11"
        request.matchdict["path"][1] = "0.9"
        self.assertRaises(HTTPBadRequest, serve)
        request.matchdict["path"][1] = "1.0.0"
        request.matchdict["path"][8] = "14.jpeg"
        self.assertRaises(HTTPBadRequest, serve)
        request.matchdict["path"][8] = "14.png"
        request.matchdict["path"][2] = "test"
        self.assertRaises(HTTPBadRequest, serve)
        request.matchdict["path"][2] = "point_hash"
        request.matchdict["path"][3] = "test"
        self.assertRaises(HTTPBadRequest, serve)
        request.matchdict["path"][3] = "default"
        request.matchdict["path"][5] = "test"
        self.assertRaises(HTTPBadRequest, serve)
        request.matchdict["path"] = ["wmts", "point_hash", "default", "swissgrid_5", "1", "14", "11.png"]
        self.assertRaises(HTTPBadRequest, serve)
        # Capabilities are reachable both through the "wmts" and "static" prefixes.
        request.matchdict["path"] = ["wmts", "1.0.0", "WMTSCapabilities.xml"]
        PyramidView(request)()
        self.assertEqual(request.response.headers["Content-Type"], "application/xml")
        self.assert_result_equals(
            request.response.body.decode("utf-8"),
            CAPABILITIES,
            regex=True,
        )
        request.matchdict["path"] = ["static", "1.0.0", "WMTSCapabilities.xml"]
        PyramidView(request)()
        self.assertEqual(request.response.headers["Content-Type"], "application/xml")
        self.assert_result_equals(
            request.response.body.decode("utf-8"),
            CAPABILITIES,
            regex=True,
        )
        log_capture.check()
def test_serve_gfi(self) -> None:
    """GetFeatureInfo through both the KVP and the REST interfaces."""
    # --- KVP GetFeatureInfo ---
    server.pyramid_server = None
    server.tilegeneration = None
    request = DummyRequest()
    request.registry.settings = {
        "tilegeneration_configfile": "tilegeneration/test-serve.yaml",
    }
    request.params = {
        "Service": "WMTS",
        "Version": "1.0.0",
        "Request": "GetFeatureInfo",
        "Format": "image/png",
        "Info_Format": "application/vnd.ogc.gml",
        "Layer": "point_hash",
        "Query_Layer": "point_hash",
        "Style": "default",
        "TileMatrixSet": "swissgrid_5",
        "TileMatrix": "1",
        "TileRow": "11",
        "TileCol": "14",
        "I": "114",
        "J": "111",
    }
    serve = PyramidView(request)
    serve()
    # The queried pixel holds no feature: MapServer returns an empty GML document.
    self.assert_result_equals(
        request.response.body.decode("utf-8"),
        """<?xml version="1.0" encoding="UTF-8"?>
<msGMLOutput
xmlns:gml="http://www.opengis.net/gml"
xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
</msGMLOutput>
""",
    )
    # --- REST GetFeatureInfo (tile path extended with I/J pixel coordinates) ---
    server.pyramid_server = None
    server.tilegeneration = None
    request = DummyRequest()
    request.registry.settings = {
        "tilegeneration_configfile": "tilegeneration/test-serve.yaml",
    }
    request.matchdict = {
        "path": [
            "wmts",
            "1.0.0",
            "point_hash",
            "default",
            "2012",
            "swissgrid_5",
            "1",
            "11",
            "14",
            "114",
            "111.xml",
        ]
    }
    request.params = {
        "Service": "WMTS",
        "Version": "1.0.0",
        "Request": "GetFeatureInfo",
        "Format": "image/png",
        "Info_Format": "application/vnd.ogc.gml",
        "Layer": "point_hash",
        "Query_Layer": "point_hash",
        "Style": "default",
        "TileMatrixSet": "swissgrid_5",
        "TileMatrix": "1",
        "TileRow": "14",
        "TileCol": "11",
        "I": "114",
        "J": "111",
    }
    serve = PyramidView(request)
    serve()
    self.assert_result_equals(
        request.response.body.decode("utf-8"),
        """<?xml version="1.0" encoding="UTF-8"?>
<msGMLOutput
xmlns:gml="http://www.opengis.net/gml"
xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
</msGMLOutput>
""",
    )
def test_wsgi(self) -> None:
    """Drive the WSGI application (app_factory) directly, without Pyramid views."""
    # Force single-threaded generation while writing the MBTiles file;
    # the original thread counts are restored at the end of the test.
    tile_mbt = os.environ["TILE_NB_THREAD"]
    metatile_mbt = os.environ["METATILE_NB_THREAD"]
    os.environ["TILE_NB_THREAD"] = "1"
    os.environ["METATILE_NB_THREAD"] = "1"
    self.assert_tiles_generated(
        cmd=".build/venv/bin/generate_tiles -d -c tilegeneration/test-serve.yaml "
        "-l point_hash --zoom 1",
        main_func=generate.main,
        directory="/tmp/tiles/mbtiles/",
        tiles_pattern="1.0.0/%s",
        tiles=[("point_hash/default/2012/swissgrid_5.png.mbtiles")],
        regex=True,
        expected=r"""The tile generation of layer 'point_hash \(DATE=2012\)' is finish
Nb generated metatiles: 1
Nb metatiles dropped: 0
Nb generated tiles: 64
Nb tiles dropped: 62
Nb tiles stored: 2
Nb tiles in error: 0
Total time: [0-9]+:[0-9][0-9]:[0-9][0-9]
Total size: [89][0-9][0-9] o
Time per tile: [0-9]+ ms
Size per tile: 4[0-9][0-9] o
""",
    )
    # Reset the module-level server state so it reloads this test's configuration.
    server.pyramid_server = None
    server.tilegeneration = None
    serve = app_factory({}, configfile="tilegeneration/test-serve.yaml")
    # Status line and headers of the last response, captured by start_response below.
    global code, headers
    code = None
    headers = None

    def start_response(p_code, p_headers):
        # WSGI start_response callback: record the status line and the headers
        # (converted from a list of pairs to a dict) for the assertions below.
        global code, headers
        code = p_code
        headers = {}
        for key, value in p_headers:
            headers[key] = value

    # GetFeatureInfo through the KVP query string.
    result = serve(
        server.tilegeneration.get_main_config(),
        "tilegeneration/test-serve.yaml",
        {
            "QUERY_STRING": "&".join(
                [
                    "{}={}".format(*item)
                    for item in {
                        "Service": "WMTS",
                        "Version": "1.0.0",
                        "Request": "GetFeatureInfo",
                        "Format": "image/png",
                        "Info_Format": "application/vnd.ogc.gml",
                        "Layer": "point_hash",
                        "Query_Layer": "point_hash",
                        "Style": "default",
                        "TileMatrixSet": "swissgrid_5",
                        "TileMatrix": "1",
                        "TileRow": "11",
                        "TileCol": "14",
                        "I": "114",
                        "J": "111",
                    }.items()
                ]
            )
        },
        start_response,
    )
    self.assertEqual(code, "200 OK")
    self.assert_result_equals(
        result[0].decode("utf-8"),
        """<?xml version="1.0" encoding="UTF-8"?>
<msGMLOutput
xmlns:gml="http://www.opengis.net/gml"
xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
</msGMLOutput>
""",
    )
    # GetFeatureInfo through a REST path (tile path plus I/J pixel coordinates).
    result = serve(
        server.tilegeneration.get_main_config(),
        "tilegeneration/test-serve.yaml",
        {
            "QUERY_STRING": "",
            "PATH_INFO": "/wmts/1.0.0/point_hash/default/2012/swissgrid_5/1/14/11/114/111.xml",
        },
        start_response,
    )
    self.assert_result_equals(
        result[0].decode("utf-8"),
        """<?xml version="1.0" encoding="UTF-8"?>
<msGMLOutput
xmlns:gml="http://www.opengis.net/gml"
xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
</msGMLOutput>
""",
    )
    # A tile that was not generated answers 204 No Content.
    serve(
        server.tilegeneration.get_main_config(),
        "tilegeneration/test-serve.yaml",
        {"QUERY_STRING": "", "PATH_INFO": "/wmts/1.0.0/point_hash/default/2012/swissgrid_5/1/11/12.png"},
        start_response,
    )
    self.assertEqual(code, "204 No Content")
    # A generated tile answers 200 with the configured cache headers.
    serve(
        server.tilegeneration.get_main_config(),
        "tilegeneration/test-serve.yaml",
        {"QUERY_STRING": "", "PATH_INFO": "/wmts/1.0.0/point_hash/default/2012/swissgrid_5/1/11/14.png"},
        start_response,
    )
    self.assertEqual(code, "200 OK")
    self.assertEqual(headers["Cache-Control"], "max-age=28800")
    # The capabilities document is served through the WSGI path too.
    result = serve(
        server.tilegeneration.get_main_config(),
        "tilegeneration/test-serve.yaml",
        {"QUERY_STRING": "", "PATH_INFO": "/wmts/1.0.0/WMTSCapabilities.xml"},
        start_response,
    )
    self.assertEqual(code, "200 OK")
    self.assert_result_equals(
        result[0].decode("utf-8"),
        CAPABILITIES,
        regex=True,
    )
    os.environ["TILE_NB_THREAD"] = tile_mbt
    os.environ["METATILE_NB_THREAD"] = metatile_mbt
def test_ondemend_wmtscapabilities(self) -> None:
    """The capabilities document is produced on demand when it is not pre-generated."""
    with LogCapture("tilecloud_chain", level=30) as log_capture:
        # Reset the module-level server state so it reloads this test's configuration.
        server.pyramid_server = None
        server.tilegeneration = None
        request = DummyRequest()
        request.registry.settings = {
            "tilegeneration_configfile": "tilegeneration/test-serve-wmtscapabilities.yaml",
        }
        request.matchdict["path"] = ["wmts", "1.0.0", "WMTSCapabilities.xml"]
        PyramidView(request)()
        self.assertEqual(request.response.headers["Content-Type"], "application/xml")
        self.assert_result_equals(
            request.response.body.decode("utf-8"),
            CAPABILITIES,
            regex=True,
        )
        # No warnings or errors may have been logged.
        log_capture.check()
|
{"/tilecloud_chain/copy_.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/format.py"], "/tilecloud_chain/controller.py": ["/tilecloud_chain/__init__.py"], "/tilecloud_chain/views/admin.py": ["/tilecloud_chain/server.py", "/tilecloud_chain/__init__.py", "/tilecloud_chain/controller.py"], "/tilecloud_chain/server.py": ["/tilecloud_chain/security.py", "/tilecloud_chain/__init__.py", "/tilecloud_chain/controller.py"], "/tilecloud_chain/expiretiles.py": ["/tilecloud_chain/__init__.py"], "/tilecloud_chain/tests/test_controller.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_config.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_expiretiles.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/__init__.py": ["/tilecloud_chain/security.py", "/tilecloud_chain/multitilestore.py", "/tilecloud_chain/timedtilestore.py"], "/tilecloud_chain/tests/test_generate.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_serve.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/server.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/internal_mapcache.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/generate.py", "/tilecloud_chain/server.py"], "/tilecloud_chain/tests/test_cost.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/cost.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/format.py"], "/tilecloud_chain/tests/test_copy.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/generate.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/database_logger.py", "/tilecloud_chain/format.py", "/tilecloud_chain/multitilestore.py", "/tilecloud_chain/timedtilestore.py", "/tilecloud_chain/mapnik_.py"], "/tilecloud_chain/tests/test_error.py": 
["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"]}
|
14,620
|
camptocamp/tilecloud-chain
|
refs/heads/master
|
/tilecloud_chain/internal_mapcache.py
|
import collections
import contextlib
import datetime
import json
import logging
import os
import struct
import sys
import threading
from typing import TYPE_CHECKING, Any, Dict, Iterator, Optional, TypeVar, cast
import redis.sentinel
import tilecloud_chain.configuration
from tilecloud import Tile, TileCoord, TileStore
from tilecloud_chain import Run
from tilecloud_chain.generate import Generate
if TYPE_CHECKING:
    # Imported for type checking only, avoiding a runtime circular import.
    from tilecloud_chain.server import Server

# Upper bound (seconds) on the Redis lock used around tile generation
# (see RedisStore.lock below).
MAX_GENERATION_TIME = 60

LOG = logging.getLogger(__name__)

# Process-wide locks; presumably guard creation and use of the `_generator`
# singleton below — TODO confirm against the rest of this module.
lock = threading.Lock()
executing_lock = threading.Lock()

# Module-level Generator instance, created lazily (initialization happens
# elsewhere in this module, outside this view).
_generator = None
def _decode_tile(data: bytes, tile: Tile) -> None:
    """Populate *tile* from the wire format produced by ``_encode_tile``.

    Layout: an 8-byte (native-endian) payload length, the raw image bytes,
    then a JSON object carrying the content encoding and content type.
    """
    (payload_length,) = struct.unpack("q", data[:8])
    payload_end = 8 + payload_length
    tile.data = data[8:payload_end]
    metadata = json.loads(data[payload_end:].decode("utf-8"))
    tile.content_encoding = metadata["content_encoding"]
    tile.content_type = metadata["content_type"]
def _encode_tile(tile: Tile) -> bytes:
    """Serialize *tile* into the wire format read back by ``_decode_tile``.

    The result is: an 8-byte (native-endian) payload length, the raw image
    bytes, then a JSON object with the content encoding and content type.
    """
    metadata = {"content_encoding": tile.content_encoding, "content_type": tile.content_type}
    assert tile.data
    header = struct.pack("q", len(tile.data))
    trailer = json.dumps(metadata).encode("utf-8")
    return header + tile.data + trailer
class RedisStore(TileStore):
    """A store based on Redis, used as a short-lived tile cache."""

    def __init__(self, config: tilecloud_chain.configuration.Redis, **kwargs: Any):
        """Initialize.

        Connects either to a single Redis instance (``url`` key) or to a
        Sentinel-managed cluster (``sentinels``/``service_name`` keys);
        reads then go to a slave and writes to the master.
        """
        super().__init__(**kwargs)
        connection_kwargs = {}
        if "socket_timeout" in config:
            connection_kwargs["socket_timeout"] = config["socket_timeout"]
        if "db" in config:
            connection_kwargs["db"] = config["db"]
        if "url" in config:
            # Single-instance mode: one connection serves both reads and writes.
            self._master = redis.Redis.from_url(config["url"], **connection_kwargs)  # type: ignore
            self._slave = self._master
        else:
            sentinels = [(host, int(port)) for host, port in config["sentinels"]]
            sentinel = redis.sentinel.Sentinel(sentinels, **connection_kwargs)  # type: ignore
            self._master = sentinel.master_for(config.get("service_name", "mymaster"))
            self._slave = sentinel.slave_for(config.get("service_name", "mymaster"))
        self._prefix = config["prefix"]
        # TTL (seconds) applied to every stored tile in put_one.
        self._expiration = config["expiration"]

    def get_one(self, tile: Tile) -> Optional[Tile]:
        """See in superclass.

        Read from a slave; return None when the tile is not cached.
        """
        key = self._get_key(tile)
        data = self._slave.get(key)
        if data is None:
            LOG.debug("Tile not found: %s/%s", tile.metadata["layer"], tile.tilecoord)
            return None
        _decode_tile(data, tile)
        LOG.debug("Tile found: %s/%s", tile.metadata["layer"], tile.tilecoord)
        return tile

    def put_one(self, tile: Tile) -> Tile:
        """See in superclass."""
        key = self._get_key(tile)
        # The expiration means cached tiles clean themselves up automatically.
        self._master.set(key, _encode_tile(tile), ex=self._expiration)
        LOG.info("Tile saved: %s/%s", tile.metadata["layer"], tile.tilecoord)
        return tile

    def delete_one(self, tile: Tile) -> Tile:
        """See in superclass."""
        key = self._get_key(tile)
        self._master.delete(key)
        return tile

    def _get_key(self, tile: Tile) -> str:
        # Key layout: <prefix>_<config file>_<layer>_<z>_<x>_<y>.
        return (
            f"{self._prefix}_{tile.metadata['config_file']}_{tile.metadata['layer']}_"
            f"{tile.tilecoord.z}_{tile.tilecoord.x}_{tile.tilecoord.y}"
        )

    @contextlib.contextmanager
    def lock(self, tile: Tile) -> Iterator[None]:
        """Lock a tile.

        Takes a Redis distributed lock on the tile's key (suffix "_l"); the
        timeout bounds how long a crashed holder can keep the lock.
        """
        key = self._get_key(tile) + "_l"
        with self._master.lock(key, timeout=MAX_GENERATION_TIME):
            yield
class Generator:
    """Get the tile from the cache (Redis) or generated it on the WMS server."""

    def __init__(self, tilegeneration: tilecloud_chain.TileGeneration) -> None:
        """Initialize."""
        # BUG FIX: ``collections`` was used below without ever being imported
        # in this module (NameError at runtime); import it locally.
        import collections

        redis_config = tilegeneration.get_main_config().config["redis"]
        self._cache_store = RedisStore(redis_config)
        log_level = os.environ.get("TILE_MAPCACHE_LOGLEVEL")
        # Minimal stand-in for the argparse options object ``Generate`` expects.
        options_type = collections.namedtuple(  # type: ignore
            "Options",
            [
                "verbose",
                "debug",
                "quiet",
                "role",
                "near",
                "time",
                "daemon",
                "local_process_number",
                "tiles",
            ],
        )
        generator = Generate(
            options_type(
                log_level == "verbose",  # type: ignore
                log_level == "debug",
                log_level == "quiet",
                "server",
                True,
                False,
                True,
                None,
                None,
            ),
            tilegeneration,
            out=sys.stdout,
            server=True,
        )
        generator._generate_tiles()
        self.run = Run(tilegeneration, tilegeneration.functions_metatiles)

    def read_from_cache(self, tile: Tile) -> Optional[Tile]:
        """Get the tile from the cache (Redis); ``None`` on a miss."""
        return self._cache_store.get_one(tile)

    def compute_tile(self, tile: Tile) -> None:
        """Generate the (meta)tile and push every resulting tile to the cache."""
        self.run(tile)
        for generated in tile.metadata["tiles"].values():  # type: ignore
            self._cache_store.put_one(generated)

    @contextlib.contextmanager
    def lock(self, tile: Tile) -> Iterator[None]:
        """Hold the cache lock for the tile while the context is active."""
        with self._cache_store.lock(tile):
            yield
def _get_generator(tilegeneration: tilecloud_chain.TileGeneration) -> Generator:
    """Return the process-wide ``Generator``, creating it on first use."""
    return _generator if _generator is not None else _init_generator(tilegeneration)
def _init_generator(tilegeneration: tilecloud_chain.TileGeneration) -> Generator:
    """Create the ``Generator`` singleton, checking again under the module lock."""
    global _generator  # pylint: disable=global-statement
    with lock:
        # Another thread may have won the race while we waited for the lock.
        if _generator is None:
            _generator = Generator(tilegeneration)
    return _generator
# Generic response type produced by the ``Server`` implementation.
Response = TypeVar("Response")


def fetch(
    config: tilecloud_chain.DatedConfig,
    server: "Server[Response]",
    tilegeneration: tilecloud_chain.TileGeneration,
    layer: tilecloud_chain.configuration.Layer,
    tile: Tile,
    kwargs: Dict[str, Any],
) -> Response:
    """Fetch a tile from the cache (Redis) or generate it on the WMS server."""
    generator = _get_generator(tilegeneration)
    fetched_tile = generator.read_from_cache(tile)
    # ``backend`` ends up in the Tile-Backend response header for observability.
    backend = "redis"
    if fetched_tile is None:
        backend = "wms-wait"
        tile.metadata.setdefault("tiles", {})  # type: ignore
        meta_tile = tile
        if layer["meta"]:
            meta_tile = Tile(
                tilecoord=tile.tilecoord.metatilecoord(layer["meta_size"]), metadata=tile.metadata
            )
        with generator.lock(meta_tile):
            # Re-check the cache under the lock: another process may have
            # generated the tile while we were waiting for it.
            fetched_tile = generator.read_from_cache(tile)
            if fetched_tile is None:
                backend = "wms-generate"
                generator.compute_tile(meta_tile)
                if meta_tile.error:
                    LOG.error("Tile '%s' in error: %s", meta_tile.tilecoord, meta_tile.error)
                    return server.error(config, 500, "Error while generate the tile, see logs for details")
                # Don't fetch the just generated tile
                tiles: Dict[TileCoord, Tile] = cast(Dict[TileCoord, Tile], meta_tile.metadata["tiles"])
                try:
                    fetched_tile = tiles[tile.tilecoord]
                except KeyError:
                    LOG.exception(
                        "Try to get the tile '%s', from the available: '%s'",
                        tile.tilecoord,
                        ", ".join([str(e) for e in tiles.keys()]),
                    )
                    raise
    response_headers = {
        "Expires": (
            datetime.datetime.utcnow() + datetime.timedelta(hours=server.get_expires_hours(config))
        ).isoformat(),
        "Cache-Control": f"max-age={3600 * server.get_expires_hours(config)}",
        "Access-Control-Allow-Origin": "*",
        "Access-Control-Allow-Methods": "GET",
        "Tile-Backend": backend,
    }
    if fetched_tile.content_encoding:
        response_headers["Content-Encoding"] = fetched_tile.content_encoding
    if fetched_tile.content_type:
        response_headers["Content-Type"] = fetched_tile.content_type
    assert fetched_tile.data is not None
    return server.response(config, fetched_tile.data, headers=response_headers, **kwargs)
|
{"/tilecloud_chain/copy_.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/format.py"], "/tilecloud_chain/controller.py": ["/tilecloud_chain/__init__.py"], "/tilecloud_chain/views/admin.py": ["/tilecloud_chain/server.py", "/tilecloud_chain/__init__.py", "/tilecloud_chain/controller.py"], "/tilecloud_chain/server.py": ["/tilecloud_chain/security.py", "/tilecloud_chain/__init__.py", "/tilecloud_chain/controller.py"], "/tilecloud_chain/expiretiles.py": ["/tilecloud_chain/__init__.py"], "/tilecloud_chain/tests/test_controller.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_config.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_expiretiles.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/__init__.py": ["/tilecloud_chain/security.py", "/tilecloud_chain/multitilestore.py", "/tilecloud_chain/timedtilestore.py"], "/tilecloud_chain/tests/test_generate.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_serve.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/server.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/internal_mapcache.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/generate.py", "/tilecloud_chain/server.py"], "/tilecloud_chain/tests/test_cost.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/cost.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/format.py"], "/tilecloud_chain/tests/test_copy.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/generate.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/database_logger.py", "/tilecloud_chain/format.py", "/tilecloud_chain/multitilestore.py", "/tilecloud_chain/timedtilestore.py", "/tilecloud_chain/mapnik_.py"], "/tilecloud_chain/tests/test_error.py": 
["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"]}
|
14,621
|
camptocamp/tilecloud-chain
|
refs/heads/master
|
/tilecloud_chain/timedtilestore.py
|
import time
from typing import Any, Iterable, Iterator, Optional, TypeVar, cast
from prometheus_client import Summary
from tilecloud import BoundingPyramid, Tile, TileStore
# TypeVar constrained to Tile-or-None so wrappers preserve the wrapped optionality.
_OPTIONAL_TILE_OR_NOT = TypeVar("_OPTIONAL_TILE_OR_NOT", Optional[Tile], Tile)

# Prometheus timing metric for per-tile store operations.
_TILESTORE_OPERATION_SUMMARY = Summary(
    "tilecloud_chain_tilestore", "Number of tilestore contains", ["layer", "host", "store", "operation"]
)
# Prometheus timing metric for ``len()`` calls on a store.
_LEN_SUMMARY = Summary("tilecloud_chain_tilestore_len", "Number of tilestore len", ["store"])
class TimedTileStoreWrapper(TileStore):
    """A wrapper around a TileStore that adds timer metrics.

    Every operation is delegated to the wrapped store; the time spent is
    recorded in the module-level Prometheus summaries, labeled with the
    tile's layer/host metadata and this wrapper's ``store_name``.
    """

    def __init__(self, tile_store: TileStore, store_name: str) -> None:
        """Initialize.

        Arguments:
            tile_store: The store to delegate to.
            store_name: Label used for this store in the metrics.
        """
        super().__init__()
        self._tile_store = tile_store
        self._store_name = store_name

    def _time_iteration(
        self, generator: Iterator[_OPTIONAL_TILE_OR_NOT], operation: str
    ) -> Iterator[_OPTIONAL_TILE_OR_NOT]:
        # Time each individual next() on the underlying iterator and record
        # it under ``operation``; yields the tiles unchanged.
        while True:
            start = time.perf_counter()
            try:
                tile = next(generator)
            except StopIteration:
                break
            except RuntimeError as exception:
                if isinstance(exception.__cause__, StopIteration):
                    # since python 3.7, a StopIteration is wrapped in a RuntimeError (PEP 479)
                    break
                else:
                    raise
            _TILESTORE_OPERATION_SUMMARY.labels(
                tile.metadata.get("layer", "none"),
                tile.metadata.get("host", "none"),
                self._store_name,
                operation,
            ).observe(time.perf_counter() - start)
            yield tile

    def __contains__(self, tile: Tile) -> bool:
        """See in superclass."""
        with _TILESTORE_OPERATION_SUMMARY.labels(
            tile.metadata.get("layer", "none"),
            tile.metadata.get("host", "none"),
            self._store_name,
            "contains",
        ).time():
            return self._tile_store.__contains__(tile)

    def __len__(self) -> int:
        """See in superclass."""
        with _LEN_SUMMARY.labels(
            self._store_name,
        ).time():
            return self._tile_store.__len__()

    def delete(self, tiles: Iterable[Tile]) -> Iterator[Tile]:
        """See in superclass."""
        return self._time_iteration(self._tile_store.delete(tiles), "delete")

    def delete_one(self, tile: Tile) -> Tile:
        """See in superclass."""
        with _TILESTORE_OPERATION_SUMMARY.labels(
            tile.metadata.get("layer", "none"),
            tile.metadata.get("host", "none"),
            self._store_name,
            "delete_one",
        ).time():
            return self._tile_store.delete_one(tile)

    def list(self) -> Iterable[Tile]:
        """See in superclass."""
        return cast(Iterable[Tile], self._time_iteration(self._tile_store.list(), "list"))

    def get(self, tiles: Iterable[Optional[Tile]]) -> Iterator[Optional[Tile]]:
        """See in superclass."""
        return self._time_iteration(self._tile_store.get(tiles), "get")

    def get_all(self) -> Iterator[Optional[Tile]]:
        """See in superclass."""
        return self._time_iteration(self._tile_store.get_all(), "get_all")

    def get_one(self, tile: Tile) -> Optional[Tile]:
        """See in superclass."""
        with _TILESTORE_OPERATION_SUMMARY.labels(
            tile.metadata.get("layer", "none"), tile.metadata.get("host", "none"), self._store_name, "get_one"
        ).time():
            return self._tile_store.get_one(tile)

    def put(self, tiles: Iterable[Tile]) -> Iterator[Tile]:
        """See in superclass."""
        return cast(Iterator[Tile], self._time_iteration(self._tile_store.put(tiles), "put"))

    def put_one(self, tile: Tile) -> Tile:
        """See in superclass."""
        with _TILESTORE_OPERATION_SUMMARY.labels(
            tile.metadata.get("layer", "none"), tile.metadata.get("host", "none"), self._store_name, "put_one"
        ).time():
            return self._tile_store.put_one(tile)

    def __getattr__(self, item: str) -> Any:
        """See in superclass."""
        # Fallback delegation: anything not wrapped above goes straight to the
        # underlying store, untimed.
        return getattr(self._tile_store, item)

    def get_bounding_pyramid(self) -> BoundingPyramid:
        """See in superclass."""
        return self._tile_store.get_bounding_pyramid()

    def get_cheap_bounding_pyramid(self) -> Optional[BoundingPyramid]:
        """See in superclass."""
        return self._tile_store.get_cheap_bounding_pyramid()

    def __str__(self) -> str:
        """Get string representation."""
        return f"tilecloud_chain.timedtilestore.TimedTileStoreWrapper: {self._tile_store}"
|
{"/tilecloud_chain/copy_.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/format.py"], "/tilecloud_chain/controller.py": ["/tilecloud_chain/__init__.py"], "/tilecloud_chain/views/admin.py": ["/tilecloud_chain/server.py", "/tilecloud_chain/__init__.py", "/tilecloud_chain/controller.py"], "/tilecloud_chain/server.py": ["/tilecloud_chain/security.py", "/tilecloud_chain/__init__.py", "/tilecloud_chain/controller.py"], "/tilecloud_chain/expiretiles.py": ["/tilecloud_chain/__init__.py"], "/tilecloud_chain/tests/test_controller.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_config.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_expiretiles.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/__init__.py": ["/tilecloud_chain/security.py", "/tilecloud_chain/multitilestore.py", "/tilecloud_chain/timedtilestore.py"], "/tilecloud_chain/tests/test_generate.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_serve.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/server.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/internal_mapcache.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/generate.py", "/tilecloud_chain/server.py"], "/tilecloud_chain/tests/test_cost.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/cost.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/format.py"], "/tilecloud_chain/tests/test_copy.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/generate.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/database_logger.py", "/tilecloud_chain/format.py", "/tilecloud_chain/multitilestore.py", "/tilecloud_chain/timedtilestore.py", "/tilecloud_chain/mapnik_.py"], "/tilecloud_chain/tests/test_error.py": 
["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"]}
|
14,622
|
camptocamp/tilecloud-chain
|
refs/heads/master
|
/tilecloud_chain/security.py
|
import os
from typing import Optional, Union
import c2cwsgiutils.auth
import pyramid.request
from c2cwsgiutils.auth import AuthConfig
from pyramid.security import Allowed, Denied
class User:
    """The user definition."""

    # Attribute declarations; all are assigned in ``__init__``.
    login: Optional[str]
    name: Optional[str]
    url: Optional[str]
    is_auth: bool
    token: Optional[str]
    is_admin: bool
    request: pyramid.request.Request

    def __init__(
        self,
        auth_type: str,
        login: Optional[str],
        name: Optional[str],
        url: Optional[str],
        is_auth: bool,
        token: Optional[str],
        request: pyramid.request.Request,
    ) -> None:
        """Initialize the user and resolve admin rights from the request."""
        self.auth_type = auth_type
        self.login = login
        self.name = name
        self.url = url
        self.is_auth = is_auth
        self.token = token
        self.request = request
        self.is_admin = c2cwsgiutils.auth.check_access(self.request)

    def has_access(self, auth_config: AuthConfig) -> bool:
        """Check whether the user may access the source described by ``auth_config``.

        Admins always have access; otherwise access is granted only when the
        configuration restricts by GitHub repository and that check passes.
        """
        if self.is_admin:
            return True
        if "github_repository" in auth_config:
            # ``self.is_admin`` is known to be False here, so the former
            # ``or self.is_admin`` tail was dead code and has been dropped.
            return c2cwsgiutils.auth.check_access_config(self.request, auth_config)
        return False
class SecurityPolicy:
    """The pyramid security policy."""

    def identity(self, request: pyramid.request.Request) -> User:
        """Return app-specific user object."""
        if hasattr(request, "user"):
            return request.user  # type: ignore
        if "TEST_USER" in os.environ:
            # Test-mode shortcut: fabricate a fully-authenticated user.
            test_user = os.environ["TEST_USER"]
            user = User(
                auth_type="test_user",
                login=test_user,
                name=test_user,
                url="https://example.com/user",
                is_auth=True,
                token=None,
                request=request,
            )
        else:
            is_auth, c2cuser = c2cwsgiutils.auth.is_auth_user(request)
            user = User(
                "github_oauth",
                c2cuser.get("login"),
                c2cuser.get("name"),
                c2cuser.get("url"),
                is_auth,
                c2cuser.get("token"),
                request,
            )
        setattr(request, "user", user)
        return request.user  # type: ignore

    def authenticated_userid(self, request: pyramid.request.Request) -> Optional[str]:
        """Return a string ID for the user."""
        user = self.identity(request)
        return None if user is None else user.login

    def permits(
        self, request: pyramid.request.Request, context: AuthConfig, permission: str
    ) -> Union[Allowed, Denied]:
        """Allow access to everything if signed in."""
        user = self.identity(request)
        if user is None:
            return Denied("User is not signed in.")
        if user.auth_type in ("test_user",):
            return Allowed(f"All access auth type: {user.auth_type}")
        if user.is_admin:
            return Allowed("The User is admin.")
        if permission == "all":
            return Denied("Root access is required.")
        if permission not in context.get("sources", {}):  # type: ignore
            return Denied(f"No such source '{permission}'.")
        if user.has_access(context["sources"][permission]):  # type: ignore
            return Allowed(f"The User has access to source {permission}.")
        return Denied(f"The User has no access to source {permission}.")
|
{"/tilecloud_chain/copy_.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/format.py"], "/tilecloud_chain/controller.py": ["/tilecloud_chain/__init__.py"], "/tilecloud_chain/views/admin.py": ["/tilecloud_chain/server.py", "/tilecloud_chain/__init__.py", "/tilecloud_chain/controller.py"], "/tilecloud_chain/server.py": ["/tilecloud_chain/security.py", "/tilecloud_chain/__init__.py", "/tilecloud_chain/controller.py"], "/tilecloud_chain/expiretiles.py": ["/tilecloud_chain/__init__.py"], "/tilecloud_chain/tests/test_controller.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_config.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_expiretiles.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/__init__.py": ["/tilecloud_chain/security.py", "/tilecloud_chain/multitilestore.py", "/tilecloud_chain/timedtilestore.py"], "/tilecloud_chain/tests/test_generate.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_serve.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/server.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/internal_mapcache.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/generate.py", "/tilecloud_chain/server.py"], "/tilecloud_chain/tests/test_cost.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/cost.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/format.py"], "/tilecloud_chain/tests/test_copy.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/generate.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/database_logger.py", "/tilecloud_chain/format.py", "/tilecloud_chain/multitilestore.py", "/tilecloud_chain/timedtilestore.py", "/tilecloud_chain/mapnik_.py"], "/tilecloud_chain/tests/test_error.py": 
["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"]}
|
14,623
|
camptocamp/tilecloud-chain
|
refs/heads/master
|
/tilecloud_chain/tests/test_cost.py
|
import os
from tilecloud_chain import cost
from tilecloud_chain.tests import CompareCase
class TestCost(CompareCase):
    """End-to-end tests for the ``generate_cost`` CLI (``tilecloud_chain.cost``).

    Each test runs ``cost.main`` with a config/layer combination and compares
    its stdout against literal expected output assembled from the templates
    below.
    """

    def setUp(self) -> None:  # noqa
        self.maxDiff = None

    @classmethod
    def setUpClass(cls):  # noqa
        # Run from the tests directory so relative config paths resolve.
        os.chdir(os.path.dirname(__file__))

    @classmethod
    def tearDownClass(cls):  # noqa
        os.chdir(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))

    # Expected-output templates, filled with %-style dicts per zoom/layer/run.
    ZOOM_SUMMARY = """
%(tiles)s tiles in zoom %(zoom)s.
Time to generate: %(time)s [d h:mm:ss]
S3 PUT: %(s3)s [$]"""

    LAYER_SUMMARY = """
Number of tiles: %(tiles)s
Generation time: %(time)s [d h:mm:ss]
Generation cost: %(cost)s [$]"""

    GLOBAL_SUMMARY = """
===== GLOBAL =====
Total number of tiles: %(tiles)s
Total generation time: %(time)s [d h:mm:ss]
Total generation cost: %(cost)s [$]"""

    FINAL_SUMMARY = """
S3 Storage: %(storage)s [$/month]
S3 get: %(get)s [$/month]
"""

    # CloudFront: %(cloudfront)s [$/month]

    def test_cost_point(self) -> None:
        self.assert_cmd_equals(
            cmd=".build/venv/bin/generate_cost -c tilegeneration/test-fix.yaml -l point",
            main_func=cost.main,
            expected="\n".join(
                [
                    "Calculate zoom 0.",
                    "Calculate zoom 1.",
                    "Calculate zoom 2.",
                    "Calculate zoom 3.",
                    "",
                    "2 meta tiles in zoom 0.",
                    "2 meta tiles in zoom 1.",
                    "2 meta tiles in zoom 2.",
                    "2 meta tiles in zoom 3.",
                    self.ZOOM_SUMMARY % {"tiles": "6", "zoom": "0", "time": "0:00:00", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "6", "zoom": "1", "time": "0:00:00", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "6", "zoom": "2", "time": "0:00:00", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "6", "zoom": "3", "time": "0:00:00", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.LAYER_SUMMARY % {"tiles": "24", "time": "0:00:00", "cost": "0.00"},
                    self.FINAL_SUMMARY
                    % {
                        "storage": "0.00",
                        "get": "32.89",
                        # 'cloudfront': '31.89',
                    },
                ]
            ),
        )

    def test_cost_point_count(self) -> None:
        self.assert_cmd_equals(
            cmd=".build/venv/bin/generate_cost -c tilegeneration/test-fix.yaml -l point --cost-algo count",
            main_func=cost.main,
            expected="\n".join(
                [
                    "Calculate zoom 0.",
                    "Calculate zoom 1.",
                    "Calculate zoom 2.",
                    "Calculate zoom 3.",
                    "",
                    "1 meta tiles in zoom 0.",
                    "1 meta tiles in zoom 1.",
                    "6 meta tiles in zoom 2.",
                    "2 meta tiles in zoom 3.",
                    self.ZOOM_SUMMARY % {"tiles": "64", "zoom": "0", "time": "0:00:01", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "64", "zoom": "1", "time": "0:00:01", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "339", "zoom": "2", "time": "0:00:10", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "112", "zoom": "3", "time": "0:00:03", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.LAYER_SUMMARY % {"tiles": "579", "time": "0:00:17", "cost": "0.01"},
                    self.FINAL_SUMMARY
                    % {
                        "storage": "0.00",
                        "get": "32.89",
                        # 'cloudfront': '31.89',
                    },
                ]
            ),
        )

    def test_cost_line(self) -> None:
        self.assert_cmd_equals(
            cmd=".build/venv/bin/generate_cost -c tilegeneration/test-fix.yaml -l line",
            main_func=cost.main,
            expected="\n".join(
                [
                    "Calculate zoom 0.",
                    "Calculate zoom 1.",
                    "Calculate zoom 2.",
                    "Calculate zoom 3.",
                    "Calculate zoom 4.",
                    "",
                    "2 meta tiles in zoom 0.",
                    "2 meta tiles in zoom 1.",
                    "4 meta tiles in zoom 2.",
                    "8 meta tiles in zoom 3.",
                    "14 meta tiles in zoom 4.",
                    self.ZOOM_SUMMARY % {"tiles": "11", "zoom": "0", "time": "0:00:00", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "19", "zoom": "1", "time": "0:00:00", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "43", "zoom": "2", "time": "0:00:01", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "84", "zoom": "3", "time": "0:00:02", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "164", "zoom": "4", "time": "0:00:05", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.LAYER_SUMMARY % {"tiles": "321", "time": "0:00:10", "cost": "0.00"},
                    self.FINAL_SUMMARY
                    % {
                        "storage": "0.00",
                        "get": "32.89",
                        # 'cloudfront': '31.89',
                    },
                ]
            ),
        )

    def test_cost_line_count(self) -> None:
        self.assert_cmd_equals(
            cmd=".build/venv/bin/generate_cost -d -c tilegeneration/test-fix.yaml -l line --cost-algo count",
            main_func=cost.main,
            expected="\n".join(
                [
                    "Calculate zoom 0.",
                    "Calculate zoom 1.",
                    "Calculate zoom 2.",
                    "Calculate zoom 3.",
                    "Calculate zoom 4.",
                    "",
                    "1 meta tiles in zoom 0.",
                    "1 meta tiles in zoom 1.",
                    "6 meta tiles in zoom 2.",
                    "10 meta tiles in zoom 3.",
                    "21 meta tiles in zoom 4.",
                    self.ZOOM_SUMMARY % {"tiles": "64", "zoom": "0", "time": "0:00:01", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "64", "zoom": "1", "time": "0:00:01", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "383", "zoom": "2", "time": "0:00:11", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "493", "zoom": "3", "time": "0:00:15", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "600", "zoom": "4", "time": "0:00:18", "s3": "0.01"},
                    "SQS usage: 0.00 [$]",
                    self.LAYER_SUMMARY % {"tiles": "1604", "time": "0:00:49", "cost": "0.02"},
                    self.FINAL_SUMMARY
                    % {
                        "storage": "0.00",
                        "get": "32.89",
                        # 'cloudfront': '31.89',
                    },
                ]
            ),
        )

    def test_cost_polygon(self) -> None:
        self.assert_cmd_equals(
            cmd=".build/venv/bin/generate_cost -c tilegeneration/test-fix.yaml -l polygon",
            main_func=cost.main,
            expected="\n".join(
                [
                    "Calculate zoom 0.",
                    "Calculate zoom 1.",
                    "Calculate zoom 2.",
                    "Calculate zoom 3.",
                    "Calculate zoom 4.",
                    "",
                    self.ZOOM_SUMMARY % {"tiles": "13", "zoom": "0", "time": "0:00:00", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "35", "zoom": "1", "time": "0:00:02", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "167", "zoom": "2", "time": "0:00:10", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "601", "zoom": "3", "time": "0:00:36", "s3": "0.01"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "2268", "zoom": "4", "time": "0:02:16", "s3": "0.02"},
                    "SQS usage: 0.00 [$]",
                    self.LAYER_SUMMARY % {"tiles": "3084", "time": "0:03:05", "cost": "0.03"},
                    self.FINAL_SUMMARY
                    % {
                        "storage": "0.00",
                        "get": "32.89",
                        # 'cloudfront': '31.89',
                    },
                ]
            ),
        )

    def test_cost_polygon_count(self) -> None:
        self.assert_cmd_equals(
            cmd=".build/venv/bin/generate_cost -c tilegeneration/test-fix.yaml "
            "-l polygon --cost-algo count",
            main_func=cost.main,
            expected="\n".join(
                [
                    "Calculate zoom 0.",
                    "Calculate zoom 1.",
                    "Calculate zoom 2.",
                    "Calculate zoom 3.",
                    "Calculate zoom 4.",
                    "",
                    self.ZOOM_SUMMARY % {"tiles": "12", "zoom": "0", "time": "0:00:00", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "35", "zoom": "1", "time": "0:00:02", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "165", "zoom": "2", "time": "0:00:09", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "609", "zoom": "3", "time": "0:00:36", "s3": "0.01"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "2240", "zoom": "4", "time": "0:02:14", "s3": "0.02"},
                    "SQS usage: 0.00 [$]",
                    self.LAYER_SUMMARY % {"tiles": "3061", "time": "0:03:03", "cost": "0.03"},
                    self.FINAL_SUMMARY
                    % {
                        "storage": "0.00",
                        "get": "32.89",
                        # 'cloudfront': '31.89',
                    },
                ]
            ),
        )

    def test_cost_default(self) -> None:
        self.assert_cmd_equals(
            cmd=".build/venv/bin/generate_cost -c tilegeneration/test-fix.yaml",
            main_func=cost.main,
            expected="\n".join(
                [
                    "",
                    "===== line =====",
                    "Calculate zoom 0.",
                    "Calculate zoom 1.",
                    "Calculate zoom 2.",
                    "Calculate zoom 3.",
                    "Calculate zoom 4.",
                    "",
                    "2 meta tiles in zoom 0.",
                    "2 meta tiles in zoom 1.",
                    "4 meta tiles in zoom 2.",
                    "8 meta tiles in zoom 3.",
                    "14 meta tiles in zoom 4.",
                    self.ZOOM_SUMMARY % {"tiles": "11", "zoom": "0", "time": "0:00:00", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "19", "zoom": "1", "time": "0:00:00", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "43", "zoom": "2", "time": "0:00:01", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "84", "zoom": "3", "time": "0:00:02", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "164", "zoom": "4", "time": "0:00:05", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.LAYER_SUMMARY % {"tiles": "321", "time": "0:00:10", "cost": "0.00"},
                    "",
                    "===== polygon =====",
                    "Calculate zoom 0.",
                    "Calculate zoom 1.",
                    "Calculate zoom 2.",
                    "Calculate zoom 3.",
                    "Calculate zoom 4.",
                    "",
                    self.ZOOM_SUMMARY % {"tiles": "13", "zoom": "0", "time": "0:00:00", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "35", "zoom": "1", "time": "0:00:02", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "167", "zoom": "2", "time": "0:00:10", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "601", "zoom": "3", "time": "0:00:36", "s3": "0.01"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "2268", "zoom": "4", "time": "0:02:16", "s3": "0.02"},
                    "SQS usage: 0.00 [$]",
                    self.LAYER_SUMMARY % {"tiles": "3084", "time": "0:03:05", "cost": "0.03"},
                    self.GLOBAL_SUMMARY % {"tiles": "3405", "time": "0:03:15", "cost": "0.03"},
                    self.FINAL_SUMMARY
                    % {
                        "storage": "0.00",
                        "get": "55.78",
                        # 'cloudfront': '54.78',
                    },
                ]
            ),
        )

    def test_cost_polygon2(self) -> None:
        self.assert_cmd_equals(
            cmd=".build/venv/bin/generate_cost -c tilegeneration/test-fix.yaml -l polygon2",
            main_func=cost.main,
            expected="\n".join(
                [
                    "Calculate zoom 0.",
                    "Calculate zoom 1.",
                    "Calculate zoom 2.",
                    "",
                    "925 meta tiles in zoom 0.",
                    "21310 meta tiles in zoom 1.",
                    "84341 meta tiles in zoom 2.",
                    self.ZOOM_SUMMARY % {"tiles": "54534", "zoom": "0", "time": "0:27:43", "s3": "0.55"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "1340772", "zoom": "1", "time": "11:21:02", "s3": "13.41"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY
                    % {"tiles": "5351829", "zoom": "2", "time": "1 21:18:05", "s3": "53.52"},
                    "SQS usage: 0.00 [$]",
                    self.LAYER_SUMMARY % {"tiles": "6747135", "time": "2 9:06:51", "cost": "67.47"},
                    self.FINAL_SUMMARY
                    % {
                        "storage": "0.02",
                        "get": "32.89",
                        # 'cloudfront': '31.89',
                    },
                ]
            ),
        )

    def test_cost_nometa(self) -> None:
        self.assert_cmd_equals(
            cmd=".build/venv/bin/generate_cost -c tilegeneration/test-fix.yaml -l all",
            main_func=cost.main,
            expected="\n".join(
                [
                    "Calculate zoom 0.",
                    "Calculate zoom 1.",
                    "Calculate zoom 2.",
                    "Calculate zoom 3.",
                    "Calculate zoom 4.",
                    "",
                    self.ZOOM_SUMMARY % {"tiles": "2", "zoom": "0", "time": "0:00:00", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "4", "zoom": "1", "time": "0:00:00", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "10", "zoom": "2", "time": "0:00:00", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "27", "zoom": "3", "time": "0:00:01", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "84", "zoom": "4", "time": "0:00:05", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.LAYER_SUMMARY % {"tiles": "127", "time": "0:00:07", "cost": "0.00"},
                    self.FINAL_SUMMARY
                    % {
                        "storage": "0.00",
                        "get": "32.89",
                        # 'cloudfront': '31.89',
                    },
                ]
            ),
        )

    def test_cost_layer_bbox(self) -> None:
        self.assert_cmd_equals(
            cmd=".build/venv/bin/generate_cost -c tilegeneration/test-fix.yaml -l all --cost-algo count",
            main_func=cost.main,
            expected="\n".join(
                [
                    "Calculate zoom 0.",
                    "Calculate zoom 1.",
                    "Calculate zoom 2.",
                    "Calculate zoom 3.",
                    "Calculate zoom 4.",
                    "",
                    self.ZOOM_SUMMARY % {"tiles": "2", "zoom": "0", "time": "0:00:00", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "2", "zoom": "1", "time": "0:00:00", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "9", "zoom": "2", "time": "0:00:00", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "25", "zoom": "3", "time": "0:00:01", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "81", "zoom": "4", "time": "0:00:04", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.LAYER_SUMMARY % {"tiles": "119", "time": "0:00:07", "cost": "0.00"},
                    self.FINAL_SUMMARY
                    % {
                        "storage": "0.00",
                        "get": "32.89",
                        # 'cloudfront': '31.89',
                    },
                ]
            ),
        )

    def test_cost_no_geom(self) -> None:
        self.assert_cmd_equals(
            cmd=".build/venv/bin/generate_cost -c tilegeneration/test-fix.yaml -l point --no-geom",
            main_func=cost.main,
            expected="\n".join(
                [
                    "Calculate zoom 0.",
                    "Calculate zoom 1.",
                    "Calculate zoom 2.",
                    "Calculate zoom 3.",
                    "",
                    "11 meta tiles in zoom 0.",
                    "28 meta tiles in zoom 1.",
                    "123 meta tiles in zoom 2.",
                    "427 meta tiles in zoom 3.",
                    self.ZOOM_SUMMARY % {"tiles": "312", "zoom": "0", "time": "0:00:09", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "1090", "zoom": "1", "time": "0:00:33", "s3": "0.01"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "6237", "zoom": "2", "time": "0:03:10", "s3": "0.06"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "24190", "zoom": "3", "time": "0:12:18", "s3": "0.24"},
                    "SQS usage: 0.00 [$]",
                    self.LAYER_SUMMARY % {"tiles": "31829", "time": "0:16:12", "cost": "0.32"},
                    self.FINAL_SUMMARY
                    % {
                        "storage": "0.00",
                        "get": "32.89",
                        # 'cloudfront': '31.89',
                    },
                ]
            ),
        )

    def test_cost_sqs_nometa(self) -> None:
        self.assert_cmd_equals(
            cmd=".build/venv/bin/generate_cost -c tilegeneration/test.yaml -l point_hash_no_meta",
            main_func=cost.main,
            expected="\n".join(
                [
                    "Calculate zoom 0.",
                    "Calculate zoom 1.",
                    "Calculate zoom 2.",
                    "Calculate zoom 3.",
                    "Calculate zoom 4.",
                    "",
                    self.ZOOM_SUMMARY % {"tiles": "279", "zoom": "0", "time": "0:00:16", "s3": "0.00"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "1026", "zoom": "1", "time": "0:01:01", "s3": "0.01"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "6079", "zoom": "2", "time": "0:06:04", "s3": "0.06"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "23876", "zoom": "3", "time": "0:23:52", "s3": "0.24"},
                    "SQS usage: 0.00 [$]",
                    self.ZOOM_SUMMARY % {"tiles": "94626", "zoom": "4", "time": "1:34:37", "s3": "0.95"},
                    "SQS usage: 0.00 [$]",
                    self.LAYER_SUMMARY % {"tiles": "125886", "time": "2:05:53", "cost": "1.26"},
                    self.FINAL_SUMMARY % {"storage": "0.00", "get": "32.89"},
                ]
            ),
        )
|
{"/tilecloud_chain/copy_.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/format.py"], "/tilecloud_chain/controller.py": ["/tilecloud_chain/__init__.py"], "/tilecloud_chain/views/admin.py": ["/tilecloud_chain/server.py", "/tilecloud_chain/__init__.py", "/tilecloud_chain/controller.py"], "/tilecloud_chain/server.py": ["/tilecloud_chain/security.py", "/tilecloud_chain/__init__.py", "/tilecloud_chain/controller.py"], "/tilecloud_chain/expiretiles.py": ["/tilecloud_chain/__init__.py"], "/tilecloud_chain/tests/test_controller.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_config.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_expiretiles.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/__init__.py": ["/tilecloud_chain/security.py", "/tilecloud_chain/multitilestore.py", "/tilecloud_chain/timedtilestore.py"], "/tilecloud_chain/tests/test_generate.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_serve.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/server.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/internal_mapcache.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/generate.py", "/tilecloud_chain/server.py"], "/tilecloud_chain/tests/test_cost.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/cost.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/format.py"], "/tilecloud_chain/tests/test_copy.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/generate.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/database_logger.py", "/tilecloud_chain/format.py", "/tilecloud_chain/multitilestore.py", "/tilecloud_chain/timedtilestore.py", "/tilecloud_chain/mapnik_.py"], "/tilecloud_chain/tests/test_error.py": 
["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"]}
|
14,624
|
camptocamp/tilecloud-chain
|
refs/heads/master
|
/tilecloud_chain/cost.py
|
import logging
import sys
from argparse import ArgumentParser, Namespace
from datetime import timedelta
from typing import Iterable, Iterator, Optional, Tuple
from tilecloud import Tile, TileStore
from tilecloud_chain import Run, TileGeneration, add_common_options
from tilecloud_chain.format import duration_format
logger = logging.getLogger(__name__)
def main() -> None:
"""Calculate the cost, main function."""
try:
parser = ArgumentParser(description="Used to calculate the generation cost", prog=sys.argv[0])
add_common_options(parser, tile_pyramid=False, dimensions=True)
parser.add_argument(
"--cost-algo",
"--calculate-cost-algorithm",
default="area",
dest="cost_algo",
choices=("area", "count"),
help="The algorithm use to calculate the cost default base on the 'area' "
"of the generation geometry, can also be 'count', to be base on number of tiles to generate.",
)
options = parser.parse_args()
gene = TileGeneration(
options.config,
options=options,
layer_name=options.layer,
base_config={"cost": {}},
multi_thread=False,
)
config = gene.get_config(options.config)
all_size: float = 0
tile_size: float = 0
all_tiles = 0
if options.layer:
layer = config.config["layers"][options.layer]
(all_size, all_time, all_price, all_tiles) = _calculate_cost(gene, options.layer, options)
tile_size = layer["cost"]["tile_size"] / (1024.0 * 1024)
else:
all_time = timedelta()
all_price = 0
for layer_name in gene.get_config(options.config).config["generation"]["default_layers"]:
print()
print(f"===== {layer_name} =====")
layer = config.config["layers"][layer_name]
gene.create_log_tiles_error(layer_name)
(size, time, price, tiles) = _calculate_cost(gene, layer_name, options)
tile_size += layer["cost"]["tile_size"] / (1024.0 * 1024)
all_time += time
all_price += price
all_size += size
all_tiles += tiles
print()
print("===== GLOBAL =====")
print(f"Total number of tiles: {all_tiles}")
print(f"Total generation time: {duration_format(all_time)} [d h:mm:ss]")
print(f"Total generation cost: {all_price:0.2f} [$]")
print()
s3_cost = all_size * gene.get_main_config().config["cost"]["s3"]["storage"] / (1024.0 * 1024 * 1024)
print(f"S3 Storage: {s3_cost:0.2f} [$/month]")
s3_get_cost = (
gene.get_main_config().config["cost"]["s3"]["get"]
* config.config["cost"]["request_per_layers"]
/ 10000.0
+ gene.get_main_config().config["cost"]["s3"]["download"]
* config.config["cost"]["request_per_layers"]
* tile_size
)
print(f"S3 get: {s3_get_cost:0.2f} [$/month]")
# if 'cloudfront' in gene.config['cost']:
# print('CloudFront: %0.2f [$/month]' % ()
# gene.config['cost']['cloudfront']['get'] *
# gene.config['cost']['request_per_layers'] / 10000.0 +
# gene.config['cost']['cloudfront']['download'] *
# gene.config['cost']['request_per_layers'] * tile_size)
except SystemExit:
raise
except: # pylint: disable=bare-except
logger.exception("Exit with exception")
sys.exit(1)
def _calculate_cost(
gene: TileGeneration, layer_name: str, options: Namespace
) -> Tuple[float, timedelta, float, int]:
nb_metatiles = {}
nb_tiles = {}
config = gene.get_config(options.config)
layer = config.config["layers"][layer_name]
meta = layer["meta"]
if options.cost_algo == "area":
tile_size = config.config["grids"][layer["grid"]]["tile_size"]
for zoom, resolution in enumerate(config.config["grids"][layer["grid"]]["resolutions"]):
if "min_resolution_seed" in layer and resolution < layer["min_resolution_seed"]:
continue
print(f"Calculate zoom {zoom}.")
px_buffer = layer["px_buffer"] + layer["meta_buffer"] if meta else 0
m_buffer = px_buffer * resolution
if meta:
size = tile_size * layer["meta_size"] * resolution
meta_buffer = size * 0.7 + m_buffer
meta_geom = gene.get_geoms(config, layer_name)[zoom].buffer(meta_buffer, 1)
nb_metatiles[zoom] = int(round(meta_geom.area / size**2))
size = tile_size * resolution
tile_buffer = size * 0.7 + m_buffer
geom = gene.get_geoms(config, layer_name)[zoom].buffer(tile_buffer, 1)
nb_tiles[zoom] = int(round(geom.area / size**2))
elif options.cost_algo == "count":
gene.init_tilecoords(config, layer_name)
gene.add_geom_filter()
if meta:
def count_metatile(tile: Tile) -> Tile:
if tile:
if tile.tilecoord.z in nb_metatiles:
nb_metatiles[tile.tilecoord.z] += 1
else:
nb_metatiles[tile.tilecoord.z] = 1
return tile
gene.imap(count_metatile)
class MetaTileSplitter(TileStore):
"""Convert the metatile flow to tile flow."""
def get(self, tiles: Iterable[Optional[Tile]]) -> Iterator[Tile]:
assert tiles is not None
for metatile in tiles:
assert metatile is not None
for tilecoord in metatile.tilecoord:
yield Tile(tilecoord)
gene.add_metatile_splitter(MetaTileSplitter())
# Only keep tiles that intersect geometry
gene.add_geom_filter()
def count_tile(tile: Tile) -> Tile:
if tile:
if tile.tilecoord.z in nb_tiles:
nb_tiles[tile.tilecoord.z] += 1
else:
print(f"Calculate zoom {tile.tilecoord.z}.")
nb_tiles[tile.tilecoord.z] = 1
return tile
gene.imap(count_tile)
run = Run(gene, gene.functions_metatiles)
assert gene.tilestream
for tile in gene.tilestream:
tile.metadata["layer"] = layer_name
run(tile)
times = {}
print()
for z, nb_metatile in nb_metatiles.items():
print(f"{nb_metatile} meta tiles in zoom {z}.")
times[z] = layer["cost"]["metatile_generation_time"] * nb_metatile
price: float = 0
all_size: float = 0
all_time: float = 0
all_tiles = 0
for z, nb_tile in nb_tiles.items():
print()
print(f"{nb_tile} tiles in zoom {z}.")
all_tiles += nb_tile
if meta:
time = times[z] + layer["cost"]["tile_generation_time"] * nb_tile
else:
time = layer["cost"]["tileonly_generation_time"] * nb_tile
size = layer["cost"]["tile_size"] * nb_tile
all_size += size
all_time += time
td = timedelta(milliseconds=time)
print(f"Time to generate: {duration_format(td)} [d h:mm:ss]")
c = gene.get_main_config().config["cost"]["s3"]["put"] * nb_tile / 1000.0
price += c
print(f"S3 PUT: {c:0.2f} [$]")
if "sqs" in gene.get_main_config().config:
if meta:
nb_sqs = nb_metatiles[z] * 3
else:
nb_sqs = nb_tile * 3
c = nb_sqs * gene.get_main_config().config["cost"]["sqs"]["request"] / 1000000.0
price += c
print(f"SQS usage: {c:0.2f} [$]")
print()
td = timedelta(milliseconds=all_time)
print(f"Number of tiles: {all_tiles}")
print(f"Generation time: {duration_format(td)} [d h:mm:ss]")
print(f"Generation cost: {price:0.2f} [$]")
return all_size, td, price, all_tiles
|
{"/tilecloud_chain/copy_.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/format.py"], "/tilecloud_chain/controller.py": ["/tilecloud_chain/__init__.py"], "/tilecloud_chain/views/admin.py": ["/tilecloud_chain/server.py", "/tilecloud_chain/__init__.py", "/tilecloud_chain/controller.py"], "/tilecloud_chain/server.py": ["/tilecloud_chain/security.py", "/tilecloud_chain/__init__.py", "/tilecloud_chain/controller.py"], "/tilecloud_chain/expiretiles.py": ["/tilecloud_chain/__init__.py"], "/tilecloud_chain/tests/test_controller.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_config.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_expiretiles.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/__init__.py": ["/tilecloud_chain/security.py", "/tilecloud_chain/multitilestore.py", "/tilecloud_chain/timedtilestore.py"], "/tilecloud_chain/tests/test_generate.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_serve.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/server.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/internal_mapcache.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/generate.py", "/tilecloud_chain/server.py"], "/tilecloud_chain/tests/test_cost.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/cost.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/format.py"], "/tilecloud_chain/tests/test_copy.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/generate.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/database_logger.py", "/tilecloud_chain/format.py", "/tilecloud_chain/multitilestore.py", "/tilecloud_chain/timedtilestore.py", "/tilecloud_chain/mapnik_.py"], "/tilecloud_chain/tests/test_error.py": 
["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"]}
|
14,625
|
camptocamp/tilecloud-chain
|
refs/heads/master
|
/tilecloud_chain/tests/test_copy.py
|
import os
import shutil
import requests
from tilecloud_chain import copy_
from tilecloud_chain.tests import CompareCase
class TestGenerate(CompareCase):
def setUp(self) -> None: # noqa
self.maxDiff = None
@classmethod
def setUpClass(cls): # noqa
os.chdir(os.path.dirname(__file__))
if os.path.exists("/tmp/tiles"):
shutil.rmtree("/tmp/tiles")
os.makedirs("/tmp/tiles/src/1.0.0/point_hash/default/21781/0/0/")
@classmethod
def tearDownClass(cls): # noqa
os.chdir(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
if os.path.exists("/tmp/tiles"):
shutil.rmtree("/tmp/tiles")
def test_copy(self) -> None:
with open("/tmp/tiles/src/1.0.0/point_hash/default/21781/0/0/0.png", "w") as f:
f.write("test image")
for d in ("-d", "-q", "-v"):
self.assert_cmd_equals(
cmd=f".build/venv/bin/generate_copy {d} -c tilegeneration/test-copy.yaml src dst",
main_func=copy_.main,
regex=True,
expected="""The tile copy of layer 'point_hash' is finish
Nb copy tiles: 1
Nb errored tiles: 0
Nb dropped tiles: 0
Total time: 0:00:[0-9][0-9]
Total size: 10 o
Time per tile: [0-9]+ ms
Size per tile: 10(.0)? o
"""
if d != "-q"
else "",
empty_err=True,
)
with open("/tmp/tiles/dst/1.0.0/point_hash/default/21781/0/0/0.png") as f:
self.assertEqual(f.read(), "test image")
def test_process(self) -> None:
for d in ("-vd", "-q", "-v", ""):
response = requests.get(
"http://mapserver:8080/mapserv?STYLES=default&SERVICE=WMS&FORMAT=\
image%2Fpng&REQUEST=GetMap&HEIGHT=256&WIDTH=256&VERSION=1.1.1&BBOX=\
%28560800.0%2C+158000.0%2C+573600.0%2C+170800.0%29&LAYERS=point&SRS=EPSG%3A21781"
)
response.raise_for_status()
with open("/tmp/tiles/src/1.0.0/point_hash/default/21781/0/0/0.png", "wb") as out:
out.write(response.content)
statinfo = os.stat(
"/tmp/tiles/src/1.0.0/point_hash/default/21781/0/0/0.png",
)
self.assertEqual(statinfo.st_size, 755)
self.assert_cmd_equals(
cmd=".build/venv/bin/generate_process {} -c "
"tilegeneration/test-copy.yaml --cache src optipng".format(d),
main_func=copy_.process,
regex=True,
expected="""The tile process of layer 'point_hash' is finish
Nb process tiles: 1
Nb errored tiles: 0
Nb dropped tiles: 0
Total time: 0:00:[0-9][0-9]
Total size: 103 o
Time per tile: [0-9]+ ms
Size per tile: 103(.0)? o
"""
if d != "-q"
else "",
empty_err=True,
)
statinfo = os.stat(
"/tmp/tiles/src/1.0.0/point_hash/default/21781/0/0/0.png",
)
self.assertEqual(statinfo.st_size, 103)
|
{"/tilecloud_chain/copy_.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/format.py"], "/tilecloud_chain/controller.py": ["/tilecloud_chain/__init__.py"], "/tilecloud_chain/views/admin.py": ["/tilecloud_chain/server.py", "/tilecloud_chain/__init__.py", "/tilecloud_chain/controller.py"], "/tilecloud_chain/server.py": ["/tilecloud_chain/security.py", "/tilecloud_chain/__init__.py", "/tilecloud_chain/controller.py"], "/tilecloud_chain/expiretiles.py": ["/tilecloud_chain/__init__.py"], "/tilecloud_chain/tests/test_controller.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_config.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_expiretiles.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/__init__.py": ["/tilecloud_chain/security.py", "/tilecloud_chain/multitilestore.py", "/tilecloud_chain/timedtilestore.py"], "/tilecloud_chain/tests/test_generate.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_serve.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/server.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/internal_mapcache.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/generate.py", "/tilecloud_chain/server.py"], "/tilecloud_chain/tests/test_cost.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/cost.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/format.py"], "/tilecloud_chain/tests/test_copy.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/generate.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/database_logger.py", "/tilecloud_chain/format.py", "/tilecloud_chain/multitilestore.py", "/tilecloud_chain/timedtilestore.py", "/tilecloud_chain/mapnik_.py"], "/tilecloud_chain/tests/test_error.py": 
["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"]}
|
14,626
|
camptocamp/tilecloud-chain
|
refs/heads/master
|
/tilecloud_chain/generate.py
|
import logging
import os
import random
import socket
import sys
import threading
from argparse import ArgumentParser, Namespace
from datetime import datetime
from getpass import getuser
from typing import IO, Callable, List, Optional, cast
import boto3
import prometheus_client
import tilecloud.filter.error
import tilecloud_chain
from tilecloud import Tile, TileCoord, TileStore
from tilecloud.filter.logger import Logger
from tilecloud.layout.wms import WMSTileLayout
from tilecloud.store.url import URLTileStore
from tilecloud_chain import (
Count,
CountSize,
HashDropper,
HashLogger,
LocalProcessFilter,
MultiAction,
TileGeneration,
TilesFileStore,
add_common_options,
get_queue_store,
parse_tilecoord,
quote,
)
from tilecloud_chain.database_logger import DatabaseLogger, DatabaseLoggerInit
from tilecloud_chain.format import default_int, duration_format, size_format
from tilecloud_chain.multitilestore import MultiTileStore
from tilecloud_chain.timedtilestore import TimedTileStoreWrapper
_LOGGER = logging.getLogger(__name__)
class LogTilesContext:
"""Logging tile context."""
def __init__(self, gene: TileGeneration):
self.gene = gene
def __call__(self, tile: Tile) -> Tile:
tilecloud_chain.LOGGING_CONTEXT.setdefault(os.getpid(), {})[threading.current_thread().native_id] = { # type: ignore
"host": tile.metadata.get("host"),
"layer": tile.metadata.get("layer"),
"meta_tilecoord": str(tile.tilecoord),
}
return tile
class Generate:
"""Generate the tiles, generate the queue, ..."""
def __init__(
self, options: Namespace, gene: TileGeneration, out: Optional[IO[str]], server: bool = False
) -> None:
self._count_metatiles: Optional[Count] = None
self._count_metatiles_dropped: Optional[Count] = None
self._count_tiles: Optional[Count] = None
self._count_tiles_dropped: Optional[Count] = None
self._count_tiles_stored: Optional[CountSize] = None
self._queue_tilestore: Optional[TileStore] = None
self._cache_tilestore: Optional[TileStore] = None
self._options = options
self._gene = gene
self.out = out
if getattr(self._options, "get_hash", None) is not None:
self._options.role = "hash"
self._options.test = 1
if getattr(self._options, "tiles", None) is not None:
self._options.role = "slave"
self._generate_init()
if self._options.role != "master" and not server:
self._generate_tiles()
def gene(self, layer_name: Optional[str] = None) -> None:
if self._count_tiles is not None:
self._count_tiles.nb = 0
if self._count_tiles_dropped is not None:
self._count_tiles_dropped.nb = 0
if self._count_tiles_stored is not None:
self._count_tiles_stored.nb = 0
self._count_tiles_stored.size = 0
if self._count_metatiles is not None:
self._count_metatiles.nb = 0
if self._count_metatiles_dropped is not None:
self._count_metatiles_dropped.nb = 0
self._gene.error = 0
if self._options.role != "slave" and not self._options.get_hash and not self._options.get_bbox:
assert layer_name
self._gene.create_log_tiles_error(layer_name)
if self._options.role != "slave" or self._options.tiles:
self._generate_queue(layer_name)
self.generate_consume()
self.generate_resume(layer_name)
def _generate_init(self) -> None:
if self._options.role != "server":
self._count_metatiles_dropped = Count()
self._count_tiles = Count()
self._count_tiles_dropped = Count()
if self._options.role in ("master", "slave") and not self._options.tiles:
self._queue_tilestore = get_queue_store(self._gene.get_main_config(), self._options.daemon)
if self._options.role in ("local", "master"):
self._gene.add_geom_filter()
if self._options.role in ("local", "master") and "logging" in self._gene.get_main_config().config:
self._gene.imap(
DatabaseLoggerInit(
self._gene.get_main_config().config["logging"],
self._options is not None and self._options.daemon,
)
)
if self._options.local_process_number is not None:
self.add_local_process_filter()
# At this stage, the tilestream contains metatiles that intersect geometry
self._gene.add_logger()
if self._options.role == "master":
assert self._queue_tilestore is not None
# Put the metatiles into the SQS or Redis queue
self._gene.put(self._queue_tilestore)
self._count_tiles = self._gene.counter()
if self._options.role in ("local", "slave"):
self._cache_tilestore = self._gene.get_tilesstore()
assert self._cache_tilestore is not None
def add_local_process_filter(self) -> None:
self._gene.imap(
LocalProcessFilter(
self._gene.get_main_config().config["generation"]["number_process"],
self._options.local_process_number,
)
)
def _generate_queue(self, layer_name: Optional[str]) -> None:
if self._options.tiles:
self._gene.set_store(TilesFileStore(self._options.tiles))
return
assert layer_name is not None
assert self._gene.config_file is not None
config = self._gene.get_config(self._gene.config_file)
layer = config.config["layers"][layer_name]
if self._options.get_bbox:
try:
tilecoord = parse_tilecoord(self._options.get_bbox)
bounds = default_int(self._gene.get_grid(config, layer["grid"]).extent(tilecoord))
print(f"Tile bounds: [{','.join([str(b) for b in bounds])}]", file=self.out)
sys.exit()
except ValueError:
_LOGGER.exception(
"Tile '%s' is not in the format 'z/x/y' or z/x/y:+n/+n",
self._options.get_bbox,
)
sys.exit(1)
if self._options.role in ("local", "master"):
# Generate a stream of metatiles
self._gene.init_tilecoords(config, layer_name)
elif self._options.role == "hash":
layer = config.config["layers"][layer_name]
try:
z, x, y = (int(v) for v in self._options.get_hash.split("/"))
if layer.get("meta"):
self._gene.set_tilecoords(config, [TileCoord(z, x, y, layer["meta_size"])], layer_name)
else:
self._gene.set_tilecoords(config, [TileCoord(z, x, y)], layer_name)
except ValueError:
_LOGGER.exception("Tile '%s' is not in the format 'z/x/y'", self._options.get_hash)
sys.exit(1)
def _generate_tiles(self) -> None:
if self._options.role in ("slave") and not self._options.tiles:
assert self._queue_tilestore is not None
# Get the metatiles from the SQS/Redis queue
self._gene.set_store(self._queue_tilestore)
self._gene.imap(lambda tile: tile if "layer" in tile.metadata else None)
self._gene.imap(LogTilesContext(self._gene))
if self._options.role != "server":
self._count_metatiles = self._gene.counter()
self._gene.get(
TimedTileStoreWrapper(
MultiTileStore(TilestoreGetter(self)),
store_name="get",
),
"Get tile",
)
if self._options.role in ("local", "slave") and "logging" in self._gene.get_main_config().config:
self._gene.imap(
DatabaseLogger(
self._gene.get_main_config().config["logging"],
self._options is not None and self._options.daemon,
)
)
self._gene.init(
self._queue_tilestore
if "error_file" in self._gene.get_main_config().config["generation"]
else None,
self._options.daemon,
)
else:
self._gene.init(daemon=self._options.daemon)
if self._options.role == "hash":
self._gene.imap(HashLogger("empty_metatile_detection", self.out))
elif not self._options.near:
assert self._count_metatiles_dropped is not None
self._gene.imap(MultiAction(HashDropperGetter(self, True, self._count_metatiles_dropped)))
def add_elapsed_togenerate(metatile: Tile) -> Optional[Tile]:
if metatile is not None:
metatile.elapsed_togenerate = metatile.tilecoord.n**2 # type: ignore
return metatile
return None
self._gene.imap(add_elapsed_togenerate)
# Split the metatile image into individual tiles
self._gene.add_metatile_splitter()
self._gene.imap(Logger(_LOGGER, logging.INFO, "%(tilecoord)s, %(formated_metadata)s"))
if self._count_tiles is not None:
self._gene.imap(self._count_tiles)
self._gene.process(key="pre_hash_post_process")
if self._options.role == "hash":
self._gene.imap(HashLogger("empty_tile_detection", self.out))
elif not self._options.near:
assert self._count_tiles_dropped is not None
self._gene.imap(MultiAction(HashDropperGetter(self, False, self._count_tiles_dropped)))
if self._options.role != "server":
self._gene.process()
if self._options.role in ("local", "slave"):
self._count_tiles_stored = self._gene.counter_size()
if self._options.time:
def log_size(tile: Tile) -> Tile:
assert tile.data is not None
sys.stdout.write(f"size: {len(tile.data)}\n")
return tile
self._gene.imap(log_size)
assert self._cache_tilestore is not None
self._gene.put(self._cache_tilestore, "Store the tile")
if self._options.role == "slave" and not self._options.tiles:
def delete_from_store(tile: Tile) -> Tile:
assert self._queue_tilestore is not None
if hasattr(tile, "metatile"):
metatile: Tile = tile.metatile
metatile.elapsed_togenerate -= 1 # type: ignore
if metatile.elapsed_togenerate == 0: # type: ignore
self._queue_tilestore.delete_one(metatile)
else:
self._queue_tilestore.delete_one(tile)
return tile
self._gene.imap(delete_from_store)
if self._options.role in ("local", "slave") and "logging" in self._gene.get_main_config().config:
self._gene.imap(
DatabaseLogger(
self._gene.get_main_config().config["logging"],
self._options is not None and self._options.daemon,
)
)
self._gene.init(daemon=self._options.daemon)
def generate_consume(self) -> None:
if self._options.time is not None:
options = self._options
class LogTime:
"""Log the generation time."""
n = 0
t1 = None
def __call__(self, tile: Tile) -> Tile:
self.n += 1
assert options.time
if self.n == options.time:
self.t1 = datetime.now()
elif self.n == 2 * options.time:
t2 = datetime.now()
assert self.t1
duration = (t2 - self.t1) / options.time
time = (
duration.days * 24 * 3600 + duration.seconds
) * 1000000 + duration.microseconds
sys.stdout.write(f"time: {time}\n")
return tile
self._gene.imap(LogTime())
self._gene.consume(self._options.time * 3)
else:
self._gene.consume()
def generate_resume(self, layer_name: Optional[str]) -> None:
config = self._gene.get_config(self._gene.config_file) if self._gene.config_file is not None else None
if self._options.time is None:
layer = None
if layer_name is not None:
assert config is not None
layer = config.config["layers"][layer_name]
all_dimensions = self._gene.get_all_dimensions(layer)
formated_dimensions = " - ".join(
[", ".join(["=".join(d) for d in dimensions.items()]) for dimensions in all_dimensions]
)
suffix = (
""
if ((len(all_dimensions) == 1 and len(all_dimensions[0]) == 0) or layer["type"] != "wms")
else f" ({formated_dimensions})"
)
message = [f"The tile generation of layer '{layer_name}{suffix}' is finish"]
else:
message = ["The tile generation is finish"]
if self._options.role == "master":
assert self._count_tiles
message.append(f"Nb of generated jobs: {self._count_tiles.nb}")
elif layer.get("meta") if layer is not None else self._options.role == "slave":
assert self._count_metatiles is not None
assert self._count_metatiles_dropped is not None
message += [
f"Nb generated metatiles: {self._count_metatiles.nb}",
f"Nb metatiles dropped: {self._count_metatiles_dropped.nb}",
]
if self._options.role != "master":
assert self._count_tiles is not None
assert self._count_tiles_dropped is not None
message += [
f"Nb generated tiles: {self._count_tiles.nb}",
f"Nb tiles dropped: {self._count_tiles_dropped.nb}",
]
if self._options.role in ("local", "slave"):
assert self._count_tiles_stored is not None
assert self._count_tiles is not None
message += [
f"Nb tiles stored: {self._count_tiles_stored.nb}",
f"Nb tiles in error: {self._gene.error}",
f"Total time: {duration_format(self._gene.duration)}",
]
if self._count_tiles_stored.nb != 0:
message.append(f"Total size: {size_format(self._count_tiles_stored.size)}")
if self._count_tiles.nb != 0:
message.append(
"Time per tile: "
f"{(self._gene.duration / self._count_tiles.nb * 1000).seconds:0.0f} ms"
)
if self._count_tiles_stored.nb != 0:
message.append(
"Size per tile: "
f"{self._count_tiles_stored.size / self._count_tiles_stored.nb:0.0f} o"
)
if not self._options.quiet and self._options.role in ("local", "slave", "master") and message:
print("\n".join(message) + "\n", file=self.out)
if self._cache_tilestore is not None and hasattr(self._cache_tilestore, "connection"):
self._cache_tilestore.connection.close()
if (
self._options.role != "hash"
and self._options.time is None
and config is not None
and "sns" in config.config
):
if "region" in config.config["sns"]:
sns_client = boto3.client("sns", region_name=config.config["sns"].get("region", "eu-west-1"))
else:
sns_client = boto3.client("sns")
sns_message = [message[0]]
sns_message += [
f"Layer: {layer_name if layer_name is not None else '(All layers)'}",
f"Role: {self._options.role}",
f"Host: {socket.getfqdn()}",
f"Command: {' '.join([quote(arg) for arg in sys.argv])}",
]
sns_message += message[1:]
sns_client.publish(
TopicArn=config.config["sns"]["topic"],
Message="\n".join(sns_message),
Subject=f"Tile generation ({layer_name if layer_name is not None else 'All layers'} - "
f"{self._options.role})",
)
class TilestoreGetter:
"""Used to get the correct tilestore based on the layername config file any layer type."""
def __init__(self, gene: Generate):
self.gene = gene
def __call__(self, config_file: str, layer_name: str) -> Optional[TileStore]:
config = self.gene._gene.get_config(config_file)
layer = config.config["layers"][layer_name]
if layer["type"] == "wms":
params = layer.get("params", {}).copy()
if "STYLES" not in params:
params["STYLES"] = ",".join(layer["wmts_style"] for _ in layer["layers"].split(","))
if layer.get("generate_salt", False):
params["SALT"] = str(random.randint(0, 999999)) # nosec
# Get the metatile image from the WMS server
return TimedTileStoreWrapper(
URLTileStore(
tilelayouts=(
WMSTileLayout(
url=layer["url"],
layers=layer["layers"],
srs=config.config["grids"][layer["grid"]]["srs"],
format=layer["mime_type"],
border=layer["meta_buffer"] if layer["meta"] else 0,
tilegrid=self.gene._gene.get_grid(config, layer["grid"]),
params=params,
),
),
headers=layer["headers"],
),
"wms",
)
elif layer["type"] == "mapnik":
try:
from tilecloud.store.mapnik_ import MapnikTileStore # pylint: disable=import-outside-toplevel
from tilecloud_chain.mapnik_ import ( # pylint: disable=import-outside-toplevel
MapnikDropActionTileStore,
)
except ImportError:
if os.environ.get("CI", "FALSE") == "FALSE": # pragma nocover
_LOGGER.error("Mapnik is not available", exc_info=True)
return None
grid = config.config["grids"][layer["grid"]]
if cast(str, layer.get("output_format", "png")) == "grid":
assert self.gene._count_tiles
assert self.gene._count_tiles_dropped
return MapnikDropActionTileStore(
tilegrid=self.gene._gene.get_grid(config, layer["grid"]),
mapfile=layer["mapfile"],
image_buffer=layer["meta_buffer"] if layer.get("meta") else 0,
data_buffer=layer.get("data_buffer", 128),
output_format=layer.get("output_format", "png"),
resolution=layer.get("resolution", 4),
layers_fields=layer.get("layers_fields", {}),
drop_empty_utfgrid=layer.get("drop_empty_utfgrid", False),
store=self.gene._cache_tilestore,
queue_store=self.gene._queue_tilestore,
count=[self.gene._count_tiles, self.gene._count_tiles_dropped],
proj4_literal=grid["proj4_literal"],
)
else:
return MapnikTileStore(
tilegrid=self.gene._gene.get_grid(config, layer["grid"]),
mapfile=layer["mapfile"],
image_buffer=layer["meta_buffer"] if layer.get("meta") else 0,
data_buffer=layer.get("data_buffer", 128),
output_format=cast(str, layer.get("output_format", "png")),
proj4_literal=grid["proj4_literal"],
)
return None
def detach() -> None:
"""Detach to the parent process."""
try:
pid = os.fork()
if pid > 0:
print(f"Detached with pid {pid}.")
sys.stderr.write(str(pid))
# exit parent
sys.exit(0)
except OSError as e:
_LOGGER.exception("fork #1 failed: %d (%s)", e.errno, e.strerror)
sys.exit(1)
def main(args: Optional[List[str]] = None, out: Optional[IO[str]] = None) -> None:
"""Run the tiles generation."""
try:
parser = ArgumentParser(
description="Used to generate the tiles", prog=args[0] if args else sys.argv[0]
)
add_common_options(parser, dimensions=True)
parser.add_argument(
"--get-hash", metavar="TILE", help="get the empty tiles hash, use the specified TILE z/x/y"
)
parser.add_argument(
"--get-bbox",
metavar="TILE",
help="get the bbox of a tile, use the specified TILE z/x/y, or z/x/y:+n/+n for metatiles",
)
parser.add_argument(
"--role",
default="local",
choices=("local", "master", "slave"),
help="local/master/slave, master to file the queue and slave to generate the tiles",
)
parser.add_argument(
"--local-process-number", default=None, help="The number of process that we run in parallel"
)
parser.add_argument(
"--detach", default=False, action="store_true", help="run detached from the terminal"
)
parser.add_argument(
"--daemon", default=False, action="store_true", help="run continuously as a daemon"
)
parser.add_argument(
"--tiles",
metavar="FILE",
help="Generate the tiles from a tiles file, use the format z/x/y, or z/x/y:+n/+n for metatiles",
)
options = parser.parse_args(args[1:] if args else sys.argv[1:])
if options.detach:
detach()
if options.daemon and "C2C_PROMETHEUS_PORT" in os.environ:
prometheus_client.start_http_server(int(os.environ["C2C_PROMETHEUS_PORT"]))
gene = TileGeneration(
config_file=options.config or os.environ.get("TILEGENERATION_CONFIGFILE"),
options=options,
multi_thread=options.get_hash is None,
)
if (
options.get_hash is None
and options.get_bbox is None
and options.config is not None
and "authorised_user" in gene.get_main_config().config.get("generation", {})
and gene.get_main_config().config["generation"]["authorised_user"] != getuser()
):
_LOGGER.error(
"not authorized, authorized user is: %s.",
gene.get_main_config().config["generation"]["authorised_user"],
)
sys.exit(1)
if options.config:
config = gene.get_config(options.config)
if options.cache is None and options.config:
options.cache = config.config["generation"]["default_cache"]
if options.tiles is not None and options.role not in ["local", "master"]:
_LOGGER.error("The --tiles option work only with role local or master")
sys.exit(1)
try:
generate = Generate(options, gene, out)
if options.role == "slave":
generate.gene()
elif options.layer:
generate.gene(options.layer)
elif options.get_bbox:
_LOGGER.error("With --get-bbox option you need to specify a layer")
sys.exit(1)
elif options.get_hash:
_LOGGER.error("With --get-hash option you need to specify a layer")
sys.exit(1)
else:
if options.config:
for layer in config.config["generation"].get(
"default_layers", config.config["layers"].keys()
):
generate.gene(layer)
except tilecloud.filter.error.TooManyErrors:
_LOGGER.exception("Too many errors")
sys.exit(1)
finally:
gene.close()
except SystemExit:
raise
except: # pylint: disable=bare-except
_LOGGER.exception("Exit with exception")
if os.environ.get("TESTS", "false").lower() == "true":
raise
sys.exit(1)
class HashDropperGetter:
    """Drop the tiles based on the hash and the size."""

    def __init__(self, gene: Generate, meta: bool, count: Count):
        """Initialize.

        gene: the running Generate instance (gives access to config and stores)
        meta: True to use the metatile detection config, False for plain tiles
        count: counter invoked by the returned HashDropper
        """
        self.gene = gene
        self.meta = meta
        self.count = count

    def __call__(self, config_file: str, layer_name: str) -> Callable[[Tile], Optional[Tile]]:
        """Return a tile filter for the given layer.

        When the layer configures empty-(meta)tile detection, return a
        HashDropper bound to that configuration; otherwise return an
        identity filter.
        """
        layer = self.gene._gene.get_config(config_file).config["layers"][layer_name]
        conf_name = "empty_metatile_detection" if self.meta else "empty_tile_detection"
        if conf_name in layer:
            # Reuse the key we just tested instead of re-selecting it with
            # duplicated string literals (kept the lookup consistent with the
            # membership test above).
            empty_tile = layer[conf_name]
            return HashDropper(
                empty_tile["size"],
                empty_tile["hash"],
                store=self.gene._gene.get_tilesstore(),
                queue_store=self.gene._gene.queue_store,
                count=self.count,
            )
        return lambda tile: tile
if __name__ == "__main__":
    # Script entry point: run the tile-generation command line.
    main()
|
{"/tilecloud_chain/copy_.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/format.py"], "/tilecloud_chain/controller.py": ["/tilecloud_chain/__init__.py"], "/tilecloud_chain/views/admin.py": ["/tilecloud_chain/server.py", "/tilecloud_chain/__init__.py", "/tilecloud_chain/controller.py"], "/tilecloud_chain/server.py": ["/tilecloud_chain/security.py", "/tilecloud_chain/__init__.py", "/tilecloud_chain/controller.py"], "/tilecloud_chain/expiretiles.py": ["/tilecloud_chain/__init__.py"], "/tilecloud_chain/tests/test_controller.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_config.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_expiretiles.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/__init__.py": ["/tilecloud_chain/security.py", "/tilecloud_chain/multitilestore.py", "/tilecloud_chain/timedtilestore.py"], "/tilecloud_chain/tests/test_generate.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_serve.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/server.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/internal_mapcache.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/generate.py", "/tilecloud_chain/server.py"], "/tilecloud_chain/tests/test_cost.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/cost.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/format.py"], "/tilecloud_chain/tests/test_copy.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/generate.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/database_logger.py", "/tilecloud_chain/format.py", "/tilecloud_chain/multitilestore.py", "/tilecloud_chain/timedtilestore.py", "/tilecloud_chain/mapnik_.py"], "/tilecloud_chain/tests/test_error.py": 
["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"]}
|
14,627
|
camptocamp/tilecloud-chain
|
refs/heads/master
|
/tilecloud_chain/mapnik_.py
|
import logging
from typing import Any, Callable, List, Optional
from tilecloud import Tile, TileStore
from tilecloud.store.mapnik_ import MapnikTileStore
logger = logging.getLogger(__name__)
class MapnikDropActionTileStore(MapnikTileStore):
    """MapnikTileStore with drop action if the generated tile is empty."""

    def __init__(
        self,
        store: Optional[TileStore] = None,
        queue_store: Optional[TileStore] = None,
        count: Optional[List[Callable[[Optional[Tile]], Any]]] = None,
        **kwargs: Any,
    ) -> None:
        """Initialize.

        store: target tile store whose tiles get deleted on an empty render
        queue_store: queue to acknowledge tiles/metatiles on
        count: callbacks invoked (with None) for every dropped tile
        """
        self.store = store
        self.queue_store = queue_store
        self.count = count if count is not None else []
        MapnikTileStore.__init__(self, **kwargs)

    def get_one(self, tile: Tile) -> Optional[Tile]:
        """See in superclass."""
        result = MapnikTileStore.get_one(self, tile)
        if result is not None:
            return result

        # Mapnik rendered nothing: remove the corresponding tile(s) from the
        # destination store.
        if self.store is not None:
            if tile.tilecoord.n != 1:
                # Metatile: every contained tile is deleted individually.
                for coord in tile.tilecoord:
                    self.store.delete_one(Tile(coord))
            else:
                self.store.delete_one(tile)
        logger.info("The tile %s %s is dropped", tile.tilecoord, tile.formated_metadata)

        if hasattr(tile, "metatile"):
            # Acknowledge the parent metatile only once all of its children
            # have been processed.
            metatile: Tile = tile.metatile
            metatile.elapsed_togenerate -= 1  # type: ignore
            if metatile.elapsed_togenerate == 0 and self.queue_store is not None:  # type: ignore
                self.queue_store.delete_one(metatile)
        elif self.queue_store is not None:
            self.queue_store.delete_one(tile)

        for notify in self.count:
            notify(None)
        return result
|
{"/tilecloud_chain/copy_.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/format.py"], "/tilecloud_chain/controller.py": ["/tilecloud_chain/__init__.py"], "/tilecloud_chain/views/admin.py": ["/tilecloud_chain/server.py", "/tilecloud_chain/__init__.py", "/tilecloud_chain/controller.py"], "/tilecloud_chain/server.py": ["/tilecloud_chain/security.py", "/tilecloud_chain/__init__.py", "/tilecloud_chain/controller.py"], "/tilecloud_chain/expiretiles.py": ["/tilecloud_chain/__init__.py"], "/tilecloud_chain/tests/test_controller.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_config.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_expiretiles.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/__init__.py": ["/tilecloud_chain/security.py", "/tilecloud_chain/multitilestore.py", "/tilecloud_chain/timedtilestore.py"], "/tilecloud_chain/tests/test_generate.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_serve.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/server.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/internal_mapcache.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/generate.py", "/tilecloud_chain/server.py"], "/tilecloud_chain/tests/test_cost.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/cost.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/format.py"], "/tilecloud_chain/tests/test_copy.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/generate.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/database_logger.py", "/tilecloud_chain/format.py", "/tilecloud_chain/multitilestore.py", "/tilecloud_chain/timedtilestore.py", "/tilecloud_chain/mapnik_.py"], "/tilecloud_chain/tests/test_error.py": 
["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"]}
|
14,628
|
camptocamp/tilecloud-chain
|
refs/heads/master
|
/tilecloud_chain/tests/test_error.py
|
import os
from testfixtures import LogCapture
from tilecloud_chain import controller, generate
from tilecloud_chain.tests import CompareCase
class TestError(CompareCase):
    """Check that broken configuration files produce the expected error logs."""

    def setUp(self) -> None:  # noqa
        # Show full diffs on assertion failures.
        self.maxDiff = None

    @classmethod
    def setUpClass(cls):  # noqa
        # The test config files are referenced relative to this directory.
        os.chdir(os.path.dirname(__file__))

    @classmethod
    def tearDownClass(cls):  # noqa
        # Restore the repository root for the other test modules.
        os.chdir(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))

    def test_resolution(self) -> None:
        """A resolution * resolution_scale that is not an integer is rejected."""
        with LogCapture("tilecloud_chain") as log_capture:
            self.run_cmd(
                cmd=".build/venv/bin/generate_controller -c tilegeneration/wrong_resolutions.yaml",
                main_func=controller.main,
                get_error=True,
            )
            log_capture.check(
                ("tilecloud_chain", "ERROR", "The resolution 0.1 * resolution_scale 5 is not an integer."),
            )

    def test_mapnik_grid_meta(self) -> None:
        """A Mapnik/Grid layer combined with metatiles is rejected."""
        with LogCapture("tilecloud_chain") as log_capture:
            self.run_cmd(
                cmd=".build/venv/bin/generate_controller -c tilegeneration/wrong_mapnik_grid_meta.yaml",
                main_func=controller.main,
                get_error=True,
            )
            log_capture.check(
                (
                    "tilecloud_chain",
                    "ERROR",
                    "The layer 'b' is of type Mapnik/Grid, that can't support matatiles.",
                )
            )

    def test_type(self) -> None:
        """Every schema violation in wrong_type.yaml is reported in one message."""
        with LogCapture("tilecloud_chain") as log_capture:
            self.run_cmd(
                cmd=".build/venv/bin/generate_controller -v -c tilegeneration/wrong_type.yaml",
                main_func=controller.main,
                get_error=True,
            )
            log_capture.check(
                (
                    "tilecloud_chain",
                    "ERROR",
                    """The config file is invalid:
-- tilegeneration/wrong_type.yaml:10:10 grids.swissgrid_2.srs: {} is not of type 'string' (rule: properties.grids.additionalProperties.properties.srs.type)
-- tilegeneration/wrong_type.yaml:12:5 grids.swissgrid_3.srs: 'epsg:21781' does not match '^EPSG:[0-9]+$' (rule: properties.grids.additionalProperties.properties.srs.pattern)
-- tilegeneration/wrong_type.yaml:12:5 grids.swissgrid_3: 'bbox' is a required property (rule: properties.grids.additionalProperties.required)
-- tilegeneration/wrong_type.yaml:12:5 grids.swissgrid_3: 'resolutions' is a required property (rule: properties.grids.additionalProperties.required)
-- tilegeneration/wrong_type.yaml:14:5 grids.swissgrid_4.srs: 'epsg21781' does not match '^EPSG:[0-9]+$' (rule: properties.grids.additionalProperties.properties.srs.pattern)
-- tilegeneration/wrong_type.yaml:14:5 grids.swissgrid_4: 'bbox' is a required property (rule: properties.grids.additionalProperties.required)
-- tilegeneration/wrong_type.yaml:14:5 grids.swissgrid_4: 'resolutions' is a required property (rule: properties.grids.additionalProperties.required)
-- tilegeneration/wrong_type.yaml:15:16 grids.swissgrid_5: 'bbox' is a required property (rule: properties.grids.additionalProperties.required)
-- tilegeneration/wrong_type.yaml:15:16 grids.swissgrid_5: 'resolutions' is a required property (rule: properties.grids.additionalProperties.required)
-- tilegeneration/wrong_type.yaml:15:16 grids.swissgrid_5: 'srs' is a required property (rule: properties.grids.additionalProperties.required)
-- tilegeneration/wrong_type.yaml:17:15 grids.swissgrid!: 'bbox' is a required property (rule: properties.grids.additionalProperties.required)
-- tilegeneration/wrong_type.yaml:17:15 grids.swissgrid!: 'resolutions' is a required property (rule: properties.grids.additionalProperties.required)
-- tilegeneration/wrong_type.yaml:17:15 grids.swissgrid!: 'srs' is a required property (rule: properties.grids.additionalProperties.required)
-- tilegeneration/wrong_type.yaml:22:3 layers: 'hi!' does not match '^[a-zA-Z0-9_\\\\-~\\\\.]+$' (rule: properties.layers.propertyNames.pattern)
-- tilegeneration/wrong_type.yaml:23:5 layers.hi!.wmts_style: 'yo!' does not match '^[a-zA-Z0-9_\\\\-\\\\+~\\\\.]+$' (rule: properties.layers.additionalProperties.anyOf.0.properties.wmts_style.pattern)
-- tilegeneration/wrong_type.yaml:23:5 layers.hi!.wmts_style: 'yo!' does not match '^[a-zA-Z0-9_\\\\-\\\\+~\\\\.]+$' (rule: properties.layers.additionalProperties.anyOf.1.properties.wmts_style.pattern)
-- tilegeneration/wrong_type.yaml:23:5 layers.hi!: 'extension' is a required property (rule: properties.layers.additionalProperties.anyOf.0.required)
-- tilegeneration/wrong_type.yaml:23:5 layers.hi!: 'extension' is a required property (rule: properties.layers.additionalProperties.anyOf.1.required)
-- tilegeneration/wrong_type.yaml:23:5 layers.hi!: 'grid' is a required property (rule: properties.layers.additionalProperties.anyOf.0.required)
-- tilegeneration/wrong_type.yaml:23:5 layers.hi!: 'grid' is a required property (rule: properties.layers.additionalProperties.anyOf.1.required)
-- tilegeneration/wrong_type.yaml:23:5 layers.hi!: 'layers' is a required property (rule: properties.layers.additionalProperties.anyOf.0.required)
-- tilegeneration/wrong_type.yaml:23:5 layers.hi!: 'mime_type' is a required property (rule: properties.layers.additionalProperties.anyOf.0.required)
-- tilegeneration/wrong_type.yaml:23:5 layers.hi!: 'mime_type' is a required property (rule: properties.layers.additionalProperties.anyOf.1.required)
-- tilegeneration/wrong_type.yaml:23:5 layers.hi!: 'url' is a required property (rule: properties.layers.additionalProperties.anyOf.0.required)
-- tilegeneration/wrong_type.yaml:25:9 layers.hi!.dimensions.0.default: '2010!' does not match '^[a-zA-Z0-9_\\\\-\\\\+~\\\\.]+$' (rule: properties.layers.additionalProperties.anyOf.0.properties.dimensions.items.properties.default.pattern)
-- tilegeneration/wrong_type.yaml:25:9 layers.hi!.dimensions.0.default: '2010!' does not match '^[a-zA-Z0-9_\\\\-\\\\+~\\\\.]+$' (rule: properties.layers.additionalProperties.anyOf.1.properties.dimensions.items.properties.default.pattern)
-- tilegeneration/wrong_type.yaml:25:9 layers.hi!.dimensions.0.name: 'DATE!' does not match '^(?!(?i)(SERVICE|VERSION|REQUEST|LAYERS|STYLES|SRS|CRS|BBOX|WIDTH|HEIGHT|FORMAT|BGCOLOR|TRANSPARENT|SLD|EXCEPTIONS|SALT))[a-z0-9_\\\\-~\\\\.]+$' (rule: properties.layers.additionalProperties.anyOf.0.properties.dimensions.items.properties.name.pattern)
-- tilegeneration/wrong_type.yaml:25:9 layers.hi!.dimensions.0.name: 'DATE!' does not match '^(?!(?i)(SERVICE|VERSION|REQUEST|LAYERS|STYLES|SRS|CRS|BBOX|WIDTH|HEIGHT|FORMAT|BGCOLOR|TRANSPARENT|SLD|EXCEPTIONS|SALT))[a-z0-9_\\\\-~\\\\.]+$' (rule: properties.layers.additionalProperties.anyOf.1.properties.dimensions.items.properties.name.pattern)
-- tilegeneration/wrong_type.yaml:27:19 layers.hi!.dimensions.0.generate.0: '2012!' does not match '^[a-zA-Z0-9_\\\\-\\\\+~\\\\.]+$' (rule: properties.layers.additionalProperties.anyOf.0.properties.dimensions.items.properties.generate.items.pattern)
-- tilegeneration/wrong_type.yaml:27:19 layers.hi!.dimensions.0.generate.0: '2012!' does not match '^[a-zA-Z0-9_\\\\-\\\\+~\\\\.]+$' (rule: properties.layers.additionalProperties.anyOf.1.properties.dimensions.items.properties.generate.items.pattern)
-- tilegeneration/wrong_type.yaml:28:17 layers.hi!.dimensions.0.values.0: '2005!' does not match '^[a-zA-Z0-9_\\\\-\\\\+~\\\\.]+$' (rule: properties.layers.additionalProperties.anyOf.0.properties.dimensions.items.properties.values.items.pattern)
-- tilegeneration/wrong_type.yaml:28:17 layers.hi!.dimensions.0.values.0: '2005!' does not match '^[a-zA-Z0-9_\\\\-\\\\+~\\\\.]+$' (rule: properties.layers.additionalProperties.anyOf.1.properties.dimensions.items.properties.values.items.pattern)
-- tilegeneration/wrong_type.yaml:28:17 layers.hi!.dimensions.0.values.1: '2010!' does not match '^[a-zA-Z0-9_\\\\-\\\\+~\\\\.]+$' (rule: properties.layers.additionalProperties.anyOf.0.properties.dimensions.items.properties.values.items.pattern)
-- tilegeneration/wrong_type.yaml:28:17 layers.hi!.dimensions.0.values.1: '2010!' does not match '^[a-zA-Z0-9_\\\\-\\\\+~\\\\.]+$' (rule: properties.layers.additionalProperties.anyOf.1.properties.dimensions.items.properties.values.items.pattern)
-- tilegeneration/wrong_type.yaml:28:17 layers.hi!.dimensions.0.values.2: '2012!' does not match '^[a-zA-Z0-9_\\\\-\\\\+~\\\\.]+$' (rule: properties.layers.additionalProperties.anyOf.0.properties.dimensions.items.properties.values.items.pattern)
-- tilegeneration/wrong_type.yaml:28:17 layers.hi!.dimensions.0.values.2: '2012!' does not match '^[a-zA-Z0-9_\\\\-\\\\+~\\\\.]+$' (rule: properties.layers.additionalProperties.anyOf.1.properties.dimensions.items.properties.values.items.pattern)
-- tilegeneration/wrong_type.yaml:29:9 layers.hi!.dimensions.1.default: 1 is not of type 'string' (rule: properties.layers.additionalProperties.anyOf.0.properties.dimensions.items.properties.default.type)
-- tilegeneration/wrong_type.yaml:29:9 layers.hi!.dimensions.1.default: 1 is not of type 'string' (rule: properties.layers.additionalProperties.anyOf.1.properties.dimensions.items.properties.default.type)
-- tilegeneration/wrong_type.yaml:2:3 grids.swissgrid_6: None is not of type 'object' (rule: properties.grids.additionalProperties.type)
-- tilegeneration/wrong_type.yaml:2:3 grids: 'swissgrid!' does not match '^[a-zA-Z0-9_\\\\-~\\\\.]+$' (rule: properties.grids.propertyNames.pattern)
-- tilegeneration/wrong_type.yaml:31:19 layers.hi!.dimensions.1.generate.0: 1 is not of type 'string' (rule: properties.layers.additionalProperties.anyOf.0.properties.dimensions.items.properties.generate.items.type)
-- tilegeneration/wrong_type.yaml:31:19 layers.hi!.dimensions.1.generate.0: 1 is not of type 'string' (rule: properties.layers.additionalProperties.anyOf.1.properties.dimensions.items.properties.generate.items.type)
-- tilegeneration/wrong_type.yaml:32:17 layers.hi!.dimensions.1.values.0: 1 is not of type 'string' (rule: properties.layers.additionalProperties.anyOf.0.properties.dimensions.items.properties.values.items.type)
-- tilegeneration/wrong_type.yaml:32:17 layers.hi!.dimensions.1.values.0: 1 is not of type 'string' (rule: properties.layers.additionalProperties.anyOf.1.properties.dimensions.items.properties.values.items.type)
-- tilegeneration/wrong_type.yaml:3:5 grids.swissgrid_1.resolution_scale: 5.5 is not of type 'integer' (rule: properties.grids.additionalProperties.properties.resolution_scale.type)
-- tilegeneration/wrong_type.yaml:5:11 grids.swissgrid_1.bbox.0: 'a' is not of type 'number' (rule: properties.grids.additionalProperties.properties.bbox.items.type)
-- tilegeneration/wrong_type.yaml:5:11 grids.swissgrid_1.bbox.1: 'b' is not of type 'number' (rule: properties.grids.additionalProperties.properties.bbox.items.type)
-- tilegeneration/wrong_type.yaml:5:11 grids.swissgrid_1.bbox.2: 'c' is not of type 'number' (rule: properties.grids.additionalProperties.properties.bbox.items.type)
-- tilegeneration/wrong_type.yaml:6:10 grids.swissgrid_1.srs: ['EPSG:21781'] is not of type 'string' (rule: properties.grids.additionalProperties.properties.srs.type)""",  # noqa
                )
            )

    def test_zoom_errors(self) -> None:
        """Out-of-range zoom levels only warn; generation continues."""
        with LogCapture("tilecloud_chain") as log_capture:
            self.run_cmd(
                cmd=".build/venv/bin/generate_tiles -c tilegeneration/test-nosns.yaml -l point --zoom 4,10",
                main_func=generate.main,
            )
            log_capture.check_present(
                (
                    "tilecloud_chain",
                    "WARNING",
                    "zoom 10 is greater than the maximum zoom 4 of grid swissgrid_5 of layer point, ignored.",
                ),
                (
                    "tilecloud_chain",
                    "WARNING",
                    "zoom 4 corresponds to resolution 5 "
                    "is smaller than the 'min_resolution_seed' 10 of layer point, ignored.",
                ),
            )

    def test_wrong_srs_auth(self) -> None:
        """An SRS with a non-EPSG authority is rejected by the schema."""
        with LogCapture("tilecloud_chain") as log_capture:
            self.run_cmd(
                cmd=".build/venv/bin/generate_controller -c tilegeneration/wrong_srs_auth.yaml",
                main_func=controller.main,
                get_error=True,
            )
            log_capture.check(
                (
                    "tilecloud_chain",
                    "ERROR",
                    """The config file is invalid:
-- tilegeneration/wrong_srs_auth.yaml:3:5 grids.swissgrid_01.srs: 'toto:21781' does not match '^EPSG:[0-9]+$' (rule: properties.grids.additionalProperties.properties.srs.pattern)""",  # noqa
                )
            )

    def test_wrong_srs_id(self) -> None:
        """An SRS with a non-numeric identifier is rejected by the schema."""
        with LogCapture("tilecloud_chain") as log_capture:
            self.run_cmd(
                cmd=".build/venv/bin/generate_controller -c tilegeneration/wrong_srs_id.yaml",
                main_func=controller.main,
                get_error=True,
            )
            log_capture.check(
                (
                    "tilecloud_chain",
                    "ERROR",
                    """The config file is invalid:
-- tilegeneration/wrong_srs_id.yaml:3:5 grids.swissgrid_01.srs: 'EPSG:21781a' does not match '^EPSG:[0-9]+$' (rule: properties.grids.additionalProperties.properties.srs.pattern)""",  # noqa
                )
            )

    def test_wrong_srs(self) -> None:
        """An SRS missing the 'EPSG:' separator is rejected by the schema."""
        with LogCapture("tilecloud_chain") as log_capture:
            self.run_cmd(
                cmd=".build/venv/bin/generate_controller -c tilegeneration/wrong_srs.yaml",
                main_func=controller.main,
                get_error=True,
            )
            log_capture.check(
                (
                    "tilecloud_chain",
                    "ERROR",
                    """The config file is invalid:
-- tilegeneration/wrong_srs.yaml:3:5 grids.swissgrid_01.srs: 'EPSG21781' does not match '^EPSG:[0-9]+$' (rule: properties.grids.additionalProperties.properties.srs.pattern)""",
                )
            )

    def test_wrong_map(self) -> None:
        """A scalar where an object is expected reports every resulting violation."""
        with LogCapture("tilecloud_chain") as log_capture:
            self.run_cmd(
                cmd=".build/venv/bin/generate_controller -c tilegeneration/wrong_map.yaml",
                main_func=controller.main,
                get_error=True,
            )
            log_capture.check(
                (
                    "tilecloud_chain",
                    "ERROR",
                    """The config file is invalid:
-- tilegeneration/wrong_map.yaml:3:5 layers.test.empty_tile_detection: 'test' is not of type 'object' (rule: properties.layers.additionalProperties.anyOf.0.properties.empty_tile_detection.type)
-- tilegeneration/wrong_map.yaml:3:5 layers.test.empty_tile_detection: 'test' is not of type 'object' (rule: properties.layers.additionalProperties.anyOf.1.properties.empty_tile_detection.type)
-- tilegeneration/wrong_map.yaml:3:5 layers.test: 'extension' is a required property (rule: properties.layers.additionalProperties.anyOf.0.required)
-- tilegeneration/wrong_map.yaml:3:5 layers.test: 'extension' is a required property (rule: properties.layers.additionalProperties.anyOf.1.required)
-- tilegeneration/wrong_map.yaml:3:5 layers.test: 'grid' is a required property (rule: properties.layers.additionalProperties.anyOf.0.required)
-- tilegeneration/wrong_map.yaml:3:5 layers.test: 'grid' is a required property (rule: properties.layers.additionalProperties.anyOf.1.required)
-- tilegeneration/wrong_map.yaml:3:5 layers.test: 'layers' is a required property (rule: properties.layers.additionalProperties.anyOf.0.required)
-- tilegeneration/wrong_map.yaml:3:5 layers.test: 'mime_type' is a required property (rule: properties.layers.additionalProperties.anyOf.0.required)
-- tilegeneration/wrong_map.yaml:3:5 layers.test: 'mime_type' is a required property (rule: properties.layers.additionalProperties.anyOf.1.required)
-- tilegeneration/wrong_map.yaml:3:5 layers.test: 'url' is a required property (rule: properties.layers.additionalProperties.anyOf.0.required)
-- tilegeneration/wrong_map.yaml:3:5 layers.test: 'wmts_style' is a required property (rule: properties.layers.additionalProperties.anyOf.0.required)
-- tilegeneration/wrong_map.yaml:3:5 layers.test: 'wmts_style' is a required property (rule: properties.layers.additionalProperties.anyOf.1.required)""",
                )
            )

    def test_wrong_sequence(self) -> None:
        """A scalar where a list is expected reports every resulting violation."""
        with LogCapture("tilecloud_chain") as log_capture:
            self.run_cmd(
                cmd=".build/venv/bin/generate_controller -c tilegeneration/wrong_sequence.yaml",
                main_func=controller.main,
                get_error=True,
            )
            log_capture.check(
                (
                    "tilecloud_chain",
                    "ERROR",
                    """The config file is invalid:
-- tilegeneration/wrong_sequence.yaml:3:5 grids.test.resolutions: 'test' is not of type 'array' (rule: properties.grids.additionalProperties.properties.resolutions.type)
-- tilegeneration/wrong_sequence.yaml:3:5 grids.test: 'bbox' is a required property (rule: properties.grids.additionalProperties.required)
-- tilegeneration/wrong_sequence.yaml:3:5 grids.test: 'srs' is a required property (rule: properties.grids.additionalProperties.required)""",
                )
            )
|
{"/tilecloud_chain/copy_.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/format.py"], "/tilecloud_chain/controller.py": ["/tilecloud_chain/__init__.py"], "/tilecloud_chain/views/admin.py": ["/tilecloud_chain/server.py", "/tilecloud_chain/__init__.py", "/tilecloud_chain/controller.py"], "/tilecloud_chain/server.py": ["/tilecloud_chain/security.py", "/tilecloud_chain/__init__.py", "/tilecloud_chain/controller.py"], "/tilecloud_chain/expiretiles.py": ["/tilecloud_chain/__init__.py"], "/tilecloud_chain/tests/test_controller.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_config.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_expiretiles.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/__init__.py": ["/tilecloud_chain/security.py", "/tilecloud_chain/multitilestore.py", "/tilecloud_chain/timedtilestore.py"], "/tilecloud_chain/tests/test_generate.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/tests/test_serve.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/server.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/internal_mapcache.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/generate.py", "/tilecloud_chain/server.py"], "/tilecloud_chain/tests/test_cost.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/cost.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/format.py"], "/tilecloud_chain/tests/test_copy.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"], "/tilecloud_chain/generate.py": ["/tilecloud_chain/__init__.py", "/tilecloud_chain/database_logger.py", "/tilecloud_chain/format.py", "/tilecloud_chain/multitilestore.py", "/tilecloud_chain/timedtilestore.py", "/tilecloud_chain/mapnik_.py"], "/tilecloud_chain/tests/test_error.py": 
["/tilecloud_chain/__init__.py", "/tilecloud_chain/tests/__init__.py"]}
|
14,650
|
sanj909/CollectiWise
|
refs/heads/main
|
/trainPlayground.py
|
# Playground script: download S&P 500 daily closes, wavelet-denoise and
# normalise them, then evaluate a previously saved attention-LSTM checkpoint
# on the held-out tail of the series.
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import pywt
import time
import os
import tensorflow as tf

#Importing stock data
import yfinance as yf
from datetime import date,datetime,timedelta

ticker = '^GSPC'
first_day = datetime(2000, 1, 3)
last_day = datetime(2019, 7, 1)
# Daily bars over ~19.5 years; `history` returns a DataFrame indexed by date.
data = yf.Ticker(ticker).history(interval = '1d', start=first_day, end=last_day)
data.reset_index(inplace=True)

from models import *
from dataUtils import *
from waveletDenoising import optDenoise, normalise

close_data = data.Close.to_numpy()
# Denoise before normalising so the scaler sees the cleaned series.
close_data = optDenoise(close_data)
close_data = normalise(close_data, 0, 1) #Normalise to N(0, 1)
print(close_data.shape)

# Each sample is a window of `unroll_length` consecutive prices.
unroll_length = 50
# Last 10% of the series is held out for testing; horizon is 5 steps ahead.
X_train, X_test, y_train, y_test = train_test_split_lstm(close_data, 5, int(close_data.shape[0] * 0.1))
# expand_dims adds the trailing feature axis expected by the LSTM input
# (samples, unroll_length, 1).
X_train = np.expand_dims(unroll(X_train, unroll_length), axis = 2)
y_train = np.expand_dims(unroll(y_train, unroll_length), axis = 2)
X_test = np.expand_dims(unroll(X_test, unroll_length), axis = 2)
y_test = np.expand_dims(unroll(y_test, unroll_length), axis = 2)
print(int(close_data.shape[0] * 0.1))
print(X_train.shape)
print(y_train.shape)
print(X_test.shape)
print(y_test.shape)

#model = build_basic_lstm_model(lstm_input_dim = X_train.shape[-1], lstm_output_dim = unroll_length, dense_output_dim = y_train.shape[-1], return_sequences=True)
model = build_att_lstm_model(lstm_input_dim = X_train.shape[-1], lstm_output_dim = unroll_length, dense_output_dim = y_train.shape[-1], return_sequences=True)

# Compile the model
start = time.time()
# NOTE(review): lr=0.1 is unusually high for Adam and 'accuracy' is not a
# meaningful metric for regression — presumably kept for model.evaluate's
# return shape; confirm before reuse.
opt = tf.keras.optimizers.Adam(learning_rate = 0.1)
model.compile(loss='mean_squared_error', optimizer = opt, metrics=['accuracy']) #metrics argument is necessary for model.evaluate to return accuracy
print('compilation time : ', time.time() - start)

# Create a callback that saves the model's weights
checkpoint_path = "GHRepos/CollectiWise/model_checkpoints/cp.ckpt"
checkpoint_dir = os.path.dirname(checkpoint_path)
cp_callback = tf.keras.callbacks.ModelCheckpoint(filepath=checkpoint_path, save_weights_only=True, verbose=1)
#model.fit(X_train, y_train, epochs = 5, validation_split = 0.05, callbacks=[cp_callback])

# Load saved weights
model.load_weights(checkpoint_path) #All we have to do before this line is to create and compile the model
results = model.evaluate(X_test, y_test, verbose=1)
print("test loss, test acc:", results)

predictions = model.predict(X_test[(len(X_test)-1):]) #Predict using the last row of X_test (afaik, the last row is the 50 most recent prices)
print("predictions shape:", predictions.shape) #Model prediction of the 50 next prices
print(predictions)
|
{"/trainPlayground.py": ["/models.py", "/dataUtils.py", "/waveletDenoising.py"], "/lstm_train_single.py": ["/dataUtils.py", "/waveletDenoising.py", "/models.py"], "/GAN/GANmodel.py": ["/models.py", "/dataUtils.py", "/waveletDenoising.py"], "/wdDemo.py": ["/waveletDenoising.py"]}
|
14,651
|
sanj909/CollectiWise
|
refs/heads/main
|
/models.py
|
import tensorflow as tf
import tensorflow.keras.layers as layers
import keras as og_keras
from keras_self_attention import SeqSelfAttention
# pip install keras_self_attention
def build_basic_lstm_model(lstm_input_dim, lstm_output_dim, dense_output_dim, return_sequences):
    """Build a plain LSTM -> Dense model with a linear output activation.

    lstm_input_dim: number of input features per timestep
    lstm_output_dim: LSTM hidden units
    dense_output_dim: output features per timestep
    return_sequences: forwarded to the LSTM layer
    """
    model = tf.keras.models.Sequential()
    model.add(
        layers.LSTM(
            input_shape=(None, lstm_input_dim),
            units=lstm_output_dim,
            return_sequences=return_sequences,
        )
    )
    model.add(layers.Dense(units=dense_output_dim))
    model.add(layers.Activation('linear'))
    return model
def build_att_lstm_model(lstm_input_dim, lstm_output_dim, dense_output_dim, return_sequences):
    """Build an LSTM model with a self-attention layer before the Dense head.

    Same contract as ``build_basic_lstm_model`` plus a SeqSelfAttention
    layer (tanh attention activation) after the LSTM.
    """
    model = tf.keras.models.Sequential()
    model.add(
        layers.LSTM(
            input_shape=(None, lstm_input_dim),
            units=lstm_output_dim,
            return_sequences=return_sequences,
        )
    )
    model.add(SeqSelfAttention(attention_activation='tanh'))
    model.add(layers.Dense(units=dense_output_dim))
    model.add(layers.Activation('linear'))
    return model
def lstm(learning_rate, window_length, n_features):
    """Build a small single-layer LSTM regressor.

    NOTE(review): ``learning_rate`` is accepted but never used inside this
    function — presumably the optimizer is configured by the caller; confirm
    whether the parameter can be dropped.
    """
    model = tf.keras.Sequential()
    model.add(layers.LSTM(units=25, activation='relu', input_shape=(window_length, n_features)))
    # One linear output per input feature.
    model.add(layers.Dense(units=n_features, activation='linear'))
    return model
|
{"/trainPlayground.py": ["/models.py", "/dataUtils.py", "/waveletDenoising.py"], "/lstm_train_single.py": ["/dataUtils.py", "/waveletDenoising.py", "/models.py"], "/GAN/GANmodel.py": ["/models.py", "/dataUtils.py", "/waveletDenoising.py"], "/wdDemo.py": ["/waveletDenoising.py"]}
|
14,652
|
sanj909/CollectiWise
|
refs/heads/main
|
/dataUtils.py
|
import numpy as np
from sklearn.preprocessing import StandardScaler
def split_df_by_asset(df, drop_label_column = True, num_columns_per_asset = 3):
    """Slice a wide dataframe into one dataframe per asset.

    Consecutive groups of ``num_columns_per_asset`` columns are assumed to
    belong to the same asset. When ``drop_label_column`` is true the 'label'
    column is removed first (the input dataframe is left untouched).
    """
    if drop_label_column:
        df = df.drop(columns = ['label'])
    columns = df.columns
    return [
        df[columns[start : start + num_columns_per_asset]]
        for start in range(0, len(columns), num_columns_per_asset)
    ]
def train_test_split_lstm(stocks, prediction_time=1, test_data_size=450, unroll_length=50):
    """
    Split the data set into training and testing feature for Long Short Term Memory Model

    :param stocks: whole data set containing ['Open','Close','Volume'] features
    :param prediction_time: no of days
    :param test_data_size: size of test data to be used
    :param unroll_length: how long a window should be used for train test split
    :return: X_train : training sets of feature
    :return: X_test : test sets of feature
    :return: y_train: training sets of label
    :return: y_test: test sets of label
    """
    # Reserve enough rows at the end for the test windows plus one target.
    cut = test_data_size + unroll_length + 1

    # Targets are the same series shifted forward by `prediction_time`.
    x_train = stocks[: -prediction_time - cut]
    y_train = stocks[prediction_time:-cut]

    x_test = stocks[-cut:-prediction_time]
    y_test = stocks[prediction_time - cut :]

    return x_train, x_test, y_train, y_test
def unroll(data, sequence_length=24):
    """
    use different windows for testing and training to stop from leak of information in the data

    :param data: data set to be used for unrolling
    :param sequence_length: window length
    :return: data sets with different window.
    """
    windows = [
        data[start : start + sequence_length]
        for start in range(len(data) - sequence_length)
    ]
    return np.asarray(windows)
def standardise_df(df):
    """Standardise every column of ``df`` in place to zero mean / unit variance.

    Returns the (mutated) dataframe and the list of fitted StandardScaler
    objects, one per column in column order, so callers can
    ``inverse_transform`` later.
    """
    scalers = []  # keep the fitted scalers so we can inverse_transform later
    for column in df.columns:
        values = np.array(df[column]).reshape(-1, 1)
        scaler = StandardScaler()
        scaler.fit(values)
        df[column] = scaler.transform(values)
        scalers.append(scaler)
    return df, scalers
def reroll(array3d, unroll_length):
    """Collapse a 3-D windowed array back to 2-D.

    Keeps the last row (index ``unroll_length - 1``) of every window and
    stacks them. For a single window the historical behaviour is preserved:
    a 1-D row is returned rather than a (1, k) array.

    :param array3d: sequence of windows, each indexable to depth 2
    :param unroll_length: window length used by ``unroll``
    :return: 2-D array of last rows (1-D if there is only one window)
    """
    last_rows = [window[unroll_length - 1] for window in array3d]
    if len(last_rows) == 1:
        # Match the original single-window result (no stacking -> 1-D).
        return last_rows[0]
    # One vstack over all rows instead of the previous O(n^2) loop that
    # re-stacked the growing accumulator on every iteration.
    return np.vstack(last_rows)
|
{"/trainPlayground.py": ["/models.py", "/dataUtils.py", "/waveletDenoising.py"], "/lstm_train_single.py": ["/dataUtils.py", "/waveletDenoising.py", "/models.py"], "/GAN/GANmodel.py": ["/models.py", "/dataUtils.py", "/waveletDenoising.py"], "/wdDemo.py": ["/waveletDenoising.py"]}
|
14,653
|
sanj909/CollectiWise
|
refs/heads/main
|
/lstm_train_single.py
|
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import pywt
import time
import os
import tensorflow as tf
import argparse
from dataUtils import *
from waveletDenoising import *
from models import *
"""
RESULTS WE NEED:
1. single asset result: lstm vs wlstm vs wlstm+a
2. one model trained on all asset results(to test transfer learning): lstm vs wlstm vs wlstm+a
Metrics: mse mae, rmse, R^2
"""
def create_cp_name(path, args):
    """Build a checkpoint filename that encodes every parsed CLI argument.

    Each ``key``/``value`` pair from ``args`` is appended as ``_key<value>``
    to the base name 'model_cp', and '.ckpt' is added at the end.
    """
    parts = [f"{key}{value}" for key, value in vars(args).items()]
    filename = "_".join(["model_cp", *parts]) + ".ckpt"
    return os.path.join(path, filename)
def main(args):
    """Train and evaluate one LSTM variant on a single asset's feature series.

    Pipeline: load the formatted feature CSV -> pick one asset -> standardise
    (and, for the wavelet variants, denoise) -> window the data with unroll()
    -> train the selected model with checkpointing -> report MSE/MAE/RMSE/R^2
    on the held-out test windows.

    :param args: argparse.Namespace with seed, max_iter, unroll_length, lr,
        model_type and stock_idx (see the __main__ parser below)
    """
    # Set seed for reproducibility across numpy and tensorflow.
    np.random.seed(args.seed)
    tf.random.set_seed(args.seed)
    DATA_PATH = "formatted_features.csv"
    MODEL_PATH = "model_checkpoints"
    if(not os.path.isdir(MODEL_PATH)):
        os.mkdir(MODEL_PATH)
    # Checkpoint name encodes every CLI flag so runs don't overwrite each other.
    checkpoint_save_name = create_cp_name(MODEL_PATH, args)
    print(checkpoint_save_name)
    df = pd.read_csv(DATA_PATH)
    asset_dfs = split_df_by_asset(df)
    print("Number of assets: ", len(asset_dfs))
    #dataframe for a single asset. Here, XBTUSD
    test_df = asset_dfs[args.stock_idx]
    print("Shape of test_df: ", test_df.shape)
    #dataframe of standardised data
    cleaned_data, scalers = standardise_df(test_df)
    #dataframe of standardised and denoised data
    # Denoise only for the wavelet model types ("wlstm", "wlstm_a").
    if("w" in args.model_type):
        cleaned_data = denoise_df(cleaned_data)
    #–––––––––––––––––––––––––––––––––––––––––––––––––––––––––
    #The model will use this many of the most recent rows to make a prediction
    unroll_length = args.unroll_length
    #The prediction will be this many timesteps in the future. If horizon=1, we're predicting data from the next timestep.
    horizon = 1
    #percentage of total data to be set aside for testing
    train_test_split = 0.1
    X_train, X_test, y_train, y_test = train_test_split_lstm(cleaned_data, horizon, int(cleaned_data.shape[0] * train_test_split))
    #If X is rows 0 to 1000 of cleaned_data, then y is rows horizon to 1000+horizon of cleaned_data.
    #We want to use unroll_length rows to predict the average price, volume and standard deviation in the next row (since horizon=1). So:
    #Shape of X data should be in the form (samples, unroll_length, features)
    #Shape of y data should be in the form (samples, features)?
    X_train = unroll(X_train, unroll_length)
    X_test = unroll(X_test, unroll_length)
    #y_train = y_train[unroll_length:]
    #y_test = y_test[unroll_length:]
    y_train = unroll(y_train, unroll_length)
    y_test = unroll(y_test, unroll_length)
    # Only keep price
    #y_train = y_train[:, :, 0]
    #y_test = y_test[:, :, 0]
    print(X_train.shape)
    print(y_train.shape)
    print(X_test.shape)
    print(y_test.shape)
    #–––––––––––––––––––––––––––––––––––––––––––––––––––––––––
    print("lstm_input_dim: ",X_train.shape[-1])
    print("lstm_output_dim: ", unroll_length)
    print("dense_output_dim :", y_train.shape[-1])
    # "lstm" and "wlstm" share one architecture; the difference is only the
    # denoising step above.  "wlstm_a" adds attention.
    if(args.model_type == "lstm" or args.model_type == "wlstm"):
        model = build_basic_lstm_model(lstm_input_dim = X_train.shape[-1], lstm_output_dim = unroll_length, dense_output_dim = y_train.shape[-1], return_sequences=True)
    elif(args.model_type == "wlstm_a"):
        model = build_att_lstm_model(lstm_input_dim = X_train.shape[-1], lstm_output_dim = unroll_length, dense_output_dim = y_train.shape[-1], return_sequences=True)
    else:
        print("This should not happen")
        exit()
    #model = lstm(0.01, X_train.shape[1], X_train.shape[2]) #learning rate, input dimension 1, input dimension 2
    # Compile the model
    start = time.time()
    opt = tf.keras.optimizers.Adam(learning_rate = args.lr)
    model.compile(loss='mean_squared_error', optimizer = opt, metrics=['mse', 'mae'])
    print('compilation time : ', time.time() - start)
    # Create a callback that saves the model's weights
    #checkpoint_path = "/Users/Sanjit/Repos/CollectiWise/" + checkpoint_save_name
    checkpoint_path = os.path.join(os.getcwd(), checkpoint_save_name)
    cp_callback = tf.keras.callbacks.ModelCheckpoint(filepath=checkpoint_path, save_weights_only=True, verbose=2)
    model.fit(X_train, y_train, epochs = args.max_iter, validation_split = 0.05, callbacks=[cp_callback], verbose=2)
    #model.fit(X_train, y_train, epochs = args.max_iter, validation_split = 0.05, verbose=2)
    # Load saved weights
    model.load_weights(checkpoint_path) #All we have to do before this line is to create and compile the model
    results = model.evaluate(X_test, y_test, verbose=1)
    print("test loss, mse, mae:", results)
    predictions = model.predict(X_test)
    print("predictions shape:", predictions.shape)
    #–––––––––––––––––––––––––––––––––––––––––––––––––––––––––
    #Convert predictions and target back to 2D arrays, i.e. undo the effect of unroll()
    predictions = reroll(predictions, unroll_length)
    target = reroll(y_test, unroll_length)
    #Compute metrics (per feature column, aggregated over samples)
    mse = np.power((predictions - target), 2).sum(axis=0) / len(predictions)
    mae = np.abs(predictions - target).sum(axis=0)/len(predictions)
    rmse = np.sqrt(mse)
    # ybar tiles the column means so TSS can be computed elementwise.
    ybar = np.tile(target.sum(axis=0)/len(target), (len(target), 1))
    tss = np.power(target - ybar, 2).sum(axis=0)
    r2 = 1 - (len(predictions)*mse / tss)
    print("MSE: ", mse)
    print("MAE: ", mae)
    print("RMSE: ", rmse)
    print("R-squared: ", r2)
if __name__ == "__main__":
    # CLI entry point: parse run settings and hand off to main().
    parser = argparse.ArgumentParser(description='LSTM Single')
    parser.add_argument('--seed', default=1, type=int, help='random seed')
    # Bug fix: this was declared type=float, but the value is passed to
    # model.fit(epochs=...), which requires an integer epoch count.
    parser.add_argument('--max_iter', default=100, type=int, help='maximum training iteration')
    parser.add_argument('--unroll_length', default=50, type=int, help='unroll length')
    parser.add_argument('--lr', default=0.01, type=float, help='learning rate')
    parser.add_argument('--model_type', default="wlstm_a", type=str, help='model type (lstm, wlstm, wlstm_a)')
    parser.add_argument('--stock_idx', default=0, type=int, help='stock index in dataframe')
    args = parser.parse_args()
    main(args)
'''
1.
Why are our calculated values of mse and mae different from those produced by model.evaluate?
2.
For each 2hr period, i.e. each row in the dataframe:
How many standard deviations above the average price is the high price?
How many standard deviations below the average price is the low price?
Estimate this from OHLC data.
Then we can give estimate high and low prices in the next 2hr period using predictions from the model.
'''
|
{"/trainPlayground.py": ["/models.py", "/dataUtils.py", "/waveletDenoising.py"], "/lstm_train_single.py": ["/dataUtils.py", "/waveletDenoising.py", "/models.py"], "/GAN/GANmodel.py": ["/models.py", "/dataUtils.py", "/waveletDenoising.py"], "/wdDemo.py": ["/waveletDenoising.py"]}
|
14,654
|
sanj909/CollectiWise
|
refs/heads/main
|
/GAN/GANmodel.py
|
import tensorflow as tf
import tensorflow.keras.layers as layers
import keras as og_keras
from keras_self_attention import SeqSelfAttention
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
# BTC.csv: load daily close prices; OHLC/volume columns are dropped up front.
# NOTE(review): hard-coded absolute path — assumes this exact machine; confirm before reuse.
data = pd.read_csv('/Users/Sanjit/Google Drive/CollectiWise/Data/BTC.csv').drop(columns=['open', 'high', 'low', 'volume'])
#Data preprocessing
#Download the files models.py, dataUtils.py and waveletDenoising.py from LSTM branch and run this script in the same folder as those files.
from models import *
from dataUtils import *
from waveletDenoising import normalise
# Normalise close prices, then split: 5-step-ahead target, last 10% held out.
close_data = normalise(data.close.to_numpy(), 0, 1)
unroll_length = 10
X_train, X_test, y_train, y_test = train_test_split_lstm(close_data, 5, int(close_data.shape[0] * 0.1))
# unroll() yields (samples, unroll_length); expand_dims adds a trailing
# single-feature axis.
X_train = np.expand_dims(unroll(X_train, unroll_length), axis = 2)
y_train = np.expand_dims(unroll(y_train, unroll_length), axis = 2)
X_test = np.expand_dims(unroll(X_test, unroll_length), axis = 2)
y_test = np.expand_dims(unroll(y_test, unroll_length), axis = 2)
'''
https://machinelearningmastery.com/how-to-develop-a-generative-adversarial-network-for-a-1-dimensional-function-from-scratch-in-keras/
In the example above, generator guesses pairs (x, y). Discriminator classifies them as real or fake.
The generator learns to guess pairs (x, x^2). The discriminator learns to classify only pairs (x, x^2) as real.
We want our generator to guess a time series X, and the discriminator to classify it as real or fake.
Generator: MLP. Returns next unroll_length prices from a randomly generated vector of size latent_dim.
Discriminator: MLP with a final sigmoid layer
'Training the discriminator model is straightforward. The goal is to train a generator model, not a discriminator model,
and that is where the complexity of GANs truly lies.'
'When the discriminator is good at detecting fake samples, the generator is updated more, and when the discriminator model
is relatively poor or confused when detecting fake samples, the generator model is updated less.'
'This is because the latent space has no meaning until the generator model starts assigning meaning to points in the space as it learns.
After training, points in the latent space will correspond to points in the output space, e.g. in the space of generated samples.'
In the example, their samples were (x, y) pairs. For us, the sample will be an unroll_length dimensional vector.
'''
# A simple discriminator model.
def define_discriminator(n_inputs=unroll_length):
    """Binary real/fake classifier: one hidden ReLU layer, sigmoid output."""
    net = tf.keras.models.Sequential()
    net.add(layers.Dense(25, activation='relu', kernel_initializer='he_uniform', input_dim=n_inputs))
    net.add(layers.Dense(1, activation='sigmoid'))
    # Compiled standalone so it can be trained directly on real/fake batches.
    net.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
    return net
def define_generator(latent_dim, n_outputs=unroll_length):
    """Map a latent vector to a fake price window of length ``n_outputs``."""
    net = tf.keras.models.Sequential()
    net.add(layers.Dense(15, activation='relu', kernel_initializer='he_uniform', input_dim=latent_dim))
    net.add(layers.Dense(n_outputs, activation='linear'))
    # Not compiled: the generator is only trained through the combined GAN.
    return net
def define_gan(generator, discriminator):
    """Stack generator -> discriminator into the combined adversarial model."""
    # Include the line below if the discriminator has been pre-trained.
    # This kinda defeats the point of a GAN though.
    #discriminator.trainable=False
    combined = tf.keras.models.Sequential()
    combined.add(generator)
    combined.add(discriminator)
    # Compile the stacked model; its loss drives the generator updates.
    combined.compile(loss='binary_crossentropy', optimizer='adam')
    return combined
def generate_real_samples(n_epoch, n): #gets n rows from X_train, which has 834 rows total
    """Return ``n`` consecutive real windows from the global X_train, with label 1."""
    #Line below slices first n rows when n_epoch = 1, slices next n rows when n_epoch = 2, and so on
    X1 = X_train[((n_epoch-1)*n):n_epoch*n]
    X = X1.reshape(n, unroll_length)
    y = np.ones((n, 1))
    return X, y #1 is the class label, 1 means real
def generate_test_samples(n): #gets n rows from X_test, which has 136 rows total
    """Return ``n`` real windows from the global X_test, with label 1.

    FIXME: always slices the FIRST n rows, so repeated calls never cover
    the rest of the test set.
    """
    X1 = X_test[:n] #This simply gets the first n rows of X_test. Needs fixing so that we use the whole test set.
    X = X1.reshape(n, unroll_length)
    y = np.ones((n, 1))
    return X, y #1 is the class label, 1 means real
# The generator's first layer needs some input vector; this creates it.
def generate_latent_points(latent_dim, n):
    """Draw ``n`` standard-normal latent vectors of size ``latent_dim``."""
    flat = np.random.randn(latent_dim * n)
    return flat.reshape(n, latent_dim)
def generate_fake_samples(generator, latent_dim, n): #gets generator prediction
    """Sample ``n`` fake windows from ``generator``, with label 0."""
    latent = np.random.randn(latent_dim * n).reshape(n, latent_dim)
    X = generator.predict(latent)
    y = np.zeros((n, 1))
    return X, y  # 0 is the class label, 0 means fake
def summarise_performance(epoch, generator, discriminator, latent_dim, n=34):
    """Print discriminator accuracy on real test windows vs generator fakes.

    Bug fix: the parameter was misspelled ``disciminator`` and the body fell
    through to the module-level global ``discriminator``, so the model passed
    in by the caller was silently ignored; the parameter is now actually used.

    n = size of test set / int(n_epochs/n_eval); here 136/4 = 34.  Each call
    currently reuses the FIRST n rows of X_test (see generate_test_samples);
    ideally successive calls would walk through the whole test set.
    """
    x_real, y_real = generate_test_samples(n)
    _, acc_real = discriminator.evaluate(x_real, y_real, verbose=0)
    x_fake, y_fake = generate_fake_samples(generator, latent_dim, n)
    _, acc_fake = discriminator.evaluate(x_fake, y_fake, verbose=0)
    print(epoch, acc_real, acc_fake)
def train(generator, disciminator, gan, latent_dim, n_epochs=417, n_batch=4, n_eval=100):
    """Alternate discriminator updates (real + fake halves) with GAN updates.

    NOTE(review): "disciminator" is misspelled but used consistently here.
    """
    half_batch = int(n_batch/2)
    #834 rows in X_train for BTC.csv.
    #n_epochs must be less or equal to 834/half_batch, ideally as close as possible to this limit so that we use all the training data.
    #In each epoch, dicriminator is trained on half_batch real samples and half_batch fake samples. We want to train at least once on each real sample in X_train.
    for i in range(1, n_epochs+1):
        # Discriminator step: one real half-batch, one generated half-batch.
        x_real, y_real = generate_real_samples(i, half_batch)
        x_fake, y_fake = generate_fake_samples(generator, latent_dim, half_batch)
        disciminator.train_on_batch(x_real, y_real)
        disciminator.train_on_batch(x_fake, y_fake)
        # Generator step: latent points labelled "real" (1) so the stacked
        # GAN loss pushes the generator to fool the discriminator.
        x_gan = generate_latent_points(latent_dim, n_batch)
        y_gan = np.ones((n_batch, 1))
        gan.train_on_batch(x_gan, y_gan)
        #Line below updates discriminator weights, so both models are trained simultaneously.
        #This is a deviation from the example, where they train the discriminator first.
        #This throws a shit ton of error messages
        #gan = define_gan(generator, discriminator)
        #evaluate the model every n_eval epochs on the test set
        if (i+1)%n_eval == 0:
            summarise_performance(i, generator, disciminator, latent_dim)
# Build the three models: standalone discriminator, generator, and the
# stacked GAN used to train the generator.  Training is opt-in (uncomment).
latent_dim = 20
discriminator = define_discriminator()
generator = define_generator(latent_dim)
gan = define_gan(generator, discriminator)
#train(generator, discriminator, gan, latent_dim)
'''
When we include line 118, partial output is
299 0.0 0.9411764740943909
399 0.23529411852359772 0.970588207244873
When we comment out line 62, the output is
99 0.0 0.6470588445663452
199 0.0 0.20588235557079315
299 0.3235294222831726 0.0882352963089943
399 0.5 0.0882352963089943
Ideally, what we want to see as output is something like
index 0.5 0.5
index 0.49 0.51
index 0.51 0.49
i.e. the generator is so good at creating fakes that the discriminator must guess at random.
Next step: Try this with A LOT more data.
If you want to save the model weights, see trainPlayground.py in LSTM branch, or just see the guide on tensorflow.org.
'''
|
{"/trainPlayground.py": ["/models.py", "/dataUtils.py", "/waveletDenoising.py"], "/lstm_train_single.py": ["/dataUtils.py", "/waveletDenoising.py", "/models.py"], "/GAN/GANmodel.py": ["/models.py", "/dataUtils.py", "/waveletDenoising.py"], "/wdDemo.py": ["/waveletDenoising.py"]}
|
14,655
|
sanj909/CollectiWise
|
refs/heads/main
|
/querying_data.py
|
import pandas as pd
import numpy as np
import json
from google.cloud import bigquery
class SQL:
    """Thin wrapper around a BigQuery client for building the ML feature table.

    Workflow: aggregate raw trades into fixed-length intervals server-side,
    pivot assets into feature columns, pull the result into a local dataframe
    or CSV, then unnest/clean it.  ``self.query`` keeps every SQL string
    issued so far; ``self.data`` holds the current local dataframe.
    """
    def __init__(self, project = 'development-302415', dataset = 'machine_learning'):
        # BigQuery handles plus local state for issued queries and data.
        self.project = project
        self.client = bigquery.Client(project = self.project)
        self.dataset = self.client.dataset(dataset)
        self.query = []
        self.data = pd.DataFrame()
    def aggregate_to_intervals(self, interval_length, where = "WHERE RIGHT(asset,3) = 'USD'"):
        """Aggregate tick data into ``interval_length``-second OHLC/volume rows.

        Creates/replaces the ``sorted_by_interval`` table in BigQuery.

        :param interval_length: interval size in seconds
        :param where: SQL filter on the raw table (default keeps *USD pairs)
        """
        query = """
#Length in seconds of each interval
DECLARE
interval_length INT64 DEFAULT """+str(interval_length)+""";
#Adds a 'intervals' column which acts like an index for the interval each row belongs to
CREATE OR REPLACE TABLE `development-302415.machine_learning.sorted_by_interval` AS
WITH
transactional AS (
SELECT
*,
CAST(TRUNC(TIMESTAMP_DIFF(time_stamp,'2000-01-01 00:00:00+00', second)/interval_length,0) AS INT64) AS intervals,
FROM
`development-302415.machine_learning.weekly_v1`
"""+where+"""
ORDER BY
intervals,
asset )
#Reverts 'intervals' index back to a timestamp, aggregates volume, average prices and OHLC prices over each interval (and over each asset)
SELECT
TIMESTAMP_ADD(TIMESTAMP '2000-01-01 00:00:00+00', INTERVAL t.intervals*interval_length second) AS time_stamp,
t.asset,
SUM(t.volume) AS volume,
AVG(t.price) AS avg_price,
AVG(open) AS open,
MAX(t.price) AS high,
MIN(t.price) AS low,
AVG(close) AS close,
AVG(label) as label,
CASE
WHEN COUNT(t.price) >= 2 THEN STDDEV(t.price)
ELSE
0
END
AS std_price
FROM (
SELECT
*,
FIRST_VALUE(price) OVER(PARTITION BY intervals, asset ORDER BY time_stamp ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS open,
LAST_VALUE(price) OVER(PARTITION BY intervals, asset ORDER BY time_stamp ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS close,
FROM
transactional) AS t
GROUP BY
t.intervals,
t.asset
ORDER BY t.intervals, label;
"""
        self.query.append(query)
        self.client.query(query)
    def convert_to_features(self, features = 'avg_price,volume,std_price,label'):
        """Pivot per-asset rows into per-asset feature columns.

        Calls the ``pivot`` stored procedure in BigQuery; the commented SQL
        below is an earlier label-join variant kept for reference.
        """
        query = """
#DECLARE target_asset STRING DEFAULT 'ETHUSD';
CALL `development-302415.machine_learning.pivot` (
'development-302415.machine_learning.sorted_by_interval','development-302415.machine_learning.assets_to_features', ['time_stamp'], 'asset','[STRUCT("""+features+""")]',1000,'ANY_VALUE','');
#CREATE OR REPLACE TABLE `development-302415.machine_learning.assets_to_features` AS
#SELECT features.*, labels.label FROM `development-302415.machine_learning.assets_to_features` AS features
#INNER JOIN `development-302415.machine_learning.sorted_by_interval` AS labels
#ON features.time_stamp = labels.time_stamp AND labels.asset = target_asset
#ORDER BY time_stamp;
"""
        self.query.append(query)
        self.client.query(query)
    def get_table(self,table_name, max_results = None, csv_name = None):
        """Download a BigQuery table into ``self.data``.

        :param table_name: dataset-qualified table name (e.g. 'machine_learning.x')
        :param max_results: optional row cap via SQL LIMIT
        :param csv_name: optional path to also persist the dataframe as CSV
        :return: the downloaded dataframe
        """
        if max_results != None:
            self.data = self.client.query('SELECT * FROM `development-302415.'+table_name+'` LIMIT '+str(max_results))
        else:
            self.data = self.client.query('SELECT * FROM `development-302415.'+table_name+'`')
        self.data = self.data.result()
        self.data = self.data.to_dataframe()
        #table = self.client.get_table('development-302415.'+table_name)
        #self.data = self.client.list_rows(table).to_dataframe()
        if csv_name != None:
            self.data.to_csv(csv_name, index = False)
        return self.data
    def load_csv(self, path):
        """Replace ``self.data`` with a CSV previously written by save_csv/get_table."""
        self.data = pd.read_csv(path, header = 0, index_col = 0)
    def save_csv(self, path):
        """Persist ``self.data`` to CSV (index included)."""
        self.data.to_csv(path)
    def unnest(self, columns_prefix = 'e_', na_fillers = {'avg_price':'ffill','volume':0,'std_price':0,'label':0,'high':'$avg_price','low':'$avg_price','open':'$avg_price','close':'$avg_price'}, dropna = False, merge_labels = True, label_name = 'label'):
        """Expand JSON-struct columns (those starting with ``columns_prefix``)
        into flat '<column> <feature>' columns, applying per-feature NaN fills.

        Fill rules: int value -> fillna(value); '$col' string -> fill from the
        sibling feature named col; any other string -> fillna(method=...).
        NOTE(review): the mutable dict default is shared across calls; it is
        only read here, so behaviour is unaffected.  Also note the inner
        ``raise KeyError('Fill method isn\\'t ...')`` is caught by the outer
        ``except KeyError`` and re-raised with the generic message.

        :param merge_labels: if True, collapse all '<asset> label' columns
            into a single list-valued ``label_name`` column; otherwise just
            move them to the right-hand side
        :param dropna: if True, drop rows containing NaN at the end
        :return: the transformed dataframe
        """
        # '[]' cells would break json parsing below; treat them as one empty struct.
        self.data = self.data.applymap(lambda x: x if x != '[]' else '[{}]')
        for column in self.data.columns.values:
            if column[:len(columns_prefix)] == columns_prefix:
                serie = self.data[column].map(lambda x: list(json.loads(x.replace('\'','\"')))[0])
                serie = pd.json_normalize(serie)
                serie.set_index(self.data.index,inplace = True)
                for feature in serie.columns.values:
                    try:
                        if type(na_fillers[feature]) == int:
                            serie[feature] = serie[feature].fillna(value = na_fillers[feature])
                            self.data[column+' '+feature] = serie[feature]
                        elif type(na_fillers[feature]) == str:
                            if na_fillers[feature][0] == '$':
                                serie[feature] = serie[feature].fillna(serie[na_fillers[feature][1:]])
                                self.data[column+' '+feature] = serie[feature]
                            else:
                                serie[feature] = serie[feature].fillna(method = na_fillers[feature])
                                self.data[column+' '+feature] = serie[feature]
                        else:
                            raise KeyError('Fill method isn\'t int or string for '+feature)
                    except KeyError:
                        raise KeyError('No NaN fill method declared for '+feature)
                self.data.drop(columns = column, inplace = True)
        #Puts all the '<asset> label' columns to the right
        if merge_labels:
            #print(self.data[[ i for i in list(self.data.columns) if i[-len(label_name):] == label_name]])
            labels = self.data[[ i for i in list(self.data.columns) if i[-len(label_name):] == label_name]].values.tolist()
            #print(labels)
            self.data.drop(columns = [ i for i in list(self.data.columns) if i[-len(label_name):] == label_name], inplace = True)
            self.data[label_name] = labels
        else:
            self.data = self.data[[ i for i in list(self.data.columns) if i[-len(label_name):] != label_name]+[ i for i in list(self.data.columns) if i[-len(label_name):] == label_name]]
        if dropna:
            self.data.dropna(axis = 0, inplace = True)
        return self.data
    def create_targets(self, targets = ['high','low'], merge_labels = True):
        """Unfinished stub meant to build rolling high/low targets.

        NOTE(review): ``df.rolling()`` is called without a window argument and
        its result is discarded — this method currently does nothing useful.
        """
        for target in targets:
            df = self.data[[ i for i in list(self.data.columns) if i[-len(target):] == target]]
            df = df.rolling()
    def summarize(self, na_threshold = None):
        """Print feature/timestamp counts; with ``na_threshold`` set, also list
        the columns whose NaN percentage meets or exceeds it."""
        print('------------------------------------------------')
        if na_threshold != None:
            df = pd.Series(dtype = object)
            total = len(self.data.index.values)
            for column in self.data.columns.values:
                df[column] = self.data[column].isna().sum()/total*100
            df = df.where(df >= na_threshold).dropna().sort_values(ascending = False)
            with pd.option_context('display.max_rows', None, 'display.max_columns', None):
                print('Features with more than ',na_threshold,'% NaN: \n',df)
            print(len(df),' features with more than ',na_threshold,'% NaN values')
        print('Features: ',len(self.data.columns.values)-1,',\n'
              'Timestamps:',len(self.data.index.values))
        print('------------------------------------------------')
    def dropna(self, threshold = 100):
        """Drop every asset whose avg_price column is >= ``threshold`` % NaN
        (all of that asset's columns go), then drop remaining NaN rows."""
        df = pd.Series(dtype = object)
        total = len(self.data.index.values)
        for column in self.data.columns.values:
            df[column] = self.data[column].isna().sum()/total*100
        df = df.where(df >= threshold).dropna().sort_values(ascending = False)
        # Asset name is the column name minus the ' avg_price' suffix.
        assets = [i[:-10] for i in df.index.values if i[-10:] == ' avg_price']
        self.data = self.data[[i for i in self.data.columns.values if list(filter(i.startswith, assets)) == []]]
        self.data.dropna(inplace = True, axis = 0)
# Pipeline driver: build features in BigQuery, pull them locally, clean, and
# save the final formatted_features.csv consumed by the training scripts.
# NOTE(review): "dir" shadows the builtin; left unchanged here.
dir = 'cloudshell_open/CollectiWise/'
sql = SQL()
# The interval aggregation (2h = 7200s) only needs to run once per dataset.
#sql.aggregate_to_intervals(7200)
sql.convert_to_features(features = 'avg_price,volume,std_price,label,high,low')
sql.get_table('machine_learning.assets_to_features', csv_name = dir+'assets_to_features.csv')
sql.data.drop(columns = ['time_stamp'], inplace=True)
sql.load_csv(dir+'assets_to_features.csv')
sql.unnest(merge_labels=True)
print(sql.data.columns.values)
sql.save_csv(dir+'features_df.csv')
sql.load_csv(dir+'features_df.csv')
#sql.summarize(na_threshold=50)
# Remove assets with >= 50% missing avg_price, then report what's left.
sql.dropna(threshold=50)
sql.summarize()
sql.data.set_index('label', inplace = True)
sql.save_csv(dir+'formatted_features.csv')
sql.load_csv(dir+'formatted_features.csv')
print(sql.data)
|
{"/trainPlayground.py": ["/models.py", "/dataUtils.py", "/waveletDenoising.py"], "/lstm_train_single.py": ["/dataUtils.py", "/waveletDenoising.py", "/models.py"], "/GAN/GANmodel.py": ["/models.py", "/dataUtils.py", "/waveletDenoising.py"], "/wdDemo.py": ["/waveletDenoising.py"]}
|
14,656
|
sanj909/CollectiWise
|
refs/heads/main
|
/wdDemo.py
|
# Demo: wavelet-denoise one asset's average-price series and plot raw vs
# denoised, printing SNR/RMSE of the result.
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import pywt
'''
#Importing stock data
import yfinance as yf
from datetime import date,datetime,timedelta
ticker = '^GSPC'
first_day = datetime(2000, 1, 3)
last_day = datetime(2019, 7, 1)
data = yf.Ticker(ticker).history(interval = '1d', start=first_day, end=last_day)
data.reset_index(inplace=True)
'''
'''
#Importing our crypto data
ticker = 'QTUMUSD' #Try QTUMUSD, XBTEUR, ETCUSD, ZECXBT, GNOXBT, XBTEUR, LTCEUR, XBTUSD, EOSXBT, EOSETH, GNOUSD
data = pd.read_csv('/Users/Sanjit/Google Drive/CollectiWise/Data/high_low.csv') #change this
data = data[data['asset'] == ticker]
data.reset_index(inplace=True, drop=True)
'''
# NOTE(review): hard-coded absolute path — assumes this exact machine.
data = pd.read_csv('/Users/Sanjit/Repos/CollectiWise/formatted_features.csv')
column = 'e_XBTUSD avg_price'
data = data[column]
from waveletDenoising import denoise, SNR, RMSE, optDenoise, standardise, gridSearch_v2, optDenoise_v2 #Store this file in the same folder as 'waveletDenoising.py'
from sklearn.preprocessing import StandardScaler, MinMaxScaler
from skimage.restoration import denoise_wavelet
#x = np.array(data.Close)
x = np.array(data)
# Keep the original moments so the series could be shifted/scaled back later.
original_mean = np.mean(x)
original_std = np.std(x)
#In the paper they used zero-mean normalization, which means the series is just shifted vertically downwards by its mean.
x = x - np.mean(x) #equivalently, standardise(x, 0, np.std(x))
#x = standardise(x, 0, 1) #N(0,1) standardisation
#See https://www.youtube.com/watch?v=HSG-gVALa84
#y = denoise_wavelet(x, wavelet='coif3', mode='hard', wavelet_levels=3, method='BayesShrink', rescale_sigma=True)
#method: 'BayesShrink' or 'VisuShrink'
#Most of the time, the denoised series is basically identical to the original. Problem is worse when we standardise to N(0, 1)
#VisuShrink doesn't capture price peaks, and these obviously can't be noise.
y = optDenoise_v2(x)
#x = x + original_mean
#y = y + original_mean
#x = standardise(x, original_mean, original_std)
#y = standardise(x, original_mean, original_std)
print("SNR: ", SNR(x, y))
print("RMSE: ", RMSE(x, y))
# Raw series in green, denoised in red.
plt.plot(data.index, x, color='Green')
plt.plot(data.index, y, color='Red')
#plt.title(ticker)
plt.title(column)
plt.show()
'''
We see strange behaviour when the prices are very large (XBTEUR, XBTUSD, in 1000s) and very small (GNOXBT, EOSXBT, in 0.001s)
When prices are large, the denoised signal is almost identical to the raw signal
When prices are small, the denoised signal is a constant zero signal, i.e. nothing like the raw signal
It seems that in the second case, everything is considered noise since all the movements are so small, and in the first case,
nothing is considered noise since all the movements are so large.
There must be some way to 'normalise' the data, so that the absolute value of prices moves is irrelevant, and only the relative
value of price moves matters.
I've now implemented this in the rescale function: it rescales the data to have any mean and std you specify. The issue with
rescaling and then descaling is that RMSE increases by a lot (for GSPC, where new_mean = sqrt(old_mean) and similarly for std).
Despite this, the plot looks alright.
Why do we descale? At some point we need to, either after feeding the data through the model or before.
Rescaling, to the squares of the orignial mean and standard deviation, works really nicely with QTUMUSD.
When the numbers are too small (<1), there seems to be some kind of numerical overflow: the denoised signal is way off. So, the
usual mean = 0 std = 1 transform is not really an option.
Many cryptos were worth extremely small amounts when they started trading. In these cases, the denoised signal at the start of the
period is way off. ZECXBT offers basically no information.
It seems that it's not easy to write one function which can properly denoise every series we give it in just one click.
There needs to be an element of inspection. Maybe we can try a grid search for each series, but I don't see anything better.
I have now implemented a grid search! Don't see how we can do much better. It works for the most part, but for certain assets,
the denoised series is still not right.
'''
|
{"/trainPlayground.py": ["/models.py", "/dataUtils.py", "/waveletDenoising.py"], "/lstm_train_single.py": ["/dataUtils.py", "/waveletDenoising.py", "/models.py"], "/GAN/GANmodel.py": ["/models.py", "/dataUtils.py", "/waveletDenoising.py"], "/wdDemo.py": ["/waveletDenoising.py"]}
|
14,657
|
sanj909/CollectiWise
|
refs/heads/main
|
/waveletDenoising.py
|
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import pywt #after running pip install PyWavelets (https://github.com/PyWavelets/pywt, https://pywavelets.readthedocs.io/en/latest/)
from sklearn.preprocessing import StandardScaler
from skimage.restoration import denoise_wavelet
'''
The denoising steps are the following : https://www.kaggle.com/theoviel/denoising-with-direct-wavelet-transform
(1)Apply the dwt to the signal
Which signal extension mode is best for financial time series?
See https://pywavelets.readthedocs.io/en/latest/ref/signal-extension-modes.html#ref-modes
Periodization is bad: visually inspect the start of the signal constructed with this mode
Actually it makes barely any difference, after trying out a few.
if level is not specified or level = None, it's calculated using the dwt_max_level function. For this data, it is 5.
coeffs is an array of arrays. len(coeffs) = level + 1
As level increases the denoised signal gets smoother and is more different from the original signal.
(2)Compute the threshold corresponding to the chosen level
Increasing threshold seems to smooth out the curve more
I've no idea how thresholds are calculated. See https://uk.mathworks.com/help/wavelet/ref/thselect.html
threshold = 1 gives great results in terms of a high SNR and low RMSE.
But not denoising at all gives the best result with these metrics (infinite SNR, 0 RMSE)! These metrics seem useless.
I've been assessing performance visually, balancing smoothness of the curve with the similarity to the original.
So, these (wavelet, mode, level) are true hyperparameters, which require retraining and testing of ML model to optimise
https://uk.mathworks.com/help/wavelet/ug/denoising-signals-and-images.html:
'Minimax and SURE threshold selection rules are more conservative and would be more convenient when small details of
the signal lie near the noise range. The two other (sqtwolog, mixture of sqtwolog and SURE) rules remove the noise
more efficiently'
https://uk.mathworks.com/help/wavelet/ug/wavelet-denoising.html:
We assume that the magnitude of noise is constant throughout the signal. MATLAB has a function which automatically
relaxes this assumption, i.e. automatically adjusts the threshold depending on the level of noise at each segment
of the series (no of segments are also calculated automatically) but I'm not sure how it works. If you have MATLAB,
this is one way we can improve the model.
Maybe we can just split every series into 5 parts, denoise each separately, and stitch them back together. This will
only work with a different threshold function though: sqtwolog, which we're using now, depends only on the length of
the series.
(3)Only keep coefficients with a value higher than the threshold
Which threshold mode is best for financial time series?
hard thresholding is much better the soft, by visual inspection of plot. garrote is similar to hard.
It seems that they used hard thresholding in the paper.
(4)Apply the inverse dwt to retrieve the signal
(5)Sometimes, signal length is 1 greater than raw data length. We'll just remove the last value for now.
'''
# Input must be a numpy array. Output is a numpy array.
def denoise(raw, wavelet, level, mode='symmetric'):
    """Wavelet-threshold denoising: decompose, hard-threshold, reconstruct.

    :param raw: 1-D numpy array to denoise
    :param wavelet: pywt wavelet name
    :param level: decomposition depth
    :param mode: signal extension mode
    :return: denoised array, trimmed to len(raw) if reconstruction pads
    """
    # (1) discrete wavelet decomposition
    coefficients = pywt.wavedec(raw, wavelet, mode=mode, level=level)
    # (2) universal threshold — sqtwolog function in MATLAB
    cutoff = np.sqrt(2*np.log(len(raw)))
    # (3) hard-threshold every coefficient band
    kept = [pywt.threshold(band, value=cutoff, mode='hard') for band in coefficients]
    # (4) inverse transform back to the time domain
    signal = pywt.waverec(kept, wavelet, mode=mode)
    # (5) waverec sometimes emits one extra sample; drop it
    if len(signal) > len(raw):
        signal = np.delete(signal, -1)
    return signal
def SNR(raw, denoised):
    """Signal-to-noise ratio in decibels; equation (9) in the paper
    (xhat_j is the denoised series).

    Bug fix: the original used np.log (natural log), but SNR in dB is
    defined with the base-10 logarithm: 10*log10(P_signal / P_noise).

    :param raw: original series (numpy array)
    :param denoised: denoised series of the same length
    :return: SNR in dB (inf when the two series are identical)
    """
    num = np.sum(np.power(raw, 2))
    den = np.sum(np.power(raw - denoised, 2))
    return 10*np.log10(num/den)
def RMSE(raw, denoised):
    """Root-mean-square error between the raw and denoised series.

    Bug fix: the original returned sqrt(sum of squared errors) — the
    Euclidean distance — omitting the division by n that "mean" implies,
    which also made scores from different-length series incomparable.
    (The old comment already noted equation (12) in the paper is incorrect.)

    :param raw: original series (numpy array)
    :param denoised: denoised series of the same length
    :return: sqrt(mean((raw - denoised)**2))
    """
    return np.sqrt(np.mean(np.power(raw - denoised, 2)))
# https://stats.stackexchange.com/questions/46429/transform-data-to-desired-mean-and-standard-deviation
def standardise(x, new_mean, new_std):
    """Affinely rescale ``x`` to the requested mean and standard deviation."""
    centred = x - np.mean(x)
    return centred*(new_std/np.std(x)) + new_mean
# Rescaling series to ensure consistent performance of the denoising function.
def rescale(x, orgnl_mean, orgnl_std):
    """Rescale a series so its magnitude suits the denoising routine.

    The denoiser misbehaves on very large (>100) and very small (<1) price
    levels, so mean/std are mapped towards the ~10-100 sweet spot (the
    denoise function doesn't work with mean-0/var-1 data).  Series with
    10 < mean <= 100 are deliberately left untouched.

    Bug fix: the ``orgnl_mean < 0.1`` branch was unreachable because the
    broader ``orgnl_mean < 1`` test preceded it in the elif chain; the
    narrower test is now checked first.

    :param x: series to rescale (numpy array)
    :param orgnl_mean: mean of the original series
    :param orgnl_std: standard deviation of the original series
    :return: rescaled series (or ``x`` unchanged for the 10-100 range)
    """
    if 1 < orgnl_mean <= 10:
        x = standardise(x, np.power(orgnl_mean, 2), np.power(orgnl_std, 2))
    elif 100 < orgnl_mean:
        x = standardise(x, np.sqrt(orgnl_mean), np.sqrt(orgnl_std))
    elif orgnl_mean < 0.1:
        x = standardise(x, np.power(10000, orgnl_mean), np.power(10000, orgnl_std))
    elif orgnl_mean < 1:
        x = standardise(x, np.power(100, orgnl_mean), np.power(100, orgnl_std))
    return x
def gridSearch(x, orgnl_mean, orgnl_std):
    """Grid-search (wavelet, level) for :func:`denoise`, maximising SNR - RMSE.

    :param x: series to search over
    :param orgnl_mean: mean of the original series (kept for the disabled
        rescale path)
    :param orgnl_std: std of the original series (ditto)
    :return: [best score, best wavelet name, best level]
    """
    result = [-100000, '', 0] #SNR - RMSE, wavelet, level
    for w in pywt.wavelist(kind='discrete'):
        for l in range(2, 5):
            #x = rescale(x, orgnl_mean, orgnl_std)
            # NOTE(review): re-standardising every iteration is redundant after
            # the first pass (the data is already ~N(0,1)) but kept as-is.
            x = standardise(x, 0, 1)
            y = denoise(x, w, l)
            #x = standardise(x, orgnl_mean, orgnl_std)
            #y = standardise(y, orgnl_mean, orgnl_std)
            if (SNR(x, y) - RMSE(x, y)) > result[0]:
                result[0] = (SNR(x, y) - RMSE(x, y)); result[1] = w; result[2] = l
    return result
# Input must be a simple iterable e.g. np.array, pd.Series, array. Output is a numpy array.
def optDenoise(x):
    """Denoise ``x`` using the (wavelet, level) pair chosen by gridSearch."""
    series = np.array(x)
    mean_before = np.mean(series)
    std_before = np.std(series)
    best = gridSearch(series, mean_before, std_before)
    # The rescale / de-standardise steps from earlier experiments stay disabled:
    #series = rescale(series, mean_before, std_before)
    #series = standardise(series, mean_before, std_before)
    return denoise(series, best[1], best[2])
#grid search best parameters for denoising function.
def gridSearch_v2(x, metric):
    """Exhaustively search ``denoise_wavelet`` settings for the best score.

    :param x: series to denoise (numpy array)
    :param metric: 1 = maximise SNR - RMSE, 2 = maximise SNR, 3 = minimise RMSE
    :return: [wavelet, level, mode, method, best RMSE, best SNR, best SNR-RMSE];
        only the score slot for the chosen metric is meaningfully updated
    """
    #metric=1: maximise SNR - RMSE
    #metric=2: maximise SNR
    #metric=3: minimise RMSE
    result = ['', 0, '', '', 1000000, 0, -1000000] #wavelet, level, mode, method, RMSE, SNR, SNR-RMSE
    #Only consider haar, db, sym, coif wavelet basis functions, as these are relatively suitable for financial data
    for w in [wavelet for wavelet in pywt.wavelist(kind='discrete') if wavelet.startswith(('haar', 'db', 'sym', 'coif'))]:
        for l in range(1, 5):
            for m in ['hard', 'soft']:
                for method in ['BayesShrink', 'VisuShrink']:
                    y = denoise_wavelet(x, wavelet=w, mode=m, wavelet_levels=l, method=method, rescale_sigma=True)
                    snr = SNR(x, y)
                    rmse = RMSE(x, y)
                    if metric == 1:
                        if (snr - rmse) > result[6]:
                            result[6] = (snr - rmse); result[0] = w; result[1] = l; result[2] = m; result[3] = method
                    elif metric == 2:
                        if (snr) > result[5]:
                            result[5] = (snr); result[0] = w; result[1] = l; result[2] = m; result[3] = method
                    elif metric == 3:
                        if (rmse) < result[4]:
                            result[4] = (rmse); result[0] = w; result[1] = l; result[2] = m; result[3] = method
    return result
def optDenoise_v2(x):
    """Denoise *x* with grid-searched denoise_wavelet parameters.

    Maximises SNR - RMSE (metric 1), as recommended in the referenced paper.
    method is 'BayesShrink' or 'VisuShrink'; most of the time the denoised
    series is nearly identical to the original, and VisuShrink tends not to
    capture price peaks (which obviously can't be noise).
    """
    series = np.array(x)
    params = gridSearch_v2(series, 1)
    #See https://www.youtube.com/watch?v=HSG-gVALa84
    return denoise_wavelet(series, wavelet=params[0], wavelet_levels=params[1],
                           mode=params[2], method=params[3], rescale_sigma=True)
#takes a numerical dataframe as input
def denoise_df(df):
    """Denoise every column of a numerical dataframe.

    Each column is treated as an independent series.  NOTE: the input
    dataframe is mutated in place (and also returned for convenience).
    """
    for col in df.columns:
        df[col] = optDenoise_v2(np.array(df[col]))
    return df
|
{"/trainPlayground.py": ["/models.py", "/dataUtils.py", "/waveletDenoising.py"], "/lstm_train_single.py": ["/dataUtils.py", "/waveletDenoising.py", "/models.py"], "/GAN/GANmodel.py": ["/models.py", "/dataUtils.py", "/waveletDenoising.py"], "/wdDemo.py": ["/waveletDenoising.py"]}
|
14,669
|
sathyainfotech/CRUD-SQLite-Tkinter
|
refs/heads/main
|
/Sqlite.py
|
from tkinter import *
from tkinter import ttk
from tkinter import messagebox
from database import *
# Shared database handle backing the whole GUI (see database.py).
db=Database("SqliteDatabase.db")
# Root window and form state variables bound to the entry widgets below.
window=Tk()
window.title("SQLite")
window.geometry("1920x1080")
name=StringVar()
age=StringVar()
gender=StringVar()
address=StringVar()
contact=StringVar()
mail=StringVar()
# Top frame: the registration form (labels + entries laid out in a grid).
frame1=Frame(window,padx=20,pady=20,bg="#636e72")
frame1.pack(side=TOP,fill=X)
lblTitle=Label(frame1,bg="#636e72",text="REGISTRATION",font=("times",16,"bold"),fg="white",pady=10)
lblTitle.grid(columnspan=2)
lblName=Label(frame1,text="Name",bg="#636e72",fg="white",font=("times",16,"bold"),pady=10)
lblName.grid(row=1,column=0)
txtName=Entry(frame1,textvariable=name,font=("times",16),width=43)
txtName.grid(row=1,column=1)
lblAge=Label(frame1,text="Age",bg="#636e72",fg="white",font=("times",16,"bold"),pady=10)
lblAge.grid(row=2,column=0)
txtAge=Entry(frame1,font=("times",16),textvariable=age,width=43)
txtAge.grid(row=2,column=1)
lblgen=Label(frame1,text="Gender",bg="#636e72",fg="white",font=("times",16,"bold"),pady=10)
lblgen.grid(row=3,column=0)
# Gender is a read-only dropdown rather than free text.
cb=ttk.Combobox(frame1,width=41,textvariable=gender,state="readonly",font=("times",16))
cb['values']=("Male","Female","Others")
cb.grid(row=3,column=1)
lblAdd=Label(frame1,text="Address",bg="#636e72",fg="white",font=("times",16,"bold"),pady=10)
lblAdd.grid(row=4,column=0)
txtAdd=Entry(frame1,font=("times",16),width=43,textvariable=address)
txtAdd.grid(row=4,column=1)
lblCon=Label(frame1,text="Contact",bg="#636e72",fg="white",font=("times",16,"bold"),pady=10)
lblCon.grid(row=5,column=0)
txtCon=Entry(frame1,font=("times",16),textvariable=contact,width=43)
txtCon.grid(row=5,column=1)
lblMail=Label(frame1,text="Mail",bg="#636e72",fg="white",font=("times",16,"bold"),pady=10)
lblMail.grid(row=6,column=0)
txtMail=Entry(frame1,font=("times",16),textvariable=mail,width=43)
txtMail.grid(row=6,column=1)
# Container for the Insert/Update/Delete/Clear buttons created further below.
btn_frame=Frame(frame1,bg="#2d3436")
btn_frame.grid(row=7,column=1,columnspan=4)
def fetchData():
    """Reload every database row into the Treeview (serial numbers from 1)."""
    table.delete(*table.get_children())
    for serial, rec in enumerate(db.fetch_record(), start=1):
        table.insert("", END,
                     values=(serial, rec[0], rec[1], rec[2], rec[3], rec[4], rec[5], rec[6]))
def addData():
    """Validate the form and insert a new record (gender is not validated)."""
    required = (txtName.get(), txtAge.get(), txtAdd.get(), txtCon.get(), txtMail.get())
    if "" in required:
        messagebox.showinfo("Message", "Please Fill All Records")
        return
    db.insert(txtName.get(), txtAge.get(), cb.get(), txtAdd.get(), txtCon.get(), txtMail.get())
    fetchData()
    clearData()
    messagebox.showinfo("Message", "Record Insert Successfully")
def getrecord(event):
    """Copy the clicked Treeview row into the form fields.

    The row is also kept in a module-level global so updateData/deleteData
    know which primary key (row[1]) to target.
    """
    global row
    row = table.item(table.focus())['values']
    # Treeview column layout: 0=serial, 1=pid, 2=name, 3=age, 4=gender,
    # 5=address, 6=contact, 7=mail.
    name.set(row[2])
    age.set(row[3])
    gender.set(row[4])
    contact.set(row[6])
    mail.set(row[7])
    address.set(row[5])
def updateData():
    """Validate the form and update the record selected via getrecord."""
    fields = (txtName.get(), txtAge.get(), txtAdd.get(), cb.get(), txtCon.get(), txtMail.get())
    if any(value == "" for value in fields):
        messagebox.showinfo("Message", "Please Fill All Records")
    else:
        db.update_record(txtName.get(), txtAge.get(), cb.get(), txtAdd.get(),
                         txtCon.get(), txtMail.get(), row[1])
        fetchData()
        clearData()
        messagebox.showinfo("Message", "Record Update Successfully")
def deleteData():
    # Delete the record selected via getrecord (row[1] holds the pid),
    # then refresh the Treeview and blank the form.
    # NOTE(review): raises NameError if no row was clicked first — confirm.
    db.remove_record(row[1])
    fetchData()
    clearData()
    messagebox.showinfo("Message","Record Delete Successfully")
def clearData():
    """Blank out every form field."""
    for field in (name, age, gender, contact, mail, address):
        field.set("")
# Action buttons wired to the CRUD callbacks defined above.
btnSub=Button(btn_frame,text="Insert",bg="#01a3a4",fg="white",width=6,padx=20,pady=5,font=("times",16,"bold"),command=addData)
btnSub.grid(row=0,column=0)
btnUp=Button(btn_frame,text="Update",bg="#F79F1F",fg="white",width=6,padx=20,pady=5,font=("times",16,"bold"),command=updateData)
btnUp.grid(row=0,column=1)
btnDel=Button(btn_frame,text="Delete",bg="#ee5253",fg="white",width=6,padx=20,pady=5,font=("times",16,"bold"),command=deleteData)
btnDel.grid(row=0,column=2)
btnClr=Button(btn_frame,text="Clear",bg="#1289A7",fg="white",width=6,padx=20,pady=5,font=("times",16,"bold"),command=clearData)
btnClr.grid(row=0,column=3)
# Lower half of the window: the records table.
myFrame=Frame(window)
myFrame.place(x=0,y=425,width=1920,height=500)
style=ttk.Style()
style.configure("Treeview",font=("times",15),rowheight=35)
style.configure("Treeview.Heading",font=("times",16,"bold"))
# Columns: 0=serial, 1=pid (hidden, width 0), 2=name, 3=age, 4=gender,
# 5=address, 6=contact, 7=mail — the layout getrecord() relies on.
table=ttk.Treeview(myFrame,columns=(0,1,2,3,4,5,6,7))
table.column("0",anchor=CENTER)
table.column("1",stretch=NO,width=0)
table.column("3",anchor=CENTER)
table.column("6",anchor=CENTER)
table.heading("0",text="S.NO")
table.heading("1",text="ID")
table.heading("2",text="NAME")
table.heading("3",text="AGE")
table.heading("4",text="GENDER")
table.heading("5",text="ADDRESS")
table.heading("6",text="CONTACT")
table.heading("7",text="MAIL")
table["show"]='headings'
# Clicking a row loads it into the form.
table.bind("<ButtonRelease-1>",getrecord)
table.pack(fill=X)
# Initial population, then hand control to Tk.
fetchData()
window.mainloop()
|
{"/Sqlite.py": ["/database.py"]}
|
14,670
|
sathyainfotech/CRUD-SQLite-Tkinter
|
refs/heads/main
|
/database.py
|
import sqlite3
class Database:
    """Thin wrapper around a SQLite database with a single `datas` table.

    Exposes a shared connection (`con`) and cursor (`c`) plus CRUD helpers.
    """

    def __init__(self, db):
        """Open (or create) *db* and ensure the `datas` table exists."""
        self.con = sqlite3.connect(db)
        self.c = self.con.cursor()
        self.c.execute("""
        CREATE TABLE IF NOT EXISTS datas(
        pid INTEGER PRIMARY KEY,
        name TEXT NOT NULL,
        age TEXT NOT NULL,
        gender TEXT NOT NULL,
        address TEXT NOT NULL,
        contact TEXT NOT NULL,
        mail TEXT NOT NULL
        )
        """)
        self.con.commit()

    def insert(self, name, age, gender, address, contact, mail):
        """Insert one record; pid is auto-assigned by SQLite."""
        params = (name, age, gender, address, contact, mail)
        self.c.execute("""
        insert into datas values(NULL,?,?,?,?,?,?)
        """, params)
        self.con.commit()

    def fetch_record(self):
        """Return all rows as a list of tuples (pid first)."""
        self.c.execute("SELECT * FROM datas")
        return self.c.fetchall()

    def update_record(self, name, age, gender, address, contact, mail, pid):
        """Overwrite every field of the row identified by *pid*."""
        params = (name, age, gender, address, contact, mail, pid)
        self.c.execute("""
        update datas set name=?,age=?,gender=?,address=?,contact=?,mail=? where pid=?
        """, params)
        self.con.commit()

    def remove_record(self, pid):
        """Delete the row identified by *pid*."""
        self.c.execute("delete from datas where pid=?", (pid,))
        self.con.commit()
|
{"/Sqlite.py": ["/database.py"]}
|
14,681
|
slimakcz/infra-scraper
|
refs/heads/master
|
/infra_scraper/server.py
|
from flask import Flask, redirect, render_template, jsonify
from collections import OrderedDict
from .main import InfraScraper
import logging
logger = logging.getLogger(__name__)
app = Flask(__name__, static_folder='./assets/static')
@app.route('/')
def index():
    """Render the landing page with the scraper's current status."""
    scraper = InfraScraper()
    status = scraper.status()
    return render_template('index.html', config=status)
@app.route('/layout/<name>/<layout>')
def topology_layout(name, layout):
    """Render the given layout page for the named endpoint."""
    endpoint_config = InfraScraper().get_config(name)
    return render_template('layout.html',
                           name=name,
                           config=endpoint_config,
                           layout=layout)
@app.route('/api/<name>/scrape')
def scrape_data(name=None):
    """Trigger a scrape for the named endpoint, then redirect back."""
    InfraScraper().scrape_data(name)
    return redirect('.')
@app.route('/api/<name>')
def topology_data(name=None):
    """Return the cached 'vis' topology for *name* as JSON."""
    cached = InfraScraper().get_cached_data(name, 'vis')
    return jsonify(cached)
@app.route('/api/<name>/hier')
def hierarchy_topology_data(name=None):
    """Return the cached hierarchical ('vis-hier') topology as JSON."""
    cached = InfraScraper().get_cached_data(name, 'vis-hier')
    return jsonify(cached)
def run(*args, **kwargs):
    # Thin wrapper so the CLI can start the Flask development server.
    app.run(*args, **kwargs)
|
{"/infra_scraper/server.py": ["/infra_scraper/main.py"], "/infra_scraper/input/openstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/tests/test_main.py": ["/infra_scraper/main.py"], "/infra_scraper/storage/neo4j.py": ["/infra_scraper/storage/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/amazon.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/output/raw.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/terraform.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/kubernetes.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/saltstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/storage/file.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/storage/etcd.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/cli.py": ["/infra_scraper/main.py", "/infra_scraper/server.py"], "/infra_scraper/output/count.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/reclass.py": ["/infra_scraper/input/saltstack.py", "/infra_scraper/utils.py"], "/infra_scraper/output/vis.py": ["/infra_scraper/output/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/base.py": ["/infra_scraper/utils.py"], "/infra_scraper/main.py": ["/infra_scraper/utils.py"]}
|
14,682
|
slimakcz/infra-scraper
|
refs/heads/master
|
/infra_scraper/storage/base.py
|
import logging
logger = logging.getLogger(__name__)
class BaseStorage(object):
    """Abstract base class for storage backends.

    Subclasses persist raw scraped data and derived output data; every
    hook here must be overridden.
    """

    def __init__(self, **kwargs):
        # Backend name (may be None when not supplied).
        self.name = kwargs.get('name')
        self.last_timestamp = None

    def save_data(self, name, data):
        """Persist raw scraped data for endpoint *name*."""
        raise NotImplementedError

    def load_data(self, name):
        """Load raw scraped data for endpoint *name*."""
        raise NotImplementedError

    def save_output_data(self, name, kind, data):
        """Persist transformed output data of the given *kind*."""
        raise NotImplementedError

    def load_output_data(self, name, kind):
        """Load transformed output data of the given *kind*."""
        raise NotImplementedError
|
{"/infra_scraper/server.py": ["/infra_scraper/main.py"], "/infra_scraper/input/openstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/tests/test_main.py": ["/infra_scraper/main.py"], "/infra_scraper/storage/neo4j.py": ["/infra_scraper/storage/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/amazon.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/output/raw.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/terraform.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/kubernetes.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/saltstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/storage/file.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/storage/etcd.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/cli.py": ["/infra_scraper/main.py", "/infra_scraper/server.py"], "/infra_scraper/output/count.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/reclass.py": ["/infra_scraper/input/saltstack.py", "/infra_scraper/utils.py"], "/infra_scraper/output/vis.py": ["/infra_scraper/output/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/base.py": ["/infra_scraper/utils.py"], "/infra_scraper/main.py": ["/infra_scraper/utils.py"]}
|
14,683
|
slimakcz/infra-scraper
|
refs/heads/master
|
/infra_scraper/utils.py
|
import os
import re
import json
import yaml
import logging
_schema_dir = os.path.join(
os.path.dirname(os.path.realpath(__file__)), 'schema')
def setup_logger(name):
    """Return a DEBUG-level logger writing timestamped records to stderr.

    Note: each call attaches a fresh StreamHandler, so calling this twice
    with the same name duplicates output.
    """
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter(
        fmt='%(asctime)s [%(levelname)s] [%(module)s] %(message)s'))
    log = logging.getLogger(name)
    log.setLevel(logging.DEBUG)
    log.addHandler(handler)
    return log
def load_yaml_json_file(path):
    """Parse *path* as JSON or YAML; return {} when the file is missing.

    Paths whose name ends in 'json' are parsed as JSON, everything else
    as YAML.
    """
    if not os.path.exists(path):
        return {}
    with open(path, 'r') as handle:
        if path.endswith('json'):
            return json.load(handle)
        return yaml.safe_load(handle)
def get_graph_schema(name):
    """Load the '<name>.yaml' resource schema from the package schema dir."""
    schema_path = os.path.join(_schema_dir, 'resource', '{}.yaml'.format(name))
    return load_yaml_json_file(schema_path)
def get_node_icon(icon):
    """Resolve a 'family:character' icon spec against ICON_MAPPING.

    Returns a copy of the mapping entry, augmented with the family name,
    the character name, and the glyph code point parsed as an int.
    """
    family, character = icon.split(":")
    entry = ICON_MAPPING['character'][family][character].copy()
    entry["family"] = ICON_MAPPING['family'][family]
    entry['name'] = character
    entry["char"] = int("0x{}".format(entry["char"]), 0)
    return entry
def to_camel_case(snake_str, first=True):
    """Convert snake_case to CamelCase (or camelCase when first=False)."""
    parts = snake_str.split('_')
    titled = [part.title() for part in parts]
    if first:
        return "".join(titled)
    return parts[0] + "".join(titled[1:])
def to_snake_case(name):
    """Convert CamelCase (including acronym runs) to snake_case."""
    # First break before a capitalised word, then before any remaining
    # lower/digit -> upper boundary.
    step1 = re.sub(r'(.)([A-Z][a-z]+)', r'\1_\2', name)
    step2 = re.sub(r'([a-z0-9])([A-Z])', r'\1_\2', step1)
    return step2.lower()
class ClassRegistry:
    """Minimal name -> class lookup table."""

    def __init__(self):
        self._classes = {}

    def add(self, cls):
        """Register *cls* under its __name__."""
        self._classes[cls.__name__] = cls

    def get_type(self, name):
        """Return the class registered under *name*, or None."""
        return self._classes.get(name)
icon_file = os.path.join(_schema_dir, 'icon.yaml')
ICON_MAPPING = load_yaml_json_file(icon_file)
|
{"/infra_scraper/server.py": ["/infra_scraper/main.py"], "/infra_scraper/input/openstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/tests/test_main.py": ["/infra_scraper/main.py"], "/infra_scraper/storage/neo4j.py": ["/infra_scraper/storage/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/amazon.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/output/raw.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/terraform.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/kubernetes.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/saltstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/storage/file.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/storage/etcd.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/cli.py": ["/infra_scraper/main.py", "/infra_scraper/server.py"], "/infra_scraper/output/count.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/reclass.py": ["/infra_scraper/input/saltstack.py", "/infra_scraper/utils.py"], "/infra_scraper/output/vis.py": ["/infra_scraper/output/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/base.py": ["/infra_scraper/utils.py"], "/infra_scraper/main.py": ["/infra_scraper/utils.py"]}
|
14,684
|
slimakcz/infra-scraper
|
refs/heads/master
|
/infra_scraper/exceptions.py
|
class InfraScraperException(Exception):
    """Base exception raised when infra-scraper parsing goes wrong."""
|
{"/infra_scraper/server.py": ["/infra_scraper/main.py"], "/infra_scraper/input/openstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/tests/test_main.py": ["/infra_scraper/main.py"], "/infra_scraper/storage/neo4j.py": ["/infra_scraper/storage/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/amazon.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/output/raw.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/terraform.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/kubernetes.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/saltstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/storage/file.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/storage/etcd.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/cli.py": ["/infra_scraper/main.py", "/infra_scraper/server.py"], "/infra_scraper/output/count.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/reclass.py": ["/infra_scraper/input/saltstack.py", "/infra_scraper/utils.py"], "/infra_scraper/output/vis.py": ["/infra_scraper/output/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/base.py": ["/infra_scraper/utils.py"], "/infra_scraper/main.py": ["/infra_scraper/utils.py"]}
|
14,685
|
slimakcz/infra-scraper
|
refs/heads/master
|
/infra_scraper/input/openstack.py
|
# -*- coding: utf-8 -*-
import os
import yaml
import tempfile
import os_client_config
from os_client_config import cloud_config
from heatclient.exc import HTTPBadRequest
from infra_scraper.input.base import BaseInput
from infra_scraper.utils import setup_logger
logger = setup_logger('input.openstack')
class OpenStackInput(BaseInput):
    """Scrapes resources and relations from an OpenStack cloud.

    Builds legacy service clients (identity, compute, network,
    orchestration, image, volume) from an os-client-config cloud
    definition written to a temporary clouds.yaml file.  With
    scope == 'global' it additionally scrapes admin-only resources
    (projects, aggregates, hypervisors) across all tenants.
    """
    def __init__(self, **kwargs):
        self.kind = 'openstack'
        # 'local' scrapes the current tenant only; 'global' is cloud-wide.
        self.scope = kwargs.get('scope', 'local')
        super(OpenStackInput, self).__init__(**kwargs)
        # Dump self.config to a temporary clouds.yaml so os_client_config
        # can parse it; the file is removed once the cloud object exists.
        config_file, filename = tempfile.mkstemp()
        config_content = {
            'clouds': {self.name: self.config}
        }
        os.write(config_file, yaml.safe_dump(config_content).encode())
        os.close(config_file)
        self.cloud = os_client_config.config \
            .OpenStackConfig(config_files=[filename]) \
            .get_one_cloud(cloud=self.name)
        os.remove(filename)
        self.identity_api = self._get_client('identity')
        self.compute_api = self._get_client('compute')
        self.network_api = self._get_client('network')
        self.orch_api = self._get_client('orchestration')
        self.image_api = self._get_client('image')
        self.volume_api = self._get_client('volume')
    def _get_client(self, service_key):
        # NOTE(review): relies on the private os_client_config helper
        # cloud_config._get_client — may break across library versions.
        constructor = cloud_config._get_client(service_key)
        return self.cloud.get_legacy_client(service_key, constructor)
    def scrape_all_resources(self):
        """Scrape every supported resource type, honouring self.scope."""
        if self.scope == 'global':
            self.scrape_keystone_projects()
            # self.scrape_keystone_users()
        self.scrape_cinder_volumes()
        self.scrape_glance_images()
        if self.scope == 'global':
            self.scrape_nova_aggregates()
            self.scrape_nova_hypervisors()
        self.scrape_nova_keypairs()
        self.scrape_nova_flavors()
        self.scrape_nova_servers()
        # self.scrape_nova_security_groups()
        self.scrape_neutron_networks()
        self.scrape_neutron_subnets()
        self.scrape_neutron_floating_ips()
        self.scrape_neutron_routers()
        self.scrape_neutron_ports()
        self.scrape_heat_stacks()
        # self.scrape_heat_resource_types()
    def _create_relations(self):
        """Link scraped resources: project membership, stack ownership,
        aggregate membership, port/network/hypervisor attachments."""
        # Define relationships between project and all namespaced resources.
        for resource_type, resource_dict in self.resources.items():
            for resource_id, resource in resource_dict.items():
                if 'tenant_id' in resource['metadata']:
                    self._scrape_relation(
                        'in_os_project',
                        resource_id,
                        resource['metadata']['tenant_id'])
                elif 'project' in resource['metadata']:
                    self._scrape_relation(
                        'in_os_project',
                        resource_id,
                        resource['metadata']['project'])
        # Link Heat stacks to the physical resources they manage.
        for resource_id, resource in self.resources.get('os_stack', {}).items():
            for ext_res in resource['metadata']['resources']:
                if ext_res['resource_type'] in self._get_resource_mapping():
                    self._scrape_relation(
                        'os_stack-{}'.format(
                            self._get_resource_mapping()[ext_res['resource_type']]),
                        resource_id,
                        ext_res['physical_resource_id'])
        # Define relationships between aggregate zone and all hypervisors.
        for resource_id, resource in self.resources.get('os_aggregate', {}).items():
            for host in resource['metadata']['hosts']:
                self._scrape_relation(
                    'in_os_aggregate',
                    host,
                    resource_id)
        for resource_id, resource in self.resources.get('os_floating_ip', {}).items():
            if resource['metadata'].get('port_id', None) is not None:
                self._scrape_relation(
                    'use_os_port',
                    resource_id,
                    resource['metadata']['port_id'])
        for resource_id, resource in self.resources.get('os_port', {}).items():
            self._scrape_relation(
                'in_os_net',
                resource_id,
                resource['metadata']['network_id'])
            if resource['metadata']['device_id'] is not None:
                self._scrape_relation(
                    'use_os_port',
                    resource['metadata']['device_id'],
                    resource_id)
            if self.scope == 'global':
                # binding:host_id is only visible to admin users.
                if resource['metadata'].get('binding:host_id', False):
                    self._scrape_relation(
                        'on_os_hypervisor',
                        resource_id,
                        resource['metadata']['binding:host_id'])
        for resource_id, resource in self.resources.get('os_server', {}).items():
            if self.scope == 'global':
                self._scrape_relation(
                    'on_os_hypervisor',
                    resource_id,
                    resource['metadata']['OS-EXT-SRV-ATTR:host'])
            self._scrape_relation(
                'use_os_flavor',
                resource_id,
                resource['metadata']['flavor']['id'])
            # Volume-backed servers report image as '' rather than a dict.
            if resource['metadata']['image'] != '':
                if resource['metadata']['image'].get('id', None) is not None:
                    self._scrape_relation(
                        'use_os_image',
                        resource_id,
                        resource['metadata']['image']['id'])
            if resource['metadata']['keypair_name'] != '':
                self._scrape_relation(
                    'use_os_key_pair',
                    resource_id,
                    resource['metadata']['keypair_name'])
        for resource_id, resource in self.resources.get('os_subnet', {}).items():
            self._scrape_relation(
                'in_os_net',
                resource_id,
                resource['metadata']['network_id'])
    def scrape_keystone_users(self):
        users = self.identity_api.get('/users')
        for user in users:
            resource = user.to_dict()
            self._scrape_resource(resource['id'], resource['name'],
                                  'os_user', None, metadata=resource)
    def scrape_keystone_projects(self):
        projects = self.identity_api.tenants.list()
        for project in projects:
            resource = project.to_dict()
            self._scrape_resource(resource['id'], resource['name'],
                                  'os_project', None, metadata=resource)
    def scrape_nova_aggregates(self):
        # Aggregates are keyed by name (no stable id is used here).
        response = self.compute_api.aggregates.list()
        for item in response:
            resource = item.to_dict()
            self._scrape_resource(resource['name'], resource['name'],
                                  'os_aggregate', None, metadata=resource)
    def scrape_nova_keypairs(self):
        response = self.compute_api.keypairs.list()
        for item in response:
            resource = item.to_dict()['keypair']
            self._scrape_resource(resource['name'],
                                  resource['name'],
                                  'os_key_pair', None, metadata=resource)
    def scrape_nova_flavors(self):
        response = self.compute_api.flavors.list()
        for item in response:
            resource = item.to_dict()
            self._scrape_resource(resource['id'],
                                  resource['name'],
                                  'os_flavor', None, metadata=resource)
    def scrape_nova_hypervisors(self):
        response = self.compute_api.hypervisors.list()
        for item in response:
            resource = item.to_dict()
            # Keyed by the service host so ports/servers can reference it.
            self._scrape_resource(resource['service']['host'],
                                  resource['hypervisor_hostname'],
                                  'os_hypervisor', None, metadata=resource)
    def scrape_nova_servers(self):
        if self.scope == 'global':
            search_opts = {'all_tenants': 1}
        else:
            search_opts = None
        response = self.compute_api.servers.list(
            search_opts=search_opts)
        for item in response:
            resource = item.to_dict()
            self._scrape_resource(resource['id'], resource['name'],
                                  'os_server', None, metadata=resource)
    def scrape_nova_security_groups(self):
        response = self.compute_api.security_groups.list(
            search_opts={'all_tenants': 1})
        for item in response:
            resource = item.to_dict()
            self._scrape_resource(resource['id'], resource['name'],
                                  'os_security_group', None, metadata=resource)
    def scrape_cinder_volumes(self):
        response = self.volume_api.volumes.list()
        for item in response:
            resource = item.to_dict()
            self._scrape_resource(resource['id'], resource['name'],
                                  'os_volume', None, metadata=resource)
    def scrape_glance_images(self):
        response = self.image_api.images.list()
        for item in response:
            # Glance objects keep the raw payload under __original__.
            resource = item.__dict__['__original__']
            self._scrape_resource(resource['id'], resource['name'],
                                  'os_image', None, metadata=resource)
    def scrape_neutron_routers(self):
        resources = self.network_api.list_routers().get('routers')
        for resource in resources:
            self._scrape_resource(resource['id'], resource['id'],
                                  'os_router', None, metadata=resource)
    def scrape_neutron_floating_ips(self):
        resources = self.network_api.list_floatingips().get('floatingips')
        for resource in resources:
            self._scrape_resource(resource['id'], resource['id'],
                                  'os_floating_ip', None, metadata=resource)
    def scrape_neutron_floating_ip_associations(self):
        resources = self.network_api.list_floatingips().get('floatingips')
        for resource in resources:
            self._scrape_resource(resource['id'], resource['id'],
                                  'os_floating_ip_association', None, metadata=resource)
    def scrape_neutron_networks(self):
        resources = self.network_api.list_networks().get('networks')
        for resource in resources:
            self._scrape_resource(resource['id'], resource['name'],
                                  'os_net', None, metadata=resource)
    def scrape_neutron_subnets(self):
        resources = self.network_api.list_subnets().get('subnets')
        for resource in resources:
            self._scrape_resource(resource['id'], resource['name'],
                                  'os_subnet', None, metadata=resource)
    def scrape_neutron_ports(self):
        resources = self.network_api.list_ports().get('ports')
        for resource in resources:
            self._scrape_resource(resource['id'], resource['name'],
                                  'os_port', None, metadata=resource)
    # heat resources
    def scrape_heat_resource_types(self):
        resource_types = self.orch_api.resource_types.list(
            search_opts={'all_tenants': 1})
        for resource_type in resource_types:
            resource = resource_type.to_dict()
            self._scrape_resource(resource, resource,
                                  'os_resource_type', None, metadata=resource)
    def scrape_heat_stacks(self):
        if self.scope == 'global':
            search_opts = {'all_tenants': 1}
        else:
            search_opts = None
        stacks = self.orch_api.stacks.list(
            search_opts=search_opts)
        for stack in stacks:
            resource = stack.to_dict()
            resource['resources'] = []
            # Nested stack resources; listing can fail for some stacks,
            # in which case the stack is still recorded without them.
            try:
                resources = self.orch_api.resources.list(stack.id,
                                                         nested_depth=2)
                for stack_resource in resources:
                    resource['resources'].append(stack_resource.to_dict())
            except HTTPBadRequest as exception:
                logger.error(exception)
            self._scrape_resource(resource['id'], resource['stack_name'],
                                  'os_stack', None, metadata=resource)
|
{"/infra_scraper/server.py": ["/infra_scraper/main.py"], "/infra_scraper/input/openstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/tests/test_main.py": ["/infra_scraper/main.py"], "/infra_scraper/storage/neo4j.py": ["/infra_scraper/storage/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/amazon.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/output/raw.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/terraform.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/kubernetes.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/saltstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/storage/file.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/storage/etcd.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/cli.py": ["/infra_scraper/main.py", "/infra_scraper/server.py"], "/infra_scraper/output/count.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/reclass.py": ["/infra_scraper/input/saltstack.py", "/infra_scraper/utils.py"], "/infra_scraper/output/vis.py": ["/infra_scraper/output/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/base.py": ["/infra_scraper/utils.py"], "/infra_scraper/main.py": ["/infra_scraper/utils.py"]}
|
14,686
|
slimakcz/infra-scraper
|
refs/heads/master
|
/infra_scraper/tests/test_main.py
|
import pytest
import json
import os
from infra_scraper.main import _get_module
# Build the parametrisation list from constructors.json, which maps module
# labels to dotted constructor class paths.
# NOTE(review): os.path.realpath('{}/..'.format(__file__)) resolves the
# parent of this test file and then takes its dirname — verify this really
# points at the directory containing constructors.json.
modules_file = os.path.join(
    os.path.dirname(os.path.realpath('{}/..'.format(__file__))), 'constructors.json')
with open(modules_file) as fileneco:
    modules_dict = json.loads(fileneco.read())
modules_list = []
for module_label, module_class in modules_dict.items():
    modules_list.append((module_label, module_class))
@pytest.mark.parametrize("test_input,expected_class", modules_list)
def test_load_module(test_input, expected_class):
    # _get_module must resolve a label to the class named in the mapping.
    assert _get_module(test_input).__name__ == expected_class.split('.')[-1]
|
{"/infra_scraper/server.py": ["/infra_scraper/main.py"], "/infra_scraper/input/openstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/tests/test_main.py": ["/infra_scraper/main.py"], "/infra_scraper/storage/neo4j.py": ["/infra_scraper/storage/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/amazon.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/output/raw.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/terraform.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/kubernetes.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/saltstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/storage/file.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/storage/etcd.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/cli.py": ["/infra_scraper/main.py", "/infra_scraper/server.py"], "/infra_scraper/output/count.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/reclass.py": ["/infra_scraper/input/saltstack.py", "/infra_scraper/utils.py"], "/infra_scraper/output/vis.py": ["/infra_scraper/output/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/base.py": ["/infra_scraper/utils.py"], "/infra_scraper/main.py": ["/infra_scraper/utils.py"]}
|
14,687
|
slimakcz/infra-scraper
|
refs/heads/master
|
/infra_scraper/storage/neo4j.py
|
from .base import BaseStorage
import os
import glob
import yaml
import logging
from infra_scraper.utils import to_camel_case, ClassRegistry
from neomodel import config, StructuredNode, StringProperty, IntegerProperty, JSONProperty
from neomodel.match import OUTGOING, INCOMING, EITHER
from neomodel.relationship_manager import RelationshipManager
from neomodel.relationship import StructuredRel
logger = logging.getLogger(__name__)
registry = ClassRegistry()
class ResourceRel(StructuredRel):
    """Properties carried by every relationship edge stored in Neo4j."""
    size = IntegerProperty(default=1)
    status = StringProperty(default='unknown')
class RelationshipDefinition(object):
    """Local replacement for neomodel's relationship definition.

    Resolves target node classes through the module-level registry instead
    of neomodel's own class registry, so dynamically created node types
    (see Neo4jStorage.convert_resources) can be referenced by name.
    """
    def __init__(self, relation_type, cls_name, direction, manager=RelationshipManager, model=None):
        # cls_name may be a class or a string looked up lazily in registry.
        self._raw_class = cls_name
        self.manager = manager
        self.definition = {}
        self.definition['relation_type'] = relation_type
        self.definition['direction'] = direction
        self.definition['model'] = model
    def _lookup_node_class(self):
        # Deferred resolution: string names are resolved against the
        # registry only when the manager is actually built.
        if not isinstance(self._raw_class, str):
            self.definition['node_class'] = self._raw_class
        else:
            name = self._raw_class
            self.definition['node_class'] = registry.get_type(name)
    def build_manager(self, source, name):
        # Returns a RelationshipManager bound to *source* under *name*.
        self._lookup_node_class()
        return self.manager(source, name, self.definition)
class ZeroOrMore(RelationshipManager):
    """
    A relationship of zero or more nodes (the default cardinality).
    """
    description = "zero or more relationships"
def _relate(cls_name, direction, rel_type, cardinality=None, model=None):
    """Build a RelationshipDefinition after validating the edge model.

    *cardinality* is passed through as the manager class.
    """
    if model is not None and not issubclass(model, (StructuredRel,)):
        raise ValueError('model must be a StructuredRel')
    return RelationshipDefinition(rel_type, cls_name, direction, cardinality, model)
def RelationshipTo(cls_name, rel_type, cardinality=ZeroOrMore, model=None):
return _relate(cls_name, OUTGOING, rel_type, cardinality, model)
def RelationshipFrom(cls_name, rel_type, cardinality=ZeroOrMore, model=None):
return _relate(cls_name, INCOMING, rel_type, cardinality, model)
def Relationship(cls_name, rel_type, cardinality=ZeroOrMore, model=None):
    # Undirected relationship between the owning node and cls_name.
    return _relate(cls_name, EITHER, rel_type, cardinality, model)
class Neo4jStorage(BaseStorage):
    """Storage backend that persists scraped resources and relations to Neo4j.

    Node and relationship classes are generated dynamically (via ``type``)
    from the scraped ``resource_types``/``relation_types`` metadata and kept
    in the module-level ``registry``.
    """

    def __init__(self, **kwargs):
        super(Neo4jStorage, self).__init__(**kwargs)
        # neomodel reads its connection settings from this module-level config.
        config.DATABASE_URL = kwargs['database_url']

    def convert_relations(self, relation_types):
        """Register a StructuredRel subclass for every scraped relation type."""
        for relation_name, relation in relation_types.items():
            registry.add(type(
                relation_name,
                (ResourceRel,),
                relation.get('model', {})))

    def convert_resources(self, resource_types):
        """Register a StructuredNode subclass for every scraped resource type."""
        for resource_name, resource in resource_types.items():
            fields = {
                'uid': StringProperty(unique_index=True),
                'name': StringProperty(required=True),
                'kind': StringProperty(required=True),
                'metadata': JSONProperty(required=True),
            }
            for field_name, field in resource.get('model', {}).items():
                cls_name = field.pop('type')
                target_cls = field.pop('target')
                model_name = field.pop('model')
                field['model'] = registry.get_type(model_name)
                # Resolve e.g. 'relationship_to' -> the RelationshipTo factory
                # defined in this module.
                fields[field_name] = globals().get(to_camel_case(cls_name))(target_cls, model_name, **field)
            registry.add(type(resource_name,
                              (StructuredNode,), fields))

    def _get_last_timestamp(self, name):
        """Return the timestamp (filename stem) of the newest YAML sink."""
        sinks = glob.glob('{}/*.yaml'.format(self._get_storage_dir(name)))
        last_sink = max(sinks, key=os.path.getctime)
        return last_sink.split('/')[-1].replace('.yaml', '')

    def save_data(self, name, data):
        """Persist one scraping run: save all nodes, then connect relations."""
        self.convert_relations(data['relation_types'])
        self.convert_resources(data['resource_types'])
        resources = {}
        for resource_type_name, resource_type in data['resources'].items():
            cls = registry.get_type(resource_type_name)
            for resource_name, resource in resource_type.items():
                resources[resource['uid']] = cls(**resource).save()
        for relation_type_name, relation_type in data['relations'].items():
            for relation in relation_type:
                # Skip edges whose endpoints were not scraped.
                if relation['source'] in resources and relation['target'] in resources:
                    source = resources[relation['source']]
                    target = resources[relation['target']]
                    try:
                        rel_field = data['relation_types'][relation_type_name]['relation'][source.kind]
                    except KeyError:
                        # No kind-specific relation field; use the catch-all.
                        rel_field = data['relation_types'][relation_type_name]['relation']['default']
                    manager = getattr(source, rel_field).build_manager(source, relation_type_name)
                    manager.connect(target, {})
        self.last_timestamp = data['timestamp']

    def load_data(self, name):
        """Load the newest YAML sink for *name*; return None on parse error."""
        data = None
        self.last_timestamp = self._get_last_timestamp(name)
        filename = '{}/{}.yaml'.format(self._get_storage_dir(name),
                                       self.last_timestamp)
        with open(filename, 'r') as stream:
            try:
                # safe_load: the sink is plain data and must never construct
                # arbitrary Python objects.
                data = yaml.safe_load(stream)
            except yaml.YAMLError as exception:
                logger.error(exception)
        return data

    def save_output_data(self, name, kind, data):
        # Output snapshots are not supported by this backend.
        pass

    def load_output_data(self, name, kind):
        # Output snapshots are not supported by this backend.
        pass
|
{"/infra_scraper/server.py": ["/infra_scraper/main.py"], "/infra_scraper/input/openstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/tests/test_main.py": ["/infra_scraper/main.py"], "/infra_scraper/storage/neo4j.py": ["/infra_scraper/storage/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/amazon.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/output/raw.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/terraform.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/kubernetes.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/saltstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/storage/file.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/storage/etcd.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/cli.py": ["/infra_scraper/main.py", "/infra_scraper/server.py"], "/infra_scraper/output/count.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/reclass.py": ["/infra_scraper/input/saltstack.py", "/infra_scraper/utils.py"], "/infra_scraper/output/vis.py": ["/infra_scraper/output/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/base.py": ["/infra_scraper/utils.py"], "/infra_scraper/main.py": ["/infra_scraper/utils.py"]}
|
14,688
|
slimakcz/infra-scraper
|
refs/heads/master
|
/infra_scraper/input/amazon.py
|
# -*- coding: utf-8 -*-
import boto3
from infra_scraper.input.base import BaseInput
from infra_scraper.utils import setup_logger
logger = setup_logger('input.aws')
class AmazonWebServicesInput(BaseInput):
    """Scrapes EC2 and S3 resources from AWS via boto3 resource collections."""

    def __init__(self, **kwargs):
        self.kind = 'aws'
        self.scope = kwargs.get('scope', 'local')
        super(AmazonWebServicesInput, self).__init__(**kwargs)
        self.ec2_client = boto3.resource('ec2')
        self.s3_client = boto3.resource('s3')

    @staticmethod
    def _item_data(item):
        """Return a boto3 item's raw payload dict.

        Drops the non-serializable 'resource_model'/'client' references from
        the item's meta before the payload is stored as metadata.
        """
        resource = item.meta.__dict__
        resource.pop('resource_model')
        resource.pop('client')
        return resource['data']

    def scrape_all_resources(self):
        # self.scrape_ec2_images()
        # self.scrape_ec2_elastic_ips()
        self.scrape_ec2_instances()
        self.scrape_ec2_internet_gateways()
        self.scrape_ec2_subnets()
        self.scrape_ec2_vpcs()
        self.scrape_ec2_key_pairs()
        self.scrape_s3_buckets()

    def _create_relations(self):
        """Link instances to their VPC, key pair and subnet (when scraped)."""
        for resource_id, resource in self.resources.get('ec2_instance', {}).items():
            if 'VpcId' in resource['metadata']:
                if resource['metadata']['VpcId'] in self.resources.get('ec2_vpc', {}):
                    self._scrape_relation(
                        'in_ec2_vpc',
                        resource_id,
                        resource['metadata']['VpcId'])
            if 'KeyName' in resource['metadata']:
                if resource['metadata']['KeyName'] in self.resources.get('ec2_key_pair', {}):
                    self._scrape_relation(
                        'using_ec2_key_pair',
                        resource_id,
                        resource['metadata']['KeyName'])
            if 'SubnetId' in resource['metadata']:
                if resource['metadata']['SubnetId'] in self.resources.get('ec2_subnet', {}):
                    self._scrape_relation(
                        'in_ec2_subnet',
                        resource_id,
                        resource['metadata']['SubnetId'])

    def scrape_ec2_elastic_ips(self):
        # NOTE(review): this method previously registered elastic IPs with
        # internet-gateway keys and kind (copy-paste); fixed to elastic-IP
        # fields — confirm the 'eips' collection name against the boto3 EC2
        # ServiceResource API before re-enabling in scrape_all_resources.
        for item in self.ec2_client.eips.all():
            data = self._item_data(item)
            self._scrape_resource(data['AllocationId'],
                                  data.get('PublicIp', data['AllocationId']),
                                  'ec2_elastic_ip', None, metadata=data)

    def scrape_ec2_images(self):
        for item in self.ec2_client.images.all():
            data = self._item_data(item)
            # Prefer the human-readable name; fall back to the image id.
            if 'Name' in data:
                image_name = data['Name']
            else:
                image_name = data['ImageId']
            self._scrape_resource(data['ImageId'],
                                  image_name,
                                  'ec2_image', None, metadata=data)

    def scrape_ec2_instances(self):
        for item in self.ec2_client.instances.all():
            data = self._item_data(item)
            # Prefer the public DNS name when an association exists.
            try:
                name = data['NetworkInterfaces'][0]['Association']['PublicDnsName']
            except Exception:
                name = data['InstanceId']
            self._scrape_resource(data['InstanceId'],
                                  name,
                                  'ec2_instance', None, metadata=data)

    def scrape_ec2_internet_gateways(self):
        for item in self.ec2_client.internet_gateways.all():
            data = self._item_data(item)
            self._scrape_resource(data['InternetGatewayId'],
                                  data['InternetGatewayId'],
                                  'ec2_internet_gateway', None, metadata=data)

    def scrape_ec2_key_pairs(self):
        for item in self.ec2_client.key_pairs.all():
            data = self._item_data(item)
            self._scrape_resource(data['KeyName'],
                                  data['KeyName'],
                                  'ec2_key_pair', None, metadata=data)

    def scrape_ec2_subnets(self):
        for item in self.ec2_client.subnets.all():
            data = self._item_data(item)
            self._scrape_resource(data['SubnetId'],
                                  data['SubnetId'],
                                  'ec2_subnet', None, metadata=data)

    def scrape_ec2_vpcs(self):
        for item in self.ec2_client.vpcs.all():
            data = self._item_data(item)
            # Use the 'Name' tag when present, otherwise the VPC id.
            name = data['VpcId']
            for tag in data.get('Tags', {}):
                if tag['Key'] == 'Name':
                    name = tag['Value']
            self._scrape_resource(data['VpcId'],
                                  name,
                                  'ec2_vpc', None, metadata=data)

    def scrape_s3_buckets(self):
        for item in self.s3_client.buckets.all():
            data = self._item_data(item)
            self._scrape_resource(data['Name'],
                                  data['Name'],
                                  's3_bucket', None, metadata=data)
|
{"/infra_scraper/server.py": ["/infra_scraper/main.py"], "/infra_scraper/input/openstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/tests/test_main.py": ["/infra_scraper/main.py"], "/infra_scraper/storage/neo4j.py": ["/infra_scraper/storage/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/amazon.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/output/raw.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/terraform.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/kubernetes.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/saltstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/storage/file.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/storage/etcd.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/cli.py": ["/infra_scraper/main.py", "/infra_scraper/server.py"], "/infra_scraper/output/count.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/reclass.py": ["/infra_scraper/input/saltstack.py", "/infra_scraper/utils.py"], "/infra_scraper/output/vis.py": ["/infra_scraper/output/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/base.py": ["/infra_scraper/utils.py"], "/infra_scraper/main.py": ["/infra_scraper/utils.py"]}
|
14,689
|
slimakcz/infra-scraper
|
refs/heads/master
|
/infra_scraper/output/raw.py
|
import logging
from .base import BaseOutput
logger = logging.getLogger(__name__)
class RawOutput(BaseOutput):
    """Output plugin that flattens each resource mapping to a metadata list."""

    def __init__(self, **kwargs):
        super(RawOutput, self).__init__(**kwargs)

    def transform_data(self, data):
        """Replace data['resources'] values with lists of raw metadata dicts."""
        data['resources'] = {
            kind: [entry['metadata'] for entry in entries.values()]
            for kind, entries in data['resources'].items()
        }
        return data
|
{"/infra_scraper/server.py": ["/infra_scraper/main.py"], "/infra_scraper/input/openstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/tests/test_main.py": ["/infra_scraper/main.py"], "/infra_scraper/storage/neo4j.py": ["/infra_scraper/storage/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/amazon.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/output/raw.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/terraform.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/kubernetes.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/saltstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/storage/file.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/storage/etcd.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/cli.py": ["/infra_scraper/main.py", "/infra_scraper/server.py"], "/infra_scraper/output/count.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/reclass.py": ["/infra_scraper/input/saltstack.py", "/infra_scraper/utils.py"], "/infra_scraper/output/vis.py": ["/infra_scraper/output/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/base.py": ["/infra_scraper/utils.py"], "/infra_scraper/main.py": ["/infra_scraper/utils.py"]}
|
14,690
|
slimakcz/infra-scraper
|
refs/heads/master
|
/infra_scraper/input/terraform.py
|
# -*- coding: utf-8 -*-
import io
import python_terraform
from pydot import graph_from_dot_data
from infra_scraper.input.base import BaseInput
from infra_scraper.utils import setup_logger
logger = setup_logger('input.terraform')
# Maps '<source kind>-<target kind>' edge pairs from the Terraform dot graph
# to the relation names used by the scraper schema.
relation_mapping = {
    'tf_openstack_compute_instance_v2-tf_openstack_compute_keypair_v2': 'using_tf_key_pair',
    'tf_openstack_networking_subnet_v2-tf_openstack_networking_network_v2': 'in_tf_net',
    'tf_openstack_compute_floatingip_associate_v2-tf_openstack_networking_floatingip_v2': 'links_tf_floating_ip',
    'tf_openstack_networking_floatingip_v2-tf_openstack_networking_router_interface_v2': 'links_tf_floating_ip',
    'tf_openstack_networking_router_interface_v2-tf_openstack_networking_subnet_v2': 'in_tf_subnet',
    'tf_openstack_networking_router_interface_v2-tf_openstack_networking_router_v2': 'links_tf_router',
    'tf_openstack_compute_instance_v2-tf_openstack_networking_network_v2': 'in_tf_net',
    'tf_openstack_compute_floatingip_associate_v2-tf_openstack_compute_instance_v2': 'links_tf_floating_instance',
    'tf_openstack_compute_instance_v2-tf_openstack_compute_secgroup_v2': 'has_tf_security_group',
}
class TerraformInput(BaseInput):
    """Scrapes resources and relations from a Terraform working directory.

    Resources are discovered from ``terraform graph`` output and enriched
    with key/value metadata parsed from ``terraform show``.
    """

    def __init__(self, **kwargs):
        self.kind = 'terraform'
        super(TerraformInput, self).__init__(**kwargs)
        self.client = python_terraform.Terraform(
            working_dir=self.config['dir'])

    def scrape_all_resources(self):
        self.scrape_resources()

    def clean_name(self, name):
        """Strip quoting and the '[root] ' prefix from a dot-graph node name."""
        return name.replace('"', '').replace('[root] ', '').strip()

    def _create_relations(self):
        """Turn dot-graph edges into schema relations via relation_mapping."""
        return_code, raw_data, stderr = self.client.graph(
            no_color=python_terraform.IsFlagged)
        graph = graph_from_dot_data(raw_data)[0]
        for edge in graph.obj_dict['subgraphs']['"root"'][0]['edges']:
            source = self.clean_name(edge[0]).split('.')
            target = self.clean_name(edge[1]).split('.')
            # Only relate resource kinds that were actually scraped.
            if 'tf_{}'.format(source[0]) in self.resources and 'tf_{}'.format(target[0]) in self.resources:
                self._scrape_relation(
                    relation_mapping['tf_{}-tf_{}'.format(source[0], target[0])],
                    '{}.{}'.format(source[0], source[1]),
                    '{}.{}'.format(target[0], target[1]))

    def scrape_resources(self):
        """Collect graph nodes, then attach metadata parsed from `show`."""
        return_code, raw_data, stderr = self.client.graph(
            no_color=python_terraform.IsFlagged)
        graph = graph_from_dot_data(raw_data)[0]
        nodes = {}
        for node in graph.obj_dict['subgraphs']['"root"'][0]['nodes']:
            clean_node = 'tf_{}'.format(self.clean_name(node).split('.')[0])
            if clean_node in self._schema['resource']:
                nodes[self.clean_name(node)] = {
                    'id': self.clean_name(node),
                    'name': self.clean_name(node).split('.')[1],
                    'kind': 'tf_{}'.format(self.clean_name(node).split('.')[0]),
                    'metadata': {}
                }

        def flush(res):
            # Attach accumulated metadata to the matching graph node, if any.
            if res is not None and res['id'] in nodes:
                nodes[res['id']]['metadata'] = res['metadata']

        res = None
        return_code, raw_data, stderr = self.client.show(
            no_color=python_terraform.IsFlagged)
        raw_data = raw_data.split('Outputs:')[0]
        data_buffer = io.StringIO(raw_data)
        for line in data_buffer.readlines():
            if line.strip() == '':
                pass
            elif line.startswith(' '):
                # Indented lines are 'key = value' metadata of the current
                # resource; maxsplit=1 keeps values containing ' = ' intact.
                meta_key, meta_value = line.split(' = ', 1)
                res['metadata'][meta_key.strip()] = meta_value.strip()
            else:
                # A new resource header: flush the previous one first.
                flush(res)
                resource_id = line.replace(' (tainted', '') \
                    .replace(':', '').replace('(', '').replace(')', '').strip()
                try:
                    resource_kind, resource_name = str(resource_id).split('.')
                    res = {
                        'id': resource_id,
                        'name': resource_name.strip(),
                        'kind': 'tf_{}'.format(resource_kind),
                        'metadata': {}
                    }
                except Exception as exception:
                    logger.error(exception)
        # BUG FIX: the final resource in the listing was never flushed,
        # so its metadata was silently dropped.
        flush(res)
        for node_name, node in nodes.items():
            self._scrape_resource(node['id'], node['name'],
                                  node['kind'], None,
                                  metadata=node['metadata'])
|
{"/infra_scraper/server.py": ["/infra_scraper/main.py"], "/infra_scraper/input/openstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/tests/test_main.py": ["/infra_scraper/main.py"], "/infra_scraper/storage/neo4j.py": ["/infra_scraper/storage/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/amazon.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/output/raw.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/terraform.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/kubernetes.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/saltstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/storage/file.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/storage/etcd.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/cli.py": ["/infra_scraper/main.py", "/infra_scraper/server.py"], "/infra_scraper/output/count.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/reclass.py": ["/infra_scraper/input/saltstack.py", "/infra_scraper/utils.py"], "/infra_scraper/output/vis.py": ["/infra_scraper/output/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/base.py": ["/infra_scraper/utils.py"], "/infra_scraper/main.py": ["/infra_scraper/utils.py"]}
|
14,691
|
slimakcz/infra-scraper
|
refs/heads/master
|
/setup.py
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages

VERSION = '0.4'

# Explicit encoding: the README may contain non-ASCII text.
with open('README.rst', encoding='utf-8') as readme:
    LONG_DESCRIPTION = ''.join(readme.readlines())

# Typo fixed: "Infrastrucutre" -> "Infrastructure".
DESCRIPTION = """Infrastructure metadata scraper with support for multiple
resource providers and tools for relational analysis and visualization."""

setup(
    name='infra-scraper',
    version=VERSION,
    description=DESCRIPTION,
    long_description=LONG_DESCRIPTION,
    author='Aleš Komárek',
    author_email='ales.komarek@newt.cz',
    license='Apache License, Version 2.0',
    url='https://github.com/cznewt/infra-scraper/',
    packages=find_packages(),
    install_requires=[
        'pyyaml',
        'msgpack-python',
        'Flask',
        'Click',
        'os_client_config',
        'python-cinderclient',
        'python-glanceclient',
        'python-heatclient',
        'python-keystoneclient',
        'python-novaclient',
        'python-neutronclient',
        'pykube',
        'boto3',
        'tosca-parser',
        'salt-pepper',
        'python-terraform',
        'pydot',
        'graphviz',
        'juju'
    ],
    extras_require={
        'tests': [
            'pytest',
            'flake8'],
        'docs': [
            'sphinx >= 1.4',
            'sphinx_rtd_theme']
    },
    classifiers=[
        'Intended Audience :: System Administrators',
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: POSIX :: Linux',
        'Programming Language :: Python',
        'Programming Language :: Python :: Implementation :: CPython',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.6',
    ],
    entry_points={
        'console_scripts': [
            'scraper_web = infra_scraper.cli:runserver',
            'scraper_status = infra_scraper.cli:status',
            'scraper_get = infra_scraper.cli:scrape',
            'scraper_get_forever = infra_scraper.cli:scrape_forever',
            'scraper_get_all = infra_scraper.cli:scrape_all',
            'scraper_get_all_forever = infra_scraper.cli:scrape_all_forever',
        ],
    },
)
|
{"/infra_scraper/server.py": ["/infra_scraper/main.py"], "/infra_scraper/input/openstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/tests/test_main.py": ["/infra_scraper/main.py"], "/infra_scraper/storage/neo4j.py": ["/infra_scraper/storage/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/amazon.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/output/raw.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/terraform.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/kubernetes.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/saltstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/storage/file.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/storage/etcd.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/cli.py": ["/infra_scraper/main.py", "/infra_scraper/server.py"], "/infra_scraper/output/count.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/reclass.py": ["/infra_scraper/input/saltstack.py", "/infra_scraper/utils.py"], "/infra_scraper/output/vis.py": ["/infra_scraper/output/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/base.py": ["/infra_scraper/utils.py"], "/infra_scraper/main.py": ["/infra_scraper/utils.py"]}
|
14,692
|
slimakcz/infra-scraper
|
refs/heads/master
|
/infra_scraper/input/kubernetes.py
|
# -*- coding: utf-8 -*-
import os
import yaml
import tempfile
import pykube
from requests.exceptions import HTTPError
from infra_scraper.input.base import BaseInput
from infra_scraper.utils import setup_logger
logger = setup_logger('input.kubernetes')
class KubernetesInput(BaseInput):
    """Scrapes Kubernetes resources and their relations via pykube.

    A temporary kubeconfig is assembled from the scraper config at init
    time; scope 'global' additionally scrapes cluster-wide namespaces.
    """

    def __init__(self, **kwargs):
        self.kind = 'kubernetes'
        self.scope = kwargs.get('scope', 'local')
        super(KubernetesInput, self).__init__(**kwargs)
        # Build a throwaway kubeconfig file for pykube, then delete it once
        # the config has been loaded.
        config_file, filename = tempfile.mkstemp()
        config_content = {
            'apiVersion': 'v1',
            'clusters': [{
                'cluster': self.config['cluster'],
                'name': self.name,
            }],
            'contexts': [{
                'context': {
                    'cluster': self.name,
                    'user': self.name,
                },
                'name': self.name,
            }],
            'current-context': self.name,
            'kind': 'Config',
            'preferences': {},
            'users': [{
                'name': self.name,
                'user': self.config['user']
            }]
        }
        os.write(config_file, yaml.safe_dump(config_content).encode())
        os.close(config_file)
        self.config_wrapper = pykube.KubeConfig.from_file(filename)
        os.remove(filename)
        self.api = pykube.HTTPClient(self.config_wrapper)

    def scrape_all_resources(self):
        """Scrape every supported resource kind, then derived containers."""
        self.scrape_config_maps()
        self.scrape_cron_jobs()
        self.scrape_daemon_sets()
        self.scrape_deployments()
        self.scrape_endpoints()
        self.scrape_events()
        self.scrape_horizontal_pod_autoscalers()
        self.scrape_ingresses()
        self.scrape_jobs()
        if self.scope == 'global':
            self.scrape_namespaces()
            self.scrape_nodes()
            self.scrape_persistent_volumes()
        self.scrape_persistent_volume_claims()
        self.scrape_pods()
        self.scrape_replica_sets()
        self.scrape_replication_controllers()
        self.scrape_roles()
        self.scrape_secrets()
        self.scrape_service_accounts()
        self.scrape_services()
        self.scrape_stateful_sets()
        # Containers are synthesized from scraped pod specs, so they must
        # come after scrape_pods().
        self.scrape_containers()

    def _create_relations(self):
        """Derive relations between already-scraped resources.

        First builds name -> uid lookup tables for namespaces, nodes,
        secrets, volumes and service selectors, then wires the edges.
        """
        namespace_2_uid = {}
        for resource_id, resource in self.resources.get('k8s_namespace', {}).items():
            resource_mapping = resource['metadata']['metadata']['name']
            namespace_2_uid[resource_mapping] = resource_id
        node_2_uid = {}
        for resource_id, resource in self.resources.get('k8s_node', {}).items():
            resource_mapping = resource['metadata']['metadata']['name']
            node_2_uid[resource_mapping] = resource_id
        secret_2_uid = {}
        for resource_id, resource in self.resources.get('k8s_secret', {}).items():
            resource_mapping = resource['metadata']['metadata']['name']
            secret_2_uid[resource_mapping] = resource_id
        volume_2_uid = {}
        for resource_id, resource in self.resources.get('k8s_persistent_volume', {}).items():
            resource_mapping = resource['metadata']['metadata']['name']
            volume_2_uid[resource_mapping] = resource_id
        service_run_2_uid = {}
        service_app_2_uid = {}
        for resource_id, resource in self.resources.get('k8s_service', {}).items():
            if resource['metadata']['spec'].get('selector', {}) is not None:
                if resource['metadata']['spec'].get('selector', {}).get('run', False):
                    selector = resource['metadata']['spec']['selector']['run']
                    service_run_2_uid[selector] = resource_id
                if resource['metadata']['spec'].get('selector', {}).get('app', False):
                    selector = resource['metadata']['spec']['selector']['app']
                    service_app_2_uid[selector] = resource_id
        # Define relationships between namespace and all namespaced resources.
        for resource_type, resource_dict in self.resources.items():
            for resource_id, resource in resource_dict.items():
                if 'namespace' in resource.get('metadata', {}).get('metadata', {}):
                    self._scrape_relation(
                        'in_k8s_namespace',
                        resource_id,
                        namespace_2_uid[resource['metadata']['metadata']['namespace']])
        # Define relationships between service accounts and secrets
        for resource_id, resource in self.resources.get('k8s_service_account', {}).items():
            for secret in resource['metadata']['secrets']:
                self._scrape_relation('use_k8s_secret',
                                      resource_id,
                                      secret_2_uid[secret['name']])
        """
        for resource_id, resource in self.resources['k8s_persistent_volume'].items():
            self._scrape_relation('k8s_persistent_volume-k8s_persistent_volume_claim',
                                  resource_id,
                                  volume_2_uid[resource['spec']['volumeName']])
        """
        # Define relationships between replica sets and deployments
        for resource_id, resource in self.resources.get('k8s_replica_set', {}).items():
            deployment_id = resource['metadata']['metadata']['ownerReferences'][0]['uid']
            self._scrape_relation(
                'in_k8s_deployment',
                resource_id,
                deployment_id)
        for resource_id, resource in self.resources.get('k8s_pod', {}).items():
            # Define relationships between pods and nodes
            if resource['metadata']['spec']['nodeName'] is not None:
                node = resource['metadata']['spec']['nodeName']
                self._scrape_relation('on_k8s_node',
                                      resource_id,
                                      node_2_uid[node])
            # Define relationships between pods and replication sets and
            # replication controllers.
            if resource['metadata']['metadata'].get('ownerReferences', False):
                if resource['metadata']['metadata']['ownerReferences'][0]['kind'] == 'ReplicaSet':
                    rep_set_id = resource['metadata']['metadata']['ownerReferences'][0]['uid']
                    self._scrape_relation(
                        'use_k8s_replication',
                        rep_set_id,
                        resource_id)
            # Define relationships between pods and services.
            if resource['metadata']['metadata']['labels'].get('run', False):
                selector = resource['metadata']['metadata']['labels']['run']
                self._scrape_relation(
                    'in_k8s_pod',
                    service_run_2_uid[selector],
                    resource_id)
            if resource['metadata']['metadata']['labels'].get('app', False):
                try:
                    selector = resource['metadata']['metadata']['labels']['app']
                    self._scrape_relation(
                        'in_k8s_pod',
                        service_app_2_uid[selector],
                        resource_id)
                except Exception:
                    # Pods may carry an 'app' label with no matching service.
                    pass

    def _scrape_k8s_resources(self, response, kind):
        """Register every item of a pykube query under the given kind."""
        try:
            for item in response:
                resource = item.obj
                self._scrape_resource(resource['metadata']['uid'],
                                      resource['metadata']['name'],
                                      kind, None, metadata=resource)
        except HTTPError as exception:
            logger.error(exception)

    def scrape_containers(self):
        """Synthesize container resources from already-scraped pod specs."""
        for resource_id, resource in self.resources['k8s_pod'].items():
            for container in resource['metadata']['spec']['containers']:
                container_id = "{}-{}".format(resource_id,
                                              container['name'])
                self._scrape_resource(container_id,
                                      container['name'],
                                      'k8s_container', None,
                                      metadata=container)
                self._scrape_relation('in_k8s_pod',
                                      container_id,
                                      resource_id)

    def scrape_config_maps(self):
        response = pykube.ConfigMap.objects(self.api)
        self._scrape_k8s_resources(response, 'k8s_config_map')

    def scrape_cron_jobs(self):
        response = pykube.CronJob.objects(self.api)
        # NOTE(review): 'k8s_blow_job' looks like a typo for 'k8s_cron_job',
        # but the kind key may be matched by the external schema/config —
        # confirm before renaming.
        self._scrape_k8s_resources(response, 'k8s_blow_job')

    def scrape_daemon_sets(self):
        response = pykube.DaemonSet.objects(self.api)
        self._scrape_k8s_resources(response, 'k8s_daemon_set')

    def scrape_deployments(self):
        response = pykube.Deployment.objects(self.api)
        self._scrape_k8s_resources(response, 'k8s_deployment')

    def scrape_endpoints(self):
        response = pykube.Endpoint.objects(self.api)
        self._scrape_k8s_resources(response, 'k8s_endpoint')

    def scrape_events(self):
        response = pykube.Event.objects(self.api)
        self._scrape_k8s_resources(response, 'k8s_event')

    def scrape_horizontal_pod_autoscalers(self):
        response = pykube.HorizontalPodAutoscaler.objects(self.api)
        self._scrape_k8s_resources(response, 'k8s_horizontal_pod_autoscaler')

    def scrape_ingresses(self):
        response = pykube.Ingress.objects(self.api)
        self._scrape_k8s_resources(response, 'k8s_ingress')

    def scrape_jobs(self):
        response = pykube.Job.objects(self.api)
        self._scrape_k8s_resources(response, 'k8s_job')

    def scrape_namespaces(self):
        response = pykube.Namespace.objects(self.api)
        self._scrape_k8s_resources(response, 'k8s_namespace')

    def scrape_nodes(self):
        response = pykube.Node.objects(self.api)
        self._scrape_k8s_resources(response, 'k8s_node')

    def scrape_persistent_volumes(self):
        response = pykube.PersistentVolume.objects(self.api)
        self._scrape_k8s_resources(response, 'k8s_persistent_volume')

    def scrape_persistent_volume_claims(self):
        response = pykube.PersistentVolumeClaim.objects(self.api)
        self._scrape_k8s_resources(response, 'k8s_persistent_volume_claim')

    def scrape_pods(self):
        response = pykube.Pod.objects(self.api)
        self._scrape_k8s_resources(response, 'k8s_pod')

    def scrape_replica_sets(self):
        response = pykube.ReplicaSet.objects(self.api)
        self._scrape_k8s_resources(response, 'k8s_replica_set')

    def scrape_replication_controllers(self):
        response = pykube.ReplicationController.objects(self.api)
        self._scrape_k8s_resources(response, 'k8s_replication_controller')

    def scrape_roles(self):
        response = pykube.Role.objects(self.api)
        self._scrape_k8s_resources(response, 'k8s_role')

    def scrape_secrets(self):
        response = pykube.Secret.objects(self.api)
        self._scrape_k8s_resources(response, 'k8s_secret')

    def scrape_service_accounts(self):
        response = pykube.ServiceAccount.objects(self.api)
        self._scrape_k8s_resources(response, 'k8s_service_account')

    def scrape_services(self):
        response = pykube.Service.objects(self.api)
        self._scrape_k8s_resources(response, 'k8s_service')

    def scrape_stateful_sets(self):
        response = pykube.StatefulSet.objects(self.api)
        self._scrape_k8s_resources(response, 'k8s_stateful_set')
|
{"/infra_scraper/server.py": ["/infra_scraper/main.py"], "/infra_scraper/input/openstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/tests/test_main.py": ["/infra_scraper/main.py"], "/infra_scraper/storage/neo4j.py": ["/infra_scraper/storage/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/amazon.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/output/raw.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/terraform.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/kubernetes.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/saltstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/storage/file.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/storage/etcd.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/cli.py": ["/infra_scraper/main.py", "/infra_scraper/server.py"], "/infra_scraper/output/count.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/reclass.py": ["/infra_scraper/input/saltstack.py", "/infra_scraper/utils.py"], "/infra_scraper/output/vis.py": ["/infra_scraper/output/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/base.py": ["/infra_scraper/utils.py"], "/infra_scraper/main.py": ["/infra_scraper/utils.py"]}
|
14,693
|
slimakcz/infra-scraper
|
refs/heads/master
|
/infra_scraper/input/saltstack.py
|
# -*- coding: utf-8 -*-
from pepper.libpepper import Pepper
from infra_scraper.input.base import BaseInput
from infra_scraper.utils import setup_logger
logger = setup_logger('input.salt')
class SaltStackInput(BaseInput):
    """Input plugin that scrapes a Salt master through the Pepper HTTP API.

    Requires ``config`` keys ``auth_url``, ``username`` and ``password``.
    """

    def __init__(self, **kwargs):
        # BaseInput.__init__ resolves the graph schema from self.kind,
        # so kind must be assigned before calling it.
        self.kind = 'salt'
        super(SaltStackInput, self).__init__(**kwargs)
        self.api = Pepper(self.config['auth_url'])
        # 'pam' selects the salt-api external-auth backend.
        self.api.login(self.config['username'],
                       self.config['password'],
                       'pam')

    def scrape_all_resources(self):
        """Scrape every supported resource kind from the Salt master."""
        self.scrape_jobs()
        self.scrape_minions()
        self.scrape_services()
        self.scrape_high_states()
        # self.scrape_low_states()

    def _create_relations(self):
        """
        for resource_id, resource in self.resources['salt_low_state'].items():
            # Define relationships between low states and nodes.
            self._scrape_relation(
                'salt_minion-salt_low_state',
                resource['metadata']['minion'],
                resource_id)
            split_service = resource['metadata']['__sls__'].split('.')
            self._scrape_relation(
                'salt_service-salt_low_state',
                '{}|{}.{}'.format(resource['metadata']['minion'],
                                  split_service[0], split_service[1]),
                resource_id)
        """
        for resource_id, resource in self.resources.get('salt_high_state', {}).items():
            # Tie each high state to the minion it was rendered for.
            self._scrape_relation(
                'on_salt_minion',
                resource_id,
                resource['metadata']['minion'])
            # NOTE(review): assumes '__sls__' contains at least one dot
            # (e.g. 'nginx.server') -- confirm for top-level sls files.
            split_service = resource['metadata']['__sls__'].split('.')
            self._scrape_relation(
                'contains_salt_high_state',
                '{}|{}.{}'.format(resource['metadata']['minion'],
                                  split_service[0], split_service[1]),
                resource_id)
        for resource_id, resource in self.resources.get('salt_service', {}).items():
            self._scrape_relation(
                'on_salt_minion',
                resource_id,
                resource['metadata']['host'])
        for resource_id, resource in self.resources.get('salt_job', {}).items():
            self._scrape_relation(
                'by_salt_user',
                resource_id,
                resource['metadata']['User'])
            for minion_id, result in resource['metadata'].get('Result', {}).items():
                # A list result means the run returned error strings
                # instead of a state-id -> state mapping.
                if isinstance(result, list):
                    logger.error(result[0])
                else:
                    for state_id, state in result.items():
                        if '__id__' in state:
                            result_id = '{}|{}'.format(minion_id, state['__id__'])
                            self._scrape_relation(
                                'contains_salt_high_state',
                                resource_id,
                                result_id)

    def scrape_jobs(self):
        """Scrape state.apply/state.sls jobs and the users who ran them."""
        response = self.api.low([{
            'client': 'runner',
            'fun': 'jobs.list_jobs',
            'arg': "search_function='[\"state.apply\", \"state.sls\"]'"
        }]).get('return')[0]
        for job_id, job in response.items():
            if job['Function'] in ['state.apply', 'state.sls']:
                result = self.api.lookup_jid(job_id).get('return')[0]
                job['Result'] = result
                self._scrape_resource(job_id,
                                      job['Function'],
                                      'salt_job', None, metadata=job)
                self._scrape_resource(job['User'],
                                      job['User'],
                                      'salt_user', None, metadata={})

    def scrape_minions(self):
        """Scrape all minions, storing their grains as metadata."""
        response = self.api.low([{
            'client': 'local',
            'tgt': '*',
            'fun': 'grains.items'
        }]).get('return')[0]
        for minion_id, minion in response.items():
            self._scrape_resource(minion_id,
                                  minion_id,
                                  'salt_minion', None, metadata=minion)

    def scrape_services(self):
        """Scrape services exposed by the saltresource.graph_data module."""
        response = self.api.low([{
            'client': 'local',
            'expr_form': 'compound',
            'tgt': 'I@salt:master',
            'fun': 'saltresource.graph_data'
        }]).get('return')[0]
        for minion_id, minion in response.items():
            for service in minion['graph']:
                self._scrape_resource('{}|{}'.format(minion_id,
                                                     service['service']),
                                      service['service'],
                                      'salt_service', None,
                                      metadata=service)

    def scrape_low_states(self):
        """Scrape rendered low states (not called by scrape_all_resources)."""
        response = self.api.low([{
            'client': 'local',
            'tgt': '*',
            'fun': 'state.show_lowstate'
        }]).get('return')[0]
        for minion_id, low_states in response.items():
            for low_state in low_states:
                low_state['minion'] = minion_id
                self._scrape_resource('{}|{}|{}'.format(minion_id,
                                                        low_state['state'],
                                                        low_state['__id__']),
                                      '{} {}'.format(low_state['state'],
                                                     low_state['__id__']),
                                      'salt_low_state', None,
                                      metadata=low_state)

    def scrape_high_states(self):
        """Scrape rendered high states per minion."""
        response = self.api.low([{
            'client': 'local',
            'tgt': '*',
            'fun': 'state.show_highstate'
        }]).get('return')[0]
        for minion_id, high_states in response.items():
            # A list instead of a mapping indicates a rendering error.
            if isinstance(high_states, list):
                logger.error(high_states[0])
            else:
                for high_state_id, high_state in high_states.items():
                    high_state['minion'] = minion_id
                    self._scrape_resource('{}|{}'.format(minion_id,
                                                         high_state_id),
                                          high_state_id,
                                          'salt_high_state', None,
                                          metadata=high_state)
|
{"/infra_scraper/server.py": ["/infra_scraper/main.py"], "/infra_scraper/input/openstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/tests/test_main.py": ["/infra_scraper/main.py"], "/infra_scraper/storage/neo4j.py": ["/infra_scraper/storage/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/amazon.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/output/raw.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/terraform.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/kubernetes.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/saltstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/storage/file.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/storage/etcd.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/cli.py": ["/infra_scraper/main.py", "/infra_scraper/server.py"], "/infra_scraper/output/count.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/reclass.py": ["/infra_scraper/input/saltstack.py", "/infra_scraper/utils.py"], "/infra_scraper/output/vis.py": ["/infra_scraper/output/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/base.py": ["/infra_scraper/utils.py"], "/infra_scraper/main.py": ["/infra_scraper/utils.py"]}
|
14,694
|
slimakcz/infra-scraper
|
refs/heads/master
|
/infra_scraper/storage/file.py
|
from .base import BaseStorage
import os
import glob
import yaml
import msgpack
import logging
logger = logging.getLogger(__name__)
class FileStorage(BaseStorage):
    """Storage backend that persists scraped data as YAML files on disk."""

    def __init__(self, **kwargs):
        super(FileStorage, self).__init__(**kwargs)
        self.storage_dir = kwargs.get('storage_dir', '/tmp/scraper')
        # Create the base directory on first use; only filesystem errors
        # are expected here, so do not swallow arbitrary exceptions.
        try:
            os.stat(self.storage_dir)
        except OSError:
            os.mkdir(self.storage_dir)

    def _storage_dir_exist(self, name):
        # Ensure the per-scraper subdirectory exists.
        try:
            os.stat(self._get_storage_dir(name))
        except OSError:
            os.mkdir(self._get_storage_dir(name))

    def _get_storage_dir(self, name):
        """Return the directory holding all snapshots for *name*."""
        return os.path.join(self.storage_dir, name)

    def _get_last_timestamp(self, name):
        """Return the timestamp of the most recently created snapshot.

        Raises ValueError if no '*.yaml' snapshot exists for *name*.
        """
        sinks = glob.glob('{}/*.yaml'.format(self._get_storage_dir(name)))
        last_sink = max(sinks, key=os.path.getctime)
        return last_sink.split('/')[-1].replace('.yaml', '')

    def save_data(self, name, data):
        """Write *data* to '<dir>/<timestamp>.yaml' and remember the timestamp."""
        self._storage_dir_exist(name)
        filename = '{}/{}.yaml'.format(self._get_storage_dir(name),
                                       data['timestamp'])
        # 'with' closes the file; the explicit close() was redundant.
        with open(filename, 'w') as outfile:
            yaml.safe_dump(data, outfile, default_flow_style=False)
        self.last_timestamp = data['timestamp']

    def load_data(self, name):
        """Load and return the newest snapshot for *name*, or None on parse error."""
        data = None
        self.last_timestamp = self._get_last_timestamp(name)
        filename = '{}/{}.yaml'.format(self._get_storage_dir(name),
                                       self.last_timestamp)
        with open(filename, 'r') as stream:
            try:
                # safe_load matches safe_dump used in save_data and avoids
                # constructing arbitrary Python objects from the file.
                data = yaml.safe_load(stream)
            except yaml.YAMLError as exception:
                logger.error(exception)
        return data

    def save_output_data(self, name, kind, data):
        """Write transformed output data to '<dir>/<timestamp>-<kind>.yml'."""
        self._storage_dir_exist(name)
        filename = '{}/{}-{}.yml'.format(self._get_storage_dir(name),
                                         data['timestamp'],
                                         kind)
        with open(filename, 'w') as outfile:
            yaml.safe_dump(data, outfile, default_flow_style=False)

    def load_output_data(self, name, kind):
        """Load the newest '<timestamp>-<kind>.yml' output, or None on error."""
        last_timestamp = self._get_last_timestamp(name)
        data = None
        filename = '{}/{}-{}.yml'.format(self._get_storage_dir(name),
                                         last_timestamp, kind)
        with open(filename, 'r') as stream:
            try:
                data = yaml.safe_load(stream)
            except Exception as exception:
                logger.error(exception)
                data = None
        return data
|
{"/infra_scraper/server.py": ["/infra_scraper/main.py"], "/infra_scraper/input/openstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/tests/test_main.py": ["/infra_scraper/main.py"], "/infra_scraper/storage/neo4j.py": ["/infra_scraper/storage/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/amazon.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/output/raw.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/terraform.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/kubernetes.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/saltstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/storage/file.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/storage/etcd.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/cli.py": ["/infra_scraper/main.py", "/infra_scraper/server.py"], "/infra_scraper/output/count.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/reclass.py": ["/infra_scraper/input/saltstack.py", "/infra_scraper/utils.py"], "/infra_scraper/output/vis.py": ["/infra_scraper/output/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/base.py": ["/infra_scraper/utils.py"], "/infra_scraper/main.py": ["/infra_scraper/utils.py"]}
|
14,695
|
slimakcz/infra-scraper
|
refs/heads/master
|
/infra_scraper/storage/etcd.py
|
from .base import BaseStorage
import os
import etcd
import yaml
import logging
logger = logging.getLogger(__name__)
class EtcdStorage(BaseStorage):
    """Storage backend nominally backed by etcd.

    NOTE(review): the etcd client is created but never used below -- data
    is read and written on the local filesystem. Confirm whether this
    backend is still work in progress.
    """

    def __init__(self, **kwargs):
        super(EtcdStorage, self).__init__(**kwargs)
        self.client = etcd.Client(
            host='127.0.0.1', port=4003)
        self.storage_path = '/scrape'

    def _get_storage_path(self, name):
        # Fixed: use the *name* argument instead of the undefined self.name.
        return os.path.join(self.storage_path, name)

    def save_data(self, name, data):
        """Write *data* to '<path>/<timestamp>.yaml' and remember the timestamp."""
        # Fixed: pass *name* through (the old call omitted the required
        # argument and raised TypeError) and append '.yaml' so that
        # load_data() can find the file it reads back.
        filename = '{}/{}.yaml'.format(self._get_storage_path(name),
                                       data['timestamp'])
        with open(filename, 'w') as outfile:
            yaml.safe_dump(data, outfile, default_flow_style=False)
        self.last_timestamp = data['timestamp']

    def load_data(self, name):
        """Load the last-saved snapshot for *name*, or None if never saved."""
        data = None
        if self.last_timestamp is not None:
            filename = '{}/{}.yaml'.format(self._get_storage_path(name),
                                           self.last_timestamp)
            with open(filename, 'r') as stream:
                try:
                    # safe_load matches safe_dump used above.
                    data = yaml.safe_load(stream)
                except yaml.YAMLError as exception:
                    logger.error(exception)
        return data
|
{"/infra_scraper/server.py": ["/infra_scraper/main.py"], "/infra_scraper/input/openstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/tests/test_main.py": ["/infra_scraper/main.py"], "/infra_scraper/storage/neo4j.py": ["/infra_scraper/storage/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/amazon.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/output/raw.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/terraform.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/kubernetes.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/saltstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/storage/file.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/storage/etcd.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/cli.py": ["/infra_scraper/main.py", "/infra_scraper/server.py"], "/infra_scraper/output/count.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/reclass.py": ["/infra_scraper/input/saltstack.py", "/infra_scraper/utils.py"], "/infra_scraper/output/vis.py": ["/infra_scraper/output/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/base.py": ["/infra_scraper/utils.py"], "/infra_scraper/main.py": ["/infra_scraper/utils.py"]}
|
14,696
|
slimakcz/infra-scraper
|
refs/heads/master
|
/doc/source/conf.py
|
# -*- coding: utf-8 -*-
# Sphinx configuration for the InfraScraper documentation build.
import sys
import os
# Make the package importable so sphinx.ext.autodoc can resolve modules.
sys.path.insert(0, os.path.abspath('..'))
# Enabled Sphinx extensions.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.doctest',
    'sphinx.ext.intersphinx',
    'sphinx.ext.todo',
    'sphinx.ext.coverage',
    'sphinx.ext.imgmath',
    'sphinx.ext.viewcode',
]
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'InfraScraper'
copyright = u'2017, Aleš Komárek'
# Short X.Y version and full release string.
version = '0.2'
release = '0.2.0'
exclude_patterns = []
pygments_style = 'sphinx'
# HTML output uses the Read the Docs theme.
html_theme = "sphinx_rtd_theme"
html_theme_options = {
    'collapse_navigation': False,
    'display_version': False,
}
# LaTeX / man page output targets.
latex_elements = {}
latex_documents = [
    ('index', 'infra_scraper.tex', u'InfraScraper Documentation',
     u'InfraScraper Team', 'manual'),
]
man_pages = [
    ('index', 'infra_scraper', u'InfraScraper Documentation',
     [u'Komarek'], 1)
]
|
{"/infra_scraper/server.py": ["/infra_scraper/main.py"], "/infra_scraper/input/openstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/tests/test_main.py": ["/infra_scraper/main.py"], "/infra_scraper/storage/neo4j.py": ["/infra_scraper/storage/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/amazon.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/output/raw.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/terraform.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/kubernetes.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/saltstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/storage/file.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/storage/etcd.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/cli.py": ["/infra_scraper/main.py", "/infra_scraper/server.py"], "/infra_scraper/output/count.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/reclass.py": ["/infra_scraper/input/saltstack.py", "/infra_scraper/utils.py"], "/infra_scraper/output/vis.py": ["/infra_scraper/output/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/base.py": ["/infra_scraper/utils.py"], "/infra_scraper/main.py": ["/infra_scraper/utils.py"]}
|
14,697
|
slimakcz/infra-scraper
|
refs/heads/master
|
/infra_scraper/cli.py
|
import click
import yaml
import logging
from infra_scraper.main import InfraScraper
from infra_scraper.server import run
logger = logging.getLogger(__name__)
@click.group()
def cli():
    """InfraScraper command line interface."""
    pass


@click.command()
@click.argument('name')
def scrape(name):
    """Scrape the endpoint NAME once."""
    scraper = InfraScraper()
    scraper.scrape_data(name)


@click.command()
@click.argument('name')
@click.argument('interval', default=10)
def scrape_forever(name, interval):
    """Scrape NAME repeatedly every INTERVAL seconds."""
    scraper = InfraScraper()
    scraper.scrape_data_forever(name, int(interval))


@click.command()
def scrape_all():
    """Scrape all configured endpoints once."""
    scraper = InfraScraper()
    scraper.scrape_all_data()


@click.command()
@click.option('--interval', default=10)
def scrape_all_forever(interval):
    """Scrape all configured endpoints every --interval seconds."""
    # Fixed: '--interval' was declared as a click *argument*, which click
    # maps to a variable named '__interval' and therefore crashed with an
    # unexpected-keyword-argument error on invocation; it is an option.
    scraper = InfraScraper()
    scraper.scrape_all_data_forever(int(interval))


@click.command()
def status():
    """Print the scraper status as YAML."""
    scraper = InfraScraper()
    print(yaml.safe_dump(scraper.status()))


@click.command()
@click.option('--host', default="0.0.0.0")
@click.option('--port', default=8076)
def runserver(host, port):
    """Run the HTTP server on --host and --port."""
    # Fixed: '--host'/'--port' were click *arguments*, which forced the
    # mangled parameter names '__host'/'__port'; options give the natural
    # names and proper '--flag value' CLI behavior.
    run(host=host, port=port)


cli.add_command(status)
cli.add_command(scrape)
cli.add_command(runserver)
cli.add_command(scrape_all)
cli.add_command(scrape_forever)
cli.add_command(scrape_all_forever)

if __name__ == '__main__':
    cli()
|
{"/infra_scraper/server.py": ["/infra_scraper/main.py"], "/infra_scraper/input/openstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/tests/test_main.py": ["/infra_scraper/main.py"], "/infra_scraper/storage/neo4j.py": ["/infra_scraper/storage/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/amazon.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/output/raw.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/terraform.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/kubernetes.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/saltstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/storage/file.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/storage/etcd.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/cli.py": ["/infra_scraper/main.py", "/infra_scraper/server.py"], "/infra_scraper/output/count.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/reclass.py": ["/infra_scraper/input/saltstack.py", "/infra_scraper/utils.py"], "/infra_scraper/output/vis.py": ["/infra_scraper/output/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/base.py": ["/infra_scraper/utils.py"], "/infra_scraper/main.py": ["/infra_scraper/utils.py"]}
|
14,698
|
slimakcz/infra-scraper
|
refs/heads/master
|
/infra_scraper/output/base.py
|
import yaml
import json
import logging
logger = logging.getLogger(__name__)
class BaseOutput(object):
    """Base class for output transformers.

    Subclasses must implement ``transform_data(data) -> dict``.
    """

    def __init__(self, **kwargs):
        pass

    def get_data(self, data_format='raw', raw_data=None):
        """Transform *raw_data* and serialize it.

        *data_format* is one of 'raw' (plain dict), 'yaml' or 'json'.
        The input dict is shallow-copied so its top-level keys are never
        mutated by the transform.
        """
        # raw_data=None instead of a mutable {} default argument.
        data = dict(raw_data) if raw_data is not None else {}
        if data_format == 'yaml':
            return self.yaml_output(self.transform_data(data))
        elif data_format == 'json':
            return self.json_output(self.transform_data(data))
        else:
            return self.transform_data(data)

    def yaml_output(self, data):
        """Serialize *data* to a YAML string."""
        return yaml.safe_dump(data, default_flow_style=False)

    def json_output(self, data):
        """Serialize *data* to a JSON string."""
        return json.dumps(data)
|
{"/infra_scraper/server.py": ["/infra_scraper/main.py"], "/infra_scraper/input/openstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/tests/test_main.py": ["/infra_scraper/main.py"], "/infra_scraper/storage/neo4j.py": ["/infra_scraper/storage/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/amazon.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/output/raw.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/terraform.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/kubernetes.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/saltstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/storage/file.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/storage/etcd.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/cli.py": ["/infra_scraper/main.py", "/infra_scraper/server.py"], "/infra_scraper/output/count.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/reclass.py": ["/infra_scraper/input/saltstack.py", "/infra_scraper/utils.py"], "/infra_scraper/output/vis.py": ["/infra_scraper/output/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/base.py": ["/infra_scraper/utils.py"], "/infra_scraper/main.py": ["/infra_scraper/utils.py"]}
|
14,699
|
slimakcz/infra-scraper
|
refs/heads/master
|
/infra_scraper/constructors.py
|
import json
import os
import threading
# Path of the JSON file (next to this module) mapping constructor names
# to class paths.
_json_path = os.path.join(
    os.path.dirname(os.path.realpath(__file__)), 'constructors.json')
# Lazily-loaded cache of the mapping; guarded by the lock below.
_class_mapping = None
_class_mapping_lock = threading.Lock()


def get_constructor_mapping():
    """Return a copy of the constructor mapping, loading it on first use.

    Uses double-checked locking so concurrent callers load the JSON file
    at most once; every caller receives its own shallow copy, so mutating
    the result never affects the cache.
    """
    global _class_mapping
    if _class_mapping is not None:
        return _class_mapping.copy()
    with _class_mapping_lock:
        # Re-check under the lock: another thread may have populated the
        # cache while we were waiting to acquire it.
        if _class_mapping is not None:
            return _class_mapping.copy()
        tmp_class_mapping = {}
        with open(_json_path, 'r') as json_file:
            tmp_class_mapping.update(json.load(json_file))
        _class_mapping = tmp_class_mapping
    return tmp_class_mapping.copy()
|
{"/infra_scraper/server.py": ["/infra_scraper/main.py"], "/infra_scraper/input/openstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/tests/test_main.py": ["/infra_scraper/main.py"], "/infra_scraper/storage/neo4j.py": ["/infra_scraper/storage/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/amazon.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/output/raw.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/terraform.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/kubernetes.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/saltstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/storage/file.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/storage/etcd.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/cli.py": ["/infra_scraper/main.py", "/infra_scraper/server.py"], "/infra_scraper/output/count.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/reclass.py": ["/infra_scraper/input/saltstack.py", "/infra_scraper/utils.py"], "/infra_scraper/output/vis.py": ["/infra_scraper/output/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/base.py": ["/infra_scraper/utils.py"], "/infra_scraper/main.py": ["/infra_scraper/utils.py"]}
|
14,700
|
slimakcz/infra-scraper
|
refs/heads/master
|
/infra_scraper/output/count.py
|
from datetime import datetime
import logging
from .base import BaseOutput
logger = logging.getLogger(__name__)
class CountOutput(BaseOutput):
    """Output transformer that reduces resources and relations to counts."""

    def transform_data(self, data):
        """Replace resource/relation payloads with their item counts.

        Drops the verbose 'resource_types' key and adds a human-readable
        'date' string derived from 'timestamp'.
        """
        data['resources'] = {name: len(items)
                             for name, items in data['resources'].items()}
        data['relations'] = {name: len(items)
                             for name, items in data['relations'].items()}
        # pop with a default: tolerate payloads without 'resource_types'.
        data.pop('resource_types', None)
        data['date'] = datetime.fromtimestamp(
            data['timestamp']).strftime('%Y-%m-%dT%H:%M:%S')
        return data
|
{"/infra_scraper/server.py": ["/infra_scraper/main.py"], "/infra_scraper/input/openstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/tests/test_main.py": ["/infra_scraper/main.py"], "/infra_scraper/storage/neo4j.py": ["/infra_scraper/storage/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/amazon.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/output/raw.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/terraform.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/kubernetes.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/saltstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/storage/file.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/storage/etcd.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/cli.py": ["/infra_scraper/main.py", "/infra_scraper/server.py"], "/infra_scraper/output/count.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/reclass.py": ["/infra_scraper/input/saltstack.py", "/infra_scraper/utils.py"], "/infra_scraper/output/vis.py": ["/infra_scraper/output/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/base.py": ["/infra_scraper/utils.py"], "/infra_scraper/main.py": ["/infra_scraper/utils.py"]}
|
14,701
|
slimakcz/infra-scraper
|
refs/heads/master
|
/infra_scraper/input/reclass.py
|
# -*- coding: utf-8 -*-
from infra_scraper.input.saltstack import SaltStackInput
from infra_scraper.utils import setup_logger
logger = setup_logger('input.reclass')
class SaltReclassInput(SaltStackInput):
    """SaltStack input variant that models services from reclass metadata."""

    def __init__(self, **kwargs):
        super(SaltReclassInput, self).__init__(**kwargs)
        # NOTE(review): SaltStackInput already sets kind='salt' before its
        # own super().__init__ call; this reassignment appears redundant.
        self.kind = 'salt'

    def _create_relations(self):
        # Only job->minion relations are derived here; service relations
        # are created eagerly in scrape_resources().
        for resource_id, resource in self.resources.get('salt_job', {}).items():
            for minion_id, result in resource['metadata'].get('Result', {}).items():
                self._scrape_relation(
                    'on_salt_minion',
                    resource_id,
                    minion_id)

    def scrape_all_resources(self):
        # Minions must be scraped first: scrape_resources() consults
        # self.resources['salt_minion'] for already-known hosts.
        self.scrape_minions()
        self.scrape_resources()
        self.scrape_jobs()
        # self.scrape_services()

    def scrape_resources(self):
        """Scrape the reclass service graph from the Salt master."""
        response = self.api.low([{
            'client': 'local',
            'expr_form': 'compound',
            'tgt': 'I@salt:master',
            'fun': 'reclass.graph_data'
        }]).get('return')[0]
        for minion_id, minion in response.items():
            for service in minion['graph']:
                service_id = '{}|{}'.format(service['host'],
                                            service['service'])
                self._scrape_resource(service_id,
                                      service['service'],
                                      'salt_service', None,
                                      metadata=service)
                self._scrape_relation(
                    'on_salt_minion',
                    service_id,
                    service['host'])
                for rel in service['relations']:
                    # Create placeholder resources for relation targets we
                    # have not seen yet.
                    if rel['host'] not in self.resources['salt_minion']:
                        self._scrape_resource(rel['host'],
                                              rel['host'],
                                              'salt_minion', None,
                                              metadata={})
                    rel_service_id = '{}|{}'.format(rel['host'],
                                                    rel['service'])
                    if rel_service_id not in self.resources['salt_service']:
                        self._scrape_resource(rel_service_id,
                                              rel['service'],
                                              'salt_service', None,
                                              metadata={})
                        self._scrape_relation(
                            'on_salt_minion',
                            rel_service_id,
                            rel['host'])
                    self._scrape_relation(
                        'requires_salt_service',
                        service_id,
                        rel_service_id)

    def scrape_jobs(self):
        """Scrape state.apply/state.sls jobs together with their results."""
        response = self.api.low([{
            'client': 'runner',
            'fun': 'jobs.list_jobs',
            'arg': "search_function='[\"state.apply\", \"state.sls\"]'"
        }]).get('return')[0]
        for job_id, job in response.items():
            if job['Function'] in ['state.apply', 'state.sls']:
                result = self.api.lookup_jid(job_id).get('return')[0]
                job['Result'] = result
                self._scrape_resource(job_id,
                                      job['Function'],
                                      'salt_job', None, metadata=job)
|
{"/infra_scraper/server.py": ["/infra_scraper/main.py"], "/infra_scraper/input/openstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/tests/test_main.py": ["/infra_scraper/main.py"], "/infra_scraper/storage/neo4j.py": ["/infra_scraper/storage/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/amazon.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/output/raw.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/terraform.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/kubernetes.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/saltstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/storage/file.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/storage/etcd.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/cli.py": ["/infra_scraper/main.py", "/infra_scraper/server.py"], "/infra_scraper/output/count.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/reclass.py": ["/infra_scraper/input/saltstack.py", "/infra_scraper/utils.py"], "/infra_scraper/output/vis.py": ["/infra_scraper/output/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/base.py": ["/infra_scraper/utils.py"], "/infra_scraper/main.py": ["/infra_scraper/utils.py"]}
|
14,702
|
slimakcz/infra-scraper
|
refs/heads/master
|
/infra_scraper/output/vis.py
|
import logging
from .base import BaseOutput
from datetime import datetime
from infra_scraper.utils import get_node_icon
logger = logging.getLogger(__name__)
class VisOutput(BaseOutput):
    """Transform scraped data into the hive-plot structure used by the UI."""

    def _build_layout(self, data, skipped=()):
        """Shared layout builder for all kinds.

        Flattens resources (dropping their metadata), builds one axis per
        resource kind not listed in *skipped*, and keeps only relations
        whose both endpoints survived the filtering.
        """
        resources = {}
        relations = []
        axes = {}
        kinds = sum(1 for name in data['resources'] if name not in skipped)
        i = 0
        for resource_name, resource_data in data['resources'].items():
            if resource_name in skipped:
                continue
            for resource_id, resource_item in resource_data.items():
                resource_item.pop('metadata')
                resources[resource_id] = resource_item
            icon = get_node_icon(data['resource_types'][resource_name]['icon'])
            axes[resource_name] = {
                'x': i,
                # Axes are spread evenly over the full circle.
                'angle': 360 / kinds * i,
                'innerRadius': 0.2,
                'outerRadius': 1.0,
                'name': data['resource_types'][resource_name]['name'],
                'items': len(data['resources'][resource_name]),
                'kind': resource_name,
                'icon': icon,
            }
            i += 1
        for relation_name, relation_data in data['relations'].items():
            for relation in relation_data:
                if relation['source'] in resources and relation['target'] in resources:
                    relations.append(relation)
        data['resources'] = resources
        data['relations'] = relations
        data['axes'] = axes
        return data

    def _transform_openstack(self, data):
        # Ports are too numerous to visualize; exclude them from the plot.
        return self._build_layout(data, skipped=('os_port',))

    def _transform_default(self, data):
        return self._build_layout(data)

    def transform_data(self, data):
        """Dispatch to the kind-specific transform and stamp a readable date."""
        data['date'] = datetime.fromtimestamp(
            data['timestamp']).strftime('%Y-%m-%dT%H:%M:%S')
        if data['kind'] == 'openstack':
            return self._transform_openstack(data)
        else:
            return self._transform_default(data)
class VisHierOutput(BaseOutput):
    """Transform scraped data into a hierarchical (tree) structure."""

    def __init__(self, **kwargs):
        super(VisHierOutput, self).__init__(**kwargs)

    def _transform_openstack(self, data):
        resources = {}
        out_resources = []
        # NOTE(review): 'resources' is the empty dict created just above,
        # so this loop never executes and the openstack transform always
        # returns an empty resource list. Looks unfinished -- compare
        # _transform_default(); confirm intended behavior before changing.
        for resource_name, resource_data in resources.items():
            out_resources.append({
                'name': resource_name,
                'size': 1,
                'relations': resource_data['relations']
            })
        data['resources'] = out_resources
        data.pop('relations')
        data.pop('resource_types')
        return data

    def _transform_default(self, data):
        resources = {}
        out_resources = []
        # Collect salt services under a synthetic 'root|' prefix.
        for resource_name, resource_data in data['resources'].items():
            if resource_name == 'salt_service':
                for resource_id, resource_item in resource_data.items():
                    resource_item['relations'] = []
                    resources['root|{}'.format(resource_id)] = resource_item
        # Attach service-to-service relations to their source node.
        for relation_name, relation_data in data['relations'].items():
            if relation_name == 'salt_service-salt_service':
                for relation in relation_data:
                    relation['source'] = 'root|{}'.format(relation['source'])
                    relation['target'] = 'root|{}'.format(relation['target'])
                    resources[relation['source']]['relations'].append(
                        relation['target'])
        # Flatten into the list-of-nodes shape the hierarchy view expects.
        for resource_name, resource_data in resources.items():
            out_resources.append({
                'name': resource_name,
                'size': 1,
                'relations': resource_data['relations']
            })
        data['resources'] = out_resources
        data.pop('relations')
        data.pop('resource_types')
        return data

    def transform_data(self, data):
        """Dispatch to the kind-specific transform and stamp a readable date."""
        data['date'] = datetime.fromtimestamp(data['timestamp']).strftime('%Y-%m-%dT%H:%M:%S')
        if data['kind'] == 'openstack':
            return self._transform_openstack(data)
        else:
            return self._transform_default(data)
|
{"/infra_scraper/server.py": ["/infra_scraper/main.py"], "/infra_scraper/input/openstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/tests/test_main.py": ["/infra_scraper/main.py"], "/infra_scraper/storage/neo4j.py": ["/infra_scraper/storage/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/amazon.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/output/raw.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/terraform.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/kubernetes.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/saltstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/storage/file.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/storage/etcd.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/cli.py": ["/infra_scraper/main.py", "/infra_scraper/server.py"], "/infra_scraper/output/count.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/reclass.py": ["/infra_scraper/input/saltstack.py", "/infra_scraper/utils.py"], "/infra_scraper/output/vis.py": ["/infra_scraper/output/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/base.py": ["/infra_scraper/utils.py"], "/infra_scraper/main.py": ["/infra_scraper/utils.py"]}
|
14,703
|
slimakcz/infra-scraper
|
refs/heads/master
|
/infra_scraper/input/base.py
|
import time
from infra_scraper.utils import setup_logger, get_graph_schema
logger = setup_logger('input.base')
class BaseInput(object):
    """Common base for input plugins: accumulates resources and relations.

    Subclasses must set ``self.kind`` *before* calling this __init__
    (the graph schema is resolved from it) and implement
    _create_relations().
    """

    def __init__(self, **kwargs):
        self.name = kwargs['name']
        self.config = kwargs['config']
        # kind -> {uid -> resource dict}
        self.resources = {}
        self.resource_types = {}
        # kind -> list of {'source': ..., 'target': ...} dicts
        self.relations = {}
        self.timestamp = int(time.time())
        self._reverse_map = None
        self._schema = get_graph_schema(self.kind)

    def _create_relations(self):
        raise NotImplementedError

    def to_dict(self):
        """Finalize relations and return the complete scrape payload."""
        self._create_relations()
        return {
            'name': self.name,
            'kind': self.kind,
            'timestamp': self.timestamp,
            'resource_types': self._get_resource_types(),
            'resources': self.resources,
            'relation_types': self._get_relation_types(),
            'relations': self.relations,
        }

    def _get_resource_types(self):
        """Return schema entries for the resource kinds actually scraped."""
        return {name: self._schema['resource'][name]
                for name in self.resources}

    def _get_relation_types(self):
        """Return schema entries for the relation kinds actually scraped."""
        return {name: self._schema['relation'][name]
                for name in self.relations}

    def _get_resource_mapping(self):
        """Return (and cache) a map of schema resource id -> resource name."""
        if self._reverse_map is None:
            self._reverse_map = {
                resource['resource']: resource_name
                for resource_name, resource in self._schema['resource'].items()
            }
        return self._reverse_map

    def _scrape_resource(self, uid, name, kind, link=None, metadata=None):
        """Register a resource of *kind* under *uid*.

        *link* is accepted for interface compatibility but unused.
        """
        if kind not in self.resources:
            self.resources[kind] = {}
        self.resources[kind][uid] = {
            'uid': uid,
            'kind': kind,
            'name': name,
            # None -> fresh dict: avoids the shared-mutable-default pitfall.
            'metadata': metadata if metadata is not None else {},
        }

    def _scrape_relation(self, kind, source, target):
        """Register a directed relation of *kind* from *source* to *target*."""
        self.relations.setdefault(kind, []).append({
            'source': source,
            'target': target,
        })
|
{"/infra_scraper/server.py": ["/infra_scraper/main.py"], "/infra_scraper/input/openstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/tests/test_main.py": ["/infra_scraper/main.py"], "/infra_scraper/storage/neo4j.py": ["/infra_scraper/storage/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/amazon.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/output/raw.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/terraform.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/kubernetes.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/saltstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/storage/file.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/storage/etcd.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/cli.py": ["/infra_scraper/main.py", "/infra_scraper/server.py"], "/infra_scraper/output/count.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/reclass.py": ["/infra_scraper/input/saltstack.py", "/infra_scraper/utils.py"], "/infra_scraper/output/vis.py": ["/infra_scraper/output/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/base.py": ["/infra_scraper/utils.py"], "/infra_scraper/main.py": ["/infra_scraper/utils.py"]}
|
14,704
|
slimakcz/infra-scraper
|
refs/heads/master
|
/infra_scraper/main.py
|
import os
import importlib
import time
from infra_scraper import constructors
from infra_scraper import exceptions
from infra_scraper.utils import load_yaml_json_file, setup_logger
# Module-level logger.
logger = setup_logger(__name__)

# Storage backend name and config-file path, overridable via environment.
config_backend = os.environ.get('INFRA_SCRAPER_CONFIG_BACKEND',
                                'localfs')
config_file = os.environ.get('INFRA_SCRAPER_CONFIG_PATH',
                             '/etc/infra-scraper/config.yaml')
def _get_module(module_key):
    """Resolve and return the constructor class registered under module_key.

    Raises InfraScraperException when the key is unknown, the module
    cannot be imported, or the constructor attribute is missing.
    """
    class_mapping = constructors.get_constructor_mapping()
    if module_key not in class_mapping:
        # Fix: "unkown" -> "unknown" in the error message.
        raise exceptions.InfraScraperException(
            "Service {module_key} is unknown. Please pass in a client"
            " constructor or submit a patch to infra scraper".format(
                module_key=module_key))
    mod_name, ctr_name = class_mapping[module_key].rsplit('.', 1)
    lib_name = mod_name.split('.')[0]
    try:
        mod = importlib.import_module(mod_name)
    except ImportError as err:
        # Chain the original ImportError for easier debugging.
        raise exceptions.InfraScraperException(
            "Client for '{module_key}' was requested, but"
            " {mod_name} was unable to be imported. Either import"
            " the module yourself and pass the constructor in as an argument,"
            " or perhaps you do not have module {lib_name} installed.".format(
                module_key=module_key,
                mod_name=mod_name,
                lib_name=lib_name)) from err
    try:
        ctr = getattr(mod, ctr_name)
    except AttributeError as err:
        # Fix: "as not found" -> "was not found" in the error message.
        raise exceptions.InfraScraperException(
            "Client for '{module_key}' was requested, but although"
            " {mod_name} imported fine, the constructor at {fullname}"
            " was not found.".format(
                module_key=module_key,
                mod_name=mod_name,
                fullname=class_mapping[module_key])) from err
    return ctr
class InfraScraper(object):
    """Facade tying together input scrapers, output renderers and storage."""

    def __init__(self):
        # Global YAML/JSON config; the 'storage' section selects the
        # persistence backend (defaults to local filesystem).
        self.config = self.get_global_config()
        storage_class = self.config.get('storage', {'backend': 'localfs'})
        self.storage = self._get_module('storage',
                                        storage_class['backend'],
                                        storage_class)

    def _get_module(self, module_file, module_key, module_init=None):
        """Instantiate the class registered as '<module_file>-<module_key>'.

        Fix: 'module_init' previously used a mutable default dict shared
        across calls; default to None instead.
        """
        module_class = _get_module("{}-{}".format(
            module_file, module_key))
        return module_class(**(module_init or {}))

    def get_global_config(self):
        """Load and return the global configuration file."""
        return load_yaml_json_file(config_file)

    def get_config(self, name):
        """Return the named endpoint's config with its name injected."""
        config = self.config['endpoints'][name]
        config['name'] = name
        return config

    def status(self):
        """Return the config annotated with each endpoint's last status.

        NOTE(review): this pops 'config' from the cached self.config
        entries (mutating shared state) -- confirm that is intended.
        """
        config = self.config
        for endpoint_name, endpoint in self.config['endpoints'].items():
            endpoint.pop('config')
            endpoint['status'] = self.get_endpoint_status(endpoint_name)
        return config

    def get_endpoint_status(self, name):
        """Return the cached 'count' output for an endpoint, or None."""
        try:
            data = self.get_cached_data(name, 'count')
        except Exception as e:
            logger.error('Cannot get last status for {}, with error {}.'.format(name, e))
            data = None
        return data

    def scrape_all_data_forever(self, interval):
        """Scrape every endpoint in a loop, sleeping between passes.

        NOTE(review): 'interval' is ignored in favour of the config's
        'scrape_interval' (default 60s) -- confirm intended.
        """
        config = self.get_global_config()
        while True:
            for endpoint_name, endpoint in config['endpoints'].items():
                self.scrape_data(endpoint_name)
            time.sleep(config.get('scrape_interval', 60))

    def scrape_all_data(self):
        """Scrape every endpoint once; in debug mode stop at the first."""
        config = self.get_global_config()
        for endpoint_name, endpoint in config['endpoints'].items():
            if config.get('debug', False):
                return self.scrape_data(endpoint_name)
            try:
                self.scrape_data(endpoint_name)
            except Exception as e:
                logger.error('Scraping endpoint {} failed with error: {}'.format(endpoint_name, e))

    def scrape_data_forever(self, name, interval):
        """Scrape one endpoint in a loop, sleeping 'interval' seconds
        (overridden by the config's 'scrape_interval' when present)."""
        config = self.get_global_config()
        sleep_interval = config.get('scrape_interval', interval)
        while True:
            self.scrape_data(name)
            logger.info('Sleeping for {} seconds.'.format(sleep_interval))
            time.sleep(sleep_interval)

    def scrape_data(self, name):
        """Scrape one endpoint and persist raw plus derived outputs."""
        config = self.get_config(name)
        self.input = self._get_module('input', config['kind'], config)
        self.out_count = self._get_module('output', 'count')
        self.out_vis = self._get_module('output', 'vis')
        self.out_vis_hier = self._get_module('output', 'vis-hier')
        logger.info('Scraping of {} started.'.format(name))
        self.input.scrape_all_resources()
        data = self.input.to_dict()
        # Each consumer gets its own shallow copy of the scraped graph.
        self.storage.save_data(name, data.copy())
        self.storage.save_output_data(name, 'count',
                                      self.out_count.get_data('raw', data.copy()))
        self.storage.save_output_data(name, 'vis',
                                      self.out_vis.get_data('raw', data.copy()))
        self.storage.save_output_data(name, 'vis-hier',
                                      self.out_vis_hier.get_data('raw', data.copy()))
        logger.info('Scraping of {} completed.'.format(name))

    def get_cached_data(self, name, kind):
        """Load a previously saved output from the file storage backend."""
        storage = self._get_module('storage', 'file')
        data = storage.load_output_data(name, kind)
        return data

    def get_data(self, name, kind, format='raw'):
        """Load raw data for an endpoint and render it via output 'kind'."""
        self.output = self._get_module('output', kind)
        data = self.storage.load_data(name)
        return self.output.get_data(format, data)
|
{"/infra_scraper/server.py": ["/infra_scraper/main.py"], "/infra_scraper/input/openstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/tests/test_main.py": ["/infra_scraper/main.py"], "/infra_scraper/storage/neo4j.py": ["/infra_scraper/storage/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/amazon.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/output/raw.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/terraform.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/kubernetes.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/saltstack.py": ["/infra_scraper/input/base.py", "/infra_scraper/utils.py"], "/infra_scraper/storage/file.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/storage/etcd.py": ["/infra_scraper/storage/base.py"], "/infra_scraper/cli.py": ["/infra_scraper/main.py", "/infra_scraper/server.py"], "/infra_scraper/output/count.py": ["/infra_scraper/output/base.py"], "/infra_scraper/input/reclass.py": ["/infra_scraper/input/saltstack.py", "/infra_scraper/utils.py"], "/infra_scraper/output/vis.py": ["/infra_scraper/output/base.py", "/infra_scraper/utils.py"], "/infra_scraper/input/base.py": ["/infra_scraper/utils.py"], "/infra_scraper/main.py": ["/infra_scraper/utils.py"]}
|
14,706
|
arunpogula/Dvara_project
|
refs/heads/master
|
/Taskapp/urls.py
|
from django.urls import path
from .views import Home, sub_category_list
# URL routes: site root -> Home view; AJAX endpoint returning the
# <option> markup for sub-categories of a chosen category.
urlpatterns = [
    path('', Home.as_view(), name="home"),
    path('ajax/sub_category_list/', sub_category_list,
         name='sub_category_list'),
]
|
{"/Taskapp/urls.py": ["/Taskapp/views.py"], "/Taskapp/forms.py": ["/Taskapp/models.py"], "/Taskapp/views.py": ["/Taskapp/forms.py", "/Taskapp/models.py"]}
|
14,707
|
arunpogula/Dvara_project
|
refs/heads/master
|
/Taskapp/forms.py
|
from django import forms
from .models import Category, SubCategory
class UploadFileForm(forms.Form):
    """Single-field form for uploading the category/subcategory workbook."""
    file = forms.FileField()
|
{"/Taskapp/urls.py": ["/Taskapp/views.py"], "/Taskapp/forms.py": ["/Taskapp/models.py"], "/Taskapp/views.py": ["/Taskapp/forms.py", "/Taskapp/models.py"]}
|
14,708
|
arunpogula/Dvara_project
|
refs/heads/master
|
/Taskapp/views.py
|
from django.shortcuts import render, HttpResponse
from django.views import View
from .forms import UploadFileForm
from .models import *
import pandas as pd
from django.db import connection
from django.conf import settings
# Create your views here.
def sub_category_list(request):
    """AJAX view: return <option> markup for sub-categories of 'cat_id'.

    Reads 'cat_id' from the query string and renders one <option> per
    matching SubCategory row.
    """
    rows = SubCategory.objects.filter(
        cat_id=request.GET.get('cat_id')).values_list('cat_id', 'subCategory_name')
    # str.join in one pass instead of quadratic string concatenation.
    # NOTE(review): values are interpolated into HTML unescaped; fine for
    # trusted DB content, but consider django.utils.html.escape.
    html_code = "".join(
        f"<option value = '{cat_id}' > {name} </option>"
        for cat_id, name in rows
    )
    return HttpResponse(html_code)
class Home(View):
    """Upload an Excel workbook and load category/subcategory rows."""

    form_obj = UploadFileForm

    def inserting_rows(self, file_data):
        """Validate sheet names against existing DB tables, then insert rows.

        Returns a human-readable status string.
        """
        tables_list = connection.introspection.table_names()
        df = pd.ExcelFile(file_data)
        sheet_names = df.sheet_names
        # Fix: the original loop only remembered the LAST sheet's result,
        # so a missing earlier sheet was silently ignored.  Require every
        # sheet to have a matching table (and at least one sheet).
        all_tables_exist = bool(sheet_names) and all(
            sheet.lower() in tables_list for sheet in sheet_names)
        if all_tables_exist:
            try:
                df1 = pd.read_excel(df, 'category')
                df2 = pd.read_excel(df, 'subcategory', names=[
                    'id', 'subcategory'])
                dff = pd.merge(df1, df2, on="id")
                # Fix: removed the no-op 'Category.save' / 'SubCategory.save'
                # statements (bare attribute access) and the unused
                # 'category_obj'; objects.create() already persists.
                for i in dff.categories.unique():
                    Category.objects.create(category_name=i)
                    filter_data = dff.loc[dff['categories'] == i]
                    for j in filter_data.subcategory.unique():
                        SubCategory.objects.create(
                            cat_id=Category.objects.latest('id'), subCategory_name=j)
                return "Data loaded sucessfully"
            except Exception as error:
                return f"Error arise while loading the data,{error}"
        else:
            return "Please check the sheet names, no tables are present "

    def get(self, request):
        """Render the upload form together with all existing categories."""
        category_data = Category.objects.all()
        return render(request, 'index.html', {'form': self.form_obj(), 'category': category_data})

    def post(self, request):
        """Handle the uploaded file and report the load status."""
        form = self.form_obj(request.POST, request.FILES)
        if form.is_valid():
            file_data = request.FILES['file']
            resp = self.inserting_rows(file_data)
            return render(request, 'index.html', {"data": resp})
        return HttpResponse("<h1>not working</h1>")
|
{"/Taskapp/urls.py": ["/Taskapp/views.py"], "/Taskapp/forms.py": ["/Taskapp/models.py"], "/Taskapp/views.py": ["/Taskapp/forms.py", "/Taskapp/models.py"]}
|
14,709
|
arunpogula/Dvara_project
|
refs/heads/master
|
/Taskapp/models.py
|
from django.db import models
# Create your models here.
class Category(models.Model):
    """Top-level product category (table: category)."""
    category_name = models.CharField(max_length=50)

    def __str__(self):
        return self.category_name

    class Meta:
        db_table = "category"
class SubCategory(models.Model):
    """Sub-category belonging to one Category (table: subcategory)."""
    cat_id = models.ForeignKey(Category, on_delete=models.CASCADE)
    subCategory_name = models.CharField(max_length=100)

    def __str__(self):
        return self.subCategory_name

    class Meta:
        db_table = "subcategory"
|
{"/Taskapp/urls.py": ["/Taskapp/views.py"], "/Taskapp/forms.py": ["/Taskapp/models.py"], "/Taskapp/views.py": ["/Taskapp/forms.py", "/Taskapp/models.py"]}
|
14,710
|
RGunning/Pokemon_Go_API
|
refs/heads/master
|
/main.py
|
import argparse
import os
import platform
from getpass import getpass
import dirty
import login
def get_acces_token(usr, pws, type):
    """Log in via Google or PTC and return (access_token, login_type).

    Also mirrors the result into the dirty module's globals.
    """
    token = None
    login_kind = None
    if 'goo' not in type:
        print('[!] I am a poketrainer..')
        token = login.login_pokemon(usr, pws)
        login_kind = 'ptc'
    else:
        print('[!] Using google as login..')
        if platform.system() == 'Windows':
            google_data = login.login_google(usr, pws)
            if google_data is not None:
                token = google_data['id_token']
        else:
            token = login.login_google_v2(usr, pws)
        if token is not None:
            login_kind = 'google'
    dirty.accessToken = token
    dirty.globalltype = login_kind
    return token, login_kind
def main():
    """Parse CLI arguments, prompt for missing credentials, and start."""
    if platform.system() == 'Windows':
        os.system("title Pokemon GO API Python")
        os.system("cls")
    else:
        # Catches "Lunux" and "Darwin" (OSX), among others
        os.system("clear")
    parser = argparse.ArgumentParser()
    parser.add_argument("-u", "--username", help="Login", default=None)
    parser.add_argument("-p", "--password", help="Password", default=None)
    parser.add_argument("-t", "--type", help="Google/PTC", required=True)
    parser.add_argument("-l", "--location", help="Location", required=True)
    # parser.add_argument("-d", "--distance", help="Distance", required=True)
    dirty.argsStored = parser.parse_args()
    if not dirty.argsStored.username:
        dirty.argsStored.username = getpass("Username: ")
    if not dirty.argsStored.password:
        dirty.argsStored.password = getpass("Password: ")
    login_type = dirty.argsStored.type.lower()
    if 'ptc' in login_type or 'goo' in login_type:
        # config.distance=dirty.argsStored.distance
        dirty.start()
    else:
        print('[!] used type "%s" only Google or PTC valid' % (login_type))
# Script entry point.
if __name__ == '__main__':
    main()
|
{"/location.py": ["/config.py"]}
|
14,711
|
RGunning/Pokemon_Go_API
|
refs/heads/master
|
/dirty.py
|
import time
from multiprocessing import Process
import api
import config
import location
import logic
import main
import pokemon_pb2
# Shared module state, written by main.py at startup.
multi = False  # when True, farm pokestops with two worker processes
argsStored = []  # replaced by the argparse Namespace in main.main()
startTime = time.time()  # process start, used by expPerHour()
accessToken = None  # current login token, set by refresh_access()
globalltype = None  # 'google' or 'ptc'
def start():
    """Main loop: ensure auth, set the location, then farm pokestops."""
    global argsStored
    while True:
        if accessToken is None or globalltype is None:
            refresh_access()
        location.set_location(argsStored.location)
        # NOTE(review): the token is fetched once and never invalidated;
        # if work_stop returns, the same token is reused -- confirm.
        print '[+] Token:', accessToken[:40] + '...'
        prot1 = logic.gen_first_data(accessToken, globalltype)
        local_ses = api.get_rpc_server(accessToken, prot1)
        new_rcp_point = 'https://%s/rpc' % (local_ses.rpc_server,)
        work_stop(local_ses, new_rcp_point)
def refresh_access():
    """Fetch a fresh access token into module globals; raise on failure."""
    global accessToken, globalltype
    creds = argsStored
    accessToken, globalltype = main.get_acces_token(
        creds.username, creds.password, creds.type.lower())
    if accessToken is None:
        print('[-] access Token bad')
        raise RuntimeError
def walk_random():
    """Nudge the current position north-east by one configured step."""
    lat, lon, alt = location.get_location_coords()
    # Decode the int-encoded coordinates, shift lat/lon by the step size.
    lat = location.l2f(lat) + config.steps
    lon = location.l2f(lon) + config.steps
    alt = location.l2f(alt)
    location.set_location_coords(lat, lon, alt)
def split_list(a_list):
    """Split a_list into two halves (first half shorter for odd lengths).

    Fix: use floor division so the midpoint stays an int under Python 3
    as well (plain '/' yields a float there and breaks slicing).
    """
    half = len(a_list) // 2
    return a_list[:half], a_list[half:]
def work_half_list(part, ses, new_rcp_point):
    """Farm every pokestop in one half of the stop list."""
    for stop in part:
        if config.debug:
            print('[!] farming pokestop..')
        work_with_stops(stop, ses, new_rcp_point)
def work_stop(local_ses, new_rcp_point):
    """Repeatedly list nearby pokestops and farm them.

    In multi-process mode the stop list is split in two and farmed by two
    worker processes; otherwise stops are farmed sequentially until one
    fails, after which the position is nudged and the loop repeats.
    """
    while True:
        proto_all = logic.all_stops(local_ses)
        all_stops = api.use_api(new_rcp_point, proto_all)
        maps = pokemon_pb2.maps()
        maps.ParseFromString(all_stops)
        data_list = location.get_near(maps)
        data_list = sorted(data_list, key=lambda x: x[1])
        if len(data_list) > 0:
            print('[+] found: %s Pokestops near' % (len(data_list)))
        if local_ses is not None and data_list is not None:
            print('[+] starting show')
            if multi:
                a, b = split_list(data_list)
                p = Process(target=work_half_list, args=(a, local_ses.ses, new_rcp_point))
                # Fix: the second worker previously received the FIRST
                # half ('a') again, so half the stops were farmed twice
                # and the other half never.
                o = Process(target=work_half_list, args=(b, local_ses.ses, new_rcp_point))
                p.start()
                o.start()
                p.join()
                o.join()
                print('[!] farming done..')
            else:
                for t in data_list:
                    if config.debug:
                        print('[!] farming pokestop..')
                    if not work_with_stops(t, local_ses.ses, new_rcp_point):
                        break
        else:
            walk_random()
def work_with_stops(current_stop, ses, new_rcp_point):
    """Spin (farm) one pokestop; retries every 14s while still "walking".

    Returns True when the stop was handled (or the API reply was empty),
    False on any exception so the caller can restart its loop.
    """
    Kinder = logic.gen_stop_data(ses, current_stop)
    tmp_api = api.use_api(new_rcp_point, Kinder)
    try:
        if tmp_api is not None:
            map = pokemon_pb2.map()
            map.ParseFromString(tmp_api)
            st = map.sess[0].status
            config.earned_xp += map.sess[0].amt
            # Status codes (inferred from messages below): 4 = XP earned,
            # 3 = already used, 2 = charging, 1 = still walking.
            if st == 4:
                print "[!] +%s (%s)" % (map.sess[0].amt, config.earned_xp)
            elif st == 3:
                print "[!] used"
            elif st == 2:
                print "[!] charging"
            elif st == 1:
                print "[!] walking.."
                expPerHour()
                time.sleep(14)
                # NOTE(review): unbounded recursion while the stop keeps
                # reporting "walking" -- could hit the recursion limit on
                # a long walk; confirm intended.
                work_with_stops(current_stop, ses, new_rcp_point)
            else:
                print "[?]:", st
        else:
            print '[-] tmp_api empty'
        return True
    except:
        # Bare except: any parse/attribute error aborts this farming pass.
        print '[-] error work_with_stops - Trying to restart process'
        return False
def expPerHour():
    """Print total XP gained and the hourly XP rate since start-up."""
    elapsed = time.time() - startTime
    minutes_run = elapsed / 60.
    hours_run = minutes_run / 60.
    if hours_run > 0:
        exp_hour = int(float(config.earned_xp) / hours_run)
    else:
        exp_hour = "n/a"
    print("[!] Gained: %s (%s exp/h)" % (config.earned_xp, exp_hour))
|
{"/location.py": ["/config.py"]}
|
14,712
|
RGunning/Pokemon_Go_API
|
refs/heads/master
|
/config.py
|
import requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning

# Certificate verification is disabled below, so silence urllib3's
# InsecureRequestWarning output.
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
# urls
api_url = 'https://pgorelease.nianticlabs.com/plfe/rpc'
login_url = 'https://sso.pokemon.com/sso/oauth2.0/authorize?client_id=mobile-app_pokemon-go&redirect_uri=https%3A%2F%2Fwww.nianticlabs.com%2Fpokemongo%2Ferror'
login_oauth = 'https://sso.pokemon.com/sso/oauth2.0/accessToken'
# urls end
# values
use_proxy = False
debug = False
# distance=0
# NOTE(review): 'distance' above is commented out, but location.py reads
# config.distance -- confirm it is meant to be defined.
steps = 0.000095  # walking step size in degrees
google = True
pub = None
earned_xp = 0  # running XP total, updated by dirty.work_with_stops
use_powerball = False
# values end
# session
proxies = {
    'http': 'http://127.0.0.1:8888',
    'https': 'http://127.0.0.1:8888',
}
s = requests.session()
if use_proxy:
    s.proxies.update(proxies)
# TLS verification deliberately off; the client impersonates the app.
s.verify = False
s.headers.update({'User-Agent': 'Niantic App'})
# session end
###########################################################################################
# public
API_URL = 'https://pgorelease.nianticlabs.com/plfe/rpc'
LOGIN_URL = 'https://sso.pokemon.com/sso/login?service=https%3A%2F%2Fsso.pokemon.com%2Fsso%2Foauth2.0%2FcallbackAuthorize'
LOGIN_OAUTH = 'https://sso.pokemon.com/sso/oauth2.0/accessToken'
PTC_CLIENT_SECRET = 'w8ScCUXJQc6kXKw8FiOhd8Fixzht18Dq3PEVkUCP5ZPxtgyWsbTvWHFLm2wNY0JR'
SESSION = requests.session()
SESSION.headers.update({'User-Agent': 'Niantic App'})
if use_proxy:
    SESSION.proxies.update(proxies)
SESSION.verify = False
DEBUG = True
###########################################################################################
|
{"/location.py": ["/config.py"]}
|
14,713
|
RGunning/Pokemon_Go_API
|
refs/heads/master
|
/location.py
|
import math
import struct
from math import radians, cos, sin, asin, sqrt
from geopy.distance import vincenty
from geopy.geocoders import GoogleV3
import config
# Current position.  COORDS_* hold 64-bit-int encodings of the double
# values (see f2i below); FLOAT_* keep the raw degree values.
COORDS_LATITUDE = 0
COORDS_LONGITUDE = 0
COORDS_ALTITUDE = 0
FLOAT_LAT = 0
FLOAT_LONG = 0
def get_location_coords():
    """Return the int-encoded (lat, long, alt) triple."""
    return (COORDS_LATITUDE, COORDS_LONGITUDE, COORDS_ALTITUDE)
def get_lat():
    """Return the int-encoded current latitude."""
    return COORDS_LATITUDE
def get_lot():
    """Return the int-encoded current longitude."""
    return COORDS_LONGITUDE
def set_lat(new):
    """Encode a new latitude (degrees) and store it."""
    global COORDS_LATITUDE
    COORDS_LATITUDE = f2i(new)
def set_lot(new):
    """Encode a new longitude (degrees) and store it."""
    global COORDS_LONGITUDE
    COORDS_LONGITUDE = f2i(new)
def set_location(location_name):
    """Geocode a free-form location name and move there."""
    loc = GoogleV3().geocode(location_name)
    print('[!] Your given location: {}'.format(loc.address.encode('utf-8')))
    set_location_coords(loc.latitude, loc.longitude, loc.altitude)
def set_location_coords(lat, long, alt):
    """Store a position, keeping both the float and int-encoded forms."""
    if config.debug:
        print('[!] lat/long/alt: {} {} {}'.format(lat, long, alt))
    global COORDS_LATITUDE, COORDS_LONGITUDE, COORDS_ALTITUDE
    global FLOAT_LAT, FLOAT_LONG
    FLOAT_LAT, FLOAT_LONG = lat, long
    COORDS_LATITUDE, COORDS_LONGITUDE, COORDS_ALTITUDE = (
        f2i(lat), f2i(long), f2i(alt))
def encode(cellid):
    """Varint-encode a cell id into a byte string.

    NOTE(review): 'encoder' is never imported in this module (similar
    projects use protobuf's internal encoder), so calling this raises
    NameError as written -- confirm the missing import.
    """
    output = []
    encoder._VarintEncoder()(output.append, cellid)
    return ''.join(output)
def getNeighbors():
    """Return the level-15 cell id of the current position plus the ten
    neighbouring cells on each side (21 ids total).

    NOTE(review): CellId/LatLng are not imported here (they belong to an
    s2 geometry library), so this raises NameError as written -- confirm.
    """
    origin = CellId.from_lat_lng(LatLng.from_degrees(FLOAT_LAT, FLOAT_LONG)).parent(15)
    walk = [origin.id()]
    # 10 before and 10 after
    next = origin.next()
    prev = origin.prev()
    for i in range(10):
        walk.append(prev.id())
        walk.append(next.id())
        next = next.next()
        prev = prev.prev()
    return walk
def i2f(int):
    """NOTE(review): despite the name this body is identical to f2i (it
    packs a double and reads its bits back as an unsigned 64-bit int);
    a true int->float would swap the format codes, as l2f does.  Left
    unchanged -- confirm which behaviour callers expect."""
    return struct.unpack('<Q', struct.pack('<d', int))[0]
def f2h(float):
    """Return the hex string of a double's 64-bit representation."""
    bits = struct.unpack('<Q', struct.pack('<d', float))[0]
    return hex(bits)
def f2i(float):
    """Reinterpret a double's bits as an unsigned 64-bit integer."""
    packed = struct.pack('<d', float)
    return struct.unpack('<Q', packed)[0]
def l2f(float):
    """Reinterpret an unsigned 64-bit integer's bits as a double."""
    # The bin()/int(..., 0) round trip normalises the value to an int
    # before packing, mirroring the original implementation.
    as_int = int(bin(float), 0)
    return struct.unpack('d', struct.pack('Q', as_int))[0]
def h2f(hex):
    """Decode a hex string of double bits back into the float."""
    bits = int(hex, 16)
    return struct.unpack('<d', struct.pack('<Q', bits))[0]
def get_near(map):
    """Flatten a protobuf map reply into (name, lat, lon, distance) tuples.

    The first entry describes the current position itself.  NOTE(review):
    stop.lat/stop.lon feed get_distance, which decodes them with l2f, so
    they appear to be int-encoded doubles -- confirm.
    """
    ms = []
    ms.append(('start', get_lat(), get_lot(), get_distance(get_lat(), get_lot(), COORDS_LATITUDE, COORDS_LONGITUDE)))
    for cell in [map]:
        for block in cell.b:
            for obj in block.c:
                for stop in obj.s:
                    # if distance(stop.lat,stop.lon,COORDS_LATITUDE,COORDS_LONGITUDE):
                    ms.append((stop.name, stop.lat, stop.lon,
                               get_distance(stop.lat, stop.lon, COORDS_LATITUDE, COORDS_LONGITUDE)))
    return ms
def get_near_p(map):
    """Flatten a protobuf map reply into pokemon tuples
    (type, lat, lon, name, hash, distance); first entry is 'start'.
    """
    ms = []
    ms.append(('start', get_lat(), get_lot(), 'start', 'start',
               get_distance(get_lat(), get_lot(), COORDS_LATITUDE, COORDS_LONGITUDE)))
    for cell in [map]:
        for block in cell.b:
            for obj in block.c:
                for stop in obj.p:
                    # if distance(stop.lat,stop.lon,COORDS_LATITUDE,COORDS_LONGITUDE):
                    ms.append((stop.t.type, stop.lat, stop.lon, stop.name, stop.hash,
                               get_distance(stop.lat, stop.lon, COORDS_LATITUDE, COORDS_LONGITUDE)))
    # for stop in obj.s:
    # if stop.p.type:
    # ms.append((stop.p.type,stop.lat,stop.lon,stop.name,stop.p.u2,get_distance(stop.lat,stop.lon,COORDS_LATITUDE,COORDS_LONGITUDE)))
    return ms
def move_to(lat1, lot1, lat2, lot2):
    """Step lat1/lot1 toward lat2/lot2 in 0.000095-degree increments.

    Fix: the descending-latitude branch looped on 'lat1 < lat2', which
    is immediately false when lat1 > lat2, so latitude never decreased;
    loop on 'lat1 > lat2' instead, mirroring the longitude handling.
    """
    if lat1 > lat2:
        while lat1 > lat2:
            lat1 = lat1 - 0.000095
    else:
        while lat1 < lat2:
            lat1 = lat1 + 0.000095
    if lot1 > lot2:
        while lot1 > lot2:
            lot1 = lot1 - 0.000095
    else:
        while lot2 > lot1:
            lot1 = lot1 + 0.000095
    return lat1, lot1, lat2, lot2
def distance(lat1, lon1, lat2, lon2):
    """Return True when two int-encoded points lie within config.distance
    metres, using the haversine formula on a 6371 km Earth radius.

    NOTE(review): config.py defines 'distance' only in a commented-out
    line, so this raises AttributeError as written -- confirm.
    """
    lat1 = l2f(lat1)
    lon1 = l2f(lon1)
    lat2 = l2f(lat2)
    lon2 = l2f(lon2)
    radius = 6371  # km *1000 m
    dlat = math.radians(lat2 - lat1)
    dlon = math.radians(lon2 - lon1)
    a = math.sin(dlat / 2) * math.sin(dlat / 2) + math.cos(math.radians(lat1)) \
        * math.cos(math.radians(lat2)) * math.sin(dlon / 2) * math.sin(
        dlon / 2)
    c = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
    d = radius * c * 1000
    return d < config.distance
def get_distance(lat1, lon1, lat2, lon2):
    """Great-circle distance in metres between two int-encoded points."""
    # Decode the 64-bit-int encodings back to degrees, then to radians.
    lon1, lat1, lon2, lat2 = (
        radians(l2f(v)) for v in (lon1, lat1, lon2, lat2))
    # haversine formula
    dlon = lon2 - lon1
    dlat = lat2 - lat1
    a = sin(dlat / 2) ** 2 + cos(lat1) * cos(lat2) * sin(dlon / 2) ** 2
    c = 2 * asin(sqrt(a))
    return 6367000 * c
def haversine(lon1, lat1, lon2, lat2):
    """
    Calculate the great circle distance between two points
    on the earth (specified in decimal degrees)
    """
    # Fix: the docstring previously sat after the first statements, where
    # it was just a discarded string literal; it now documents the
    # function properly.  Arguments arrive int-encoded (see l2f).
    lat1 = l2f(lat1)
    lon1 = l2f(lon1)
    lat2 = l2f(lat2)
    lon2 = l2f(lon2)
    # convert decimal degrees to radians
    lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2])
    # haversine formula
    dlon = lon2 - lon1
    dlat = lat2 - lat1
    a = sin(dlat / 2) ** 2 + cos(lat1) * cos(lat2) * sin(dlon / 2) ** 2
    c = 2 * asin(sqrt(a))
    r = 6371  # Radius of earth in kilometers. Use 3956 for miles
    return c * r * 1000
def is_near(locx, locy, myx, myy):
    """Return True when two int-encoded points are within config.distance
    metres, measured with geopy's Vincenty distance.

    NOTE(review): config.distance is commented out in config.py, so this
    raises AttributeError as written -- confirm.
    """
    tmp1 = (l2f(locx), l2f(locy))
    tmp2 = (l2f(myx), l2f(myy))
    res = vincenty(tmp1, tmp2).meters
    return res < config.distance
|
{"/location.py": ["/config.py"]}
|
14,733
|
Rejlentjless/store_order_management_system-heroku
|
refs/heads/master
|
/management_system/services/cashier.py
|
from datetime import datetime
from typing import List
from fastapi import Depends, HTTPException, status
from sqlalchemy.orm import Session
from ..database import tables
from ..database.conf_db import get_session
from ..models.cashier import ProductListModel, OrderListModel
class CashierService:
    """Cashier views: products, orders, and checks (bills)."""

    def __init__(self, session: Session = Depends(get_session)):
        self.session = session

    @staticmethod
    def __sale(price, create_date):
        """Apply a discount when the product is older than 30 days."""
        sale = 20  # discount, percent
        # NOTE(review): abs() also discounts products whose creation date
        # lies in the future -- confirm intended.
        if abs(datetime.toordinal(datetime.utcnow()) - datetime.toordinal(create_date)) > 30:
            return price - (price*sale/100)
        return price

    def _get_product(self, id_product: int) -> tables.ProductDB:
        """Return a product by id, or raise 404."""
        product = self.session.query(tables.ProductDB).filter_by(id=id_product).first()
        if not product:
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
        return product

    def _get_order(self, id_order: int) -> tables.OrderDB:
        """Return an order by id, or raise 404."""
        order = self.session.query(tables.OrderDB).filter_by(id=id_order).first()
        if not order:
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
        return order

    def get_products_list(self) -> List[tables.ProductDB]:
        """Return all products."""
        products = self.session.query(tables.ProductDB).all()
        return products

    def create_order(self, product_id: int) -> tables.OrderDB:
        """Create an order for the given product, pricing in any discount."""
        product = self._get_product(product_id)
        order = tables.OrderDB(
            id_product=product.id,
            name_product=product.name,
            price_order=self.__sale(product.price, product.create_product_date)
        )
        self.session.add(order)
        self.session.commit()
        return order

    def get_list_order_completed(self) -> List[tables.OrderDB]:
        """Return completed orders that do not have a check yet."""
        orders = self.session.query(tables.OrderDB).filter_by(status_order="completed", status_check=False).all()
        return orders

    def get_checks(self) -> List[tables.CheckDB]:
        """Return unpaid checks."""
        checks = self.session.query(tables.CheckDB).filter_by(status_pay=False).all()
        return checks

    def create_check(self, order_id: int) -> tables.CheckDB:
        """Create a check for a completed order; 404 otherwise."""
        order = self.session.query(tables.OrderDB).filter_by(id=order_id).first()
        if not order or order.status_order != "completed":
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
        check = tables.CheckDB(
            id_order=order.id,
            name_product=order.name_product,
            price_to_pay=order.price_order
        )
        order.status_check = True
        self.session.add(check)
        self.session.commit()
        return check

    def check_close(self, check_id: int) -> tables.CheckDB:
        """Close (pay) a check and mark its order as payed; 404 when the
        check is missing or already paid."""
        check = self.session.query(tables.CheckDB).filter_by(id=check_id).first()
        if not check or check.status_pay:
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
        order = self.session.query(tables.OrderDB).filter_by(id=check.id_order).first()
        order.status_order = "payed"
        check.status_pay = True
        self.session.commit()
        return check
|
{"/management_system/services/cashier.py": ["/management_system/models/cashier.py"], "/management_system/models/accountant.py": ["/management_system/models/cashier.py"], "/management_system/services/seller.py": ["/management_system/models/seller.py"], "/management_system/models/cashier.py": ["/management_system/models/__init__.py"], "/management_system/api/accountant.py": ["/management_system/models/accountant.py", "/management_system/services/accountant.py"], "/management_system/api/cashier.py": ["/management_system/models/cashier.py", "/management_system/services/cashier.py"], "/management_system/app.py": ["/management_system/api/__init__.py"], "/management_system/services/products.py": ["/management_system/models/products.py"], "/management_system/api/__init__.py": ["/management_system/api/cashier.py", "/management_system/api/products.py", "/management_system/api/seller.py", "/management_system/api/accountant.py"], "/management_system/services/accountant.py": ["/management_system/models/accountant.py"], "/management_system/api/seller.py": ["/management_system/models/seller.py", "/management_system/services/seller.py"], "/management_system/models/seller.py": ["/management_system/models/cashier.py"], "/app.py": ["/management_system/app.py"], "/management_system/api/products.py": ["/management_system/models/products.py", "/management_system/models/cashier.py", "/management_system/services/products.py"]}
|
14,734
|
Rejlentjless/store_order_management_system-heroku
|
refs/heads/master
|
/management_system/models/accountant.py
|
from .cashier import OrderListModel
|
{"/management_system/services/cashier.py": ["/management_system/models/cashier.py"], "/management_system/models/accountant.py": ["/management_system/models/cashier.py"], "/management_system/services/seller.py": ["/management_system/models/seller.py"], "/management_system/models/cashier.py": ["/management_system/models/__init__.py"], "/management_system/api/accountant.py": ["/management_system/models/accountant.py", "/management_system/services/accountant.py"], "/management_system/api/cashier.py": ["/management_system/models/cashier.py", "/management_system/services/cashier.py"], "/management_system/app.py": ["/management_system/api/__init__.py"], "/management_system/services/products.py": ["/management_system/models/products.py"], "/management_system/api/__init__.py": ["/management_system/api/cashier.py", "/management_system/api/products.py", "/management_system/api/seller.py", "/management_system/api/accountant.py"], "/management_system/services/accountant.py": ["/management_system/models/accountant.py"], "/management_system/api/seller.py": ["/management_system/models/seller.py", "/management_system/services/seller.py"], "/management_system/models/seller.py": ["/management_system/models/cashier.py"], "/app.py": ["/management_system/app.py"], "/management_system/api/products.py": ["/management_system/models/products.py", "/management_system/models/cashier.py", "/management_system/services/products.py"]}
|
14,735
|
Rejlentjless/store_order_management_system-heroku
|
refs/heads/master
|
/management_system/services/seller.py
|
from typing import List
from fastapi import Depends, HTTPException, status
from sqlalchemy.orm import Session
from ..database.conf_db import get_session
from ..database import tables
from ..models.seller import OrderListModel
class SellerService:
    """Seller views: pick up new orders and mark them completed."""

    def __init__(self, session: Session = Depends(get_session)):
        self.session = session

    def get_orders(self) -> List[tables.OrderDB]:
        """Return orders with status <new>."""
        orders = self.session.query(tables.OrderDB).filter_by(status_order="new").all()
        return orders

    def update_status_order(self, order_id: int) -> tables.OrderDB:
        """Mark a <new> order as <completed>; 404 if missing or not new."""
        order = self.session.query(tables.OrderDB).filter_by(id=order_id).first()
        if not order or order.status_order != "new":
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
        order.status_order = "completed"
        self.session.commit()
        return order
|
{"/management_system/services/cashier.py": ["/management_system/models/cashier.py"], "/management_system/models/accountant.py": ["/management_system/models/cashier.py"], "/management_system/services/seller.py": ["/management_system/models/seller.py"], "/management_system/models/cashier.py": ["/management_system/models/__init__.py"], "/management_system/api/accountant.py": ["/management_system/models/accountant.py", "/management_system/services/accountant.py"], "/management_system/api/cashier.py": ["/management_system/models/cashier.py", "/management_system/services/cashier.py"], "/management_system/app.py": ["/management_system/api/__init__.py"], "/management_system/services/products.py": ["/management_system/models/products.py"], "/management_system/api/__init__.py": ["/management_system/api/cashier.py", "/management_system/api/products.py", "/management_system/api/seller.py", "/management_system/api/accountant.py"], "/management_system/services/accountant.py": ["/management_system/models/accountant.py"], "/management_system/api/seller.py": ["/management_system/models/seller.py", "/management_system/services/seller.py"], "/management_system/models/seller.py": ["/management_system/models/cashier.py"], "/app.py": ["/management_system/app.py"], "/management_system/api/products.py": ["/management_system/models/products.py", "/management_system/models/cashier.py", "/management_system/services/products.py"]}
|
14,736
|
Rejlentjless/store_order_management_system-heroku
|
refs/heads/master
|
/management_system/models/cashier.py
|
from datetime import date
from decimal import Decimal
from pydantic import BaseModel
from . import BaseClassModel
class ProductListModel(BaseClassModel):
    """Read model for listing products."""
    name: str
    price: Decimal
    create_product_date: date
class OrderListModel(BaseClassModel):
    """Read model for listing orders."""
    id_product: int
    name_product: str
    price_order: Decimal
    status_order: str
    create_order_date: date
class CreateOrderModel(BaseModel):
    """Request body for creating an order from a product."""
    product_id: int
class CreateCheckModel(BaseModel):
    """Request payload for creating a check: the id of the order to bill."""
    order_id: int
class CheckListModel(BaseClassModel):
    """Read model for a check (invoice) row (ORM-compatible via BaseClassModel)."""
    id_order: int
    name_product: str
    price_to_pay: Decimal
    create_check_date: date
|
{"/management_system/services/cashier.py": ["/management_system/models/cashier.py"], "/management_system/models/accountant.py": ["/management_system/models/cashier.py"], "/management_system/services/seller.py": ["/management_system/models/seller.py"], "/management_system/models/cashier.py": ["/management_system/models/__init__.py"], "/management_system/api/accountant.py": ["/management_system/models/accountant.py", "/management_system/services/accountant.py"], "/management_system/api/cashier.py": ["/management_system/models/cashier.py", "/management_system/services/cashier.py"], "/management_system/app.py": ["/management_system/api/__init__.py"], "/management_system/services/products.py": ["/management_system/models/products.py"], "/management_system/api/__init__.py": ["/management_system/api/cashier.py", "/management_system/api/products.py", "/management_system/api/seller.py", "/management_system/api/accountant.py"], "/management_system/services/accountant.py": ["/management_system/models/accountant.py"], "/management_system/api/seller.py": ["/management_system/models/seller.py", "/management_system/services/seller.py"], "/management_system/models/seller.py": ["/management_system/models/cashier.py"], "/app.py": ["/management_system/app.py"], "/management_system/api/products.py": ["/management_system/models/products.py", "/management_system/models/cashier.py", "/management_system/services/products.py"]}
|
14,737
|
Rejlentjless/store_order_management_system-heroku
|
refs/heads/master
|
/management_system/api/accountant.py
|
from typing import List, Optional
from datetime import date, datetime
from fastapi import APIRouter, Depends
from ..models.accountant import OrderListModel
from ..services.accountant import AccountantService
# Routes for the accountant role, mounted under the /accountant prefix.
router = APIRouter(
    prefix="/accountant",
    tags=["Бухгалтер"]  # user-facing OpenAPI tag (Russian), kept verbatim
)
@router.get("/orders", response_model=List[OrderListModel])
def get_order_list(
date_start: Optional[str] = None,
date_end: Optional[str] = None,
service: AccountantService = Depends()
):
"""
## Получение списка заказов с возможностью выбора промежутка времени (в формате <число>.<месяц>.<год>)
\f
:param date_start:
:param date_end:
:param service:
:return:
"""
return service.get_orders(date_start, date_end)
|
{"/management_system/services/cashier.py": ["/management_system/models/cashier.py"], "/management_system/models/accountant.py": ["/management_system/models/cashier.py"], "/management_system/services/seller.py": ["/management_system/models/seller.py"], "/management_system/models/cashier.py": ["/management_system/models/__init__.py"], "/management_system/api/accountant.py": ["/management_system/models/accountant.py", "/management_system/services/accountant.py"], "/management_system/api/cashier.py": ["/management_system/models/cashier.py", "/management_system/services/cashier.py"], "/management_system/app.py": ["/management_system/api/__init__.py"], "/management_system/services/products.py": ["/management_system/models/products.py"], "/management_system/api/__init__.py": ["/management_system/api/cashier.py", "/management_system/api/products.py", "/management_system/api/seller.py", "/management_system/api/accountant.py"], "/management_system/services/accountant.py": ["/management_system/models/accountant.py"], "/management_system/api/seller.py": ["/management_system/models/seller.py", "/management_system/services/seller.py"], "/management_system/models/seller.py": ["/management_system/models/cashier.py"], "/app.py": ["/management_system/app.py"], "/management_system/api/products.py": ["/management_system/models/products.py", "/management_system/models/cashier.py", "/management_system/services/products.py"]}
|
14,738
|
Rejlentjless/store_order_management_system-heroku
|
refs/heads/master
|
/management_system/api/cashier.py
|
from typing import List
from fastapi import APIRouter, Depends
from ..models.cashier import ProductListModel, CreateOrderModel, OrderListModel, CheckListModel, CreateCheckModel
from ..services.cashier import CashierService
# Routes for the cashier role, mounted under the /cashier prefix.
router = APIRouter(
    prefix="/cashier",
    tags=["Кассир"]  # user-facing OpenAPI tag (Russian), kept verbatim
)
@router.get("/products", response_model=List[ProductListModel])
def get_product_list(service: CashierService = Depends()):
"""
## Получение списка товаров
"""
return service.get_products_list()
@router.post("/create-order", response_model=OrderListModel)
def create_order(
product_data: CreateOrderModel,
service: CashierService = Depends()
):
"""
## Создание заказа по указанному номеру (id) товара
"""
return service.create_order(product_data.product_id)
@router.get("/orders_completed", response_model=List[OrderListModel])
def get_orders_list(service: CashierService = Depends()):
"""
## Получение списка заказов которые обработанны продавцом-консультантом (выполненные)
"""
return service.get_list_order_completed()
@router.post("/create_check", response_model=CheckListModel)
def create_check(
order_data: CreateCheckModel,
service: CashierService = Depends()
):
"""
## Генерация счета на выполненныйй заказ
"""
return service.create_check(order_data.order_id)
@router.get("/checks", response_model=List[CheckListModel])
def get_checks(service: CashierService = Depends()):
"""
## Полугчение спичка открытых (не оплоченных) счетов
"""
return service.get_checks()
@router.put("/check_close/{check_id}", response_model=CheckListModel)
def close_check(
check_id: int,
service: CashierService = Depends()
):
"""
## Зактытие счета (счет оплачен)
"""
return service.check_close(check_id)
|
{"/management_system/services/cashier.py": ["/management_system/models/cashier.py"], "/management_system/models/accountant.py": ["/management_system/models/cashier.py"], "/management_system/services/seller.py": ["/management_system/models/seller.py"], "/management_system/models/cashier.py": ["/management_system/models/__init__.py"], "/management_system/api/accountant.py": ["/management_system/models/accountant.py", "/management_system/services/accountant.py"], "/management_system/api/cashier.py": ["/management_system/models/cashier.py", "/management_system/services/cashier.py"], "/management_system/app.py": ["/management_system/api/__init__.py"], "/management_system/services/products.py": ["/management_system/models/products.py"], "/management_system/api/__init__.py": ["/management_system/api/cashier.py", "/management_system/api/products.py", "/management_system/api/seller.py", "/management_system/api/accountant.py"], "/management_system/services/accountant.py": ["/management_system/models/accountant.py"], "/management_system/api/seller.py": ["/management_system/models/seller.py", "/management_system/services/seller.py"], "/management_system/models/seller.py": ["/management_system/models/cashier.py"], "/app.py": ["/management_system/app.py"], "/management_system/api/products.py": ["/management_system/models/products.py", "/management_system/models/cashier.py", "/management_system/services/products.py"]}
|
14,739
|
Rejlentjless/store_order_management_system-heroku
|
refs/heads/master
|
/management_system/app.py
|
# основной файл приложения
from fastapi import FastAPI
from .api import router
# OpenAPI tag metadata: one entry per user role. Names and descriptions are
# user-facing strings rendered in the docs UI and are kept verbatim (Russian).
tags_metadata = [
    {
        "name": "Кассир",
        "description": "Добавляет заказ, генерирует счет, закрывает счет и заказ"
    },
    {
        "name": "Продавец-консультант",
        "description": "Обрабатывает заказ, меняет статус заказа"
    },
    {
        "name": "Бухгалтер",
        "description": "Просматривает все заказы. Может выбирать диапозон создания заказов (например с 01.07.2021 до 31.07.2021)"
    },
]
# FastAPI application instance; docs metadata is localized in Russian.
app = FastAPI(
    title="Store order management system",
    description="Система управления заказами в магазине",
    version="1.0.0",
    openapi_tags=tags_metadata
)
# Mount the aggregated per-role routers onto the application.
app.include_router(router=router)
|
{"/management_system/services/cashier.py": ["/management_system/models/cashier.py"], "/management_system/models/accountant.py": ["/management_system/models/cashier.py"], "/management_system/services/seller.py": ["/management_system/models/seller.py"], "/management_system/models/cashier.py": ["/management_system/models/__init__.py"], "/management_system/api/accountant.py": ["/management_system/models/accountant.py", "/management_system/services/accountant.py"], "/management_system/api/cashier.py": ["/management_system/models/cashier.py", "/management_system/services/cashier.py"], "/management_system/app.py": ["/management_system/api/__init__.py"], "/management_system/services/products.py": ["/management_system/models/products.py"], "/management_system/api/__init__.py": ["/management_system/api/cashier.py", "/management_system/api/products.py", "/management_system/api/seller.py", "/management_system/api/accountant.py"], "/management_system/services/accountant.py": ["/management_system/models/accountant.py"], "/management_system/api/seller.py": ["/management_system/models/seller.py", "/management_system/services/seller.py"], "/management_system/models/seller.py": ["/management_system/models/cashier.py"], "/app.py": ["/management_system/app.py"], "/management_system/api/products.py": ["/management_system/models/products.py", "/management_system/models/cashier.py", "/management_system/services/products.py"]}
|
14,740
|
Rejlentjless/store_order_management_system-heroku
|
refs/heads/master
|
/management_system/services/products.py
|
from fastapi import Depends
from sqlalchemy.orm import Session
from ..database import tables
from ..database.conf_db import get_session
from ..models.products import CreateProductModel
class ProductService:
    """Persistence operations for products."""

    def __init__(self, session: Session = Depends(get_session)):
        # Database session injected by FastAPI on each request.
        self.session = session

    def create_product(self, product_data: CreateProductModel) -> tables.ProductDB:
        """Insert a new product row and return the persisted record."""
        new_row = tables.ProductDB(name=product_data.name, price=product_data.price)
        self.session.add(new_row)
        self.session.commit()
        return new_row
|
{"/management_system/services/cashier.py": ["/management_system/models/cashier.py"], "/management_system/models/accountant.py": ["/management_system/models/cashier.py"], "/management_system/services/seller.py": ["/management_system/models/seller.py"], "/management_system/models/cashier.py": ["/management_system/models/__init__.py"], "/management_system/api/accountant.py": ["/management_system/models/accountant.py", "/management_system/services/accountant.py"], "/management_system/api/cashier.py": ["/management_system/models/cashier.py", "/management_system/services/cashier.py"], "/management_system/app.py": ["/management_system/api/__init__.py"], "/management_system/services/products.py": ["/management_system/models/products.py"], "/management_system/api/__init__.py": ["/management_system/api/cashier.py", "/management_system/api/products.py", "/management_system/api/seller.py", "/management_system/api/accountant.py"], "/management_system/services/accountant.py": ["/management_system/models/accountant.py"], "/management_system/api/seller.py": ["/management_system/models/seller.py", "/management_system/services/seller.py"], "/management_system/models/seller.py": ["/management_system/models/cashier.py"], "/app.py": ["/management_system/app.py"], "/management_system/api/products.py": ["/management_system/models/products.py", "/management_system/models/cashier.py", "/management_system/services/products.py"]}
|
14,741
|
Rejlentjless/store_order_management_system-heroku
|
refs/heads/master
|
/management_system/api/__init__.py
|
from fastapi import APIRouter
from .cashier import router as cashier_router
from .products import router as product_router
from .seller import router as seller_router
from .accountant import router as accountant_router
# Root router: aggregates every role-specific router into one mount point.
router = APIRouter()
# Attach the per-role routers.
router.include_router(product_router)
router.include_router(cashier_router)
router.include_router(seller_router)
router.include_router(accountant_router)
|
{"/management_system/services/cashier.py": ["/management_system/models/cashier.py"], "/management_system/models/accountant.py": ["/management_system/models/cashier.py"], "/management_system/services/seller.py": ["/management_system/models/seller.py"], "/management_system/models/cashier.py": ["/management_system/models/__init__.py"], "/management_system/api/accountant.py": ["/management_system/models/accountant.py", "/management_system/services/accountant.py"], "/management_system/api/cashier.py": ["/management_system/models/cashier.py", "/management_system/services/cashier.py"], "/management_system/app.py": ["/management_system/api/__init__.py"], "/management_system/services/products.py": ["/management_system/models/products.py"], "/management_system/api/__init__.py": ["/management_system/api/cashier.py", "/management_system/api/products.py", "/management_system/api/seller.py", "/management_system/api/accountant.py"], "/management_system/services/accountant.py": ["/management_system/models/accountant.py"], "/management_system/api/seller.py": ["/management_system/models/seller.py", "/management_system/services/seller.py"], "/management_system/models/seller.py": ["/management_system/models/cashier.py"], "/app.py": ["/management_system/app.py"], "/management_system/api/products.py": ["/management_system/models/products.py", "/management_system/models/cashier.py", "/management_system/services/products.py"]}
|
14,742
|
Rejlentjless/store_order_management_system-heroku
|
refs/heads/master
|
/management_system/models/products.py
|
from decimal import Decimal
from pydantic import BaseModel
class CreateProductModel(BaseModel):
    """Request payload for creating a product."""
    name: str
    price: Decimal
|
{"/management_system/services/cashier.py": ["/management_system/models/cashier.py"], "/management_system/models/accountant.py": ["/management_system/models/cashier.py"], "/management_system/services/seller.py": ["/management_system/models/seller.py"], "/management_system/models/cashier.py": ["/management_system/models/__init__.py"], "/management_system/api/accountant.py": ["/management_system/models/accountant.py", "/management_system/services/accountant.py"], "/management_system/api/cashier.py": ["/management_system/models/cashier.py", "/management_system/services/cashier.py"], "/management_system/app.py": ["/management_system/api/__init__.py"], "/management_system/services/products.py": ["/management_system/models/products.py"], "/management_system/api/__init__.py": ["/management_system/api/cashier.py", "/management_system/api/products.py", "/management_system/api/seller.py", "/management_system/api/accountant.py"], "/management_system/services/accountant.py": ["/management_system/models/accountant.py"], "/management_system/api/seller.py": ["/management_system/models/seller.py", "/management_system/services/seller.py"], "/management_system/models/seller.py": ["/management_system/models/cashier.py"], "/app.py": ["/management_system/app.py"], "/management_system/api/products.py": ["/management_system/models/products.py", "/management_system/models/cashier.py", "/management_system/services/products.py"]}
|
14,743
|
Rejlentjless/store_order_management_system-heroku
|
refs/heads/master
|
/management_system/services/accountant.py
|
from typing import List, Optional
from datetime import date
from fastapi import Depends
from sqlalchemy.orm import Session
from sqlalchemy import and_
from ..database.conf_db import get_session
from ..database import tables
from ..models.accountant import OrderListModel
class AccountantService:
    """Read-only order queries for the accountant role."""

    def __init__(self, session: Session = Depends(get_session)):
        # Database session injected by FastAPI on each request.
        self.session = session

    @staticmethod
    def format_date(str_date) -> date:
        """Parse a "DD.MM.YYYY" string into a date.

        Returns None when the input is malformed (missing parts, non-numeric
        values, or an out-of-range day/month/year), which callers rely on.
        """
        parts = str_date.split(".")
        try:
            new_date = date(
                int(parts[2]),
                int(parts[1]),
                int(parts[0])
            )
        # Narrowed from a bare `except:`: only the parse failures we expect
        # (too few parts -> IndexError, bad int/date values -> ValueError)
        # should map to None; anything else is a real bug and must propagate.
        except (IndexError, ValueError):
            new_date = None
        return new_date

    def get_orders(self, date_start: Optional[str] = None, date_end: Optional[str] = None) -> List[OrderListModel]:
        """Return all orders, restricted to the inclusive date range only when BOTH bounds are given."""
        query = self.session.query(tables.OrderDB)
        if date_end and date_start:
            query = query.filter(
                and_(
                    tables.OrderDB.create_order_date >= self.format_date(date_start),
                    tables.OrderDB.create_order_date <= self.format_date(date_end),
                )
            )
        orders = query.all()
        return orders
|
{"/management_system/services/cashier.py": ["/management_system/models/cashier.py"], "/management_system/models/accountant.py": ["/management_system/models/cashier.py"], "/management_system/services/seller.py": ["/management_system/models/seller.py"], "/management_system/models/cashier.py": ["/management_system/models/__init__.py"], "/management_system/api/accountant.py": ["/management_system/models/accountant.py", "/management_system/services/accountant.py"], "/management_system/api/cashier.py": ["/management_system/models/cashier.py", "/management_system/services/cashier.py"], "/management_system/app.py": ["/management_system/api/__init__.py"], "/management_system/services/products.py": ["/management_system/models/products.py"], "/management_system/api/__init__.py": ["/management_system/api/cashier.py", "/management_system/api/products.py", "/management_system/api/seller.py", "/management_system/api/accountant.py"], "/management_system/services/accountant.py": ["/management_system/models/accountant.py"], "/management_system/api/seller.py": ["/management_system/models/seller.py", "/management_system/services/seller.py"], "/management_system/models/seller.py": ["/management_system/models/cashier.py"], "/app.py": ["/management_system/app.py"], "/management_system/api/products.py": ["/management_system/models/products.py", "/management_system/models/cashier.py", "/management_system/services/products.py"]}
|
14,744
|
Rejlentjless/store_order_management_system-heroku
|
refs/heads/master
|
/management_system/settings.py
|
# ---
# Конфигурации приложения
# ---
from pydantic import BaseSettings
class Settings(BaseSettings):
    # Host and port the server binds to.
    server_host: str = "127.0.0.1"
    server_port: int = 8000
    # Database connection string (SQLAlchemy URL); local SQLite file by default.
    database_url: str = "sqlite:///db.sqlite3"
settings = Settings(
    # Read overrides from a dotenv file.
    _env_file=".env",
    _env_file_encoding="utf-8"
)
|
{"/management_system/services/cashier.py": ["/management_system/models/cashier.py"], "/management_system/models/accountant.py": ["/management_system/models/cashier.py"], "/management_system/services/seller.py": ["/management_system/models/seller.py"], "/management_system/models/cashier.py": ["/management_system/models/__init__.py"], "/management_system/api/accountant.py": ["/management_system/models/accountant.py", "/management_system/services/accountant.py"], "/management_system/api/cashier.py": ["/management_system/models/cashier.py", "/management_system/services/cashier.py"], "/management_system/app.py": ["/management_system/api/__init__.py"], "/management_system/services/products.py": ["/management_system/models/products.py"], "/management_system/api/__init__.py": ["/management_system/api/cashier.py", "/management_system/api/products.py", "/management_system/api/seller.py", "/management_system/api/accountant.py"], "/management_system/services/accountant.py": ["/management_system/models/accountant.py"], "/management_system/api/seller.py": ["/management_system/models/seller.py", "/management_system/services/seller.py"], "/management_system/models/seller.py": ["/management_system/models/cashier.py"], "/app.py": ["/management_system/app.py"], "/management_system/api/products.py": ["/management_system/models/products.py", "/management_system/models/cashier.py", "/management_system/services/products.py"]}
|
14,745
|
Rejlentjless/store_order_management_system-heroku
|
refs/heads/master
|
/management_system/database/tables.py
|
# ---
# описание таблиц базы данных
# ---
from datetime import datetime
from sqlalchemy import Column, Integer, String, Boolean, Numeric, DateTime
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.sql import func
# Shared declarative base for every ORM model in this module.
Base = declarative_base()
class ProductDB(Base):
    """ORM model for the "product" table."""
    __tablename__ = "product"
    id = Column(Integer, primary_key=True, unique=True)
    name = Column(String)
    price = Column(Numeric(10, 2))  # money: 10 digits total, 2 decimal places
    create_product_date = Column(DateTime(timezone=True), default=func.now())

    def __repr__(self):
        # Russian runtime string kept verbatim ("Product id: ...").
        return f"Идинтификатор товара: {self.id}"
class OrderDB(Base):
    """ORM model for the "order" table."""
    __tablename__ = "order"
    id = Column(Integer, primary_key=True, unique=True)
    id_product = Column(Integer)  # presumably references product.id; no FK declared — confirm against services
    name_product = Column(String)  # denormalized copy of the product name
    price_order = Column(Numeric(10, 2))
    create_order_date = Column(DateTime(timezone=True), default=func.now())
    status_order = Column(String, default="new")  # lifecycle includes "new" and "completed"
    status_check = Column(Boolean, default=False)  # NOTE(review): looks like "a check was issued" — confirm
    def __repr__(self):
        # Russian runtime string kept verbatim ("Order number: ...").
        return f"Номер заказа: {self.id}"
class CheckDB(Base):
    """ORM model for the "check" (invoice) table."""
    __tablename__ = "check"
    id = Column(Integer, primary_key=True, unique=True)
    id_order = Column(Integer)  # presumably references order.id; no FK declared — confirm against services
    name_product = Column(String)  # denormalized copy of the product name
    price_to_pay = Column(Numeric(10, 2))
    create_check_date = Column(DateTime(timezone=True), default=func.now())
    status_pay = Column(Boolean, default=False)  # False = open/unpaid, True = paid
    def __repr__(self):
        # Russian runtime string kept verbatim ("Check number: ...").
        return f"Номер счета: {self.id}"
# создание базы данных
# from management_system.database.conf_db import engine
# Base.metadata.create_all(engine)
|
{"/management_system/services/cashier.py": ["/management_system/models/cashier.py"], "/management_system/models/accountant.py": ["/management_system/models/cashier.py"], "/management_system/services/seller.py": ["/management_system/models/seller.py"], "/management_system/models/cashier.py": ["/management_system/models/__init__.py"], "/management_system/api/accountant.py": ["/management_system/models/accountant.py", "/management_system/services/accountant.py"], "/management_system/api/cashier.py": ["/management_system/models/cashier.py", "/management_system/services/cashier.py"], "/management_system/app.py": ["/management_system/api/__init__.py"], "/management_system/services/products.py": ["/management_system/models/products.py"], "/management_system/api/__init__.py": ["/management_system/api/cashier.py", "/management_system/api/products.py", "/management_system/api/seller.py", "/management_system/api/accountant.py"], "/management_system/services/accountant.py": ["/management_system/models/accountant.py"], "/management_system/api/seller.py": ["/management_system/models/seller.py", "/management_system/services/seller.py"], "/management_system/models/seller.py": ["/management_system/models/cashier.py"], "/app.py": ["/management_system/app.py"], "/management_system/api/products.py": ["/management_system/models/products.py", "/management_system/models/cashier.py", "/management_system/services/products.py"]}
|
14,746
|
Rejlentjless/store_order_management_system-heroku
|
refs/heads/master
|
/management_system/models/__init__.py
|
from pydantic import BaseModel
class BaseClassModel(BaseModel):
    """Common base for read models: adds the primary-key id and enables ORM mode."""
    id: int
    class Config:
        # Allow building the model straight from SQLAlchemy ORM objects.
        orm_mode = True
|
{"/management_system/services/cashier.py": ["/management_system/models/cashier.py"], "/management_system/models/accountant.py": ["/management_system/models/cashier.py"], "/management_system/services/seller.py": ["/management_system/models/seller.py"], "/management_system/models/cashier.py": ["/management_system/models/__init__.py"], "/management_system/api/accountant.py": ["/management_system/models/accountant.py", "/management_system/services/accountant.py"], "/management_system/api/cashier.py": ["/management_system/models/cashier.py", "/management_system/services/cashier.py"], "/management_system/app.py": ["/management_system/api/__init__.py"], "/management_system/services/products.py": ["/management_system/models/products.py"], "/management_system/api/__init__.py": ["/management_system/api/cashier.py", "/management_system/api/products.py", "/management_system/api/seller.py", "/management_system/api/accountant.py"], "/management_system/services/accountant.py": ["/management_system/models/accountant.py"], "/management_system/api/seller.py": ["/management_system/models/seller.py", "/management_system/services/seller.py"], "/management_system/models/seller.py": ["/management_system/models/cashier.py"], "/app.py": ["/management_system/app.py"], "/management_system/api/products.py": ["/management_system/models/products.py", "/management_system/models/cashier.py", "/management_system/services/products.py"]}
|
14,747
|
Rejlentjless/store_order_management_system-heroku
|
refs/heads/master
|
/management_system/api/seller.py
|
from typing import List
from fastapi import APIRouter, Depends
from ..models.seller import OrderListModel
from ..services.seller import SellerService
# Routes for the seller role, mounted under the /seller prefix.
router = APIRouter(
    prefix="/seller",
    tags=["Продавец-консультант"]  # user-facing OpenAPI tag (Russian), kept verbatim
)
@router.get("/orders_new", response_model=List[OrderListModel])
def get_orders_status_new(service: SellerService = Depends()):
"""
## Получение списка новых созданных кассиром заказов
"""
return service.get_orders()
@router.put("/order_completed/{order_id}", response_model=OrderListModel)
def update_status_order(
order_id: int,
service: SellerService = Depends()
):
"""
## Перевод заказа в статус выполненный с указание id заказа
"""
return service.update_status_order(order_id)
|
{"/management_system/services/cashier.py": ["/management_system/models/cashier.py"], "/management_system/models/accountant.py": ["/management_system/models/cashier.py"], "/management_system/services/seller.py": ["/management_system/models/seller.py"], "/management_system/models/cashier.py": ["/management_system/models/__init__.py"], "/management_system/api/accountant.py": ["/management_system/models/accountant.py", "/management_system/services/accountant.py"], "/management_system/api/cashier.py": ["/management_system/models/cashier.py", "/management_system/services/cashier.py"], "/management_system/app.py": ["/management_system/api/__init__.py"], "/management_system/services/products.py": ["/management_system/models/products.py"], "/management_system/api/__init__.py": ["/management_system/api/cashier.py", "/management_system/api/products.py", "/management_system/api/seller.py", "/management_system/api/accountant.py"], "/management_system/services/accountant.py": ["/management_system/models/accountant.py"], "/management_system/api/seller.py": ["/management_system/models/seller.py", "/management_system/services/seller.py"], "/management_system/models/seller.py": ["/management_system/models/cashier.py"], "/app.py": ["/management_system/app.py"], "/management_system/api/products.py": ["/management_system/models/products.py", "/management_system/models/cashier.py", "/management_system/services/products.py"]}
|
14,748
|
Rejlentjless/store_order_management_system-heroku
|
refs/heads/master
|
/management_system/models/seller.py
|
from .cashier import OrderListModel
|
{"/management_system/services/cashier.py": ["/management_system/models/cashier.py"], "/management_system/models/accountant.py": ["/management_system/models/cashier.py"], "/management_system/services/seller.py": ["/management_system/models/seller.py"], "/management_system/models/cashier.py": ["/management_system/models/__init__.py"], "/management_system/api/accountant.py": ["/management_system/models/accountant.py", "/management_system/services/accountant.py"], "/management_system/api/cashier.py": ["/management_system/models/cashier.py", "/management_system/services/cashier.py"], "/management_system/app.py": ["/management_system/api/__init__.py"], "/management_system/services/products.py": ["/management_system/models/products.py"], "/management_system/api/__init__.py": ["/management_system/api/cashier.py", "/management_system/api/products.py", "/management_system/api/seller.py", "/management_system/api/accountant.py"], "/management_system/services/accountant.py": ["/management_system/models/accountant.py"], "/management_system/api/seller.py": ["/management_system/models/seller.py", "/management_system/services/seller.py"], "/management_system/models/seller.py": ["/management_system/models/cashier.py"], "/app.py": ["/management_system/app.py"], "/management_system/api/products.py": ["/management_system/models/products.py", "/management_system/models/cashier.py", "/management_system/services/products.py"]}
|
14,749
|
Rejlentjless/store_order_management_system-heroku
|
refs/heads/master
|
/app.py
|
from .management_system.app import app
|
{"/management_system/services/cashier.py": ["/management_system/models/cashier.py"], "/management_system/models/accountant.py": ["/management_system/models/cashier.py"], "/management_system/services/seller.py": ["/management_system/models/seller.py"], "/management_system/models/cashier.py": ["/management_system/models/__init__.py"], "/management_system/api/accountant.py": ["/management_system/models/accountant.py", "/management_system/services/accountant.py"], "/management_system/api/cashier.py": ["/management_system/models/cashier.py", "/management_system/services/cashier.py"], "/management_system/app.py": ["/management_system/api/__init__.py"], "/management_system/services/products.py": ["/management_system/models/products.py"], "/management_system/api/__init__.py": ["/management_system/api/cashier.py", "/management_system/api/products.py", "/management_system/api/seller.py", "/management_system/api/accountant.py"], "/management_system/services/accountant.py": ["/management_system/models/accountant.py"], "/management_system/api/seller.py": ["/management_system/models/seller.py", "/management_system/services/seller.py"], "/management_system/models/seller.py": ["/management_system/models/cashier.py"], "/app.py": ["/management_system/app.py"], "/management_system/api/products.py": ["/management_system/models/products.py", "/management_system/models/cashier.py", "/management_system/services/products.py"]}
|
14,750
|
Rejlentjless/store_order_management_system-heroku
|
refs/heads/master
|
/management_system/api/products.py
|
from fastapi import APIRouter, Depends
from ..models.products import CreateProductModel
from ..models.cashier import ProductListModel
from ..services.products import ProductService
# Product-creation routes, mounted under the /create-product prefix.
router = APIRouter(
    prefix="/create-product",
    tags=["Создание товара"]  # user-facing OpenAPI tag (Russian), kept verbatim
)
@router.post("/", response_model=ProductListModel)
def create_product(
product_data: CreateProductModel,
service: ProductService = Depends()
):
"""
## Добавить товар в базу данных
\f
:param product_data:
:param service:
:return:
"""
return service.create_product(product_data)
|
{"/management_system/services/cashier.py": ["/management_system/models/cashier.py"], "/management_system/models/accountant.py": ["/management_system/models/cashier.py"], "/management_system/services/seller.py": ["/management_system/models/seller.py"], "/management_system/models/cashier.py": ["/management_system/models/__init__.py"], "/management_system/api/accountant.py": ["/management_system/models/accountant.py", "/management_system/services/accountant.py"], "/management_system/api/cashier.py": ["/management_system/models/cashier.py", "/management_system/services/cashier.py"], "/management_system/app.py": ["/management_system/api/__init__.py"], "/management_system/services/products.py": ["/management_system/models/products.py"], "/management_system/api/__init__.py": ["/management_system/api/cashier.py", "/management_system/api/products.py", "/management_system/api/seller.py", "/management_system/api/accountant.py"], "/management_system/services/accountant.py": ["/management_system/models/accountant.py"], "/management_system/api/seller.py": ["/management_system/models/seller.py", "/management_system/services/seller.py"], "/management_system/models/seller.py": ["/management_system/models/cashier.py"], "/app.py": ["/management_system/app.py"], "/management_system/api/products.py": ["/management_system/models/products.py", "/management_system/models/cashier.py", "/management_system/services/products.py"]}
|
14,751
|
fjcarnevale/studying
|
refs/heads/master
|
/Chaper1.py
|
import sorting
def expect_equal(a, b, name):
    """Print "Pass: <name>" when a == b, otherwise "Fail: <name>"."""
    verdict = "Pass: " if a == b else "Fail: "
    print(verdict + name)
# Question 1.1a
def unique_characters(S):
    """Return True iff no character occurs more than once in S (question 1.1a).

    Uses a set for O(1) membership instead of a dict with dummy True values;
    still exits early on the first duplicate. O(len(S)) time and space.
    """
    seen = set()
    for c in S:
        if c in seen:
            return False
        seen.add(c)
    return True
# Test 1.1a — smoke checks printed via expect_equal (no hard assertions).
S = "abcdefg"
expect_equal(unique_characters(S), True, "1.1a Unique")
S = "abcdeefg"
expect_equal(unique_characters(S), False, "1.1a Not Unique")
# Question 1.1b
def unique_characters_no_hash(S):
    """Question 1.1b: uniqueness check without a hash structure.

    Sorts a list copy of S with the local sorting module, then scans
    adjacent pairs — duplicates must be neighbours after sorting.
    """
    letters = list(S)
    sorting.quicksort(letters)
    for left, right in zip(letters, letters[1:]):
        if left == right:
            return False
    return True
# Test 1.1b: same expectations as 1.1a, via the sort-based implementation.
S = "abcdefg"
expect_equal(unique_characters_no_hash(S), True, "1.1b Unique")
S = "aebcdefg"
expect_equal(unique_characters_no_hash(S), False, "1.1b Not Unique")
# Question 1.2
def reverse_string(S):
    """Question 1.2: return a copy of S with the characters reversed.

    Swaps characters in place on a list copy using two converging indices.
    """
    chars = list(S)
    left, right = 0, len(chars) - 1
    while left < right:
        chars[left], chars[right] = chars[right], chars[left]
        left += 1
        right -= 1
    return "".join(chars)
# Test 1.2: one odd-length and one even-length string.
S = "abcde"
expect_equal(reverse_string(S), "edcba", "1.2 Odd")
S = "abcdef"
expect_equal(reverse_string(S), "fedcba", "1.2 Even")
# Question 1.3
def is_permutation(A, B):
    """Question 1.3: return True iff B is a permutation of A.

    Replaces the hand-rolled two-way letter counting with
    collections.Counter — a single multiset comparison. The length check
    short-circuits the common mismatch case before counting.
    """
    from collections import Counter
    if len(A) != len(B):
        return False
    return Counter(A) == Counter(B)
# Test 1.3: a true permutation, a length mismatch, and a letter mismatch.
A = "abcde"
B = "aebdc"
expect_equal(is_permutation(A,B), True, "1.3 Match")
B = "aebdcf"
expect_equal(is_permutation(A,B), False, "1.3 Mismatch Different Length")
B = "abdcz"
expect_equal(is_permutation(A,B), False, "1.3 Mismatch Different Letters")
|
{"/Chaper1.py": ["/sorting.py"]}
|
14,752
|
fjcarnevale/studying
|
refs/heads/master
|
/sorting.py
|
def quicksort(A):
    """Sort the list A in place (ascending) via recursive quicksort."""
    quicksort_impl(A, 0, len(A) - 1)
def quicksort_impl(A, p, r):
    """Quicksort A[p..r] in place using Lomuto partitioning (pivot = A[r])."""
    if p >= r:
        return
    # boundary is the slot where the next element smaller than the pivot goes.
    boundary = p
    for k in range(p, r):
        if A[k] < A[r]:
            A[boundary], A[k] = A[k], A[boundary]
            boundary += 1
    # Move the pivot between the two partitions and recurse on each side.
    A[boundary], A[r] = A[r], A[boundary]
    quicksort_impl(A, p, boundary - 1)
    quicksort_impl(A, boundary + 1, r)
|
{"/Chaper1.py": ["/sorting.py"]}
|
14,753
|
rageobi/DetectionSystem
|
refs/heads/master
|
/densityscan.py
|
from typing import NamedTuple
import math
import numpy as np
import random
count = 0  # module-level counter used by Cluster to hand out sequential ids
class Cluster():
    """A 2-D point with a mutable cluster assignment used by the density scan."""
    def __init__(self, x=None, y=None):
        global count
        self.x = x
        self.y = y
        # Take the next sequential id from the module-level counter.
        self.id = count = count + 1
        # -2 marks "not yet assigned to any cluster".
        self.cluster = -2
    def __repr__(self):
        # BUG FIX: __repr__ must return a string; the original returned a
        # tuple of lists, which raises TypeError whenever repr() is called.
        return "Cluster(x={}, y={}, cluster={})".format(
            self.x, self.y, self.cluster)
    def CheckValidPoints(self, point, x_dist, y_dist) -> int:
        """Classify *point* relative to this one.

        Returns 2 when coordinates are identical, 1 when the point lies
        within x_dist on x AND y_dist on y, otherwise 0.
        """
        #same point as the base cluster
        if self.x == point.x and self.y == point.y:
            return 2
        #Within the mentioned distance from Base cluster
        elif self.GetDistance(point, 1) <= x_dist and self.GetDistance(point, 2) <= y_dist:
            return 1
        #Not Within the mentioned distance from Base cluster
        else:
            return 0
    def GetDistance(self, p2, check):
        """Return |p2.x - x| (check == 1) or |p2.y - y| (check == 2),
        rounded to 5 decimal places; -1 for any other check value."""
        #Get X Distance
        if check == 1:
            return round(abs(p2.x - self.x), 5)
        #Get Y Distance
        elif check == 2:
            return round(abs(p2.y - self.y), 5)
        #Wrong option
        else:
            return -1
class ClusterLists():
    """Collection of Cluster points plus a simple density-style labelling pass.

    cluster_list holds Cluster objects (a plain list until reshape() turns it
    into a numpy array); cluster_val is the next label to hand out; randoms
    is an index pool used by callers that traverse in random order.
    """
    #cluster_val = 0
    def __init__(self):
        self.cluster_list = []
        self.randoms = []
        self.cluster_val = 1
    def get_cluster_labels(self):
        # Return the per-point cluster labels, in cluster_list order.
        st = []
        for x in self.cluster_list:
            st.append(x.cluster)
        #if verbose:
        #print((" {} clusters for the frame").format(len(st)))
        return st
    def update_random(self):
        # Promote cluster_list to a numpy array if needed, then rebuild the
        # index pool whenever its length no longer matches the point count.
        if (type(self.cluster_list).__module__ != np.__name__):
            self.reshape()
        if (len(self.randoms) != len(self.cluster_list)):
            self.randoms = list(range(self.cluster_list.shape[0]))
    def cluster_cluster(self,x_dist,y_dist):
        # Label every still-unassigned point whose neighbours fall within
        # (x_dist, y_dist); returns the labels as a numpy array.
        self.update_random()
        for i in range(0, len(self.cluster_list)):#len(self.randoms)):
            #choice = random.choice(self.randoms)
            self.CheckValidClusters(self.cluster_list[i], x_dist, y_dist)
            #self.randoms.remove(choice)
        return np.array(self.get_cluster_labels())
    def append(self, cluster: Cluster):
        # Add one Cluster point to the collection.
        self.cluster_list.append(cluster)
    def reshape(self):
        # Promote the plain Python list of points to a numpy object array.
        self.cluster_list = np.array(self.cluster_list)  # .reshape(shape_0,)
    def CheckValidClusters(self, base_cluster, x_dist, y_dist):
        # Assign a label to every unassigned point within range of
        # base_cluster (its CheckValidPoints returns 1 for in-range points).
        # NOTE(review): cluster_val is incremented once per matched
        # neighbour, so base_cluster's label is overwritten on each match
        # and matched neighbours receive different labels — confirm this is
        # the intended behaviour rather than a single shared label.
        if base_cluster.cluster == -2:
            for cluster in self.cluster_list:
                if cluster.cluster == -2:
                    d_check = base_cluster.CheckValidPoints(
                        cluster, x_dist, y_dist)
                    if d_check == 1:
                        cluster.cluster = self.cluster_val
                        base_cluster.cluster = self.cluster_val
                        self.cluster_val += 1
def testMethod():
    """Ad-hoc smoke test: builds a small grid of points, clusters them in a
    random traversal order, and prints the resulting assignments."""
    p1 = Cluster(1, 2)
    p2 = Cluster(2, 3)
    p = ClusterLists()
    p.append(p1)
    p.append(p2)
    p.append(Cluster(3, 1))
    p.append(Cluster(1, 1))
    p.append(Cluster(2, 2))
    p.append(Cluster(3, 3))
    p.append(Cluster(1, 3))
    p.append(Cluster(2, 1))
    p.append(Cluster(3, 2))
    p.append(Cluster(4, 1))
    p.append(Cluster(2, 4))
    p.append(Cluster(4, 4))
    p.append(Cluster(3, 4))
    p.append(Cluster(2, 4))
    p.update_random()
    print(p.randoms)
    # Visit the points in random order, clustering around each in turn.
    for i in range(0, len(p.randoms)):
        choice = random.choice(p.randoms)
        p.CheckValidClusters(p.cluster_list[choice], 1, 1)
        p.randoms.remove(choice)
    print(p.randoms)
    for cluster in p.cluster_list:
        print("x ={}, y={}, cluster{}".format(
            cluster.x, cluster.y, cluster.cluster))
    s = p.cluster_list[0].CheckValidPoints(p.cluster_list[1], 1.5, 1.5)
    p.reshape()
    p3 = p1.CheckValidPoints(p2, 1.5, 1.5)
    # BUG FIX: the original ended with print(p1.z) — Cluster defines no `z`
    # attribute, so the method always died with an AttributeError. Print the
    # two validity-check results that were computed (and discarded) instead.
    print(s, p3)
|
{"/detectionsystem.py": ["/densityscan.py"]}
|
14,754
|
rageobi/DetectionSystem
|
refs/heads/master
|
/detectionsystem.py
|
import argparse
import multiprocessing
from functools import partial
import math
from multiprocessing import Pool
#from cvlib.object_detection import draw_bbox
#import cvlib as cv
import glob
import cv2 as cv2
import numpy as np
import matplotlib.pyplot as plt
import random
from skimage.feature import hog
from scipy.ndimage.measurements import label
from scipy.ndimage import find_objects
from nuscenes.nuscenes import NuScenes
from nuscenes.utils.data_classes import RadarPointCloud as rpc
from nuscenes.utils.data_classes import LidarPointCloud as lpc
from matplotlib import pyplot as plt
import matplotlib.ticker as ticker
import os.path as osp
import matplotlib.patches as patches
from PIL import Image
from pyquaternion import Quaternion
from nuscenes.utils.geometry_utils import view_points, box_in_image, BoxVisibility, transform_matrix
from densityscan import Cluster, ClusterLists
import random
import pickle
import time
from shapely.geometry import Polygon
# Global variables shared across the pipeline: populated lazily by the
# load_* helpers and read by the prediction functions below.
svc = None            # trained SVM classifier (MODEL A or MODEL B)
net = None            # loaded YOLO network (cv2 dnn)
output_layers = None  # YOLO output layer names
classes = None        # YOLO class label strings
c_slide = None        # last overlapping box recorded by check_box_area
xscaler = None        # feature scaler fitted alongside the MODEL A SVM
def calculation_of_radar_data(radar):
    """
    Extract derived features from a radar pointcloud.
    Parameters
    ----------
    :param radar: Pointcloud object; rows of radar.points are indexed as
        0/1/2 = x/y/z, 6/7 = raw velocity x/y, 8/9 = compensated velocity x/y
    Returns
    ----------
    point_dist -> array: Distance magnitude of each point from the sensor
    point_phi -> array : Azimuth of each point from the sensor (degrees)
    point_rad_velocity -> array : Compensated radial velocity magnitude
    velocity_phi -> array : Azimuth of the raw velocity vectors (degrees)
    """
    xs = radar.points[0]
    ys = radar.points[1]
    zs = radar.points[2]
    vx_comp = radar.points[8]
    vy_comp = radar.points[9]
    vx_raw = radar.points[6]
    vy_raw = radar.points[7]
    velocity_phi = np.rad2deg(np.arctan2(vy_raw, vx_raw))
    point_dist = np.sqrt(xs ** 2 + ys ** 2 + zs ** 2)
    point_phi = np.rad2deg(np.arctan2(ys, xs))
    point_rad_velocity = np.sqrt(vx_comp ** 2 + vy_comp ** 2)
    return point_dist, point_phi, point_rad_velocity, velocity_phi
def custom_map_pointcloud_to_image(nusc,
                                   pointsensor_token,
                                   camera_token,
                                   verbose=False):
    # Inspired from the NuScenes Dev-Kit
    """
    Retrieve the image-coordinate-transformed point coordinates, per-point
    cluster labels, and the camera frame for one sample instance.
    Parameters
    ----------
    :param nusc: Nuscenes object
    :param pointsensor_token: Point sensor token
    :param camera_token: Camera sensor token
    :param verbose: Boolean variable to display console logs
    Returns
    ----------
    points -> ndarray: Points data transformed to Image coordinates
    coloring -> list : Cluster associated for points
    im -> PIL Image : Image frame for the instance
    """
    # rpc.abidefaults()
    ## Disable all the radar filter settings
    rpc.disable_filters()
    cam = nusc.get('sample_data', camera_token)
    pointsensor = nusc.get('sample_data', pointsensor_token)
    pcl_path = osp.join(nusc.dataroot, pointsensor['filename'])
    if pointsensor['sensor_modality'] == 'lidar':
        pc = lpc.from_file(pcl_path)
    else:
        pc = rpc.from_file(pcl_path)
    im = Image.open(osp.join(nusc.dataroot, cam['filename']))
    # Points live in the point sensor frame. So they need to be transformed via global to the image plane.
    # First step: transform the point-cloud to the ego vehicle frame for the timestamp of the sweep.
    point_dist, point_phi, point_rad_velocity, velocity_phi = calculation_of_radar_data(
        pc)
    ## Convert from meters/h to Km/h
    detections_radial_velocity_kmph = point_rad_velocity * 3.6
    ## Get Clusterlist object for velocity vectors azimuth and point distance
    point_cluster = appendtoclusterlist(velocity_phi, point_dist)
    ## Cluster all points which are within 2.5 radians of vel_phi and 5m distance as same cluster
    cluster_list = point_cluster.cluster_cluster(2.5, 5)
    # Reshape each derived feature to a single row so it can be stacked
    # under the existing pointcloud rows.
    detections_radial_velocity_kmph = np.reshape(
        detections_radial_velocity_kmph, (1, detections_radial_velocity_kmph.shape[0]))
    d_phi = np.reshape(
        point_phi, (1, point_phi.shape[0]))
    d_dist = np.reshape(
        point_dist, (1, point_dist.shape[0]))
    velocity_phi = np.reshape(
        velocity_phi, (1, velocity_phi.shape[0]))
    ## append calculated features to the radar pointcloud
    points = np.append(pc.points, velocity_phi, axis=0)
    points = np.append(points, d_phi, axis=0)
    points = np.append(points, d_dist, axis=0)
    points = np.append(points, detections_radial_velocity_kmph, axis=0)
    #mask = np.where(points[18, :] >= -200)
    #pos = points[:, mask]
    #points = np.reshape(points, (points.shape[0], points.shape[2]))
    pc.points = points
    cs_record = nusc.get('calibrated_sensor',
                         pointsensor['calibrated_sensor_token'])
    pc.rotate(Quaternion(cs_record['rotation']).rotation_matrix)
    pc.translate(np.array(cs_record['translation']))
    # Second step: transform to the global frame.
    poserecord = nusc.get('ego_pose', pointsensor['ego_pose_token'])
    pc.rotate(Quaternion(poserecord['rotation']).rotation_matrix)
    pc.translate(np.array(poserecord['translation']))
    # Third step: transform into the ego vehicle frame for the timestamp of the image.
    poserecord = nusc.get('ego_pose', cam['ego_pose_token'])
    pc.translate(-np.array(poserecord['translation']))
    pc.rotate(Quaternion(poserecord['rotation']).rotation_matrix.T)
    # Fourth step: transform into the camera.
    cs_record = nusc.get('calibrated_sensor', cam['calibrated_sensor_token'])
    pc.translate(-np.array(cs_record['translation']))
    pc.rotate(Quaternion(cs_record['rotation']).rotation_matrix.T)
    # Fifth step: actually take a "picture" of the point cloud.
    # Grab the depths (camera frame z axis points away from the camera).
    depths = pc.points[2, :]
    ## Let the coloring be based on clusters formed
    coloring = cluster_list
    # Take the actual picture (matrix multiplication with camera-matrix + renormalization).
    points = view_points(pc.points[:3, :], np.array(
        cs_record['camera_intrinsic']), normalize=True)
    ## rebuilding the pointcloud features
    # NOTE(review): assumes the radar cloud plus the 4 appended rows totals
    # 22 rows (indices 3..21 re-attached here) — confirm for this dataset.
    points = np.append(points, pc.points[3:22, :], axis=0)
    # Remove points that are either outside or behind the camera. Leave a margin of 1 pixel for aesthetic reasons.
    # Also make sure points are at least 1m in front of the camera to avoid seeing the lidar points on the camera
    # casing for non-keyframes which are slightly out of sync.
    mask = np.ones(depths.shape[0], dtype=bool)
    mask = np.logical_and(mask, depths > 1)
    mask = np.logical_and(mask, points[0, :] > 1)
    mask = np.logical_and(mask, points[0, :] < im.size[0] - 1)
    mask = np.logical_and(mask, points[1, :] > 1)
    mask = np.logical_and(mask, points[1, :] < im.size[1] - 1)
    points = points[:, mask]
    coloring = coloring[mask]
    if verbose:
        print(' Total number of points in frame', points.shape[1])
    return points, coloring, im
def appendtoclusterlist(x, y):
    """
    Build a ClusterLists containing one Cluster per (x, y) pair.
    Parameters
    ----------
    :param x: X coordinates of the clusterlist
    :param y: Y coordinates of the clusterlist
    Returns
    --------
    cl -> ClusterLists : ClusterList of all the points provided
    """
    collection = ClusterLists()
    # One Cluster per coordinate pair, in input order.
    for x_value, y_value in zip(x, y):
        collection.append(Cluster(x_value, y_value))
    return collection
def get_boxes_yolo(frame, method, point, visualize=False, verbose=False):
    """
    Predict vehicle box coordinates with the YOLO network.
    Parameters
    ----------
    :param frame: Image frame which needs to be predicted
    :param method: int classifier type (2 = Modified YOLOv3 on a cropped
        region, anything else = Original YOLOv3 on the whole frame)
    :param point: The point data of the current frame instance
    :param visualize: Boolean variable to visualize each predicted region
    :param verbose: Boolean variable to display console logs
    Returns
    --------
    bbox -> list : Vehicle detected box coordinates
    """
    if method == 2:
        # Modified YOLOv3: crop a square region around the radar point,
        # sized empirically by the point's distance (closer -> larger crop).
        if point[3] < 15:
            half_size = 450
        elif point[3] < 20:
            half_size = 200
        else:
            half_size = 100
        centre_x = int(round(point[0]))
        centre_y = int(round(point[1]))
        x1 = centre_x - half_size
        y1 = centre_y - half_size
        frame = np.array(frame.crop((x1, y1,
                                     centre_x + half_size,
                                     centre_y + half_size)))
    else:
        # Original YOLOv3: run on the whole, uncropped frame.
        frame = np.array(frame)
        x1 = y1 = 0
    bbox, label, confidence = get_yolo_detections(frame, (x1, y1))
    if visualize:
        # Draw each detection (shifted back into crop coordinates) on a
        # fresh copy of the frame and show it.
        for box in bbox:
            canvas = np.copy(frame)
            top_left = (box[0][0] - x1, box[0][1] - y1)
            bottom_right = (box[1][0] - x1, box[1][1] - y1)
            cv2.rectangle(canvas, top_left, bottom_right, (0, 255, 0))
            plt.imshow(canvas)
            plt.show()
    return bbox
def load_svc():
    """
    Load the trained model for the MODEL B approach.
    Returns
    ----------
    svc : The SVC model
    """
    # Use a context manager so the file handle is closed even on error
    # (the original left the handle open).
    with open("data/svc_hope.p", 'rb') as model_file:
        return pickle.load(model_file)
def load_svc_2():
    """
    Load the trained model for the MODEL A approach.
    Returns
    ----------
    svc : The SVC model
    xscaler : The fitted scaler value
    """
    # Context managers close both handles (the original leaked them).
    with open("data/svmhopeful.p", 'rb') as model_file:
        svc = pickle.load(model_file)
    with open("data/xscalerhopeful.p", 'rb') as scaler_file:
        xscaler = pickle.load(scaler_file)
    return svc, xscaler
# Python/Project/data/YOLOv3/yolov3.cfg data/YOLOv3/yolov3.weights
def load_net(weights_location='data/YOLOv3/yolov3.weights', config_location='data/YOLOv3/yolov3.cfg', names_location='data/YOLOv3/yolov3_classes.txt'):
    """
    Helper function to load the YOLO network
    Parameters
    ----------
    :param weights_location: Network weights file location
    :param config_location: Network conifg file location
    :param names_location: Network classes file location
    Returns
    --------
    net -> dnn : Loaded Network
    output_layers -> list : Network layers
    classes -> list : Class names
    """
    ## Load the net based on weights and config provided
    net = cv2.dnn.readNet(weights_location, config_location)
    #net = cv2.dnn_DetectionModel(config_location, weights_location)
    classes = []
    ## Load all the classes
    with open(names_location, "r") as f:
        classes = [line.strip() for line in f.readlines()]
    ## Define the output layers built based on loaded net
    layer_names = net.getLayerNames()
    # NOTE(review): the i[0] indexing assumes getUnconnectedOutLayers()
    # returns nested 1-element arrays (older OpenCV builds); newer OpenCV
    # returns a flat array, which would make this raise — confirm the
    # pinned OpenCV version.
    output_layers = [layer_names[i[0] - 1]
                     for i in net.getUnconnectedOutLayers()]
    return net, output_layers, classes
def get_yolo_detections(frame, primary_origin=(0, 0)):
    # Reference - https://pysource.com/2019/06/27/yolo-object-detection-using-opencv-with-python/
    """
    Function to predict boxes through the loaded YOLO network
    Parameters
    ----------
    :param frame: Image frame which needs to be predicted
    :param primary_origin: Tuple with starting coordinates of image. (0,0) for uncropped image. But if region of the frame is sent, pass the starting coordinates of the region wrt to orginal uncropped frame
    Returns
    --------
    bbox -> list : Predicted bounding boxes
    label -> list : Predicted box labels
    confidence -> list : Predicted boxes confidence scores
    """
    global net, output_layers, classes
    height, width, channels = frame.shape
    # Normalise to [0,1] (1/255 ~= 0.00392) and resize to the 320x320 input.
    blob = cv2.dnn.blobFromImage(
        frame, 0.00392, (320, 320), (0, 0, 0), True, crop=False)
    net.setInput(blob)
    outs = net.forward(output_layers)
    class_ids = []
    confidences = []
    boxes = []
    for out in outs:
        for detection in out:
            # detection = [cx, cy, w, h, objectness, per-class scores...]
            scores = detection[5:]
            class_id = np.argmax(scores)
            confidence = scores[class_id]
            ## Take the detections whose confidence score is greater than 0.5 and classes of the boxes are [car,bus,truck]
            if confidence > 0.5 and class_id in [2, 5, 7]:
                center_x = int(detection[0] * width)
                center_y = int(detection[1] * height)
                w = int(detection[2] * width)
                h = int(detection[3] * height)
                x = int(center_x - w / 2)
                y = int(center_y - h / 2)
                boxes.append([x, y, w, h])
                confidences.append(float(confidence))
                class_ids.append(class_id)
    ## All the boxes with scores greater than 0.5 and Non-Max Sopression greater than 0.4 are defined as predicted detections
    indexes = cv2.dnn.NMSBoxes(boxes, confidences, 0.5, 0.4)
    bbox = []
    label = []
    confidence = []
    for i in indexes:
        # NOTE(review): i[0] assumes NMSBoxes returns nested 1-element
        # arrays (older OpenCV); newer versions return flat ints — confirm
        # the pinned OpenCV version matches.
        i = i[0]
        box = boxes[i]
        # Shift back into full-frame coordinates via primary_origin.
        x = int(box[0]) + primary_origin[0]
        y = int(box[1]) + primary_origin[1]
        w = int(box[2])
        h = int(box[3])
        bbox.append(((x, y), ((x+w), (y+h))))
        label.append(str(classes[class_ids[i]]))
        confidence.append(confidences[i])
    return bbox, label, confidence
def get_boxes_svm(frame=None, visualize=False, verbose=False, method=1, point=None):
    # Inspired from https://github.com/JunshengFu/vehicle-detection/blob/master/svm_pipeline.py
    """
    Helper function to predict the vehicle box coordinates through SVM classifier approach
    Parameters
    ----------
    :param frame: Image frame which needs to be predicted
    :param visualize: Boolean variable to check if user needs to visualize region frames which are proposed and marked
    :param verbose: Boolean variable to display console logs
    :param method: int which specifies the classifier type. (1 for MODEL B and 0 for MODEL A)
    :param point: The point data of the current frame instance
    Returns
    --------
    final_boxes -> list : Vehicle detected box coordinates
    NOTE(review): when point[3] >= 70, frame_size is False and the whole
    body below is skipped, so the function implicitly returns None — the
    caller's `bbox != None` checks rely on this; confirm it is intended.
    """
    ## Empirically define the region or sub-frame size based on point distance value
    if point[3] < 15:
        frame_size = 500
        #frame_size_y = 500
    elif point[3] < 20:
        frame_size = 250
        #frame_size_y = 250
    elif point[3] < 30:
        frame_size = 200
        #frame_size_y = 200
    elif point[3] < 40:
        frame_size = 150
        #frame_size_y = 150
    elif point[3] < 50:
        frame_size = 120
        #frame_size_y = 120
    elif point[3] < 70:
        frame_size = 50
        #frame_size_y = 50
    else:
        frame_size = False
    ## Empirically calculate the window sizes based on the frame size
    if frame_size:
        if point[3] > 14:
            window_size_1 = int(0.5 * (frame_size))
            window_size_2 = int(0.3 * (frame_size))
        else:
            window_size_1 = int(0.65 * (frame_size))
            window_size_2 = int(0.45 * (frame_size))
        ## Crop regions to form a new frame
        x1 = int(round(point[0])) - ((frame_size) // 2)
        y1 = int(round(point[1])) - ((frame_size) // 2)
        x2 = int(round(point[0])) + ((frame_size) // 1.5)
        y2 = int(round(point[1])) + ((frame_size) // 3)
        frame = frame.crop((x1, y1, x2, y2))
        ## Define the overlap value based on the SVM model/method
        if method == 0:
            overlap = 0.09
        else:
            overlap = 0.10
        frame = np.array(frame)
        ## Get all the windows
        sliding_window_1 = get_window_slides(
            frame, window_size_1, overlap=overlap)
        # sliding_window_1 = get_window_slides(
        #     frame, window_size_2, overlap=0.10)
        sliding_windows = sliding_window_1  # + sliding_window_2
        ## Get all windows predicted as vehicles
        vehicle_slides = predict_vehicles_slides_2(
            frame, method, sliding_windows)
        #vehicle_slides = predict_vehicles_slides(frame, sliding_windows)
        ## Get the final bounding boxes based on vehicle window predictions
        proba_frame, calculated_slides = get_calculated_box(
            frame.shape, vehicle_slides)
        ## Draw all the windows/boxes on the image frame
        frame_slides_complete = frame_slides_canvas(frame, sliding_windows)
        frame_slides_refined = frame_slides_canvas(frame, vehicle_slides)
        frame_slides_final = frame_slides_canvas(frame, calculated_slides)
        if visualize:
            f, axes = plt.subplots(1, 3, figsize=(20, 100))
            axes[0].set_title("All Sliding Windows")
            axes[0].imshow(frame_slides_complete)
            axes[1].set_title("Refined Sliding Windows")
            axes[1].imshow(frame_slides_refined)
            axes[2].set_title("Final Prediction")
            axes[2].imshow(frame_slides_final)
            plt.show()
        final_boxes = []
        for j, slide in enumerate(calculated_slides):
            ## Convert the bounding boxes from sub-frame to image co-ordinates
            if (slide != None and len(slide) > 0):
                a = x1 + slide[0][0]
                b = y1 + slide[0][1]
                c = x1 + slide[1][0]
                d = y1 + slide[1][1]
                final_boxes.append([(a, b), (c, d)])
        return final_boxes
def get_marked_frames(nusc, pointsensor_token, camera_token, method=(2, 0), visualize_frames=False, visualize_sub_frames=False, verbose=False):
    """
    Main helper function which handles the calls to other helper function. Gets all the vehicle predicition boxes and the box marked frames.
    Parameters
    ----------
    :param nusc: Nuscenes object
    :param pointsensor_token: Radar sensor token
    :param camera_token: Camera sensor token
    :param method: Tuple which specifies the (classifier, isParallel)
    :param visualize_frames: Boolean variable to check if user needs to visualize fully marked image frames
    :param visualize_sub_frames: Boolean variable to check if user needs to visualize region frames which are proposed and marked
    :param verbose: Boolean variable to display console logs
    Returns
    --------
    frame -> ndarray : Marked image frames
    box -> list : Vehicle detected box coordinates
    """
    p, color, frame = custom_map_pointcloud_to_image(
        nusc, pointsensor_token, camera_token, verbose)
    ## Get only the X, Y and the calculated Radar features from the pointcloud
    filtered_col = p[[0, 1, 18, 19, 20, 21], :]
    ## Cluster information
    color = np.array(color).reshape(1, color.shape[0])
    ## Append both to a np array
    new_p = np.append(filtered_col, color, axis=0)
    ## Get all unique cluster values
    un = np.unique(color, axis=1)
    averages = []
    def restrict_dupli_frames(average, averages):
        """
        Return True when the "average" region is NOT redundant — i.e. no
        previously kept region lies within 51px on x and 45px on y.
        """
        flag = 1
        for avg in averages:
            if abs(avg[0] - average[0]) < 51 and abs(avg[1] - average[1]) < 45:
                flag = 0
                return False
        return True
    ## Loop through unique cluster values
    for i, val in enumerate(un[0], 0):
        ## Getting all the filtered pointcloud data for a specific cluster value and also has compensated radial velocity above a threshold
        mask = np.logical_and(new_p[6, :] == val, new_p[5, :] > 7)
        filtered_points = new_p[:, mask]
        if filtered_points.shape[1] > 0:
            ## Average all the point cloud data and store it in a var
            average = np.mean(filtered_points, axis=1)
            if len(averages) == 0:
                averages.append(
                    [average[0], average[1], average[3], average[4]])
            else:
                ## Check for dupilcate frames and append only if not
                if restrict_dupli_frames([average[0], average[1]], averages):
                    averages.append(
                        [average[0], average[1], average[3], average[4]])
    boxes = []
    box = []
    if verbose:
        print(' Total number of point regions to be verified:', len(averages))
    ## method[0]= 0 = MODEL A,
    ## 1 = MODEL B,
    ## 2 = Modified YOLOv3,
    ## 3 = Original YOLOv3
    ## method[1]= 0 = No parallel processing,
    ## 1 = Parallel processing,
    if method[0] <= 1:
        if (method[1]) == 1:
            ## Open process pool and get bounding boxes through get_boxes_svm(...) for every "average" radar point
            pool = multiprocessing.Pool()
            func = partial(get_boxes_svm, frame,
                           visualize_sub_frames, verbose, method[0])
            boxes = (pool.map(func, averages))
            pool.close()
            pool.join()
        else:
            ## Get bounding boxes through get_boxes_svm(...) for every "average" radar point
            for average in averages:
                boxes.append(get_boxes_svm(
                    frame, visualize_sub_frames, verbose, method[0], average))
    elif method[0] == 2:
        ## Get bounding boxes through get_boxes_yolo(...) for every "average" radar point
        for average in averages:
            boxes.append(get_boxes_yolo(
                frame, method[0], average, visualize_sub_frames, verbose))
    else:
        ## Get bounding boxes through get_boxes_yolo(...) for every "average" radar point
        boxes.append(get_boxes_yolo(
            frame, method[0], (0, 0, 0, 0), visualize_sub_frames, verbose))
    frame = np.array(frame)
    # De-duplicate / merge the per-region predictions into the final `box`
    # list; check_box_area records the overlapped box in global c_slide.
    for i, bbox in enumerate(boxes):
        if (bbox != None and len(bbox) > 0):
            for j in range(len(bbox)):
                if (bbox[j] != None and len(bbox[j]) > 0):
                    a = bbox[j][0][0]
                    b = bbox[j][0][1]
                    c = bbox[j][1][0]
                    d = bbox[j][1][1]
                    if (len(box) < 1):
                        box.append([[a, b], [c, b], [c, d], [a, d]])
                        #cv2.rectangle(frame, (a,b),(c,d), color=(0, 0, 255), thickness=2)
                        # plt.imshow(frame)
                        # plt.show()
                    else:
                        ## Check if an approximate bounding box is predicted already and if it's predicted already add/retain the one with more area and remove the other
                        if check_box_area([[a, b], [c, b], [c, d], [a, d]], box, frame):
                            box.append([[a, b], [c, b], [c, d], [a, d]])
                            #cv2.rectangle(frame, (a, b), (c, d), color=(0, 255, 0), thickness=2)
                        else:
                            global c_slide
                            # b1_area, b2_area = get_box_area(
                            #     c_slide, [[a, b], [c, b], [c, d], [a, d]])
                            b1_area = get_box_area(c_slide)
                            b2_area = get_box_area(
                                [[a, b], [c, b], [c, d], [a, d]])
                            # NOTE(review): box.remove assumes c_slide is
                            # still in box — confirm no earlier iteration
                            # can remove it first.
                            if b2_area > b1_area:
                                box.remove(c_slide)
                                box.append([[a, b], [c, b], [c, d], [a, d]])
                                #cv2.rectangle(frame, (a, b), (c, d), color=(0, 255, 0), thickness=2)
                                #cv2.rectangle(frame, (c_box[0][0], c_box[0][1]), (c_box[2][0], c_box[2][1]), color=(255, 0, 0), thickness=2)
                            #cv2.rectangle (frame,(a,b),(c,d), color=(0, 255, 0), thickness=2)
                            # plt.imshow(frame)
                            # plt.show()
    if verbose:
        print(' Total number of vehicle regions predicted in frame:', len(box))
    marked_boxes = []
    ## Build the final bounding boxes unified to same format (All the approaches)
    for rect in box:
        cv2.rectangle(frame, (rect[0][0], rect[0][1]), (rect[2][0], rect[2][1]), color=(
            0, 255, 0), thickness=2)
        marked_boxes.append(
            ((rect[0][0], rect[0][1]), (rect[2][0], rect[2][1])))
    if visualize_frames:
        # plt.imshow(frame)
        # plt.show()
        if verbose:
            print(' Visualising points and predicted frames')
        points_in_image(p, np.array(averages)[:, :2], color, frame)
    return frame, box
def points_in_image(points, averages, colouring, frame):
    """
    Function which can help in scattering the points over frame and visualize information of the points on hover.
    Parameters
    ----------
    :param points: Pointcloud data
    :param averages: Clustered and averaged points which are considered for region proposal
    :param colouring: Coloring of the points which are to be scattered. n_points should be equal to n_coloring values
    :param frame: The image frame on which points are to be scattered
    """
    frame_copy = np.copy(frame)
    fig, ax = plt.subplots()
    ## Scatter points based on transformed X & Y coordinates of Radar points and color based on its cluster value
    sc = ax.scatter(points[0, ], points[1, ], c=colouring[0], s=8, alpha=0.5)
    averages = np.transpose(averages)
    annot = ax.annotate("", xy=(0, 0), xytext=(20, 20), textcoords="offset points",
                        bbox=dict(boxstyle="round", fc="w"),
                        arrowprops=dict(arrowstyle="->"))
    t = sc.get_offsets()
    def update_annot(ind):
        """
        Build the hover annotation text for the points under the cursor
        (rows 18-21 are the features appended in custom_map_pointcloud_to_image).
        """
        pos = sc.get_offsets()[ind["ind"][0]]
        annot.xy = pos
        text = "{}\n Velocitty Phi ={},\n Phi = {}\n dist={},\n Rad vel ={},\n cluster ={}".format(" ".join(list(map(str, ind["ind"]))),
                                                                                                   " ".join(
                                                                                                       str([points[18, n] for n in ind["ind"]])),
                                                                                                   " ".join(str([points[19, n]
                                                                                                                 for n in ind["ind"]])),
                                                                                                   " ".join(str([points[20, n]
                                                                                                                 for n in ind["ind"]])),
                                                                                                   " ".join(str([points[21, n]
                                                                                                                 for n in ind["ind"]])),
                                                                                                   " ".join(str([colouring[0, n] for n in ind["ind"]])))
        annot.set_text(text)
        annot.get_bbox_patch().set_alpha(0.4)
    def hover(event):
        """
        Capture the hover event and show/hide the annotation accordingly.
        """
        vis = annot.get_visible()
        if event.inaxes == ax:
            cont, ind = sc.contains(event)
            if cont:
                update_annot(ind)
                annot.set_visible(True)
                fig.canvas.draw_idle()
            else:
                if vis:
                    annot.set_visible(False)
                    fig.canvas.draw_idle()
    fig.canvas.mpl_connect("motion_notify_event", hover)
    ## Scatter the predicted moving vehicle predicted points
    sc2 = ax.scatter(averages[0, ], averages[1, ], s=14, alpha=0.9)
    plt.imshow(frame_copy)
    plt.show()
def get_box_area(box):
    """
    Return the area of the polygon described by *box*.
    Parameters
    ----------
    :param box: Corner coordinates, e.g. a rectangle [[a, b], [c, b], [c, d], [a, d]]
    Returns
    ----------
    float : Area of the polygon
    """
    polygon = Polygon(box)
    return polygon.area
def check_box_area(box1, boxes, frame, visualize=False):
    """
    Function checks if box1 is already present in the list of boxes. A box is considered to be present if
    intersection area is greater than 15% of either box's area.
    If an overlap is found, the overlapped box is saved in the global
    c_slide so the caller can decide which of the two to keep.
    Parameters
    ----------
    :param box1: Box cordinates which needs to checked if present already
    :param boxes: All the boxes which have been added prior to box1 instance
    :param frame: The image frame over which the box rectangles need to visualised
    :param visualize: Boolean variable to check if user needs to visualize fully marked image frames
    Returns
    ----------
    bool : True when box1 is new (no significant overlap), False otherwise
    """
    for box2 in boxes:
        if not (box2 == box1):
            intersection = calculate_intersection(box1, box2)
            #a1, a2 = get_box_area(box1, box2)
            a1 = get_box_area(box1)
            a2 = get_box_area(box2)
            ## Checks if area of interesection between two boxes is less than 15% of other, if not it is considered as redundant prediction
            if intersection < 0.15*a1 and intersection < 0.15*a2:
                continue
            else:
                global c_slide
                if visualize:
                    # Green marks the larger box, red the smaller one.
                    if (a1 > a2):
                        c1 = (0, 255, 0)
                        c2 = (255, 0, 0)
                    else:
                        c1 = (255, 0, 0)
                        c2 = (0, 255, 0)
                    cv2.rectangle(
                        frame, (box2[0][0], box2[0][1]), (box2[2][0], box2[2][1]), color=c2, thickness=2)
                    cv2.rectangle(
                        frame, (box1[0][0], box1[0][1]), (box1[2][0], box1[2][1]), color=c1, thickness=2)
                    plt.imshow(frame)
                    plt.show()
                # Record the overlapped existing box for the caller.
                c_slide = box2
                return False
    return True
def calculate_intersection(box_1, box_2):
    """
    Return the overlap area between two shapes.
    Parameters
    ----------
    :param box_1: Coordinates for first shape. Sample for rectangle [[a, b], [c, b], [c, d], [a, d]]
    :param box_2: Coordinates for second shape. Sample for rectangle [[a, b], [c, b], [c, d], [a, d]]
    Returns
    ----------
    float : Area of intersection between the two shapes
    """
    first = Polygon(box_1)
    second = Polygon(box_2)
    return first.intersection(second).area
def calculate_iou(box_1, box_2):
    """
    Return the Intersection-over-Union of two shapes.
    Parameters
    ----------
    :param box_1: Coordinates for first shape. Sample for rectangle [[a, b], [c, b], [c, d], [a, d]]
    :param box_2: Coordinates for second shape. Sample for rectangle [[a, b], [c, b], [c, d], [a, d]]
    Returns
    ----------
    float : IOU value ranging between 0 and 1
    """
    first = Polygon(box_1)
    second = Polygon(box_2)
    overlap = first.intersection(second).area
    combined = first.union(second).area
    return overlap / combined
def get_window_slides(frame, window_size, overlap):
    """
    Generate sliding-window corner coordinates covering *frame*.
    Parameters
    ----------
    :param frame: The image frame (only frame.shape is read)
    :param window_size: Edge length of each square window
    :param overlap: Fraction of window_size used as the step between windows
    Returns
    ----------
    window_slides -> list : [(top-left), (bottom-right)] pairs per window
    """
    assert frame.shape[1] > window_size
    slides = []
    step = window_size * overlap
    # Upper bounds on window counts per axis; the in-loop bounds checks
    # stop earlier when a window would spill past the frame edge.
    max_cols = int(frame.shape[1] // step)
    max_rows = int(frame.shape[0] // step)
    y_origin = 0
    for _ in range(max_rows):
        if y_origin + window_size >= frame.shape[0]:
            break
        x_origin = 0
        for _ in range(max_cols):
            if x_origin + window_size >= frame.shape[1]:
                break
            slides.append(
                [(x_origin, y_origin), (x_origin + window_size, y_origin + window_size)])
            # Truncate to int each step, matching cumulative integer seeds.
            x_origin = int(x_origin + step)
        y_origin = int(y_origin + step)
    return slides
def get_other_features(sub_frame):
    """
    Extract colour features: down-sampled pixel bins plus per-channel histograms.

    Parameters
    ----------
    :param sub_frame: The image frame (3-channel ndarray)
    Returns
    ----------
    rs_bins -> ndarray : Concatenated 32x32-resized pixels of all three channels
    sf_hist -> ndarray : Concatenated 32-bin histogram counts of all three channels
    """
    resized = [cv2.resize(sub_frame[:, :, ch], (32, 32)).ravel()
               for ch in range(3)]
    histograms = [np.histogram(sub_frame[:, :, ch], bins=32)
                  for ch in range(3)]
    rs_bins = np.concatenate((resized[0], resized[1], resized[2]))
    # np.histogram returns (counts, bin_edges); only the counts are kept.
    sf_hist = np.concatenate(
        (histograms[0][0], histograms[1][0], histograms[2][0]))
    return rs_bins, sf_hist
def frame_slides_canvas(frame, slide_windows):
    """
    Draw every window as a randomly coloured rectangle on a copy of the frame.

    Parameters
    ----------
    :param frame: The image frame
    :param slide_windows: Window boxes to draw, each as [(x1, y1), (x2, y2)]
    Returns
    ----------
    canvas -> ndarray : Copy of the frame with the rectangles drawn
    """
    canvas = np.array(frame)
    for top_left, bottom_right in slide_windows:
        # Three randint calls in R, G, B order (same consumption of the RNG
        # stream as drawing each component separately).
        colour = tuple(random.randint(0, 255) for _ in range(3))
        cv2.rectangle(canvas, (top_left[0], top_left[1]),
                      (bottom_right[0], bottom_right[1]), colour, 1)
    return canvas
def predict_vehicles_slides_2(frame, method, slide_windows):
    """
    Predict which sliding windows contain a vehicle using the loaded SVM.

    Parameters
    ----------
    :param frame: The full image frame (RGB ndarray)
    :param method: Defines the SVM approach to follow. 0 for MODEL A approach (HOG + colour
        features + scaler) and 1 for MODEL B approach (HOG features only)
    :param slide_windows: All the window boxes drawn for the original image frame,
        each as ((x1, y1), (x2, y2))
    Returns
    ----------
    vehicle_slides -> list : Subset of slide_windows predicted to contain a vehicle
    """
    vehicle_slides = []
    ## Get the loaded model data
    # NOTE(review): assumes svc (and xscaler for method 0) were populated by
    # run_detection_system() before this is called — confirm call order.
    global svc, xscaler
    for slide_window in slide_windows:
        # Crop the window region; boxes are (x, y) pairs while ndarrays are
        # indexed [row=y, col=x].
        sub_frame = frame[slide_window[0][1]: slide_window[1]
                          [1], slide_window[0][0]: slide_window[1][0], :]
        # Classifier was trained on 64x64 YUV patches.
        sub_frame = cv2.cvtColor(sub_frame, cv2.COLOR_RGB2YUV)
        sub_frame = cv2.resize(sub_frame, (64, 64))
        if method == 0:
            ## Get all the required features from images to feed in the classifer as input
            hog_feat = get_hog_features(sub_frame, 15, (8, 8))
            rs_bins, sf_hist = get_other_features(sub_frame)
            test_stacked = np.hstack(
                (rs_bins, sf_hist, hog_feat[0])).reshape(1, -1)
            #test_stacked = np.hstack((rs_bins, hog_feat[0])).reshape(1, -1)
            ## Normalize value using the Standard scaler value which is already built
            hog_feat_2 = xscaler.transform(test_stacked)
            # prediction=svc.predict(j)
            prediction = svc.predict(hog_feat_2)
        else:
            ## Extract the required image feature
            hog_feat = get_hog_features(sub_frame)
            prediction = svc.predict(hog_feat)
        # Label 1 means "vehicle".
        if prediction == 1:
            vehicle_slides.append(slide_window)
    return vehicle_slides
def predict_vehicles_slides(frame, slide_windows):
    """
    Deprecated: superseded by predict_vehicles_slides_2 (its method == 1 path
    is equivalent to this function). Kept for reference only.
    """
    ## Replaced this function with predict_vehicles_slides_2 function
    vehicle_slides = []
    global svc
    for slide_window in slide_windows:
        # Crop the window region ((x1, y1), (x2, y2)); ndarray indexing is [y, x].
        sub_frame = frame[slide_window[0][1]: slide_window[1]
                          [1], slide_window[0][0]: slide_window[1][0], :]
        sub_frame = cv2.cvtColor(sub_frame, cv2.COLOR_RGB2YUV)
        sub_frame = cv2.resize(sub_frame, (64, 64))
        hog_feat = get_hog_features(sub_frame)
        # prediction=svc.predict(j)
        prediction = svc.predict(hog_feat)
        if prediction == 1:
            vehicle_slides.append(slide_window)
    return vehicle_slides
def get_hog_features(frame, orientations=9, pixels_per_cell=(16, 16), cells_per_block=(2, 2), visualize=False, feature_vector=True, multichannel=None):
    """
    Wrapper around skimage's hog feature extractor.

    Parameters
    ----------
    Same as HOG from the skimage module.
    Returns
    ----------
    If visualize is True: (normalized_blocks, hog_image) computed over the whole
    frame. Otherwise: a one-element list holding the ravelled per-channel HOG
    features.
    """
    if visualize:
        blocks, hog_image = hog(
            frame[:, :, :], orientations, pixels_per_cell, cells_per_block,
            visualize=visualize, feature_vector=feature_vector)
        return blocks, hog_image
    per_channel = [hog(frame[:, :, ch], orientations, pixels_per_cell,
                       cells_per_block, visualize=visualize,
                       feature_vector=feature_vector)
                   for ch in range(frame.shape[2])]
    # Flatten all channels into a single feature vector, wrapped in a list to
    # keep the caller-facing shape ([features]) unchanged.
    return [np.ravel(per_channel)]
def get_calculated_box(frame_size, slide_windows):
    """
    Merge overlapping vehicle-prediction windows into final detection boxes.

    A heat-map counter is accumulated over all predicted windows, weakly
    supported pixels are zeroed out, and the surviving connected regions are
    labelled and converted back into box coordinates.

    Parameters
    ----------
    :param frame_size: (height, width) of the image frame
    :param slide_windows: Sliding windows refined to vehicle predictions
    Returns
    ----------
    heat_map -> ndarray : Labelled heat-map of the retained regions
    merged_boxes -> list : ((x1, y1), (x2, y2)) final detection boxes
    """
    heat_map = np.zeros((frame_size[0], frame_size[1]))
    # Each predicted window votes for the pixels it covers ([y, x] indexing).
    for (x1, y1), (x2, y2) in slide_windows:
        heat_map[y1:y2, x1:x2] += 1
    # Keep only pixels supported by more than half of the predictions.
    heat_map[heat_map <= (len(slide_windows) // 2)] = 0
    heat_map, _ = label(heat_map)
    # find_objects yields (row-slice, col-slice) per labelled region.
    merged_boxes = [((cols.start, rows.start), (cols.stop, rows.stop))
                    for rows, cols in find_objects(heat_map)]
    return heat_map, merged_boxes
def save_video(frames, filename, fps, size):
    """
    Function to save as video in .avi format
    Parameters
    ----------
    :param frames: Takes in all the frames as list
    :param filename: Name of the file
    :param fps: Frames per second value
    :param size: Size of image frames
    """
    fourcc = cv2.VideoWriter_fourcc(*'XVID')
    # Random suffix avoids clobbering earlier recordings with the same name.
    filename = str('data/videos/')+filename+str(random.randint(0, 1000))+'.avi'
    # VideoWriter expects (width, height); size is (height, width).
    video = cv2.VideoWriter(filename, fourcc, fps, (size[1], size[0]))
    for frame in frames:
        # OpenCV writes BGR; frames are RGB, so swap channels before writing.
        video.write(cv2.cvtColor(frame, cv2.COLOR_BGR2RGB))
    cv2.destroyAllWindows()
    video.release()
    print(('Saved Video {} successfully').format(filename))
def get_annotations(nusc, scene_annotations, cam_token, visualize=False, verbose=False):
    """
    Function to get all the annotated object boxes
    Parameters
    ----------
    :param nusc: Nuscenes object
    :param scene_annotations: Scene annotation tokens
    :param cam_token: Camera sensor token
    :param visualize: Boolean variable to visualize all the annotated frame
    :param verbose: Boolean variable to display console logs
    Returns
    ----------
    list: Annotated boxes as [[a, b], [c, b], [c, d], [a, d]] corner lists
    """
    annotated_boxes = []
    for ann_token in scene_annotations:
        cam = cam_token
        ann_record = nusc.get('sample_annotation', ann_token)
        ## Filtering the annotation to 'car' and 'truck', with a 'vehicle.moving' attribute
        if len(ann_record['attribute_tokens']) > 0 and ann_record['category_name'] in ['vehicle.car', 'vehicle.truck']:
            att_token = ann_record['attribute_tokens'][0]
            att_record = nusc.get('attribute', att_token)
            # ,'vehicle.stopped']):
            if(att_record['name'] in ['vehicle.moving']):
                data_path, boxes, camera_intrinsic = nusc.get_sample_data(
                    cam_token, selected_anntokens=[ann_token])
                ## Build the annotated_boxes
                for box in boxes:
                    # Project the 3-D box corners into the image plane and take
                    # the axis-aligned 2-D bounding box of the projection.
                    corners = view_points(
                        box.corners(), view=camera_intrinsic, normalize=True)[:2, :]
                    mins = corners.T.min(axis=0)
                    maxs = corners.T.max(axis=0)
                    a = int(mins[0])
                    b = int(mins[1])
                    c = int(maxs[0])
                    d = int(maxs[1])
                    annotated_boxes.append([[a, b], [c, b], [c, d], [a, d]])
                    if visualize:
                        if verbose:
                            print(' Visualising annotations')
                        frame = Image.open(data_path)
                        frame_copy = np.array(frame)
                        cv2.rectangle(frame_copy, (a, b),
                                      (c, d), (0, 255, 0), 2)
                        plt.imshow(frame_copy)
                        plt.show()
    return annotated_boxes
def str2bool(v):
    """
    Convert an argparse value into a boolean.

    Parameters
    ----------
    :param v: Value that needs to be checked and converted
    Returns
    ----------
    Boolean value for the inputted value
    Raises argparse.ArgumentTypeError for unrecognised strings.
    """
    # https://stackoverflow.com/questions/15008758/parsing-boolean-values-with-argparse/31347222
    if isinstance(v, bool):
        return v
    lowered = v.lower()
    if lowered in ('yes', 'true', 't', 'y', '1'):
        return True
    if lowered in ('no', 'false', 'f', 'n', '0'):
        return False
    raise argparse.ArgumentTypeError('Boolean value expected.')
def get_accuracy(marked_boxes, annotated_boxes, frame, visualize=False, verbose=False):
    """
    Evaluation/Validation function to calculate the accuracy
    Parameters
    ----------
    :param marked_boxes: list -> Marked object boxes for the image
    :param annotated_boxes: list -> Annotated object boxes for the image
    :param frame: PIL Image -> frame taken for detection in the current instance
    :param visualize: Boolean variable to visualize object-by-object predicted vs annotated truth for comparison
    :param verbose: Boolean variable to display console logs
    Returns
    ----------
    Precision: float
    Recall: float
    True positives : int
    False Positives : int
    """
    tp = fp = fn = 0
    iou_list = []
    # NOTE(review): `average` is computed below but never returned or used.
    average = 0.5
    for annotated_box in annotated_boxes:
        frame_copy = np.copy(frame)
        ## Default values
        max_iou = -1
        pos = -1
        # Find the best-matching predicted box (IoU > 0.5) for this annotation.
        for i, marked_box in enumerate(marked_boxes):
            frame_copy2 = np.copy(frame_copy)
            iou = calculate_iou(marked_box, annotated_box)
            ## Checks for the best predicted/marked box match in comparison with annotated box
            if iou > max_iou and iou > 0.5:
                max_iou = iou
                pos = i
            """ cv2.rectangle(frame_copy2, (marked_box[0][0], marked_box[0][1]), (marked_box[2][0], marked_box[2][1]), color=(
                255, 0, 0), thickness=2)
            cv2.rectangle(frame_copy2, (annotated_box[0][0], annotated_box[0][1]), (annotated_box[2][0], annotated_box[2][1]), color=(
                0, 255, 0), thickness=2)
            plt.imshow(frame_copy2)
            plt.show() """
        if verbose:
            print(' IoU is:', max_iou)
        ## Build confusion matrix quadrants based on the 'max_iou' value
        if max_iou > 0.5:
            tp = tp + 1
        elif max_iou >= 0:
            fp = fp + 1
        if pos == -1:
            fn = fn + 1
        #print("Correct prediction",tp,"Wrong prediction",fp,"Not Predicted",fn)
        # NOTE(review): max_iou >= -1 is always true; when pos == -1 the
        # visualization below indexes marked_boxes[-1] (the last box) —
        # confirm this is intended.
        if max_iou >= -1:
            if visualize:
                if verbose:
                    print(' Visualising IOU taken vs actual')
                cv2.rectangle(frame_copy, (marked_boxes[pos][0][0], marked_boxes[pos][0][1]), (marked_boxes[pos][2][0], marked_boxes[pos][2][1]), color=(
                    255, 0, 0), thickness=2)
                cv2.rectangle(frame_copy, (annotated_box[0][0], annotated_box[0][1]), (annotated_box[2][0], annotated_box[2][1]), color=(
                    0, 255, 0), thickness=2)
                cv2.putText(frame_copy, str(round(max_iou, 3)), (marked_boxes[pos][0][0], marked_boxes[pos][0]
                                                                 [1]-10), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0), 2)
                plt.imshow(frame_copy)
                plt.show()
        iou_list.append(max_iou)
    # NOTE(review): precision is tp / len(marked_boxes), not tp / (tp + fp) —
    # every marked box counts in the denominator; confirm this is the
    # intended definition.
    if tp > 0 or fp > 0:
        precision = tp / (len(marked_boxes))
    else:
        precision = 0
    if tp > 0 or fn > 0:
        recall = tp / (tp + fn)
    else:
        recall = 0
    # len(iou_list) > 0.1 is effectively "iou_list is non-empty".
    if len(iou_list) > 0.1:
        average = round(sum(iou_list) / len(iou_list), 3)
    # if verbose:
    #     print(' Average IoU is:', average)
    return precision, recall, tp, fp
def run_detection_system(method=(2, 0), validate_results=False, visualize_frames=False, visualize_sub_frames=False, verbose=False, save_file=False):
    """
    Main entry point that uses all the helper functions to make the detections.

    Parameters
    ----------
    :param method: Tuple which specifies the (classifier, isParallel); classifier
        values > 1 select a YOLO net, 0 selects SVM MODEL A, 1 selects SVM MODEL B
    :param validate_results: Boolean variable to check if user needs to validate results
    :param visualize_frames: Boolean variable to check if user needs to visualize fully marked image frames
    :param visualize_sub_frames: Boolean variable to check if user needs to visualize region frames which are proposed and marked
    :param verbose: Boolean variable to display console logs
    :param save_file: Boolean variable to save detections to a file
    """
    ## Load Nuscenes object and specify required channels
    location = 'data/v1.0-mini'
    nusc = NuScenes(version='v1.0-mini', dataroot=location, verbose=False)
    pointsensor_channel = 'RADAR_FRONT'
    camera_channel = 'CAM_FRONT'
    frames = []
    global net, output_layers, classes, svc, xscaler
    ## Loading model/network once per session, so that it is not repeated for every single scene/frame
    if method[0] > 1:
        net, output_layers, classes = load_net()
        if verbose:
            print('Loaded YOLO Net')
        filename = 'YOLOv3_'
    else:
        if method[0] == 0:
            svc, xscaler = load_svc_2()
        else:
            svc = load_svc()
        if verbose:
            print('Loaded SVM predictor')
        filename = 'HOG_SVM_'
    t0 = time.time()
    ## Scenes iterator
    # NOTE(review): processing is hard-coded to 'scene-0061' below — all
    # other scenes are skipped by the while-loop condition.
    for scene in nusc.scene:
        # if verbose:
        #     print('Scene description: ',scene['description'])
        first_sample_token = scene['first_sample_token']
        last_sample_token = scene['last_sample_token']
        check_token = first_sample_token
        pre = []
        rec = []
        # Walk the linked list of samples ('' marks the end of the scene).
        while (check_token != '') and scene['name'] == 'scene-0061':
            if verbose:
                print(' -------------------New-Scene----------------')
            sample_record = nusc.get('sample', check_token)
            ## Getting front radar and camera sensors' token value
            pointsensor_token = sample_record['data'][pointsensor_channel]
            camera_token = sample_record['data'][camera_channel]
            ## Get all the frames with detected moving vehicles
            marked_frames, marked_boxes = get_marked_frames(
                nusc, pointsensor_token, camera_token, method, visualize_frames, visualize_sub_frames, verbose)
            frames.append(marked_frames)
            ## Validates the prediction based on validate_result parameter
            if validate_results:
                scene_annotations = sample_record['anns']
                annotated_boxes = get_annotations(
                    nusc, scene_annotations, camera_token)
                cam = nusc.get('sample_data', camera_token)
                frame = Image.open(osp.join(nusc.dataroot, cam['filename']))
                precision, recall, trueP, trueN = get_accuracy(
                    marked_boxes, annotated_boxes, frame, visualize_sub_frames, verbose)
                pre.append(precision)
                rec.append(recall)
            check_token = sample_record['next']
        if validate_results and scene['name'] == 'scene-0061':
            print('Avg Precision is:', sum(pre) / (len(pre)))
            print('Avg Recall is:', sum(rec)/(len(rec)))
            ## Not using mAP for just one scene and hence commented the below function call
            #getmap(pre, rec)
    t1 = time.time()
    t = t1-t0
    print('Time for ', filename, 'is:', t)
    ## Save the detected frames as video if needed
    if save_file:
        save_video(frames, filename, 10, frames[0].shape[:2])
def validate_args(args):
    """
    Validate the parsed command-line arguments.

    :param args: Arguments retrieved through argsparser
    :return: The (possibly adjusted) namespace; exits the process on invalid input
    """
    try:
        ints_ok = (type(args.c) == int and type(args.p) == int
                   and 0 <= args.c <= 3 and 0 <= args.p <= 1)
        if ints_ok:
            # YOLO runs single-process; silently reset the parallel flag.
            if args.c > 1 and args.p > 0:
                print(
                    'No Parallel processing required for YOLOv3 version. Setting it to default')
                args.p = 0
        else:
            raise ValueError(
                'Use -h for help. You have entered a wrong integer input')
        flags = (args.t, args.f, args.s, args.v)
        if all(type(flag) != bool for flag in flags):
            raise ValueError(
                'Use -h for help. You have entered a wrong boolean input')
    except Exception as error:
        print('Caught this error: ' + repr(error))
        exit()
    return args
if __name__ == "__main__":
    # run_detection_system((2, 0), True,
    #                      False, False, False, False)
    # Command-line interface: classifier/parallel selection plus boolean
    # toggles for validation, visualization, verbosity and saving.
    parser = argparse.ArgumentParser()
    parser.add_argument('-c', type=int, default=2,
                        help="0 = Slow SVM, 1 = Fast SVM, 2 = Modified YOLO, 3 = Orginal YOLO")
    parser.add_argument('-p', type=int, default=0,
                        help="0 = For normal processing, 1 = For parallel processing")
    parser.add_argument('-t', type=str2bool, nargs='?',
                        const=True, default=False, help="Validate results")
    parser.add_argument('-f', type=str2bool, nargs='?',
                        const=True, default=False, help="Visualize Frames")
    parser.add_argument('-s', type=str2bool, nargs='?',
                        const=True, default=False, help="Visualize Sub-Frames")
    parser.add_argument('-v', type=str2bool, nargs='?',
                        const=True, default=False, help="Verbose")
    parser.add_argument('-k', type=str2bool, nargs='?',
                        const=True, default=False, help="Save/keep detections to a file")
    args = parser.parse_args()
    # Sanity-check (and possibly adjust) the arguments before running.
    args = validate_args(args)
    run_detection_system((args.c, args.p), args.t,
                         args.f, args.s, args.v, args.k)
|
{"/detectionsystem.py": ["/densityscan.py"]}
|
14,756
|
arnoldlayne0/tictactoe
|
refs/heads/master
|
/big_board.py
|
from copy import deepcopy
import random
# import numpy as np
# import pandas as pd
from functools import reduce
import board
class TakenFieldError(Exception):
    """Raised when a move targets a field that is already occupied."""
    pass
class LocalBoardFinishedError(Exception):
    """Raised when a move targets a sub-board that is already finished."""
    pass
class NotPositiveIntegerError(Exception):
    """Raised when a board/field number is not a positive integer."""
    pass
class BigBoard:
    """Ultimate ("big") tic-tac-toe: a size x size grid of sub-boards.

    Winning a sub-board claims the matching cell on the metaboard; winning
    the metaboard wins the game. A move on field f of any sub-board sends
    the opponent to sub-board f (unless that board is finished).
    """

    def __init__(self, size):
        self.size = size
        # size x size grid of independent sub-boards.
        self.subboards = [[board.Board(size) for _ in range(self.size)]
                          for _ in range(self.size)]
        self.moves_history = []
        # Tracks which player has claimed each sub-board.
        self.metaboard = board.Board(size)

    def big_board_to_string(self):
        """Render the whole board as text, with '/'-dividers between big rows."""
        big_rows = []
        for br in range(self.size):
            small_rows = []
            for sr in range(self.size):
                board_list = []
                for col in range(self.size):
                    row = ''.join(self.subboards[br][col].board[sr])
                    board_list.append(row)
                small_rows.append('|'.join(board_list))
            big_rows.append('\n'.join(small_rows))
        # Divider width follows the board size (was hard-coded to 3).
        div_list = ['/' * self.size] * self.size
        div_str = '\n' + '|'.join(div_list) + '\n'
        return div_str.join(big_rows) + '\n'

    def __str__(self):
        return self.big_board_to_string()

    def num_to_ind(self, num):
        """Convert a 1-based board/field number to (row, col) indices."""
        if num < 1:
            raise NotPositiveIntegerError
        num -= 1
        return divmod(num, self.size)

    def get_subboard_list(self):
        """Return the sub-boards flattened to a single row-major list."""
        return [i for sub in self.subboards for i in sub]

    def get_legal_subboards(self):
        """Return the 1-based numbers of sub-boards that are still playable."""
        subboard_list = self.get_subboard_list()
        return [i + 1 for i, p in enumerate(subboard_list) if p.board_finished() == False]

    def is_restricted(self):
        """True if the previous move forces play onto a specific sub-board."""
        if len(self.moves_history) == 0:
            return False
        board_row, board_col = self.num_to_ind(self.moves_history[-1]['field'])
        # The forced sub-board is only binding while it is unfinished.
        if self.subboards[board_row][board_col].board_finished():
            return False
        return True

    def _make_move(self, board_num, field_num, sym):
        """Place `sym` on the given board/field; raises on illegal moves."""
        if board_num not in self.get_legal_subboards():
            raise LocalBoardFinishedError
        board_row, board_col = self.num_to_ind(board_num)
        field_row, field_col = self.num_to_ind(field_num)
        curr_local_board = self.subboards[board_row][board_col]
        if curr_local_board.board[field_row][field_col] != '_':
            raise TakenFieldError
        try:
            # Don't catch the error here if you want to catch it later.
            curr_local_board.board[field_row][field_col] = sym
            return self
        except (IndexError, ValueError):
            pass

    def make_player_move(self, sym):
        """Prompt a human for a move, retrying until it is legal."""
        while True:
            if self.is_restricted():
                board_num = self.moves_history[-1]['field']
            else:
                board_num = input('input board number')
            field_num = input('input field number')
            try:
                board_num = int(board_num)
                field_num = int(field_num)
                self._make_move(board_num, field_num, sym)
                # self._last_move = field_num
                # append moves history in _make_move()
                self.moves_history.append({'number': len(self.moves_history), 'board': board_num, 'field': field_num})
                break
            except (NotPositiveIntegerError, ValueError):
                print('input a positive integer')
            except IndexError:
                print('make a valid move within the board')
            except LocalBoardFinishedError:
                print('make a move on a valid board')
            except TakenFieldError:
                print('field taken')

    def make_random_legal_move(self, sym):
        """Play a uniformly random legal move for `sym`."""
        if self.is_restricted():
            board_num = self.moves_history[-1]['field']
        else:
            board_num = random.choice(self.get_legal_subboards())
        board_row, board_col = self.num_to_ind(board_num)
        field_num = random.choice(self.subboards[board_row][board_col].get_legal_moves())
        self._make_move(board_num, field_num, sym)
        self.moves_history.append({'number': len(self.moves_history), 'board': board_num, 'field': field_num})

    def get_local_winner(self):
        """Per-sub-board result: winner symbol, 'draw', or '_' if unfinished."""
        return [b.get_winner() or 'draw' if b.board_finished() else '_' for b in self.get_subboard_list()]

    def update_metaboard(self):
        """Rebuild the metaboard from the current sub-board results."""
        self.metaboard = board.Board(self.size)
        for ind, sym in enumerate(self.get_local_winner()):
            self.metaboard._make_move(ind + 1, sym)

    def get_global_winner(self):
        """Overall result: winner symbol, 'draw', or 'nobody yet'."""
        if self.metaboard.board_finished():
            return self.metaboard.get_winner() or 'draw'
        else:
            return 'nobody yet'

    def play_one_random_game(self):
        """Play random moves until the metaboard is decided; return the winner."""
        i = 0
        while self.metaboard.board_finished() == False:
            sym = 'x' if i % 2 == 0 else 'o'
            self.make_random_legal_move(sym)
            i += 1
            self.update_metaboard()
        return self.get_global_winner()

    def play_two_players(self):
        """Alternate human moves until the game is decided; return the winner."""
        i = 0
        while self.metaboard.board_finished() == False:
            sym = 'x' if i % 2 == 0 else 'o'
            self.make_player_move(sym)
            print(self)
            i += 1
            self.update_metaboard()
        # Fixed: the original called the non-existent self.get_get_winner(),
        # which raised AttributeError as soon as a game finished.
        winner = self.get_global_winner()
        return winner

    # These two dispatch helpers could be merged with the play loops.
    def _human_or_machine_move(self, who, sym):
        """Dispatch one move to the human ('h') or the machine ('m')."""
        if who == 'h':
            self.make_player_move(sym)
        elif who == 'm':
            self.make_random_legal_move(sym)
        else:
            raise ValueError("who must be 'h' or 'm'")

    def play_against_the_machine(self):
        """Human vs. random machine; prompts for who goes first."""
        goes_first = None
        while goes_first not in 'hm':
            goes_first = input('choose who goes first (input h for human or m for machine)')
        i = 0
        if goes_first == 'h':
            sym_play_dict = {'x': 'h', 'o': 'm'}
        elif goes_first == 'm':
            sym_play_dict = {'x': 'm', 'o': 'h'}
        while self.metaboard.board_finished() == False:
            sym = 'x' if i % 2 == 0 else 'o'
            self._human_or_machine_move(sym_play_dict[sym], sym)
            print(self)
            i += 1
            self.update_metaboard()
        # Fixed: same get_get_winner() AttributeError as play_two_players.
        winner = self.get_global_winner()
        return winner
|
{"/big_board.py": ["/board.py"], "/mc_big.py": ["/big_board.py"]}
|
14,757
|
arnoldlayne0/tictactoe
|
refs/heads/master
|
/mc_big.py
|
import big_board
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
def simulate_n_games(n):
    """
    Play n random games of ultimate tic-tac-toe and tally the outcomes.

    :param n: number of games to simulate
    :return: (wins, games_df) where wins is a Series with the fraction of
             games ending in 'draw'/'o'/'x', and games_df is the per-game
             one-hot indicator DataFrame
    """
    cols = ['draw', 'o', 'x']
    games_df = pd.DataFrame(0, columns=cols, index=range(n))
    for i in range(n):
        my_board = big_board.BigBoard(3)
        winner = my_board.play_one_random_game()
        # Single-step .loc assignment: the original chained form
        # games_df.loc[i][winner] = 1 writes to a temporary copy and is
        # silently lost under pandas copy-on-write.
        games_df.loc[i, winner] = 1
    wins = games_df.mean()
    return wins, games_df
# Quick smoke run: simulate a handful of games and print the win fractions.
wins, games = simulate_n_games(5)
print(wins)
def simulate_n_games_convergence(n):
    """
    Record how the win/draw fractions evolve as the number of simulated
    games grows from 1 to n (Monte Carlo convergence data).

    :param n: maximum batch size (number of games in the largest batch)
    :return: DataFrame indexed by batch size with columns
             ['draws', 'o wins', 'x wins']
    """
    cols = ['draw', 'o', 'x']
    summary_df = pd.DataFrame(columns=cols, index=range(1, n + 1))
    for i in range(1, n + 1):
        # Simulate i games (the original simulated n games for every row, so
        # all rows had the same sample size and no convergence was visible).
        wins, _ = simulate_n_games(i)
        # Single .loc assignment instead of chained indexing, so the write
        # actually lands in summary_df.
        summary_df.loc[i, :] = wins
        print(str(i) + ' done')
    summary_df.columns = ['draws', 'o wins', 'x wins']
    return summary_df
def plot_convergence(summary_df, filename):
    """Plot the Monte Carlo convergence data and save the figure to `filename`.

    :param summary_df: DataFrame from simulate_n_games_convergence (one line
        per result column, indexed by number of games)
    :param filename: path the figure is saved to
    """
    sns.set()
    fig = plt.figure()
    ax = fig.add_subplot(111)
    # One line per outcome column (draws / o wins / x wins).
    for col in summary_df.columns:
        ax.plot(summary_df.index, np.array(summary_df.loc[:, col]), label=col)
    ax.legend(loc='best')
    ax.set_ylabel('result percentage')
    ax.set_xlabel('number of games')
    ax.set_title('ultimate kolko i krzyzyk mc convergence')
    fig.savefig(filename)
#summary_5 = simulate_n_games_convergence(5)
#plot_convergence(summary_5, 'mc_5_test')
|
{"/big_board.py": ["/board.py"], "/mc_big.py": ["/big_board.py"]}
|
14,758
|
arnoldlayne0/tictactoe
|
refs/heads/master
|
/board.py
|
from copy import deepcopy
import random
#import numpy as np
#import pandas as pd
#import matplotlib.pyplot as plt
#import seaborn as sns
class TakenFieldError(Exception):
    """Raised when a move targets a field that is already occupied."""
    pass
class Board:
    """A single square tic-tac-toe board with 1-based move numbering.

    Empty fields are '_'; moves place a player symbol (e.g. 'x'/'o').
    """

    def __init__(self, size):
        self.size = size
        self.board = [['_'] * self.size for i in range(self.size)]
        # Pre-compute every winning line: all rows, columns and both diagonals.
        rows = [[(i, j) for j in range(self.size)] for i in range(self.size)]
        cols = [[(i, j) for i in range(self.size)] for j in range(self.size)]
        diag_one = [[(i, i) for i in range(self.size)]]
        diag_two = [[(i, self.size - 1 - i) for i in range(self.size)]]
        self._all_combs = rows + cols + diag_one + diag_two
        self.moves_history = []

    def print_board(self):
        """Print the board one row-list per line (debug helper)."""
        for row in self.board:
            print(row)

    def board_to_string(self):
        """Return the board as newline-separated rows of symbols."""
        rows = []
        for row in self.board:
            rows.append(''.join(row))
        return '\n'.join(rows)

    def __str__(self):
        return self.board_to_string()

    def get_board_list(self):
        """Return all fields flattened to a single row-major list."""
        return [i for sub in self.board for i in sub]

    def get_legal_moves(self):
        """Return the 1-based numbers of all empty fields."""
        return [i + 1 for i, p in enumerate(self.get_board_list()) if p == "_"]

    def num_to_ind(self, num):
        """Convert a 1-based field number to (row, col) indices."""
        num -= 1
        return divmod(num, self.size)

    def ind_to_num(self, ind):
        """Convert (row, col) indices back to a 1-based field number."""
        return ind[0] * self.size + ind[1] + 1

    # Use this internal primitive for all mutations.
    def _make_move(self, num, sym):
        """Place `sym` on field `num`; raises TakenFieldError if occupied."""
        row, col = self.num_to_ind(num)
        if self.board[row][col] != '_':
            raise TakenFieldError
        try:
            self.board[row][col] = sym
            return self
        except (IndexError, ValueError):
            pass

    def make_player_move(self, sym):
        """Prompt a human for a field number, retrying until it is legal."""
        while True:
            # Fixed: raw_input() is Python 2 only; Python 3 uses input()
            # (consistent with big_board.py).
            move = input()
            try:
                move = int(move)
                self._make_move(move, sym)
                self.moves_history.append(move)
                break
            except ValueError:
                print('insert a number')
            except IndexError:
                print('make a move within the board')
            except TakenFieldError:
                print('make a move on a field that is not already taken')

    def make_random_legal_move(self, sym):
        """Play a uniformly random legal move for `sym`."""
        move = random.choice(self.get_legal_moves())
        self._make_move(move, sym)
        self.moves_history.append(move)

    def get_winner(self):
        """Return the winning symbol if any line is filled by one player, else None."""
        for comb in self._all_combs:
            vals = {self.board[i][j] for (i, j) in comb}
            if len(vals) == 1 and (vals != {'_'}):
                return vals.pop()

    def board_finished(self):
        """True when somebody has won or no legal moves remain."""
        if self.get_winner() != None or len(self.get_legal_moves()) == 0:
            return True
        return False

    def moves_to_boards(self):
        """Replay moves_history into the list of successive board states."""
        boards = [Board(self.size) for i in range(len(self.moves_history)+1)]
        for i in range(1, len(self.moves_history)+1):
            # 'x' moves first, so odd half-moves are x and even ones are o.
            sym = 'o' if i % 2 == 0 else 'x'
            m = self.moves_history[i-1]
            boards[i] = deepcopy(boards[i-1])._make_move(m, sym)
        boards = [b.board for b in boards]
        return boards

    def play_one_random_game(self):
        """Play random moves to completion; return (winner, history, states)."""
        i = 0
        boards = []
        while self.board_finished() == False:
            sym = 'x' if i % 2 == 0 else 'o'
            self.make_random_legal_move(sym)
            boards.append(deepcopy(self.board))
            i += 1
        winner = self.get_winner() or 'draw'
        return winner, self.moves_history, boards

    def play_two_players(self):
        """Alternate human moves to completion; return (winner, history)."""
        i = 0
        while self.board_finished() == False:
            sym = 'x' if i % 2 == 0 else 'o'
            self.make_player_move(sym)
            print(self.board_to_string())
            i += 1
        winner = self.get_winner() or 'draw'
        return winner, self.moves_history
|
{"/big_board.py": ["/board.py"], "/mc_big.py": ["/big_board.py"]}
|
14,759
|
nanophyr/osrs_customboard
|
refs/heads/master
|
/common.py
|
from flask import *
import os
import scrape
# Flask application serving the custom OSRS hiscore board.
app = Flask(__name__)
@app.route('/')
def home():
    # Flash the player's scraped total level so the template can display it.
    flash(scrape.getTotal('nanoluck'))
    return render_template('home.html')
if __name__ == '__main__':
    # flash() needs a session secret; a random one is fine for the dev server.
    app.secret_key = os.urandom(24)
    app.run(debug=True)
|
{"/common.py": ["/scrape.py"]}
|
14,760
|
nanophyr/osrs_customboard
|
refs/heads/master
|
/scrape.py
|
import urllib
from bs4 import BeautifulSoup
def ripToLines():
    # NOTE(review): dead code — this zero-argument version references a
    # global `soup` that is never defined at module level, and the name is
    # shadowed by the later ripToLines(soup) definition below.
    # kill all script and style elements
    for script in soup(["script", "style"]):
        script.extract() # rip it out
    text = soup.get_text().splitlines()
    return text
def find_between(s, first, last):
    """Return the substring of `s` between the first occurrence of `first`
    and the next occurrence of `last`; empty string if either is missing."""
    try:
        begin = s.index(first) + len(first)
        return s[begin:s.index(last, begin)]
    except ValueError:
        return ""
def ripToLines(soup):
    """Return the page's visible text as a list of lines, with script and
    style elements removed from the parse tree first."""
    # kill all script and style elements
    for script in soup(["script", "style"]):
        script.extract()  # rip it out
    text = soup.get_text().splitlines()
    return text
#returns total level for given user
def getTotal(user):
    """
    Return the total level for the given OSRS player name.

    Fetches the old-school hiscores "lite" endpoint and extracts the second
    comma-separated field (rank,total,exp) of the first line.
    :param user: player name to look up
    :return: total level as a string, or "" if it cannot be parsed
    """
    # `urllib.urlopen` only exists on Python 2; import the right opener for
    # whichever interpreter is running.
    try:
        from urllib.request import urlopen  # Python 3
    except ImportError:
        from urllib import urlopen  # Python 2 fallback
    url = "https://secure.runescape.com/m=hiscore_oldschool/index_lite.ws?player=" + user
    html = urlopen(url).read()
    soup = BeautifulSoup(html, features="html.parser")
    text = ripToLines(soup)
    return find_between(str(text[0]), ",", ",")
#prints rank level exp
#print str(text[0])
|
{"/common.py": ["/scrape.py"]}
|
14,764
|
edwinbalani/grov
|
refs/heads/master
|
/manual_control.py
|
"""
Demonstrates using custom hillshading in a 3D surface plot.
"""
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import cbook
from matplotlib import cm
from matplotlib.colors import LightSource
import matplotlib.pyplot as plt
import numpy as np
import matplotlib.animation as animation
import time
import copy
def command_interpreter(standing_point, command):
    """
    Translate a textual movement command into a proposed grid position.

    :param standing_point: current [x, y] position (not mutated)
    :param command: string such as "east 2": a compass direction and a distance
    :return: new [x, y] position after applying the move; unchanged copy if
        the direction is not recognised
    """
    words = command.split()
    proposed = copy.copy(standing_point)
    # (axis index, sign) per direction; x grows eastward, y grows northward.
    deltas = {'east': (0, 1), 'west': (0, -1),
              'north': (1, 1), 'south': (1, -1)}
    direction = words[0].lower()
    if direction in deltas:
        axis, sign = deltas[direction]
        proposed[axis] += sign * int(words[1])
    return proposed
# Load the sample DEM elevation data shipped with matplotlib.
filename = cbook.get_sample_data('jacksboro_fault_dem.npz', asfileobj=False)
with np.load(filename) as dem:
    z = dem['elevation']
    nrows, ncols = z.shape
    x = np.linspace(dem['xmin'], dem['xmax'], ncols)
    y = np.linspace(dem['ymin'], dem['ymax'], nrows)
    x, y = np.meshgrid(x, y)
# Crop to a 45x45 region of the terrain.
region = np.s_[5:50, 5:50]
x, y, z = x[region], y[region], z[region]
# NOTE(review): `barriers` is never used below — obstacles are handled via
# the separate `obstacles` list.
barriers = [[25, 25], []]
fig, ax = plt.subplots(subplot_kw=dict(projection='3d'))
ax.set_xlabel("South-North")
ax.set_ylabel("East-West")
ls = LightSource(270, 45)
# To use a custom hillshading mode, override the built-in shading and pass
# in the rgb colors of the shaded surface calculated from "shade".
rgb = ls.shade(z, cmap=cm.gist_earth, vert_exag=0.1, blend_mode='soft')
surf = ax.plot_surface(x, y, z, rstride=1, cstride=1, facecolors=rgb,
                       linewidth=0, antialiased=False, shade=False)
# Start (magenta) and goal (green) markers in grid coordinates.
# NOTE(review): standing_point aliases origin (no copy) — origin mutates as
# the rover moves; confirm origin is not needed later.
origin = [20, 20]
standing_point = origin
goal = [26, 28]
obstacles = [[20, 22]]
trajectory = ax.plot([x[origin[0]][origin[1]]], [y[origin[0]][origin[1]]],
                     [z[origin[0]][origin[1]]], markerfacecolor='m',
                     markeredgecolor='w', marker='o', markersize=5, alpha=0.6)
trajectory = ax.plot([x[goal[0]][goal[1]]], [y[goal[0]][goal[1]]], [z[goal[0]][goal[1]]], markerfacecolor='g',
                     markeredgecolor='w', marker='o', markersize=5, alpha=0.6)
plt.show(block=False)
plt.pause(5)
# Interactive loop: read a direction+distance command, reject obstacle
# positions (marked black), otherwise advance and mark the new position red.
while standing_point != goal:
    command = input("**Please type your command with direction and distance, e.g. east 2**\n")
    proposed_standing_point = command_interpreter(standing_point, command)
    if proposed_standing_point in obstacles:
        print("Ah oh, there is an obstacle at that location.")
        print("Please avoid that point marked in black.")
        trajectory = ax.plot([x[proposed_standing_point[0]][proposed_standing_point[1]]], [y[proposed_standing_point[0]][proposed_standing_point[1]]],
                             [z[proposed_standing_point[0]][proposed_standing_point[1]]], markerfacecolor='k',
                             markeredgecolor='k', marker='o', markersize=5, alpha=0.6)
    else:
        standing_point = proposed_standing_point
        trajectory = ax.plot([x[standing_point[0]][standing_point[1]]], [y[standing_point[0]][standing_point[1]]],
                             [z[standing_point[0]][standing_point[1]]], markerfacecolor='r', markeredgecolor='r',
                             marker='o', markersize=5, alpha=0.6)
    plt.draw()
    plt.show(block=False)
    plt.pause(2)
print("You have reached the goal!")
plt.show(block = False)
plt.pause(10)
|
{"/friction.py": ["/analysis.py"]}
|
14,765
|
edwinbalani/grov
|
refs/heads/master
|
/analysis.py
|
# Copyright 2017 Edwin Bahrami Balani and Qiaochu Jiang
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains functions for grid data analysis."""
import numpy as np
def window(grid, indices, window_size=5):
    """
    Extract a square window of side `window_size` centred at `indices`.

    Note: edge values are padded using `numpy.pad(mode='edge')`, so windows
    near the border are filled with the nearest edge values.

    :param grid: 2-D grid of values
    :param indices: (x, y) tuple of indices
    :param window_size: length of the window, must be odd
    :return: the (window_size x window_size) sub-grid
    """
    if window_size % 2 != 1:
        raise ValueError('window_size must be odd')
    if len(indices) != 2:
        raise ValueError('indices (x, y) must be specified')
    half = window_size // 2
    # Pad by a full window on every side so the slice below can never run off
    # the grid; the centre indices shift by the pad width accordingly.
    padded = np.pad(grid, window_size, 'edge')
    cx = indices[0] + window_size
    cy = indices[1] + window_size
    return padded[cx - half:cx + half + 1, cy - half:cy + half + 1]
def gradient(grid, indices, window_size=5, dx=1, dy=1):
    """
    Find the gradient at a point in a 2-D grid using a polynomial fit.

    :param grid: 2-D grid of z-values
    :param indices: (x, y) tuple of indices where gradient will be found
    :param window_size: size of the square window used for the polynomial fit (default 5)
    :param dx: spacing between adjacent x-points (default 1)
    :param dy: spacing between adjacent y-points (default 1)
    :return: a 2-element array of the (d/dx, d/dy) partial derivatives
    """
    # NOTE(review): fit() returns numpy.linalg.lstsq's result as its third
    # value; __wrt_x/__wrt_y below index it as a flat coefficient vector
    # (coeff[0]..coeff[8]) — confirm fit() unpacks the lstsq solution.
    X, Y, coeff = fit(grid, indices, window_size)
    # Coefficients in order 1, x, y, x^2, x^2.y, x^2.y^2, y^2, x.y^2, x.y
    #                       a, b, c, d,   e,     f,       g,   h,     i
    # I am not proud of this:
    # TODO rewrite this section for smarter/iterative processing
    # (This also has the benefit of allowing for a general nth-order polynomial fit:
    # see https://gistpreview.github.io/?f9990a6c0eec76c0c8176b050121e694)
    # Evaluate the analytic partial derivatives at the window's centre point
    # (coordinates are local to the window, 0..window_size-1 on each axis).
    x, y = X[window_size//2, window_size//2], Y[window_size//2, window_size//2]
    return np.array([__wrt_x(x, y, coeff) / dx, __wrt_y(x, y, coeff) / dy])
def fit(grid, indices, window_size=5):
    """
    Calculate a second-order 2-D polynomial fit around a point on a grid.

    The window around *indices* is fitted with the 9-term basis
    1, x, y, x^2, x^2.y, x^2.y^2, y^2, x.y^2, x.y, where x/y are local
    window coordinates (0..window_size-1 on each axis).

    :param grid: 2-D grid of z-values
    :param indices: (x, y) tuple of indices around which the fit is made
    :param window_size: size of the square window used for the fit (default 5)
    :return: (X, Y, coeff) — the local coordinate grids and the flat
             9-element coefficient vector
    """
    grid = window(grid, indices, window_size=window_size)
    X, Y = np.mgrid[0:window_size, 0:window_size]
    Xf = X.flatten()
    Yf = Y.flatten()
    # Design matrix: one column per basis term, one row per window point.
    A = np.array([np.ones(X.size), Xf, Yf, Xf ** 2, Xf ** 2 * Yf, Xf ** 2 * Yf ** 2, Yf ** 2, Xf * Yf ** 2, Xf * Yf]).T
    B = grid.flatten()
    # Bug fix: lstsq returns (solution, residuals, rank, singular_values);
    # callers (gradient/__wrt_x/__wrt_y) index the coefficient vector
    # directly, so return only the solution. rcond=None selects the
    # modern default cutoff and silences the FutureWarning.
    coeff, _residuals, _rank, _sv = np.linalg.lstsq(A, B, rcond=None)
    return X, Y, coeff
def __z(x, y, c):
    """
    Evaluate the fitted second-order polynomial at (x, y).

    :param x: local x-coordinate
    :param y: local y-coordinate
    :param c: flat 9-element coefficient vector in the order
              1, x, y, x^2, x^2.y, x^2.y^2, y^2, x.y^2, x.y
    :return: polynomial value z(x, y)
    """
    # Bug fix: the original bound c = c[2], clobbering the coefficient
    # vector before the remaining terms (c[3]..c[8]) could be read.
    # Unpack everything in a single step instead.
    a, b, c, d, e, f, g, h, i = c
    return a + b*x + c*y + d*x**2 + e*x**2*y + f*x**2*y**2 + g*y**2 + h*x*y**2 + i*x*y
def __wrt_x(x, y, c):
    """
    Evaluate dz/dx of the fitted polynomial at (x, y).

    :param x: local x-coordinate
    :param y: local y-coordinate
    :param c: flat 9-element coefficient vector (see __z for term order)
    :return: partial derivative with respect to x
    """
    # Bug fix: the original bound c = c[2] before reading c[3]..c[8],
    # destroying the coefficient vector. Unpack in one step; the unused
    # constant and y^2 coefficients are discarded.
    _, b, c, d, e, f, _, h, i = c
    return b + 2*d*x + 2*e*x*y + 2*f*x*y**2 + h*y**2 + i*y
def __wrt_y(x, y, c):
    """
    Evaluate dz/dy of the fitted polynomial at (x, y).

    :param x: local x-coordinate
    :param y: local y-coordinate
    :param c: flat 9-element coefficient vector (see __z for term order)
    :return: partial derivative with respect to y
    """
    # Bug fix: the original bound c = c[2] before reading c[4]..c[8],
    # destroying the coefficient vector. Unpack in one step; unused
    # coefficients are discarded.
    _, _, c, _, e, f, g, h, i = c
    return c + e*x**2 + 2*f*x**2*y + 2*g*y + 2*h*x*y + i*x
def direc_deriv(grad: np.ndarray, direc: np.ndarray, norm=True):
    """
    Calculate the directional derivative of a function.

    The directional derivative along *direc* is the dot product of the
    gradient with the (optionally normalised) direction vector.

    :param grad: Gradient vector (2-D)
    :param direc: Direction vector (2-D)
    :param norm: Whether to normalise the direction vector (default True)
    :return: scalar directional derivative
    """
    if not grad.size == 2:
        raise ValueError("Gradient vector must have 2 components")
    # Bug fix: the original returned grad * ||direc|| when norm=True — a
    # vector independent of the direction's orientation, which neither
    # normalises the direction (as the docstring promises) nor projects
    # onto it. The directional derivative is grad . unit(direc).
    if norm:
        direc = direc / np.linalg.norm(direc)
    return np.dot(grad, direc)
def slope(grad: np.ndarray):
    """
    Calculate the slope of a function along its direction of steepest ascent.

    :param grad: Gradient vector (2-D)
    :return: directional derivative of the function along its own gradient
    """
    steepest_direction = grad
    return direc_deriv(grad, steepest_direction, norm=True)
|
{"/friction.py": ["/analysis.py"]}
|
14,766
|
edwinbalani/grov
|
refs/heads/master
|
/util.py
|
# Copyright 2017 Edwin Bahrami Balani and Qiaochu Jiang
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility functions for testing purposes"""
import numpy as np
def rgrid(size=10, integers=True, low=None, high=None):
    """
    Return a square grid of random values.

    :param size: side length of the (size, size) grid (default 10)
    :param integers: draw integers if True, floats otherwise (default True)
    :param low: inclusive lower bound; defaults to 1 for integers, 0 for floats
    :param high: exclusive upper bound; defaults to 21 for integers, 1 for floats
    :return: a (size, size) numpy array of random values
    """
    if low is None:
        low = 1 if integers else 0
    if high is None:
        high = 21 if integers else 1
    if integers:
        return np.random.randint(low, high, (size, size))
    # Bug fix: randn(low, high, (size, size)) is invalid — randn takes
    # dimension sizes, not bounds, and rejects a tuple argument. uniform()
    # draws floats in [low, high) with the intended shape.
    return np.random.uniform(low, high, (size, size))
|
{"/friction.py": ["/analysis.py"]}
|
14,767
|
edwinbalani/grov
|
refs/heads/master
|
/autopilot.py
|
"""
Demonstrates using custom hillshading in a 3D surface plot.
"""
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import cbook
from matplotlib import cm
from matplotlib.colors import LightSource
import matplotlib.pyplot as plt
import numpy as np
import matplotlib.animation as animation
import time
from math import sqrt
import copy
import math
def gradient_calculation(z, point1, point2):
    """Return rise over run of height field *z* between two grid points."""
    rise = z[point2[0]][point2[1]] - z[point1[0]][point1[1]]
    run = math.sqrt((point1[0] - point2[0]) ** 2 + (point1[1] - point2[1]) ** 2)
    return rise / run
def gradient_difficulty(gradient):
    """Map a slope to a traversal-cost multiplier in (1, 2) via a sigmoid."""
    sigmoid = 1 / (1 + math.exp(-gradient))
    return 1 + sigmoid
def command_interpreter(standing_point, command):
    """
    Apply a textual movement command ("<direction> <steps>") to a position.

    east/west move index 0 (+/-), north/south move index 1 (+/-); unknown
    directions leave the position unchanged. The (mutated) position is
    printed and returned, matching the original interactive flow.
    """
    words = command.split()
    moves = {'east': (0, 1), 'west': (0, -1), 'north': (1, 1), 'south': (1, -1)}
    direction = words[0].lower()
    if direction in moves:
        axis, sign = moves[direction]
        # The step count is only parsed when the direction is recognised.
        standing_point[axis] += sign * int(words[1])
    print(standing_point)
    return standing_point
def priority_enqueue(q, key, distance, heur, parent):
    """
    Insert (key, distance, heur, parent) into list *q*, kept sorted by
    ascending f = distance + heur; a new entry is placed after existing
    entries of equal priority.

    :param q: list acting as the priority queue (smallest f first)
    :param key: node identifier
    :param distance: g-cost accumulated so far
    :param heur: heuristic estimate to the goal
    :param parent: predecessor node
    """
    priority = distance + heur
    l = len(q) - 1
    # Bug fix: test the bound *before* subscripting. The original evaluated
    # q[l] at l == -1 (silently reading the last element via negative
    # indexing) before its `l >= 0` check could stop the loop. The swapped
    # condition also makes the separate empty-list branch unnecessary:
    # insert at index 0 on an empty list is an append.
    while l >= 0 and q[l][1] + q[l][2] > priority:
        l -= 1
    q.insert(l + 1, (key, distance, heur, parent))
def heuristic(standing_point, goal):
    """Euclidean distance from *standing_point* to *goal* (A* h-value)."""
    run = goal[0] - standing_point[0]
    rise = goal[1] - standing_point[1]
    return sqrt(run ** 2 + rise ** 2)
def a_star_search(origin, goal, heuristic, coordinates, anomaly, marked, z):
    """
    A* search over the height grid, restarting whenever an obstacle is hit.

    :param origin: (x, y) start cell
    :param goal: (x, y) target cell
    :param heuristic: callable(point, goal) -> estimated remaining distance
    :param coordinates: (x-range, y-range) of valid cell indices
    :param anomaly: collection of obstacle cells, only discovered on visit
    :param marked: dict mapping every cell to a visited flag
    :param z: 2-D height field; height differences drive move costs
    :return: (trace, distance, parent) when the goal is reached; None after
             printing a message when the queue empties without success

    NOTE(review): plots via the module-level globals ax, x and y — confirm
    this coupling to the plotting script is intended.
    """
    # Snapshot of the visited map so the search can restart from scratch
    # (plus any newly-discovered obstacles) when an anomaly is found.
    original_marked = copy.copy(marked)
    pq = list()
    trace = dict()
    pq.append((origin, 0, heuristic(origin, goal), origin))
    while len(pq) > 0:
        # pq is kept sorted by f = g + h, so the front is the best node.
        boundary = pq.pop(0)
        if boundary[0] == goal:
            return trace, boundary[1], boundary[3]
        if boundary[0] in anomaly:
            # Obstacle discovered: plot it in black, then restart the whole
            # search with this cell permanently marked as visited/blocked.
            trajectory = ax.plot([x[boundary[0][0]][boundary[0][1]]], [y[boundary[0][0]][boundary[0][1]]],
                                 [z[boundary[0][0]][boundary[0][1]]], markerfacecolor='k',
                                 markeredgecolor='k', marker='o', markersize=5, alpha=0.6)
            print("There is an obastacle at", (boundary[0]))
            print("Start over, avoiding", boundary[0])
            # NOTE(review): this aliases original_marked rather than copying
            # it, so the snapshot itself is mutated before the recursive
            # restart — confirm obstacle accumulation across restarts is the
            # intended behaviour.
            marked = original_marked
            marked[boundary[0]] = True
            return a_star_search(origin, goal, heuristic, coordinates, anomaly, marked, z)
        # Expand the 8 neighbours: diagonal steps cost 1.414, axis-aligned
        # steps cost 1, both scaled by gradient_difficulty of the slope.
        if (boundary[0][0] + 1 in coordinates[0]) and (boundary[0][1] + 1 in coordinates[1]) and (
                marked[(boundary[0][0] + 1, boundary[0][1] + 1)] == False):
            grad = gradient_calculation(z, boundary[0], (boundary[0][0] + 1, boundary[0][1] + 1))
            marked[(boundary[0][0] + 1, boundary[0][1] + 1)] = True
            priority_enqueue(pq, (boundary[0][0] + 1, boundary[0][1] + 1), boundary[1] + 1.414 * gradient_difficulty(grad),
                             heuristic((boundary[0][0] + 1, boundary[0][1] + 1), goal), boundary[0])
            trace[(boundary[0][0] + 1, boundary[0][1] + 1)] = boundary[0]
        if (boundary[0][0] + 1 in coordinates[0]) and (boundary[0][1] - 1 in coordinates[1]) and (
                marked[(boundary[0][0] + 1, boundary[0][1] - 1)] == False):
            grad = gradient_calculation(z, boundary[0], (boundary[0][0] + 1, boundary[0][1] - 1))
            marked[(boundary[0][0] + 1, boundary[0][1] - 1)] = True
            priority_enqueue(pq, (boundary[0][0] + 1, boundary[0][1] - 1), boundary[1] + 1.414 * gradient_difficulty(grad),
                             heuristic((boundary[0][0] + 1, boundary[0][1] - 1), goal), boundary[0])
            trace[(boundary[0][0] + 1, boundary[0][1] - 1)] = boundary[0]
        if (boundary[0][0] - 1 in coordinates[0]) and (boundary[0][1] + 1 in coordinates[1]) and (
                marked[(boundary[0][0] - 1, boundary[0][1] + 1)] == False):
            grad = gradient_calculation(z, boundary[0], (boundary[0][0] - 1, boundary[0][1] + 1))
            marked[(boundary[0][0] - 1, boundary[0][1] + 1)] = True
            priority_enqueue(pq, (boundary[0][0] - 1, boundary[0][1] + 1), boundary[1] + 1.414 * gradient_difficulty(grad),
                             heuristic((boundary[0][0] - 1, boundary[0][1] + 1), goal), boundary[0])
            trace[(boundary[0][0] - 1, boundary[0][1] + 1)] = boundary[0]
        if (boundary[0][0] - 1 in coordinates[0]) and (boundary[0][1] - 1 in coordinates[1]) and (
                marked[(boundary[0][0] - 1, boundary[0][1] - 1)] == False):
            grad = gradient_calculation(z, boundary[0], (boundary[0][0] - 1, boundary[0][1] - 1))
            marked[(boundary[0][0] - 1, boundary[0][1] - 1)] = True
            priority_enqueue(pq, (boundary[0][0] - 1, boundary[0][1] - 1), boundary[1] + 1.414 * gradient_difficulty(grad),
                             heuristic((boundary[0][0] - 1, boundary[0][1] - 1), goal), boundary[0])
            trace[(boundary[0][0] - 1, boundary[0][1] - 1)] = boundary[0]
        if (boundary[0][0] in coordinates[0]) and (boundary[0][1] - 1 in coordinates[1]) and (
                marked[(boundary[0][0], boundary[0][1] - 1)] == False):
            grad = gradient_calculation(z, boundary[0], (boundary[0][0], boundary[0][1] - 1))
            marked[(boundary[0][0], boundary[0][1] - 1)] = True
            priority_enqueue(pq, (boundary[0][0], boundary[0][1] - 1), boundary[1] + 1 * gradient_difficulty(grad),
                             heuristic((boundary[0][0], boundary[0][1] - 1), goal), boundary[0])
            trace[(boundary[0][0], boundary[0][1] - 1)] = boundary[0]
        if (boundary[0][0] in coordinates[0]) and (boundary[0][1] + 1 in coordinates[1]) and (
                marked[(boundary[0][0], boundary[0][1] + 1)] == False):
            grad = gradient_calculation(z, boundary[0], (boundary[0][0], boundary[0][1] + 1))
            marked[(boundary[0][0], boundary[0][1] + 1)] = True
            priority_enqueue(pq, (boundary[0][0], boundary[0][1] + 1), boundary[1] + 1 * gradient_difficulty(grad),
                             heuristic((boundary[0][0], boundary[0][1] + 1), goal), boundary[0])
            trace[(boundary[0][0], boundary[0][1] + 1)] = boundary[0]
        if (boundary[0][0] - 1 in coordinates[0]) and (boundary[0][1] in coordinates[1]) and (
                marked[(boundary[0][0] - 1, boundary[0][1])] == False):
            grad = gradient_calculation(z, boundary[0], (boundary[0][0] - 1, boundary[0][1]))
            marked[(boundary[0][0] - 1, boundary[0][1])] = True
            priority_enqueue(pq, (boundary[0][0] - 1, boundary[0][1]), boundary[1] + 1 * gradient_difficulty(grad),
                             heuristic((boundary[0][0] - 1, boundary[0][1]), goal), boundary[0])
            trace[(boundary[0][0] - 1, boundary[0][1])] = boundary[0]
        if (boundary[0][0] + 1 in coordinates[0]) and (boundary[0][1] in coordinates[1]) and (
                marked[(boundary[0][0] + 1, boundary[0][1])] == False):
            grad = gradient_calculation(z, boundary[0], (boundary[0][0] + 1, boundary[0][1]))
            marked[(boundary[0][0] + 1, boundary[0][1])] = True
            priority_enqueue(pq, (boundary[0][0] + 1, boundary[0][1]), boundary[1] + 1 * gradient_difficulty(grad),
                             heuristic((boundary[0][0] + 1, boundary[0][1]), goal), boundary[0])
            trace[(boundary[0][0] + 1, boundary[0][1])] = boundary[0]
    print("There is no way we can reach the goal.")
# Load Matplotlib's sample DEM (digital elevation model) as the terrain.
filename = cbook.get_sample_data('jacksboro_fault_dem.npz', asfileobj=False)
with np.load(filename) as dem:
    z = dem['elevation']
    nrows, ncols = z.shape
    # Real-world coordinate axes spanning the DEM extent.
    x = np.linspace(dem['xmin'], dem['xmax'], ncols)
    y = np.linspace(dem['ymin'], dem['ymax'], nrows)
    x, y = np.meshgrid(x, y)
# Work on a 45x45 sub-region; the search's index space is 0..44 per axis.
coordinates = (range(0, 45), range(0, 45))
region = np.s_[5:50, 5:50]
x, y, z = x[region], y[region], z[region]
fig, ax = plt.subplots(subplot_kw=dict(projection='3d'))
ax.set_xlabel("South-North")
ax.set_ylabel("West-East")
ls = LightSource(270, 45)
# To use a custom hillshading mode, override the built-in shading and pass
# in the rgb colors of the shaded surface calculated from "shade".
rgb = ls.shade(z, cmap=cm.gist_earth, vert_exag=0.1, blend_mode='soft')
surf = ax.plot_surface(x, y, z, rstride=1, cstride=1, facecolors=rgb,
                       linewidth=0, antialiased=False, shade=False)
origin = (5, 5)
goal = (40, 40)
# Known obstacle cells: a short wall plus two square blocked regions.
obstacles = [(7, 9), (8, 9), (9, 9), (10, 9), (11, 9), (12, 9),
             (20, 16), (20, 17), (20, 18), (20, 19), (20, 20)]
for i in range(15, 25):
    for j in range(15, 25):
        obstacles.append((i, j))
for i in range(30, 40):
    for j in range(30, 40):
        obstacles.append((i, j))
#obstacles = []
# Mark the start (magenta) and the goal (green) on the surface plot.
trajectory = ax.plot([x[origin[0]][origin[1]]], [y[origin[0]][origin[1]]],
                     [z[origin[0]][origin[1]]], markerfacecolor='m',
                     markeredgecolor='w', marker='o', markersize=5, alpha=0.6)
trajectory = ax.plot([x[goal[0]][goal[1]]], [y[goal[0]][goal[1]]],
                     [z[goal[0]][goal[1]]], markerfacecolor='g',
                     markeredgecolor='w', marker='o', markersize=5, alpha=0.6)
plt.show(block=False)
plt.pause(5)
# Visited map for the search: every cell starts unvisited.
marked = dict()
for i in coordinates[0]:
    for j in coordinates[1]:
        marked[(i, j)] = False
trace, dist, parent = a_star_search(origin, goal, heuristic, coordinates, obstacles, marked, z)
# Walk the parent chain back from the goal, plotting the path in red.
while parent != origin:
    print(parent[0], parent[1])
    trajectory = ax.plot([x[parent[0]][parent[1]]], [y[parent[0]][parent[1]]],
                         [z[parent[0]][parent[1]]], markerfacecolor='r',
                         markeredgecolor='r', marker='o', markersize=5, alpha=0.6)
    parent = trace[parent]
plt.draw()
print("You have reached the goal!")
print("The final distance walked is", dist)
plt.show()
|
{"/friction.py": ["/analysis.py"]}
|
14,768
|
edwinbalani/grov
|
refs/heads/master
|
/friction.py
|
# Copyright 2017 Edwin Bahrami Balani and Qiaochu Jiang
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Functions related to probe traction"""
import numpy as np
import analysis
def fix_angle_range(a: float):
    """
    Map any radian angle onto the range [0, pi].

    The angle is first brought into [0, 2*pi] by adding or subtracting
    full turns, then reflected about pi if it exceeds pi.
    """
    full_turn = 2 * np.pi
    while a < 0:
        a += full_turn
    while a > full_turn:
        a -= full_turn
    return full_turn - a if a > np.pi else a
def safe_slope(grad: np.ndarray, mu: float):
    """
    Return True when the steepest slope at a point is within the friction limit.

    The tangent of the steepest slope angle equals the magnitude of the
    gradient vector, and the tangent of the limiting-friction angle is mu,
    so the point is safe when arctan(|grad|) <= arctan(mu).

    :param grad: Gradient vector representing steepest slope at a point
    :param mu: Coefficient of friction
    :return: True if the slope can be held without slipping
    """
    if not grad.size == 2:
        raise ValueError("Gradient vector must have two components")
    # Bug fix: the original rebound `grad` to its scalar norm and then
    # indexed grad[1] (a crash), and it measured the *azimuth* of the
    # gradient rather than its steepness. Compare the slope angle against
    # the angle of limiting friction instead.
    slope_angle = np.arctan2(np.linalg.norm(grad), 1)
    phi = np.arctan2(mu, 1)  # Angle of limiting friction
    return slope_angle <= phi
def safe_point(grid, indices, mu, window_size=5):
    """
    Determine whether a point is safe for the probe to climb.

    :param grid: 2-D grid of Z values
    :param indices: indices of the point to test
    :param mu: coefficient of friction at the point
    :param window_size: window size for the gradient fit (default 5)
    :return: result of safe_slope for the fitted gradient
    """
    grad = analysis.gradient(grid, indices, window_size=window_size)
    return safe_slope(grad, mu)
|
{"/friction.py": ["/analysis.py"]}
|
14,772
|
tingwen0125/L-store-database-management-system
|
refs/heads/main
|
/template/index.py
|
"""
A data strucutre holding indices for various columns of a table.
Key column should be indexd by default, other columns can be indexed through this object.
Indices are usually B-Trees, but other data structures can be used as well.
The Index class provides a data structure that allows fast processing of queries (e.g.,
select or update) by indexing columns of tables over their values. Given a certain
value for a column, the index should efficiently locate all records having that value. The
key column of all tables is usually indexed by default for performance reasons.
Supporting indexing is optional for this milestone. The API for this class exposes the
two functions create_index and drop_index (optional for this milestone).
"""
class Index:
    """Per-column index bookkeeping for one table (all lookups are stubs)."""
    def __init__(self, table):
        # One index for each table. All are empty initially.
        self.indices = [None] * table.num_columns
        pass
    """
    # returns the location of all records with the given value on column "column"
    """
    def locate(self, column, value):
        # Not implemented in this milestone.
        pass
    """
    # Returns the RIDs of all records with values in column "column" between "begin" and "end"
    """
    def locate_range(self, begin, end, column):
        # Not implemented in this milestone.
        pass
    """
    # optional: Create index on specific column
    """
    def create_index(self, column_number):
        # Optional for this milestone; not implemented.
        pass
    """
    # optional: Drop index of specific column
    """
    def drop_index(self, column_number):
        # Optional for this milestone; not implemented.
        pass
|
{"/template/query.py": ["/template/table.py", "/template/index.py"], "/template/db.py": ["/template/table.py"], "/template/table.py": ["/template/index.py", "/template/config.py"]}
|
14,773
|
tingwen0125/L-store-database-management-system
|
refs/heads/main
|
/template/query.py
|
from template.table import Table, Record
from template.index import Index
from template.page import Page, BasePage, PageRange
import datetime
'''
The Query class provides standard SQL operations such as insert, select,
update, delete and sum. The select function returns the specified set of columns
from the record with the given key (if available). The insert function will insert a new
record in the table. All columns should be passed a non-NULL value when inserting. The
update function updates values for the specified set of columns. The delete function
will delete the record with the specified key from the table. The sum function will sum
over the values of the selected column for a range of records specified by their key
values. We query tables by direct function calls rather than parsing SQL queries.
'''
class Query:
    """
    # Creates a Query object that can perform different queries on the specified table
    Queries that fail must return False
    Queries that succeed should return the result or True
    Any query that crashes (due to exceptions) should return False
    """
    def __init__(self, table):
        # The table this Query operates on; every method goes through it.
        self.table = table
        pass
    """
    # internal Method
    # Read a record with specified key
    # Returns True upon succesful deletion
    # Return False if record doesn't exist or is locked due to 2PL
    """
    def delete(self, key):
        # Not implemented in this milestone.
        pass
    """
    # Insert a record with specified columns
    # Return True upon succesful insertion
    # Returns False if insert fails for whatever reason
    """
    def insert(self, *columns):
        '''record example:[0, 0, 20210131111207, 0, 906659671, 93, 0, 0, 0]'''
        # Check if key is duplicated
        if (columns[self.table.key] in self.table.keyToBaseRID):
            return False
        total_col = []
        # New records start with an all-zero schema encoding (no updates yet).
        schema_encoding = int('0' * self.table.num_columns, 2)
        # Timestamp is stored as the integer YYYYMMDDHHMMSS.
        time = datetime.datetime.now()
        int_time = int(time.strftime("%Y%m%d%H%M%S"))
        curPageRange = self.table.pageRanges[-1]
        curBasePage = curPageRange.basePageList[-1]
        # open a new page range or new base page
        if curPageRange.has_capacity() == False:
            self.table.pageRanges.append(PageRange(self.table.num_columns))
            curPageRange = self.table.pageRanges[-1]
            curBasePage = curPageRange.basePageList[-1]
        elif curBasePage.has_capacity() == False:
            curPageRange.basePageList.append(BasePage(self.table.num_columns))
            curBasePage = curPageRange.basePageList[-1]
        # Physical record = 4 metadata columns (indirection, RID, timestamp,
        # schema encoding) followed by the user columns.
        total_col.extend([0, self.table.baseRID, int_time, schema_encoding])
        total_col += columns
        for i in range(len(total_col)):
            curBasePage.basePage[i].write(total_col[i])
            #test
            #start = (curBasePage.basePage[i].num_records - 1) * 8
            #end = curBasePage.basePage[i].num_records * 8
            #int_val=int.from_bytes(curBasePage.basePage[i].data[start:end],'big')
            #print(int_val)
        # The key column sits at offset key + 4 within the physical record.
        self.table.keyToBaseRID[total_col[self.table.key + 4]] = self.table.baseRID
        self.table.baseRID += 1
        return True
    """
    # Read a record with specified key
    # :param key: the key value to select records based on
    # :param query_columns: what columns to return. array of 1 or 0 values.
    # Returns a list of Record objects upon success
    # Returns False if record locked by TPL
    # Assume that select will never be called on a key that doesn't exist
    """
    def select(self, key, column, query_columns):
        # NOTE(review): the `column` parameter is unused — lookups only go
        # through the primary-key map; confirm non-key selects are out of
        # scope for this milestone. Tail (updated) values are also ignored.
        listSelect = []
        recordSelect = []
        #locate record position
        if key in self.table.keyToBaseRID.keys():
            baseRID = self.table.keyToBaseRID[key]
            baseRecord = self.table.baseRIDToRecord(baseRID)
            for i in range(len(query_columns)):
                if query_columns[i] == 1:
                    # User columns start after the 4 metadata columns.
                    val = baseRecord[i+4]
                    recordSelect.append(val)
                else:
                    recordSelect.append(None)
            listSelect.append(Record(baseRID, key, recordSelect))
        return listSelect
    """
    # Update a record with specified key and columns
    # Returns True if update is succesful
    # Returns False if no records exist with given key or if the target record cannot be accessed due to 2PL locking
    """
    '''
    def getUpdateRID(self,key):
        return self.table.keyToBaseRID[key]
    def getUpdatePageR(self,rid):
        return self.table.getPageR(rid)
    '''
    def update(self, key, *columns):
        # NOTE(review): raises KeyError when `key` is absent, although the
        # contract above says to return False — confirm intended behaviour.
        baseRID = self.table.keyToBaseRID[key]
        location = self.table.baseRIDToLocation(baseRID)
        pageRange_index = location[0]
        baseRecord = self.table.baseRIDToRecord(baseRID)
        #print("Before update:", baseRecord)
        #check if the tail page in that page range still have space
        if self.table.pageRanges[pageRange_index].tailPageList[-1].has_capacity() == False: #if no capacity, add a new tail page
            self.table.pageRanges[pageRange_index].tailPageList.append(BasePage(self.table.num_columns))
        updateEncoding = "" #updated schema encoding
        # Build the bitmap: '1' where a new value is supplied, '0' for None.
        for i in range(len(columns)):
            if columns[i] == None:
                updateEncoding += "0"
            else:
                updateEncoding += "1"
        updateEncoding = int(updateEncoding, 2)
        time = datetime.datetime.now()
        int_time = int(time.strftime("%Y%m%d%H%M%S"))
        baseRecordIndirect = baseRecord[0]
        tailIndirect = 0
        # Current tailRecord is not the first update to a baseRecord then get the last tail record RID
        if (baseRecordIndirect != 0):
            tailIndirect = baseRecordIndirect
        # Update baseRecord indirect column
        self.table.writeByte(self.table.tailRID, location, 0)
        tailrecord = [self.table.tailRID, tailIndirect, int_time,updateEncoding]+list(columns)
        currTailPage = self.table.pageRanges[pageRange_index].tailPageList[-1]
        for i in range(len(tailrecord)):
            currTailPage.basePage[i].write(tailrecord[i])
        self.table.tailRID += 1
        #baseRecord = self.table.baseRIDToRecord(baseRID)
        #print("After update:", baseRecord)
        return True
    """
    :param start_range: int # Start of the key range to aggregate
    :param end_range: int # End of the key range to aggregate
    :param aggregate_columns: int # Index of desired column to aggregate
    # this function is only called on the primary key.
    # Returns the summation of the given range upon success
    # Returns False if no record exists in the given range
    """
    def sum(self, start_range, end_range, aggregate_column_index):
        # NOTE(review): assumes baseRID == key + 1 (keys inserted in order
        # starting at 0) and reads base records only, ignoring tail
        # updates — confirm both assumptions hold for the test workload.
        startRID = start_range + 1
        endRID = end_range + 1
        sum = 0
        if (startRID > self.table.baseRID):
            return False
        for i in range(startRID, endRID):
            baseRecord = self.table.baseRIDToRecord(i)
            sum += baseRecord[aggregate_column_index+4]
        return sum
    """
    incremenets one column of the record
    this implementation should work if your select and update queries already work
    :param key: the primary of key of the record to increment
    :param column: the column to increment
    # Returns True is increment is successful
    # Returns False if no record matches key or if target record is locked by 2PL.
    """
    def increment(self, key, column):
        # NOTE(review): r is a Record instance and Record defines no
        # __getitem__, so r[column] below looks like it should be
        # r.columns[column] — verify against the tester before relying on it.
        r = self.select(key, self.table.key, [1] * self.table.num_columns)[0]
        if r is not False:
            updated_columns = [None] * self.table.num_columns
            updated_columns[column] = r[column] + 1
            u = self.update(key, *updated_columns)
            return u
        return False
|
{"/template/query.py": ["/template/table.py", "/template/index.py"], "/template/db.py": ["/template/table.py"], "/template/table.py": ["/template/index.py", "/template/config.py"]}
|
14,774
|
tingwen0125/L-store-database-management-system
|
refs/heads/main
|
/template/db.py
|
from template.table import Table
'''
The Database class is a general interface to the database and handles high-level
operations such as starting and shutting down the database instance and loading the
database from stored disk files. This class also handles the creation and deletion of
tables via the create and drop function.The create function will create a new
table in the database. The Table constructor takes as input the name of the table,
number of columns and the index of the key column. The drop function drops the
specified table.
'''
class Database():
    """In-memory registry of tables, keyed by table name."""

    def __init__(self):
        # name -> Table mapping; persistence is out of scope for milestone 1.
        self.tables = {}

    def open(self, path):
        """Load the database from disk (not required for milestone 1)."""
        pass

    def close(self):
        """Flush the database to disk (not required for milestone 1)."""
        pass

    def create_table(self, name, num_columns, key):
        """
        Create and register a new table.

        :param name: string #Table name
        :param num_columns: int #Number of Columns: all columns are integer
        :param key: int #Index of table key in columns
        """
        new_table = Table(name, num_columns, key)
        self.tables[name] = new_table
        return new_table

    def drop_table(self, name):
        """Remove and return the named table (KeyError when absent)."""
        return self.tables.pop(name)

    def get_table(self, name):
        """Return the table registered under *name* (KeyError when absent)."""
        return self.tables[name]
|
{"/template/query.py": ["/template/table.py", "/template/index.py"], "/template/db.py": ["/template/table.py"], "/template/table.py": ["/template/index.py", "/template/config.py"]}
|
14,775
|
tingwen0125/L-store-database-management-system
|
refs/heads/main
|
/template/config.py
|
# Global Setting for the Database
# PageSize, StartRID, etc..
'''
The config.py file is meant to act as centralized storage for all the configuration options
and the constant values used in the code. It is good practice to organize such
information into a Singleton object accessible from every file in the project. This class
will find more use when implementing persistence in the next milestone.
'''
PAGE_SIZE = 4096          # bytes per physical page
INT_SIZE = 8              # bytes per stored integer value
PAGE_RANGE_SIZE = 65536   # bytes per page range
# Bug fix: use floor division so the derived constants stay integers — they
# are record/page *counts* used in index arithmetic; true division made
# them floats (512.0, 16.0) with identical values.
MAX_NUM_RECORD = PAGE_SIZE // INT_SIZE                    # 512 records per page
BASE_PAGE_PER_PAGE_RANGE = PAGE_RANGE_SIZE // PAGE_SIZE   # 16 base pages per range
|
{"/template/query.py": ["/template/table.py", "/template/index.py"], "/template/db.py": ["/template/table.py"], "/template/table.py": ["/template/index.py", "/template/config.py"]}
|
14,776
|
tingwen0125/L-store-database-management-system
|
refs/heads/main
|
/template/table.py
|
from template.page import PageRange
from template.index import Index
from template.config import *
from time import time
# Fixed positions of the four metadata columns that precede the user data
# in every physical record.
INDIRECTION_COLUMN = 0       # link to the newest tail record (0 = never updated)
RID_COLUMN = 1               # the record's own RID
TIMESTAMP_COLUMN = 2         # creation/update time as integer YYYYMMDDHHMMSS
SCHEMA_ENCODING_COLUMN = 3   # bitmap of which user columns were updated
'''
The Table class provides the core of our relational storage functionality. All columns are
64-bit integers in this implementation. Users mainly interact with tables through queries.
Tables provide a logical view over the actual physically stored data and mostly manage
the storage and retrieval of data. Each table is responsible for managing its pages and
requires an internal page directory that given a RID it returns the actual physical location
of the record. The table class should also manage the periodical merge of its
corresponding page ranges.
'''
#16 base pages in one page range
class Record:
    """A materialised record: its RID, primary-key value and column values."""

    def __init__(self, rid, key, columns):
        self.rid = rid          # record identifier
        self.key = key          # primary-key value
        self.columns = columns  # list of column values (None = not selected)

    def __repr__(self):
        # Added for debuggability; purely additive, so callers relying on
        # the previous default repr are unaffected functionally.
        return f"Record(rid={self.rid!r}, key={self.key!r}, columns={self.columns!r})"

    def getColumns(self):
        """Return the list of column values."""
        return self.columns
class Table:
    """
    :param name: string #Table name
    :param num_columns: int #Number of Columns: all columns are integer
    :param key: int #Index of table key in columns
    """
    def __init__(self, name, num_columns, key):
        self.name = name
        self.key = key
        self.num_columns = num_columns
        self.page_directory = {}  # RID -> physical location (appears unused in the code shown here)
        self.index = Index(self)
        # Storage starts with a single empty page range.
        self.pageRanges = [PageRange(self.num_columns)]
        self.keyToBaseRID = {}  # primary-key value -> base record RID
        self.baseRID = 1  # next base RID to assign (RIDs start at 1)
        self.tailRID = 1  # next tail RID to assign
        pass
    # Given a baseRID return a baseRecord
    # The way to access a value using a location:
    # e.g. value = int.from_bytes(pageRanges[pageRange_index].basePageList
    # [basePageList_index].basePage[columnNum].data[offset_index*8:(offset_index+1)*8], 'big')
    def baseRIDToRecord(self, baseRID):
        """Read back the full physical record (4 metadata + user columns) for a base RID."""
        # Layout: 512 records per base page, 16 base pages per range =>
        # 8192 records per page range; the -1s account for RIDs starting at 1.
        pageRange_index = (baseRID-1) // 8192 #512*16
        basePageList_index = (baseRID-1-512 * 16 * pageRange_index) // 512
        offset_index = baseRID-512 * (16*pageRange_index+basePageList_index)-1
        baseRecord = []
        for i in range(4+self.num_columns):
            # Values are fixed-width 8-byte big-endian integers.
            baseRecord.append(int.from_bytes(self.pageRanges[pageRange_index].basePageList[basePageList_index].basePage[i].data \
                [offset_index*8:(offset_index+1)*8], 'big'))
        return baseRecord
    def baseRIDToLocation(self, baseRID):
        """Translate a base RID into [pageRange, basePage, slot] indices."""
        pageRange_index = (baseRID-1) // 8192 #512*16
        basePageList_index = (baseRID-1-512 * 16 * pageRange_index) // 512
        offset_index = baseRID-512 * (16*pageRange_index+basePageList_index)-1
        location = [pageRange_index, basePageList_index, offset_index]
        return location
    '''
    def tailRIDToLocation(self, tailRID):
        pageRange_index = (tailRID-1) // 8192
        tailPageList_index = (tailRID-1-512 * 16 * pageRange_index) // 512
        offset_index = (tailRID-512 * (16*pageRange_index+tailPageList_index)-1)
        location = [pageRange_index, tailPageList_index, offset_index]
    '''
    def writeByte(self, value, location, columnNum):
        """Overwrite one 8-byte column slot at *location* with *value* (big-endian)."""
        pageRange_index = location[0]
        basePageList_index = location[1]
        offset_index = location[2]
        self.pageRanges[pageRange_index].basePageList[basePageList_index] \
            .basePage[columnNum].data[offset_index*8:(offset_index+1)*8] = \
            value.to_bytes(8, 'big')
        return True
    def printRecord(self, rid):
        # Debug helper; not implemented.
        pass
    '''
    def getPageR(self,rid): #given rid return the page range the rid record is at
        print("PageR", rid//MAX_NUM_RECORD//BASE_PAGE_PER_PAGE_RANGE)
        print(type(rid//MAX_NUM_RECORD//BASE_PAGE_PER_PAGE_RANGE))
        return int(rid//MAX_NUM_RECORD//BASE_PAGE_PER_PAGE_RANGE)
    '''
    def __merge(self):
        # Periodic base/tail page merge; not implemented in this milestone.
        pass
|
{"/template/query.py": ["/template/table.py", "/template/index.py"], "/template/db.py": ["/template/table.py"], "/template/table.py": ["/template/index.py", "/template/config.py"]}
|
14,780
|
pakalnis92/property_manager
|
refs/heads/master
|
/propmanager/forms.py
|
from django.forms import forms
from propmanager.models import Property, Owner
class PropertyForm(forms.ModelForm):
    """Model form for creating/editing Property records.

    NOTE(review): the module imports ``from django.forms import forms``;
    the ``django.forms.forms`` submodule does not provide ``ModelForm``
    (it lives in ``django.forms`` / ``django.forms.models``) — confirm
    this import actually resolves.
    """
    class Meta:
        model = Property
        fields = ('address', 'value', 'location', 'property_type', 'owner')

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Start with an empty owner choice list; presumably narrowed later
        # by the view — TODO confirm how choices are meant to be populated.
        self.fields['owner'].queryset = Owner.objects.none()
|
{"/propmanager/forms.py": ["/propmanager/models.py"], "/propmanager/urls.py": ["/propmanager/views.py"], "/propmanager/views.py": ["/propmanager/models.py"], "/propmanager/admin.py": ["/propmanager/models.py"], "/prop_manage/urls.py": ["/propmanager/views.py"]}
|
14,781
|
pakalnis92/property_manager
|
refs/heads/master
|
/propmanager/urls.py
|
from django.urls import path
from propmanager.views import *
from . import views
# URL routes for the propmanager app.
urlpatterns = [
    # path('', views.index, name='index'),
    # path('edit/<int:room_id>/', EditProperty.as_view(), name='show-price_change_data-on-map'),
    # NOTE(review): views.py calls reverse('properties-list'), but this
    # route is named 'show-price_change_data-on-map' — confirm a route
    # named 'properties-list' exists elsewhere, or that this name is wrong.
    path('properties-list', PropertiesList.as_view(), name='show-price_change_data-on-map'),
]
|
{"/propmanager/forms.py": ["/propmanager/models.py"], "/propmanager/urls.py": ["/propmanager/views.py"], "/propmanager/views.py": ["/propmanager/models.py"], "/propmanager/admin.py": ["/propmanager/models.py"], "/prop_manage/urls.py": ["/propmanager/views.py"]}
|
14,782
|
pakalnis92/property_manager
|
refs/heads/master
|
/propmanager/migrations/0001_initial.py
|
# Generated by Django 2.2.6 on 2019-10-27 11:42
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial auto-generated schema: Owner, PropertyType and Property models.

    NOTE: generated by Django — prefer creating a new migration over
    hand-editing this one.
    """
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Owner',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('first_name', models.CharField(default='N/A', max_length=100)),
                ('last_name', models.CharField(default='N/A', max_length=100)),
            ],
        ),
        migrations.CreateModel(
            name='PropertyType',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(default='N/A', max_length=100)),
            ],
        ),
        migrations.CreateModel(
            name='Property',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('address_line_1', models.CharField(default='N/A', max_length=100)),
                ('address_line_2', models.CharField(default='N/A', max_length=100)),
                ('city_town', models.CharField(default='N/A', max_length=100)),
                ('county', models.CharField(default='N/A', max_length=100)),
                ('post_code', models.CharField(default='N/A', max_length=100)),
                ('value', models.DecimalField(decimal_places=2, max_digits=20)),
                ('location', models.CharField(default='N/A', max_length=100)),
                ('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='propmanager.Owner')),
                ('property_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='propmanager.PropertyType')),
            ],
        ),
    ]
|
{"/propmanager/forms.py": ["/propmanager/models.py"], "/propmanager/urls.py": ["/propmanager/views.py"], "/propmanager/views.py": ["/propmanager/models.py"], "/propmanager/admin.py": ["/propmanager/models.py"], "/prop_manage/urls.py": ["/propmanager/views.py"]}
|
14,783
|
pakalnis92/property_manager
|
refs/heads/master
|
/propmanager/views.py
|
from django.http import HttpResponseRedirect
from django.shortcuts import render, get_object_or_404
from django.contrib import messages
# Create your views here.
from django.urls import reverse_lazy, reverse
from django.views.generic import TemplateView
from propmanager.models import *
class PropertiesList(TemplateView):
    """Render the list of all properties and dispatch the page's buttons."""

    template_name = "PropertyList.html"

    def post(self, request, *args, **kwargs):
        """Redirect according to the button pressed.

        Fix: the original returned ``None`` (an invalid HTTP response)
        when neither known button was present in the POST data; fall back
        to re-rendering the page instead.
        """
        if request.POST.get('go_edit'):
            return HttpResponseRedirect(reverse('add-property'))
        if request.POST.get('main_menu'):
            return HttpResponseRedirect(reverse('main-menu'))
        return self.get(request, *args, **kwargs)

    def get_context_data(self, room_id=None, **kwargs):
        """Expose every Property to the template.

        ``room_id`` is accepted but unused; kept for caller compatibility.
        """
        return {'properties': Property.objects.all()}
class EditPropertyDetails(TemplateView):
    """Edit or delete one Property, identified by the ``property_id`` URL kwarg."""

    template_name = "edit.html"

    def post(self, request, *args, **kwargs):
        """Handle the edit / delete / navigation buttons.

        Fixes over the original: ``type`` no longer shadows the builtin,
        and an unrecognised POST no longer returns ``None`` (an invalid
        HTTP response) — it re-renders the page instead.
        """
        if request.POST.get('edit'):
            property_id = int(self.kwargs['property_id'])
            owner = Owner.objects.get(pk=int(request.POST.get("owner", None)))
            property_type = PropertyType.objects.get(pk=request.POST.get("type", None))
            edited_property = Property.objects.get(pk=property_id)
            edited_property.owner = owner
            edited_property.value = request.POST.get("value", None)
            edited_property.property_type = property_type
            # Only the three edited columns are written back.
            edited_property.save(update_fields=['owner', 'value', 'property_type'])
            messages.success(request, 'Property details was successfully updated')
            return HttpResponseRedirect(f'/edit_property/{property_id}/')
        if request.POST.get('delete'):
            property_id = int(self.kwargs['property_id'])
            Property.objects.get(pk=property_id).delete()
            messages.success(request, 'Property was successfully deleted')
            return HttpResponseRedirect(reverse('properties-list'))
        if request.POST.get('list'):
            return HttpResponseRedirect(reverse('properties-list'))
        if request.POST.get('main'):
            return HttpResponseRedirect(reverse('main-menu'))
        return self.get(request, *args, **kwargs)

    def get_context_data(self, property_id=None, **kwargs):
        """Build the edit form context for the selected property.

        Fix: use ``get_object_or_404`` (already imported at module level)
        so an unknown id yields a 404 instead of an unhandled
        ``Property.DoesNotExist`` server error.
        """
        selected_property = get_object_or_404(Property, pk=property_id)
        # Exclude the owner already linked to the property so the dropdown
        # shows no duplicate entry.
        all_owners_final = Owner.objects.all().exclude(pk=selected_property.owner.pk)
        # Same de-duplication for the property-type dropdown.
        all_property_types_final = PropertyType.objects.all().exclude(
            name=selected_property.property_type.name)
        return {
            'property': selected_property,
            'owners': all_owners_final,
            'property_types': all_property_types_final,
        }
class MainMenu(TemplateView):
    """Main menu page: routes to the property list or the add-property form."""

    template_name = "main.html"

    def post(self, request, *args, **kwargs):
        """Dispatch the menu buttons.

        Fixes over the original: redirects use ``reverse()`` (as every
        other view in this module does) instead of relative URL strings,
        which resolved against the current path and broke because this
        view is mounted at ``/main`` without a trailing slash; and an
        unrecognised POST no longer returns ``None``.
        """
        if request.POST.get('list'):
            return HttpResponseRedirect(reverse('properties-list'))
        if request.POST.get('add_new'):
            return HttpResponseRedirect(reverse('add-property'))
        return self.get(request, *args, **kwargs)
class AddProperty(TemplateView):
    """Create a new Property from the submitted form data."""

    template_name = "new_property.html"

    def post(self, request, *args, **kwargs):
        """Create the property, or navigate back to the main menu.

        Fixes over the original: ``type`` no longer shadows the builtin,
        and an unrecognised POST no longer returns ``None`` (an invalid
        HTTP response) — the form is re-rendered instead.
        """
        if request.POST.get('create_property'):
            address1 = request.POST.get("address1", None)
            address2 = request.POST.get("address2", None)
            city_town = request.POST.get("city_town", None)
            county = request.POST.get("county", None)
            post_code = request.POST.get("post_code", None)
            location = request.POST.get("location", None)
            value = request.POST.get("value", None)
            owner = Owner.objects.get(pk=int(request.POST.get("owner", None)))
            property_type = PropertyType.objects.get(pk=request.POST.get("type", None))
            Property.objects.create(address_line_1=address1, address_line_2=address2, city_town=city_town,
                                    county=county,
                                    post_code=post_code, location=location, property_type=property_type,
                                    value=value, owner=owner)
            messages.success(request,
                             f'Property with address: {address1} {address2} {city_town} {county} {post_code} was added to the system.')
            return HttpResponseRedirect(reverse('properties-list'))
        if request.POST.get('main_menu'):
            return HttpResponseRedirect(reverse('main-menu'))
        return self.get(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        """Provide the form's dropdown data and the existing properties."""
        return {
            'properties': Property.objects.all(),
            'owners': Owner.objects.all(),
            'property_types': PropertyType.objects.all(),
        }
|
{"/propmanager/forms.py": ["/propmanager/models.py"], "/propmanager/urls.py": ["/propmanager/views.py"], "/propmanager/views.py": ["/propmanager/models.py"], "/propmanager/admin.py": ["/propmanager/models.py"], "/prop_manage/urls.py": ["/propmanager/views.py"]}
|
14,784
|
pakalnis92/property_manager
|
refs/heads/master
|
/propmanager/admin.py
|
from django.contrib import admin
from propmanager.models import *
# Register your models here.
@admin.register(Owner)
class OwnerAdmin(admin.ModelAdmin):
    """Admin list view for Owner.

    Fix: dropped the redundant ``model = Owner`` class attribute — the
    model is supplied by ``@admin.register`` and the attribute is
    overwritten by ``ModelAdmin.__init__``.
    """

    # NOTE(review): models.py shows Owner also has email/telephone fields,
    # so displaying them here matches the model.
    list_display = ('first_name', 'last_name', 'email', 'telephone')
@admin.register(PropertyType)
class PropertyTypeAdmin(admin.ModelAdmin):
    """Admin list view for PropertyType.

    Fix: dropped the redundant ``model = PropertyType`` class attribute —
    the model is supplied by ``@admin.register`` and the attribute is
    overwritten by ``ModelAdmin.__init__``.
    """

    list_display = ('name',)
|
{"/propmanager/forms.py": ["/propmanager/models.py"], "/propmanager/urls.py": ["/propmanager/views.py"], "/propmanager/views.py": ["/propmanager/models.py"], "/propmanager/admin.py": ["/propmanager/models.py"], "/prop_manage/urls.py": ["/propmanager/views.py"]}
|
14,785
|
pakalnis92/property_manager
|
refs/heads/master
|
/propmanager/models.py
|
from enum import Enum
from django.db import models
# Create your models here.
class Owner(models.Model):
    """A person who owns one or more properties."""

    # All fields default to the literal placeholder "N/A".
    first_name = models.CharField(max_length=50, default="N/A")
    last_name = models.CharField(max_length=50, default="N/A")
    email = models.CharField(max_length=20, default="N/A")
    telephone = models.CharField(max_length=20, default="N/A")

    def __str__(self):
        return f'Owner full name: {self.first_name} {self.last_name}'
class PropertyType(models.Model):
    """A category label for properties (e.g. stored free-text names)."""

    name = models.CharField(max_length=100, default="N/A")

    def __str__(self):
        # The surrounding single spaces match the original format exactly.
        return f' {self.name} '
class Property(models.Model):
    """
    Model to represent the property.
    """

    address_line_1 = models.CharField(max_length=100, default="N/A")
    address_line_2 = models.CharField(max_length=100, default="N/A")
    city_town = models.CharField(max_length=100, default="N/A")
    county = models.CharField(max_length=100, default="N/A")
    post_code = models.CharField(max_length=100, default="N/A")
    # Monetary value: up to 20 digits, 2 decimal places.
    value = models.DecimalField(max_digits=20, decimal_places=2)
    location = models.CharField(max_length=100, default="N/A")
    # CASCADE: deleting the type or the owner deletes this property too.
    property_type = models.ForeignKey(PropertyType, on_delete=models.CASCADE)
    owner = models.ForeignKey(Owner, on_delete=models.CASCADE)

    def __str__(self):
        return (f'Property at {self.address_line_1} {self.address_line_2} '
                f'{self.city_town} {self.county} {self.post_code}')
|
{"/propmanager/forms.py": ["/propmanager/models.py"], "/propmanager/urls.py": ["/propmanager/views.py"], "/propmanager/views.py": ["/propmanager/models.py"], "/propmanager/admin.py": ["/propmanager/models.py"], "/prop_manage/urls.py": ["/propmanager/views.py"]}
|
14,786
|
pakalnis92/property_manager
|
refs/heads/master
|
/prop_manage/urls.py
|
"""prop_manage URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from propmanager.views import *
from django.contrib import admin
from django.urls import path, include
# Project-level URL table; every named route is targeted by reverse() in
# propmanager/views.py, so the names below must not change.
urlpatterns = [
    path('admin/', admin.site.urls),
    # path('propmanager/', include('propmanager.urls')),
    path('properties-list/', PropertiesList.as_view(), name='properties-list'),
    path('add-property/', AddProperty.as_view(), name='add-property'),
    path('edit_property/<int:property_id>/', EditPropertyDetails.as_view(), name='edit-property'),
    # NOTE(review): 'main' has no trailing slash, unlike every other route —
    # presumably an oversight; confirm before changing, since it alters a live URL.
    path('main', MainMenu.as_view(), name='main-menu'),
]
|
{"/propmanager/forms.py": ["/propmanager/models.py"], "/propmanager/urls.py": ["/propmanager/views.py"], "/propmanager/views.py": ["/propmanager/models.py"], "/propmanager/admin.py": ["/propmanager/models.py"], "/prop_manage/urls.py": ["/propmanager/views.py"]}
|
14,787
|
pakalnis92/property_manager
|
refs/heads/master
|
/propmanager/apps.py
|
from django.apps import AppConfig
class PropmanagerConfig(AppConfig):
    """Django app configuration for the propmanager app."""
    name = 'propmanager'
|
{"/propmanager/forms.py": ["/propmanager/models.py"], "/propmanager/urls.py": ["/propmanager/views.py"], "/propmanager/views.py": ["/propmanager/models.py"], "/propmanager/admin.py": ["/propmanager/models.py"], "/prop_manage/urls.py": ["/propmanager/views.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.