index int64 | repo_name string | branch_name string | path string | content string | import_graph string |
|---|---|---|---|---|---|
42,380 | DanielWinklehner/py_particle_processor | refs/heads/master | /py_particle_processor_qt/tools/CollimOPAL/__init__.py | from py_particle_processor_qt.tools.CollimOPAL.CollimOPAL import *
| {"/py_particle_processor_qt/plotting.py": ["/py_particle_processor_qt/gui/plot_settings.py", "/py_particle_processor_qt/gui/default_plot_settings.py"], "/py_particle_processor_qt/test/__main__.py": ["/py_particle_processor_qt/py_particle_processor_qt.py"], "/py_particle_processor_qt/tools/OrbitTool/OrbitTool.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/OrbitTool/orbittoolgui.py"], "/examples/gui_example.py": ["/py_particle_processor_qt/py_particle_processor_qt.py"], "/py_particle_processor_qt/drivers/OPALDriver/OPALDriver.py": ["/py_particle_processor_qt/drivers/arraywrapper.py", "/py_particle_processor_qt/drivers/abstractdriver.py"], "/py_particle_processor_qt/drivers/COMSOLDriver/COMSOLDriver.py": ["/py_particle_processor_qt/drivers/arraywrapper.py", "/py_particle_processor_qt/drivers/abstractdriver.py"], "/py_particle_processor_qt/drivers/IBSimuDriver/__init__.py": ["/py_particle_processor_qt/drivers/IBSimuDriver/IBSimuDriver.py"], "/py_particle_processor_qt/tools/TranslateTool/__init__.py": ["/py_particle_processor_qt/tools/TranslateTool/TranslateTool.py"], "/py_particle_processor_qt/tools/TranslateTool/TranslateTool.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/TranslateTool/translatetoolgui.py"], "/py_particle_processor_qt/drivers/FreeCADDriver/__init__.py": ["/py_particle_processor_qt/drivers/FreeCADDriver/FreeCADDriver.py"], "/py_particle_processor_qt/tools/AnimateXY/AnimateXY.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/AnimateXY/animateXYgui.py"], "/py_particle_processor_qt/dataset.py": ["/py_particle_processor_qt/drivers/__init__.py"], "/py_particle_processor_qt/tools/OrbitTool/__init__.py": ["/py_particle_processor_qt/tools/OrbitTool/OrbitTool.py"], "/py_particle_processor_qt/generator.py": ["/py_particle_processor_qt/gui/generate_main.py", "/py_particle_processor_qt/gui/generate_error.py", 
"/py_particle_processor_qt/gui/generate_envelope.py", "/py_particle_processor_qt/gui/generate_twiss.py", "/py_particle_processor_qt/drivers/TraceWinDriver/__init__.py"], "/py_particle_processor_qt/drivers/OPALDriver/__init__.py": ["/py_particle_processor_qt/drivers/OPALDriver/OPALDriver.py"], "/py_particle_processor_qt/tools/BeamChar/BeamChar.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/BeamChar/beamchargui.py"], "/py_particle_processor_qt/tools/AnimateXY/__init__.py": ["/py_particle_processor_qt/tools/AnimateXY/AnimateXY.py"], "/py_particle_processor_qt/tools/RotateTool/__init__.py": ["/py_particle_processor_qt/tools/RotateTool/RotateTool.py"], "/py_particle_processor_qt/tools/__init__.py": ["/py_particle_processor_qt/tools/ScaleTool/__init__.py", "/py_particle_processor_qt/tools/TranslateTool/__init__.py", "/py_particle_processor_qt/tools/AnimateXY/__init__.py", "/py_particle_processor_qt/tools/BeamChar/__init__.py", "/py_particle_processor_qt/tools/CollimOPAL/__init__.py", "/py_particle_processor_qt/tools/OrbitTool/__init__.py", "/py_particle_processor_qt/tools/RotateTool/__init__.py"], "/py_particle_processor_qt/tools/BeamChar/__init__.py": ["/py_particle_processor_qt/tools/BeamChar/BeamChar.py"], "/py_particle_processor_qt/drivers/TrackDriver/__init__.py": ["/py_particle_processor_qt/drivers/TrackDriver/TrackDriver.py"], "/py_particle_processor_qt/__init__.py": ["/py_particle_processor_qt/py_particle_processor_qt.py"], "/py_particle_processor_qt/drivers/COMSOLDriver/__init__.py": ["/py_particle_processor_qt/drivers/COMSOLDriver/COMSOLDriver.py"], "/py_particle_processor_qt/tools/RotateTool/RotateTool.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/RotateTool/rotatetoolgui.py"], "/py_particle_processor_qt/tools/ScaleTool/ScaleTool.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/ScaleTool/scaletoolgui.py"], 
"/py_particle_processor_qt/tools/ScaleTool/__init__.py": ["/py_particle_processor_qt/tools/ScaleTool/ScaleTool.py"], "/py_particle_processor_qt/py_particle_processor_qt.py": ["/py_particle_processor_qt/dataset.py", "/py_particle_processor_qt/gui/main_window.py", "/py_particle_processor_qt/gui/species_prompt.py", "/py_particle_processor_qt/plotting.py", "/py_particle_processor_qt/generator.py", "/py_particle_processor_qt/tools/__init__.py"], "/py_particle_processor_qt/drivers/TrackDriver/TrackDriver.py": ["/py_particle_processor_qt/drivers/arraywrapper.py", "/py_particle_processor_qt/drivers/abstractdriver.py"], "/py_particle_processor_qt/drivers/FreeCADDriver/FreeCADDriver.py": ["/py_particle_processor_qt/drivers/abstractdriver.py"], "/py_particle_processor_qt/tools/CollimOPAL/__init__.py": ["/py_particle_processor_qt/tools/CollimOPAL/CollimOPAL.py"], "/py_particle_processor_qt/tools/CollimOPAL/CollimOPAL.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/CollimOPAL/collimOPALgui.py"], "/py_particle_processor_qt/drivers/__init__.py": ["/py_particle_processor_qt/drivers/OPALDriver/__init__.py", "/py_particle_processor_qt/drivers/TraceWinDriver/__init__.py", "/py_particle_processor_qt/drivers/COMSOLDriver/__init__.py", "/py_particle_processor_qt/drivers/IBSimuDriver/__init__.py", "/py_particle_processor_qt/drivers/TrackDriver/__init__.py", "/py_particle_processor_qt/drivers/FreeCADDriver/__init__.py"], "/py_particle_processor_qt/drivers/IBSimuDriver/IBSimuDriver.py": ["/py_particle_processor_qt/drivers/arraywrapper.py", "/py_particle_processor_qt/drivers/abstractdriver.py"]} |
42,381 | DanielWinklehner/py_particle_processor | refs/heads/master | /py_particle_processor/py_particle_processor.py | import gi
gi.require_version('Gtk', '3.0') # nopep8
gi.require_version('Gdk', '3.0') # nopep8
# from gi.repository import Gtk, GLib, GObject, Gdk
from dataset import *
__author__ = "Philip Weigel, Daniel Winklehner"
__doc__ = """A GTK+3 based GUI that allows loading particle data from
various simulation codes and exporting them for various other simulation codes.
"""
# Initialize some global constants
# NOTE(review): `const` is presumably scipy.constants (brought in via the
# wildcard import from dataset) -- confirm. CODATA keys imply SI/MeV units.
amu = const.value("atomic mass constant energy equivalent in MeV")  # MeV per u
echarge = const.value("elementary charge")  # coulomb
clight = const.value("speed of light in vacuum")  # m/s
class PyParticleProcessor(object):
    """GTK+3 main application: loads particle datasets from simulation codes
    and displays x-y, x-x', y-y' and phase-energy projections."""

    def __init__(self, debug=False):
        """
        Initialize the GUI.

        :param debug: if True, callbacks print diagnostic messages to stdout.
        """
        self._debug = debug
        self._colors = MyColors()

        # --- Load the GUI from XML file and initialize connections --- #
        # Signals must be connected after add_from_file, and the handler
        # dict comes from get_connections().
        self._builder = Gtk.Builder()
        self._builder.add_from_file("py_particle_processor.glade")
        self._builder.connect_signals(self.get_connections())

        # --- Get some widgets from the builder --- #
        self._main_window = self._builder.get_object("main_window")
        self._status_bar = self._builder.get_object("main_statusbar")
        # NOTE(review): object id "log_texbuffer" looks like a typo for
        # "log_textbuffer", but it must match the id in the .glade file --
        # confirm against the glade resource before renaming.
        self._log_textbuffer = self._builder.get_object("log_texbuffer")
        self._datasets_ls = self._builder.get_object("species_ls")
        self._datasets_tv = self._builder.get_object("species_tv")

        # One matplotlib canvas per projection, embedded into the glade
        # alignment containers below.
        self._base_plots_xy = MPLCanvasWrapper(main_window=self._main_window)
        self._base_plots_xxp = MPLCanvasWrapper(main_window=self._main_window)
        self._base_plots_yyp = MPLCanvasWrapper(main_window=self._main_window)
        self._base_plots_phe = MPLCanvasWrapper(main_window=self._main_window)
        self._builder.get_object("alignment2").add(self._base_plots_xxp)
        self._builder.get_object("alignment3").add(self._base_plots_yyp)
        self._builder.get_object("alignment4").add(self._base_plots_xy)
        self._builder.get_object("alignment6").add(self._base_plots_phe)

        # --- Create some CellRenderers for the Species TreeView
        # Text columns bind to liststore columns 1..5 (_i is incremented
        # before use); column 0 is the toggle column handled below.
        _i = 0
        for item in ["mass_tvc", "charge_tvc", "current_tvc", "np_tvc", "filename_tvc"]:
            _i += 1
            crt = Gtk.CellRendererText()
            self._builder.get_object(item).pack_start(crt, False)
            self._builder.get_object(item).add_attribute(crt, "text", _i)
        crtog = Gtk.CellRendererToggle()
        self._builder.get_object("toggle_tvc").pack_start(crtog, True)
        self._builder.get_object("toggle_tvc").add_attribute(crtog, "active", 0)
        crtog.connect("toggled", self.cell_toggled, self._datasets_ls, "dat")

        # Loaded Dataset objects; kept parallel to the liststore rows.
        self._datasets = []
def about_program_callback(self, menu_item):
    """
    Show the modal About dialog and destroy it once it is dismissed.

    :param menu_item: the menu item that triggered this callback
    :return: 0
    """
    if self._debug:
        print("About Dialog called by {}".format(menu_item))
    about_dialog = self._builder.get_object("about_dialogbox")
    about_dialog.run()
    about_dialog.destroy()
    return 0
def cell_toggled(self, widget, path, model, mode):
    """
    Callback for the checkbox column of the species TreeView: flips the
    boolean "draw" flag stored in column 0 of the toggled row.

    :param widget: the CellRendererToggle that fired
    :param path: tree path of the toggled row
    :param model: the liststore backing the view
    :param mode: extra user data passed at connect time
    :return: 0
    """
    if self._debug:
        print("cell_toggled was called with widget {} and mode {}".format(widget, mode))
    row = model[path]
    row[0] = not row[0]
    # TODO: Update some draw variable -DW
    return 0
def delete_ds_callback(self, widget):
    """
    Callback for the "Delete Dataset..." button: closes the dataset under
    the TreeView cursor, drops it from the internal list, and removes the
    corresponding liststore row.

    :param widget:
    :return: 0
    """
    if self._debug:
        print("delete_ds_callback was called with widget {}".format(widget))
    tree_path, _focus = self._datasets_tv.get_cursor()
    row_iter = self._datasets_ls.get_iter(tree_path)
    index = tree_path[0]
    self._datasets[index].close()
    del self._datasets[index]
    self._datasets_ls.remove(row_iter)
    return 0
def get_connections(self):
    """
    Map glade signal handler names to the bound methods of this object.
    Handed to Gtk.Builder.connect_signals() during __init__.

    :return: dict of signal-name -> handler
    """
    return {
        "main_quit": self.main_quit,
        "notebook_page_changed": self.notebook_page_changed_callback,
        "on_main_statusbar_text_pushed": self.statusbar_changed_callback,
        "about_program_menu_item_activated": self.about_program_callback,
        "on_load_dataset_activate": self.load_new_ds_callback,
        "on_add_dataset_activate": self.load_add_ds_callback,
        "on_delete_dataset_activate": self.delete_ds_callback,
    }
def initialize(self):
    """
    Perform the remaining start-up steps and announce readiness on the
    status bar.

    :return: 0
    """
    if self._debug:
        print("Called initialize() function.")
    self._status_bar.push(0, "Program Initialized.")
    return 0
def load_add_ds_callback(self, widget):
    """
    Callback for the "Add Dataset..." button: load another dataset and
    append it to the existing list (unlike load_new_ds_callback, which
    replaces all loaded datasets).

    :param widget:
    :return: 0
    """
    if self._debug:
        print("load_add_ds_callback was called with widget {}".format(widget))
    _fd = FileDialog()
    filename = _fd.get_filename(action="open", parent=self._main_window)
    print(filename)
    if filename is None:
        # User cancelled the dialog.
        return 0
    _new_ds = Dataset(debug=self._debug)
    if _new_ds.load_from_file(filename) == 0:
        self._datasets.append(_new_ds)
        # Update the liststore with the *newly added* dataset.
        # Bug fix: the previous code read self._datasets[0] here, so every
        # row added after the first displayed the first dataset's values.
        self._datasets_ls.append([False,
                                  _new_ds.get_a(),
                                  _new_ds.get_q(),
                                  _new_ds.get_i(),
                                  _new_ds.get_npart(),
                                  _new_ds.get_filename()]
                                 )
    if self._debug:
        print("load_add_ds_callback: Finished loading.")
    return 0
def load_new_ds_callback(self, widget):
    """
    Callback for the "Load Dataset..." button: replace all loaded datasets
    with a single freshly loaded one (TraceWin driver) and redraw the
    projection plots.

    :param widget:
    :return: 0
    """
    if self._debug:
        print("load_new_ds_callback was called with widget {}".format(widget))
    _fd = FileDialog()
    filename = _fd.get_filename(action="open", parent=self._main_window)
    print(filename)
    if filename is None:
        # User cancelled the dialog.
        return 0
    dataset = Dataset(debug=self._debug)
    if dataset.load_from_file(filename, driver="TraceWin") == 0:
        self._datasets = [dataset]
        # Update the liststore (should be called dataset_ls...)
        self._datasets_ls.clear()
        self._datasets_ls.append([False,
                                  dataset.get_a(),
                                  dataset.get_q(),
                                  dataset.get_i(),
                                  dataset.get_npart(),
                                  dataset.get_filename()]
                                 )
        if self._debug:
            print("load_new_ds_callback: Finished loading.")
        # Redraw the three projection canvases from the new data
        # (same order as before: x-y, x-x', y-y'; phase-energy untouched).
        for canvas, h_key, v_key in ((self._base_plots_xy, "x", "y"),
                                     (self._base_plots_xxp, "x", "px"),
                                     (self._base_plots_yyp, "y", "py")):
            canvas.clear()
            canvas.scatter(dataset.get(h_key), dataset.get(v_key),
                           c=self._colors[0], s=0.5, edgecolor='')
            canvas.draw_idle()
    return 0
def main_quit(self, widget):
    """
    Shuts down the program (and threads) gracefully by destroying the main
    window and stopping the GTK main loop.

    :param widget: widget that triggered the quit
    :return: 0
    """
    if self._debug:
        print("Called main_quit for {}".format(widget))
    window = self._main_window
    window.destroy()
    Gtk.main_quit()
    return 0
def notebook_page_changed_callback(self, notebook, page, page_num):
    """
    Callback fired when the user switches tabs in the main notebook.
    Currently only logs in debug mode.

    :param notebook: the gtk notebook object
    :param page: the top level child widget of the newly selected page
    :param page_num: zero-based index of the selected page
    :return: 0
    """
    if self._debug:
        print("Debug: Notebook {} changed page to {} (page_num = {})".format(
            notebook, page, page_num))
    return 0
def run(self):
    """
    Start the application: finish initialization, show the maximized main
    window, then enter the GTK main loop (blocks until quit).

    :return: 0
    """
    self.initialize()
    # --- Show the GUI --- #
    window = self._main_window
    window.maximize()
    window.show_all()
    Gtk.main()
    return 0
def statusbar_changed_callback(self, statusbar, context_id, text):
    """
    Mirror every message pushed onto the statusbar into the log text
    buffer, prefixed with a local-time timestamp.

    :param statusbar: the statusbar emitting the signal
    :param context_id: statusbar context id of the pushed message
    :param text: the pushed message text
    :return: 0
    """
    if self._debug:
        print("Called statusbar_changed callback for statusbar {}, ID = {}".format(statusbar, context_id))
    timestamp = time.strftime("%d %b, %Y, %H:%M:%S: ", time.localtime())
    end_iter = self._log_textbuffer.get_end_iter()
    self._log_textbuffer.insert(end_iter, timestamp + text + "\n")
    return 0
if __name__ == "__main__":
    # Entry point: launch the GUI with debug output enabled.
    mydebug = True
    ppp = PyParticleProcessor(debug=mydebug)
    ppp.run()
| {"/py_particle_processor_qt/plotting.py": ["/py_particle_processor_qt/gui/plot_settings.py", "/py_particle_processor_qt/gui/default_plot_settings.py"], "/py_particle_processor_qt/test/__main__.py": ["/py_particle_processor_qt/py_particle_processor_qt.py"], "/py_particle_processor_qt/tools/OrbitTool/OrbitTool.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/OrbitTool/orbittoolgui.py"], "/examples/gui_example.py": ["/py_particle_processor_qt/py_particle_processor_qt.py"], "/py_particle_processor_qt/drivers/OPALDriver/OPALDriver.py": ["/py_particle_processor_qt/drivers/arraywrapper.py", "/py_particle_processor_qt/drivers/abstractdriver.py"], "/py_particle_processor_qt/drivers/COMSOLDriver/COMSOLDriver.py": ["/py_particle_processor_qt/drivers/arraywrapper.py", "/py_particle_processor_qt/drivers/abstractdriver.py"], "/py_particle_processor_qt/drivers/IBSimuDriver/__init__.py": ["/py_particle_processor_qt/drivers/IBSimuDriver/IBSimuDriver.py"], "/py_particle_processor_qt/tools/TranslateTool/__init__.py": ["/py_particle_processor_qt/tools/TranslateTool/TranslateTool.py"], "/py_particle_processor_qt/tools/TranslateTool/TranslateTool.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/TranslateTool/translatetoolgui.py"], "/py_particle_processor_qt/drivers/FreeCADDriver/__init__.py": ["/py_particle_processor_qt/drivers/FreeCADDriver/FreeCADDriver.py"], "/py_particle_processor_qt/tools/AnimateXY/AnimateXY.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/AnimateXY/animateXYgui.py"], "/py_particle_processor_qt/dataset.py": ["/py_particle_processor_qt/drivers/__init__.py"], "/py_particle_processor_qt/tools/OrbitTool/__init__.py": ["/py_particle_processor_qt/tools/OrbitTool/OrbitTool.py"], "/py_particle_processor_qt/generator.py": ["/py_particle_processor_qt/gui/generate_main.py", "/py_particle_processor_qt/gui/generate_error.py", 
"/py_particle_processor_qt/gui/generate_envelope.py", "/py_particle_processor_qt/gui/generate_twiss.py", "/py_particle_processor_qt/drivers/TraceWinDriver/__init__.py"], "/py_particle_processor_qt/drivers/OPALDriver/__init__.py": ["/py_particle_processor_qt/drivers/OPALDriver/OPALDriver.py"], "/py_particle_processor_qt/tools/BeamChar/BeamChar.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/BeamChar/beamchargui.py"], "/py_particle_processor_qt/tools/AnimateXY/__init__.py": ["/py_particle_processor_qt/tools/AnimateXY/AnimateXY.py"], "/py_particle_processor_qt/tools/RotateTool/__init__.py": ["/py_particle_processor_qt/tools/RotateTool/RotateTool.py"], "/py_particle_processor_qt/tools/__init__.py": ["/py_particle_processor_qt/tools/ScaleTool/__init__.py", "/py_particle_processor_qt/tools/TranslateTool/__init__.py", "/py_particle_processor_qt/tools/AnimateXY/__init__.py", "/py_particle_processor_qt/tools/BeamChar/__init__.py", "/py_particle_processor_qt/tools/CollimOPAL/__init__.py", "/py_particle_processor_qt/tools/OrbitTool/__init__.py", "/py_particle_processor_qt/tools/RotateTool/__init__.py"], "/py_particle_processor_qt/tools/BeamChar/__init__.py": ["/py_particle_processor_qt/tools/BeamChar/BeamChar.py"], "/py_particle_processor_qt/drivers/TrackDriver/__init__.py": ["/py_particle_processor_qt/drivers/TrackDriver/TrackDriver.py"], "/py_particle_processor_qt/__init__.py": ["/py_particle_processor_qt/py_particle_processor_qt.py"], "/py_particle_processor_qt/drivers/COMSOLDriver/__init__.py": ["/py_particle_processor_qt/drivers/COMSOLDriver/COMSOLDriver.py"], "/py_particle_processor_qt/tools/RotateTool/RotateTool.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/RotateTool/rotatetoolgui.py"], "/py_particle_processor_qt/tools/ScaleTool/ScaleTool.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/ScaleTool/scaletoolgui.py"], 
"/py_particle_processor_qt/tools/ScaleTool/__init__.py": ["/py_particle_processor_qt/tools/ScaleTool/ScaleTool.py"], "/py_particle_processor_qt/py_particle_processor_qt.py": ["/py_particle_processor_qt/dataset.py", "/py_particle_processor_qt/gui/main_window.py", "/py_particle_processor_qt/gui/species_prompt.py", "/py_particle_processor_qt/plotting.py", "/py_particle_processor_qt/generator.py", "/py_particle_processor_qt/tools/__init__.py"], "/py_particle_processor_qt/drivers/TrackDriver/TrackDriver.py": ["/py_particle_processor_qt/drivers/arraywrapper.py", "/py_particle_processor_qt/drivers/abstractdriver.py"], "/py_particle_processor_qt/drivers/FreeCADDriver/FreeCADDriver.py": ["/py_particle_processor_qt/drivers/abstractdriver.py"], "/py_particle_processor_qt/tools/CollimOPAL/__init__.py": ["/py_particle_processor_qt/tools/CollimOPAL/CollimOPAL.py"], "/py_particle_processor_qt/tools/CollimOPAL/CollimOPAL.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/CollimOPAL/collimOPALgui.py"], "/py_particle_processor_qt/drivers/__init__.py": ["/py_particle_processor_qt/drivers/OPALDriver/__init__.py", "/py_particle_processor_qt/drivers/TraceWinDriver/__init__.py", "/py_particle_processor_qt/drivers/COMSOLDriver/__init__.py", "/py_particle_processor_qt/drivers/IBSimuDriver/__init__.py", "/py_particle_processor_qt/drivers/TrackDriver/__init__.py", "/py_particle_processor_qt/drivers/FreeCADDriver/__init__.py"], "/py_particle_processor_qt/drivers/IBSimuDriver/IBSimuDriver.py": ["/py_particle_processor_qt/drivers/arraywrapper.py", "/py_particle_processor_qt/drivers/abstractdriver.py"]} |
42,382 | DanielWinklehner/py_particle_processor | refs/heads/master | /py_particle_processor_qt/gui/generate_error.py | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'generate_error.ui'
#
# Created by: PyQt5 UI code generator 5.6
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Generate_Error(object):
    """pyuic5-generated UI for the 'Generate_Error' window: a fixed-size
    popup telling the user that not all generator parameters were entered.

    Do not edit by hand -- regenerate from 'generate_error.ui' (see the
    warning in the file header).
    """

    def setupUi(self, Generate_Error):
        """Create and lay out all widgets on *Generate_Error* (a main-window
        style widget: see setCentralWidget below)."""
        Generate_Error.setObjectName("Generate_Error")
        Generate_Error.resize(291, 153)
        # Fixed size policy: the error popup is not meant to be resized.
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(Generate_Error.sizePolicy().hasHeightForWidth())
        Generate_Error.setSizePolicy(sizePolicy)
        self.centralwidget = QtWidgets.QWidget(Generate_Error)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Minimum)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.centralwidget.sizePolicy().hasHeightForWidth())
        self.centralwidget.setSizePolicy(sizePolicy)
        self.centralwidget.setObjectName("centralwidget")
        # Vertical layout holding the message label and the OK button box.
        self.verticalLayoutWidget = QtWidgets.QWidget(self.centralwidget)
        self.verticalLayoutWidget.setGeometry(QtCore.QRect(0, 30, 291, 111))
        self.verticalLayoutWidget.setObjectName("verticalLayoutWidget")
        self.verticalLayout_3 = QtWidgets.QVBoxLayout(self.verticalLayoutWidget)
        self.verticalLayout_3.setContentsMargins(0, 0, 0, 0)
        self.verticalLayout_3.setSpacing(0)
        self.verticalLayout_3.setObjectName("verticalLayout_3")
        self.label = QtWidgets.QLabel(self.verticalLayoutWidget)
        self.label.setObjectName("label")
        self.verticalLayout_3.addWidget(self.label, 0, QtCore.Qt.AlignHCenter)
        self.buttonBox = QtWidgets.QDialogButtonBox(self.verticalLayoutWidget)
        self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Ok)
        self.buttonBox.setObjectName("buttonBox")
        self.verticalLayout_3.addWidget(self.buttonBox, 0, QtCore.Qt.AlignHCenter)
        # Headline label, placed directly on the central widget.
        # NOTE(review): y = -40 puts this label partly above the visible
        # area -- confirm this is intended in generate_error.ui.
        self.dataset_label = QtWidgets.QLabel(self.centralwidget)
        self.dataset_label.setGeometry(QtCore.QRect(50, -40, 201, 107))
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.dataset_label.sizePolicy().hasHeightForWidth())
        self.dataset_label.setSizePolicy(sizePolicy)
        self.dataset_label.setAlignment(QtCore.Qt.AlignCenter)
        self.dataset_label.setObjectName("dataset_label")
        Generate_Error.setCentralWidget(self.centralwidget)

        self.retranslateUi(Generate_Error)
        QtCore.QMetaObject.connectSlotsByName(Generate_Error)

    def retranslateUi(self, Generate_Error):
        """Set all user-visible strings (kept separate so Qt's translation
        machinery can re-run it on locale change)."""
        _translate = QtCore.QCoreApplication.translate
        Generate_Error.setWindowTitle(_translate("Generate_Error", "Error"))
        self.label.setText(_translate("Generate_Error", "Please fill out all fields available."))
        self.dataset_label.setText(_translate("Generate_Error", "Parameter(s) Not Entered!"))
| {"/py_particle_processor_qt/plotting.py": ["/py_particle_processor_qt/gui/plot_settings.py", "/py_particle_processor_qt/gui/default_plot_settings.py"], "/py_particle_processor_qt/test/__main__.py": ["/py_particle_processor_qt/py_particle_processor_qt.py"], "/py_particle_processor_qt/tools/OrbitTool/OrbitTool.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/OrbitTool/orbittoolgui.py"], "/examples/gui_example.py": ["/py_particle_processor_qt/py_particle_processor_qt.py"], "/py_particle_processor_qt/drivers/OPALDriver/OPALDriver.py": ["/py_particle_processor_qt/drivers/arraywrapper.py", "/py_particle_processor_qt/drivers/abstractdriver.py"], "/py_particle_processor_qt/drivers/COMSOLDriver/COMSOLDriver.py": ["/py_particle_processor_qt/drivers/arraywrapper.py", "/py_particle_processor_qt/drivers/abstractdriver.py"], "/py_particle_processor_qt/drivers/IBSimuDriver/__init__.py": ["/py_particle_processor_qt/drivers/IBSimuDriver/IBSimuDriver.py"], "/py_particle_processor_qt/tools/TranslateTool/__init__.py": ["/py_particle_processor_qt/tools/TranslateTool/TranslateTool.py"], "/py_particle_processor_qt/tools/TranslateTool/TranslateTool.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/TranslateTool/translatetoolgui.py"], "/py_particle_processor_qt/drivers/FreeCADDriver/__init__.py": ["/py_particle_processor_qt/drivers/FreeCADDriver/FreeCADDriver.py"], "/py_particle_processor_qt/tools/AnimateXY/AnimateXY.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/AnimateXY/animateXYgui.py"], "/py_particle_processor_qt/dataset.py": ["/py_particle_processor_qt/drivers/__init__.py"], "/py_particle_processor_qt/tools/OrbitTool/__init__.py": ["/py_particle_processor_qt/tools/OrbitTool/OrbitTool.py"], "/py_particle_processor_qt/generator.py": ["/py_particle_processor_qt/gui/generate_main.py", "/py_particle_processor_qt/gui/generate_error.py", 
"/py_particle_processor_qt/gui/generate_envelope.py", "/py_particle_processor_qt/gui/generate_twiss.py", "/py_particle_processor_qt/drivers/TraceWinDriver/__init__.py"], "/py_particle_processor_qt/drivers/OPALDriver/__init__.py": ["/py_particle_processor_qt/drivers/OPALDriver/OPALDriver.py"], "/py_particle_processor_qt/tools/BeamChar/BeamChar.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/BeamChar/beamchargui.py"], "/py_particle_processor_qt/tools/AnimateXY/__init__.py": ["/py_particle_processor_qt/tools/AnimateXY/AnimateXY.py"], "/py_particle_processor_qt/tools/RotateTool/__init__.py": ["/py_particle_processor_qt/tools/RotateTool/RotateTool.py"], "/py_particle_processor_qt/tools/__init__.py": ["/py_particle_processor_qt/tools/ScaleTool/__init__.py", "/py_particle_processor_qt/tools/TranslateTool/__init__.py", "/py_particle_processor_qt/tools/AnimateXY/__init__.py", "/py_particle_processor_qt/tools/BeamChar/__init__.py", "/py_particle_processor_qt/tools/CollimOPAL/__init__.py", "/py_particle_processor_qt/tools/OrbitTool/__init__.py", "/py_particle_processor_qt/tools/RotateTool/__init__.py"], "/py_particle_processor_qt/tools/BeamChar/__init__.py": ["/py_particle_processor_qt/tools/BeamChar/BeamChar.py"], "/py_particle_processor_qt/drivers/TrackDriver/__init__.py": ["/py_particle_processor_qt/drivers/TrackDriver/TrackDriver.py"], "/py_particle_processor_qt/__init__.py": ["/py_particle_processor_qt/py_particle_processor_qt.py"], "/py_particle_processor_qt/drivers/COMSOLDriver/__init__.py": ["/py_particle_processor_qt/drivers/COMSOLDriver/COMSOLDriver.py"], "/py_particle_processor_qt/tools/RotateTool/RotateTool.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/RotateTool/rotatetoolgui.py"], "/py_particle_processor_qt/tools/ScaleTool/ScaleTool.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/ScaleTool/scaletoolgui.py"], 
"/py_particle_processor_qt/tools/ScaleTool/__init__.py": ["/py_particle_processor_qt/tools/ScaleTool/ScaleTool.py"], "/py_particle_processor_qt/py_particle_processor_qt.py": ["/py_particle_processor_qt/dataset.py", "/py_particle_processor_qt/gui/main_window.py", "/py_particle_processor_qt/gui/species_prompt.py", "/py_particle_processor_qt/plotting.py", "/py_particle_processor_qt/generator.py", "/py_particle_processor_qt/tools/__init__.py"], "/py_particle_processor_qt/drivers/TrackDriver/TrackDriver.py": ["/py_particle_processor_qt/drivers/arraywrapper.py", "/py_particle_processor_qt/drivers/abstractdriver.py"], "/py_particle_processor_qt/drivers/FreeCADDriver/FreeCADDriver.py": ["/py_particle_processor_qt/drivers/abstractdriver.py"], "/py_particle_processor_qt/tools/CollimOPAL/__init__.py": ["/py_particle_processor_qt/tools/CollimOPAL/CollimOPAL.py"], "/py_particle_processor_qt/tools/CollimOPAL/CollimOPAL.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/CollimOPAL/collimOPALgui.py"], "/py_particle_processor_qt/drivers/__init__.py": ["/py_particle_processor_qt/drivers/OPALDriver/__init__.py", "/py_particle_processor_qt/drivers/TraceWinDriver/__init__.py", "/py_particle_processor_qt/drivers/COMSOLDriver/__init__.py", "/py_particle_processor_qt/drivers/IBSimuDriver/__init__.py", "/py_particle_processor_qt/drivers/TrackDriver/__init__.py", "/py_particle_processor_qt/drivers/FreeCADDriver/__init__.py"], "/py_particle_processor_qt/drivers/IBSimuDriver/IBSimuDriver.py": ["/py_particle_processor_qt/drivers/arraywrapper.py", "/py_particle_processor_qt/drivers/abstractdriver.py"]} |
42,383 | DanielWinklehner/py_particle_processor | refs/heads/master | /py_particle_processor_qt/tools/BeamChar/beamchargui.py | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'beamchargui.ui'
#
# Created by: PyQt5 UI code generator 5.6
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_BeamChar(object):
    """pyuic5-generated UI for the BeamChar tool's plot-selection window:
    a grid of labeled checkboxes choosing which beam characteristics to
    plot, plus OK/Cancel buttons.

    Do not edit by hand -- regenerate from 'beamchargui.ui' (see the
    warning in the file header).
    """

    def setupUi(self, BeamChar):
        """Create and lay out all widgets on *BeamChar* (a main-window style
        widget: see setCentralWidget below)."""
        BeamChar.setObjectName("BeamChar")
        BeamChar.resize(421, 303)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(BeamChar.sizePolicy().hasHeightForWidth())
        BeamChar.setSizePolicy(sizePolicy)
        self.centralwidget = QtWidgets.QWidget(BeamChar)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Minimum)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.centralwidget.sizePolicy().hasHeightForWidth())
        self.centralwidget.setSizePolicy(sizePolicy)
        self.centralwidget.setObjectName("centralwidget")
        # Outer vertical layout: intro label, checkbox grid, spacer, buttons.
        self.verticalLayoutWidget = QtWidgets.QWidget(self.centralwidget)
        self.verticalLayoutWidget.setGeometry(QtCore.QRect(10, 3, 401, 299))
        self.verticalLayoutWidget.setObjectName("verticalLayoutWidget")
        self.verticalLayout_3 = QtWidgets.QVBoxLayout(self.verticalLayoutWidget)
        self.verticalLayout_3.setSizeConstraint(QtWidgets.QLayout.SetMinimumSize)
        self.verticalLayout_3.setContentsMargins(0, 0, 0, 0)
        self.verticalLayout_3.setSpacing(0)
        self.verticalLayout_3.setObjectName("verticalLayout_3")
        self.label = QtWidgets.QLabel(self.verticalLayoutWidget)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.label.sizePolicy().hasHeightForWidth())
        self.label.setSizePolicy(sizePolicy)
        self.label.setMaximumSize(QtCore.QSize(16777215, 200))
        self.label.setObjectName("label")
        self.verticalLayout_3.addWidget(self.label)
        # Checkbox grid: column 0 holds the descriptive labels, column 1 the
        # checkboxes. Grid rows pair up as (label, checkbox): row 2 rms,
        # row 4 halo, row 5 turnsep, row 6 centroid, row 8 xz, row 9 ehist,
        # row 10 intens; rows 1 and 7 are section headings without boxes.
        self.gridLayout = QtWidgets.QGridLayout()
        self.gridLayout.setObjectName("gridLayout")
        self.label_9 = QtWidgets.QLabel(self.verticalLayoutWidget)
        self.label_9.setObjectName("label_9")
        self.gridLayout.addWidget(self.label_9, 7, 0, 1, 1)
        self.intens = QtWidgets.QCheckBox(self.verticalLayoutWidget)
        self.intens.setText("")
        self.intens.setObjectName("intens")
        self.gridLayout.addWidget(self.intens, 10, 1, 1, 1, QtCore.Qt.AlignHCenter)
        self.label_8 = QtWidgets.QLabel(self.verticalLayoutWidget)
        self.label_8.setObjectName("label_8")
        self.gridLayout.addWidget(self.label_8, 1, 0, 1, 1)
        self.centroid = QtWidgets.QCheckBox(self.verticalLayoutWidget)
        self.centroid.setText("")
        self.centroid.setObjectName("centroid")
        self.gridLayout.addWidget(self.centroid, 6, 1, 1, 1, QtCore.Qt.AlignHCenter)
        self.label_4 = QtWidgets.QLabel(self.verticalLayoutWidget)
        self.label_4.setObjectName("label_4")
        self.gridLayout.addWidget(self.label_4, 6, 0, 1, 1, QtCore.Qt.AlignHCenter)
        self.rms = QtWidgets.QCheckBox(self.verticalLayoutWidget)
        self.rms.setText("")
        self.rms.setObjectName("rms")
        self.gridLayout.addWidget(self.rms, 2, 1, 1, 1, QtCore.Qt.AlignHCenter)
        self.halo = QtWidgets.QCheckBox(self.verticalLayoutWidget)
        self.halo.setText("")
        self.halo.setObjectName("halo")
        self.gridLayout.addWidget(self.halo, 4, 1, 1, 1, QtCore.Qt.AlignHCenter)
        self.label_3 = QtWidgets.QLabel(self.verticalLayoutWidget)
        self.label_3.setObjectName("label_3")
        self.gridLayout.addWidget(self.label_3, 4, 0, 1, 1, QtCore.Qt.AlignHCenter)
        self.label_2 = QtWidgets.QLabel(self.verticalLayoutWidget)
        self.label_2.setMaximumSize(QtCore.QSize(16777215, 100))
        self.label_2.setObjectName("label_2")
        self.gridLayout.addWidget(self.label_2, 2, 0, 1, 1, QtCore.Qt.AlignHCenter)
        self.label_6 = QtWidgets.QLabel(self.verticalLayoutWidget)
        self.label_6.setObjectName("label_6")
        self.gridLayout.addWidget(self.label_6, 9, 0, 1, 1, QtCore.Qt.AlignHCenter)
        self.turnsep = QtWidgets.QCheckBox(self.verticalLayoutWidget)
        self.turnsep.setText("")
        self.turnsep.setObjectName("turnsep")
        self.gridLayout.addWidget(self.turnsep, 5, 1, 1, 1, QtCore.Qt.AlignHCenter)
        self.label_5 = QtWidgets.QLabel(self.verticalLayoutWidget)
        self.label_5.setObjectName("label_5")
        self.gridLayout.addWidget(self.label_5, 5, 0, 1, 1, QtCore.Qt.AlignHCenter)
        self.ehist = QtWidgets.QCheckBox(self.verticalLayoutWidget)
        self.ehist.setText("")
        self.ehist.setObjectName("ehist")
        self.gridLayout.addWidget(self.ehist, 9, 1, 1, 1, QtCore.Qt.AlignHCenter)
        self.label_7 = QtWidgets.QLabel(self.verticalLayoutWidget)
        self.label_7.setObjectName("label_7")
        self.gridLayout.addWidget(self.label_7, 10, 0, 1, 1, QtCore.Qt.AlignHCenter)
        self.label_10 = QtWidgets.QLabel(self.verticalLayoutWidget)
        self.label_10.setObjectName("label_10")
        self.gridLayout.addWidget(self.label_10, 8, 0, 1, 1, QtCore.Qt.AlignHCenter)
        self.xz = QtWidgets.QCheckBox(self.verticalLayoutWidget)
        self.xz.setText("")
        self.xz.setObjectName("xz")
        self.gridLayout.addWidget(self.xz, 8, 1, 1, 1, QtCore.Qt.AlignHCenter)
        self.verticalLayout_3.addLayout(self.gridLayout)
        spacerItem = QtWidgets.QSpacerItem(20, 10, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
        self.verticalLayout_3.addItem(spacerItem)
        self.buttonBox = QtWidgets.QDialogButtonBox(self.verticalLayoutWidget)
        self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok)
        self.buttonBox.setObjectName("buttonBox")
        self.verticalLayout_3.addWidget(self.buttonBox)
        BeamChar.setCentralWidget(self.centralwidget)

        self.retranslateUi(BeamChar)
        QtCore.QMetaObject.connectSlotsByName(BeamChar)

    def retranslateUi(self, BeamChar):
        """Set all user-visible strings (kept separate so Qt's translation
        machinery can re-run it on locale change)."""
        _translate = QtCore.QCoreApplication.translate
        BeamChar.setWindowTitle(_translate("BeamChar", "Choosing Plots"))
        self.label.setText(_translate("BeamChar", "<html><head/><body><p>Please select the beam characteristic(s) you would like to plot.</p></body></html>"))
        self.label_9.setText(_translate("BeamChar", "Probes"))
        self.label_8.setText(_translate("BeamChar", "Full Simulation"))
        self.label_4.setText(_translate("BeamChar", "Centroid Position"))
        self.label_3.setText(_translate("BeamChar", "Halo Parameter"))
        self.label_2.setText(_translate("BeamChar", "RMS Beam Size"))
        self.label_6.setText(_translate("BeamChar", "Energy Histogram"))
        self.label_5.setText(_translate("BeamChar", "Turn Separation"))
        self.label_7.setText(_translate("BeamChar", "Beam Intensity vs Radius"))
        self.label_10.setText(_translate("BeamChar", "R-Z Scatter Plot"))
| {"/py_particle_processor_qt/plotting.py": ["/py_particle_processor_qt/gui/plot_settings.py", "/py_particle_processor_qt/gui/default_plot_settings.py"], "/py_particle_processor_qt/test/__main__.py": ["/py_particle_processor_qt/py_particle_processor_qt.py"], "/py_particle_processor_qt/tools/OrbitTool/OrbitTool.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/OrbitTool/orbittoolgui.py"], "/examples/gui_example.py": ["/py_particle_processor_qt/py_particle_processor_qt.py"], "/py_particle_processor_qt/drivers/OPALDriver/OPALDriver.py": ["/py_particle_processor_qt/drivers/arraywrapper.py", "/py_particle_processor_qt/drivers/abstractdriver.py"], "/py_particle_processor_qt/drivers/COMSOLDriver/COMSOLDriver.py": ["/py_particle_processor_qt/drivers/arraywrapper.py", "/py_particle_processor_qt/drivers/abstractdriver.py"], "/py_particle_processor_qt/drivers/IBSimuDriver/__init__.py": ["/py_particle_processor_qt/drivers/IBSimuDriver/IBSimuDriver.py"], "/py_particle_processor_qt/tools/TranslateTool/__init__.py": ["/py_particle_processor_qt/tools/TranslateTool/TranslateTool.py"], "/py_particle_processor_qt/tools/TranslateTool/TranslateTool.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/TranslateTool/translatetoolgui.py"], "/py_particle_processor_qt/drivers/FreeCADDriver/__init__.py": ["/py_particle_processor_qt/drivers/FreeCADDriver/FreeCADDriver.py"], "/py_particle_processor_qt/tools/AnimateXY/AnimateXY.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/AnimateXY/animateXYgui.py"], "/py_particle_processor_qt/dataset.py": ["/py_particle_processor_qt/drivers/__init__.py"], "/py_particle_processor_qt/tools/OrbitTool/__init__.py": ["/py_particle_processor_qt/tools/OrbitTool/OrbitTool.py"], "/py_particle_processor_qt/generator.py": ["/py_particle_processor_qt/gui/generate_main.py", "/py_particle_processor_qt/gui/generate_error.py", 
"/py_particle_processor_qt/gui/generate_envelope.py", "/py_particle_processor_qt/gui/generate_twiss.py", "/py_particle_processor_qt/drivers/TraceWinDriver/__init__.py"], "/py_particle_processor_qt/drivers/OPALDriver/__init__.py": ["/py_particle_processor_qt/drivers/OPALDriver/OPALDriver.py"], "/py_particle_processor_qt/tools/BeamChar/BeamChar.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/BeamChar/beamchargui.py"], "/py_particle_processor_qt/tools/AnimateXY/__init__.py": ["/py_particle_processor_qt/tools/AnimateXY/AnimateXY.py"], "/py_particle_processor_qt/tools/RotateTool/__init__.py": ["/py_particle_processor_qt/tools/RotateTool/RotateTool.py"], "/py_particle_processor_qt/tools/__init__.py": ["/py_particle_processor_qt/tools/ScaleTool/__init__.py", "/py_particle_processor_qt/tools/TranslateTool/__init__.py", "/py_particle_processor_qt/tools/AnimateXY/__init__.py", "/py_particle_processor_qt/tools/BeamChar/__init__.py", "/py_particle_processor_qt/tools/CollimOPAL/__init__.py", "/py_particle_processor_qt/tools/OrbitTool/__init__.py", "/py_particle_processor_qt/tools/RotateTool/__init__.py"], "/py_particle_processor_qt/tools/BeamChar/__init__.py": ["/py_particle_processor_qt/tools/BeamChar/BeamChar.py"], "/py_particle_processor_qt/drivers/TrackDriver/__init__.py": ["/py_particle_processor_qt/drivers/TrackDriver/TrackDriver.py"], "/py_particle_processor_qt/__init__.py": ["/py_particle_processor_qt/py_particle_processor_qt.py"], "/py_particle_processor_qt/drivers/COMSOLDriver/__init__.py": ["/py_particle_processor_qt/drivers/COMSOLDriver/COMSOLDriver.py"], "/py_particle_processor_qt/tools/RotateTool/RotateTool.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/RotateTool/rotatetoolgui.py"], "/py_particle_processor_qt/tools/ScaleTool/ScaleTool.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/ScaleTool/scaletoolgui.py"], 
"/py_particle_processor_qt/tools/ScaleTool/__init__.py": ["/py_particle_processor_qt/tools/ScaleTool/ScaleTool.py"], "/py_particle_processor_qt/py_particle_processor_qt.py": ["/py_particle_processor_qt/dataset.py", "/py_particle_processor_qt/gui/main_window.py", "/py_particle_processor_qt/gui/species_prompt.py", "/py_particle_processor_qt/plotting.py", "/py_particle_processor_qt/generator.py", "/py_particle_processor_qt/tools/__init__.py"], "/py_particle_processor_qt/drivers/TrackDriver/TrackDriver.py": ["/py_particle_processor_qt/drivers/arraywrapper.py", "/py_particle_processor_qt/drivers/abstractdriver.py"], "/py_particle_processor_qt/drivers/FreeCADDriver/FreeCADDriver.py": ["/py_particle_processor_qt/drivers/abstractdriver.py"], "/py_particle_processor_qt/tools/CollimOPAL/__init__.py": ["/py_particle_processor_qt/tools/CollimOPAL/CollimOPAL.py"], "/py_particle_processor_qt/tools/CollimOPAL/CollimOPAL.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/CollimOPAL/collimOPALgui.py"], "/py_particle_processor_qt/drivers/__init__.py": ["/py_particle_processor_qt/drivers/OPALDriver/__init__.py", "/py_particle_processor_qt/drivers/TraceWinDriver/__init__.py", "/py_particle_processor_qt/drivers/COMSOLDriver/__init__.py", "/py_particle_processor_qt/drivers/IBSimuDriver/__init__.py", "/py_particle_processor_qt/drivers/TrackDriver/__init__.py", "/py_particle_processor_qt/drivers/FreeCADDriver/__init__.py"], "/py_particle_processor_qt/drivers/IBSimuDriver/IBSimuDriver.py": ["/py_particle_processor_qt/drivers/arraywrapper.py", "/py_particle_processor_qt/drivers/abstractdriver.py"]} |
42,384 | DanielWinklehner/py_particle_processor | refs/heads/master | /py_particle_processor_qt/tools/CollimOPAL/CollimOPAL.py | from ..abstract_tool import AbstractTool
from PyQt5.QtWidgets import QMainWindow
from .collimOPALgui import Ui_CollimOPAL
import numpy as np
import string
import matplotlib.pyplot as plt
import os
# Global switch: when True, the generated collimator geometry is also drawn
# with matplotlib for visual inspection.
DEBUG = True

"""
A tool for generating collimator code for OPAL.
"""
class CollimOPAL(AbstractTool):
    """Tool that generates OPAL ``CCOLLIMATOR`` input snippets along the mean orbit.

    The user picks a time step and geometry parameters in a small Qt dialog;
    the tool then walks along the mean particle orbit (read from an h5py-style
    datasource with "Step#N" groups) and emits one pair of collimator jaws per
    segment, oriented perpendicular to the local mean momentum.
    """

    def __init__(self, parent):
        super(CollimOPAL, self).__init__(parent)
        self._name = "Generate Collimator"
        self._parent = parent
        self._filename = ""
        self._settings = {}           # filled by apply_settings() from the GUI
        self._datasource = None       # h5py datasource for orbit data

        # --- Initialize the GUI --- #
        self._collimOPALWindow = QMainWindow()
        self._collimOPALGUI = Ui_CollimOPAL()
        self._collimOPALGUI.setupUi(self._collimOPALWindow)
        self._collimOPALGUI.buttonBox.accepted.connect(self.callback_apply)
        self._collimOPALGUI.buttonBox.rejected.connect(self._collimOPALWindow.close)

        self._has_gui = True
        self._need_selection = True
        self._min_selections = 1
        self._max_selections = 1
        self._redraw_on_exit = False

        # Debug plotting: prepare a matplotlib axis the generator draws into.
        if DEBUG:
            self._fig = plt.figure()
            plt.rc('font', **{'family': 'serif', 'serif': ['Computer Modern']})
            plt.rc('text', usetex=True)
            plt.rc('grid', linestyle=':')
            self._ax = plt.gca()
            self._ax.set_xlabel("x (mm)")
            self._ax.set_ylabel("y (mm)")

    def apply_settings(self):
        """Read the collimator parameters from the GUI fields into self._settings.

        Raises ValueError if a field does not parse as int/float.
        """
        self._settings["step"] = int(self._collimOPALGUI.step.text())      # central time step
        self._settings["width"] = float(self._collimOPALGUI.gap.text())    # half-gap to the jaw (mm)
        self._settings["length"] = float(self._collimOPALGUI.hl.text())    # jaw length (mm)
        self._settings["cwidth"] = float(self._collimOPALGUI.w.text())     # collimator WIDTH / segment spacing (mm)
        self._settings["label"] = int(self._collimOPALGUI.num.text())      # numeric label for element names
        self._settings["nseg"] = int(self._collimOPALGUI.nseg.text())      # number of segments

    def callback_apply(self):
        """GUI 'Ok' handler: read settings, generate the script, display and save it."""
        self.apply_settings()

        script = self.gen_script()
        self._collimOPALGUI.textBrowser.setText(script)

        # Also dump the generated snippet to $HOME/collim.txt for convenience.
        with open(os.path.join(os.environ.get("HOME", ""), "collim.txt"), 'w') as outfile:
            outfile.write(script)

        if DEBUG:
            plt.show()

    # NOTE: a commented-out static helper read_data(fn) that parsed the design
    # particle ("ID0") lines from a text file used to live here; removed for
    # readability -- see VCS history if it needs to be revived.

    def get_xy_mean_at_step_mm(self, step):
        """Return the mean (x, y) of all particles at *step*, converted m -> mm."""
        x = 1e3 * np.mean(np.array(self._datasource["Step#{}".format(step)]["x"]))
        y = 1e3 * np.mean(np.array(self._datasource["Step#{}".format(step)]["y"]))
        return x, y

    def gen_script(self):
        """Build and return the OPAL input snippet for all collimator segments.

        Segment n = 0 sits at the user-selected step; odd n are placed ahead of
        it and even n > 0 behind it, each centered where the mean orbit is
        ceil(n/2) * cwidth away from the central segment.
        """
        script = ""
        # Segment name suffixes a, b, c, ... Two letters are consumed per
        # segment, so nseg is effectively limited to 13 with this alphabet.
        letters = list(string.ascii_lowercase)

        # Central collimator placement
        x_cent, y_cent = self.get_xy_mean_at_step_mm(self._settings["step"])

        for n in range(self._settings["nseg"]):
            i = self._settings["step"]

            if n != 0:  # n = 0 indicates the central segment
                x_temp, y_temp = self.get_xy_mean_at_step_mm(i)
                # Target distance from the center: ceil(n/2) * cwidth.
                if n % 2 == 1:  # odd n: walk forward along the orbit
                    while np.sqrt(np.square(x_cent - x_temp) + np.square(y_cent - y_temp)) \
                            < (int(n / 2) + (n % 2 > 0)) * self._settings["cwidth"]:
                        i += 1
                        x_temp, y_temp = self.get_xy_mean_at_step_mm(i)
                else:  # even n > 0: walk backward along the orbit
                    while np.sqrt(np.square(x_cent - x_temp) + np.square(y_cent - y_temp)) \
                            < (int(n / 2) + (n % 2 > 0)) * self._settings["cwidth"]:
                        i -= 1
                        x_temp, y_temp = self.get_xy_mean_at_step_mm(i)

            # Mean position and mean momentum direction at the chosen step.
            x_new, y_new = self.get_xy_mean_at_step_mm(i)
            px_new = np.mean(np.array(self._datasource["Step#{}".format(i)]["px"]))
            py_new = np.mean(np.array(self._datasource["Step#{}".format(i)]["py"]))

            collim = self.gen_collim(x_new, y_new, px_new, py_new)

            # Two jaws per segment: one on each side of the beam.
            script += "Collim_{}{}:CCOLLIMATOR, XSTART={}, YSTART={}, XEND={}, YEND={}, WIDTH={};\n\n" \
                .format(self._settings["label"], letters[2 * n], collim["x1a"], collim["y1a"], collim["x1b"],
                        collim["y1b"], self._settings["cwidth"])
            script += "Collim_{}{}:CCOLLIMATOR, XSTART={}, YSTART={}, XEND={}, YEND={}, WIDTH={};\n\n" \
                .format(self._settings["label"], letters[2 * n + 1], collim["x2a"], collim["y2a"], collim["x2b"],
                        collim["y2b"], self._settings["cwidth"])

            if DEBUG:
                plt.plot([collim["x1a"], collim["x1b"]], [collim["y1a"], collim["y1b"]])
                plt.plot([collim["x2a"], collim["x2b"]], [collim["y2a"], collim["y2b"]])

        if DEBUG:
            self._ax.set_title("Collimator at step {} in global frame".format(self._settings["step"]))
            self._ax.set_aspect('equal')
            x_plot = 1e3 * np.array(self._datasource["Step#{}".format(self._settings["step"])]["x"])
            y_plot = 1e3 * np.array(self._datasource["Step#{}".format(self._settings["step"])]["y"])
            plt.plot(x_plot, y_plot, 'o', alpha=0.8, markersize=0.01)

        return script

    def gen_collim(self, x, y, px, py):
        """Return the endpoint coordinates of the two jaws at (x, y).

        The jaws lie perpendicular to the mean momentum direction (px, py):
        each starts ``width`` away from the orbit and extends a further
        ``length`` outward.
        """
        # Angle of the momentum vector; arctan2 handles all quadrants and is
        # equivalent to the previous arccos(px/|p|) with a sign flip for py < 0.
        theta = np.arctan2(py, px)
        theta1 = theta + np.pi/2   # jaw on one side of the orbit
        theta2 = theta - np.pi/2   # jaw on the other side

        # Calculate coordinates: inner ("a") and outer ("b") jaw endpoints.
        x1a = self._settings["width"] * np.cos(theta1) + x
        x2a = self._settings["width"] * np.cos(theta2) + x
        x1b = (self._settings["width"] + self._settings["length"]) * np.cos(theta1) + x
        x2b = (self._settings["width"] + self._settings["length"]) * np.cos(theta2) + x
        y1a = self._settings["width"] * np.sin(theta1) + y
        y2a = self._settings["width"] * np.sin(theta2) + y
        y1b = (self._settings["width"] + self._settings["length"]) * np.sin(theta1) + y
        y2b = (self._settings["width"] + self._settings["length"]) * np.sin(theta2) + y

        return {"x1a": x1a, "x2a": x2a, "x1b": x1b, "x2b": x2b, "y1a": y1a, "y2a": y2a, "y1b": y1b, "y2b": y2b}

    # NOTE: a commented-out static helper read(filename) with a hard-coded
    # absolute Windows path used to live here; removed -- see VCS history.

    def run(self):
        """Center the tool window on the screen and show it."""
        # --- Calculate the positions to center the window --- #
        screen_size = self._parent.screen_size()
        # QWidget.move() requires integer pixel coordinates; passing the raw
        # 0.5 * (...) floats raises TypeError with PyQt5 on Python 3.
        _x = int(0.5 * (screen_size.width() - self._collimOPALWindow.width()))
        _y = int(0.5 * (screen_size.height() - self._collimOPALWindow.height()))

        # --- Show the GUI --- #
        self._collimOPALWindow.show()
        self._collimOPALWindow.move(_x, _y)

    def open_gui(self):
        """Entry point called by the framework once a dataset is selected."""
        # Get parent dataset/source for orbit data
        dataset = self._selections[0]
        self._datasource = dataset.get_datasource()
        self.run()
| {"/py_particle_processor_qt/plotting.py": ["/py_particle_processor_qt/gui/plot_settings.py", "/py_particle_processor_qt/gui/default_plot_settings.py"], "/py_particle_processor_qt/test/__main__.py": ["/py_particle_processor_qt/py_particle_processor_qt.py"], "/py_particle_processor_qt/tools/OrbitTool/OrbitTool.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/OrbitTool/orbittoolgui.py"], "/examples/gui_example.py": ["/py_particle_processor_qt/py_particle_processor_qt.py"], "/py_particle_processor_qt/drivers/OPALDriver/OPALDriver.py": ["/py_particle_processor_qt/drivers/arraywrapper.py", "/py_particle_processor_qt/drivers/abstractdriver.py"], "/py_particle_processor_qt/drivers/COMSOLDriver/COMSOLDriver.py": ["/py_particle_processor_qt/drivers/arraywrapper.py", "/py_particle_processor_qt/drivers/abstractdriver.py"], "/py_particle_processor_qt/drivers/IBSimuDriver/__init__.py": ["/py_particle_processor_qt/drivers/IBSimuDriver/IBSimuDriver.py"], "/py_particle_processor_qt/tools/TranslateTool/__init__.py": ["/py_particle_processor_qt/tools/TranslateTool/TranslateTool.py"], "/py_particle_processor_qt/tools/TranslateTool/TranslateTool.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/TranslateTool/translatetoolgui.py"], "/py_particle_processor_qt/drivers/FreeCADDriver/__init__.py": ["/py_particle_processor_qt/drivers/FreeCADDriver/FreeCADDriver.py"], "/py_particle_processor_qt/tools/AnimateXY/AnimateXY.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/AnimateXY/animateXYgui.py"], "/py_particle_processor_qt/dataset.py": ["/py_particle_processor_qt/drivers/__init__.py"], "/py_particle_processor_qt/tools/OrbitTool/__init__.py": ["/py_particle_processor_qt/tools/OrbitTool/OrbitTool.py"], "/py_particle_processor_qt/generator.py": ["/py_particle_processor_qt/gui/generate_main.py", "/py_particle_processor_qt/gui/generate_error.py", 
"/py_particle_processor_qt/gui/generate_envelope.py", "/py_particle_processor_qt/gui/generate_twiss.py", "/py_particle_processor_qt/drivers/TraceWinDriver/__init__.py"], "/py_particle_processor_qt/drivers/OPALDriver/__init__.py": ["/py_particle_processor_qt/drivers/OPALDriver/OPALDriver.py"], "/py_particle_processor_qt/tools/BeamChar/BeamChar.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/BeamChar/beamchargui.py"], "/py_particle_processor_qt/tools/AnimateXY/__init__.py": ["/py_particle_processor_qt/tools/AnimateXY/AnimateXY.py"], "/py_particle_processor_qt/tools/RotateTool/__init__.py": ["/py_particle_processor_qt/tools/RotateTool/RotateTool.py"], "/py_particle_processor_qt/tools/__init__.py": ["/py_particle_processor_qt/tools/ScaleTool/__init__.py", "/py_particle_processor_qt/tools/TranslateTool/__init__.py", "/py_particle_processor_qt/tools/AnimateXY/__init__.py", "/py_particle_processor_qt/tools/BeamChar/__init__.py", "/py_particle_processor_qt/tools/CollimOPAL/__init__.py", "/py_particle_processor_qt/tools/OrbitTool/__init__.py", "/py_particle_processor_qt/tools/RotateTool/__init__.py"], "/py_particle_processor_qt/tools/BeamChar/__init__.py": ["/py_particle_processor_qt/tools/BeamChar/BeamChar.py"], "/py_particle_processor_qt/drivers/TrackDriver/__init__.py": ["/py_particle_processor_qt/drivers/TrackDriver/TrackDriver.py"], "/py_particle_processor_qt/__init__.py": ["/py_particle_processor_qt/py_particle_processor_qt.py"], "/py_particle_processor_qt/drivers/COMSOLDriver/__init__.py": ["/py_particle_processor_qt/drivers/COMSOLDriver/COMSOLDriver.py"], "/py_particle_processor_qt/tools/RotateTool/RotateTool.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/RotateTool/rotatetoolgui.py"], "/py_particle_processor_qt/tools/ScaleTool/ScaleTool.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/ScaleTool/scaletoolgui.py"], 
"/py_particle_processor_qt/tools/ScaleTool/__init__.py": ["/py_particle_processor_qt/tools/ScaleTool/ScaleTool.py"], "/py_particle_processor_qt/py_particle_processor_qt.py": ["/py_particle_processor_qt/dataset.py", "/py_particle_processor_qt/gui/main_window.py", "/py_particle_processor_qt/gui/species_prompt.py", "/py_particle_processor_qt/plotting.py", "/py_particle_processor_qt/generator.py", "/py_particle_processor_qt/tools/__init__.py"], "/py_particle_processor_qt/drivers/TrackDriver/TrackDriver.py": ["/py_particle_processor_qt/drivers/arraywrapper.py", "/py_particle_processor_qt/drivers/abstractdriver.py"], "/py_particle_processor_qt/drivers/FreeCADDriver/FreeCADDriver.py": ["/py_particle_processor_qt/drivers/abstractdriver.py"], "/py_particle_processor_qt/tools/CollimOPAL/__init__.py": ["/py_particle_processor_qt/tools/CollimOPAL/CollimOPAL.py"], "/py_particle_processor_qt/tools/CollimOPAL/CollimOPAL.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/CollimOPAL/collimOPALgui.py"], "/py_particle_processor_qt/drivers/__init__.py": ["/py_particle_processor_qt/drivers/OPALDriver/__init__.py", "/py_particle_processor_qt/drivers/TraceWinDriver/__init__.py", "/py_particle_processor_qt/drivers/COMSOLDriver/__init__.py", "/py_particle_processor_qt/drivers/IBSimuDriver/__init__.py", "/py_particle_processor_qt/drivers/TrackDriver/__init__.py", "/py_particle_processor_qt/drivers/FreeCADDriver/__init__.py"], "/py_particle_processor_qt/drivers/IBSimuDriver/IBSimuDriver.py": ["/py_particle_processor_qt/drivers/arraywrapper.py", "/py_particle_processor_qt/drivers/abstractdriver.py"]} |
42,385 | DanielWinklehner/py_particle_processor | refs/heads/master | /py_particle_processor_qt/drivers/__init__.py | from py_particle_processor_qt.drivers.OPALDriver import *
from py_particle_processor_qt.drivers.TraceWinDriver import *
from py_particle_processor_qt.drivers.COMSOLDriver import *
from py_particle_processor_qt.drivers.IBSimuDriver import *
from py_particle_processor_qt.drivers.TrackDriver import *
from py_particle_processor_qt.drivers.FreeCADDriver import *
"""
The driver mapping contains the information needed for the ImportExportDriver class to wrap around the drivers
Rules:
key has to be unique and one continuous 'word'
several extensions can be specified for one driver
"""
# Spec table: (unique key, driver class, supported file extensions).
_driver_specs = (
    ('OPAL', OPALDriver, ['.h5', '.dat']),
    ('TraceWin', TraceWinDriver, ['.txt', '.dat']),
    ('COMSOL', COMSOLDriver, ['.txt']),
    ('IBSimu', IBSimuDriver, ['.txt']),
    ('Track', TrackDriver, ['.out']),
    ('FreeCAD', FreeCADDriver, ['.dat']),
)

# Expand the spec table into the mapping consumed by ImportExportDriver.
driver_mapping = {key: {'driver': cls, 'extensions': exts}
                  for key, cls, exts in _driver_specs}
| {"/py_particle_processor_qt/plotting.py": ["/py_particle_processor_qt/gui/plot_settings.py", "/py_particle_processor_qt/gui/default_plot_settings.py"], "/py_particle_processor_qt/test/__main__.py": ["/py_particle_processor_qt/py_particle_processor_qt.py"], "/py_particle_processor_qt/tools/OrbitTool/OrbitTool.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/OrbitTool/orbittoolgui.py"], "/examples/gui_example.py": ["/py_particle_processor_qt/py_particle_processor_qt.py"], "/py_particle_processor_qt/drivers/OPALDriver/OPALDriver.py": ["/py_particle_processor_qt/drivers/arraywrapper.py", "/py_particle_processor_qt/drivers/abstractdriver.py"], "/py_particle_processor_qt/drivers/COMSOLDriver/COMSOLDriver.py": ["/py_particle_processor_qt/drivers/arraywrapper.py", "/py_particle_processor_qt/drivers/abstractdriver.py"], "/py_particle_processor_qt/drivers/IBSimuDriver/__init__.py": ["/py_particle_processor_qt/drivers/IBSimuDriver/IBSimuDriver.py"], "/py_particle_processor_qt/tools/TranslateTool/__init__.py": ["/py_particle_processor_qt/tools/TranslateTool/TranslateTool.py"], "/py_particle_processor_qt/tools/TranslateTool/TranslateTool.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/TranslateTool/translatetoolgui.py"], "/py_particle_processor_qt/drivers/FreeCADDriver/__init__.py": ["/py_particle_processor_qt/drivers/FreeCADDriver/FreeCADDriver.py"], "/py_particle_processor_qt/tools/AnimateXY/AnimateXY.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/AnimateXY/animateXYgui.py"], "/py_particle_processor_qt/dataset.py": ["/py_particle_processor_qt/drivers/__init__.py"], "/py_particle_processor_qt/tools/OrbitTool/__init__.py": ["/py_particle_processor_qt/tools/OrbitTool/OrbitTool.py"], "/py_particle_processor_qt/generator.py": ["/py_particle_processor_qt/gui/generate_main.py", "/py_particle_processor_qt/gui/generate_error.py", 
"/py_particle_processor_qt/gui/generate_envelope.py", "/py_particle_processor_qt/gui/generate_twiss.py", "/py_particle_processor_qt/drivers/TraceWinDriver/__init__.py"], "/py_particle_processor_qt/drivers/OPALDriver/__init__.py": ["/py_particle_processor_qt/drivers/OPALDriver/OPALDriver.py"], "/py_particle_processor_qt/tools/BeamChar/BeamChar.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/BeamChar/beamchargui.py"], "/py_particle_processor_qt/tools/AnimateXY/__init__.py": ["/py_particle_processor_qt/tools/AnimateXY/AnimateXY.py"], "/py_particle_processor_qt/tools/RotateTool/__init__.py": ["/py_particle_processor_qt/tools/RotateTool/RotateTool.py"], "/py_particle_processor_qt/tools/__init__.py": ["/py_particle_processor_qt/tools/ScaleTool/__init__.py", "/py_particle_processor_qt/tools/TranslateTool/__init__.py", "/py_particle_processor_qt/tools/AnimateXY/__init__.py", "/py_particle_processor_qt/tools/BeamChar/__init__.py", "/py_particle_processor_qt/tools/CollimOPAL/__init__.py", "/py_particle_processor_qt/tools/OrbitTool/__init__.py", "/py_particle_processor_qt/tools/RotateTool/__init__.py"], "/py_particle_processor_qt/tools/BeamChar/__init__.py": ["/py_particle_processor_qt/tools/BeamChar/BeamChar.py"], "/py_particle_processor_qt/drivers/TrackDriver/__init__.py": ["/py_particle_processor_qt/drivers/TrackDriver/TrackDriver.py"], "/py_particle_processor_qt/__init__.py": ["/py_particle_processor_qt/py_particle_processor_qt.py"], "/py_particle_processor_qt/drivers/COMSOLDriver/__init__.py": ["/py_particle_processor_qt/drivers/COMSOLDriver/COMSOLDriver.py"], "/py_particle_processor_qt/tools/RotateTool/RotateTool.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/RotateTool/rotatetoolgui.py"], "/py_particle_processor_qt/tools/ScaleTool/ScaleTool.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/ScaleTool/scaletoolgui.py"], 
"/py_particle_processor_qt/tools/ScaleTool/__init__.py": ["/py_particle_processor_qt/tools/ScaleTool/ScaleTool.py"], "/py_particle_processor_qt/py_particle_processor_qt.py": ["/py_particle_processor_qt/dataset.py", "/py_particle_processor_qt/gui/main_window.py", "/py_particle_processor_qt/gui/species_prompt.py", "/py_particle_processor_qt/plotting.py", "/py_particle_processor_qt/generator.py", "/py_particle_processor_qt/tools/__init__.py"], "/py_particle_processor_qt/drivers/TrackDriver/TrackDriver.py": ["/py_particle_processor_qt/drivers/arraywrapper.py", "/py_particle_processor_qt/drivers/abstractdriver.py"], "/py_particle_processor_qt/drivers/FreeCADDriver/FreeCADDriver.py": ["/py_particle_processor_qt/drivers/abstractdriver.py"], "/py_particle_processor_qt/tools/CollimOPAL/__init__.py": ["/py_particle_processor_qt/tools/CollimOPAL/CollimOPAL.py"], "/py_particle_processor_qt/tools/CollimOPAL/CollimOPAL.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/CollimOPAL/collimOPALgui.py"], "/py_particle_processor_qt/drivers/__init__.py": ["/py_particle_processor_qt/drivers/OPALDriver/__init__.py", "/py_particle_processor_qt/drivers/TraceWinDriver/__init__.py", "/py_particle_processor_qt/drivers/COMSOLDriver/__init__.py", "/py_particle_processor_qt/drivers/IBSimuDriver/__init__.py", "/py_particle_processor_qt/drivers/TrackDriver/__init__.py", "/py_particle_processor_qt/drivers/FreeCADDriver/__init__.py"], "/py_particle_processor_qt/drivers/IBSimuDriver/IBSimuDriver.py": ["/py_particle_processor_qt/drivers/arraywrapper.py", "/py_particle_processor_qt/drivers/abstractdriver.py"]} |
42,386 | DanielWinklehner/py_particle_processor | refs/heads/master | /py_particle_processor_qt/drivers/IBSimuDriver/IBSimuDriver.py | from ..arraywrapper import ArrayWrapper
from ..abstractdriver import AbstractDriver
from dans_pymodules import IonSpecies, ParticleDistribution
import numpy as np
import scipy.constants as const
from PyQt5.QtWidgets import QInputDialog
# Physical constants (CODATA values from scipy.constants) used below for
# kg <-> MeV mass conversion and velocity -> relativistic momentum conversion.
amu_kg = const.value("atomic mass constant")  # (kg)
amu_mev = const.value("atomic mass constant energy equivalent in MeV")  # (MeV)
clight = const.value("speed of light in vacuum")  # (m/s)
class IBSimuDriver(AbstractDriver):
def __init__(self, parent=None, debug=False):
super(IBSimuDriver, self).__init__()
# IBSimu particle file: I (A), M (kg), t, x (m), vx (m/s), y (m), vy (m/s), z (m), vz (m/s)
self._debug = debug
self._program_name = "IBSimu"
self._parent = parent
def import_data(self, filename, species):
# TODO: There is a lot of looping going on, the fewer instructions the better. -PW
if self._debug:
print("Importing data from program: {}".format(self._program_name))
try:
datasource = {}
data = {}
with open(filename, 'rb') as infile:
lines = infile.readlines()
npart = len(lines)
current = np.empty(npart)
mass = np.empty(npart)
x = np.empty(npart)
y = np.empty(npart)
z = np.empty(npart)
vx = np.empty(npart)
vy = np.empty(npart)
vz = np.empty(npart)
for i, line in enumerate(lines):
current[i], mass[i], _, x[i], vx[i], y[i], vy[i], z[i], vz[i] = [float(item) for item in
line.strip().split()]
masses = np.sort(np.unique(mass)) # mass in MeV, sorted in ascending order (protons before h2+)
particle_distributions = []
for i, m in enumerate(masses):
m_mev = m / amu_kg * amu_mev
species_indices = np.where((mass == m) & (vz > 5.0e5)) # TODO: v_z selection should be in IBSimu -DW
ion = IonSpecies("Species {}".format(i + 1),
mass_mev=m_mev,
a=m_mev / amu_mev,
z=np.round(m_mev / amu_mev, 0),
q=1.0,
current=np.sum(current[species_indices]),
energy_mev=1) # Note: Set energy to 1 for now, will be recalculated
# ParticleDistribution corretly calculates the mean energy
particle_distributions.append(
ParticleDistribution(ion=ion,
x=x[species_indices],
y=y[species_indices],
z=z[species_indices],
vx=vx[species_indices],
vy=vy[species_indices],
vz=vz[species_indices]
))
print(particle_distributions[-1].calculate_emittances()["summary"])
n_species = len(particle_distributions)
if n_species > 1:
items = []
for dist in particle_distributions:
items.append("a = {:.5f}, q = {:.1f}".format(dist.ion.a(), dist.ion.q()))
item, ok = QInputDialog.getItem(self._parent,
"IBSimu Import",
"Found {} ion species, which one do you want?".format(n_species),
items, 0, False)
index = np.where(np.array(items) == item)[0][0]
else:
index = 0
pd = particle_distributions[index]
species = pd.ion
npart = len(pd.x)
step_str = "Step#{}".format(0)
datasource[step_str] = {}
datasource[step_str]["x"] = ArrayWrapper(pd.x)
datasource[step_str]["y"] = ArrayWrapper(pd.y)
datasource[step_str]["z"] = ArrayWrapper(pd.z)
datasource[step_str]["px"] = ArrayWrapper(pd.vx/clight / np.sqrt(1.0 - (pd.vx/clight)**2.0))
datasource[step_str]["py"] = ArrayWrapper(pd.vy/clight / np.sqrt(1.0 - (pd.vy/clight)**2.0))
datasource[step_str]["pz"] = ArrayWrapper(pd.vz/clight / np.sqrt(1.0 - (pd.vz/clight)**2.0))
v_mean_sq = pd.vx**2.0 + pd.vy**2.0 + pd.vz**2.0
datasource[step_str]["E"] = ArrayWrapper(
(1.0 / np.sqrt(1.0 - (v_mean_sq / clight ** 2.0)) - 1.0) * species.mass_mev())
data["datasource"] = datasource
data["ion"] = species
data["energy"] = species.energy_mev() * species.a()
data["mass"] = species.a()
data["charge"] = species.q()
data["steps"] = 1
data["current"] = species.current()
data["particles"] = npart
if self._debug:
print("Found {} steps in the file.".format(data["steps"]))
print("Found {} particles in the file.".format(data["particles"]))
return data
except Exception as e:
print("Exception happened during particle loading with {} "
"ImportExportDriver: {}".format(self._program_name, e))
return None
# try:
#
# datasource = {}
# data = {}
#
# with open(filename, 'rb') as infile:
#
# _n = 7 # Length of the n-tuples to unpack from the values list
# key_list = ["x", "y", "z", "px", "py", "pz", "E"] # Things we want to save
#
# firstline = infile.readline()
# lines = infile.readlines()
# raw_values = [float(item) for item in firstline.strip().split()]
# nsteps = int((len(raw_values) - 1) / _n) # Number of steps
# npart = len(lines) + 1
#
# # Fill in the values for the first line now
# _id = int(raw_values.pop(0))
#
# for step in range(nsteps):
# step_str = "Step#{}".format(step)
#
# datasource[step_str] = {}
#
# for key in key_list:
# datasource[step_str][key] = ArrayWrapper(np.zeros(npart))
#
# values = raw_values[(step * _n):(_n + step * _n)]
#
# gamma = values[6] / species.mass_mev() + 1.0
# beta = np.sqrt(1.0 - np.power(gamma, -2.0))
# v_tot = np.sqrt(values[3] ** 2.0 + values[4] ** 2.0 + values[5] ** 2.0)
#
# values[0:3] = [r for r in values[0:3]]
# values[3:6] = [beta * gamma * v / v_tot for v in values[3:6]] # Convert velocity to momentum
#
# for idx, key in enumerate(key_list):
# datasource[step_str][key][_id - 1] = values[idx]
#
# # Now for every other line
# for line in lines:
#
# raw_values = [float(item) for item in line.strip().split()] # Data straight from the text file
# _id = int(raw_values.pop(0)) # Particle ID number
#
# for step in range(nsteps):
# step_str = "Step#{}".format(step)
# values = raw_values[(step * _n):(_n + step * _n)]
#
# gamma = values[6] / species.mass_mev() + 1.0
# beta = np.sqrt(1.0 - gamma ** (-2.0))
# v_tot = np.sqrt(values[3] ** 2.0 + values[4] ** 2.0 + values[5] ** 2.0)
#
# values[0:3] = [r for r in values[0:3]]
# values[3:6] = [beta * gamma * v / v_tot for v in values[3:6]] # Convert velocity to momentum
#
# for idx, key in enumerate(key_list):
# datasource[step_str][key][_id - 1] = values[idx]
#
# species.calculate_from_energy_mev(datasource["Step#0"]["E"][0])
#
# data["datasource"] = datasource
# data["ion"] = species
# data["mass"] = species.a()
# data["charge"] = species.q()
# data["steps"] = len(datasource.keys())
# data["current"] = None
# data["particles"] = len(datasource["Step#0"]["x"])
#
# if self._debug:
# print("Found {} steps in the file.".format(data["steps"]))
# print("Found {} particles in the file.".format(data["particles"]))
#
# return data
#
# except Exception as e:
#
# print("Exception happened during particle loading with {} "
# "ImportExportDriver: {}".format(self._program_name, e))
#
# return None
def export_data(self, dataset, filename):
    """Export *dataset* to ``<filename>.txt`` in this program's text format.

    Currently a stub: it only prints a notice.  The commented-out code below
    is the intended implementation (one line per particle: id followed by
    x y z vx vy vz E for every step), kept for reference.
    """
    print("Sorry, exporting not implemented yet!")
    # if self._debug:
    #     print("Exporting data for program: {}".format(self._program_name))
    #
    # datasource = dataset.get_datasource()
    # ion = dataset.get_ion()
    # nsteps = dataset.get_nsteps()
    # npart = dataset.get_npart()
    #
    # with open(filename + ".txt", "w") as outfile:
    #     for i in range(npart):
    #         outstring = "{} ".format(i)
    #         for step in range(nsteps):
    #             _px = datasource.get("Step#{}".format(step)).get("px")[i]
    #             _py = datasource.get("Step#{}".format(step)).get("py")[i]
    #             _pz = datasource.get("Step#{}".format(step)).get("pz")[i]
    #
    #             # Relativistic momentum -> velocity conversion per component.
    #             _vx, _vy, _vz = (clight * _px / np.sqrt(_px ** 2.0 + 1.0),
    #                              clight * _py / np.sqrt(_py ** 2.0 + 1.0),
    #                              clight * _pz / np.sqrt(_pz ** 2.0 + 1.0))
    #
    #             outstring += "{} {} {} {} {} {} {} ".format(datasource.get("Step#{}".format(step)).get("x")[i],
    #                                                         datasource.get("Step#{}".format(step)).get("y")[i],
    #                                                         datasource.get("Step#{}".format(step)).get("z")[i],
    #                                                         _vx, _vy, _vz, ion.energy_mev())
    #         outfile.write(outstring + "\n")
| {"/py_particle_processor_qt/plotting.py": ["/py_particle_processor_qt/gui/plot_settings.py", "/py_particle_processor_qt/gui/default_plot_settings.py"], "/py_particle_processor_qt/test/__main__.py": ["/py_particle_processor_qt/py_particle_processor_qt.py"], "/py_particle_processor_qt/tools/OrbitTool/OrbitTool.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/OrbitTool/orbittoolgui.py"], "/examples/gui_example.py": ["/py_particle_processor_qt/py_particle_processor_qt.py"], "/py_particle_processor_qt/drivers/OPALDriver/OPALDriver.py": ["/py_particle_processor_qt/drivers/arraywrapper.py", "/py_particle_processor_qt/drivers/abstractdriver.py"], "/py_particle_processor_qt/drivers/COMSOLDriver/COMSOLDriver.py": ["/py_particle_processor_qt/drivers/arraywrapper.py", "/py_particle_processor_qt/drivers/abstractdriver.py"], "/py_particle_processor_qt/drivers/IBSimuDriver/__init__.py": ["/py_particle_processor_qt/drivers/IBSimuDriver/IBSimuDriver.py"], "/py_particle_processor_qt/tools/TranslateTool/__init__.py": ["/py_particle_processor_qt/tools/TranslateTool/TranslateTool.py"], "/py_particle_processor_qt/tools/TranslateTool/TranslateTool.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/TranslateTool/translatetoolgui.py"], "/py_particle_processor_qt/drivers/FreeCADDriver/__init__.py": ["/py_particle_processor_qt/drivers/FreeCADDriver/FreeCADDriver.py"], "/py_particle_processor_qt/tools/AnimateXY/AnimateXY.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/AnimateXY/animateXYgui.py"], "/py_particle_processor_qt/dataset.py": ["/py_particle_processor_qt/drivers/__init__.py"], "/py_particle_processor_qt/tools/OrbitTool/__init__.py": ["/py_particle_processor_qt/tools/OrbitTool/OrbitTool.py"], "/py_particle_processor_qt/generator.py": ["/py_particle_processor_qt/gui/generate_main.py", "/py_particle_processor_qt/gui/generate_error.py", 
"/py_particle_processor_qt/gui/generate_envelope.py", "/py_particle_processor_qt/gui/generate_twiss.py", "/py_particle_processor_qt/drivers/TraceWinDriver/__init__.py"], "/py_particle_processor_qt/drivers/OPALDriver/__init__.py": ["/py_particle_processor_qt/drivers/OPALDriver/OPALDriver.py"], "/py_particle_processor_qt/tools/BeamChar/BeamChar.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/BeamChar/beamchargui.py"], "/py_particle_processor_qt/tools/AnimateXY/__init__.py": ["/py_particle_processor_qt/tools/AnimateXY/AnimateXY.py"], "/py_particle_processor_qt/tools/RotateTool/__init__.py": ["/py_particle_processor_qt/tools/RotateTool/RotateTool.py"], "/py_particle_processor_qt/tools/__init__.py": ["/py_particle_processor_qt/tools/ScaleTool/__init__.py", "/py_particle_processor_qt/tools/TranslateTool/__init__.py", "/py_particle_processor_qt/tools/AnimateXY/__init__.py", "/py_particle_processor_qt/tools/BeamChar/__init__.py", "/py_particle_processor_qt/tools/CollimOPAL/__init__.py", "/py_particle_processor_qt/tools/OrbitTool/__init__.py", "/py_particle_processor_qt/tools/RotateTool/__init__.py"], "/py_particle_processor_qt/tools/BeamChar/__init__.py": ["/py_particle_processor_qt/tools/BeamChar/BeamChar.py"], "/py_particle_processor_qt/drivers/TrackDriver/__init__.py": ["/py_particle_processor_qt/drivers/TrackDriver/TrackDriver.py"], "/py_particle_processor_qt/__init__.py": ["/py_particle_processor_qt/py_particle_processor_qt.py"], "/py_particle_processor_qt/drivers/COMSOLDriver/__init__.py": ["/py_particle_processor_qt/drivers/COMSOLDriver/COMSOLDriver.py"], "/py_particle_processor_qt/tools/RotateTool/RotateTool.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/RotateTool/rotatetoolgui.py"], "/py_particle_processor_qt/tools/ScaleTool/ScaleTool.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/ScaleTool/scaletoolgui.py"], 
"/py_particle_processor_qt/tools/ScaleTool/__init__.py": ["/py_particle_processor_qt/tools/ScaleTool/ScaleTool.py"], "/py_particle_processor_qt/py_particle_processor_qt.py": ["/py_particle_processor_qt/dataset.py", "/py_particle_processor_qt/gui/main_window.py", "/py_particle_processor_qt/gui/species_prompt.py", "/py_particle_processor_qt/plotting.py", "/py_particle_processor_qt/generator.py", "/py_particle_processor_qt/tools/__init__.py"], "/py_particle_processor_qt/drivers/TrackDriver/TrackDriver.py": ["/py_particle_processor_qt/drivers/arraywrapper.py", "/py_particle_processor_qt/drivers/abstractdriver.py"], "/py_particle_processor_qt/drivers/FreeCADDriver/FreeCADDriver.py": ["/py_particle_processor_qt/drivers/abstractdriver.py"], "/py_particle_processor_qt/tools/CollimOPAL/__init__.py": ["/py_particle_processor_qt/tools/CollimOPAL/CollimOPAL.py"], "/py_particle_processor_qt/tools/CollimOPAL/CollimOPAL.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/CollimOPAL/collimOPALgui.py"], "/py_particle_processor_qt/drivers/__init__.py": ["/py_particle_processor_qt/drivers/OPALDriver/__init__.py", "/py_particle_processor_qt/drivers/TraceWinDriver/__init__.py", "/py_particle_processor_qt/drivers/COMSOLDriver/__init__.py", "/py_particle_processor_qt/drivers/IBSimuDriver/__init__.py", "/py_particle_processor_qt/drivers/TrackDriver/__init__.py", "/py_particle_processor_qt/drivers/FreeCADDriver/__init__.py"], "/py_particle_processor_qt/drivers/IBSimuDriver/IBSimuDriver.py": ["/py_particle_processor_qt/drivers/arraywrapper.py", "/py_particle_processor_qt/drivers/abstractdriver.py"]} |
42,387 | DanielWinklehner/py_particle_processor | refs/heads/master | /py_particle_processor_qt/tools/TranslateTool/translatetoolgui.py | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'tools/TranslateTool/translatetoolgui.ui'
#
# Created by: PyQt5 UI code generator 5.5.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_TranslateToolGUI(object):
    """pyuic5-generated layout for the Translate tool window.

    Fix: the window title previously said "Scale Tool" -- a copy-paste left
    over from the ScaleTool GUI.  It now says "Translate Tool".  Since this
    file is generated, apply the same fix to translatetoolgui.ui so a
    regeneration does not reintroduce the bug.
    """

    def setupUi(self, TranslateToolGUI):
        """Build the widget tree: a title label, an X/Y/Z grid of line edits
        with unit labels, and Cancel/Apply buttons."""
        TranslateToolGUI.setObjectName("TranslateToolGUI")
        TranslateToolGUI.resize(189, 157)
        self.centralwidget = QtWidgets.QWidget(TranslateToolGUI)
        self.centralwidget.setObjectName("centralwidget")
        self.verticalLayoutWidget = QtWidgets.QWidget(self.centralwidget)
        self.verticalLayoutWidget.setGeometry(QtCore.QRect(0, 0, 186, 155))
        self.verticalLayoutWidget.setObjectName("verticalLayoutWidget")
        self.verticalLayout = QtWidgets.QVBoxLayout(self.verticalLayoutWidget)
        self.verticalLayout.setObjectName("verticalLayout")
        self.label = QtWidgets.QLabel(self.verticalLayoutWidget)
        self.label.setObjectName("label")
        self.verticalLayout.addWidget(self.label, 0, QtCore.Qt.AlignHCenter|QtCore.Qt.AlignVCenter)
        self.gridLayout = QtWidgets.QGridLayout()
        self.gridLayout.setObjectName("gridLayout")
        self.x_label = QtWidgets.QLabel(self.verticalLayoutWidget)
        self.x_label.setObjectName("x_label")
        self.gridLayout.addWidget(self.x_label, 0, 0, 1, 1)
        self.y_label = QtWidgets.QLabel(self.verticalLayoutWidget)
        self.y_label.setFocusPolicy(QtCore.Qt.NoFocus)
        self.y_label.setObjectName("y_label")
        self.gridLayout.addWidget(self.y_label, 1, 0, 1, 1)
        self.z_label = QtWidgets.QLabel(self.verticalLayoutWidget)
        self.z_label.setObjectName("z_label")
        self.gridLayout.addWidget(self.z_label, 2, 0, 1, 1)
        self.y_trans = QtWidgets.QLineEdit(self.verticalLayoutWidget)
        self.y_trans.setFocusPolicy(QtCore.Qt.ClickFocus)
        self.y_trans.setObjectName("y_trans")
        self.gridLayout.addWidget(self.y_trans, 1, 1, 1, 1)
        self.x_trans = QtWidgets.QLineEdit(self.verticalLayoutWidget)
        self.x_trans.setFocusPolicy(QtCore.Qt.ClickFocus)
        self.x_trans.setClearButtonEnabled(False)
        self.x_trans.setObjectName("x_trans")
        self.gridLayout.addWidget(self.x_trans, 0, 1, 1, 1)
        self.z_trans = QtWidgets.QLineEdit(self.verticalLayoutWidget)
        self.z_trans.setFocusPolicy(QtCore.Qt.ClickFocus)
        self.z_trans.setObjectName("z_trans")
        self.gridLayout.addWidget(self.z_trans, 2, 1, 1, 1)
        self.m1 = QtWidgets.QLabel(self.verticalLayoutWidget)
        self.m1.setObjectName("m1")
        self.gridLayout.addWidget(self.m1, 0, 2, 1, 1)
        self.m2 = QtWidgets.QLabel(self.verticalLayoutWidget)
        self.m2.setObjectName("m2")
        self.gridLayout.addWidget(self.m2, 1, 2, 1, 1)
        self.m3 = QtWidgets.QLabel(self.verticalLayoutWidget)
        self.m3.setObjectName("m3")
        self.gridLayout.addWidget(self.m3, 2, 2, 1, 1)
        self.verticalLayout.addLayout(self.gridLayout)
        self.horizontalLayout = QtWidgets.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.cancel_button = QtWidgets.QPushButton(self.verticalLayoutWidget)
        self.cancel_button.setObjectName("cancel_button")
        self.horizontalLayout.addWidget(self.cancel_button)
        self.apply_button = QtWidgets.QPushButton(self.verticalLayoutWidget)
        self.apply_button.setDefault(True)
        self.apply_button.setObjectName("apply_button")
        self.horizontalLayout.addWidget(self.apply_button)
        self.verticalLayout.addLayout(self.horizontalLayout)
        TranslateToolGUI.setCentralWidget(self.centralwidget)
        self.retranslateUi(TranslateToolGUI)
        QtCore.QMetaObject.connectSlotsByName(TranslateToolGUI)

    def retranslateUi(self, TranslateToolGUI):
        """Set all user-visible strings (translation hook)."""
        _translate = QtCore.QCoreApplication.translate
        # Bug fix: was "Scale Tool".
        TranslateToolGUI.setWindowTitle(_translate("TranslateToolGUI", "Translate Tool"))
        self.label.setText(_translate("TranslateToolGUI", "Translate Tool"))
        self.x_label.setText(_translate("TranslateToolGUI", "X"))
        self.y_label.setText(_translate("TranslateToolGUI", "Y"))
        self.z_label.setText(_translate("TranslateToolGUI", "Z"))
        self.y_trans.setText(_translate("TranslateToolGUI", "0.0"))
        self.x_trans.setText(_translate("TranslateToolGUI", "0.0"))
        self.z_trans.setText(_translate("TranslateToolGUI", "0.0"))
        self.m1.setText(_translate("TranslateToolGUI", "m"))
        self.m2.setText(_translate("TranslateToolGUI", "m"))
        self.m3.setText(_translate("TranslateToolGUI", "m"))
        self.cancel_button.setText(_translate("TranslateToolGUI", "Cancel"))
        self.apply_button.setText(_translate("TranslateToolGUI", "Apply"))
| {"/py_particle_processor_qt/plotting.py": ["/py_particle_processor_qt/gui/plot_settings.py", "/py_particle_processor_qt/gui/default_plot_settings.py"], "/py_particle_processor_qt/test/__main__.py": ["/py_particle_processor_qt/py_particle_processor_qt.py"], "/py_particle_processor_qt/tools/OrbitTool/OrbitTool.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/OrbitTool/orbittoolgui.py"], "/examples/gui_example.py": ["/py_particle_processor_qt/py_particle_processor_qt.py"], "/py_particle_processor_qt/drivers/OPALDriver/OPALDriver.py": ["/py_particle_processor_qt/drivers/arraywrapper.py", "/py_particle_processor_qt/drivers/abstractdriver.py"], "/py_particle_processor_qt/drivers/COMSOLDriver/COMSOLDriver.py": ["/py_particle_processor_qt/drivers/arraywrapper.py", "/py_particle_processor_qt/drivers/abstractdriver.py"], "/py_particle_processor_qt/drivers/IBSimuDriver/__init__.py": ["/py_particle_processor_qt/drivers/IBSimuDriver/IBSimuDriver.py"], "/py_particle_processor_qt/tools/TranslateTool/__init__.py": ["/py_particle_processor_qt/tools/TranslateTool/TranslateTool.py"], "/py_particle_processor_qt/tools/TranslateTool/TranslateTool.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/TranslateTool/translatetoolgui.py"], "/py_particle_processor_qt/drivers/FreeCADDriver/__init__.py": ["/py_particle_processor_qt/drivers/FreeCADDriver/FreeCADDriver.py"], "/py_particle_processor_qt/tools/AnimateXY/AnimateXY.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/AnimateXY/animateXYgui.py"], "/py_particle_processor_qt/dataset.py": ["/py_particle_processor_qt/drivers/__init__.py"], "/py_particle_processor_qt/tools/OrbitTool/__init__.py": ["/py_particle_processor_qt/tools/OrbitTool/OrbitTool.py"], "/py_particle_processor_qt/generator.py": ["/py_particle_processor_qt/gui/generate_main.py", "/py_particle_processor_qt/gui/generate_error.py", 
"/py_particle_processor_qt/gui/generate_envelope.py", "/py_particle_processor_qt/gui/generate_twiss.py", "/py_particle_processor_qt/drivers/TraceWinDriver/__init__.py"], "/py_particle_processor_qt/drivers/OPALDriver/__init__.py": ["/py_particle_processor_qt/drivers/OPALDriver/OPALDriver.py"], "/py_particle_processor_qt/tools/BeamChar/BeamChar.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/BeamChar/beamchargui.py"], "/py_particle_processor_qt/tools/AnimateXY/__init__.py": ["/py_particle_processor_qt/tools/AnimateXY/AnimateXY.py"], "/py_particle_processor_qt/tools/RotateTool/__init__.py": ["/py_particle_processor_qt/tools/RotateTool/RotateTool.py"], "/py_particle_processor_qt/tools/__init__.py": ["/py_particle_processor_qt/tools/ScaleTool/__init__.py", "/py_particle_processor_qt/tools/TranslateTool/__init__.py", "/py_particle_processor_qt/tools/AnimateXY/__init__.py", "/py_particle_processor_qt/tools/BeamChar/__init__.py", "/py_particle_processor_qt/tools/CollimOPAL/__init__.py", "/py_particle_processor_qt/tools/OrbitTool/__init__.py", "/py_particle_processor_qt/tools/RotateTool/__init__.py"], "/py_particle_processor_qt/tools/BeamChar/__init__.py": ["/py_particle_processor_qt/tools/BeamChar/BeamChar.py"], "/py_particle_processor_qt/drivers/TrackDriver/__init__.py": ["/py_particle_processor_qt/drivers/TrackDriver/TrackDriver.py"], "/py_particle_processor_qt/__init__.py": ["/py_particle_processor_qt/py_particle_processor_qt.py"], "/py_particle_processor_qt/drivers/COMSOLDriver/__init__.py": ["/py_particle_processor_qt/drivers/COMSOLDriver/COMSOLDriver.py"], "/py_particle_processor_qt/tools/RotateTool/RotateTool.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/RotateTool/rotatetoolgui.py"], "/py_particle_processor_qt/tools/ScaleTool/ScaleTool.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/ScaleTool/scaletoolgui.py"], 
"/py_particle_processor_qt/tools/ScaleTool/__init__.py": ["/py_particle_processor_qt/tools/ScaleTool/ScaleTool.py"], "/py_particle_processor_qt/py_particle_processor_qt.py": ["/py_particle_processor_qt/dataset.py", "/py_particle_processor_qt/gui/main_window.py", "/py_particle_processor_qt/gui/species_prompt.py", "/py_particle_processor_qt/plotting.py", "/py_particle_processor_qt/generator.py", "/py_particle_processor_qt/tools/__init__.py"], "/py_particle_processor_qt/drivers/TrackDriver/TrackDriver.py": ["/py_particle_processor_qt/drivers/arraywrapper.py", "/py_particle_processor_qt/drivers/abstractdriver.py"], "/py_particle_processor_qt/drivers/FreeCADDriver/FreeCADDriver.py": ["/py_particle_processor_qt/drivers/abstractdriver.py"], "/py_particle_processor_qt/tools/CollimOPAL/__init__.py": ["/py_particle_processor_qt/tools/CollimOPAL/CollimOPAL.py"], "/py_particle_processor_qt/tools/CollimOPAL/CollimOPAL.py": ["/py_particle_processor_qt/tools/abstract_tool.py", "/py_particle_processor_qt/tools/CollimOPAL/collimOPALgui.py"], "/py_particle_processor_qt/drivers/__init__.py": ["/py_particle_processor_qt/drivers/OPALDriver/__init__.py", "/py_particle_processor_qt/drivers/TraceWinDriver/__init__.py", "/py_particle_processor_qt/drivers/COMSOLDriver/__init__.py", "/py_particle_processor_qt/drivers/IBSimuDriver/__init__.py", "/py_particle_processor_qt/drivers/TrackDriver/__init__.py", "/py_particle_processor_qt/drivers/FreeCADDriver/__init__.py"], "/py_particle_processor_qt/drivers/IBSimuDriver/IBSimuDriver.py": ["/py_particle_processor_qt/drivers/arraywrapper.py", "/py_particle_processor_qt/drivers/abstractdriver.py"]} |
42,404 | wilfriedE/EmoBot | refs/heads/master | /defaults/__init__.py | """
Default configurations
"""
import os
import json
# Default IRC connection settings used by bot.py.
server = "irc.freenode.net"
botnick = "emo-bot"

# Load the default channel list shipped next to this module (channels.json).
src_dir = os.path.dirname(__file__)
with open(os.path.join(src_dir, 'channels.json')) as channels_file:
    channels = json.load(channels_file)
42,405 | wilfriedE/EmoBot | refs/heads/master | /tests.py | """
Tests for EmoBot and it's components
"""
from emoji import *
Emoticons = Emoji()


def test_emojies():
    """Print a sample of emoji lookups so their rendering can be eyeballed."""
    samples = (
        ":smiley:", ":raised_hands:", ":heart_eyes_cat:", ":innocent:",
        ":punch:", ":muscle:", ":pray:", ":spy:", ":alien:",
        ":globe_with_meridians:", ":rocket:", ":100:",
    )
    for name in samples:
        print(name + " => ", Emoticons.get(name))


if __name__ == '__main__':
    test_emojies()
42,406 | wilfriedE/EmoBot | refs/heads/master | /bot.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from defaults import *
from emoji import *
import re
import socket
def commands(nick,channel,message):
    """Handle bot commands (about / optout / optin / help) for an opted-in user.

    Greets back first if *message* greets the bot, then dispatches on the
    first "<botnick>: <cmd>" or "<botnick> <cmd>" substring found.
    Replies are sent straight to the IRC socket (module global ``ircsock``).
    """
    if containsGreetings(message):
        hello(channel, nick)
    global optout_nicks  # mutated when a user opts out
    if message.find(botnick+': about')!=-1 or message.find(botnick+' about')!=-1:
        ircsock.send('PRIVMSG %s :%s: I am an Emoji bot that adds emoji auto corrects. Go http://git.io/v05Ik for more info.!\r\n' % (channel,nick))
    elif message.find(botnick+': optout')!=-1 or message.find(botnick+' optout')!=-1:
        # Only confirm when the nick was not already opted out.
        if nick not in optout_nicks:
            optout_nicks += [nick]
            ircsock.send('PRIVMSG %s :%s: You have been opted out %s. Note when emo-bot disconnects you will be opted back in by default. More at [gir-url].\r\n' % (channel,nick, Emo.get(":cry:")))
    elif message.find(botnick+': optin')!=-1 or message.find(botnick+' optin')!=-1:
        # Opted-in users asking to opt in are already in by default.
        ircsock.send('PRIVMSG %s :%s: You are already been opted in %s .\r\n' % (channel,nick, Emo.get(":guitar:")))
    elif message.find(botnick+': help')!=-1 or message.find(botnick+' help')!=-1:
        ircsock.send('PRIVMSG %s :%s: Available commands [about, optout,].\r\n' % (channel,nick))
    else:
        print("Not a valid command")
def opted_out_commads(nick, channel, message):
    """Handle the reduced command set (optin / help) for opted-out users.

    NOTE(review): the name keeps the original "commads" typo because run()
    calls it by this exact name.
    """
    global optout_nicks  # mutated when a user opts back in
    if message.find(botnick+': optin')!=-1 or message.find(botnick+' optin')!=-1:
        if nick in optout_nicks:
            optout_nicks.remove(nick)
            ircsock.send('PRIVMSG %s :%s: You have been opted in %s .\r\n' % (channel,nick, Emo.get(":guitar:")))
    elif message.find(botnick+': help')!=-1 or message.find(botnick+' help')!=-1:
        ircsock.send('PRIVMSG %s :%s: Available commands [about, optin].\r\n' % (channel,nick))
def containsGreetings(msg):
    """Return True if *msg* greets the bot, else False.

    Matches, case-insensitively and anywhere in the message:
    "hello <botnick>", "hi <botnick>", "hey <botnick>",
    "<botnick>: <greeting>" and "<botnick> <greeting>".

    Bug fix: the original ``else:`` branch said bare ``False`` without
    ``return``, so the function returned None for non-greetings.  None is
    falsy, so boolean callers behaved, but the contract was wrong.
    """
    message = msg.lower()
    greetings = ("hello", "hi", "hey")
    # "<greeting> <botnick>" plus both addressed forms.
    patterns = ["%s %s" % (g, botnick) for g in greetings]
    patterns += ["%s: %s" % (botnick, g) for g in greetings]
    patterns += ["%s %s" % (botnick, g) for g in greetings]
    return any(p in message for p in patterns)
# Some basic variables used to configure the bot
def ping():
    """Respond to server PINGs (keep-alive required by the IRC protocol)."""
    ircsock.send("PONG :pingis\n")
def sendmsg(chan , msg):
    """Send *msg* to channel *chan* as a PRIVMSG."""
    ircsock.send("PRIVMSG %s :%s\n" % (chan, msg))
def joinchan(chan):
    """Join the given IRC channel."""
    ircsock.send("JOIN %s\n" % chan)
def hello(chan, nick):
    """Greet *nick* in channel *chan* with a smiley emoji."""
    greeting = "PRIVMSG %s :Hello! %s %s \n" % (chan, nick, Emo.get(":smiley:"))
    ircsock.send(greeting)
def getNick(msg):
    """Extract the sender's nick from a ':nick!user@host ...' IRC line."""
    prefix = msg.partition('!')[0]
    return prefix[1:]  # drop the leading ':'
def getChannel(msg):
    """Extract the target channel from a PRIVMSG line."""
    after_privmsg = msg.rpartition(' PRIVMSG ')[2]
    return after_privmsg.partition(' :')[0]
def joinInvite(msg):
    """Join the channel named in an INVITE message and announce the bot there."""
    # Everything after the last ':' in "... INVITE <botnick> :<channel>".
    channel = msg.split("INVITE " + botnick)[-1].split(':')[-1]
    joinchan(channel)
    sendmsg(channel , "Hello! "+ channel + " . emo-bot is here type 'emo-bot help' for more.")
def run():
    """Connect to IRC, join the default channels, and relay emoji translations forever."""
    ircsock.connect((server, 6667))  # standard IRC port
    # RFC 1459 registration handshake: USER then NICK.
    ircsock.send("USER "+ botnick +" "+ botnick +" "+ botnick +" :Emojie Bot to ligthen up your day 😄. More at http://git.io/v05Ik.\n") # user authentication
    ircsock.send("NICK "+ botnick +"\n") # here we actually assign the nick to the bot
    for channel in channels:
        joinchan(channel)
    # NOTE(review): send()/recv() are used with plain str, i.e. the Python 2
    # socket API; under Python 3 recv() returns bytes -- confirm interpreter.
    while 1:
        ircmsg = ircsock.recv(2048) # receive data from the server
        ircmsg = ircmsg.strip('\n\r') # removing any unnecessary linebreaks.
        print(ircmsg) # echo raw server traffic for debugging
        channel = getChannel(ircmsg)
        nick = getNick(ircmsg)
        if nick not in optout_nicks:
            # Collect every ":word:" token and translate the valid ones.
            emojies = re.findall(r':([\w+-]+):', ircmsg)
            emojies = [ Emo.get(":"+emoji+":") for emoji in emojies if Emo.valid(":"+emoji+":")]
            if emojies:
                ircsock.send('PRIVMSG %s :%s: means* ' % (channel,nick) + ' '.join([ emoji for emoji in emojies]) +' \r\n')
            commands(nick,channel,ircmsg)
        else:
            # Opted-out users only get the reduced optin/help command set.
            opted_out_commads(nick,channel,ircmsg)
        if ircmsg.find("INVITE " + botnick)!=-1: # when invited to a channel, join it
            joinInvite(ircmsg)
        if ircmsg.find("PING :") != -1: # if the server pings us then we've got to respond!
            ping()
def test():
    """Smoke-test the IRC line parsing helpers against a canned message."""
    print("beginning tests\n")
    sample = ':botiE!~botiE@unaffiliated/botiE PRIVMSG #theChannel :Hello emo-bot'
    print("nick => botiE : ", getNick(sample) == "botiE")
    print("channel => theChannel : ", getChannel(sample) == "#theChannel")
    print("tests have completed.\n\n")
if __name__ == '__main__':
    # Module-level globals used by the handler functions above.
    Emo = Emoji()       # emoji alias lookup table
    optout_nicks = []   # nicks that asked not to be auto-corrected
    test()              # quick smoke test of the parsing helpers
    ircsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    run()
| {"/tests.py": ["/emoji/__init__.py"], "/bot.py": ["/defaults/__init__.py", "/emoji/__init__.py"]} |
42,407 | wilfriedE/EmoBot | refs/heads/master | /emoji/__init__.py | """
This is a simple Emoji class to allow access to all emojies
"""
import os
import json
class Emoji(object):
    """Lookup table mapping ":alias:" strings to UTF-8 encoded emoji.

    The mapping is loaded from ``emoji_map.json`` next to this module
    (produced by emoji_mapper.py).
    """

    def __init__(self):
        # Load the alias -> emoji map shipped alongside this package.
        src_dir = os.path.dirname(__file__)
        with open(os.path.join(src_dir, 'emoji_map.json')) as emoji_map_file:
            self.emojies = json.load(emoji_map_file)

    def get(self, emoji):
        """Return the UTF-8 encoded emoji for *emoji* (e.g. ":smiley:"),
        or None if the alias is unknown."""
        if emoji in self.emojies:
            return self.emojies[emoji].encode('utf-8')

    def valid(self, emoji):
        """Return True if *emoji* is a known alias, else False."""
        # Simplified from an explicit if/else returning True/False.
        return emoji in self.emojies
| {"/tests.py": ["/emoji/__init__.py"], "/bot.py": ["/defaults/__init__.py", "/emoji/__init__.py"]} |
42,408 | wilfriedE/EmoBot | refs/heads/master | /emoji_mapper.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
A simple script to mapp the emojies in emoji.json
It makes retrieval of emojies simpler.
"""
import json
with open('emoji/emoji.json') as emoji_file:
    emojies = json.load(emoji_file)

# Build {":alias:"/":tag:" -> UTF-8 encoded emoji} from the raw emoji list.
# NOTE(review): .encode("utf-8") produces bytes; json.dump of bytes values
# raises TypeError on Python 3 -- presumably this was written for Python 2.
# Confirm the target interpreter before relying on this script.
EmojiMap = {}
for emoji in emojies:
    if "emoji" in emoji:
        for alias in emoji["aliases"]:
            alias = alias.strip() # just in case it is not fully stripped by default
            EmojiMap[":"+alias+":"] = emoji["emoji"].encode("utf-8")
        for tag in emoji["tags"]:
            tag = tag.strip()
            EmojiMap[":"+tag+":"] = emoji["emoji"].encode("utf-8")

if __name__ == '__main__':
    # Dump the map to stdout, then persist it for emoji/__init__.py to load.
    print(EmojiMap)
    print("\nwritting to file\n")
    with open('emoji/emoji_map.json', 'w') as f:
        print("...")
        json.dump(EmojiMap, f)
    print("\nfinished writting.")
42,409 | Nagakiran1/Crowd-Counting-CNN-density-based | refs/heads/main | /Utils.py | import csv
import pickle
import cv2
from pandas import DataFrame
import numpy as np
import pandas as pd
from IPython.core.debugger import set_trace
from shapely.geometry import Point
from shapely import geometry
def AreaOfContour(x):
    """Return the bounding-box area of a contour row laid out as (x, y, w, h, ...)."""
    width, height = x[2], x[3]
    return width * height
def FilterContours(cordinates):
    """Drop contours that are (mostly) contained inside a larger contour.

    *cordinates* is an iterable of (x, y, w, h) rows.  Returns a numpy array
    with columns (x, y, w, h, area, processed_flag), sorted by area
    descending, keeping only the outermost boxes.
    """
    # Creating the Numpy array of filtered Contour Coordinates
    ContourInfo = np.array(cordinates)
    # Finding the Area of each contour and concatenating the area to axis of numpy array
    ContourAreas = np.apply_along_axis(AreaOfContour,1,ContourInfo)[:,None]
    ContourInfo = np.concatenate([ContourInfo,ContourAreas],axis=1)
    # Sorting the Contours based on the Area of contours (largest first)
    ContourInfo = ContourInfo[ContourInfo[:,4].argsort()[::-1]]
    # Column 5 flags contours that have already been processed as outer boxes.
    ContourInfo = np.concatenate([ContourInfo,np.zeros(len(ContourInfo)).astype(int)[:,None]],axis=1)
    count = 0
    # Loop from largest to smallest; any smaller contour whose box overlaps
    # the current one by more than 45% of its own area is deleted.
    # Terminates once every surviving row has its flag set.
    while (len(ContourInfo) != ContourInfo[:,5].sum()):
        # Mark the current (largest unprocessed) contour as an outer box.
        ContourInfo[count][-1] = 1
        Cont1 = ContourInfo[count]
        x,y,w,h = Cont1[0],Cont1[1],Cont1[2],Cont1[3]
        polygon = geometry.polygon.Polygon([(x,y),(x,y+h),(x+w,y+h),(x+w,y)])
        InnerContours = []
        # Check each remaining smaller contour against the current polygon.
        for val,Cont2 in enumerate(ContourInfo[count+1:]):
            X,Y,W,H = Cont2[0],Cont2[1],Cont2[2],Cont2[3]
            # Quick test: is any corner of the candidate inside the polygon?
            if any([polygon.contains(Point(point)) for point in [(X,Y),(X,Y+H),(X+W,Y+H),(X+W,Y)]]):
                other_polygon = geometry.polygon.Polygon([(X,Y),(X,Y+H),(X+W,Y+H),(X+W,Y)])
                # Keep only candidates overlapped by <= 45% of their own area.
                if polygon.intersection(other_polygon).area/other_polygon.area > 0.45:
                    InnerContours.append(val+count+1)
        # Deleting the contours which are inside the current one.
        ContourInfo = np.delete(ContourInfo,InnerContours, axis=0)
        count +=1
    return ContourInfo
def PreprocessEmbeddings(n):
    """Invert and rescale an activation map to the 0-255 range, then stack it
    into a 3-channel image (so cv2 colour-space routines accept it)."""
    peak = n.max()
    inverted = (peak - n) * (255 / peak)
    return np.dstack((inverted, inverted, inverted))
def ApplyContours(image, kernel_size=3, thickness=1, iterations = 11):
    """Find top-level contours in an activation map and draw their boxes.

    Returns (df, im1): *df* has columns (x, y, w, h, area) rescaled to a
    480x640 frame; *im1* is a copy of *image* with the boxes drawn.
    NOTE(review): kernel_size and iterations are currently unused (the
    dilation step is commented out).
    """
    im = PreprocessEmbeddings(image)
    im1 = image.copy()
    # kernel = np.ones((kernel_size, kernel_size),np.uint8)
    # dilated = cv2.dilate(thresh1,kernel,iterations = iterations)
    # Grayscale + inverted binary threshold so dark activations become blobs.
    gray_image = cv2.cvtColor(im, cv2.COLOR_BGR2GRAY)
    ret,thresh1 = cv2.threshold(gray_image, 180, 255, cv2.THRESH_BINARY_INV)
    contours, hierarchy = cv2.findContours(thresh1.astype(np.uint8), cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    cordinates = []
    contours1 = []
    #contours = [(cnt,hie)[0] for cnt,hie in zip(contours,hierarchy[0]) if hie[2] != -1]
    # Keep only top-level contours (hierarchy parent == -1).
    for cnt,hie in zip(contours,hierarchy[0]):
        x,y,w,h = cv2.boundingRect(cnt)
        if hie[-1]==-1:
            # Record the bounding box and contour area, and draw the box.
            cordinates.append((x,y,w,h, cv2.contourArea(cnt)))
            cv2.rectangle(im1,(x,y),(x+w,y+h),(0,255,0),thickness)
            contours1.append(cnt)
    df = DataFrame(cordinates, columns = ['x','y','w','h','area'])
    # Rescale coordinates from the activation-map frame to 480x640.
    df[['x', 'w']] = df[['x', 'w']].multiply(480/im1.shape[0]).astype(int)
    df[['y', 'h']] = df[['y', 'h']].multiply(640/im1.shape[1]).astype(int)
    return df, im1
def AdjustContours(df, im1):
    """Dilate boxes depending on image row (camera perspective), merge
    overlapping ones, and crop the corresponding image segments.

    Returns (df, im1, ImSegments): the filtered box table, *im1* with the
    final boxes drawn, and the list of cropped regions from the original.
    """
    # Perspective correction: boxes nearer the top of the frame (smaller y)
    # get a larger dilation threshold 'thr'.
    df['thr'] = pd.cut(df['y'], bins=np.arange(0,480,50), labels=np.flip(np.multiply(np.arange(9),5))).fillna(0).astype(int)
    # Grow each box by thr: shift the origin up-left, clamp at 0, extend size.
    df['x'] = df['x'] - df['thr']
    df['y'] = df['y'] - df['thr'].multiply(2)
    df.loc[df['x'].lt(0), 'x'] = 0
    df.loc[df['y'].lt(0), 'y'] = 0
    df['h'] = df['h'] + df['thr']
    df['w'] = df['w'] + df['thr'].multiply(1.5)
    # Drop boxes mostly contained inside larger ones.
    FilteredConts = FilterContours(df.values[:,:4])
    Original = im1.copy()
    df1 = pd.DataFrame(FilteredConts[:,:4].astype(int), columns=['x','y','w','h'])
    df[['x','y','w', 'h']] = df[['x','y','w', 'h']].astype(int)
    df1[['x','y','w', 'h']] = df1[['x','y','w', 'h']].astype(int)
    # Keep only the rows of df that survived the contour filtering.
    df = df.merge(df1, on=['x','y','w','h'], how='inner')
    ImSegments = []
    for ind, row in df.iterrows():
        x,y,w,h = row[['x','y','w','h']].astype(int).values
        cv2.rectangle(im1,(x,y),(x+w,y+h),(0,255,0),2)
        # Crop from the pristine copy so drawn rectangles don't leak in.
        ImSegments.append(Original[y:y+h,x:x+w])
    return df, im1, ImSegments
def FilterImage(word):
    """Pad *word* with white borders to a square, then letterbox/resize it to
    the fixed 640x480 model input size.

    NOTE(review): the second border computation uses max(word.shape), which
    on a 3-channel image compares against the channel dimension too --
    presumably intended to be max of the first two dims; confirm.
    """
    shape = np.array(word.shape[:2])
    # Pad the short side with white so the crop becomes square.
    w,l = np.divide(np.subtract(shape.max(),shape),2).astype(int)
    word = cv2.copyMakeBorder(word, w, w, l, l,cv2.BORDER_CONSTANT,value=[255,255,255])
    if shape.max()<480:
        word = cv2.resize(word, (480,480))
    # Pad out to the 480x640 aspect, then resize to the exact model input.
    w, l = np.divide(np.subtract(np.array([480, 640]),max(word.shape)),2).astype(int)
    word = cv2.copyMakeBorder(word, w, w, l, l,cv2.BORDER_CONSTANT,value=[255,255,255])
    word = cv2.resize(word, (640, 480))
    return word
def Preoprocessimage(a, ModeofImage):
    """Whiten near-background pixels of *a* (pixels whose channel sum is close
    to the background mode image), then denoise with opening + Gaussian blur.

    NOTE(review): mutates *a* in place before returning the blurred copy --
    callers should pass a copy if they need the original.  The function name
    keeps the original "Preoprocess" typo because callers use it.
    """
    # Pixels within 25 (summed over channels) of the background mode -> white.
    a[np.abs(a.sum(2) - ModeofImage.sum(2))<25] = 255
    kernel = np.ones((2,2),np.uint8)
    # Morphological opening removes small speckles; blur smooths the rest.
    b = cv2.morphologyEx(a, cv2.MORPH_OPEN, kernel)
    b = cv2.GaussianBlur(b,(3,3),0)
    return b
def GetNoofPersons(points, row):
    """Count annotation points strictly inside the box given by row's x/y/w/h."""
    x0, y0 = row['x'], row['y']
    x1, y1 = x0 + row['w'], y0 + row['h']
    xs, ys = points[:, 0], points[:, 1]
    inside = (xs > x0) & (xs < x1) & (ys > y0) & (ys < y1)
    return int(np.count_nonzero(inside))
def GenerateInsideData(activation_model, array, points, testing=False):
    """Segment crowd clusters from a CNN activation map and label each with
    the number of annotated persons inside it.

    NOTE(review): this copy references ``img`` and ``rvalue`` (in the
    AdjustContours call) which are defined neither in this module nor among
    the parameters -- calling it as-is raises NameError.  The copy in
    TestModel.py takes an explicit ``image`` argument instead; presumably
    this one should too.  Confirm and align.
    """
    # Run the truncated (6-output) activation model over the input batch.
    a = activation_model.predict(array)
    # Take the 6th layer's first feature map as a crowd-density embedding.
    first_layer_activation = a[5]
    n = first_layer_activation[0, :, :, 0].copy()
    # Median blur to suppress activation noise.
    n = cv2.medianBlur(n,3)
    # Zero out weak responses (below a third of the peak), then denoise again.
    n[n<n.max()/3] = 0
    n = cv2.medianBlur(n,3)
    # Contour the surviving activation blobs into candidate crowd regions.
    df, im1 = ApplyContours(n.copy())
    # Perspective-adjust and merge boxes; crop the image segments.
    df, im1, ImSegments = AdjustContours(df, img[rvalue].copy())
    # Number of annotated head positions falling inside each region.
    df['NoofPersons'] = df.apply(lambda x: GetNoofPersons(points, x), 1)
    # Pad/resize each crop to the fixed 480x640 model input size.
    CrowdGroups = np.concatenate([FilterImage(word)[np.newaxis] for word in ImSegments])
    if testing:
        return df, CrowdGroups, im1
    #df['Images'] = CrowdGroups
    return df, CrowdGroups
| {"/TestModel.py": ["/Utils.py"]} |
42,410 | Nagakiran1/Crowd-Counting-CNN-density-based | refs/heads/main | /DownloadData.py | from pydrive.auth import GoogleAuth
from pydrive.drive import GoogleDrive
from google.colab import auth
from oauth2client.client import GoogleCredentials
auth.authenticate_user()
gauth = GoogleAuth()
gauth.credentials = GoogleCredentials.get_application_default()
drive1 = GoogleDrive(gauth)
%%time
IMAGE_FILE_ID = "1y0BoU7RIisMgj4PsCPbY02PPm4CEXaEF"
downloaded = drive.CreateFile({'id':IMAGE_FILE_ID})
downloaded.GetContentFile('images.npy')
IMAGE_GT_FILE_ID = "1F8MSiZ974Hta6gZcex9DFvGwbhA2u3s8"
downloaded = drive.CreateFile({'id':IMAGE_GT_FILE_ID})
downloaded.GetContentFile('images_gt.npy')
IMAGE_DENSITY_FILE_ID = "1-1JXmD6sumzJcATFQzm9Yq7CCDZ2K7cT"
downloaded = drive.CreateFile({'id':IMAGE_DENSITY_FILE_ID})
downloaded.GetContentFile('images_density.npy')
LABEL_FILE_ID = "1NYHK2AcLmm-sHpZWFR5vn_ejxX3KB7Mz"
downloaded = drive.CreateFile({'id':LABEL_FILE_ID})
downloaded.GetContentFile('labels.npy') | {"/TestModel.py": ["/Utils.py"]} |
42,411 | Nagakiran1/Crowd-Counting-CNN-density-based | refs/heads/main | /TestModel.py | from keras.models import model_from_json
import numpy as np
from keras import models
import cv2, os
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from Utils import FilterContours,ApplyContours, AdjustContours, FilterImage, Preoprocessimage
# def Preoprocessimage(a, ModeofImage):
# #a[np.abs(a[:,:,0]-m[:,:,0])<10] = 255
# a[np.abs(a.sum(2) - ModeofImage.sum(2))<25] = 255
# kernel = np.ones((2,2),np.uint8)
# b = cv2.morphologyEx(a, cv2.MORPH_OPEN, kernel)
# b = cv2.GaussianBlur(b,(3,3),0)
# return b
def GetNoofPersons(points, row):
    """Count the annotated head points lying strictly inside the bounding
    box given by row's 'x', 'y', 'w', 'h' fields."""
    left, top = row['x'], row['y']
    right, bottom = left + row['w'], top + row['h']
    inside_x = (points[:, 0] > left) & (points[:, 0] < right)
    inside_y = (points[:, 1] > top) & (points[:, 1] < bottom)
    return int(np.count_nonzero(inside_x & inside_y))
def GenerateInsideData(activation_model, array, image, points=None, testing=False):
    """Segment crowd clusters from an intermediate CNN activation map.

    Parameters
    ----------
    activation_model : multi-output model exposing per-layer activations.
    array : preprocessed input batch fed to the model.
    image : original image the contours are drawn on / cropped from.
    points : annotated head coordinates; required when testing is False.
    testing : when True, return early with the raw activation map and the
        annotated image instead of computing ground-truth counts.
    """
    # Run the truncated model; `a` holds one activation tensor per layer.
    a = activation_model.predict(array)
    # Use the sixth layer's first channel as the crowd-pattern map.
    first_layer_activation = a[5]
    n = first_layer_activation[0, :, :, 0].copy()
    # Median blur to reduce speckle noise in the activation map.
    n = cv2.medianBlur(n, 3)
    # Threshold: zero out weak activations (below one third of the maximum).
    # (The original comment mislabeled this step as a second blur.)
    n[n < n.max() / 3] = 0
    n = cv2.medianBlur(n, 3)
    # Contour detection over the cleaned activation map.
    df, im1 = ApplyContours(n.copy())
    # Adjust the contours and apply area-wise dilation for the camera angle.
    df, im2, ImSegments = AdjustContours(df, image.copy())
    # Filter and zero-pad each crop for the crowd-counting model.
    CrowdGroups = np.concatenate([FilterImage(word)[np.newaxis] for word in ImSegments])
    if testing:
        return df, CrowdGroups, n, im2
    # Ground-truth person count per cluster from the annotated head points.
    df['NoofPersons'] = df.apply(lambda x: GetNoofPersons(points, x), 1)
    # df['Images'] = CrowdGroups
    return df, CrowdGroups
def PlotGroups(im1, df1, font=0.7):
    """Draw a green rectangle plus a 'group size' caption for every row of
    df1 onto im1 (modified in place) and return it."""
    for _, record in df1.iterrows():
        x, y, w, h = record[['x', 'y', 'w', 'h']].astype(int).values
        cv2.rectangle(im1, (x, y), (x + w, y + h), (0, 255, 0), 2)
        caption = 'group size - ' + str(int(record['NoofPersons']))
        cv2.putText(im1, caption, (x, y + 20), cv2.FONT_HERSHEY_SIMPLEX,
                    font, (255, 0, 0), 2, cv2.LINE_AA)
    return im1
def ReadData():
    """Load the image stack.

    Uses the cached 'images.npy' when present; otherwise reads every file in
    the 'data' directory. Returns a single stacked numpy array.
    """
    if not os.path.exists('images.npy'):
        ims = []
        for file in os.listdir('data'):
            im = cv2.imread(os.path.join('data', file))
            ims.append(im[np.newaxis])
        # BUG FIX: concatenation must happen only in this branch; in the
        # original it ran after the else-branch as well, clobbering the
        # loaded array (and raising NameError for the undefined `ims`).
        img = np.concatenate(ims)
    else:
        img = np.load('images.npy')
    return img
def _load_serialized_model(json_path, weights_path):
    """Rebuild a keras model from an architecture JSON file and its HDF5
    weights file (the original duplicated this sequence twice)."""
    # `with` ensures the JSON file handle is closed (the original leaked it
    # on exceptions despite the explicit close()).
    with open(json_path, 'r') as json_file:
        loaded_model_json = json_file.read()
    model = model_from_json(loaded_model_json)
    model.load_weights(weights_path)
    print("Loaded model from disk")
    return model


def LoadModels():
    """Load the group-detection and person-counting models.

    Returns (GroupModel, PersonModel, activation_model) where
    activation_model is a multi-output model exposing the outputs of the
    first 12 layers of GroupModel for activation inspection.
    """
    GroupModel = _load_serialized_model('Models/model1.json', 'Models/model1.h5')
    PersonModel = _load_serialized_model('Models/PersonModel.json', 'Models/PersonModel.h5')
    # Extract the outputs of the top 12 layers of the group model.
    layer_outputs = [layer.output for layer in GroupModel.layers[:12]]
    # Model returning those intermediate outputs, given the model input.
    activation_model = models.Model(inputs=GroupModel.input, outputs=layer_outputs)
    return GroupModel, PersonModel, activation_model
# Background/mode image used to suppress static scenery during preprocessing.
ModeofImage = np.load('ModeOfImage.npy')
img = ReadData()
GroupModel, PersonModel, activation_model = LoadModels()
while True:
    # Test on one random image out of the first 50.
    rvalue = np.random.randint(50)
    image = img[rvalue]
    array = Preoprocessimage(image.copy(), ModeofImage)[None, :]
    df, CG, n, im2 = GenerateInsideData(activation_model, array, image.copy(), testing=True)
    # Predict the head count for each detected cluster crop.
    df['NoofPersons'] = PersonModel.predict(CG).flatten()
    # Keep only clusters that look like actual groups (> 3 people).
    df1 = df.loc[df['NoofPersons'] > 3]
    im1 = image.copy()
    im1 = PlotGroups(im1, df1, font=0.8)
    # BUG FIX: cv2.imshow requires a window name as its first argument.
    cv2.imshow('CrowdGroups', im1)
    # Exit on ESC.
    if cv2.waitKey(33) == 27:
        break
| {"/TestModel.py": ["/Utils.py"]} |
42,412 | Nagakiran1/Crowd-Counting-CNN-density-based | refs/heads/main | /Model.py | # Importig required Libaries
# Importing required libraries.
from keras.callbacks import TensorBoard
from keras.layers import Activation, Conv2D, Cropping2D, Dense, Flatten, Input, Lambda
from keras.models import Model, Sequential, model_from_json
from keras.preprocessing.image import ImageDataGenerator
def Pattern_Recognion_Model_API(X_train, y_train):
    """Build a functional-API CNN regressor: five Conv2D+ReLU blocks
    followed by five Dense+ReLU layers ending in one linear output unit.

    Compiled with Adam and mean-squared-error loss/metric.

    Parameters
    ----------
    X_train : array used only for its shape when building the input layer.
    y_train : unused here; kept for interface compatibility.
    """
    # BUG FIX: `Input` and `Model` were referenced without being imported;
    # they are now imported at the top of the file.
    # NOTE(review): Input(shape=X_train.shape) includes the sample axis --
    # keras normally expects X_train.shape[1:]; confirm before changing.
    X_input = Input(shape=X_train.shape, name='img_in')
    # Normalise pixel values to [-0.5, 0.5].
    X = Lambda(lambda x: (x / 255.0) - 0.5)(X_input)
    X = Conv2D(filters=6, kernel_size=(3, 3), strides=(1, 1), padding='valid', dilation_rate=(1, 1), use_bias=True,
               kernel_initializer='glorot_uniform', bias_initializer='zeros')(X)
    X = Activation('relu')(X)
    X = Conv2D(filters=6, kernel_size=(5, 5), strides=(2, 2), padding='valid', dilation_rate=(1, 1), use_bias=True,
               kernel_initializer='glorot_uniform', bias_initializer='zeros')(X)
    X = Activation('relu')(X)
    X = Conv2D(filters=16, kernel_size=(5, 5), strides=(2, 2), padding='valid', dilation_rate=(1, 1), use_bias=True,
               kernel_initializer='glorot_uniform', bias_initializer='zeros')(X)
    X = Activation('relu')(X)
    X = Conv2D(filters=16, kernel_size=(3, 3), strides=(1, 1), padding='valid', dilation_rate=(1, 1), use_bias=True,
               kernel_initializer='glorot_uniform', bias_initializer='zeros')(X)
    X = Activation('relu')(X)
    X = Conv2D(filters=32, kernel_size=(3, 3), strides=(1, 1), padding='valid', dilation_rate=(1, 1), use_bias=True,
               kernel_initializer='glorot_uniform', bias_initializer='zeros')(X)
    X = Activation('relu')(X)
    # Fully connected head, shrinking layer by layer.
    X = Flatten()(X)
    X = Dense(units=1164)(X)
    X = Activation('relu')(X)
    X = Dense(units=100)(X)
    X = Activation('relu')(X)
    X = Dense(units=50)(X)
    X = Activation('relu')(X)
    X = Dense(units=10)(X)
    X = Activation('relu')(X)
    # Single linear regression output.
    X = Dense(units=1)(X)
    model = Model(inputs=X_input, outputs=X, name='Convolve')
    model.compile(optimizer='adam', loss='mean_squared_error', metrics=['mse'])
    return model
def PatternRecognitionModel(input_shape):
    """Build a Sequential CNN regressor ending in one linear output unit.

    Crops 5 rows from the top and bottom of the input, normalises pixels to
    [-0.5, 0.5], then applies seven Conv2D layers and five Dense layers.
    Compiled with Adam, mean-squared-error loss and MAE metric.
    """
    # Model
    model = Sequential()
    # Convolutional stack.
    model.add(Cropping2D(cropping=((5, 5), (0, 0)), input_shape=input_shape))
    # Normalise pixel values to [-0.5, 0.5].
    model.add(Lambda(lambda x: (x / 255.0) - 0.5))
    model.add(Conv2D(filters=3, kernel_size=(5, 5), strides=(2, 2), padding='valid', dilation_rate=(1, 1), use_bias=True,
                     kernel_initializer='glorot_uniform', bias_initializer='zeros'))
    model.add(Activation('relu'))
    model.add(Conv2D(filters=3, kernel_size=(5, 5), strides=(2, 2), padding='valid', dilation_rate=(1, 1), use_bias=True,
                     kernel_initializer='glorot_uniform', bias_initializer='zeros'))
    model.add(Activation('relu'))
    # NOTE(review): this conv layer has no Activation after it, unlike every
    # other one -- possibly an omission; confirm before changing.
    model.add(Conv2D(filters=8, kernel_size=(3, 3), strides=(2, 2), padding='valid', dilation_rate=(1, 1), use_bias=True,
                     kernel_initializer='glorot_uniform', bias_initializer='zeros'))
    model.add(Conv2D(filters=16, kernel_size=(5, 5), strides=(2, 2), padding='valid', dilation_rate=(1, 1), use_bias=True,
                     kernel_initializer='glorot_uniform', bias_initializer='zeros'))
    model.add(Activation('relu'))
    model.add(Conv2D(filters=16, kernel_size=(5, 5), strides=(2, 2), padding='valid', dilation_rate=(1, 1), use_bias=True,
                     kernel_initializer='glorot_uniform', bias_initializer='zeros'))
    model.add(Activation('relu'))
    model.add(Conv2D(filters=32, kernel_size=(3, 3), strides=(1, 1), padding='valid', dilation_rate=(1, 1), use_bias=True,
                     kernel_initializer='glorot_uniform', bias_initializer='zeros'))
    model.add(Activation('relu'))
    model.add(Conv2D(filters=16, kernel_size=(3, 3), strides=(1, 1), padding='valid', dilation_rate=(1, 1), use_bias=True,
                     kernel_initializer='glorot_uniform', bias_initializer='zeros'))
    model.add(Activation('relu'))
    # Fully connected head, shrinking layer by layer.
    model.add(Flatten())
    model.add(Dense(units=1164))
    model.add(Activation('relu'))
    model.add(Dense(units=100))
    model.add(Activation('relu'))
    model.add(Dense(units=50))
    model.add(Activation('relu'))
    model.add(Dense(units=10))
    model.add(Activation('relu'))
    # Single linear regression output.
    model.add(Dense(units=1))
    model.compile(loss='mean_squared_error', optimizer='Adam', metrics=['mae'])
    return model
| {"/TestModel.py": ["/Utils.py"]} |
42,432 | tickleliu/rnn-tf2 | refs/heads/master | /models/rnn.py | from typing import TypeVar
import tensorflow as tf
import tensorflow.compat.v1 as tfc
from tensorflow import keras
from tensorflow.compat.v1.lite.experimental.nn import TFLiteLSTMCell
from tensorflow.compat.v1.lite.experimental.nn import dynamic_rnn as lite_dynamic_rnn
from tensorflow.compat.v1.nn import static_rnn, dynamic_rnn
from tensorflow.compat.v1.nn.rnn_cell import LSTMCell
class RNN:
    """Abstract interface shared by all LSTM wrappers in this module."""

    # NOTE(review): these annotations look wrong -- `tfc.placeholder` is a
    # function, and `sequence_length` receives a tensor/array (or None),
    # not a bool; confirm before tightening them.
    def output(self, features: tfc.placeholder, sequence_length: bool = None):
        """Return per-timestep RNN outputs for `features`."""
        raise NotImplementedError()


# Type variable meaning "a concrete RNN subclass"; used by tf_rnn.py.
RNN.Class = TypeVar("Class", bound=RNN)
class StaticLSTM(RNN):
    """Unrolled LSTM built on the compat-v1 `static_rnn` op."""

    def __init__(self, latent_units: int):
        self._cell = LSTMCell(latent_units)

    def output(self, features: tfc.placeholder, sequence_length: bool = None):
        # static_rnn expects a Python list of per-timestep tensors.
        timesteps = tf.unstack(features, axis=1)
        outputs, _ = static_rnn(self._cell, timesteps, dtype=tf.float32,
                                sequence_length=sequence_length)
        # Reassemble into a single (batch, time, units) tensor.
        return tf.stack(outputs, axis=1)
class DynamicLSTM(RNN):
    """LSTM built on the compat-v1 `dynamic_rnn` op (no graph unrolling)."""

    def __init__(self, latent_units: int):
        self._cell = LSTMCell(latent_units)

    def output(self, features: tfc.placeholder, sequence_length: bool = None):
        outputs, _ = dynamic_rnn(self._cell, features, dtype=tf.float32,
                                 sequence_length=sequence_length)
        return outputs
class LiteDynamicLSTM(RNN):
    """TFLite-convertible dynamic LSTM; the lite kernel is time-major."""

    def __init__(self, latent_units: int):
        self._cell = TFLiteLSTMCell(latent_units)

    def output(self, features: tfc.placeholder, sequence_length: bool = None):
        # (batch, time, feat) -> (time, batch, feat) for the lite kernel.
        time_major = tf.transpose(features, [1, 0, 2])
        outputs, _ = lite_dynamic_rnn(self._cell, time_major, dtype=tf.float32,
                                      sequence_length=sequence_length)
        # Transpose the result back to batch-major layout.
        return tf.transpose(outputs, [1, 0, 2])
class KerasLSTM(RNN):
    """Base for the tf.keras LSTM wrappers; subclasses choose static or
    dynamic execution via the `unroll` property."""

    def __init__(self, latent_units: int, training: bool = False, *args, **kwargs):
        self._training = training
        self._rnn = keras.layers.RNN(keras.layers.LSTMCell(latent_units),
                                     unroll=self.unroll,
                                     return_sequences=True)

    @property
    def unroll(self):
        # Decided by the concrete subclass.
        raise NotImplementedError()

    def output(self, features, sequence_length=None):
        # The keras path ignores explicit sequence-length information.
        return self._rnn(features, training=self._training)
class KerasStaticLSTM(KerasLSTM):
    """Keras LSTM with the graph unrolled over time (static execution)."""

    # Consistency fix: accept and forward *args/**kwargs exactly like the
    # sibling KerasDynamicLSTM does (backward compatible -- existing calls
    # with only positional/keyword latent_units/training still work).
    def __init__(self, latent_units: int, training: bool = False, *args, **kwargs):
        super().__init__(latent_units, training, *args, **kwargs)

    @property
    def unroll(self):
        return True
class KerasDynamicLSTM(KerasLSTM):
    """Keras LSTM executed with a symbolic time loop (no unrolling)."""

    def __init__(self, latent_units: int, training: bool = False, *args, **kwargs):
        super().__init__(latent_units, training, *args, **kwargs)

    @property
    def unroll(self):
        # Dynamic execution: keep the recurrence as a loop in the graph.
        return False
| {"/tf_rnn.py": ["/models/rnn.py"], "/main.py": ["/models/rnn.py"], "/benchmark_keras.py": ["/models/rnn.py", "/tf_rnn.py"], "/benchmark_compat.py": ["/models/rnn.py", "/tf_rnn.py"]} |
42,433 | tickleliu/rnn-tf2 | refs/heads/master | /tf_rnn.py | from itertools import chain
from time import time
from typing import *
import numpy as np
import tensorflow as tf
import tensorflow.compat.v1 as tfc
from dataclasses import dataclass
from models.rnn import RNN, StaticLSTM
@dataclass
class BenchmarkSequences:
    """Sequence-length sweeps used by the benchmarks (each sweep is run
    forward then reversed -- see Data._benchmark_range)."""

    # Annotation fix: Tuple[int] denotes a 1-tuple; these are variable
    # length homogeneous tuples, i.e. Tuple[int, ...].
    long: Tuple[int, ...] = (10, 50, 200, 500, 1000)
    short: Tuple[int, ...] = (10, 50, 200)
@dataclass
class Shapes:
    """Tensor dimensions shared by all benchmark runs."""

    batch: int = 32        # examples per batch
    features: int = 200    # input feature dimension
    latent: int = 200      # LSTM hidden units
@dataclass
class Constants:
    """Benchmark workload size."""

    epoch_count: int = 10  # passes over each per-length dataset
    batch_count: int = 50  # batches generated per sequence length
class TrackTime:
    """Context manager that prints how long its `with` body took."""

    def __init__(self, name: str):
        self._name = name
        self._time_begin = None
        self._time_end = None

    def __enter__(self):
        # Note: deliberately returns None, matching the original contract.
        self._time_begin = time()

    def __exit__(self, exc_type, exc_val, exc_tb):
        self._time_end = time()
        elapsed = self._time_end - self._time_begin
        print(f"{self._name} TIME: {elapsed:.6} [s]")
@dataclass
class Data:
    """Synthetic benchmark data: one list entry per sequence length in the
    sweep, each holding Constants.batch_count batches."""

    sequence_lengths: List[np.array]   # each entry: arrays of shape (batch,)
    features: List[np.array]           # each entry: arrays (batch, T, features)
    max_sequence_length: int

    @staticmethod
    def _benchmark_range(seq_range):
        # Sweep the lengths forward, then backward again.
        return chain(seq_range, reversed(seq_range))

    @staticmethod
    def _create_dataset(seq, pad: bool = False):
        all_lengths = []
        all_features = []
        longest = max(seq)
        for seq_len in Data._benchmark_range(seq):
            length_batches = [np.full(Shapes.batch, seq_len, dtype=np.float32)
                              for _ in range(Constants.batch_count)]
            width = longest if pad else seq_len
            feature_batches = [np.random.rand(Shapes.batch, width,
                                              Shapes.features).astype(np.float32)
                               for _ in range(Constants.batch_count)]
            all_lengths.append(length_batches)
            all_features.append(feature_batches)
        return Data(all_lengths, all_features, longest)

    @staticmethod
    def dynamic(seq: List[int]):
        """Not padded data with different sequence lengths"""
        return Data._create_dataset(seq, pad=False)

    @staticmethod
    def static(seq: List[int]):
        """Padded data with different sequence lengths"""
        return Data._create_dataset(seq, pad=True)
def run_rnn_compat(model_cls: Type[RNN.Class], data: Data, use_sequence_length_info: bool = False):
    """Benchmark one compat-v1 RNN wrapper in graph mode over every
    sequence length in `data`, printing per-sweep and total timings.

    When use_sequence_length_info is True, the sequence-length placeholder
    is wired into the RNN; otherwise lengths are still fed but unused.
    """
    # Static graphs need a fixed time dimension; dynamic ones accept None.
    seq_len_dim = data.max_sequence_length if model_cls is StaticLSTM else None
    with tfc.variable_scope("input"):
        features = tfc.placeholder(shape=(Shapes.batch, seq_len_dim, Shapes.features),
                                   dtype=tf.float32, name="features")
        sequence_length = tfc.placeholder(shape=(Shapes.batch,), dtype=tf.float32, name="sequence_length")
    # Scope per (model, flag) combination so variables are reused safely.
    with tfc.variable_scope(model_cls.__name__ + str(use_sequence_length_info), reuse=tfc.AUTO_REUSE):
        lstm = model_cls(latent_units=Shapes.latent) \
            .output(features, sequence_length=sequence_length if use_sequence_length_info else None)
    with tfc.Session() as sess:
        sess.run(tfc.global_variables_initializer())
        print(f"Running {model_cls.__name__}")
        with TrackTime("TOTAL"):
            # BUG FIX (readability): the loop variable used to shadow the
            # `data` parameter; renamed to `batches`.
            for seq_len, batches in zip(data.sequence_lengths, data.features):
                with TrackTime(f"epoch_count: {Constants.epoch_count}\t"
                               f" batch_count: " f"{Constants.batch_count}\t"
                               f" batch_size:" f" {Shapes.batch}\t "
                               f"sequence_length:" f" {seq_len[0][0]}\t "
                               f"use_sequence_length_info={use_sequence_length_info}\t "):
                    for seq_len_b, data_b in zip(seq_len, batches):
                        sess.run(lstm, feed_dict={features: data_b,
                                                  sequence_length: seq_len_b})
def run_rnn_keras(model_cls: Type[RNN.Class], data: Data):
    """Benchmark one keras RNN wrapper (compiled with tf.function) over
    every sequence length in `data`, printing per-sweep and total timings."""
    lstm = tf.function(model_cls(latent_units=Shapes.latent).output)
    print(f"Running {model_cls.__name__}")
    with TrackTime("TOTAL"):
        # BUG FIX (readability): the loop variable used to shadow the
        # `data` parameter; renamed to `batches`.
        for seq_len, batches in zip(data.sequence_lengths, data.features):
            with TrackTime(f"epoch_count: {Constants.epoch_count}\t"
                           f" batch_count: " f"{Constants.batch_count}\t"
                           f" batch_size:" f" {Shapes.batch}\t "
                           f" sequence_length: {seq_len[0][0]}\t "):
                for _ in range(Constants.epoch_count):
                    # seq_len_b is deliberately unused: the keras path
                    # ignores sequence-length information.
                    for seq_len_b, data_b in zip(seq_len, batches):
                        lstm(data_b)
| {"/tf_rnn.py": ["/models/rnn.py"], "/main.py": ["/models/rnn.py"], "/benchmark_keras.py": ["/models/rnn.py", "/tf_rnn.py"], "/benchmark_compat.py": ["/models/rnn.py", "/tf_rnn.py"]} |
42,434 | tickleliu/rnn-tf2 | refs/heads/master | /train_basic.py | import numpy as np
import tensorflow as tf
from dataclasses import dataclass
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import OneHotEncoder
from models.basic import FullConnected
# Network and training hyper-parameters for the Iris demo.
UNITS_LATENT = 200   # hidden-layer width
UNITS_OUTPUT = 3     # one output per Iris class
EPOCH_COUNT = 5
# When True, train_step runs eagerly (easier to debug); when False it is
# compiled with tf.function.
DEBUG = False
# DEBUG = True
@dataclass
class Set:
    """One dataset split: feature matrix plus one-hot label matrix."""

    # Annotation fix: np.array is a function; the field type is np.ndarray.
    features: np.ndarray
    labels: np.ndarray
class IrisDataset:
    """Iris data split 80/20 into `train`/`test` Sets with one-hot labels."""

    def __init__(self):
        bunch = load_iris()
        raw_features = bunch.data
        targets = bunch.target.reshape(-1, 1)
        # One-hot encode the class ids (dense, not sparse, output).
        encoder = OneHotEncoder(sparse=False)
        onehot = encoder.fit_transform(targets)
        train_x, test_x, train_y, test_y = train_test_split(raw_features, onehot, test_size=0.2)
        self.train = Set(features=train_x, labels=train_y)
        self.test = Set(features=test_x, labels=test_y)
class TrainFunc:
    """Bundles a model, optimizer and loss into callable predict/train steps.

    With debug=True the raw Python `train_step` is invoked directly; with
    debug=False it is wrapped in tf.function with an explicit signature.
    """

    def __init__(self, model, optimizer, loss, debug=True):
        self.optimizer = optimizer
        self.model = model
        self.loss_fn = loss
        if debug:
            self.call_fn = self.train_step
        else:
            # NOTE(review): train_step is already decorated with tf.function
            # below, so this wraps it a second time -- confirm intended.
            features_spec = tf.TensorSpec(shape=[None, 4], dtype=tf.float32)
            labels_spec = tf.TensorSpec(shape=[None, 3], dtype=tf.int32)
            self.call_fn = tf.function(func=self.train_step, input_signature=[features_spec, labels_spec])

    @tf.function(input_signature=[tf.TensorSpec(shape=[None, 4], dtype=tf.float32)])
    def predict(self, features):
        # Forward pass only.
        return self.model(features)

    @tf.function(input_signature=[tf.TensorSpec(shape=[None, 4], dtype=tf.float32),
                                  tf.TensorSpec(shape=[None, 3], dtype=tf.int32)])
    def train_step(self, features, labels, *args, **kwargs):
        # One gradient-descent step; returns the batch loss tensor.
        with tf.GradientTape() as tape:
            prediction = self.model(features)
            loss = self.loss_fn(labels, prediction)
        gradients = tape.gradient(loss, self.model.trainable_variables)
        self.optimizer.apply_gradients(zip(gradients, self.model.trainable_variables))
        return loss

    def __call__(self, *args, **kwargs):
        return self.call_fn(*args, **kwargs)
def batch(features, labels, batch_size=10):
    """Yield (features, labels) chunks of at most `batch_size` items.

    The final chunk may be shorter. Raises AssertionError when the two
    sequences differ in length.
    """
    assert len(features) == len(labels)
    # BUG FIX: the original wrapped the yield in try/except IndexError, but
    # slicing a sequence never raises IndexError (out-of-range slices are
    # clamped), so the handler was dead code and has been removed.
    for i in range(0, len(features), batch_size):
        yield features[i: i + batch_size], labels[i: i + batch_size]
def train_basic(debug=True):
    """Build the Iris model and TrainFunc, then convert both concrete
    functions to TFLite. The actual training loop is commented out, so
    `data` is currently built but unused.
    """
    data = IrisDataset()
    model = FullConnected(units_latent=UNITS_LATENT, units_output=UNITS_OUTPUT)
    loss_fn = tf.keras.losses.CategoricalCrossentropy(from_logits=False)
    optimizer = tf.keras.optimizers.Adam()
    train_fn = TrainFunc(model, optimizer, loss_fn, debug=debug)
    # NOTE(review): tf.lite.TFLiteConverter is normally built via
    # from_concrete_functions([...]); confirm the direct constructor call
    # works on the TF version in use.
    concrete_func = train_fn.predict.get_concrete_function()
    converter = tf.lite.TFLiteConverter([concrete_func])
    model_lite = converter.convert()
    concrete_func = train_fn.train_step.get_concrete_function()
    converter = tf.lite.TFLiteConverter([concrete_func])
    model_lite = converter.convert()
    # for i_epoch in range(EPOCH_COUNT):
    #     loss = None
    #     for features, labels in batch(data.train.features, data.train.labels):
    #         loss = train_fn(features=features, labels=labels)
    #     print(f"Epoch: {i_epoch}, Loss: {loss.numpy()}")
    #
    # return train_fn
if __name__ == '__main__':
    # Exercise the build/convert path once, using the module-level DEBUG flag.
    train_basic(debug=DEBUG)
# begin_debug = time()
# train_basic(debug=True)
# end_debug = time()
# d_debug = end_debug - begin_debug
# begin = time()
# train_fn = train_basic(debug=DEBUG)
# end = time()
# d = end - begin
#
# fn = train_fn.call_fn.get_concrete_function()
# converter = tf.lite.TFLiteConverter([fn])
# model_lite = converter.convert()
#
# print(f"Training times [s]:")
# print(f"\tDebug:\t {d_debug:.2f}")
# print(f"\tProduction:\t {d:.2f}")
# class BasicModel(tf.Module):
# def __init__(self):
# self.const = None
#
# @tf.function(input_signature=[tf.TensorSpec(shape=[1], dtype=tf.float32)])
# def pow(self, x):
# if self.const is None:
# self.const = tf.Variable(2.)
# return x ** self.const
# Create the tf.Module object.
# root = TrainFunc()
# Get the concrete function.
# concrete_func = root.pow.get_concrete_function()
# converter = tf.lite.TFLiteConverter([concrete_func])
# model_lite = converter.convert()
| {"/tf_rnn.py": ["/models/rnn.py"], "/main.py": ["/models/rnn.py"], "/benchmark_keras.py": ["/models/rnn.py", "/tf_rnn.py"], "/benchmark_compat.py": ["/models/rnn.py", "/tf_rnn.py"]} |
42,435 | tickleliu/rnn-tf2 | refs/heads/master | /main.py | from pathlib import Path
from dataclasses import dataclass
from typing import List, Iterable
from collections import defaultdict
from functools import reduce
from copy import deepcopy
import numpy as np
import os
os.environ['TF_ENABLE_CONTROL_FLOW_V2'] = '1'
import tensorflow as tf
from spacy.lang.en import English, Language
from models.rnn import LiteLSTM
# Training corpus for the CoNLL-style NER dataset.
PATH_NER_TRAIN = Path("data/ner/train.corpus")
class ShakespeareDataset:
    """Groups a Shakespeare-style corpus into per-character utterances."""

    # NOTE(review): indentation below was reconstructed from a
    # whitespace-mangled source -- verify reset/yield placement.
    @staticmethod
    def _group(sequence: Iterable, separator: str):
        # Split `sequence` on `separator`, yielding only runs longer than
        # one element; a final too-short run yields None instead.
        elements = []
        for el in sequence:
            if el == separator:
                if len(elements) > 1:
                    yield elements
                elements = []
            else:
                elements.append(el)
        if len(elements) > 1:
            yield elements
        else:
            yield None

    @staticmethod
    def prepare_corpora(corpora_path: Path, lang: Language = English):
        # NOTE(review): builds `characters` but never returns or stores it,
        # and `tokenizer` is unused -- this looks unfinished.
        tokenizer = lang.Defaults.create_tokenizer()
        text = corpora_path.open('rb').read().decode(encoding='utf-8')
        characters = defaultdict(list)
        # Blank lines separate utterance blocks; the first line of a block
        # names the speaking character.
        gen_utterance = ShakespeareDataset._group(text.split(sep='\n'), separator='')
        for utterance in gen_utterance:
            if utterance is not None:
                name = utterance[0].strip(':').lower()
                utterances = " ".join(utterance[1:])
                characters[name].append(utterances)
@dataclass
class RawExample:
    """One sentence as parallel token / NER-label string lists."""

    text: List[str]
    labels: List[str]
@dataclass
class Example:
    """One sentence with tokens and labels mapped to integer id arrays."""

    # Annotation fix: np.array is a function; these fields hold np.ndarray.
    text: np.ndarray
    labels: np.ndarray

    @staticmethod
    def from_raw_example(raw_example, label2idx, token2idx):
        """Build an Example by looking every token/label up in its vocab map.

        Raises KeyError for out-of-vocabulary tokens or labels.
        """
        return Example(text=np.array([token2idx[t] for t in raw_example.text]),
                       labels=np.array([label2idx[l] for l in raw_example.labels]))
class ConllDataset:
    """Parses a CoNLL-format NER corpus into indexed Examples plus
    token/label vocabularies with lookups in both directions."""

    def __init__(self, corpora_path: Path):
        self.path = corpora_path
        text = self.path.open('r').readlines()
        raw_examples: List[RawExample] = []
        tokens: List[str] = []
        labels: List[str] = []
        skip = False
        for line in text:
            line = line.rstrip('\n\r')
            # Skip the "-DOCSTART-" marker and the line right after it.
            if "DOCSTART" in line:
                skip = True
                continue
            if skip:
                skip = False
                continue
            if line == "":
                # Sentence boundary: flush the accumulated tokens/labels.
                assert len(tokens) == len(labels)
                raw_examples.append(RawExample(text=tokens, labels=labels))
                tokens = []
                labels = []
            else:
                # Columns are space separated: token ... ... NER-label.
                line = line.split(" ")
                tokens.append(line[0])
                labels.append(line[3])

        def flatten(sequence: Iterable) -> Iterable:
            # BUG FIX: the original flattened via list.extend inside
            # functools.reduce, which mutated the FIRST example's list in
            # place; only the token pass was shielded by deepcopy, so the
            # first example's labels were corrupted before Example
            # conversion. Build a fresh list instead (no mutation, so the
            # deepcopy is no longer needed either).
            result = []
            for part in sequence:
                result.extend(part)
            return result

        # Token vocabulary and both lookup directions.
        self.vocab = sorted(set(flatten(e.text for e in raw_examples)))
        self.token2idx = {u: i for i, u in enumerate(self.vocab)}
        self.idx2token = np.array(self.vocab)
        # Label vocabulary and both lookup directions.
        self.labels = sorted(set(flatten(e.labels for e in raw_examples)))
        self.label2idx = {u: i for i, u in enumerate(self.labels)}
        self.idx2label = np.array(self.labels)
        self.examples = [Example.from_raw_example(raw_example=e,
                                                  label2idx=self.label2idx,
                                                  token2idx=self.token2idx) for e in raw_examples]

    def __str__(self):
        return "Conll dataset: \n" \
               f"\t{len(self.vocab)} unique tokens\n" \
               f"\t{len(self.labels)} unique labels: {self.labels}\n"
if __name__ == '__main__':
    dataset = ConllDataset(PATH_NER_TRAIN)
    HIDDEN_UNITS = 200
    EMBEDDINGS_DIM = 200
    # Dummy (batch=1, time=50, emb=200) input for a model smoke test.
    tensor_input = np.random.rand(1, 50, 200)
    print(f"input tensor shape: {tensor_input.shape}")
    # model_keras = KerasLSTM(latent_units=HIDDEN_UNITS,
    #                         embeddings_dim=EMBEDDINGS_DIM,
    #                         vocab_size=len(dataset.vocab),
    #                         class_size=len(dataset.labels))
    # ret = model_keras.predict(tensor_input)
    # converter = tf.lite.TFLiteConverter.from_keras_model(model_keras)
    # model_keras_lite = converter.convert()
    # print(model_keras.summary())
    # model_tf = LiteKerasLSTM(latent_units=HIDDEN_UNITS,
    #                          embeddings_dim=EMBEDDINGS_DIM,
    #                          vocab_size=len(dataset.vocab),
    #                          class_size=len(dataset.labels))
    # ret = model_tf.predict(tensor_input)
    # converter = tf.lite.TFLiteConverter.from_keras_model(model_tf)
    # model_keras_lite = converter.convert()
    # print(model_keras.summary())
    # NOTE(review): LiteLSTM is imported from models.rnn at the top of this
    # file, but models/rnn.py does not define it -- this script likely fails
    # at import time; confirm which class was intended.
    model_lite = LiteLSTM(latent_units=HIDDEN_UNITS,
                          embeddings_dim=EMBEDDINGS_DIM,
                          vocab_size=len(dataset.vocab),
                          class_size=len(dataset.labels))
    model_lite(tensor_input)
| {"/tf_rnn.py": ["/models/rnn.py"], "/main.py": ["/models/rnn.py"], "/benchmark_keras.py": ["/models/rnn.py", "/tf_rnn.py"], "/benchmark_compat.py": ["/models/rnn.py", "/tf_rnn.py"]} |
42,436 | tickleliu/rnn-tf2 | refs/heads/master | /benchmark_keras.py | import os
os.environ['CUDA_VISIBLE_DEVICES'] = '-1'
from models.rnn import KerasDynamicLSTM
from tf_rnn import Data, run_rnn_keras, BenchmarkSequences
def tf_keras(seq):
    """Benchmark the keras dynamic LSTM on unpadded data for the given
    sequence-length sweep."""
    unpadded = Data.dynamic(seq)
    run_rnn_keras(KerasDynamicLSTM, unpadded)
if __name__ == '__main__':
    # Run the short sweep first, then the long one.
    tf_keras(BenchmarkSequences.short)
    tf_keras(BenchmarkSequences.long)
| {"/tf_rnn.py": ["/models/rnn.py"], "/main.py": ["/models/rnn.py"], "/benchmark_keras.py": ["/models/rnn.py", "/tf_rnn.py"], "/benchmark_compat.py": ["/models/rnn.py", "/tf_rnn.py"]} |
42,437 | tickleliu/rnn-tf2 | refs/heads/master | /benchmark_compat.py | import os
os.environ['TF_ENABLE_CONTROL_FLOW_V2'] = '1'
os.environ['CUDA_VISIBLE_DEVICES'] = '-1'
import tensorflow.compat.v1 as tfc
from models.rnn import StaticLSTM
from tf_rnn import Data, run_rnn_compat, BenchmarkSequences
def tf_compat(seq):
    """Benchmark the compat-v1 static LSTM in graph mode on padded data,
    once without and once with sequence-length information."""
    tfc.disable_eager_execution()
    # data_dynamic = Data.dynamic(seq)
    #
    # This doesn't work under tensorflow-2.0.0-rc0, should be run with tensorflow-1.14.0
    # run_rnn_compat(LiteDynamicLSTM, data_dynamic)
    # run_rnn_compat(LiteDynamicLSTM, data_dynamic, use_sequence_length_info=True)
    #
    # run_rnn_compat(DynamicLSTM, data_dynamic)
    # run_rnn_compat(DynamicLSTM, data_dynamic, use_sequence_length_info=True)
    data_static = Data.static(seq)
    run_rnn_compat(StaticLSTM, data_static)
    run_rnn_compat(StaticLSTM, data_static, use_sequence_length_info=True)
if __name__ == '__main__':
    # Only the long sweep is enabled by default.
    # tf_compat(BenchmarkSequences.short)
    tf_compat(BenchmarkSequences.long)
| {"/tf_rnn.py": ["/models/rnn.py"], "/main.py": ["/models/rnn.py"], "/benchmark_keras.py": ["/models/rnn.py", "/tf_rnn.py"], "/benchmark_compat.py": ["/models/rnn.py", "/tf_rnn.py"]} |
42,458 | Jeewai/co-addis-tms | refs/heads/master | /tickets/migrations/0011_auto_20210203_1717.py | # Generated by Django 3.1.4 on 2021-02-03 14:17
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated migration: inlines Ticket.category choices, repoints
    # Ticket.official at AUTH_USER_MODEL and drops the Official model.
    # Do not hand-edit applied migrations.

    dependencies = [
        ('tickets', '0010_auto_20210202_1650'),
    ]

    operations = [
        migrations.AlterField(
            model_name='ticket',
            name='category',
            field=models.CharField(choices=[('IT', 'IT'), ('Finance', 'Finance'), ('Procurement', 'Procurement')], max_length=100),
        ),
        migrations.AlterField(
            model_name='ticket',
            name='official',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
        migrations.DeleteModel(
            name='Official',
        ),
    ]
42,459 | Jeewai/co-addis-tms | refs/heads/master | /tickets/models.py | from django.db import models
from django.db.models.signals import post_save
from django.contrib.auth.models import AbstractUser
# Create your models here.
class User(AbstractUser):
    """Custom auth user with a functional role, a country office and
    organiser/agent/official flags."""

    # Functional role of the account (stored as a small integer).
    ROLE_CHOICES = (
        (1, 'Official'),
        (2, 'Finance'),
        (3, 'Procurement'),
        (4, 'IT'),
        (5, 'HR'),
    )
    # Country office the user belongs to.
    OFFICE_CHOICES = (
        (1, 'Ethiopia'),
        (2, 'Sudan'),
        (3, 'South Sudan'),
        (4, 'Djibouti'),
        (5, 'Somalia'),
    )
    is_organiser = models.BooleanField(default=False)
    is_agent = models.BooleanField(default=False)
    # Every new account defaults to the plain 'official' role.
    is_official = models.BooleanField(default=True)
    role = models.PositiveSmallIntegerField(choices=ROLE_CHOICES, default=1)
    office = models.PositiveSmallIntegerField(choices=OFFICE_CHOICES, default=1)
    project = models.CharField(max_length=200, null=True, blank=True)
class UserProfile(models.Model):
    """1:1 profile row created automatically for every User (see the
    post_save signal at the bottom of this module)."""

    user = models.OneToOneField(User, on_delete=models.CASCADE)

    def __str__(self):
        return self.user.username
class Ticket(models.Model):
    """A request raised by an official, optionally assigned to an agent,
    moving through Status values within a category."""

    # STATUS_CHOICES = {
    #     ('Requested', 'Requested'),
    #     ('Under Review', 'Under Review'),
    #     ('Approved', 'Approved'),
    # }
    # BUG FIX: these choices were declared as SET literals; set iteration
    # order varies between processes, which makes `makemigrations` emit
    # spurious AlterField migrations (compare migrations 0010 vs 0011).
    # Declared as lists so the order is stable. Requires one final
    # migration to settle the recorded order.
    CATEGORY_CHOICES = [
        ('Finance', 'Finance'),
        ('Procurement', 'Procurement'),
        ('IT', 'IT'),
    ]
    ORGANIZATION_CHOICES = [
        ('LORC_ETH', 'LORC_ETH'),
        ('LORC_SUD', 'LORC_SUD'),
    ]
    title = models.CharField(max_length=200)
    content = models.TextField()
    # organization = models.ForeignKey(UserProfile, on_delete=models.CASCADE)
    organization = models.CharField(choices=ORGANIZATION_CHOICES, max_length=100)
    agent = models.ForeignKey("Agent", null=True, blank=True, on_delete=models.SET_NULL)
    official = models.ForeignKey(UserProfile, on_delete=models.CASCADE)
    status = models.ForeignKey("Status", related_name="tickets", null=True, blank=True, on_delete=models.SET_NULL)
    category = models.CharField(choices=CATEGORY_CHOICES, max_length=100)
    file = models.FileField(upload_to='documents/', null=True, blank=True)
    created_at = models.DateTimeField(auto_now_add=True)
    # BUG FIX: auto_now_add froze this at creation time; auto_now refreshes
    # the timestamp on every save, which is what an "updated_at" is for.
    updated_at = models.DateTimeField(auto_now=True)

    def __str__(self):
        return self.title
class Agent(models.Model):
    """Staff member who handles tickets; linked 1:1 to a User account."""

    user = models.OneToOneField(User, on_delete=models.CASCADE)
    organization = models.ForeignKey(UserProfile, on_delete=models.CASCADE)

    def __str__(self):
        return self.user.email
class Status(models.Model):
    """Workflow state of a ticket, scoped to an organization."""

    name = models.CharField(max_length=30)  # Requested, Under Review, Approved
    organization = models.ForeignKey(UserProfile, on_delete=models.CASCADE)

    def __str__(self):
        return self.name
# class Official(models.Model):
# user = models.OneToOneField(User, on_delete=models.CASCADE)
# organization = models.ForeignKey(UserProfile, on_delete=models.CASCADE)
# def __str__(self):
# return self.user.email
def post_user_created_signal(sender, instance, created, **kwargs):
    """post_save receiver: create the companion UserProfile for every
    newly created User; saves of existing users are a no-op."""
    # BUG FIX: removed the leftover debug `print(instance, created)`.
    if created:
        UserProfile.objects.create(user=instance)


# Fire on every User save; only newly-created rows get a profile.
post_save.connect(post_user_created_signal, sender=User)
42,460 | Jeewai/co-addis-tms | refs/heads/master | /tickets/migrations/0005_ticket_organization.py | # Generated by Django 3.1.4 on 2021-01-30 10:30
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated migration: adds the (UserProfile-backed) organization
    # FK to Ticket. Do not hand-edit applied migrations.

    dependencies = [
        ('tickets', '0004_auto_20210128_0023'),
    ]

    operations = [
        migrations.AddField(
            model_name='ticket',
            name='organization',
            field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='tickets.userprofile'),
            preserve_default=False,
        ),
    ]
42,461 | Jeewai/co-addis-tms | refs/heads/master | /tickets/migrations/0010_auto_20210202_1650.py | # Generated by Django 3.1.4 on 2021-02-02 13:50
from django.db import migrations, models
import django.utils.timezone
# Auto-generated Django migration — regenerate via makemigrations rather than
# hand-editing.  Adds Ticket.category/created_at/file/updated_at.
# NOTE(review): `updated_at` was generated with auto_now_add (set once at
# creation, never on update); the model presumably wants auto_now — confirm
# against tickets/models.py.
class Migration(migrations.Migration):
dependencies = [
('tickets', '0009_auto_20210201_2302'),
]
operations = [
migrations.AddField(
model_name='ticket',
name='category',
field=models.CharField(choices=[('Procurement', 'Procurement'), ('Finance', 'Finance'), ('IT', 'IT')], default=django.utils.timezone.now, max_length=100),
preserve_default=False,
),
migrations.AddField(
model_name='ticket',
name='created_at',
field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now),
preserve_default=False,
),
migrations.AddField(
model_name='ticket',
name='file',
field=models.FileField(blank=True, null=True, upload_to='documents/'),
),
migrations.AddField(
model_name='ticket',
name='updated_at',
field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now),
preserve_default=False,
),
]
| {"/tickets/urls.py": ["/tickets/views.py"], "/tickets/views.py": ["/tickets/models.py", "/tickets/forms.py"], "/agents/views.py": ["/tickets/models.py"], "/tickets/admin.py": ["/tickets/models.py"], "/tickets/forms.py": ["/tickets/models.py"]} |
42,462 | Jeewai/co-addis-tms | refs/heads/master | /tickets/migrations/0007_status_organization.py | # Generated by Django 3.1.4 on 2021-02-01 14:44
from django.db import migrations, models
import django.db.models.deletion
# Auto-generated Django migration — regenerate via makemigrations rather than
# hand-editing.  Adds a nullable Status.organization FK (made non-null in 0008).
class Migration(migrations.Migration):
dependencies = [
('tickets', '0006_auto_20210201_1738'),
]
operations = [
migrations.AddField(
model_name='status',
name='organization',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='tickets.userprofile'),
),
]
| {"/tickets/urls.py": ["/tickets/views.py"], "/tickets/views.py": ["/tickets/models.py", "/tickets/forms.py"], "/agents/views.py": ["/tickets/models.py"], "/tickets/admin.py": ["/tickets/models.py"], "/tickets/forms.py": ["/tickets/models.py"]} |
42,463 | Jeewai/co-addis-tms | refs/heads/master | /tickets/urls.py | from django.urls import path
from . views import (
ticket_list, ticket_detail, ticket_create, ticket_update, ticket_delete,
TicketListView, TicketDetailView, TicketCreateView, TicketUpdateView, TicketDeleteView,
AssignAgentView, StatusListView, StatusDetailView, TicketStatusUpdateView
)
app_name = "tickets"
# URL names below are referenced as reverse("tickets:<name>") in the views.
# NOTE: the literal prefixes 'create/' and 'status/' still resolve correctly
# even though '<int:pk>/' is listed first, because the int converter rejects
# non-numeric path segments.
urlpatterns = [
path('', TicketListView.as_view(), name='ticket-list'),
path('<int:pk>/', TicketDetailView.as_view(), name='ticket-detail'),
path('<int:pk>/update/', TicketUpdateView.as_view(), name='ticket-update'),
path('<int:pk>/delete/', TicketDeleteView.as_view(), name='ticket-delete'),
path('<int:pk>/assign-agent/', AssignAgentView.as_view(), name='assign-agent'),
path('<int:pk>/status/', TicketStatusUpdateView.as_view(), name='ticket-status-update'),
path('create/', TicketCreateView.as_view(), name='ticket-create'),
path('status/', StatusListView.as_view(), name='status-list'),
path('status/<int:pk>/', StatusDetailView.as_view(), name='status-detail')
]
42,464 | Jeewai/co-addis-tms | refs/heads/master | /tickets/views.py | from django.core.mail import send_mail
from django.shortcuts import render, redirect, reverse
from django.contrib.auth.mixins import LoginRequiredMixin
from django.http import HttpResponse
from django.views import generic
from agents.mixins import OrganiserAndLoginRequiredMixin, OfficialAndLoginRequiredMixin
from . models import Ticket, Agent, Status
from . forms import TicketForm, TicketModelForm, CustomUserCreationForm, AssignAgentForm, TicketStatusUpdateForm
# Create your views here.
class SignupView(generic.CreateView):
    """Self-service account registration using the project's custom user form."""

    form_class = CustomUserCreationForm
    template_name = "registration/signup.html"

    def get_success_url(self):
        # Freshly registered users are sent to the login page.
        return reverse("login")
class LandingPageView(generic.TemplateView):
    """Static public landing page."""

    template_name = "landing.html"
def landing_page(request):
    """Function-based counterpart of LandingPageView."""
    return render(request, "landing.html")
class TicketListView(LoginRequiredMixin, generic.ListView):
    """Role-aware ticket listing.

    Organisers see every assigned ticket in their organization, agents see
    only tickets assigned to them, and officials see only tickets they
    raised.  Organisers additionally get the unassigned queue in context.
    """

    template_name = "tickets/ticket_list.html"
    context_object_name = "tickets"

    def get_queryset(self):
        user = self.request.user
        if user.is_organiser:
            # Whole-organization view, limited to tickets that have an agent.
            queryset = Ticket.objects.filter(
                organization=user.userprofile, agent__isnull=False
            )
        elif user.is_agent:
            queryset = Ticket.objects.filter(
                organization=user.agent.organization, agent__isnull=False
            )
            # Narrow to the tickets assigned to the logged-in agent.
            queryset = queryset.filter(agent__user=user)
        elif user.is_official:
            # Officials only see the tickets they raised themselves.
            queryset = Ticket.objects.filter(official__user=user)
        else:
            # BUG FIX: users with none of the three role flags previously fell
            # through with `queryset` unbound, raising UnboundLocalError.
            queryset = Ticket.objects.none()
        return queryset

    def get_context_data(self, **kwargs):
        context = super(TicketListView, self).get_context_data(**kwargs)
        user = self.request.user
        if user.is_organiser and not user.is_agent:
            # Organisers also see the queue of tickets awaiting assignment.
            unassigned = Ticket.objects.filter(
                organization=user.userprofile,
                agent__isnull=True,
            )
            context.update({"unassigned_tickets": unassigned})
        return context
def ticket_list(request):
    """Function-based fallback: render every ticket, with no role filtering."""
    context = {"tickets": Ticket.objects.all()}
    return render(request, "tickets/ticket_list.html", context)
class TicketDetailView(LoginRequiredMixin, generic.DetailView):
    """Show one ticket, scoped to what the current user is allowed to see."""

    template_name = "tickets/ticket_detail.html"
    context_object_name = "ticket"

    def get_queryset(self):
        user = self.request.user
        if user.is_organiser:
            queryset = Ticket.objects.filter(
                organization=user.userprofile, agent__isnull=False
            )
        elif user.is_agent:
            queryset = Ticket.objects.filter(
                organization=user.agent.organization, agent__isnull=False
            )
            # Agents only see their own assigned tickets.
            queryset = queryset.filter(agent__user=user)
        elif user.is_official:
            # BUG FIX: was `official=user`, comparing a User against the
            # UserProfile FK; use the user relation as TicketListView does.
            queryset = Ticket.objects.filter(official__user=user)
        else:
            # BUG FIX: previously `queryset` was unbound for role-less users.
            queryset = Ticket.objects.none()
        return queryset
def ticket_detail(request, pk):
    """Function-based fallback: render a single ticket by primary key.

    (Removed a leftover debug `print(pk)`.)
    """
    ticket = Ticket.objects.get(id=pk)
    context = {"ticket": ticket}
    return render(request, "tickets/ticket_detail.html", context)
class TicketCreateView(OfficialAndLoginRequiredMixin, generic.CreateView):
    """Let an official raise a new ticket and notify the team by email."""

    template_name = "tickets/ticket_create.html"
    form_class = TicketModelForm

    def get_success_url(self):
        return reverse("tickets:ticket-list")

    def form_valid(self, form):
        new_ticket = form.save(commit=False)
        # Stamp the ticket with the profile of the official who raised it.
        new_ticket.official = self.request.user.userprofile
        new_ticket.save()
        # TODO: replace the placeholder addresses with real notification logic.
        send_mail(
            subject="A ticket has been created",
            message="Go to the site to see the new ticket",
            from_email="test@test.com",
            recipient_list=["test2@test.com"],
        )
        return super(TicketCreateView, self).form_valid(form)
def ticket_create(request):
    """Function-based fallback: create a ticket from a POSTed model form."""
    form = TicketModelForm()
    if request.method == "POST":
        form = TicketModelForm(request.POST)
        if form.is_valid():
            form.save()
            return redirect("/tickets")
    return render(request, "tickets/ticket_create.html", {"form": form})
class TicketUpdateView(LoginRequiredMixin, generic.UpdateView):
    """Edit a ticket, scoped to what the current user is allowed to see."""

    template_name = "tickets/ticket_update.html"
    form_class = TicketModelForm

    def get_queryset(self):
        user = self.request.user
        if user.is_organiser:
            queryset = Ticket.objects.filter(
                organization=user.userprofile, agent__isnull=False
            )
        elif user.is_agent:
            queryset = Ticket.objects.filter(
                organization=user.agent.organization, agent__isnull=False
            )
            # Agents may only edit their own assigned tickets.
            queryset = queryset.filter(agent__user=user)
        elif user.is_official:
            # BUG FIX: was `official=user` (User vs UserProfile FK); use the
            # user relation, consistent with TicketListView.
            queryset = Ticket.objects.filter(official__user=user)
        else:
            # BUG FIX: previously `queryset` was unbound for role-less users.
            queryset = Ticket.objects.none()
        return queryset

    def get_success_url(self):
        return reverse("tickets:ticket-list")
def ticket_update(request, pk):
    """Function-based fallback: edit an existing ticket via a model form."""
    ticket = Ticket.objects.get(id=pk)
    form = TicketModelForm(instance=ticket)
    if request.method == "POST":
        form = TicketModelForm(request.POST, instance=ticket)
        if form.is_valid():
            form.save()
            return redirect("/tickets")
    return render(
        request,
        "tickets/ticket_update.html",
        {"form": form, "ticket": ticket},
    )
class TicketDeleteView(OrganiserAndLoginRequiredMixin, generic.DeleteView):
    """Organiser-only ticket deletion, scoped to the organiser's organization."""

    template_name = "tickets/ticket_delete.html"

    def get_success_url(self):
        return reverse("tickets:ticket-list")

    def get_queryset(self):
        # Removed the dead class-level `queryset = Ticket.objects.all()`:
        # it was always shadowed by this method and misleadingly implied
        # cross-organization access.
        return Ticket.objects.filter(organization=self.request.user.userprofile)
def ticket_delete(request, pk):
    """Function-based fallback: delete a ticket and return to the list."""
    Ticket.objects.get(id=pk).delete()
    return redirect("/tickets")
class AssignAgentView(OrganiserAndLoginRequiredMixin, generic.FormView):
    """Organiser-only form that attaches an agent to an existing ticket."""

    template_name = "tickets/assign_agent.html"
    form_class = AssignAgentForm

    def get_form_kwargs(self, **kwargs):
        # The form needs the request so it can limit the agent choices to
        # this organiser's organization.
        form_kwargs = super(AssignAgentView, self).get_form_kwargs(**kwargs)
        form_kwargs.update({"request": self.request})
        return form_kwargs

    def get_success_url(self):
        return reverse("tickets:ticket-list")

    def form_valid(self, form):
        selected_agent = form.cleaned_data["agent"]
        ticket = Ticket.objects.get(id=self.kwargs["pk"])
        ticket.agent = selected_agent
        ticket.save()
        return super(AssignAgentView, self).form_valid(form)
class StatusListView(LoginRequiredMixin, generic.ListView):
    """List the ticket statuses defined for the current user's organization."""

    template_name = "tickets/status_list.html"
    context_object_name = "status_list"

    def get_context_data(self, **kwargs):
        context = super(StatusListView, self).get_context_data(**kwargs)
        user = self.request.user
        if user.is_organiser and not user.is_agent:
            queryset = Ticket.objects.filter(organization=user.userprofile)
        elif user.is_agent:
            queryset = Ticket.objects.filter(organization=user.agent.organization)
        else:
            # BUG FIX: the old bare `else` dereferenced `user.agent`, which
            # raises AttributeError for officials; show no pending count.
            queryset = Ticket.objects.none()
        context.update({
            "unassigned_ticket_count": queryset.filter(status__isnull=True).count()
        })
        return context

    def get_queryset(self):
        user = self.request.user
        if user.is_organiser and not user.is_agent:
            return Status.objects.filter(organization=user.userprofile)
        if user.is_agent:
            return Status.objects.filter(organization=user.agent.organization)
        # Officials: no per-official status scoping exists yet.
        return Status.objects.none()
class StatusDetailView(LoginRequiredMixin, generic.DetailView):
    """Show one status; the template reaches the tickets in it via the
    `tickets` related_name (see migration 0009)."""

    template_name = "tickets/status_detail.html"
    context_object_name = "status"

    def get_queryset(self):
        user = self.request.user
        if user.is_organiser and not user.is_agent:
            return Status.objects.filter(organization=user.userprofile)
        if user.is_agent:
            return Status.objects.filter(organization=user.agent.organization)
        # BUG FIX: the old bare `else` dereferenced `user.agent`, which
        # raises AttributeError for officials.
        return Status.objects.none()
class TicketStatusUpdateView(LoginRequiredMixin, generic.UpdateView):
    """Update only the `status` field of a ticket."""

    template_name = "tickets/ticket_status_update.html"
    form_class = TicketStatusUpdateForm

    def get_queryset(self):
        user = self.request.user
        if user.is_organiser and not user.is_agent:
            # NOTE(review): the old code applied `agent__user=user` to this
            # branch too, which always produced an empty queryset for
            # organisers (they are not agents).  Scoped the filter to the
            # agent branch to match TicketListView — confirm organisers
            # should be able to update statuses.
            queryset = Ticket.objects.filter(organization=user.userprofile)
        elif user.is_agent:
            queryset = Ticket.objects.filter(organization=user.agent.organization)
            # Agents may only touch tickets assigned to them.
            queryset = queryset.filter(agent__user=user)
        else:
            # BUG FIX: the old bare `else` dereferenced `user.agent`, which
            # raises AttributeError for officials.
            queryset = Ticket.objects.none()
        return queryset

    def get_success_url(self):
        return reverse("tickets:ticket-detail", kwargs={"pk": self.get_object().id})
# def ticket_update(request, pk):
# ticket = Ticket.objects.get(id=pk)
# form = TicketForm()
# if request.method == "POST":
# form = TicketForm(request.POST)
# if form.is_valid():
# title = form.cleaned_data['title']
# content = form.cleaned_data['content']
# ticket.title = title
# ticket.content = content
# ticket.save()
# return redirect("/tickets")
# context = {
# "form": form,
# "ticket": ticket
# }
# return render(request, "tickets/ticket_update.html", context)
# def ticket_create(request):
# form = TicketForm()
# if request.method == "POST":
# form = TicketForm(request.POST)
# if form.is_valid():
# title = form.cleaned_data['title']
# content = form.cleaned_data['content']
# agent = Agent.objects.first()
# official = Official.objects.first()
# Ticket.objects.create(
# title = title,
# content = content,
# agent = agent,
# official = official
# )
# return redirect("/tickets")
# context = {
# "form": form
# }
# return render(request, "tickets/ticket_create.html", context)
| {"/tickets/urls.py": ["/tickets/views.py"], "/tickets/views.py": ["/tickets/models.py", "/tickets/forms.py"], "/agents/views.py": ["/tickets/models.py"], "/tickets/admin.py": ["/tickets/models.py"], "/tickets/forms.py": ["/tickets/models.py"]} |
42,465 | Jeewai/co-addis-tms | refs/heads/master | /agents/views.py | import random
from django.shortcuts import render, reverse
from django.views import generic
from django.contrib.auth.mixins import LoginRequiredMixin
from django.core.mail import send_mail
from tickets.models import Agent, UserProfile
from . forms import AgentModelForm
from . mixins import OrganiserAndLoginRequiredMixin
# Create your views here.
class AgentListView(OrganiserAndLoginRequiredMixin, generic.ListView):
    """List the agents belonging to the organiser's organization."""

    template_name = "agents/agent_list.html"

    def get_queryset(self):
        return Agent.objects.filter(organization=self.request.user.userprofile)
class OfficialListView(OrganiserAndLoginRequiredMixin, generic.ListView):
    """List user profiles (officials).

    NOTE(review): this returns *all* profiles; the old code computed the
    organiser's organization and never used it.  Dropped the dead local —
    confirm whether the list should actually be organization-scoped.
    """

    template_name = "agents/official_list.html"

    def get_queryset(self):
        return UserProfile.objects.all()
class AgentCreateView(OrganiserAndLoginRequiredMixin, generic.CreateView):
    """Organiser-only view that creates an agent account and emails them."""

    template_name = "agents/agent_create.html"
    form_class = AgentModelForm

    def get_success_url(self):
        return reverse("agents:agent-list")

    def form_valid(self, form):
        user = form.save(commit=False)
        user.is_agent = True
        user.is_organiser = False
        user.is_official = False
        # SECURITY FIX: the temporary password came from the predictable
        # Mersenne Twister with only ~1e6 possible values.  Use the OS
        # CSPRNG (random.SystemRandom) and a 64-bit space instead.
        user.set_password(f"{random.SystemRandom().getrandbits(64)}")
        user.save()
        # Attach the new agent to the organiser's organization.
        Agent.objects.create(
            user=user,
            organization=self.request.user.userprofile,
        )
        send_mail(
            subject="You are invited to be a reviewer.",
            message="You were created as an agent on DJ TMS. Please login to start working.",
            from_email="admin@ilo.org",
            recipient_list=[user.email],
        )
        return super(AgentCreateView, self).form_valid(form)
class AgentDetailView(OrganiserAndLoginRequiredMixin, generic.DetailView):
    """Show one agent from the organiser's own organization."""

    template_name = "agents/agent_detail.html"
    context_object_name = "agent"

    def get_queryset(self):
        return Agent.objects.filter(organization=self.request.user.userprofile)
class AgentUpdateView(OrganiserAndLoginRequiredMixin, generic.UpdateView):
    """Edit an agent from the organiser's own organization."""

    template_name = "agents/agent_update.html"
    form_class = AgentModelForm

    def get_queryset(self):
        return Agent.objects.filter(organization=self.request.user.userprofile)

    def get_success_url(self):
        return reverse("agents:agent-list")
class AgentDeleteView(OrganiserAndLoginRequiredMixin, generic.DeleteView):
    """Delete an agent from the organiser's own organization."""

    template_name = "agents/agent_delete.html"
    context_object_name = "agent"

    def get_queryset(self):
        return Agent.objects.filter(organization=self.request.user.userprofile)

    def get_success_url(self):
        return reverse("agents:agent-list")
| {"/tickets/urls.py": ["/tickets/views.py"], "/tickets/views.py": ["/tickets/models.py", "/tickets/forms.py"], "/agents/views.py": ["/tickets/models.py"], "/tickets/admin.py": ["/tickets/models.py"], "/tickets/forms.py": ["/tickets/models.py"]} |
42,466 | Jeewai/co-addis-tms | refs/heads/master | /tickets/migrations/0006_auto_20210201_1738.py | # Generated by Django 3.1.4 on 2021-02-01 14:38
from django.db import migrations, models
import django.db.models.deletion
# Auto-generated Django migration — regenerate via makemigrations rather than
# hand-editing.  Creates the Status model and the nullable Ticket.status FK
# (SET_NULL so deleting a status does not delete its tickets).
class Migration(migrations.Migration):
dependencies = [
('tickets', '0005_ticket_organization'),
]
operations = [
migrations.CreateModel(
name='Status',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=30)),
],
),
migrations.AddField(
model_name='ticket',
name='status',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='tickets.status'),
),
]
| {"/tickets/urls.py": ["/tickets/views.py"], "/tickets/views.py": ["/tickets/models.py", "/tickets/forms.py"], "/agents/views.py": ["/tickets/models.py"], "/tickets/admin.py": ["/tickets/models.py"], "/tickets/forms.py": ["/tickets/models.py"]} |
42,467 | Jeewai/co-addis-tms | refs/heads/master | /tickets/migrations/0018_auto_20210204_1354.py | # Generated by Django 3.1.4 on 2021-02-04 10:54
from django.db import migrations, models
# Auto-generated Django migration — regenerate via makemigrations rather than
# hand-editing.  Only reorders the Ticket.category choices (cosmetic; no
# schema change on most backends).
class Migration(migrations.Migration):
dependencies = [
('tickets', '0017_auto_20210203_2342'),
]
operations = [
migrations.AlterField(
model_name='ticket',
name='category',
field=models.CharField(choices=[('Procurement', 'Procurement'), ('IT', 'IT'), ('Finance', 'Finance')], max_length=100),
),
]
| {"/tickets/urls.py": ["/tickets/views.py"], "/tickets/views.py": ["/tickets/models.py", "/tickets/forms.py"], "/agents/views.py": ["/tickets/models.py"], "/tickets/admin.py": ["/tickets/models.py"], "/tickets/forms.py": ["/tickets/models.py"]} |
42,468 | Jeewai/co-addis-tms | refs/heads/master | /tickets/admin.py | from django.contrib import admin
from . models import User, Ticket, Agent, UserProfile, Status
# Register your models here.
# Expose every ticketing model in the Django admin.
for _model in (User, UserProfile, Agent, Ticket, Status):
    admin.site.register(_model)
| {"/tickets/urls.py": ["/tickets/views.py"], "/tickets/views.py": ["/tickets/models.py", "/tickets/forms.py"], "/agents/views.py": ["/tickets/models.py"], "/tickets/admin.py": ["/tickets/models.py"], "/tickets/forms.py": ["/tickets/models.py"]} |
42,469 | Jeewai/co-addis-tms | refs/heads/master | /tickets/migrations/0015_auto_20210203_2338.py | # Generated by Django 3.1.4 on 2021-02-03 20:38
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
# Auto-generated Django migration — regenerate via makemigrations rather than
# hand-editing.  Introduces a dedicated Official model and repoints
# Ticket.official at it (reverted again in migration 0019).
class Migration(migrations.Migration):
dependencies = [
('tickets', '0014_auto_20210203_2307'),
]
operations = [
migrations.AlterField(
model_name='ticket',
name='category',
field=models.CharField(choices=[('Procurement', 'Procurement'), ('Finance', 'Finance'), ('IT', 'IT')], max_length=100),
),
migrations.CreateModel(
name='Official',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.AlterField(
model_name='ticket',
name='official',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tickets.official'),
),
]
| {"/tickets/urls.py": ["/tickets/views.py"], "/tickets/views.py": ["/tickets/models.py", "/tickets/forms.py"], "/agents/views.py": ["/tickets/models.py"], "/tickets/admin.py": ["/tickets/models.py"], "/tickets/forms.py": ["/tickets/models.py"]} |
42,470 | Jeewai/co-addis-tms | refs/heads/master | /tickets/forms.py | from django import forms
from django.contrib.auth import get_user_model
from django.contrib.auth.forms import UserCreationForm, UsernameField
from . models import Ticket, Agent
User = get_user_model()
class TicketModelForm(forms.ModelForm):
    """User-facing form for creating and editing a Ticket."""

    class Meta:
        model = Ticket
        fields = ('title', 'content', 'organization', 'category', 'file')
class TicketForm(forms.Form):
    """Minimal non-model ticket form (title and content only)."""

    title = forms.CharField()
    content = forms.CharField()
class CustomUserCreationForm(UserCreationForm):
    """Signup form bound to the project's custom User model."""

    class Meta:
        model = User
        fields = ('username', 'office', 'project')
        field_classes = {'username': UsernameField}
class AssignAgentForm(forms.Form):
    """Pick an agent, restricted to the requesting organiser's organization."""

    # Placeholder queryset; replaced per-request in __init__.
    agent = forms.ModelChoiceField(queryset=Agent.objects.none())

    def __init__(self, *args, **kwargs):
        # The view injects `request` so choices can be scoped per organization.
        request = kwargs.pop("request")
        org_agents = Agent.objects.filter(organization=request.user.userprofile)
        super(AssignAgentForm, self).__init__(*args, **kwargs)
        self.fields["agent"].queryset = org_agents
class TicketStatusUpdateForm(forms.ModelForm):
    """Form exposing only a ticket's status field."""

    class Meta:
        model = Ticket
        fields = ('status',)
| {"/tickets/urls.py": ["/tickets/views.py"], "/tickets/views.py": ["/tickets/models.py", "/tickets/forms.py"], "/agents/views.py": ["/tickets/models.py"], "/tickets/admin.py": ["/tickets/models.py"], "/tickets/forms.py": ["/tickets/models.py"]} |
42,471 | Jeewai/co-addis-tms | refs/heads/master | /tickets/migrations/0009_auto_20210201_2302.py | # Generated by Django 3.1.4 on 2021-02-01 20:02
from django.db import migrations, models
import django.db.models.deletion
# Auto-generated Django migration — regenerate via makemigrations rather than
# hand-editing.  Adds related_name='tickets' to Ticket.status so templates can
# use status.tickets.all().
class Migration(migrations.Migration):
dependencies = [
('tickets', '0008_auto_20210201_1745'),
]
operations = [
migrations.AlterField(
model_name='ticket',
name='status',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='tickets', to='tickets.status'),
),
]
| {"/tickets/urls.py": ["/tickets/views.py"], "/tickets/views.py": ["/tickets/models.py", "/tickets/forms.py"], "/agents/views.py": ["/tickets/models.py"], "/tickets/admin.py": ["/tickets/models.py"], "/tickets/forms.py": ["/tickets/models.py"]} |
42,472 | Jeewai/co-addis-tms | refs/heads/master | /tickets/migrations/0013_auto_20210203_2233.py | # Generated by Django 3.1.4 on 2021-02-03 19:33
from django.db import migrations, models
# Auto-generated Django migration — regenerate via makemigrations rather than
# hand-editing.  Adds User.office and User.role enumerated fields and reorders
# the Ticket.category choices.
class Migration(migrations.Migration):
dependencies = [
('tickets', '0012_auto_20210203_1742'),
]
operations = [
migrations.AddField(
model_name='user',
name='office',
field=models.PositiveSmallIntegerField(choices=[(1, 'Ethiopia'), (2, 'Sudan'), (3, 'South Sudan'), (4, 'Djibouti'), (5, 'Somalia')], default=1),
),
migrations.AddField(
model_name='user',
name='role',
field=models.PositiveSmallIntegerField(choices=[(1, 'Official'), (2, 'Finance'), (3, 'Procurement'), (4, 'IT'), (5, 'HR')], default=1),
),
migrations.AlterField(
model_name='ticket',
name='category',
field=models.CharField(choices=[('Finance', 'Finance'), ('Procurement', 'Procurement'), ('IT', 'IT')], max_length=100),
),
]
| {"/tickets/urls.py": ["/tickets/views.py"], "/tickets/views.py": ["/tickets/models.py", "/tickets/forms.py"], "/agents/views.py": ["/tickets/models.py"], "/tickets/admin.py": ["/tickets/models.py"], "/tickets/forms.py": ["/tickets/models.py"]} |
42,473 | Jeewai/co-addis-tms | refs/heads/master | /tickets/migrations/0016_auto_20210203_2341.py | # Generated by Django 3.1.4 on 2021-02-03 20:41
from django.db import migrations, models
import django.db.models.deletion
# Auto-generated Django migration — regenerate via makemigrations rather than
# hand-editing.  Makes Ticket.official nullable and reorders category choices.
class Migration(migrations.Migration):
dependencies = [
('tickets', '0015_auto_20210203_2338'),
]
operations = [
migrations.AlterField(
model_name='ticket',
name='category',
field=models.CharField(choices=[('IT', 'IT'), ('Procurement', 'Procurement'), ('Finance', 'Finance')], max_length=100),
),
migrations.AlterField(
model_name='ticket',
name='official',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='tickets.official'),
),
]
| {"/tickets/urls.py": ["/tickets/views.py"], "/tickets/views.py": ["/tickets/models.py", "/tickets/forms.py"], "/agents/views.py": ["/tickets/models.py"], "/tickets/admin.py": ["/tickets/models.py"], "/tickets/forms.py": ["/tickets/models.py"]} |
42,474 | Jeewai/co-addis-tms | refs/heads/master | /tickets/migrations/0019_auto_20210204_1520.py | # Generated by Django 3.1.4 on 2021-02-04 12:20
from django.db import migrations, models
import django.db.models.deletion
# Auto-generated Django migration — regenerate via makemigrations rather than
# hand-editing.  Repoints Ticket.official at UserProfile (default=1 only to
# backfill existing rows), turns Ticket.organization into a choice CharField,
# and drops the short-lived Official model from migration 0015.
class Migration(migrations.Migration):
dependencies = [
('tickets', '0018_auto_20210204_1354'),
]
operations = [
migrations.AlterField(
model_name='ticket',
name='category',
field=models.CharField(choices=[('Finance', 'Finance'), ('Procurement', 'Procurement'), ('IT', 'IT')], max_length=100),
),
migrations.AlterField(
model_name='ticket',
name='official',
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='tickets.userprofile'),
preserve_default=False,
),
migrations.AlterField(
model_name='ticket',
name='organization',
field=models.CharField(choices=[('LORC_SUD', 'LORC_SUD'), ('LORC_ETH', 'LORC_ETH')], max_length=100),
),
migrations.DeleteModel(
name='Official',
),
]
| {"/tickets/urls.py": ["/tickets/views.py"], "/tickets/views.py": ["/tickets/models.py", "/tickets/forms.py"], "/agents/views.py": ["/tickets/models.py"], "/tickets/admin.py": ["/tickets/models.py"], "/tickets/forms.py": ["/tickets/models.py"]} |
42,475 | Jeewai/co-addis-tms | refs/heads/master | /tickets/migrations/0008_auto_20210201_1745.py | # Generated by Django 3.1.4 on 2021-02-01 14:45
from django.db import migrations, models
import django.db.models.deletion
# Auto-generated Django migration — regenerate via makemigrations rather than
# hand-editing.  Makes Status.organization non-nullable (added nullable in 0007).
class Migration(migrations.Migration):
dependencies = [
('tickets', '0007_status_organization'),
]
operations = [
migrations.AlterField(
model_name='status',
name='organization',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tickets.userprofile'),
),
]
| {"/tickets/urls.py": ["/tickets/views.py"], "/tickets/views.py": ["/tickets/models.py", "/tickets/forms.py"], "/agents/views.py": ["/tickets/models.py"], "/tickets/admin.py": ["/tickets/models.py"], "/tickets/forms.py": ["/tickets/models.py"]} |
42,483 | gundry2/OSR-Automation-Tools | refs/heads/main | /wilderness_travel.py | import random
from dice import roll_dice
class Party:
    """
    Defines an adventuring party to keep track of rations and proficiencies.

    NOTE(review): `self.navigation` is collected but never consulted by
    get_lost(); presumably it should lower the lost-roll target — confirm
    the intended house rule before wiring it in.
    """

    def __init__(self):
        self.rations = 0
        self.survival = False
        self.navigation = False
        self.terrain = ''
        surv = None
        nav = None
        self.rations = int(input("How many rations does your party currently have?"))
        self.consumption = int(input("How many rations does your party consume a day?"))
        while surv not in [1, 2]:
            surv = int(input("Does your party have Survival as a proficiency? 1 for yes, 2 for no."))
        if surv == 1:
            self.survival = True
        while nav not in [1, 2]:
            nav = int(input("Does your party have Navigation as a proficiency? 1 for yes, 2 for no."))
        if nav == 1:
            self.navigation = True
        self.loop()

    def loop(self):
        """Main menu loop: advance days and manage the ration pool."""
        choice = ''
        while choice != 5:
            choice = int(input("Choose from the following:\n1.New day\n2.Increase rations\n3.Decrease rations\n4.Set ration consumption\n5.Quit"))
            if choice == 1:
                terrain = input("Please input one of the following options: p for plains, m for mountains, h for hills, f for forest, c for coast, s for sea, d for desert, j for jungle, w for swamp")
                self.forage()
                self.get_lost(terrain)
                self.rations -= self.consumption
            elif choice == 2:
                # BUG FIX: input() returns a str; the old code added it
                # straight to an int and crashed with TypeError.
                self.rations += int(input("Increase your rations by how many?"))
            elif choice == 3:
                # BUG FIX: same str-vs-int crash as above.
                self.rations -= int(input("Decrease rations by how many?"))
            elif choice == 4:
                self.consumption = int(input("How many rations does your party consume a day?"))
            elif choice == 5:
                print("Goodbye!")
            print(f"You now have {self.rations} rations. Your party is consuming {self.consumption} rations per day.")

    def forage(self):
        """
        determines how much food is foraged per day if any. survival is true if
        a character in the party has the survival proficiency.
        """
        target = 18
        if self.survival:
            # Survival proficiency makes foraging easier.
            target -= 4
        if roll_dice(1, 20) >= target:
            food = roll_dice(1, 6)
            print(f"Foraged {food} rations.")
            self.rations += food
        else:
            print("Found no rations today.")

    def get_lost(self, terrain, navigation=False):
        """
        Determines if the characters get lost based on their terrain. Navigation is whether
        a character in the party has the navigation proficiency.

        NOTE(review): `navigation` is currently unused — see class docstring.
        """
        target = -1
        if terrain == 'p':
            target = 4
        elif terrain in ['m', 'h', 'f', 'c']:
            target = 7
        elif terrain in ['s', 'd', 'j', 'w']:
            target = 11
        if roll_dice(1, 20) >= target:
            print("Not lost.")
        else:
            # BUG FIX: random.choice takes a single sequence; the old call
            # passed six positional strings and raised TypeError.
            direction = random.choice(['southeast', 'southwest', 'south', 'north', 'northeast', 'northwest'])
            print(f"Lost. Heading {direction}.")
if __name__ == "__main__":
    # Constructing a Party runs the interactive loop immediately.
    party = Party()
| {"/wilderness_travel.py": ["/dice.py"], "/main.py": ["/osr_char_gen.py"], "/osr_char_gen.py": ["/dice.py", "/menu.py"]} |
42,484 | gundry2/OSR-Automation-Tools | refs/heads/main | /main.py | from osr_char_gen import Character
def main() -> None:
    """Generate one random character and print its sheet."""
    character = Character()
    print(character)


if __name__ == "__main__":
    main()
| {"/wilderness_travel.py": ["/dice.py"], "/main.py": ["/osr_char_gen.py"], "/osr_char_gen.py": ["/dice.py", "/menu.py"]} |
42,485 | gundry2/OSR-Automation-Tools | refs/heads/main | /player_data_graphing/class_diversity.py | # Displays a chart showing how many of each class have been played within the game world.
import matplotlib.pyplot as plt
import json
import os
# Collect the (lower-cased) class of every saved character sheet.
class_list = []
for file in os.listdir('./characters'):
    if file.startswith('~') or file.endswith('~'):
        continue  # skip editor backup/lock files
    print(file)
    with open('./characters/' + file, 'r') as f:
        class_list.append(json.load(f)['class'].lower())

class_list.sort()
labels = sorted(set(class_list))
sizes = [class_list.count(label) for label in labels]

# Render the class distribution as a pie chart.
fig1, ax1 = plt.subplots()
ax1.pie(sizes, labels=labels, autopct='%1.1f%%')
ax1.axis('equal')  # equal aspect ratio draws the pie as a circle
plt.show()

print(labels)
print(class_list)
| {"/wilderness_travel.py": ["/dice.py"], "/main.py": ["/osr_char_gen.py"], "/osr_char_gen.py": ["/dice.py", "/menu.py"]} |
42,486 | gundry2/OSR-Automation-Tools | refs/heads/main | /osr_char_gen.py | """Automatically generates a random OSR character."""
from typing import List
# Used to present a list of choices to the user.
#from cursesmenu import CursesMenu, SelectionMenu
from dice import roll_dice
from menu import Menu
class Character:
    """A randomly generated OSR character: ability scores, class, and gold."""

    def __init__(self):
        # 3d6-in-order scores keyed by ability name.
        self.stats = dict(zip(
            ["Strength", "Dexterity", "Constitution", "Intelligence", "Wisdom", "Charisma"],
            self.roll_stats(),
        ))
        self.class_ = self.class_choice()
        self.gold = self.starting_gold()

    def roll_stats(self) -> List[int]:
        """Rolls 3d6 in order to generate a character's stats."""
        return [roll_dice(3, 6) for _ in range(6)]

    def class_choice(self) -> str:
        """
        Prints a list of classes that the character's stats qualify them for,
        then prompts the user to either select the class they want or randomly
        decide.

        :return str: The name of the class chosen.
        """
        classes = self.available_classes()
        # TODO: Add demi-human & Advanced Fantasy classes.
        # TODO: Use color-coding to highlight classes based on the XP benefit
        # each gains from the class' Prime Requisite.
        print(f"Your stats so far are:\n{self.show_stats()}")
        choices = Menu(classes)
        return choices.get_choice()

    def available_classes(self) -> List[str]:
        """Return the class names this character's stats qualify for."""
        classes = []
        stats = self.stats
        if stats["Strength"] >= 9:
            classes.append("Fighter")
        if stats["Dexterity"] >= 9:
            classes.append("Explorer")
        if stats["Dexterity"] >= 9:
            classes.append("Thief")
        if stats["Intelligence"] >= 9:
            classes.append("Mage")
        if stats["Strength"] >= 9:
            classes.append("Elven Spellsword")
        if stats["Dexterity"] >= 9:
            classes.append("Elven Ranger")
        if stats["Wisdom"] >= 9:
            classes.append("Cleric")
        if stats["Constitution"] >= 9:
            if stats["Strength"] >= 9:
                classes.append("Dwarven Vaultguard")
            if stats["Wisdom"] >= 9:
                classes.append("Dwarven Craftpriest")
        if min(stats.values()) >= 11:
            classes.append("Nobiran")
        # BUG FIX: the original tested `any(stats.values()) == 18` (always
        # False, since any() returns a bool) and `min(self.stats)` (the min
        # of the stat *names*).  Chosen requires at least one 18 and no
        # score below 9.
        if max(stats.values()) == 18 and min(stats.values()) >= 9:
            classes.append("Chosen")
        if not classes:
            classes.append("0-level")
        return classes

    def prime_requisites_xp(self) -> List[str]:
        """Determines the prime requisites for each stat"""
        pass

    def starting_gold(self) -> int:
        """
        Rolls 3d6 x 10 gold for a starting character.

        :return int: The amount of gold for the character.
        """
        return roll_dice(3, 6) * 10

    def show_stats(self) -> str:
        """Used to neatly output the character's stats, one per line."""
        return "".join(f"{stat}: {value}\n" for stat, value in self.stats.items())

    def __str__(self):
        return f"\n{self.show_stats()}\nClass: {self.class_}\nGold: {self.gold}."
| {"/wilderness_travel.py": ["/dice.py"], "/main.py": ["/osr_char_gen.py"], "/osr_char_gen.py": ["/dice.py", "/menu.py"]} |
42,487 | gundry2/OSR-Automation-Tools | refs/heads/main | /dice.py | """Defines dice roll functions used in games."""
from random import randint
def roll_dice(amount, sides) -> int:
    """
    Rolls an amount of dice with sides number of faces, with the faces
    being numbered from 1 to sides.
    :return int: The result of the dice roll.
    """
    # One randint per die, summed at C speed.
    return sum(randint(1, sides) for _ in range(amount))
| {"/wilderness_travel.py": ["/dice.py"], "/main.py": ["/osr_char_gen.py"], "/osr_char_gen.py": ["/dice.py", "/menu.py"]} |
42,488 | gundry2/OSR-Automation-Tools | refs/heads/main | /menu.py | """Defines a Menu class that allows users to pick from a list of options."""
from typing import List
class Menu:
    """Presents a numbered list of options and returns the user's pick."""

    def __init__(self, choices: List[str]):
        self.choices = choices

    def get_choice(self):
        """Prints the menu, then loops until a valid option number is typed."""
        print("Please choose one of the following options by entering the "
              "corresponding number: ")
        for number, option in enumerate(self.choices, start=1):
            print(f"{number}. {option}")
        valid = range(1, len(self.choices) + 1)
        selection = -1
        while selection not in valid:
            try:
                selection = int(input())
            except Exception:
                # Non-numeric input: re-prompt. Out-of-range numbers simply loop.
                print("Please input a valid choice.")
        return self.choices[selection - 1]
| {"/wilderness_travel.py": ["/dice.py"], "/main.py": ["/osr_char_gen.py"], "/osr_char_gen.py": ["/dice.py", "/menu.py"]} |
42,508 | BruceZhu88/sqaTools | refs/heads/master | /src/common/relay.py |
import time
import random
from .SerialHelper import SerialHelper
class Relay(object):
    """Drives a serial-controlled relay board through a SerialHelper port.

    NOTE(review): the command bytes ('50'/'51'/'00', port codes, 'A'/'B') come
    from the relay board's wire protocol -- confirm against its datasheet.
    """

    def __init__(self, log):
        # External logger used for all status/error output.
        self.log = log
        # Serial connection; created lazily by init_relay().
        self.ser = None

    def init_relay(self, port):
        """Before operating relay, you must initialize it first.
        """
        self.ser = SerialHelper()
        self.ser.port = port
        self.ser.start()

    def stop_relay(self):
        # Close the underlying serial connection.
        self.ser.stop()

    def init_button(self):
        """Sends the board's init sequence (hex 50, 51, 00), 0.5s apart.

        :return bool: True on success, False when a write fails.
        """
        if self.ser.alive:
            try:
                time.sleep(0.5)
                self.ser.write('50'.encode('utf-8'), isHex=True)
                time.sleep(0.5)
                self.ser.write('51'.encode('utf-8'), isHex=True)
                time.sleep(0.5)
                self.ser.write('00'.encode('utf-8'), isHex=True)
                return True
            except Exception as e:
                self.log.info(e)
                return False

    def press(self, key: list, t):
        """Press and release relay port
        :param key: list type,
        That means you also could control many ports simultaneously, key = '01', '02'
        :param t: string type, the time of press
        :return None
        """
        if not self.ser.alive:
            return
        # XOR all hex port codes together into one combined command byte.
        k = '00'
        for v in key:
            k = hex(int(v, 16) ^ int(k, 16))
            # Pad single hex digits to two ('0x1' -> '0x01').
            if len(k) == 3:
                k = k.replace('0x', '0x0')
        # "a-b" means a random press duration between a and b seconds.
        if "-" in t:
            val = t.split("-")
            delay = round(random.uniform(float(val[0]), float(val[1])), 4)
        else:
            delay = float(t)
        # Strip the '0x' prefix: the board expects bare hex digits.
        k = k.replace('0x', '')
        # close relay
        self.ser.write(k.encode('utf-8'), isHex=True)
        # How long do you need to press
        self.log.info('button press time={}'.format(delay))
        time.sleep(delay)
        # release relay
        self.ser.write('00'.encode('utf-8'), isHex=True)

    def ac_power(self, state):
        """Switches AC power: 'on' (case-insensitive) sends 'B', otherwise 'A'.

        :return bool: True on success, False if the port is closed or a write fails.
        """
        if not self.ser.alive:
            return False
        try:
            if state.lower() == 'on':
                self.ser.write('B'.encode('utf-8'))
            else:
                self.ser.write('A'.encode('utf-8'))
        except Exception as e:
            self.log.error(e)
            return False
        return True
| {"/build.py": ["/src/common/cfg.py"], "/sqaTools.py": ["/app_config.py", "/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/WifiSetup.py", "/src/powerCycle/SerialTool.py", "/src/automate/Command.py", "/src/automate/automate.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/common/QRcode.py"], "/src/wifiSpeaker/AseInfo.py": ["/src/common/SshHelper.py", "/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/wifiSpeaker/AseWebData.py"], "/src/wifiSpeaker/WifiSetup.py": ["/src/common/WindowsWifi.py", "/src/wifiSpeaker/AseInfo.py"], "/src/powerCycle/__init__.py": ["/src/powerCycle/SerialTool.py"], "/src/automate/__init__.py": ["/src/automate/automate.py"], "/src/automate/Command.py": ["/src/common/FtpUtil.py", "/src/common/cfg.py"], "/src/automate/automate.py": ["/src/wifiSpeaker/AseInfo.py", "/src/common/relay.py"], "/src/wifiSpeaker/__init__.py": ["/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/AseWebData.py", "/src/wifiSpeaker/WifiSetup.py"], "/src/common/__init__.py": ["/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/FtpUtil.py", "/src/common/QRcode.py", "/src/common/SshHelper.py", "/src/common/Url.py", "/src/common/relay.py"]} |
42,509 | BruceZhu88/sqaTools | refs/heads/master | /build.py | import os
import json
from src.common.util import store
from src.common.util import zip_dir
from src.common.util import empty_folder
from src.common.cfg import Config
# Build script: resets bundled runtime data, then packages the app with
# PyInstaller and zips the result.
main_cfg_path = './config/main.conf'
main_config = Config(main_cfg_path)
main_config.cfg_load()
main_cfg = main_config.cfg
ase_ota_setting_path = main_cfg.get('WifiSpeaker', 'ase_ota_setting')
wifi_setup_path = main_cfg.get('WifiSpeaker', 'wifi_setup')
saved_ip_path = main_cfg.get('WifiSpeaker', 'saved_ip')
saved_action_steps = main_cfg.get('Automate', 'action_steps')
# clear data in test (OTA, Wifi Setup)
store(ase_ota_setting_path, {})
store(wifi_setup_path, {})
store(saved_ip_path, {"ip": []})
app_info = './data/app.json'
with open(app_info) as json_file:
    data = json.load(json_file)
# Drop user-generated files but keep the bundled example/demo files.
empty_folder(saved_action_steps, except_file='example.json')
empty_folder('./data/uploads', except_file='demo.txt')
app_name = data["name"]
app_version = data['version']
# Run PyInstaller via the repo's batch file (Windows-only build step).
os.system("{}\pyinstaller_exe.bat".format(os.getcwd()))
dir_path = "./dist/run/"
zip_filename = "{0}_v{1}_x64.zip".format(app_name, app_version)
zip_dir(dir_path, zip_filename)
| {"/build.py": ["/src/common/cfg.py"], "/sqaTools.py": ["/app_config.py", "/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/WifiSetup.py", "/src/powerCycle/SerialTool.py", "/src/automate/Command.py", "/src/automate/automate.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/common/QRcode.py"], "/src/wifiSpeaker/AseInfo.py": ["/src/common/SshHelper.py", "/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/wifiSpeaker/AseWebData.py"], "/src/wifiSpeaker/WifiSetup.py": ["/src/common/WindowsWifi.py", "/src/wifiSpeaker/AseInfo.py"], "/src/powerCycle/__init__.py": ["/src/powerCycle/SerialTool.py"], "/src/automate/__init__.py": ["/src/automate/automate.py"], "/src/automate/Command.py": ["/src/common/FtpUtil.py", "/src/common/cfg.py"], "/src/automate/automate.py": ["/src/wifiSpeaker/AseInfo.py", "/src/common/relay.py"], "/src/wifiSpeaker/__init__.py": ["/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/AseWebData.py", "/src/wifiSpeaker/WifiSetup.py"], "/src/common/__init__.py": ["/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/FtpUtil.py", "/src/common/QRcode.py", "/src/common/SshHelper.py", "/src/common/Url.py", "/src/common/relay.py"]} |
42,510 | BruceZhu88/sqaTools | refs/heads/master | /app_config.py |
# Flask application settings, loaded via app.config.from_object(app_config).
SECRET_KEY = 'secret!'  # NOTE(review): placeholder secret -- replace before production use.
DEBUG = False
UPLOAD_FOLDER = 'data/uploads'  # relative path used by the DataGraph upload handler
| {"/build.py": ["/src/common/cfg.py"], "/sqaTools.py": ["/app_config.py", "/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/WifiSetup.py", "/src/powerCycle/SerialTool.py", "/src/automate/Command.py", "/src/automate/automate.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/common/QRcode.py"], "/src/wifiSpeaker/AseInfo.py": ["/src/common/SshHelper.py", "/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/wifiSpeaker/AseWebData.py"], "/src/wifiSpeaker/WifiSetup.py": ["/src/common/WindowsWifi.py", "/src/wifiSpeaker/AseInfo.py"], "/src/powerCycle/__init__.py": ["/src/powerCycle/SerialTool.py"], "/src/automate/__init__.py": ["/src/automate/automate.py"], "/src/automate/Command.py": ["/src/common/FtpUtil.py", "/src/common/cfg.py"], "/src/automate/automate.py": ["/src/wifiSpeaker/AseInfo.py", "/src/common/relay.py"], "/src/wifiSpeaker/__init__.py": ["/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/AseWebData.py", "/src/wifiSpeaker/WifiSetup.py"], "/src/common/__init__.py": ["/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/FtpUtil.py", "/src/common/QRcode.py", "/src/common/SshHelper.py", "/src/common/Url.py", "/src/common/relay.py"]} |
42,511 | BruceZhu88/sqaTools | refs/heads/master | /sqaTools.py | #!/usr/bin/env python
# -------------------------------------------------------------------------
# added by Bruce for packaging whole project into exe by pyinstaller
from src import *
import configparser
from dns import dnssec,e164,edns,entropy,exception,flags,inet,ipv4,ipv6,\
message,name,namedict,node,opcode,query,rcode,rdata,rdataclass,rdataset,\
rdatatype,renderer,resolver,reversename,rrset,tokenizer,tsig,\
tsigkeyring,ttl,update,version,zone
import engineio.async_eventlet
# -------------------------------------------------------------------------
import subprocess
import sys
import re
import time
# import win32api
import app_config
from werkzeug.utils import secure_filename
from flask import Flask, render_template, request, redirect, jsonify, url_for, send_from_directory
from flask_socketio import SocketIO
from src.wifiSpeaker.AseInfo import AseInfo
from src.wifiSpeaker.AseUpdate import AseUpdate
from src.wifiSpeaker.WifiSetup import WifiSetup
from src.powerCycle.SerialTool import SerialTool
from src.automate.Command import Command
from src.automate.automate import Automate
from src.common.cfg import Config
from src.common.util import *
from src.common.Logger import Logger
from src.common.Url import check_url_status
from src.common.QRcode import MakeQR
logger = Logger("main").logger()
# print(__file__) print(sys.argv[0])
# print(os.path.basename(__file__))
# Single-instance guard: look for an already-running <script>.exe via tasklist
# (Windows-only; find /i is a cmd.exe builtin).
cmd = "tasklist|find /i \"{}.exe\"".format(os.path.basename(__file__).rsplit(".", 1)[0])
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
tasklist = p.stdout.readlines()
if len(tasklist) > 1:
    logger.info("sqaTools.exe has been launched!")
    # win32api.MessageBox(0, "sqaTools has been launched!", "Warning")
    sys.exit()
# Set this variable to "threading", "eventlet" or "gevent" to test the
# different async modes, or leave it set to None for the application to choose
# the best option based on installed packages.
async_mode = "eventlet"
app = Flask(__name__)
app.config.from_object(app_config)
socketio = SocketIO(app, async_mode=async_mode)
# socketio.async_mode
ALLOWED_EXTENSIONS = set(['txt', 'ini'])
MAX_CONTENT_LENGTH = 16 * 1024 * 1024  # 16MB
# Module-level shared state used by the route/socket handlers below.
thread_ase_ota = None
thread_setup_wifi = None
thread = None
thread_lock = threading.Lock()
serial_tool = None
ase_info = AseInfo()
INFO = {}
UNBLOCK = {}
PAGE_INFO = {"page": ""}
STOP_REFRESH = False
run_automation_steps = {}
# Paths and sub-configs resolved from the main config file.
main_cfg_path = './config/main.conf'
main_config = Config(main_cfg_path)
main_config.cfg_load()
main_cfg = main_config.cfg
ase_ota_setting_path = main_cfg.get('WifiSpeaker', 'ase_ota_setting')
power_cycle_running_status = main_cfg.get('WifiSpeaker', 'power_cycle_running_status')
wifi_setup_path = main_cfg.get('WifiSpeaker', 'wifi_setup')
automate_running_status = main_cfg.get('Automate', 'automate_running_status')
saved_action_steps = main_cfg.get('Automate', 'action_steps')
automation_cfg = Config(main_cfg.get('Automate', 'automation'))
power_cycle_cfg = Config(main_cfg.get('PowerCycle', 'power_cycle'))
cmd_get_log_file = main_cfg.get('WifiSpeaker', 'cmd_get_log_file')
ase_log_path = main_cfg.get('Log', 'ase_log_path')
script_path = main_cfg.get('WifiSpeaker', 'script_path')
qr_code_num = 0
logger.debug("Starting {}".format(os.path.basename(__file__).rsplit(".", 1)[0]))
"""
@app.route('/hello/')
@app.route('/hello/<name>')
def hello(name=None):
    return render_template('hello1.html', name=name)
"""
"""
*********************************************************************************
*Index*
*********************************************************************************
"""
@app.route('/')
def index():
    # Landing page; app_version is set in the __main__ guard.
    PAGE_INFO['page'] = 'home'
    global app_version
    return render_template('index.html', app_version=app_version)
"""
*********************************************************************************
*Wifi Speaker*
*********************************************************************************
"""
@app.route('/wifi_speaker')
def wifi_speaker():
    PAGE_INFO['page'] = 'wifi_speaker'
    return render_template('WifiSpeaker.html')
''''
@app.route('/scan_devices', methods=['GET'])
def scan_devices():
    """
    while len(DEVICES) > 0:
        DEVICES.clear()
    device_scan = deviceScan(DEVICES)
    if device_scan.scan() == -1:
        return jsonify({"error": "error"})
    """
    devices = ase_info.get_ase_devices_list()
    return jsonify(devices)
'''
def scan_devices_thread():
    # Background task: pushes newly discovered devices to the client until
    # the user leaves the wifi_speaker page or ase_info signals completion.
    tmp_devices = ''
    while PAGE_INFO['page'] == 'wifi_speaker':
        socketio.sleep(0.003)  # avoid emit block
        status = ase_info.status
        devices_info = ase_info.devices_list
        if len(devices_info) > 0:
            # for i in devices_info.keys(): # dict.keys() return a list, so won't crash
            for d in devices_info:
                if d not in tmp_devices:
                    socketio.sleep(0.003)
                    tmp_devices += d
                    socketio.emit('get_scan_devices', {'data': d},
                                  namespace='/wifi_speaker/test')
        if status == 1:
            socketio.emit('stop_scan_devices',
                          namespace='/wifi_speaker/test')
            return
@socketio.on('scan_devices', namespace='/wifi_speaker/test')
def scan_devices():
    # Start the scan itself and the emitter loop as separate background tasks.
    socketio.start_background_task(target=scan_devices_thread)
    socketio.start_background_task(target=ase_info.get_ase_devices_list)
'''
@socketio.on('connect', namespace='/wifi_speaker/test')
def test_connect():
    socketio.start_background_task(target=background_thread)
'''
@app.route('/get_info', methods=['GET'])
def get_info():
    # Resolves the selected device (accepts "name (ip)" or a bare ip) and
    # caches its info in the module-level INFO dict for later routes.
    global INFO, STOP_REFRESH
    STOP_REFRESH = True
    # text = request.form.to_dict().get("text") ---> if methods=post
    text = request.args.get("ip")  # ---> if methods=get
    # p = re.compile(r'(?:(?:[0,1]?\d?\d|2[0-4]\d|25[0-5])\.){3}(?:[0,1]?\d?\d|2[0-4]\d|25[0-5])')
    try:
        ip = re.findall('\((.*)\)', text)[0]
    except:
        ip = text
    INFO = ase_info.thread_get_info(ip)
    return jsonify(INFO)
@socketio.on('save_ip', namespace='/wifi_speaker/test')
def save_ip(msg):
    # Persist the ip in the saved-ip list if it is not already there.
    ip = msg.get('ip')
    ips = ase_info.load_ip()
    if ip not in ips:
        ips.append(ip)
    ase_info.store_ip(ips)
@app.route('/check_wifi', methods=['GET'])
def check_wifi():
    # Reachability probe against the device's BeoDevice endpoint.
    ip = request.args.get("ip")
    status = {'status': 'ok'}
    if not check_url_status("http://{}:8080/BeoDevice".format(ip), timeout=5):
        status = {'status': 'error'}
    return jsonify(status)
@app.route('/get_network_settings', methods=['GET'])
def get_network_settings():
    network = ase_info.get_info('network_settings', INFO['ip'])
    if network == 'NA' or network == 'error':
        return jsonify({"error": "error"})
    return jsonify(network)
@app.route('/bt_open_set', methods=['POST'])
def bt_open_set():
    enable = request.form.to_dict().get('enable')
    status = ase_info.bt_open_set(enable, INFO["ip"])
    return jsonify({'status': status})
# @socketio.on('bt_pair', namespace='/wifi_speaker/test')
@app.route('/bt_pair', methods=['POST'])
def bt_pair():
    cmd = request.form.to_dict().get('cmd')
    status = ase_info.pair_bt(cmd, INFO['ip'])
    return jsonify({'status': status})
def thread_auto_refresh(t, items):
    # Background task: every t seconds, re-fetch the selected info items and
    # push them to the client until STOP_REFRESH is set or the page changes.
    global STOP_REFRESH
    while PAGE_INFO['page'] == 'wifi_speaker':
        infos = {}
        socketio.sleep(int(t))
        if not check_url_status("http://{}:8080/BeoDevice".format(INFO['ip']), timeout=5):
            STOP_REFRESH = True
            socketio.emit('print_msg', {'data': 'Seems disconnected with your product!', 'color': 'red'},
                          namespace="/wifi_speaker/test")
            return
        if not STOP_REFRESH:
            for key, value in items.items():
                if value:
                    info = ase_info.get_info(key, INFO['ip'])
                    infos[key] = info
            socketio.emit('start_auto_refresh', infos, namespace="/wifi_speaker/test")
        else:
            return
@socketio.on('auto_refresh', namespace='/wifi_speaker/test')
def auto_refresh(msg):
    global STOP_REFRESH
    STOP_REFRESH = False
    # NOTE(review): `thread` is never assigned, so this always starts a new
    # task -- confirm whether a single-instance guard was intended.
    with thread_lock:
        if thread is None:
            socketio.start_background_task(thread_auto_refresh, msg.get('time_interval'), msg.get('items'))
@app.route('/stop_auto_refresh', methods=['POST'])
def stop_auto_refresh():
    global STOP_REFRESH
    STOP_REFRESH = True
    return jsonify({})
def thread_check_standby(ip):
    # Polls the device once per second until it reports Standby (reporting the
    # elapsed time), errors out, or a 30-minute timeout elapses.
    num = 0
    start_time = time.time()
    while PAGE_INFO['page'] == 'wifi_speaker':
        if num >= 60 * 30:  # timeout
            socketio.emit("check_standby", {"status": "timeout"}, namespace="/wifi_speaker/test")
            return
        status = ase_info.get_info('get_standby', ip)
        if status == 'NA' or status == 'error':
            socketio.emit("check_standby", {"status": "error"}, namespace="/wifi_speaker/test")
            return
        if status == 'Standby':
            tmp_time = time.time() - start_time
            # Format elapsed time as zero-padded "MMmSSs".
            m = str(int(tmp_time / 60))
            s = str(int(tmp_time % 60))
            m = '0' + m if len(m) == 1 else m
            s = '0' + s if len(s) == 1 else s
            elapsed_time = '{}m{}s'.format(m, s)
            socketio.emit("check_standby", {"status": "Standby", "elapsed_time": elapsed_time},
                          namespace="/wifi_speaker/test")
            return
        socketio.sleep(1)
        num = num + 1
@socketio.on('detect_standby', namespace='/wifi_speaker/test')
def detect_standby():
    socketio.start_background_task(thread_check_standby, INFO['ip'])
# Thin JSON wrappers around ase_info operations; all use the cached INFO dict
# populated by /get_info.
@app.route('/get_product_status', methods=['GET'])
def get_product_status():
    status = ase_info.get_info('get_product_status', INFO['ip'])
    return jsonify(status)
@app.route('/get_volume', methods=['GET'])
def get_volume():
    volume = ase_info.get_info('volume', INFO['ip'])
    if volume == 'NA' or volume == 'error':
        return jsonify({"error": "error"})
    return jsonify(volume)
@app.route('/get_other_info', methods=['GET'])
def get_other_info():
    info = ase_info.get_other_info(INFO['ip'])
    return jsonify(info)
    # return render_template('WifiSpeaker.html', info=info)
@app.route('/log_submit_server', methods=['POST'])
def log_submit_server():
    ip = request.form.to_dict().get("ip")
    status = ase_info.log_submit(ip)
    return jsonify({"status": status})
@app.route('/log_download_local', methods=['POST'])
def log_download_local():
    log_path = ase_info.download_log(INFO["ip"], INFO["sn"], script_path, ase_log_path)
    return jsonify({"log_path": log_path})
@app.route('/log_get', methods=['POST'])
def log_get():
    log_path = ase_info.get_log_files(INFO["ip"], INFO["sn"], cmd_get_log_file, ase_log_path)
    return jsonify({"log_path": log_path})
@app.route('/log_clear', methods=['POST'])
def log_clear():
    status = ase_info.log_clear(INFO["ip"])
    # os.system('"{} root@192.168.1.100"'.format(".\config\OpenSSH\\bin\ssh.exe"))
    return jsonify({"status": status})
@app.route('/change_product_name', methods=['POST'])
def change_product_name():
    # ip = request.form.to_dict().get("ip")
    product_name = request.form.to_dict().get("name")
    status = ase_info.change_product_name(product_name, INFO['ip'])
    return jsonify({"status": status})
@app.route('/ase_reset', methods=['POST'])
def ase_reset():
    # ip = request.form.to_dict().get("ip")
    status = ase_info.reset(INFO['ip'])
    return jsonify({'status': status})
@app.route('/ase_set_bt_reconnect_mode', methods=['POST'])
def ase_set_bt_reconnect_mode():
    # ip = request.form.to_dict().get("ip")
    mode = request.form.to_dict().get("mode")
    status = ase_info.bt_reconnect_set(mode, INFO['ip'])
    return jsonify({'status': status})
@app.route('/bt_remove', methods=['POST'])
def bt_remove():
    # ip = request.form.to_dict().get("ip")
    bt_mac = request.form.to_dict().get("bt_mac")
    status = ase_info.bt_remove(bt_mac, INFO['ip'])
    return jsonify({'status': status})
@app.route('/unblock', methods=['POST'])
def unblock():
    logger.info("Starting unblock...")
    ip = request.form.to_dict().get("ip")
    unblock_status = ase_info.unblock_device(ip)
    UNBLOCK["status"] = "successful" if unblock_status else "fail"
    return jsonify(UNBLOCK)
def thread_ota_status_check(ip):
    # After a 40s grace period, polls the device web UI every 3s until it is
    # reachable again (update finished) or ~180s of polling have elapsed.
    num = 0
    socketio.sleep(40)
    while PAGE_INFO['page'] == 'wifi_speaker':
        socketio.sleep(3)
        num = num + 1
        if num >= 60:  # 180s timeout
            socketio.emit("ota_check_over", namespace="/wifi_speaker/test")
            return
        if check_url_status("http://{}/index.fcgi".format(ip), timeout=6):
            socketio.emit("ota_check_over", namespace="/wifi_speaker/test")
            return
@socketio.on('ota_status_check', namespace='/wifi_speaker/test')
def ota_status_check(msg):
    socketio.start_background_task(thread_ota_status_check, msg["ip"])
@app.route('/one_tap_ota', methods=['POST'])
def one_tap_ota():
    """Uploads an OTA image to the device and triggers the update.

    Returns a JSON status: "device disconnect", "file error", the trigger
    result, or "" when the upload itself failed.
    """
    status = ""
    ip = request.form.to_dict().get("ip")
    file_path = r'{}'.format(request.form.to_dict().get("file_path"))
    if not check_url_status("http://{}/index.fcgi".format(ip), timeout=6):
        return jsonify({"status": "device disconnect"})
    if not os.path.exists(file_path):
        return jsonify({"status": "file error"})
    # BUGFIX: the file handle was opened and never closed, leaking a file
    # descriptor per request; the context manager keeps it open only for the
    # duration of the upload.
    with open(file_path, 'rb') as ota_file:
        files = {
            'file': ota_file
        }
        if ase_info.ota_update(ip, files) == 200:
            status = ase_info.trigger_update(ip)
        else:
            logger.debug("Upload ASE OTA file failed!")
    return jsonify({"status": status})
@app.route('/page_info', methods=['GET'])
def page_info():
    return jsonify(PAGE_INFO)
@app.route('/get_ota_setting', methods=['GET'])
def get_ota_setting():
    settings = load(ase_ota_setting_path)
    return jsonify(settings)
'''
@app.route('/ota_auto_update', methods=['GET', 'POST'])
def ota_auto_update():
    error = None
    if request.method == 'POST':
        store(status_json, {"aseOtaUpdate": "1"})
        setting_values = request.form.to_dict()
        store(ase_ota_setting_path, setting_values)
        _thread.start_new_thread(aseUpdate(socketio).start_ota, ())
        return jsonify({"": ""})
    else:
        return redirect(url_for('wifi_speaker'))
'''
def ase_ota_thread():
    # Watchdog: notifies the client once the OTA worker task has finished.
    while PAGE_INFO['page'] == 'wifi_speaker':
        socketio.sleep(1)
        if not thread_ase_ota.is_alive():
            socketio.emit("stop_ase_auto_update", namespace='/wifi_speaker/test')
            return
@socketio.on('ota_auto_update', namespace='/wifi_speaker/test')
def ota_auto_update(msg):
    # Persist the requested settings, then start the OTA worker plus its watchdog.
    global thread_ase_ota
    store(ase_ota_setting_path, msg)
    # thread.start_new_thread(aseUpdate(socketio).start_ota, ())
    with thread_lock:
        thread_ase_ota = socketio.start_background_task(target=AseUpdate(socketio, ase_ota_setting_path).start_ota)
    socketio.start_background_task(target=ase_ota_thread)
@app.route('/wifi_setup_setting', methods=['GET'])
def wifi_setup_setting():
    """Returns the saved Wi-Fi setup settings; defaults dhcp to 'True' when
    nothing has been saved yet.
    """
    '''
    settings = {}
    wifi_setup_cfg.cfg_load()
    for info1 in wifi_setup_cfg.cfg_dump():
        for info2 in info1:
            settings[info2[0]] = info2[1]
    wifi_setup_cfg.save()
    '''
    settings = load(wifi_setup_path)
    if len(settings) == 0:
        settings['dhcp'] = 'True'
    return jsonify(settings)
def auto_setup_wifi_thread():
    # Watchdog: notifies the client once the Wi-Fi setup worker has finished.
    while PAGE_INFO['page'] == 'wifi_speaker':
        socketio.sleep(1)
        if not thread_setup_wifi.is_alive():
            socketio.emit("stop_auto_setup_wifi", namespace='/wifi_speaker/test')
            return
@socketio.on('auto_setup_wifi', namespace='/wifi_speaker/test')
def auto_setup_wifi(msg):
    # Persist the settings, then run the Wi-Fi setup worker plus its watchdog.
    global thread_setup_wifi
    '''
    wifi_setup_cfg.cfg_load()
    wifi_setup_cfg.set_items(msg)
    wifi_setup_cfg.save()
    '''
    store(wifi_setup_path, msg)
    wifi_setup = WifiSetup(INFO["ip"], INFO["deviceName"], wifi_setup_path, socketio)
    thread_setup_wifi = socketio.start_background_task(target=wifi_setup.setup)
    socketio.start_background_task(target=auto_setup_wifi_thread)
@app.route('/exit_run')
def exit_run():
    return redirect(url_for('wifi_speaker'))
"""
@app.before_request
def my_before_request():
    status = load(status_json)["aseOtaUpdate"]
    if status=="1":
        return render_template('aseOtaStatus.html')
"""
@app.route('/check_ota_path', methods=['POST'])
def check_ota_path():
    # Validates that every submitted path points at an existing file.
    msg = request.form.to_dict()
    for n in msg:
        path = msg[n]
        if not (os.path.isfile(path) and os.stat(path)):
            return jsonify({"status": "error", "name": n})
    return jsonify({"status": "ok"})
"""
*********************************************************************************
*Power Cycle*
*********************************************************************************
"""
@app.route('/power_cycle')
def power_cycle():
    PAGE_INFO['page'] = 'power_cycle'
    store(power_cycle_running_status, {"power_cycle_status": 0})
    return render_template('PowerCycle.html')
def scan_port_thread():
    # Re-enumerates serial ports every ~1.2s while the page is open.
    global serial_tool
    serial_tool = SerialTool(power_cycle_running_status, socketio)
    while PAGE_INFO['page'] == 'power_cycle':
        serial_tool.find_all_serial()
        socketio.sleep(1.2)
@socketio.on('scan_port', namespace='/power_cycle/test')
def scan_port():
    socketio.start_background_task(target=scan_port_thread)
@socketio.on('open_port', namespace='/power_cycle/test')
def open_port(msg):
    global serial_tool
    serial_tool.open_port(msg)
@socketio.on('close_port', namespace='/power_cycle/test')
def close_port():
    global serial_tool
    serial_tool.close_port()
@socketio.on('send_ser_msg', namespace='/power_cycle/test')
def send_ser_msg(msg):
    global serial_tool
    serial_tool.send_msg(msg["msg"], False)
@app.route('/power_cycle_options', methods=['GET'])
def power_cycle_options():
    # Flattens the power-cycle config dump into a single key/value dict.
    settings = {}
    power_cycle_cfg.cfg_load()
    for info1 in power_cycle_cfg.cfg_dump():
        for info2 in info1:
            settings[info2[0]] = info2[1]
    power_cycle_cfg.save()
    return jsonify(settings)
def auto_power_cycle_thread(msg):
    global serial_tool
    store(power_cycle_running_status, {"power_cycle_status": 1})
    serial_tool.power_cycle(msg)
    socketio.emit("stop_confirm", namespace='/power_cycle/test')
@socketio.on('auto_power_cycle', namespace='/power_cycle/test')
def auto_power_cycle(msg):
    # Persist the options, then run the power-cycle loop in the background.
    global serial_tool
    power_cycle_cfg.cfg_load()
    power_cycle_cfg.set_items(msg)
    power_cycle_cfg.save()
    socketio.emit("start_auto_power_cycle", msg, namespace='/power_cycle/test')
    socketio.start_background_task(auto_power_cycle_thread, msg)
@app.route('/stop_auto_power_cycle', methods=['POST'])
def stop_auto_power_cycle():
    store(power_cycle_running_status, {"power_cycle_status": 0})
    # here cannot use socketio to send stop command as some block will happen and cannot get command immediately!
    socketio.emit("stop_auto_power_cycle", namespace='/power_cycle/test')
    return jsonify({"status": "stopped"})
"""
*********************************************************************************
*Automate command*
*********************************************************************************
"""
@app.route('/automate')
def automate():
    PAGE_INFO['page'] = 'automate'
    store(automate_running_status, {"run_state": 0})
    return render_template('Automate.html')
@socketio.on('automate_cmd', namespace='/automate/test')
def automate_cmd(msg):
    # Runs a single ad-hoc command via the Command helper.
    command = msg["cmd"]
    auto_cmd = Command()
    auto_cmd.init_file()
    val = auto_cmd.cmd(command)
    print(val)
@app.route('/automate/get_automation_info', methods=['GET'])
def get_automation_info():
    # Reads every automation option from config and returns two views:
    # info1 = display labels for the UI, info2 = raw values for the form.
    automation_cfg.cfg_load()
    bp_port = automation_cfg.cfg.get('Button_Press', 'bp_port')
    bp_time = automation_cfg.cfg.get('Button_Press', 'bp_time')
    bp_usb_port = automation_cfg.cfg.get('Button_Press', 'bp_usb_port')
    ac_usb_port = automation_cfg.cfg.get('AC_Power', 'ac_usb_port')
    ac_state = automation_cfg.cfg.get('AC_Power', 'ac_state')
    d_time = automation_cfg.cfg.get('Delay', 'd_time')
    ase_volume = automation_cfg.cfg.get('ASE', 'volume')
    ase_ip = automation_cfg.cfg.get('ASE', 'ip')
    check_volume = automation_cfg.cfg.get('ASE', 'check_volume')
    check_playback = automation_cfg.cfg.get('ASE', 'check_playback')
    check_power_state = automation_cfg.cfg.get('ASE', 'check_power_state')
    check_network = automation_cfg.cfg.get('ASE', 'check_network')
    check_source = automation_cfg.cfg.get('ASE', 'check_source')
    check_bt_connection = automation_cfg.cfg.get('ASE', 'check_bt_connection')
    automation_cfg.save()
    info1 = {
        "button_press": "Button Press (Port:{}, Time:{})".format(bp_port, bp_time),
        "ac_power": "AC Power (State:{})".format(ac_state),
        "delay": "Delay (Time:{})".format(d_time),
        "set_volume": "Set Volume (Value:{})".format(ase_volume),
        'do_check_volume': "Check Volume (Value:{})".format(check_volume),
        'do_check_playback': "Check Playback (Value:{})".format(check_playback),
        'do_check_power_state': 'Check Power State (Value:{})'.format(check_power_state),
        'do_check_network': 'Check Network Connection(Value:{})'.format(check_network),
        'do_check_source': 'Check Source (Value:{})'.format(check_source),
        'do_check_bt_connection': 'Check BT Connection(Value:{})'.format(check_bt_connection),
    }
    info2 = {
        'bp_port': bp_port,
        'bp_time': bp_time,
        'ac_state': ac_state,
        "d_time": d_time,
        "volume": ase_volume,
        "ip": ase_ip,
        'bp_usb_port': bp_usb_port,
        'ac_usb_port': ac_usb_port,
        'check_volume': check_volume,
        'check_playback': check_playback,
        'check_power_state': check_power_state,
        'check_network': check_network,
        'check_source': check_source,
        'check_bt_connection': check_bt_connection
    }
    info = {
        'info1': info1,
        'info2': info2
    }
    return jsonify(info)
@app.route('/automate/save_automation_info', methods=['POST'])
def save_automation_info():
    info = request.form.to_dict()
    automation_cfg.cfg_load()
    automation_cfg.set_items(info)
    automation_cfg.save()
    return jsonify({})
def run_automation_thread(msg):
    auto = Automate(automation_cfg, automate_running_status, socketio)
    store(automate_running_status, {"run_state": 1})
    socketio.sleep(0.1)
    auto.run(msg)
@socketio.on('start_run_automation', namespace='/automate/test')
def start_run_automation(msg):
    global run_automation_steps
    run_automation_steps = msg
    socketio.start_background_task(run_automation_thread, msg)
@app.route('/automate/stop_automation_running', methods=['POST'])
def stop_automation_running():
    store(automate_running_status, {"run_state": 0})
    # here cannot use socketio to send stop command as some block will happen and cannot get command immediately!
    return jsonify({"status": "stopped"})
@socketio.on('save_steps', namespace='/automate/test')
def save_steps(msg):
    """Persists an automation step list as <saved_action_steps>/<file_name>.json.

    BUGFIX: this handler was also named `start_run_automation`, silently
    shadowing the module-level name of the 'start_run_automation' handler
    defined above; renamed to match its event. The SocketIO event names and
    behavior are unchanged.
    """
    file_name = msg.get('file_name')
    file_path = os.path.join(saved_action_steps, '{}.json'.format(file_name))
    # Truncate first so stale content never survives a shorter write.
    with open(file_path, 'w') as f:
        f.write('')
    store(file_path, msg.get('steps'))
    socketio.emit('save_success', {'file_name': file_name}, namespace='/automate/test')
@app.route('/automate/load_steps', methods=['GET'])
def load_steps():
    # Lists the saved step files (names without extension).
    path = saved_action_steps
    all_files = []
    for root, dirs, files in os.walk(path):
        for filename in files:
            all_files.append(filename.rsplit('.', 1)[0])
    if len(all_files) == 0:
        return jsonify({})
    return jsonify({'files_name': all_files})
@app.route('/automate/remove_saved_list', methods=['POST'])
def remove_saved_list():
    path = saved_action_steps
    file_name = request.form.to_dict().get('name')
    # NOTE(review): this splits on '_' while load_step below splits on '__'
    # -- confirm which suffix convention the client actually sends.
    file_path = os.path.join(path, file_name.rsplit('_', 1)[0] + '.json')
    if os.path.isfile(file_path):
        os.remove(file_path)
    return jsonify({})
@app.route('/automate/load_step', methods=['POST'])
def load_step():
    # Returns the stored step list for one saved file, or {} if missing.
    info = {}
    path = saved_action_steps
    file_name = request.form.to_dict().get('name')
    file_path = os.path.join(path, file_name.rsplit('__', 1)[0] + '.json')
    if os.path.isfile(file_path):
        info = load(file_path)
    return jsonify(info)
"""
*********************************************************************************
*Data Graph*
*********************************************************************************
"""
@app.route('/DataGraph/')
def data_graph(data_name=None):
    if data_name is None:
        data_name = "demo.txt"
    PAGE_INFO['page'] = 'DataGraph'
    return render_template('DataGraph.html', name=data_name)
def allowed_file(filename):
    # Accepts only extensions listed in ALLOWED_EXTENSIONS.
    return '.' in filename and \
           filename.rsplit('.', 1)[1] in ALLOWED_EXTENSIONS
@app.route('/uploads/<filename>')
def uploaded_file(filename):
    return send_from_directory(app.config['UPLOAD_FOLDER'],
                               filename)
@app.route('/DataGraph/upload_file', methods=['GET', 'POST'])
def upload_file():
    file_url = ""
    if request.method == 'POST':
        file = request.files['file']
        if file and allowed_file(file.filename):
            filename = secure_filename(file.filename)
            file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
            # file_url = url_for('uploaded_file', filename=filename)
            file_url = filename
    # return html + '<br><img src=' + file_url + '>'
    # return jsonify({"errno": 0, "errmsg": "upload successful", "filename": filename})
    return render_template("DataGraph.html", name=file_url)
@app.route('/DataGraph/get_graph_data', methods=['POST'])
def get_graph_data():
    # Parses an uploaded "time: value" data file into parallel lists; a
    # '#title:...[unit]' header line supplies the chart title and unit.
    # NOTE(review): file_name is user-supplied and concatenated into the path
    # -- consider sanitizing (e.g. werkzeug secure_filename) against traversal.
    file_name = request.form.to_dict().get("filename")
    data_time = []
    data_values = []
    title = ""
    unit_name = ""
    file_error = ""
    with open("data/uploads/" + file_name, 'r') as f:
        for idx, line in enumerate(f, 1):
            try:
                if line.startswith('#'):
                    title = re.findall(r"#title:(.*)\[", line)[0]
                    unit_name = re.findall(r"\[(.*)\]", line)[0]
                    continue
                s = line.replace("\n", "").split(": ")
                data_time.append(s[0])
                data_values.append(s[1].replace(" ", ""))
            except Exception as e:
                # Keep only the last parse error; bad lines are skipped.
                file_error = "[{}] Line {}: Parse error: {}".format(file_name, idx, e)
                logger.debug(file_error)
    # socketio.emit("new_mychart", {"time": data_time, "values": data_values}, namespace='/DataGraph/test')
    return jsonify({"time": data_time, "values": data_values, "title": title, "name": unit_name,
                    "file_error": file_error})
"""
*********************************************************************************
*QR Code*
*********************************************************************************
"""
@app.route('/QR_Code')
def qr_code():
    """Render the QR-code generator page."""
    return render_template('QR.html')
@app.route('/QR_Code/generate', methods=['POST'])
def generate():
    """Generate a QR-code image for the submitted text.

    Removes stale qr_*.jpeg images, bumps the global counter so the browser
    cannot serve a cached image, writes the new picture under
    ./static/images/ and returns its filename.
    """
    global qr_code_num
    txt = request.form.to_dict().get("txt")
    qr_path = "./static/images/"
    for parent, dirs, files in os.walk(qr_path):
        for filename in files:
            obj = re.search(r'qr_(.*).jpeg', filename)
            if obj is not None:
                # os.path.join(parent, ...) fixes the original bug where a
                # match found in a subdirectory was removed with the
                # top-level path (qr_path + name), hitting the wrong file
                # or raising FileNotFoundError.
                os.remove(os.path.join(parent, obj.group()))
    qr_code_num = qr_code_num + 1
    pic = "qr_{}.jpeg".format(qr_code_num)
    make_qr = MakeQR(qr_path + pic, box_size=5)
    make_qr.generate(txt)
    return jsonify({"pic": pic})
"""
*********************************************************************************
*Common*
*********************************************************************************
"""
@socketio.on('WinSCP', namespace='/test')
def win_scp():
    """Resolve the bundled WinSCP executable path (launch is currently disabled).

    The original built the path in a non-raw string containing '\\c' and
    '\\W' escape sequences (invalid-escape DeprecationWarnings);
    os.path.join is both warning-free and separator-correct.
    """
    path = '"{}"'.format(os.path.join(os.getcwd(), 'config', 'WinSCP', 'WinSCP.exe'))
    # win32api.ShellExecute(0, 'open', path, '', '', 1)
if __name__ == '__main__':
    # Load host/port settings and launch the Flask-SocketIO server.
    app_setting = load("./data/app.json")
    app_version = app_setting["version"]
    if not app.config["DEBUG"]:
        # Open the app in the default browser for non-debug runs only.
        # NOTE(review): this fires before the server is listening —
        # presumably go_web_page tolerates/retries the initial connect.
        go_web_page("http://localhost:{}".format(app_setting["port"]))
    print("Server started: http://localhost:{}".format(app_setting["port"]))
    socketio.run(app, host=app_setting["host"], port=app_setting["port"])
    # app.run(host='localhost', port=5000)
| {"/build.py": ["/src/common/cfg.py"], "/sqaTools.py": ["/app_config.py", "/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/WifiSetup.py", "/src/powerCycle/SerialTool.py", "/src/automate/Command.py", "/src/automate/automate.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/common/QRcode.py"], "/src/wifiSpeaker/AseInfo.py": ["/src/common/SshHelper.py", "/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/wifiSpeaker/AseWebData.py"], "/src/wifiSpeaker/WifiSetup.py": ["/src/common/WindowsWifi.py", "/src/wifiSpeaker/AseInfo.py"], "/src/powerCycle/__init__.py": ["/src/powerCycle/SerialTool.py"], "/src/automate/__init__.py": ["/src/automate/automate.py"], "/src/automate/Command.py": ["/src/common/FtpUtil.py", "/src/common/cfg.py"], "/src/automate/automate.py": ["/src/wifiSpeaker/AseInfo.py", "/src/common/relay.py"], "/src/wifiSpeaker/__init__.py": ["/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/AseWebData.py", "/src/wifiSpeaker/WifiSetup.py"], "/src/common/__init__.py": ["/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/FtpUtil.py", "/src/common/QRcode.py", "/src/common/SshHelper.py", "/src/common/Url.py", "/src/common/relay.py"]} |
42,512 | BruceZhu88/sqaTools | refs/heads/master | /src/common/Url.py |
import requests
from urllib import request
def request_url(url, data=None, timeout=None):
    """Fetch *url* with urllib and report the outcome.

    :param url: URL to open (POST when *data* is given, as per urllib).
    :param data: optional request body.
    :param timeout: socket timeout in seconds, or None for the library default.
    :return: {'content': decoded body or 'error', 'status': HTTP status or ''}
    """
    status = ""
    try:
        req = request.Request(url, data)
        # Only pass timeout when the caller gave one: urlopen(timeout=None)
        # means "block forever", which differs from urllib's global default.
        if timeout is not None:
            opened = request.urlopen(req, timeout=timeout)
        else:
            opened = request.urlopen(req)
        # Context manager closes the connection even if read() fails —
        # the original leaked the response object.
        with opened as res:
            status = res.status
            text = res.read().decode('utf-8')
    except Exception:
        text = "error"
    return {"content": text, "status": status}
def requests_url(url, mode, data=None, timeout=5):
    """Issue an HTTP request through the requests library.

    :param url: target URL.
    :param mode: 'get', 'post' or 'put'; anything else yields the error result.
    :param data: optional payload forwarded to requests.
    :param timeout: per-request timeout in seconds.
    :return: {'content': decoded body or 'error', 'status': status code or ''}
    """
    dispatch = {
        'get': requests.get,
        'post': requests.post,
        'put': requests.put,
    }
    try:
        # Unknown modes raise KeyError and fall through to the error result,
        # matching the original's behaviour for unsupported verbs.
        method = dispatch[mode]
        response = method(url, data=data, timeout=timeout)
        content = response.content.decode('utf-8')
        status_code = response.status_code
    except Exception:
        content, status_code = 'error', ''
    return {'content': content, 'status': status_code}
def check_url_status(url, timeout=20.0):
    """Return True when *url* answers within *timeout* seconds, else False.

    Failures covered by the except include URLError (server unreachable),
    HTTPError (4xx/5xx responses), malformed URLs and timeouts.
    """
    try:
        req = request.Request(url)
        # Context manager closes the response — the original leaked it.
        with request.urlopen(req, timeout=timeout):
            return True
    except Exception:
        return False
| {"/build.py": ["/src/common/cfg.py"], "/sqaTools.py": ["/app_config.py", "/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/WifiSetup.py", "/src/powerCycle/SerialTool.py", "/src/automate/Command.py", "/src/automate/automate.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/common/QRcode.py"], "/src/wifiSpeaker/AseInfo.py": ["/src/common/SshHelper.py", "/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/wifiSpeaker/AseWebData.py"], "/src/wifiSpeaker/WifiSetup.py": ["/src/common/WindowsWifi.py", "/src/wifiSpeaker/AseInfo.py"], "/src/powerCycle/__init__.py": ["/src/powerCycle/SerialTool.py"], "/src/automate/__init__.py": ["/src/automate/automate.py"], "/src/automate/Command.py": ["/src/common/FtpUtil.py", "/src/common/cfg.py"], "/src/automate/automate.py": ["/src/wifiSpeaker/AseInfo.py", "/src/common/relay.py"], "/src/wifiSpeaker/__init__.py": ["/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/AseWebData.py", "/src/wifiSpeaker/WifiSetup.py"], "/src/common/__init__.py": ["/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/FtpUtil.py", "/src/common/QRcode.py", "/src/common/SshHelper.py", "/src/common/Url.py", "/src/common/relay.py"]} |
42,513 | BruceZhu88/sqaTools | refs/heads/master | /src/wifiSpeaker/AseInfo.py | """
Created on Jan 3, 2017
@author: Bruce zhu
"""
import re
import json
import subprocess
import os
import datetime
# import socket
from threading import Thread, Lock
from time import sleep
from urllib import parse
# from urllib.error import URLError
from src.common.Logger import Logger
from src.common.SshHelper import SshHelper
from src.common.WindowsWifi import WindowsWifi
from src.common.util import store
from src.common.util import load
from src.common.cfg import Config
from src.common.Url import *
from src.wifiSpeaker.AseWebData import *
# Guards shared scan state mutated from worker threads (see AseInfo.scan_devices).
lock = Lock()
main_cfg_path = './config/main.conf'
# Load the main ini-style configuration once at import time.
main_config = Config(main_cfg_path)
main_config.cfg_load()
main_cfg = main_config.cfg
# Path of the file where previously discovered speaker IPs are persisted.
saved_ip_path = main_cfg.get('WifiSpeaker', 'saved_ip')
class AseInfo(object):
    """Discovery, status queries and control of ASE-based Wi-Fi speakers.

    Talks to each device over its HTTP API (BeoDevice/BeoZone endpoints and
    the api/getData|getRows|setData interface), scans the LAN for devices on
    worker threads, and can pull logs over SSH.

    NOTE(review): many calls below pass json.loads(..., encoding='utf-8');
    that keyword is ignored on Python 3 and was removed in 3.9 — confirm the
    target interpreter version.  Thread.setDaemon() is likewise deprecated
    in favour of the daemon attribute.
    """

    def __init__(self):
        self.log = Logger("wifi_speaker").logger()
        self.ip = 'NA'
        self.device = 'NA'
        # api endpoint templates: first {} is the device IP, second the query.
        self.urlSetData = "http://{}/api/setData?{}"
        self.urlGetData = "http://{}/api/getData?{}"
        self.urlGetRows = "http://{}/api/getRows?{}"
        self.devices_list = []  # human-readable entries built by scan_devices()
        self.status = 0         # 0 = scan in progress, 1 = scan finished
        self.threads = []       # worker threads of the current scan
        self.INFO = {}          # aggregated per-device info cache
        self.saved_ip = []      # IPs persisted across runs (see load_ip/store_ip)

    def transfer_data(self, request_way, ip, value, timeout=6):
        """Send a getData/getRows/setData request to the device.

        :param request_way: 'get', 'getRows' or 'set'
        :param ip: device IP address
        :param value: dict of query parameters (urlencoded below)
        :param timeout: request timeout in seconds
        :return: result dict from requests_url, or None for an unknown way
        %27 = ', %22 = ", + = 'space'
        """
        value_str = parse.urlencode(value, encoding="utf-8")
        # Normalise the encoded query into the form the device parser expects
        # (white_space is a placeholder token defined in AseWebData).
        if "+" in value_str:
            value_str = value_str.replace('+', '')
        if "True" in value_str:
            value_str = value_str.replace('True', 'true')
        if "False" in value_str:
            value_str = value_str.replace('False', 'false')
        if "%27" in value_str:
            value_str = value_str.replace('%27', '%22')
        if white_space in value_str:
            value_str = value_str.replace(white_space, '+')
        if request_way == "get":
            url = self.urlGetData.format(ip, value_str)
        elif request_way == "getRows":
            url = self.urlGetRows.format(ip, value_str)
        elif request_way == "set":
            url = self.urlSetData.format(ip, value_str)
        else:
            self.log.error("No such request method: {}".format(request_way))
            return
        return requests_url(url, 'get', timeout=timeout)

    @staticmethod
    def ota_update(ip, ota_file):
        """
        ASE OTA Update
        :param ip: Device ip
        :param ota_file: Must be dict
        :return: Number type(post status)
        """
        url = "http://{}/page_setup_swupdate.fcgi?firmwareupdate=1".format(ip)
        req = requests.post(url=url, files=ota_file)
        return req.status_code

    def trigger_update(self, ip):
        # Ask the device to start a (previously uploaded) firmware update.
        return self.transfer_data("set", ip, update_para)["status"]

    def get_info(self, x, ip):
        """Query one category of device information, selected by *x*.

        Returns 'error' on a non-200 HTTP status, 'NA' for unknown *x* or
        any parse failure (caught by the blanket except at the bottom).
        Side effect: caches selected fields into self.INFO.
        """
        try:
            if x == 'basicInfo':
                r = request_url('http://{0}/index.fcgi'.format(ip), timeout=5)
                if r.get('status') != 200:
                    return 'error'
                # The page embeds its data as a JS literal: dataJSON = '...';
                text = re.findall('dataJSON = \'(.*)\';', r.get('content'))[0]
                data = json.loads(text, encoding='utf-8')
                '''
                r = request_url('http://{0}/page_status.fcgi'.format(ip))['text']
                product_id = re.findall('var productId = \'(.*)\';', r)[0]
                jd = json.loads(product_id, encoding='utf-8')
                sn = ''
                for i in jd["beoGphProductIdData"]["serialNumber"]:
                    sn = sn + str(i)
                '''
                beo_machine = data.get('beoMachine')
                fep_versions = beo_machine.get('fepVersions')
                info = {'modelName': beo_machine.get('modelName'),
                        'model': beo_machine.get('model'),
                        'productName': beo_machine.get('setup').get('productName'),
                        'bootloaderVersion': fep_versions.get('bootloaderVersion'),
                        'appVersion': fep_versions.get('appVersion')
                        }
                self.INFO['appVersion'] = info['appVersion']
                return info
            elif x == 'BeoDevice':
                r = request_url(beo_device.format(ip), timeout=5)
                if r.get('status') != 200:
                    return 'error'
                data = json.loads(r.get('content'), encoding='utf-8')
                beo_info = data.get('beoDevice')
                beo_productid = beo_info.get('productId')
                info = {'productType': beo_productid.get('productType'),
                        'serialNumber': beo_productid.get('serialNumber'),
                        'productFriendlyName': beo_info.get('productFriendlyName').get('productFriendlyName'),
                        'version': beo_info.get('software').get('version')
                        }
                self.INFO["sn"] = info['serialNumber']
                self.INFO["deviceName"] = info['productFriendlyName']
                self.INFO["version"] = info['version']
                return info
            elif x == 'modulesInformation':
                r = request_url(modules_info.format(ip), timeout=5)
                if r.get('status') != 200:
                    return 'error'
                data = json.loads(r.get('content'), encoding='utf-8')
                module = data.get('profile').get('module')
                # module[0] carries the FEP firmware entries.
                info = {
                    'fep_application': module[0].get('application').get('version'),
                    'fep_bootloader': module[0].get('bootloader').get('version'),
                    # 'AP': module[2].get('application').get('version'),
                    # 'GoogleCast': module[3].get('application').get('version')
                }
                self.INFO['fep_app'] = info.get('fep_application')
                self.INFO['bootloader'] = info.get('fep_bootloader')
                return info
            elif x == 'bluetoothSettings':
                r = request_url(bluetooth_settings.format(ip), timeout=5)
                if r.get('status') != 200:
                    return 'error'
                data = json.loads(r.get('content'), encoding='utf-8')  # GBK? Chinese string
                bluetooth = data.get('profile').get('bluetooth')
                device_settings = bluetooth.get('deviceSettings')
                always_open = device_settings.get('alwaysOpen')
                reconnect_mode = device_settings.get('reconnectMode')
                devices = bluetooth.get('devices')
                bt_devices = devices.get('device')
                info = {'bt_open': always_open,
                        'bt_reconnect_mode': reconnect_mode,
                        'bt_devices': bt_devices}
                self.INFO['bluetoothSettings'] = info
                return info
            elif x == 'network_settings':
                r = request_url(network_settings.format(ip), timeout=8)
                if r.get('status') != 200:
                    return 'error'
                data = json.loads(r.get('content'), encoding='utf-8')
                network_info = data.get('profile').get('networkSettings')
                interfaces = network_info.get('interfaces')
                active_interface = network_info.get('activeInterface')
                # wired = interfaces.get('wired')
                wireless = interfaces.get('wireless')
                active_network = wireless.get('activeNetwork')
                internet_reachable = 'Yes' if network_info.get('internetReachable') else 'No'
                dhcp = 'Yes' if active_network.get('dhcp') else 'No'
                if active_interface == 'wireless':
                    info = {
                        'Active Interface': 'Wi-Fi',
                        'Internet Reachable': internet_reachable,
                        'SSID': active_network.get('ssid'),
                        'DHCP': dhcp,
                        'Frequency': active_network.get('frequency').replace('ghz', ' GHz'),
                        'Quality': active_network.get('quality'),
                        'RSSI': active_network.get('rssi'),
                        'Encryption': active_network.get('encryption').replace('Psk', '-PSK').replace(
                            'Tkip', '(TKIP)').upper()
                    }
                else:
                    info = {
                        'Active Interface': 'Ethernet',
                        'Internet Reachable': internet_reachable,
                        'Wifi configured': 'Yes' if active_network.get('configured') else 'No',
                        'DHCP': dhcp
                    }
                self.INFO['network_settings'] = info
                return info
            elif x == 'volume':
                r = requests_url(volume_speaker.format(ip), 'get')
                if r.get('status') != 200:
                    return 'error'
                data = json.loads(r.get('content'), encoding='utf-8')
                speaker = data.get('speaker')
                volume_range = speaker.get('range')
                speaker_volume = {
                    'Current Level': speaker.get('level'),
                    'Default Level': speaker.get('defaultLevel'),
                    'Muted': 'Yes' if speaker.get('muted') else 'No',
                    'Min': volume_range.get('minimum'),
                    'Max': volume_range.get('maximum')
                }
                self.INFO['speaker_volume'] = speaker_volume
                return speaker_volume
            elif x == 'current_source':
                r = request_url(current_source.format(ip), timeout=5)
                if r.get('status') != 200:
                    self.INFO['current_source'] = 'error'
                    return 'error'
                data = json.loads(r.get('content'), encoding='utf-8')
                if len(data) == 0:
                    # Empty payload means nothing is playing.
                    source = 'None'
                else:
                    source = data.get('friendlyName')
                self.INFO['current_source'] = source
                return source
            elif x == 'get_standby':
                r = request_url(standby_status.format(ip), timeout=5)
                if r.get('status') != 200:
                    self.INFO['standby_status'] = 'error'
                    return 'error'
                data = json.loads(r.get('content'), encoding='utf-8')
                status = data.get('standby').get('powerState')
                # Capitalise, e.g. 'standby' -> 'Standby'.
                # NOTE(review): replace() swaps EVERY occurrence of the first
                # character, so a state like 'on' is fine but 'sleeps' would
                # become 'SleepS' is not possible — only first-char repeats
                # are affected; verify state vocabulary.
                status = status.replace(status[0], status[0].upper())
                self.INFO['standby_status'] = status
                return status
            elif x == 'regional_settings':
                r = request_url(regional_settings.format(ip), timeout=5)
                if r.get('status') != 200:
                    return 'error'
                data = json.loads(r.get('content'), encoding='utf-8')
                data_region = data.get('profile').get('regionalSettings')
                info = {
                    'Country': data_region.get('country').get('country'),
                    'Date Time': data_region.get('dateTime').get('dateTime'),
                    'Time Zone': data_region.get('timeZone').get('inTimeZone')
                }
                return info
            elif x == 'power_management':
                r = request_url(power_management.format(ip), timeout=5)
                if r.get('status') != 200:
                    return 'error'
                data = json.loads(r.get('content'), encoding='utf-8')
                data_power = data.get('profile').get('powerManagement')
                info = {
                    'Idle Timeout': data_power.get('idleTimeout').get('timeout'),
                    'Play Timeout': data_power.get('playTimeout').get('timeout')
                }
                return info
            elif x == 'muted':
                r = request_url(volume_speaker.format(ip) + '/Muted', timeout=5)
                if r.get('status') != 200:
                    self.INFO['muted'] = 'error'
                    return 'error'
                data = json.loads(r.get('content'), encoding='utf-8')
                muted = data.get('muted')
                self.INFO['muted'] = muted
                return muted
            elif x == 'stream_state':
                r = request_url(sys_products.format(ip), timeout=5)
                if r.get('status') != 200:
                    return 'error'
                data = json.loads(r.get('content'), encoding='utf-8')
                products = data.get('products')
                # Find this device's entry by friendly name; if no entry
                # matches, 'state' stays unbound and the NameError below is
                # swallowed by the outer except (returns 'NA').
                for k, v in enumerate(products):
                    name = self.get_info('BeoDevice', ip).get('productFriendlyName')
                    if v.get('friendlyName') == name:
                        state = v.get('primaryExperience').get('state')
                        source_type = v.get('primaryExperience').get('source').get('sourceType').get('type')
                        break
                info = {
                    'state': state,
                    'source_type': source_type
                }
                return info
            elif x == 'get_product_status':
                return self.get_product_status(ip)
            else:
                return 'NA'
        except Exception as e:
            self.log.debug('cmd = {0}, error: {1}'.format(x, e))
            return 'NA'

    def get_other_info(self, ip):
        """Collect power-management and regional settings into one dict."""
        li = ['power_management', 'regional_settings']
        title = {
            'power_management': 'Power Management',
            'regional_settings': 'Regional Settings'
        }
        info = {}
        for i in li:
            tmp = self.get_info(i, ip)
            if tmp == 'NA' or tmp == 'error':
                return {'error': 'error'}
            info[title[i]] = tmp
        return info

    @staticmethod
    def standby(ip):
        # Put the device into standby; returns the HTTP status code.
        url = power_management.format(ip) + '/standby'
        payload = json.dumps({"standby": {"powerState": "standby"}})
        return requests_url(url, 'put', data=payload).get('status')

    @staticmethod
    def set_volume(ip, value=None):
        # Set the speaker volume level; returns the HTTP status code.
        url = volume_speaker.format(ip) + '/Level'
        payload = json.dumps({"level": int(value)})
        return requests_url(url, 'put', data=payload).get('status')

    @staticmethod
    def stream(ip, mode):
        """Player Stream
        mode = 'Play', 'Pause', 'Wind', 'Rewind', 'Forward', 'Backward'
        """
        url = zone_stream.format(ip, mode)
        return requests_url(url, 'post').get('status')

    @staticmethod
    def mute(ip):
        # Unconditionally unmutes (muted: False); returns the HTTP status.
        url = volume_speaker.format(ip) + '/Muted'
        payload = json.dumps({"muted": False})
        return requests_url(url, 'put', data=payload).get('status')

    def get_product_status(self, ip):
        """Refresh standby/source/mute state and return a summary dict."""
        info = ['get_standby', 'current_source', 'muted']
        for i in info:
            self.get_info(i, ip)
        self.INFO['product_status'] = {
            'Power': self.INFO.get('standby_status'),
            'Source': self.INFO.get('current_source'),
            'Muted': 'Yes' if self.INFO.get('muted') else 'No'
        }
        return self.INFO["product_status"]

    def scan_wifi(self, ip):
        # Ask the device for its Wi-Fi scan results.
        data = self.transfer_data("getRows", ip, network_scan_results_para)
        return data

    def pair_bt(self, pair, ip):
        # Start ('pair') or cancel (anything else) Bluetooth pairing mode.
        if pair == 'pair':
            para = pairBT_para
        # elif pair == 'cancel':
        else:
            para = pairCancelBT_para
        return self.transfer_data("set", ip, para)['status']

    def reset(self, ip):
        # Trigger a factory reset on the device.
        return self.transfer_data("set", ip, factoryResetRequest_para)['status']

    def change_product_name(self, name, ip):
        return self.transfer_data("set", ip, set_device_name(name))['status']

    def log_submit(self, ip):
        # Log upload can take a while on the device, hence the long timeout.
        return self.transfer_data("set", ip, logReport_para, timeout=80)["status"]

    def log_clear(self, ip):
        return self.transfer_data("set", ip, clearLogs_para)['status']

    def bt_open_set(self, open_enable, ip):
        return self.transfer_data("set", ip, set_pairing_mode(open_enable))['status']

    def bt_remove(self, bt_mac, ip):
        # Device API addresses paired devices with '_'-separated MACs.
        bt_mac = bt_mac.replace(":", "_")
        return self.transfer_data("set", ip, bt_remove_para(bt_mac))['status']

    def bt_reconnect_set(self, mode, ip):
        # UI exposes 'disabled', the device expects 'none'.
        if mode == 'disabled':
            mode = 'none'
        return self.transfer_data("set", ip, set_bt_mode(mode))['status']

    # ==================================================================================================================
    def setup_wifi(self, ssid="", key="", dhcp=True, ip="", gateway="", netmask="", originalIp=""):
        """Push Wi-Fi credentials/network settings to the device at originalIp.

        Returns True when the request was sent, False on any error.
        """
        # logging.log(logging.INFO, "Setup wifi ssid=%s key=%s"%(wifissid,pwd))
        encryption = "wpa_psk"
        if key == "":
            # No passphrase implies an open network.
            encryption = "none"
        wifi_value = wifi_settings(ssid, key, encryption, dhcp, ip, gateway, netmask)
        try:
            self.transfer_data("set", originalIp, wifi_value)
        except Exception as e:
            self.log.info(e)
            return False
        else:
            return True

    # ==================================================================================================================
    @staticmethod
    def get_ase(ip, info):
        """For unblock device"""
        # Shells out to the bundled thrift client and returns its stdout lines.
        # info = 'readinfo'
        cmd = r'"{}\\config\\thrift\\thrift2.exe"'.format(os.getcwd()) + " {} 1 {}".format(ip, info)
        s = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
        pipe = s.stdout.readlines()
        return pipe

    def unblock_device(self, ip):
        """Try to unblock a device via the thrift tool; True on success."""
        if not check_url_status(beo_device.format(ip), timeout=5):
            return False
        self.log.debug('Start unblock...')
        try:
            # Last stdout line carries the result ('Successful' on success).
            status = str(self.get_ase(ip, 'readinfo')[-1], 'utf-8')
        except Exception as e:
            self.log.error(e)
            return False
        if 'Successful' in status:
            self.log.debug('Unblock successfully!')
            return True
        self.log.debug('Unblock failed!')
        return False

    # ==================================================================================================================
    def get_ase_devices_list(self):
        """Kick off a threaded LAN scan for ASE devices (non-blocking).

        Results accumulate in self.devices_list; self.status flips to 1 when
        all workers have finished (see scan_status).
        """
        # self.devices_list.clear()
        self.devices_list = []
        self.status = 0
        self.saved_ip = self.load_ip()
        ip_info = WindowsWifi().discover_ips()
        scanned_ip = ip_info.get('ip')
        gateway = ip_info.get('gateway')
        # Saved IPs first (preserving order), then newly scanned ones.
        ips = []
        ips.extend(self.saved_ip)
        for ip in scanned_ip:
            if ip not in self.saved_ip:
                ips.append(ip)
        # ips = list(set(self.saved_ip + scanned_ip)) # method set will not keep list order
        # Drop IPs whose /24 prefix does not match the current gateway.
        for ip in ips:
            tmp = ip.rsplit('.', 1)[0]
            if tmp not in gateway:
                ips = [i for i in filter(lambda x: x != ip, ips)]
        self.log.debug(ips)
        self.thread_scan_devices(ips)
        # For show device info in real-time(one by one) when scanning
        thread_status = Thread(target=self.scan_status, args=())
        thread_status.setDaemon(True)
        thread_status.start()

    def thread_scan_devices(self, ips: list):
        # One daemon worker per candidate IP.
        self.threads = []
        for ip in ips:
            t = Thread(target=self.scan_devices, args=(ip,))
            t.setDaemon(True)
            t.start()
            self.threads.append(t)
            sleep(0.005)  # avoid network block

    @staticmethod
    def load_ip():
        # Previously discovered IPs from the persisted JSON file.
        return load(saved_ip_path).get('ip')

    @staticmethod
    def store_ip(ips):
        # If saved ips are more than 20, then remove the oldest one (first in first out).
        if len(ips) > 20:
            ips = [i for i in filter(lambda x: x != ips[0], ips)]
        store(saved_ip_path, {"ip": ips})

    def scan_status(self):
        # Wait for all scan workers, then mark the scan done and persist IPs.
        for td in self.threads:
            td.join()
        self.status = 1
        self.store_ip(self.saved_ip)

    def scan_devices(self, ip):
        """Probe a single IP; on success append a display entry to devices_list."""
        r = requests_url(beo_device.format(ip), 'get', timeout=6)
        # if not self.check_url_status("http://{}/index.fcgi".format(ip), timeout=6): # timeout need modify
        if r.get('status') != 200:
            return
        try:
            data = json.loads(r.get('content'), encoding='utf-8')
            beo_info = data.get('beoDevice')
            device_name = beo_info.get('productFriendlyName').get('productFriendlyName')
            model_name = beo_info.get('productId').get('productType')
            # device_name = self.get_info("device_name", ip)
            # model_name = self.get_info("basicInfo", ip)['modelName']
            """
            result = socket.gethostbyaddr(ip)
            try:
                host_name = result[0].replace(".lan", "")
            except Exception as e:
                self.log.error("host name not contain .lan" + e)
                return
            """
            # NOTE(review): lock.release() is not in a finally block — an
            # exception between acquire and release would leave the lock held.
            if lock.acquire():
                if ip not in self.saved_ip:
                    self.saved_ip.append(ip)
                self.devices_list.append("{} ({}) [{}]".format(device_name, ip, model_name))
                # self.devices_list.append("{} ({})".format(host_name, ip))
                lock.release()
        # except socket.herror as e:
        except Exception as e:
            self.log.debug("Something wrong when scanning {}: {}".format(ip, e))

    # ==================================================================================================================
    def thread_get_info(self, ip):
        """Collect the full info set for one device into self.INFO.

        Returns the INFO dict, or {'error': ..., 'ip': ...} when the device
        is unreachable.
        """
        if not check_url_status(beo_device.format(ip), timeout=5):
            return {"error": "error", "ip": ip}
        # self.devices_list.clear()
        # basic_info = ase_info.get_info("basicInfo", ip)
        # beo_device_info = ase_info.get_info('BeoDevice', ip)
        self.INFO["ip"] = ip
        info = ['BeoDevice', 'modulesInformation', 'bluetoothSettings', 'get_product_status']
        # threads = []
        for i in info:
            self.get_info(i, ip)
        '''
        t = Thread(target=self.get_info, args=(i, ip))
        t.setDaemon(True)
        t.start()
        threads.append(t)
        # sleep(0.02) # avoid network block
        for td in threads:
            td.join()
        '''
        self.INFO["ase_version"] = '{} ({})'.format(
            self.INFO.get('version'), self.INFO.get('sn'))
        self.INFO["fep_versions"] = '{} ({})'.format(
            self.INFO.get('fep_app'), self.INFO.get('bootloader'))
        return self.INFO

    # ==================================================================================================================
    def send_cmd(self, ip, cmd):
        """Run a shell command on the device over SSH; retries after unblock."""
        user = 'root'
        ssh_rsa = './data/ssh-rsa'
        ssh_path = '{}/config/OpenSSH/'.format(os.getcwd())
        ssh = SshHelper(ip, user, ssh_rsa, ssh_path)
        content = ssh.execute(cmd)
        if len(content) == 0:
            # Empty output usually means the device blocks SSH; try unblocking.
            if self.unblock_device(ip):
                content = ssh.execute(cmd)
            else:
                self.log.error('Cannot communicate with your sample!')
        return content

    def get_log_files(self, ip, sn, cmd, save_path):
        """Run *cmd* remotely and save its output to a timestamped .log file.

        Returns the local file path, or '' on failure.
        """
        save_path = os.path.abspath(save_path)
        if not os.path.exists(save_path):
            os.mkdir(save_path)
        path = os.path.join(save_path, '{}_{}_{}.log'.format(
            cmd.rsplit('/', 1)[1], sn, datetime.datetime.now().strftime('%m%d_%H_%M_%S')))
        try:
            content = self.send_cmd(ip, cmd)
            with open(path, 'w', encoding='utf-8') as f:
                f.write(content)
        except Exception as e:
            self.log.error(e)
            path = ""
        return path

    def download_log(self, ip, sn, script_path, save_path):
        """Run the commands in *script_path* on the device, then download the
        resulting log archive (URL built from the last command's output).

        Returns the local .tgz path, or '' on failure.
        """
        save_path = os.path.abspath(save_path)
        if not os.path.exists(save_path):
            os.mkdir(save_path)
        path, content = '', ''
        with open(script_path, 'r') as f:
            for l in f.readlines():
                cmd = l.replace('\n', '')
                content = self.send_cmd(ip, cmd)
        # url = re.findall(r'http:.*tgz', content)
        url = 'http://{}/{}'.format(ip, content)
        path = os.path.join(save_path, 'log_{}.tgz'.format(sn))
        try:
            request.urlretrieve(url, path)
        except Exception as e:
            self.log.error(e)
            path = ''
        return path
if __name__ == "__main__":
    # Ad-hoc manual test: pushes a local OTA image to a device on the LAN.
    # NOTE(review): hard-coded Windows path and IP — only runnable on the
    # author's machine; the file handle is never closed explicitly.
    ase_info = AseInfo()
    files = {
        'file': open(r'D:\bruce\Tymphany\Test_case\CA17\version\ase2ca17s810-release-1-0-15059-28653020', 'rb')
    }
    ase_info.ota_update("192.168.1.160", files)
| {"/build.py": ["/src/common/cfg.py"], "/sqaTools.py": ["/app_config.py", "/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/WifiSetup.py", "/src/powerCycle/SerialTool.py", "/src/automate/Command.py", "/src/automate/automate.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/common/QRcode.py"], "/src/wifiSpeaker/AseInfo.py": ["/src/common/SshHelper.py", "/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/wifiSpeaker/AseWebData.py"], "/src/wifiSpeaker/WifiSetup.py": ["/src/common/WindowsWifi.py", "/src/wifiSpeaker/AseInfo.py"], "/src/powerCycle/__init__.py": ["/src/powerCycle/SerialTool.py"], "/src/automate/__init__.py": ["/src/automate/automate.py"], "/src/automate/Command.py": ["/src/common/FtpUtil.py", "/src/common/cfg.py"], "/src/automate/automate.py": ["/src/wifiSpeaker/AseInfo.py", "/src/common/relay.py"], "/src/wifiSpeaker/__init__.py": ["/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/AseWebData.py", "/src/wifiSpeaker/WifiSetup.py"], "/src/common/__init__.py": ["/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/FtpUtil.py", "/src/common/QRcode.py", "/src/common/SshHelper.py", "/src/common/Url.py", "/src/common/relay.py"]} |
42,514 | BruceZhu88/sqaTools | refs/heads/master | /src/common/QRcode.py |
import qrcode
class MakeQR(object):
    """Render QR-code images to a file using the qrcode library.

    Construction only records the rendering parameters; generate() does the
    actual encoding and writes the image to the configured path.
    """

    def __init__(self, path, version=1, box_size=5, border=1):
        # QR symbol version (size), module pixel size and quiet-zone width.
        self.version = version
        self.box_size = box_size
        self.border = border
        # Destination image file.
        self.path = path

    def generate(self, txt):
        """Encode *txt* as a QR code and save the image to self.path."""
        code = qrcode.main.QRCode(version=self.version,
                                  box_size=self.box_size,
                                  border=self.border)
        code.add_data(txt)
        # fit=True lets the library bump the version if txt doesn't fit.
        code.make(fit=True)
        image = code.make_image()
        image.save(self.path)
# print('ok,please check your source folder and check the pic.')
if __name__ == '__main__':
    # Smoke test: write a QR code for a sample URL next to this script.
    text = "http://v.youku.com/v_show/id_XMTQ2MTE2MTUwMA==.html?spm=a2hzp.8244740.0.0&from=y1.7-1.2"
    qr = MakeQR("./qr.jpg")
    qr.generate(text)
| {"/build.py": ["/src/common/cfg.py"], "/sqaTools.py": ["/app_config.py", "/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/WifiSetup.py", "/src/powerCycle/SerialTool.py", "/src/automate/Command.py", "/src/automate/automate.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/common/QRcode.py"], "/src/wifiSpeaker/AseInfo.py": ["/src/common/SshHelper.py", "/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/wifiSpeaker/AseWebData.py"], "/src/wifiSpeaker/WifiSetup.py": ["/src/common/WindowsWifi.py", "/src/wifiSpeaker/AseInfo.py"], "/src/powerCycle/__init__.py": ["/src/powerCycle/SerialTool.py"], "/src/automate/__init__.py": ["/src/automate/automate.py"], "/src/automate/Command.py": ["/src/common/FtpUtil.py", "/src/common/cfg.py"], "/src/automate/automate.py": ["/src/wifiSpeaker/AseInfo.py", "/src/common/relay.py"], "/src/wifiSpeaker/__init__.py": ["/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/AseWebData.py", "/src/wifiSpeaker/WifiSetup.py"], "/src/common/__init__.py": ["/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/FtpUtil.py", "/src/common/QRcode.py", "/src/common/SshHelper.py", "/src/common/Url.py", "/src/common/relay.py"]} |
42,515 | BruceZhu88/sqaTools | refs/heads/master | /src/powerCycle/SerialTool.py |
import sys
import json
import random
import os
from time import sleep
from serial.tools import list_ports
from src.common.Logger import Logger
from src.common.SerialHelper import SerialHelper
class SerialTool(object):
def __init__(self, state_path, socketio=None):
self.socketio = socketio
self.log = Logger("power_cycle").logger()
self.ini = None
self.port_list = []
self.ser = None
self.port_selected = None
self.port_disconnect = False
self.state_path = os.path.abspath(state_path)
def print_log(self, msg, color='white'):
try:
self.log.info(msg)
if self.socketio is not None:
self.socketio.sleep(0.01) # Avoid sockeit network block leads to cannot print on page
self.socketio.emit('print_log',
{'msg': msg, 'color': color},
namespace='/power_cycle/test')
except Exception as e:
self.log.debug("Error when print_log: {}".format(e))
sys.exit()
def add_port(self, port_info):
try:
self.socketio.sleep(0.005)
self.socketio.emit('add_port',
{'data': port_info},
namespace='/power_cycle/test')
except Exception as e:
self.log.debug("Error when add_port: {}".format(e))
sys.exit()
def del_port(self, port_info):
try:
self.socketio.sleep(0.005)
self.socketio.emit('del_port',
{'data': port_info},
namespace='/power_cycle/test')
except Exception as e:
self.log.debug("Error when del_port: {}".format(e))
sys.exit()
def delay(self, t):
if self.socketio is not None:
self.socketio.sleep(t)
else:
sleep(t)
def find_all_serial(self):
"""Get serial list
:param self:
:return:
"""
try:
temp_serial = list()
for com in list_ports.comports():
str_com = com[0] + ": " + com[1][:-7] # + ": " + com[1][:-7].decode("gbk").encode("utf-8")
temp_serial.append(str_com)
for item in temp_serial:
if item not in self.port_list:
self.add_port(item)
self.port_list.append(item)
for item in self.port_list:
if item not in temp_serial:
self.port_list = [i for i in filter(lambda x: x != item, self.port_list)]
self.del_port(item)
self.port_list = temp_serial
if self.port_selected is not None:
if self.port_selected not in self.port_list:
self.port_disconnect = True
msg = "Disconnected [{0}]!".format(self.port_selected)
self.socketio.emit('port_status', {'msg': msg, 'color': "red"}, namespace='/power_cycle/test')
self.port_selected = None
except Exception as e:
self.log.error(e)
sys.exit()
def open_port(self, port_set):
try:
self.port_selected = port_set["port_info"]
port = port_set["port_info"].split(":")[0]
baud_rate = port_set["baud_rate"]
parity = port_set["parity"]
data_bit = port_set["data_bit"]
stop_bit = port_set["stop_bit"]
self.ser = SerialHelper(Port=port, BaudRate=baud_rate, ByteSize=data_bit, Parity=parity, Stopbits=stop_bit)
self.ser.start()
if self.ser.alive:
self.port_disconnect = False
msg = "Open [{0}] Successfully!".format(port_set["port_info"])
font_color = "green"
else:
msg = "Something wrong with your serial port!"
font_color = "red"
except Exception as e:
self.log.error(e)
msg = "Open [{0}] Failed!".format(port_set["port_info"])
font_color = "red"
self.socketio.emit('port_status', {'msg': msg, 'color': font_color}, namespace='/power_cycle/test')
def close_port(self):
try:
self.ser.stop()
self.port_selected = None
if not self.ser.alive:
msg = "Ready"
self.socketio.emit('port_status', {'msg': msg, 'color': "white"}, namespace='/power_cycle/test')
except Exception as e:
self.log.error(e)
def send_msg(self, msg, is_hex):
self.ser.write(msg.encode('utf-8'), isHex=is_hex)
def power(self, status):
with open(self.state_path) as f:
status_running = json.load(f)
if status_running["power_cycle_status"] == 0:
self.print_log("You stopped running!!!", "red")
self.socketio.emit("stop_confirm", namespace='/power_cycle/test')
sys.exit()
if status == "on":
time_delay = float(self.ini["time_on"])
if "-" in self.ini["button_press_on"]:
val = self.ini["button_press_on"].split("-")
button_press_delay = round(random.uniform(float(val[0]), float(val[1])), 4)
else:
button_press_delay = float(self.ini["button_press_on"])
ac_address = 'B'
else:
time_delay = float(self.ini["time_off"])
if "-" in self.ini["button_press_off"]:
val = self.ini["button_press_off"].split("-")
button_press_delay = round(random.uniform(float(val[0]), float(val[1])), 4)
else:
button_press_delay = float(self.ini["button_press_off"])
ac_address = 'A'
self.print_log('Powering {}...'.format(status))
if not self.port_disconnect:
try:
if self.ini["relay_type"] == 'Button':
self.ser.write(self.ini["port_address"].encode('utf-8'), isHex=True)
self.print_log("button press delay = {}s".format(button_press_delay))
self.delay(button_press_delay)
self.ser.write("00".encode('utf-8'), isHex=True)
else:
self.ser.write(ac_address.encode('utf-8'))
self.print_log('Waiting {} seconds'.format(time_delay))
self.delay(time_delay)
except Exception as e:
self.print_log('Error when powering on: {0}'.format(e), 'red')
return False
else:
self.print_log('Serial Port seems disconnected......', 'red')
return False
return True
def stop_confirm(self):
    """Acknowledge a stop request back to the web client."""
    payload = {'data': "stopped"}
    self.socketio.emit('stop_confirm', payload, namespace='/power_cycle/test')
def power_cycle(self, ini):
    # Run the configured number of on/off cycles.  `ini` is a dict of
    # settings (relay_type, total_count, delays, ...) stored on self so the
    # power() helper can read it.  Returns True when all cycles finished,
    # False when the serial link is down or a power step failed.
    self.ini = ini
    if self.ser.alive:
        if self.ini["relay_type"] == 'Button':
            # Relay-board init sequence; the order of these hex commands
            # matters ('50', '51', then release with '00') — TODO confirm
            # against the relay board's protocol documentation.
            self.delay(0.5)
            self.ser.write('50'.encode('utf-8'), isHex=True)
            self.delay(0.5)
            self.ser.write('51'.encode('utf-8'), isHex=True)
            self.delay(0.5)
            self.ser.write('00'.encode('utf-8'), isHex=True)
        for i in range(1, int(self.ini["total_count"]) + 1):
            self.print_log('This is >>>>>>>>>> %i<<<<<<<<<< times' % i)
            if not self.power('on'):
                return False
            if not self.power('off'):
                return False
        # Leave the device powered on after the final cycle.
        self.print_log('Power on your device...')
        self.power('on')
        self.print_log('-' * 37)
        self.print_log('*********Running over*********')
        self.print_log('-' * 37)
        self.delay(0.5)
    else:
        return False
    return True
| {"/build.py": ["/src/common/cfg.py"], "/sqaTools.py": ["/app_config.py", "/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/WifiSetup.py", "/src/powerCycle/SerialTool.py", "/src/automate/Command.py", "/src/automate/automate.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/common/QRcode.py"], "/src/wifiSpeaker/AseInfo.py": ["/src/common/SshHelper.py", "/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/wifiSpeaker/AseWebData.py"], "/src/wifiSpeaker/WifiSetup.py": ["/src/common/WindowsWifi.py", "/src/wifiSpeaker/AseInfo.py"], "/src/powerCycle/__init__.py": ["/src/powerCycle/SerialTool.py"], "/src/automate/__init__.py": ["/src/automate/automate.py"], "/src/automate/Command.py": ["/src/common/FtpUtil.py", "/src/common/cfg.py"], "/src/automate/automate.py": ["/src/wifiSpeaker/AseInfo.py", "/src/common/relay.py"], "/src/wifiSpeaker/__init__.py": ["/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/AseWebData.py", "/src/wifiSpeaker/WifiSetup.py"], "/src/common/__init__.py": ["/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/FtpUtil.py", "/src/common/QRcode.py", "/src/common/SshHelper.py", "/src/common/Url.py", "/src/common/relay.py"]} |
42,516 | BruceZhu88/sqaTools | refs/heads/master | /src/wifiSpeaker/AseWebData.py | deviceName_para = {"path": "settings:/deviceName",
"roles": "value"}
# ---------------------------------------------------------------------------
# Ready-made request payloads ("para" dicts) for the speaker's Beo API.
# Each payload names a resource "path", the "roles" to act on, and — for
# setters/activations — a typed "value".
# ---------------------------------------------------------------------------
displayVersion_para = {"path": "/system/displayVersion", "roles": "value"}
# Bluetooth pairing / connection payloads.
pairBT_para = {"path": "bluetooth:externalDiscoverable",
               "roles": "activate",
               "value": {"type": "bool_", "bool_": True}}
pairCancelBT_para = {"path": "bluetooth:externalDiscoverable",
                     "roles": "activate",
                     "value": {"type": "bool_", "bool_": False}}
pairingAlwaysEnabled_para = {"path": "settings:/bluetooth/pairingAlwaysEnabled", "roles": "value"}
autoConnect_para = {"path": "settings:/bluetooth/autoConnect", "roles": "value"}
pairedPlayers_para = {"path": "bluetooth:pairedPlayers", "roles": "title,id,description", "from": 0, "to": 99}
# Network / audio settings queries.
WirelessSSID_para = {"path": "networkWizard:info/WirelessSSID", "roles": "value"}
wifiSignalLevel_para = {"path": "settings:beo/wifiSignalLevel", "roles": "value"}
volumeDefault_para = {"path": "settings:/mediaPlayer/volumeDefault", "roles": "value"}
volumeMax_para = {"path": "settings:/mediaPlayer/volumeMax", "roles": "value"}
network_scan_results_para = {"path": "network:scan_results", "roles": "title,value", "from": 0, "to": 99}
# Maintenance actions.
factoryResetRequest_para = {"path": "beo_LocalUI:factoryResetRequest", "roles": "activate",
                            "value": {"type": "bool_", "bool_": True}}
logReport_para = {
    "path": "BeoPortal:logReport/send",
    "roles": "activate",
    "value": {"type": "bool_", "bool_": True}
}
update_para = {
    "path": "firmwareUpdate:update",
    "roles": "accept",
    "value": {"type": "string_", "string_": ""}
}
clearLogs_para = {
    "path": "systemManager:clearLogs",
    "roles": "activate",
    "value": {"type": "bool_", "bool_": True}
}
# location: HK
# region: None
# timezone: Asia/Hong_Kong
location_para = {
    "path": "settings:/location",
    "roles": "value",
    "value": {"type": "string_", "string_": "HK"}
}
timezone_para = {
    "path": "settings:/timezone",
    "roles": "value",
    "value": {"type": "string_", "string_": "Asia/Hong_Kong"}
}
# location: CN
# region: Shanghai
# timezone: Asia/Shanghai
'''
path: settings:/region
roles: value
value: {"type":"string_","string_":"Asia/Shanghai"}
'''
# setData
# "Tue May 22 10:14:32 2018"
time_manager_para = {
    "path": "time_manager:/get/actual/local/time/request",
    "roles": "activate",
    "value": {"type": "bool_", "bool_": True}
}
# REST endpoint URL templates; format() with the device IP (zone_stream also
# takes a stream id).
beo_device = "http://{}:8080/BeoDevice/"
sys_products = "http://{}:8080/BeoZone/System/Products"
modules_info = "http://{}:8080/BeoDevice/modulesInformation"
current_source = "http://{}:8080/BeoZone/Zone/ActiveSourceType"
# {"sourceType":{"type":"BLUETOOTH"},"friendlyName":"Bluetooth"}
network_settings = "http://{}:8080/BeoDevice/networkSettings"
bluetooth_settings = "http://{}:8080/BeoDevice/bluetoothSettings"
standby_status = "http://{}:8080/BeoDevice/powerManagement/standby"
# "standby":{"powerState":"standby"
volume_speaker = "http://{}:8080/BeoZone/Zone/Sound/Volume/Speaker"
zone_stream = "http://{}:8080/BeoZone/Zone/Stream/{}"
power_management = "http://{}:8080/BeoDevice/powerManagement"
regional_settings = "http://{}:8080/BeoDevice/regionalSettings"
# Placeholder that survives urlencode; a literal " " would become "+".
white_space = '_0_white_space_0_'
def wifi_settings(ssid, key, encryption, dhcp, ip, gateway, netmask):
    """Build the BeoWeb:/network activation payload for joining a network.

    Spaces in the SSID are swapped for the white_space placeholder because
    a literal " " would be urlencoded to "+" downstream.
    """
    profile = {
        "wireless": {
            "dhcp": dhcp,
            "dns": ["", ""],
            "gateway": gateway,
            "encryption": encryption,
            "ip": ip,
            "ssid": ssid.replace(' ', white_space),
            "netmask": netmask,
            "key": key,
        },
        "wired": {
            "dhcp": dhcp,
            "dns": ["", ""],
            "gateway": "",
            "ip": "",
            "netmask": "",
        },
        "type": "automatic",
    }
    return {
        "path": "BeoWeb:/network",
        "roles": "activate",
        "value": {"networkProfile": profile, "type": "networkProfile"},
    }
def bt_remove_para(bt_mac):
    """Payload that unpairs the bluetooth device with MAC *bt_mac*."""
    unpair_path = "bluetooth:devices/{}/unpair".format(bt_mac)
    return {"path": unpair_path,
            "roles": "activate",
            "value": {"type": "bool_", "bool_": True}}
def set_device_name(name):
    """Payload renaming the speaker; spaces use the white_space placeholder."""
    safe_name = name.replace(' ', white_space)
    return {"path": "settings:/deviceName",
            "roles": "value",
            "value": {"type": "string_", "string_": safe_name}}
def set_pairing_mode(enable):
    """Payload toggling the always-pairing-enabled bluetooth setting."""
    value = {"type": "bool_", "bool_": enable}
    return {"path": "settings:/bluetooth/pairingAlwaysEnabled",
            "roles": "value",
            "value": value}
def set_bt_mode(mode):
    """Payload selecting the bluetooth auto-connect mode."""
    value = {"type": "bluetoothAutoConnectMode", "bluetoothAutoConnectMode": mode}
    return {"path": "settings:/bluetooth/autoConnect", "roles": "value", "value": value}
| {"/build.py": ["/src/common/cfg.py"], "/sqaTools.py": ["/app_config.py", "/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/WifiSetup.py", "/src/powerCycle/SerialTool.py", "/src/automate/Command.py", "/src/automate/automate.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/common/QRcode.py"], "/src/wifiSpeaker/AseInfo.py": ["/src/common/SshHelper.py", "/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/wifiSpeaker/AseWebData.py"], "/src/wifiSpeaker/WifiSetup.py": ["/src/common/WindowsWifi.py", "/src/wifiSpeaker/AseInfo.py"], "/src/powerCycle/__init__.py": ["/src/powerCycle/SerialTool.py"], "/src/automate/__init__.py": ["/src/automate/automate.py"], "/src/automate/Command.py": ["/src/common/FtpUtil.py", "/src/common/cfg.py"], "/src/automate/automate.py": ["/src/wifiSpeaker/AseInfo.py", "/src/common/relay.py"], "/src/wifiSpeaker/__init__.py": ["/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/AseWebData.py", "/src/wifiSpeaker/WifiSetup.py"], "/src/common/__init__.py": ["/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/FtpUtil.py", "/src/common/QRcode.py", "/src/common/SshHelper.py", "/src/common/Url.py", "/src/common/relay.py"]} |
42,517 | BruceZhu88/sqaTools | refs/heads/master | /src/wifiSpeaker/WifiSetup.py | # coding = utf-8
# Author: Bruce.Zhu
import re
import sys
import json
import urllib.request
from time import sleep
from src.common.Logger import Logger
from src.common.WindowsWifi import WindowsWifi
from src.wifiSpeaker.AseInfo import AseInfo
class WifiSetup(object):
    """Repeatedly factory-resets a speaker and drives it through wifi setup,
    reporting progress to the browser over socketio.

    Flow per cycle: reset the product, join its own setup AP, push the wifi
    credentials, rejoin the target network, rediscover the product and check
    it answers over HTTP.
    """

    def __init__(self, ip, product_name, wifi_setting, socketio):
        # wifi_setting: path to a JSON file holding the test parameters.
        self.socketio = socketio
        self.wifi_ini = wifi_setting
        self.log = Logger("wifi_speaker").logger()
        self.wifi = WindowsWifi(socketio=socketio)
        self.ase_info = AseInfo()
        self.total_times = 0    # planned cycles (read from the JSON file)
        self.success_times = 0  # successful cycles so far
        self.ip = ip
        self.product_name = product_name

    def print_log(self, info, color='white'):
        # Log *info* and mirror it to the web page; plain print() without
        # socketio.  Exits the worker if even logging fails.
        try:
            self.log.info(info)
            if self.socketio is not None:
                self.socketio.sleep(0.01)  # Avoid sockeit network block leads to cannot print on page
                self.socketio.emit('print_msg',
                                   {'data': info, 'color': color},
                                   namespace='/wifi_speaker/test')
            else:
                print(info)
        except Exception as e:
            self.log.debug("Error when print_log: {}".format(e))
            sys.exit()

    def delay(self, t):
        # socketio.sleep yields to the server's event loop; fall back to
        # time.sleep when running without a server.
        if self.socketio is not None:
            self.socketio.sleep(t)
        else:
            sleep(t)

    def reset_and_wait(self, ip, t):
        """Trigger a factory reset on *ip* and block for *t* seconds."""
        self.ase_info.reset(ip)
        self.print_log("Doing factory reset. Waiting %ss..." % t)
        self.delay(t)

    def check_wifi_status(self, ip):
        """Return True when an HTTP GET of *ip* (a URL) answers 200."""
        try:
            response = urllib.request.urlopen(ip, timeout=20)
            status = response.status
            if status == 200:
                return True
            else:
                return False
        except Exception as e:
            self.print_log("Cannot connect {}: {}".format(ip, e))
            return False

    def setup(self):
        """Main loop: run `total_times` wifi-setup cycles as configured in
        the JSON settings file; exits the worker on unrecoverable errors."""
        try:
            with open(self.wifi_ini) as json_file:
                data = json.load(json_file)
                # parent_path = os.path.realpath(os.path.join(os.getcwd(), ".."))
                times = int(data.get("total_times"))
                time_reset = int(data.get("time_reset"))
                dhcp = data.get("dhcp")
                ssid = data.get("ssid")
                key = data.get("password")
                # encryption = wifi_setting.cfg.get("Wifi", "encryption")
                ip = data.get("static_ip")
                gateway = data.get("gateway")
                netmask = data.get("netmask")
        except Exception as e:
            self.log.error(e)
            sys.exit()
        # hostName = "beoplay-{model}-{SN}.local".format(model=model, SN=SN)
        host_url = "http://{}/index.fcgi"
        # DHCP holds the sequence of modes to exercise; "True"/"False" pins
        # one mode, any other value alternates dynamic -> static -> dynamic.
        DHCP = []
        if dhcp == "True" or dhcp == "true":
            DHCP.append(True)
        elif dhcp == "False" or dhcp == "false":
            DHCP.append(False)
        else:
            DHCP.extend([True, False, True])
        self.total_times = times
        for cycle in range(1, times + 1):
            self.print_log("This is the %d times " % cycle)
            for index in DHCP:
                dhcp = index
                static_ip, static_gateway, static_netmask = '', '', ''
                if not dhcp:
                    static_ip, static_gateway, static_netmask = ip, gateway, netmask
                self.print_log("Set DHCP={}".format(dhcp))
                self.reset_and_wait(self.ip, time_reset)
                # Wait for the product's own setup AP to appear and join it.
                while True:
                    if self.wifi.find_wifi(self.product_name):
                        self.wifi.connect_wifi(self.product_name)
                        self.delay(15)  # Give wifi connect some time
                        if self.wifi.check_wifi(self.product_name):
                            break
                    self.delay(3)
                # 192.168.1.1 is the product's address while in setup mode.
                if not self.check_wifi_status("http://192.168.1.1/index.fcgi#Fts/Network"):
                    return
                if self.ase_info.setup_wifi(ssid, key, dhcp, static_ip, static_gateway, static_netmask, "192.168.1.1"):
                    self.print_log("Wifi setup command has been sent!")
                    if self.wifi.find_wifi(ssid):
                        self.wifi.connect_wifi(ssid)
                        self.delay(15)  # Give wifi connect some time
                        if self.wifi.check_wifi(ssid):
                            # Rediscover the product on the target network to
                            # learn its new IP ("name (ip)" in the device list).
                            scan_result = 0
                            for i in range(0, 10):
                                if scan_result == 1:
                                    break
                                devices = self.scan_devices()
                                for d in devices:
                                    if self.product_name in d:
                                        scan_result = 1
                                        self.ip = re.findall('\((.*)\)', d)[0]
                                        break
                            if scan_result == 0:
                                self.print_log("Something wrong when scan devices!")
                                sys.exit()
                            if self.check_wifi_status(host_url.format(self.ip)):
                                if not dhcp:
                                    if ip == self.ip:
                                        self.print_log("Your static ip[{}] setup successfully!".format(ip))
                                    else:
                                        self.print_log("Your static ip[{}] setup Failed!".format(ip))
                                        return
                                else:
                                    self.print_log("Wifi[{}] setup successfully!".format(ssid))
                                    self.success_times = self.success_times + 1
                        else:
                            self.print_log("Cannot connect wifi %s" % ssid)
                            break
            self.socketio.emit("wifi_setup_pass_ratio", {"data": "{}/{}".format(self.success_times, cycle)}
                               , namespace="/wifi_speaker/test")
        if self.success_times >= self.total_times:
            finish_print = "**********All finished*********"
            self.print_log(finish_print)

    def scan_devices(self):
        """Kick off a device-list scan and busy-wait until AseInfo reports
        completion (status == 1); returns the discovered device list."""
        self.print_log("Scanning devices")
        self.ase_info.get_ase_devices_list()
        while True:
            status = self.ase_info.status
            if status == 1:
                return self.ase_info.devices_list
| {"/build.py": ["/src/common/cfg.py"], "/sqaTools.py": ["/app_config.py", "/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/WifiSetup.py", "/src/powerCycle/SerialTool.py", "/src/automate/Command.py", "/src/automate/automate.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/common/QRcode.py"], "/src/wifiSpeaker/AseInfo.py": ["/src/common/SshHelper.py", "/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/wifiSpeaker/AseWebData.py"], "/src/wifiSpeaker/WifiSetup.py": ["/src/common/WindowsWifi.py", "/src/wifiSpeaker/AseInfo.py"], "/src/powerCycle/__init__.py": ["/src/powerCycle/SerialTool.py"], "/src/automate/__init__.py": ["/src/automate/automate.py"], "/src/automate/Command.py": ["/src/common/FtpUtil.py", "/src/common/cfg.py"], "/src/automate/automate.py": ["/src/wifiSpeaker/AseInfo.py", "/src/common/relay.py"], "/src/wifiSpeaker/__init__.py": ["/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/AseWebData.py", "/src/wifiSpeaker/WifiSetup.py"], "/src/common/__init__.py": ["/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/FtpUtil.py", "/src/common/QRcode.py", "/src/common/SshHelper.py", "/src/common/Url.py", "/src/common/relay.py"]} |
42,518 | BruceZhu88/sqaTools | refs/heads/master | /src/common/WindowsWifi.py | # coding=utf-8
#
# Author: Bruce Zhu
#
# import netifaces ---> some os cannot loads this dll(is not a valid win32 application)
import os
import re
import sys
import subprocess
import socket
from time import sleep
from ctypes import windll
from src.common.Logger import Logger
class WindowsWifi(object):
    """Thin wrapper around Windows `netsh` / `arp` commands for scanning,
    joining and checking wifi networks.  Output parsing copes with both
    Chinese and English console locales by probing the code page once at
    construction time."""

    def __init__(self, socketio=None):
        self.socketio = socketio
        self.log = Logger("wifi_speaker").logger()
        # *************************************************************
        # Chinese: 0x804
        # English: 0x409
        dll_handle = windll.kernel32
        id = hex(dll_handle.GetSystemDefaultUILanguage())
        if id == "0x804":
            system_language = "Chinese"
        elif id == "0x409":
            system_language = "English"
        else:
            system_language = ""
        self.log.debug("system language: "+system_language)
        # *************************************************************
        # Probe the console code page: if `chcp` prints Chinese characters,
        # netsh/arp output keywords will be Chinese too.
        p = subprocess.Popen("chcp", shell=True, stdout=subprocess.PIPE)
        try:
            code = p.stdout.read().decode("GB2312")
        except:
            # NOTE(review): stdout was already consumed by the first read(),
            # so this fallback read likely returns b"" — verify on a UTF-8
            # console.
            code = p.stdout.read().decode("utf-8")
        # if system_language == "Chinese":
        if self.contain_zh(code) is not None:
            self.wifi_state = "已连接"
            self.wifi_state_find = "状态"
            self.ipType = "动态"
        else:
            self.wifi_state = "connected"
            self.wifi_state_find = "State"
            self.ipType = "dynamic"

    def print_log(self, info):
        # Log *info* and echo it to the browser when a socketio server is
        # attached; exits the worker when even logging fails.
        try:
            self.log.info(info)
            if self.socketio is not None:
                self.socketio.sleep(0.01)
                self.socketio.emit('print_msg',
                                   {'data': info},
                                   namespace='/test')
            else:
                print(info)
        except Exception as e:
            self.log.debug("Error when print_log: {}".format(e))
            sys.exit()

    def delay(self, t):
        # Cooperative sleep when running under a socketio server.
        if self.socketio is not None:
            self.socketio.sleep(t)
        else:
            sleep(t)

    @staticmethod
    def contain_zh(word):
        """:return None: no"""
        # Any CJK unified ideograph in *word* counts as Chinese.
        zh_pattern = re.compile(u'[\u4e00-\u9fa5]+')
        match = zh_pattern.search(word)
        return match

    def connect_wifi(self, name):
        # Fire-and-forget join; success is verified later via check_wifi().
        self.print_log("Try to connect wifi --> %s" % name)
        p = os.popen("netsh wlan connect name=\"{name}\"".format(name=name))
        content = p.read()
        self.print_log(content)
        # os.system("netsh wlan connect name=%s" % name)

    def wifi_status(self):
        """Return the raw `netsh wlan show interfaces` output."""
        self.print_log("Checking wifi status...")
        p = os.popen("netsh wlan show interfaces")
        content = p.read()
        return content

    def check_wifi(self, wifi_name):
        """Poll up to 5 times (1s apart); True when *wifi_name* is the
        currently connected SSID, False otherwise."""
        # self.print_log(content)
        for i in range(0, 5):
            content = self.wifi_status()
            try:
                # Parse "SSID : name" and "State/状态 : connected/已连接".
                wifi_ssid = re.findall(u"SSID(.*)", content)[0].split(": ")[1]
                wifi_state = re.findall(u"%s(.*)" % self.wifi_state_find, content)[0].split(": ")[1]
                # self.print_log(wifi_state)
                if wifi_ssid == wifi_name:
                    if wifi_state == self.wifi_state:
                        self.print_log("Wifi %s connected!" % wifi_name)
                        return True
                self.print_log("Wifi [%s] did not connected!" % wifi_name)
            except Exception as e:
                self.log.error("Check wifi:{}".format(e))
            self.delay(1)
        return False

    def find_wifi(self, str):
        # True when an SSID containing *str* shows up in a fresh scan.
        # NOTE(review): the parameter shadows the builtin `str`.
        self.print_log("Finding wifi %s ..." % str)
        p = subprocess.Popen("netsh wlan disconnect",
                             shell=True)  # win10 system cannot auto refresh wifi list, so disconnect it first
        p.wait()
        # p = os.popen("netsh wlan show networks") #netsh wlan show networks mode=bssid
        # content = p.read().decode("gbk", "ignore")
        p = subprocess.Popen("netsh wlan show networks | find \"%s\"" % str, shell=True, stdout=subprocess.PIPE)
        try:
            content = p.stdout.read().decode("GB2312")  # byte decode to str, and GB2312 is avoid Chinese strings.
        except:
            content = p.stdout.read().decode("utf-8")
        if content != "":
            self.print_log("Find [%s]" % str)
            return True
        else:
            return False

    """
    @staticmethod
    def get_network_status():
        network_status = {}
        network_status['gateway'] = netifaces.gateways()['default'][netifaces.AF_INET][0]
        network_status['nicName'] = netifaces.gateways()['default'][netifaces.AF_INET][1]
        for interface in netifaces.interfaces():
            if interface == network_status['nicName']:
                network_status['mac'] = netifaces.ifaddresses(interface)[netifaces.AF_LINK][0]['addr']
                try:
                    network_status['ip'] = netifaces.ifaddresses(interface)[netifaces.AF_INET][0]['addr']
                    network_status['netMask'] = netifaces.ifaddresses(interface)[netifaces.AF_INET][0]['netmask']
                except KeyError as e:
                    self.log.debug(e)
        return network_status
    """

    def discover_ips(self):
        """
        gateway = self.get_network_status()['gateway']
        ip = self.get_network_status()['ip']
        netmask = self.get_network_status()['netMask']
        # count_bit = lambda count_str: len([i for i in count_str if i == '1'])
        # count_bit("111111")
        netmask_bit = 0
        for n in netmask.split('.'):
            netmask_bit = netmask_bit + bin(int(n)).count('1')
        """
        # Return {'ip': [...], 'gateway': g}: neighbour IPs parsed from the
        # ARP cache, excluding our own address and the gateway.
        ip = socket.gethostbyname(socket.gethostname())
        tmp = ip.rsplit(".", 1)[0]
        # Assumes a /24 network with the gateway at .1 — TODO confirm.
        gateway = "{}.1".format(tmp)
        # netmask_bit = 24
        # path = "{}\config\\NBTscan-Ipanto.exe".format(os.getcwd())
        # cmd = r"{} {}/{}".format(path, gateway, netmask_bit)
        # self.log.debug(cmd)
        text = ""
        try:
            # `net view` primes the ARP cache before `arp -a` is parsed.
            p = subprocess.Popen('net view', shell=True, stdout=subprocess.PIPE)
            p.wait()
            sleep(0.1)
            p = subprocess.Popen("arp -a", shell=True, stdout=subprocess.PIPE)
            text = p.stdout.readlines()
        except Exception as e:
            self.log.debug(e)
        ip_address = []
        for t in text:
            try:
                content = t.decode("GB2312")
            except:
                content = t.decode("utf-8")
            # Keep only dynamic ("动态"/"dynamic") ARP entries.
            if self.ipType in content:
                # re.findall(r'(?:(?:[0,1]?\d?\d|2[0-4]\d|25[0-5])\.){3}(?:[0,1]?\d?\d|2[0-4]\d|25[0-5])')
                ip_filter = re.findall(r'\d+\.\d+\.\d+\.\d+', content)
                if len(ip_filter) != 0:
                    if gateway != ip_filter[0] and ip != ip_filter[0]:
                        ip_address.append(ip_filter[0])
        return {'ip': ip_address, 'gateway': gateway}
if __name__ == "__main__":
    # Manual smoke test: print the ARP-discovered neighbour IPs.
    wifi = WindowsWifi()
    # data = wifi.find_wifi("Beoplay M3_00094760")
    # print(data)
    print(wifi.discover_ips())
| {"/build.py": ["/src/common/cfg.py"], "/sqaTools.py": ["/app_config.py", "/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/WifiSetup.py", "/src/powerCycle/SerialTool.py", "/src/automate/Command.py", "/src/automate/automate.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/common/QRcode.py"], "/src/wifiSpeaker/AseInfo.py": ["/src/common/SshHelper.py", "/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/wifiSpeaker/AseWebData.py"], "/src/wifiSpeaker/WifiSetup.py": ["/src/common/WindowsWifi.py", "/src/wifiSpeaker/AseInfo.py"], "/src/powerCycle/__init__.py": ["/src/powerCycle/SerialTool.py"], "/src/automate/__init__.py": ["/src/automate/automate.py"], "/src/automate/Command.py": ["/src/common/FtpUtil.py", "/src/common/cfg.py"], "/src/automate/automate.py": ["/src/wifiSpeaker/AseInfo.py", "/src/common/relay.py"], "/src/wifiSpeaker/__init__.py": ["/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/AseWebData.py", "/src/wifiSpeaker/WifiSetup.py"], "/src/common/__init__.py": ["/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/FtpUtil.py", "/src/common/QRcode.py", "/src/common/SshHelper.py", "/src/common/Url.py", "/src/common/relay.py"]} |
42,519 | BruceZhu88/sqaTools | refs/heads/master | /src/common/cfg.py |
import configparser
class Config(object):
    """Small convenience wrapper around configparser.ConfigParser bound to
    a single .ini file."""

    def __init__(self, file):
        self.file = file  # path to the bound .ini file
        self.cfg = configparser.ConfigParser()

    def cfg_load(self):
        """(Re)read the bound file into self.cfg; missing files are ignored."""
        self.cfg.read(self.file)

    def cfg_dump(self):
        """Return [[(key, value), ...], ...] — one item list per section."""
        return [self.cfg.items(se) for se in self.get_section()]

    def get_section(self):
        """Return the list of section names (excluding DEFAULT)."""
        return self.cfg.sections()

    def get_sections(self):
        # Exposes configparser's internal mapping; kept for existing callers.
        return self.cfg._sections

    def delete_item(self, se, key):
        """Remove *key* from section *se*."""
        self.cfg.remove_option(se, key)

    def delete_section(self, se):
        """Remove the whole section *se*."""
        self.cfg.remove_section(se)

    def add_section(self, se):
        """Create section *se* unless it already exists (no DuplicateSectionError)."""
        # Replaces a hand-rolled scan over get_section() with the stdlib check.
        if not self.cfg.has_section(se):
            self.cfg.add_section(se)

    def set_item(self, se, key, value):
        """Set *key* = *value* in section *se* (section must exist)."""
        self.cfg.set(se, key, value)

    def save(self):
        """Write the current configuration back to the bound file."""
        # Context manager guarantees the handle is closed even if write() fails
        # (the old code leaked the handle on error).
        with open(self.file, 'w') as fd:
            self.cfg.write(fd)

    def print_values(self):
        """Dump every section.key = value to stdout, framed by '*' rulers."""
        print('*' * 100)
        for sec in self.cfg:
            for value in self.cfg[sec]:
                print(sec + "." + value + " = " + self.cfg[sec][value])
        print('*' * 100)

    def set_items(self, data):
        """For each key in *data*, update it in every section that defines it."""
        for v in data.keys():
            for sec in self.cfg:
                if v in self.cfg[sec]:
                    self.set_item(sec, v, str(data[v]))
if __name__ == '__main__':
    # Manual smoke test: load the wifi settings file, dump it and write it back.
    info = Config('../../data/wifiSetting.ini')
    """
    info.cfg_load()
    info.add_section('ZJE')
    info.set_item('ZJE','name','zhujunwen')
    info.cfg_dump()
    info.save()
    """
    info.cfg_load()
    # print(info.cfg.getint('Running_times', 'total_times'))
    # info.set_item('Running_times','total_times','2')
    # info.print_values()
    # print(info.cfg_dump())
    # print(len(info.cfg_dump()))
    # print(info.cfg_dump()[1][0][1])
    # print("*"*100)
    # print(info.get_section())
    s = {'total_times': '20'}
    # info.set_items(s)
    info.cfg_dump()
    info.print_values()
    info.save()
| {"/build.py": ["/src/common/cfg.py"], "/sqaTools.py": ["/app_config.py", "/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/WifiSetup.py", "/src/powerCycle/SerialTool.py", "/src/automate/Command.py", "/src/automate/automate.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/common/QRcode.py"], "/src/wifiSpeaker/AseInfo.py": ["/src/common/SshHelper.py", "/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/wifiSpeaker/AseWebData.py"], "/src/wifiSpeaker/WifiSetup.py": ["/src/common/WindowsWifi.py", "/src/wifiSpeaker/AseInfo.py"], "/src/powerCycle/__init__.py": ["/src/powerCycle/SerialTool.py"], "/src/automate/__init__.py": ["/src/automate/automate.py"], "/src/automate/Command.py": ["/src/common/FtpUtil.py", "/src/common/cfg.py"], "/src/automate/automate.py": ["/src/wifiSpeaker/AseInfo.py", "/src/common/relay.py"], "/src/wifiSpeaker/__init__.py": ["/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/AseWebData.py", "/src/wifiSpeaker/WifiSetup.py"], "/src/common/__init__.py": ["/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/FtpUtil.py", "/src/common/QRcode.py", "/src/common/SshHelper.py", "/src/common/Url.py", "/src/common/relay.py"]} |
42,520 | BruceZhu88/sqaTools | refs/heads/master | /src/powerCycle/__init__.py | from .SerialTool import SerialTool | {"/build.py": ["/src/common/cfg.py"], "/sqaTools.py": ["/app_config.py", "/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/WifiSetup.py", "/src/powerCycle/SerialTool.py", "/src/automate/Command.py", "/src/automate/automate.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/common/QRcode.py"], "/src/wifiSpeaker/AseInfo.py": ["/src/common/SshHelper.py", "/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/wifiSpeaker/AseWebData.py"], "/src/wifiSpeaker/WifiSetup.py": ["/src/common/WindowsWifi.py", "/src/wifiSpeaker/AseInfo.py"], "/src/powerCycle/__init__.py": ["/src/powerCycle/SerialTool.py"], "/src/automate/__init__.py": ["/src/automate/automate.py"], "/src/automate/Command.py": ["/src/common/FtpUtil.py", "/src/common/cfg.py"], "/src/automate/automate.py": ["/src/wifiSpeaker/AseInfo.py", "/src/common/relay.py"], "/src/wifiSpeaker/__init__.py": ["/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/AseWebData.py", "/src/wifiSpeaker/WifiSetup.py"], "/src/common/__init__.py": ["/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/FtpUtil.py", "/src/common/QRcode.py", "/src/common/SshHelper.py", "/src/common/Url.py", "/src/common/relay.py"]} |
42,521 | BruceZhu88/sqaTools | refs/heads/master | /src/automate/__init__.py |
from .automate import Automate
| {"/build.py": ["/src/common/cfg.py"], "/sqaTools.py": ["/app_config.py", "/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/WifiSetup.py", "/src/powerCycle/SerialTool.py", "/src/automate/Command.py", "/src/automate/automate.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/common/QRcode.py"], "/src/wifiSpeaker/AseInfo.py": ["/src/common/SshHelper.py", "/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/wifiSpeaker/AseWebData.py"], "/src/wifiSpeaker/WifiSetup.py": ["/src/common/WindowsWifi.py", "/src/wifiSpeaker/AseInfo.py"], "/src/powerCycle/__init__.py": ["/src/powerCycle/SerialTool.py"], "/src/automate/__init__.py": ["/src/automate/automate.py"], "/src/automate/Command.py": ["/src/common/FtpUtil.py", "/src/common/cfg.py"], "/src/automate/automate.py": ["/src/wifiSpeaker/AseInfo.py", "/src/common/relay.py"], "/src/wifiSpeaker/__init__.py": ["/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/AseWebData.py", "/src/wifiSpeaker/WifiSetup.py"], "/src/common/__init__.py": ["/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/FtpUtil.py", "/src/common/QRcode.py", "/src/common/SshHelper.py", "/src/common/Url.py", "/src/common/relay.py"]} |
42,522 | BruceZhu88/sqaTools | refs/heads/master | /src/automate/Command.py |
from time import sleep
from src.common.FtpUtil import FtpUtil
from src.common.util import read_file
from src.common.cfg import Config
# Module-level FTP configuration wrapper; actually loaded in Command.__init__.
ftp_settings = Config('./data/ftp.ini')
class Command(object):
    """FTP-based command channel: a command string is written to cmd.ini and
    uploaded; the remote side is expected to execute it and publish the
    result in return.ini, which is polled back down ("-1" = no answer yet,
    "0" = idle)."""

    def __init__(self):
        ftp_settings.cfg_load()
        host = ftp_settings.cfg.get("FTP", "host")
        port = ftp_settings.cfg.getint("FTP", "port")
        user = ftp_settings.cfg.get("FTP", "user")
        pwd = ftp_settings.cfg.get("FTP", "pwd")
        self.ftp_path = ftp_settings.cfg.get("FTP", "ftp_path")
        ftp_settings.save()
        self.ftp = FtpUtil(host=host, port=port, user=user, pwd=pwd, ftp_path=self.ftp_path)
        self.file_path = ".\\data\\"  # local staging directory (Windows path)

    def init_file(self):
        # Reset the protocol files: "0" means no pending command and "-1"
        # means no result yet; push both to the FTP server.
        with open(self.file_path + 'cmd.ini', 'w') as f:
            f.write("0")
        with open(self.file_path + 'return.ini', 'w') as f:
            f.write("-1")
        self.ftp.up(self.ftp_path, self.file_path + 'cmd.ini')
        self.ftp.up(self.ftp_path, self.file_path + 'return.ini')

    def cmd(self, c):
        # Upload command *c*, then poll return.ini for the answer.
        # Returns the remote result string, or False when the upload or
        # download fails or no answer arrives within ~25s (50 polls x 0.5s).
        with open(self.file_path + 'cmd.ini', 'w') as f:
            f.write(c)
        for i in range(0, 5):  # try 5 times
            if self.ftp.up(self.ftp_path, self.file_path + 'cmd.ini'):
                break
            if i == 4:
                return False
            else:
                sleep(1)
        value = '-1'
        i = 0
        while value == '-1':  # "-1": the remote side has not answered yet
            i += 1
            if i > 50:
                return False
            if self.ftp.down(self.ftp_path, self.file_path + 'return.ini'):
                value = read_file(self.file_path + 'return.ini')
                sleep(0.5)
            else:
                return False
        return value
| {"/build.py": ["/src/common/cfg.py"], "/sqaTools.py": ["/app_config.py", "/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/WifiSetup.py", "/src/powerCycle/SerialTool.py", "/src/automate/Command.py", "/src/automate/automate.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/common/QRcode.py"], "/src/wifiSpeaker/AseInfo.py": ["/src/common/SshHelper.py", "/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/wifiSpeaker/AseWebData.py"], "/src/wifiSpeaker/WifiSetup.py": ["/src/common/WindowsWifi.py", "/src/wifiSpeaker/AseInfo.py"], "/src/powerCycle/__init__.py": ["/src/powerCycle/SerialTool.py"], "/src/automate/__init__.py": ["/src/automate/automate.py"], "/src/automate/Command.py": ["/src/common/FtpUtil.py", "/src/common/cfg.py"], "/src/automate/automate.py": ["/src/wifiSpeaker/AseInfo.py", "/src/common/relay.py"], "/src/wifiSpeaker/__init__.py": ["/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/AseWebData.py", "/src/wifiSpeaker/WifiSetup.py"], "/src/common/__init__.py": ["/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/FtpUtil.py", "/src/common/QRcode.py", "/src/common/SshHelper.py", "/src/common/Url.py", "/src/common/relay.py"]} |
42,523 | BruceZhu88/sqaTools | refs/heads/master | /src/common/SshHelper.py |
import subprocess
class SshHelper(object):
    """Runs commands on a remote host via a bundled ssh.exe, pre-seeding
    known_hosts so the first connection is not blocked by a host-key prompt."""

    def __init__(self, ip, user, ssh_rsa, ssh_path):
        """
        ip/user: target host and login.
        ssh_rsa: path to a file whose first line is the host's public RSA key.
        ssh_path: root of the ssh installation (contains bin/ssh.exe and .ssh/).
        """
        self.ip = ip
        self.user = user
        self.ssh_path = ssh_path
        with open(ssh_rsa, 'r') as f:
            # BUG FIX: readline() kept the trailing newline, which corrupted
            # the "<ip> ssh-rsa <key>" line appended to known_hosts.
            self.rsa = f.readline().strip()

    def check_known_hosts(self):
        """Append this host's key to known_hosts unless an entry already exists."""
        known_hosts = '{}/.ssh/known_hosts'.format(self.ssh_path)
        with open(known_hosts, 'r') as f:
            already_known = any(self.ip in line for line in f)
        if not already_known:
            with open(known_hosts, 'a') as f:
                f.write('{} ssh-rsa {}\n'.format(self.ip, self.rsa))

    def execute(self, command):
        """Run *command* on the remote host; return its stdout, or None on failure."""
        self.check_known_hosts()
        cmd = '{}/bin/ssh.exe {}@{} {}'.format(
            self.ssh_path, self.user, self.ip, command)
        try:
            p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
            content = p.stdout.read().decode("utf-8")
        except Exception:
            # Best effort: callers treat None as "ssh unavailable".
            content = None
        return content
| {"/build.py": ["/src/common/cfg.py"], "/sqaTools.py": ["/app_config.py", "/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/WifiSetup.py", "/src/powerCycle/SerialTool.py", "/src/automate/Command.py", "/src/automate/automate.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/common/QRcode.py"], "/src/wifiSpeaker/AseInfo.py": ["/src/common/SshHelper.py", "/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/wifiSpeaker/AseWebData.py"], "/src/wifiSpeaker/WifiSetup.py": ["/src/common/WindowsWifi.py", "/src/wifiSpeaker/AseInfo.py"], "/src/powerCycle/__init__.py": ["/src/powerCycle/SerialTool.py"], "/src/automate/__init__.py": ["/src/automate/automate.py"], "/src/automate/Command.py": ["/src/common/FtpUtil.py", "/src/common/cfg.py"], "/src/automate/automate.py": ["/src/wifiSpeaker/AseInfo.py", "/src/common/relay.py"], "/src/wifiSpeaker/__init__.py": ["/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/AseWebData.py", "/src/wifiSpeaker/WifiSetup.py"], "/src/common/__init__.py": ["/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/FtpUtil.py", "/src/common/QRcode.py", "/src/common/SshHelper.py", "/src/common/Url.py", "/src/common/relay.py"]} |
42,524 | BruceZhu88/sqaTools | refs/heads/master | /src/automate/automate.py |
import re
import time
import sys
import json
import os
from src.wifiSpeaker.AseInfo import AseInfo
from src.common.Logger import Logger
from src.common.relay import Relay
class Automate(object):
def __init__(self, cfg, state_path, socketio=None):
    # cfg: a Config wrapper; cfg_load() refreshes it, then only the raw
    # configparser object is kept on self.
    cfg.cfg_load()
    self.cfg = cfg.cfg
    self.ase_info = AseInfo()
    self.socketio = socketio
    self.state_path = os.path.abspath(state_path)  # shared run-state file
    self.bp_relay_init = None
    self.log = Logger("automate").logger()
    # Two relay channels; presumably button-press vs. AC mains — confirm
    # against the Relay class and callers.
    self.button = Relay(self.log)
    self.ac_power = Relay(self.log)
def print_log(self, info, color='white'):
    """Write *info* to the logger and mirror it to the automate web page."""
    try:
        self.log.info(info)
        # Small yield so a blocked socket cannot stall page updates.
        self.socketio.sleep(0.02)
        payload = {'data': info, 'color': color}
        self.socketio.emit('print_msg', payload, namespace='/automate/test')
    except Exception as e:
        self.log.debug("Error when print_log: {}".format(e))
        sys.exit()
def delay(self, t):
    """Sleep *t* seconds, cooperatively when a socketio server is attached."""
    if self.socketio is None:
        time.sleep(t)
    else:
        self.socketio.sleep(t)
@staticmethod
def _split(para):
    """Extract the value parts from a step parameter like "name(k1:v1,k2:v2)".

    Returns the values in order, e.g. "check(vol:50)" -> ["50"].
    """
    inner = re.findall('[^()]+', para)[1]
    if ',' not in inner:
        return [inner.split(':')[1]]
    return [pair.split(':')[1] for pair in inner.split(',')]
def get_ase_info(self, name):
    """Fetch *name* from the product; False (after a red log line) when unreachable."""
    info = self.ase_info.get_info(name, self.cfg.get('ASE', 'ip'))
    if info in ('error', 'NA'):
        self.print_log('Seems disconnected with your product!', 'red')
        return False
    return info
def do_check(self, name, name_exp, name_get):
    """Compare actual value *name_get* with the expected value embedded in
    *name_exp* (case-insensitive); log the outcome and return the verdict."""
    expected = self._split(name_exp)[0]
    if str(expected).lower() == str(name_get).lower():
        self.print_log('Checked current {} = {}'.format(name, name_get))
        return True
    self.print_log('Current {}[{}] is unequal with expected[{}]'.format(name, name_get, expected), 'red')
    return False
def send_command(self, cmd):
ip = self.cfg.get('ASE', 'ip')
playback = ['Pause', 'Play']
self.print_log('Send {} --> {}'.format(cmd, ip))
if cmd in playback and self.ase_info.stream(ip, cmd) == 200:
self.delay(1)
return True
elif cmd == 'Standby' and self.ase_info.standby(ip) == 200:
return True
self.print_log('Seems disconnected with your product!', 'red')
return False
def send_pause(self):
if not self.send_command('Pause'):
return False
return True
def send_play(self):
if not self.send_command('Play'):
return False
return True
def set_standby(self):
if not self.send_command('Standby'):
return False
return True
def set_volume(self, steps_para):
ip = self.cfg.get('ASE', 'ip')
value = self._split(steps_para)[0]
self.print_log('Set volume to {}'.format(value))
if self.ase_info.set_volume(ip, value) != 200:
self.print_log('Seems disconnected with your product!', 'red')
return False
return True
def do_check_network(self, steps_para):
ip = self.cfg.get('ASE', 'ip')
value = self._split(steps_para)[0].lower()
beo_device = self.ase_info.get_info('BeoDevice', ip)
if beo_device == 'error' and value == 'no':
self.print_log('Current network is off')
elif beo_device != 'error' and value == 'yes':
self.print_log('Current network is on')
else:
self.print_log('Current network is not [{}]'.format(value), 'red')
return False
return True
def do_check_volume(self, steps_para):
volume_info = self.get_ase_info('volume')
if volume_info is False:
return False
vol_get = volume_info.get('Current Level')
if not self.do_check('volume', steps_para, vol_get):
return False
return True
def do_check_playback(self, steps_para):
stream_info = self.get_ase_info('stream_state')
if stream_info is False:
return False
current_state = stream_info.get('state').lower()
# stream_info.get('source_type')
if not self.do_check('playback', steps_para, current_state):
return False
return True
def do_check_source(self, steps_para):
source = self.get_ase_info('current_source')
if source is False:
return False
if not self.do_check('source', steps_para, source.lower()):
return False
return True
def do_check_power_state(self, steps_para):
standby = self.get_ase_info('get_standby')
if standby is False:
return False
if not self.do_check('power state', steps_para, standby.lower()):
return False
return True
def do_check_bt_connection(self, steps_para):
bt_setting = self.get_ase_info('bluetoothSettings')
if bt_setting is False:
return False
bt_devices = bt_setting.get('bt_devices')
state = 'no'
if len(bt_devices) > 0:
for d in bt_devices:
if d.get('connected'):
self.print_log('BT connected device: [{}]'.format(d.get('deviceName')))
state = 'yes'
break
if not self.do_check('BT connection', steps_para, state):
return False
return True
def button_press(self, steps_para):
if self.bp_relay_init:
values = self._split(steps_para)
key = values[0]
t = values[1]
key = key.split('&') if '&' in key else [key]
self.print_log(steps_para)
self.button.press(key, t)
else:
self.print_log('Something wrong with your relay!', 'red')
return False
return True
def _ac_power(self, steps_para):
state = self._split(steps_para)[0]
if not self.ac_power.ac_power(state):
self.print_log('Something wrong with your Relay!!!', 'red')
return False
self.print_log(steps_para)
return True
def process_steps(self, steps):
for i in range(1, len(steps)):
with open(self.state_path) as f:
status_running = json.load(f)
if status_running["run_state"] == 0:
self.socketio.emit("stop_confirm", namespace='/automate/test')
sys.exit()
action = steps[str(i)]
for k, v in action.items():
if k == 'send_pause':
if not self.send_pause():
return False
elif k == 'send_play':
if not self.send_play():
return False
elif k == 'set_standby':
if not self.set_standby():
return False
elif k == 'delay':
t = self._split(v)[0]
self.print_log('Delay {} s'.format(t))
self.delay(float(t))
elif k == 'set_volume':
if not self.set_volume(v):
return False
elif k == 'do_check_network':
if not self.do_check_network(v):
return False
elif k == 'do_check_volume':
if not self.do_check_volume(v):
return False
elif k == 'do_check_playback':
if not self.do_check_playback(v):
return False
elif k == 'do_check_source':
if not self.do_check_source(v):
return False
elif k == 'do_check_power_state':
if not self.do_check_power_state(v):
return False
elif k == 'do_check_bt_connection':
if not self.do_check_bt_connection(v):
return False
elif k == 'button_press':
if not self.button_press(v):
return False
elif k == 'ac_power':
if not self._ac_power(v):
return False
return True
def run(self, steps):
total_times = int(steps['total_times'])
for x in steps:
if 'button_press' in steps[x]:
usb_port = 'com' + self.cfg.get('Button_Press', 'bp_usb_port')
self.button.init_relay(usb_port)
self.bp_relay_init = self.button.init_button()
elif 'ac_power' in steps[x]:
usb_port = 'com' + self.cfg.get('AC_Power', 'ac_usb_port')
self.ac_power.init_relay(usb_port)
c_time = 0
while c_time < total_times:
c_time += 1
txt = '**************This is the {} times**************'.format(c_time)
self.print_log(txt)
self.socketio.emit("show_running_info", {'current_times': c_time}, namespace='/automate/test')
if not self.process_steps(steps):
self.delay(0.2)
self.socketio.emit("run_stopped", namespace='/automate/test')
break
if self.button.ser is not None:
self.button.stop_relay()
if self.ac_power.ser is not None:
self.ac_power.stop_relay()
if c_time >= total_times:
self.socketio.emit('running_over', namespace='/automate/test')
sys.exit()
| {"/build.py": ["/src/common/cfg.py"], "/sqaTools.py": ["/app_config.py", "/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/WifiSetup.py", "/src/powerCycle/SerialTool.py", "/src/automate/Command.py", "/src/automate/automate.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/common/QRcode.py"], "/src/wifiSpeaker/AseInfo.py": ["/src/common/SshHelper.py", "/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/wifiSpeaker/AseWebData.py"], "/src/wifiSpeaker/WifiSetup.py": ["/src/common/WindowsWifi.py", "/src/wifiSpeaker/AseInfo.py"], "/src/powerCycle/__init__.py": ["/src/powerCycle/SerialTool.py"], "/src/automate/__init__.py": ["/src/automate/automate.py"], "/src/automate/Command.py": ["/src/common/FtpUtil.py", "/src/common/cfg.py"], "/src/automate/automate.py": ["/src/wifiSpeaker/AseInfo.py", "/src/common/relay.py"], "/src/wifiSpeaker/__init__.py": ["/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/AseWebData.py", "/src/wifiSpeaker/WifiSetup.py"], "/src/common/__init__.py": ["/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/FtpUtil.py", "/src/common/QRcode.py", "/src/common/SshHelper.py", "/src/common/Url.py", "/src/common/relay.py"]} |
42,525 | BruceZhu88/sqaTools | refs/heads/master | /src/common/FtpUtil.py | # -*- coding: utf-8 -*-
# -------------------------------------------------------------------------------
# Name: checkUpdates
# Purpose:
#
# Author: Bruce Zhu
#
# Created: 8/11/2017
# Copyright: (c) it 2017
# Licence: <your licence>
# -------------------------------------------------------------------------------
# cmd('disconnect bluetooth(%s)'%bt_name)
# cmd('connect bluetooth(%s)'%bt_name)
# cmd('bluetooth status()')
# cmd('PlayAudio(%s)'%musicPath)
# cmd('mediaVolumeUp()')
# cmd('mediaVolumeUp()')
# cmd('StopAudio()')
import os
from src.common.Logger import Logger
from ftplib import FTP
class FtpUtil(object):
def __init__(self, host='', port=21, user='', pwd='', ftp_path=''):
self.log = Logger('main').logger()
self.ftp = FTP()
self.host = host
self.port = port
self.user = user
self.pwd = pwd
self.ftp_path = ftp_path
def connect(self):
try:
self.ftp.connect(self.host, self.port)
self.ftp.login(self.user, self.pwd)
# print('Ftp connected!')
return True
except Exception as e:
self.log.error("Error when connecting FTP server: {0}".format(e))
return False
# sys.exit()
def up(self, path, filename):
try:
# print ftp.dir() #display file detail under directory
# print ftp.nlst()
if not self.connect():
return False
self.ftp.cwd(path)
buf_size = 1024
file_handler = open(filename, 'rb')
self.ftp.storbinary('STOR %s' % os.path.basename(filename), file_handler, buf_size)
# ftp.set_debuglevel(0)
file_handler.close()
self.ftp.quit()
return True
except Exception as e:
self.log.error("Error when ftp_up: {0}".format(e))
# sys.exit()
return False
def down(self, path, filename):
try:
if not self.connect():
return False
self.ftp.cwd(path)
buf_size = 1024
file_handler = open(filename, 'wb').write
self.ftp.retrbinary('RETR %s' % os.path.basename(filename), file_handler, buf_size)
self.ftp.set_debuglevel(0)
# file_handler.close()
self.ftp.quit()
# print "ftp down OK"
return True
except Exception as e:
self.log.error("Error when ftp_down: {0}".format(e))
# sys.exit()
return False
if __name__ == "__main__":
ip = '192.168.0.105'
port = 3721
ftp = FtpUtil(host=ip, port=port, user='', pwd='', ftp_path='autoSQA/')
print(ftp.connect())
| {"/build.py": ["/src/common/cfg.py"], "/sqaTools.py": ["/app_config.py", "/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/WifiSetup.py", "/src/powerCycle/SerialTool.py", "/src/automate/Command.py", "/src/automate/automate.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/common/QRcode.py"], "/src/wifiSpeaker/AseInfo.py": ["/src/common/SshHelper.py", "/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/wifiSpeaker/AseWebData.py"], "/src/wifiSpeaker/WifiSetup.py": ["/src/common/WindowsWifi.py", "/src/wifiSpeaker/AseInfo.py"], "/src/powerCycle/__init__.py": ["/src/powerCycle/SerialTool.py"], "/src/automate/__init__.py": ["/src/automate/automate.py"], "/src/automate/Command.py": ["/src/common/FtpUtil.py", "/src/common/cfg.py"], "/src/automate/automate.py": ["/src/wifiSpeaker/AseInfo.py", "/src/common/relay.py"], "/src/wifiSpeaker/__init__.py": ["/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/AseWebData.py", "/src/wifiSpeaker/WifiSetup.py"], "/src/common/__init__.py": ["/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/FtpUtil.py", "/src/common/QRcode.py", "/src/common/SshHelper.py", "/src/common/Url.py", "/src/common/relay.py"]} |
42,526 | BruceZhu88/sqaTools | refs/heads/master | /clear_folders.py | # -------------------------------------------------------------------------------
# Name: Clear_folders
# Purpose:
#
# Author: Bruce.Zhu
#
# Created: 14/09/2017
# Copyright: (c) SQA 2017
# Licence: <your licence>
# -------------------------------------------------------------------------------
import os
import shutil
def walk_folders(folder):
folderscount = 0
filescount = 0
size = 0
# walk(top,topdown=True,onerror=None)
for root, dirs, files in os.walk(folder):
folderscount += len(dirs)
filescount += len(files)
size += sum([os.path.getsize(os.path.join(root, name))
for name in files])
return folderscount, filescount, size
def remove_file(path, fileType):
if os.path.exists(path):
folderscount, filescount, size = walk_folders(path)
for parent, dirnames, filenames in os.walk(path):
for filename in filenames:
if fileType in filename:
try:
delFilePath = os.path.join(parent, filename)
os.remove(delFilePath)
except Exception as e:
print(e)
def remove_folders(path, folderType):
if os.path.exists(path):
folderscount, filescount, size = walk_folders(path)
for parent, dirnames, filenames in os.walk(path):
if folderType in parent:
# print(parent)
try:
shutil.rmtree(parent)
except Exception as e:
print(e)
def remove_folder(folder_path):
if os.path.exists(folder_path):
shutil.rmtree(folder_path)
# remove_file('./src', '.pyc')
remove_folders('./src', '__pycache__')
remove_folder('./__pycache__')
remove_folder('./log')
remove_folder('./dist')
remove_folder('./build')
| {"/build.py": ["/src/common/cfg.py"], "/sqaTools.py": ["/app_config.py", "/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/WifiSetup.py", "/src/powerCycle/SerialTool.py", "/src/automate/Command.py", "/src/automate/automate.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/common/QRcode.py"], "/src/wifiSpeaker/AseInfo.py": ["/src/common/SshHelper.py", "/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/wifiSpeaker/AseWebData.py"], "/src/wifiSpeaker/WifiSetup.py": ["/src/common/WindowsWifi.py", "/src/wifiSpeaker/AseInfo.py"], "/src/powerCycle/__init__.py": ["/src/powerCycle/SerialTool.py"], "/src/automate/__init__.py": ["/src/automate/automate.py"], "/src/automate/Command.py": ["/src/common/FtpUtil.py", "/src/common/cfg.py"], "/src/automate/automate.py": ["/src/wifiSpeaker/AseInfo.py", "/src/common/relay.py"], "/src/wifiSpeaker/__init__.py": ["/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/AseWebData.py", "/src/wifiSpeaker/WifiSetup.py"], "/src/common/__init__.py": ["/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/FtpUtil.py", "/src/common/QRcode.py", "/src/common/SshHelper.py", "/src/common/Url.py", "/src/common/relay.py"]} |
42,527 | BruceZhu88/sqaTools | refs/heads/master | /src/wifiSpeaker/__init__.py | from .AseInfo import AseInfo
from .AseUpdate import AseUpdate
from .AseWebData import *
from .WifiSetup import WifiSetup
| {"/build.py": ["/src/common/cfg.py"], "/sqaTools.py": ["/app_config.py", "/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/WifiSetup.py", "/src/powerCycle/SerialTool.py", "/src/automate/Command.py", "/src/automate/automate.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/common/QRcode.py"], "/src/wifiSpeaker/AseInfo.py": ["/src/common/SshHelper.py", "/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/wifiSpeaker/AseWebData.py"], "/src/wifiSpeaker/WifiSetup.py": ["/src/common/WindowsWifi.py", "/src/wifiSpeaker/AseInfo.py"], "/src/powerCycle/__init__.py": ["/src/powerCycle/SerialTool.py"], "/src/automate/__init__.py": ["/src/automate/automate.py"], "/src/automate/Command.py": ["/src/common/FtpUtil.py", "/src/common/cfg.py"], "/src/automate/automate.py": ["/src/wifiSpeaker/AseInfo.py", "/src/common/relay.py"], "/src/wifiSpeaker/__init__.py": ["/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/AseWebData.py", "/src/wifiSpeaker/WifiSetup.py"], "/src/common/__init__.py": ["/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/FtpUtil.py", "/src/common/QRcode.py", "/src/common/SshHelper.py", "/src/common/Url.py", "/src/common/relay.py"]} |
42,528 | BruceZhu88/sqaTools | refs/heads/master | /src/common/__init__.py | from .WindowsWifi import WindowsWifi
from .cfg import Config
from .Logger import Logger
from .SerialHelper import SerialHelper
from .FtpUtil import FtpUtil
from .util import *
from .QRcode import MakeQR
from .SshHelper import SshHelper
from .Url import *
from .relay import Relay
| {"/build.py": ["/src/common/cfg.py"], "/sqaTools.py": ["/app_config.py", "/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/WifiSetup.py", "/src/powerCycle/SerialTool.py", "/src/automate/Command.py", "/src/automate/automate.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/common/QRcode.py"], "/src/wifiSpeaker/AseInfo.py": ["/src/common/SshHelper.py", "/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/Url.py", "/src/wifiSpeaker/AseWebData.py"], "/src/wifiSpeaker/WifiSetup.py": ["/src/common/WindowsWifi.py", "/src/wifiSpeaker/AseInfo.py"], "/src/powerCycle/__init__.py": ["/src/powerCycle/SerialTool.py"], "/src/automate/__init__.py": ["/src/automate/automate.py"], "/src/automate/Command.py": ["/src/common/FtpUtil.py", "/src/common/cfg.py"], "/src/automate/automate.py": ["/src/wifiSpeaker/AseInfo.py", "/src/common/relay.py"], "/src/wifiSpeaker/__init__.py": ["/src/wifiSpeaker/AseInfo.py", "/src/wifiSpeaker/AseWebData.py", "/src/wifiSpeaker/WifiSetup.py"], "/src/common/__init__.py": ["/src/common/WindowsWifi.py", "/src/common/cfg.py", "/src/common/FtpUtil.py", "/src/common/QRcode.py", "/src/common/SshHelper.py", "/src/common/Url.py", "/src/common/relay.py"]} |
42,549 | vanes11/QuickFill | refs/heads/master | /version1.py | from graphviz import *
from collections import *
from collections.abc import Iterable
import sys,codecs,locale
import csv
import re
import json
""" ++++++++++++++++++++++ Debut Zone de definition des constantes et variable global ++++++++++++++++ """
BOTTOM = "⊥"
ClasseC = {}
MesNode = {} #distionnaire ayant pour cles une valeur et valeur un chemins allant de la source a la destination
valit = 0 #permet de varie la cles de chemin
""" ++++++++++++++++++++++ Fin Zone de definition des constantes et variable global ++++++++++++++++ """
""" ***************** Debut Zone d'écriture des fonctions******************** """
#Fonction permetant de determiner tous les chemin allant de la source a la destination
# This class represents a directed graph
# using adjacency list representation
class Graph:
def __init__(self, vertices):
# No. of vertices
self.V = vertices
# default dictionary to store graph
self.graph = defaultdict(list)
# function to add an edge to graph
def addEdge(self, u, v):
self.graph[u].append(v)
'''A recursive function to print all paths from 'u' to 'd'.
visited[] keeps track of vertices in current path.
path[] stores actual vertices and path_index is current
index in path[]'''
def printAllPathsUtil(self, u, d, visited, path):
# Mark the current node as visited and store in path
visited[u]= True
path.append(u)
# If current vertex is same as destination, then print
# current path[]
if u == d:
global MesNode
global valit
valtest = []
for elt in path:
valtest.append(elt)
MesNode[valit] = valtest
valit = valit + 1
else:
# If current vertex is not destination
# Recur for all the vertices adjacent to this vertex
for i in self.graph[u]:
if visited[i]== False:
self.printAllPathsUtil(i, d, visited, path)
# Remove current vertex from path[] and mark it as unvisited
path.pop()
visited[u]= False
# Prints all paths from 's' to 'd'
def printAllPaths(self, s, d):
# Mark all the vertices as not visited
visited =[False]*(self.V)
# Create an array to store paths
path = []
# Call the recursive helper function to print all paths
self.printAllPathsUtil(s, d, visited, path)
#convert list of iterable in a signe list
def flatten(items):
for x in items:
if isinstance(x, Iterable) and not isinstance(x, (str, bytes)):
for sub_x in flatten(x):
yield sub_x
else:
yield x
#Etape 0) Fonction de recuperation des exemples
def GetExamples():
S =[]
s = set()
sigma = {}
d = {}
sortie = ""
decoupe = []
with open("data.txt","r", newline=None) as f:
for line in f:
line = line.replace("\n", "")
decoupe = line.split("---")
if len(decoupe)==1:
if decoupe[0]=="***":
S.append(s)
s = set()
else:
example = decoupe[0]
sortie = decoupe[1]
example = example.split("+++")
for i in range(len(example)):
sigma["v"+str(i+1)] = example[i]
x = (json.dumps(sigma),sortie)
s = s.union(set([x]))
sigma = {}
S.append(s)
return S
########################################Fin Etape 0#################################################
#Etape 1) Fonction de creaion de la classe C de Tokens utiliser dans les primitives de notre langage
def GetClassC():
"""
Defintion de la classe de token C,
Il s'agit d'un dico dont les cles st les noms de tokens et
les valeurs st les expressions regulieres correspondantes
""" # a present on a 23 tokens dans ClassC
ClasseC ={}
with open('classeC.csv', mode='r') as csv_file:
csv_reader = csv.reader(csv_file, delimiter=';')
for token in csv_reader:
ClasseC[token[0]] = token[1]
ClasseC['SemiColonTok'] = ";"
return ClasseC
########################################Fin Etape 1#################################################
#Etape 2) Definition des primitives de permetant implementation de GeneratePostion
def Cpos(s,k):
""" CPos est un constructeur de position, il permet de representer une position de la gauche vers la droite et de la droite vers la droite """
if(k>= 0):
return k
else:
return len(s) + k
def TokenSeq(*tokens):
""" contruit une sequence(concatenation)d'expression regulieres """
TokenSeqResult = ""
for token in tokens:
TokenSeqResult = TokenSeqResult+token
return TokenSeqResult
def Pos(s,r1,r2,c):
"""
Retourne l'indice de debut de r2 dans le cieme matching de l'expression reguliere
TokenSeq(r1,r2)
"""
r = TokenSeq(r1, r2) ; r = re.compile(r)
r1 = re.compile(r1) ; r2 = re.compile(r2)
RegularExpression = r.findall(s) # retourne la liste des chaines qui match l'expression reguliere r dans s
if len(RegularExpression) >= abs(c):
e = abs(c)-1 # -1 car on veut recuperer le cieme element dans RegulaExpression dont l'element d'indice c-1 car le comptage commence a -1
if c > 0:
res = re.search(re.escape(RegularExpression[e]),s)
t1 = res.group()
res1 = re.search(r1,t1) # on recupere l'occurrence de r1 dans res
Taille = res1.end() - res1.start() # taille de res1
t = res.start() + Taille
else:
e = len(RegularExpression) + c # comme c est negatif, on applique son cpos et on applique le meme principe precedent
res = re.search(re.escape(RegularExpression[e]),s)
t1 = res.group()
res1 = re.search(r1,t1) # on recupere l'occurrence de r1 dans rest1
Taille = res1.end() - res1.start() # on recupere la taille
t = res.start() + Taille
return t
else:
return BOTTOM
def GenerateRegularExpressionLeft(s,k):
"""
Retourne l'ensemble des expressions regulieres qui existent dans s[k1:k]
pour k1 variant de 0 a k-1
"""
k1 = 0 #on commence la recherche en debut de chaine
r1 = []
Tokens = [] # liste des tokens dans la sous chaine s[k1:k]
while k1 <= k-1 :
for cle in ClasseC:
Token = ClasseC[cle]
TokenComp = re.compile(Token)
Test = TokenComp.match(s[k1:k])
if Test != None:
k1 = k1 + Test.start()
Tokens.append((k1,Token,cle))# 0n stoke le tuplet contenant l'indice de k1 du token en k1, la valeur du token associe et la cle(nom du token correspondant)
k1 = k1 + Test.end()
break
Taille = len(Tokens)
i = 0
while i < Taille : # on construit les tokens correspondant a la sous chaine s[k1:k]
Expression = "".join([str(elt[1]) for elt in Tokens[i: Taille]]) # ecriture de valeurs de tokens
Expression2 = ",".join([str(elt[2]) for elt in Tokens[i: Taille]])# ecriture sur forme de cle separe par les virgules: Numtok, Hyphentok...
r1.append((Tokens[i][0],Expression,Tokens[i: Taille],Expression2))
# r1 est un liste de quatuplet(a,b,c,d): a represente k1, b l'expression reguliere associe, c la sous liste de Tokens correspondant.et d les tokens correspondant
i = i +1
return r1
def GenerateRegularExpressionRigth(s,k):
""" Retourne l'ensemble des expressions regulieres qui existent dans s[k:k2]
pour k2 variant de k a len(s) """ # meme principe que precedement
k1 = k
r1 = []
Tokens = []
Longueur = len(s)
while k1 < Longueur:
for cle in ClasseC:
Token = ClasseC[cle]
TokenComp = re.compile(Token)
Test = TokenComp.match(s[k1:Longueur])
if Test != None:
k1 = k1 + Test.end()-1 # on doit nomalement faire end()-1, mais pour respecter l'indexation de python on prend end()
Tokens.append((k1,Token,cle))
k1 = k1 + 1 #on avance pour continuer le parcours
break
Taille = len(Tokens)
#print(Tokens)
i = 0
while i < Taille:
Expression = "".join([str(elt[1]) for elt in Tokens[0: i+1]])
Expression2 = ",".join([str(elt[2]) for elt in Tokens[0: i+1]])
r1.append((Tokens[i][0],Expression,Tokens[0: i+1],Expression2))
i = i +1
return r1
def MatchExpression(l,s):
"""
etant donnee une liste l, cette fonction retourne l'indice(l'occurence) de s dans l s'il trouve
"""
try:
return l.index(s) + 1 # +1 parceque l'indxation dans les liste commence a 0
except ValueError:
return 0
def GetBestKey(s):
""" prend une chaine(nondigitToc) et retoune le type(cle) de l'expression reguliere correspondante. """
k1 = "NonAlphtok"
k2 = "NonspaceTok"
keylist = list(ClasseC.keys())
k1 = keylist.index(k1)
k2 = keylist.index(k2)
for i in range((k1+1),k2):
Token = ClasseC[keylist[i]]
TokenComp = re.compile(Token)
Test = TokenComp.match(s)
if Test != None:
return keylist[i]
return ''
def ExpressionConcatenate(entree, s):
""" retourne la formule concatenate(de SubStr2) et concatenate(de ConstStr) qui permet d'obtenir s a partir de entree """
k1 = 0
r1 = []
Tokens = []
ExpressionExecute = []
ExpressionExecuteFormule = []
P1 = []
P2 = []
k=len(s)
booleen = False
while k1 <= k-1 :
for cle in ClasseC:
Token = ClasseC[cle]
TokenComp = re.compile(Token)
Test = TokenComp.match(s[k1:k])
if Test != None:
k1 = k1 + Test.start()
if cle == 'NonDigitTok' :
bestkey = GetBestKey(Test.group())
if bestkey != '':
cle = bestkey
Token = ClasseC[cle]
Tokens.append((k1,Token,cle,Test.group()))
k1 = k1 + Test.end()
break
for elt in Tokens:
booleen = False
for key in entree:
elt = list(elt)
chaine = elt[3]
expression = elt[1]
Tokenname = elt[2]
TokenComp = re.compile(expression)
Test = TokenComp.findall(entree[key])
for p in range(len(Test)):
K = len(chaine)
res = [Test[p][i: j] for i in range(len(Test[p])) for j in range(i + 1, len(Test[p]) + 1) if len(Test[p][i:j]) == K]
if chaine in res:
if len(chaine) == len(Test[p]):
result = (p,-2)
booleen = booleen or True
else:
if res[0] == chaine:
result = (p,0,len(chaine))
booleen = booleen or True
elif res[-1] == chaine:
result = (p,-1,len(chaine))
booleen = booleen or True
else:
booleen = booleen or False
break
if booleen:
break
else:
booleen = booleen or False
if booleen:
ExpressionExecute.append((key,expression,result))
ExpressionExecuteFormule.append("SubStr2(" + key + "," + Tokenname + "," + str(list(result)[0]+1) + ")")
else:
ExpressionExecute.append(chaine)
ExpressionExecuteFormule.append("ConstStr(" + chaine + ")")
P1.append(ExpressionExecute)
P2.append(ExpressionExecuteFormule)
return [P1,P2]
def ExpressionConcatenateOfString(s):
""" retourne l'expression reguliere qui match une sortie s, s etant une chaine de caractere """
k1 = 0
r1 = []
Tokens = []
ExpressionExecuteFormule = ""
k=len(s)
while k1 <= k-1 :
for cle in ClasseC:
Token = ClasseC[cle]
TokenComp = re.compile(Token)
Test = TokenComp.match(s[k1:k])
if Test != None:
k1 = k1 + Test.start()
if cle == 'NonDigitTok' :
bestkey = GetBestKey(Test.group())
if bestkey != '':
cle = bestkey
Token = ClasseC[cle]
Tokens.append((k1,Token,cle,Test.group()))
k1 = k1 + Test.end()
break
for elt in Tokens:
ExpressionExecuteFormule = ExpressionExecuteFormule + " " + list(elt)[2]
return ExpressionExecuteFormule
def ExpressionConcatenateOfStringSigma(entre):
""" retourne l'expression reguliere qui match l'entree sigma, sigma etant un dic de chaine de caractere"""
DiskExpression= {}
for key in entre:
s = entre[key]
k1 = 0
r1 = []
Tokens = []
ExpressionExecuteFormule = ""
k=len(s)
while k1 <= k-1 :
for cle in ClasseC:
Token = ClasseC[cle]
TokenComp = re.compile(Token)
Test = TokenComp.match(s[k1:k])
if Test != None:
k1 = k1 + Test.start()
if cle == 'NonDigitTok' :
bestkey = GetBestKey(Test.group())
if bestkey != '':
cle = bestkey
Token = ClasseC[cle]
Tokens.append((k1,Token,cle,Test.group()))
k1 = k1 + Test.end()
break
for elt in Tokens:
ExpressionExecuteFormule = ExpressionExecuteFormule + " " + list(elt)[2]
DiskExpression[key] = ExpressionExecuteFormule
return DiskExpression
def ExpressionConstString(entree, s):
""" retourne la formule concatenate de constString qui permet d'obtrenir s a partir de entree """
exp = []
for key in entree:
if entree[key] in s:
s = s.replace(entree[key],key)
Expression2 = "|".join(entree.keys())
Expression2 = "(" + Expression2 + ")"
disssos = re.split(Expression2,s)
for elt in disssos:
if elt != '':
if elt in entree.keys():
exp.append(elt)
else:
exp.append("ConstStr(" + elt + ")")
return exp
########################################Fin Etape 2#################################################
#Etape 3) implementation de GeneratePostion
def GeneratePosition(s,k):
""" retourne l'ensemble des differentes facons de representer une position
donnee dans une chaine donnee avec les primitives du langage """
POsList = [] # variable de formatage
PosChain = ""
result = set([Cpos(s,k), Cpos(s,-(len(s)-k))])
PosChain = "Cpos("+s+","+str(k)+")"
POsList.append(PosChain)
PosChain = "Cpos("+s+",-"+str((len(s)-k))+")"
POsList.append(PosChain)
R1List = GenerateRegularExpressionLeft(s,k) # liste des expressions regulieres dans la chaine s[0:k-1]
R2List = GenerateRegularExpressionRigth(s,k) # Liste des expressions regulieres dans la chaine s[k:len(s)]
for r1 in R1List :
for r2 in R2List :
r12 = TokenSeq(r1[1],r2[1])
r12 = re.compile(r12)
MatchingList = r12.findall(s)# liste de toutes les occurrences de qui match r12 dans s
c = MatchExpression(MatchingList,s[r1[0]:r2[0]+1])
c1 = len(MatchingList)
if c!=0 and (r1[2][len(r1[2])-1][1] != r2[2][0][1]):
# cette deuxieme condition sur if nous permet d'exclure le ou la fin de r1 est de meme type que le debut de r2, car ceci pret a confusion, on ne sait plus dean ce cas determiner l;indice de debut de r2
c1 = -(c1 -c +1) # c1 nous permet de parcourir la chaine de la droite vers la gauche
PosChain = "Pos("+"TokenSeq("+r1[3]+"),TokenSeq("+r2[3]+"),"+str(c)+")"
POsList.append(PosChain)
PosChain = "Pos("+"TokenSeq("+r1[3]+"),TokenSeq("+r2[3]+"),"+str(c1)+")"
POsList.append(PosChain)
result = result.union(set([Pos(s,r1[1],r2[1],c),Pos(s,r1[1],r2[1],c1)])) # result n'est rien d'autres que que k
return result,set(POsList)
########################################Fin Etape 3#################################################
#Etape 4) Definition des primitives de permetant implementation de GenerateStr
def SubStr(s,p1,p2):
"""
Expression de sous chaine tel que formuler dans l'article, un peu != de celle de python.l'indexation commence a 0
"""
# par la suite on utile la fonction substring de python a ajoutant a l'indice de fin
p2 = p2+1
return s[p1:p2]
def SubStrs(s,p1,p2):
"""
Retourne la liste de sous chaine de s pouvant etre former avec les elements de p1 et p2: p1 et p2 etant les ensembles de positions ,
"""
SubList = [] # pour affichage
SubChain = ""
p1 = list(p1); p2 = list(p2)
for i in p1:
for j in p2:
#result = result.union(set([SubStr(s,i,j)])) # ce ci n'est pertinent que lorsque l'ensemble p1 tout comme p2
# represente les valeurs diff, ce qui n'est pas le cas pour nous
#SubChain = "SubStr("+s+","+str(i)+","+str(j)+")"
SubChain = "SubStr("+s+","+i+","+j+")"# pour le formatage, on laisse i et j pour avoir les expressions pos et cpos
SubList.append(SubChain)
return set(SubList)
########################################Fin Etape 4#################################################
#Etape 5) Implementation de GenerateSubstring, fonction principale de GenerateStr
def GenerateSubstring(entree,s):
"""
Pour un etat d'entree sigma et une sortir s, cette fonction retourne l'ensemble des expressions Substr()
de notre langage qui permette d'obtenir d'extraire la chaine s dans l'entree sigma.
l'etat d'entree sigma est un dictionnaire : les cle sont vi et les valeurs sont des chaine de caracteres.
---------
Ce qui nous interesse c'est l'affichage avec les expression pos et cpos, car la valeur est unique.
"""
result = set()
for cle in entree:
# cle = vi, nom de colonne
if s in entree[cle]:
k = entree[cle].index(s)
k1 = len(s)+k-1
Y1 = GeneratePosition(entree[cle],k) # Y1 est un tuple car GeneratePosition retourne un tuple
Y1 = Y1[1] # ensemble de facons de retourner l'indince de debut de s sans entree
Y1 = list(Y1)
for i in range(len(Y1)):
Y1[i] = Y1[i].replace(entree[cle],cle,10)
Y2 = GeneratePosition(entree[cle], k1)
Y2 = Y2[1]
Y2 = list(Y2)
for i in range(len(Y2)):
Y2[i] = Y2[i].replace(entree[cle],cle,10)
SubResult = SubStrs(cle,Y1,Y2) # le resultat de Substrs est deja un set
result = result.union(SubResult)
return result
def GenerateStr(entree, s):
    """
    Return all the ways of producing `s` from the input state `entree`, encoded as a DAG
    (a compact structure able to represent very large expression sets).
    Inspired by the CYK dynamic-programming idea: recognize the word via a transition table.

    Returns (W, EtaTilda): W maps each edge (i, j) to its set of atomic expressions,
    EtaTilda is the list of node indices 0..len(s).
    """
    EtaTilda = set() ; PsiTilda = set()
    """ EtaSource = 0
    EtaTarget = len(s) """
    W = {} # transition table: for each edge (i, j), the label = a set of atomic expressions
    for i in range(len(s)+1):
        EtaTilda = EtaTilda.union(set([i])) # build the node set
    for i in range(len(s)+1):
        k = i+1
        for j in range(k,len(s)+1):
            PsiTilda = PsiTilda.union(set([(i,j)])) # edge set: every (i, j) with i < j
    #PsiTilda = list(PsiTilda)
    for i in PsiTilda: # for each edge, label it with ConstStr + all SubStr expressions
        x = "ConstStr("+s[i[0]:i[1]]+")" # s[i:j] = SubStr(i,j); i[1] (not i[1]-1) because slicing is end-exclusive
        ConstString = set([x])
        SubString = GenerateSubstring(entree,s[i[0]:i[1]])
        ConstString = ConstString.union(SubString)
        W[i] = ConstString
    # Also attach the full Concatenate(...) formula to the edge spanning the whole output.
    baseformule = ExpressionConcatenate(entree , s)
    baseformule = baseformule[-1][0]
    if baseformule != [] :
        baseformule = "Concatenate (" + ",".join(baseformule) + " )"
        W[(0,len(s))] = W[(0,len(s))].union(set([baseformule]))
    EtaTilda = list(EtaTilda)
    return W , EtaTilda
def PrintDag(Listenoeud , generatestring ,entree, s):
    """
    Render the DAG produced by GenerateStr as a PNG via graphviz.
    Listenoeud: node list; generatestring: transition table {(i, j): set of expressions};
    entree/s are only used for the figure caption. Side effect: writes GenerateStr.png.
    """
    sys.stdout.reconfigure(encoding='utf-8')
    dot = Digraph(name='GenerateStr',comment='Test')
    for elt in list(Listenoeud):
        # First node in red, last node double-circled, others default.
        if elt == list(Listenoeud)[0]:
            dot.node(str(elt) , str(elt),color="Red")
        elif elt == list(Listenoeud)[-1]:
            dot.node( str(elt) ,str(elt) ,shape="doublecircle")
        else:
            dot.node(str(elt) , str(elt))
    if len(list(Listenoeud)) != 0:
        for cle in generatestring:
            r = "Const" ; r = re.compile(r)  # NOTE(review): compiled regex is never used
            target =list(generatestring[cle])
            val1 = target[0]       # only the first expression of the label set is displayed
            label1="{"+val1 +"}"
            dot.edge(str(cle[0]) , str(cle[1]) , label=label1)
    dot.attr(label= "entree_sigma = " + str(entree) +",s = "+s)
    dot.attr(fontsize='25')
    dot.format = 'png'
    dot.render()
########################################Fin Etape 5#################################################
#Etape 6) Definition des primitives de permetant implementation de IntersectDag
def SizeNoeud(n, W):
    """
    Recursively count the number of distinct expressions represented by all paths
    ending at node `n` of the DAG whose transition table is `W`
    ({(i, j): set of atomic expressions}). Node 0 is the source and counts as 1.
    """
    if n == 0:
        return 1
    # Sum over every incoming edge (i, n): paths into i times label choices on (i, n).
    return sum(SizeNoeud(i, W) * len(W[(i, n)])
               for i in range(n) if (i, n) in W)
def SizeDag(EtaTarget, W):
    """
    Number of expressions encoded by the DAG with transition table `W`, counted at
    the target node `EtaTarget`. An empty table encodes zero expressions.
    """
    return SizeNoeud(EtaTarget, W) if W else 0
def IntersectConstStr(s1, s2):
    """
    Intersection of two ConstStr(...) expressions: they intersect only when they are
    literally identical; otherwise the result is BOTTOM (empty intersection).
    """
    return s1 if s1 == s2 else BOTTOM
def IntersectRegex(t1, t2):
    """
    Intersect two regular expressions given in TokenSeq(tok1,tok2,...) form.
    Sequences of different lengths do not intersect; otherwise the result keeps
    the positions where both sequences carry the same token. BOTTOM when nothing
    matches.
    """
    toks1 = t1.split(",")
    toks2 = t2.split(",")
    # Strip the TokenSeq( prefix from the first token and the ) from the last.
    toks1[0] = toks1[0].replace("TokenSeq(", "")
    toks1[-1] = toks1[-1].replace(")", "")
    toks2[0] = toks2[0].replace("TokenSeq(", "")
    toks2[-1] = toks2[-1].replace(")", "")
    if len(toks1) != len(toks2):
        return BOTTOM
    common = [a for a, b in zip(toks1, toks2) if a == b]
    if not common:
        return BOTTOM
    return "TokenSeq(" + ",".join(common) + ")"
def IntersectPos(p1, p2):
    """
    Intersect two position expressions, each either Cpos(...) or
    Pos(value, TokenSeq(...), TokenSeq(...), c). Mixed kinds (one Cpos, one Pos)
    do not intersect and yield BOTTOM.
    NOTE(review): the parsing below assumes each Pos string contains 'TokenSeq';
    if it does not, p11/p22 stay empty and p11[2] raises IndexError — confirm
    callers only pass well-formed Pos strings.
    """
    if p1.startswith("Cpos") and p2.startswith("Cpos"):
        return IntersectCpos(p1,p2)
    elif p1.startswith("Pos") and p2.startswith("Pos"):
        # Split off the "Pos(" header, then slice the argument string into
        # [r1, r2, c] using the position of the LAST 'TokenSeq' occurrence.
        p1 = p1.split("Pos(",1)
        p2 = p2.split("Pos(",1)
        p11=[]
        p22=[]
        if p1[1].rfind('TokenSeq') >=0:
            l = p1[1].rfind('TokenSeq')-1
            p11.append(p1[1][0:l])              # first regex r1 (plus leading value text)
            k = p1[1][l+1:len(p1[1])]
            n = k.index(')')
            p11.append(k[0:n+1])                # second regex r2
            p11.append(k[n+2:len(k)-1])         # occurrence count c
        if p2[1].rfind('TokenSeq') >=0:
            l = p2[1].rfind('TokenSeq')-1
            p22.append(p2[1][0:l])
            k=p2[1][l+1:len(p2[1])]
            n = k.index(')')
            p22.append(k[0:n+1])
            p22.append(k[n+2:len(k)-1])
        if p11[2] != p22[2]: # c and c' differ => their intersection is empty => Pos intersection impossible
            return BOTTOM
        x = IntersectRegex(p11[0],p22[0]) # intersect the r1 regexes
        y = IntersectRegex(p11[1],p22[1]) # intersect the r2 regexes
        if x != BOTTOM and y != BOTTOM :
            Expression = "Pos("+x+","+y+","+p11[2]+")"
            return Expression
        else:
            return BOTTOM
    else:
        return BOTTOM
def IntersectCpos(s1, s2):
    """
    Intersection of two constant-position expressions Cpos(...): non-empty only
    when both are identical, otherwise BOTTOM.
    """
    return s1 if s1 == s2 else BOTTOM
def IntersectSubstr(s1, s2):
    """
    Intersect two SubStr(vi, posStart, posEnd) expressions. The two position
    sub-expressions are intersected independently via IntersectPos; if either
    intersection is BOTTOM the whole result is BOTTOM.
    NOTE(review): the splitting below locates the boundary between the two position
    arguments via the LAST 'Cpos'/'Pos' occurrence — it relies on each SubStr
    string containing exactly two position expressions.
    """
    s11=[]
    s22=[]
    # Split off the "SubStr(vi" head; the remainder holds the two position expressions.
    s1 = s1.split(",",1)
    s2 = s2.split(",",1)
    if s1[1].rfind('Cpos') >=0:
        if s1[1].rfind('Cpos') == 0:
            # Cpos appears only at the start: the second position is a Pos(...).
            l = s1[1].rfind('Pos')-1
            s11.append(s1[1][0:l])
            s11.append(s1[1][l+1:len(s1[1])-1])
        else:
            # The second position is a Cpos(...).
            l = s1[1].rfind('Cpos')-1
            s11.append(s1[1][0:l])
            s11.append(s1[1][l+1:len(s1[1])-1])
    else:
        # Both positions are Pos(...) expressions.
        l = s1[1].rfind('Pos')-1
        s11.append(s1[1][0:l])
        s11.append(s1[1][l+1:len(s1[1])-1])
    if s2[1].rfind('Cpos') >=0:
        if s2[1].rfind('Cpos') == 0:
            l = s2[1].rfind('Pos')-1
            s22.append(s2[1][0:l])
            s22.append(s2[1][l+1:len(s2[1])-1])
        else:
            l = s2[1].rfind('Cpos')-1
            s22.append(s2[1][0:l])
            s22.append(s2[1][l+1:len(s2[1])-1])
    else:
        l = s2[1].rfind('Pos')-1
        s22.append(s2[1][0:l])
        s22.append(s2[1][l+1:len(s2[1])-1])
    # Recover the column name vi from the head.
    s1[0] = s1[0].replace("SubStr(","")
    s2[0] = s2[0].replace("SubStr(","")
    x = IntersectPos(s11[0],s22[0])
    y = IntersectPos(s11[1],s22[1])
    if x != BOTTOM and y != BOTTOM:
        Expression = "SubStr("+s1[0]+","+x+","+y+")"
        return Expression
    else:
        return BOTTOM
def Intersect(f1, f2):
    """
    Intersect two sets of atomic expressions (edge labels of two DAGs).
    Handles the ConstStr case (kept only when both sets contain the same constant)
    and all pairwise SubStr x SubStr intersections. Returns the resulting set
    (possibly empty).
    """
    result = set()
    """ cpos1 = []
    cpos2 = [] # liste des expressions atomiques contenant Cpos() dans f1 f1 = list(f1) """
    f1 = list(f1)
    f2 = list(f2)
    s1 = ""
    s2 = ""
    # Locate the (at most one) ConstStr expression in each label set.
    indice1 = [i for i in range(len(f1)) if f1[i].startswith('Const')]
    indice2 = [i for i in range(len(f2)) if f2[i].startswith('Const')]
    if indice1!=[] and indice2!=[] :
        s1 = f1[indice1[0]] # extract the special constant case
        s2 = f2[indice2[0]]
        f1.remove(s1)
        f2.remove(s2)
        if IntersectConstStr(s1, s2) != BOTTOM: # ConstStr case: keep only identical constants
            result = result.union(set([s1]))
    # SubStr-with-Pos case: intersect every SubStr pair.
    for elt1 in f1:
        for elt2 in f2:
            if elt1.startswith('SubStr') and elt2.startswith('SubStr'):
                x = IntersectSubstr(elt1,elt2) # elt1/elt2 look like SubStr(vi,Pos(...),Pos(...))
                if x != BOTTOM: # x is a valid intersected SubStr expression
                    result = result.union(set([x]))
    return result
def RenomageDag(EtaTilda, W12):
    """
    Relabel the DAG nodes with consecutive integers 0..n-1 (in the order they
    appear in EtaTilda) and rewrite the transition-table keys accordingly.
    Returns (new node list, new transition table).
    """
    mapping = {node: idx for idx, node in enumerate(EtaTilda)}
    relabeled_nodes = list(range(len(EtaTilda)))
    relabeled_table = {(mapping[src], mapping[dst]): label
                       for (src, dst), label in W12.items()}
    return relabeled_nodes, relabeled_table
########################################Fin Etape 6#################################################
#Etape 7) Implementation de IntersectDag
def IntersectDag(EtaTilda1, W1, EtaTilda2, W2):
    """
    Intersect two DAGs, each given as (node list, transition table), and return
    (transition table, node list) of the product DAG restricted to edges whose
    label intersection is non-empty.
    NOTE(review): this function mutates its W1/W2 arguments (the Concatenate
    formula is removed from them) and relies on the module-level globals
    MesNode/valit filled in by Graph.printAllPaths — confirm that is acceptable
    to all callers.
    """
    # If loop expressions were supported, an IntersectLoop helper would be needed here.
    EtaTilda1 = list(EtaTilda1)
    EtaTilda2 = list(EtaTilda2)
    EtaTilda = []
    EtaTildaCopy = []
    PsiTilda = []
    PsiTildaCopy = []
    W12 = {} # transition table of the resulting graph
    if len(EtaTilda1) == len(EtaTilda2):
        # Special handling of the full-span Concatenate formula: keep it in the
        # result only when both DAGs carry the exact same formula, and remove it
        # from W1/W2 so the generic label intersection below does not see it.
        if (0,len(EtaTilda1)-1) in W1.keys() and (0,len(EtaTilda2)-1) in W2.keys():
            baseformule1 = list(W1[(0,len(EtaTilda1)-1)])
            baseformule2 = list(W2[(0,len(EtaTilda2)-1)])
            indice1 = [i for i in range(len(baseformule1)) if baseformule1[i].startswith('Concatenate')]
            indice2 = [i for i in range(len(baseformule2)) if baseformule2[i].startswith('Concatenate')]
            if indice1!=[] and indice2!=[] :
                elt = baseformule1[indice1[0]]
                elt2 = baseformule2[indice2[0]]
                if elt == elt2:
                    W12[((0,0),(len(EtaTilda1)-1,len(EtaTilda2)-1))] = set([elt])
                    W1[(0,len(EtaTilda1)-1)] = list(W1[(0,len(EtaTilda1)-1)])
                    W1[(0,len(EtaTilda1)-1)].remove(elt)
                    W1[(0,len(EtaTilda1)-1)] = set(W1[(0,len(EtaTilda1)-1)])
                    W2[(0,len(EtaTilda2)-1)] = list(W2[(0,len(EtaTilda2)-1)])
                    W2[(0,len(EtaTilda2)-1)].remove(elt2)
                    W2[(0,len(EtaTilda2)-1)] = set(W2[(0,len(EtaTilda2)-1)])
        # Candidate node set of the product graph: only diagonal pairs (i, i).
        for i in EtaTilda1:
            for j in EtaTilda2:
                if j==i:
                    EtaTilda.append((i,j))
                    EtaTildaCopy.append((i,j))
        # Candidate edge set: every ordered pair of candidate nodes.
        for i in range(len(EtaTilda)):
            elt = EtaTilda[i]
            k = i+1
            for j in range(k,len(EtaTilda)):
                elt2 = EtaTilda[j]
                PsiTilda.append((elt,elt2))
                PsiTildaCopy.append((elt,elt2))
        VarieCase = 0
        # Intersect edge labels; drop edges with empty intersection, and drop
        # candidate nodes with no surviving outgoing edge.
        for elt in PsiTilda:
            # Intersect takes the two atomic-expression label sets of the matching edges.
            if (elt[0][0],elt[1][0]) in W1.keys() and (elt[0][1],elt[1][1]) in W2.keys():
                x = Intersect(W1[(elt[0][0],elt[1][0])],W2[(elt[0][1],elt[1][1])])
            else:
                x = set()
            if x == set() or x == set(['']):
                PsiTildaCopy.remove(elt)
            else:
                if elt in W12.keys():
                    W12[elt] = W12[elt].union(x)
                else:
                    W12[elt] = x
            # Once all edges leaving EtaTilda[VarieCase] were processed, decide
            # whether that node survives (it needs at least one non-empty edge).
            if EtaTilda[VarieCase] != elt[0]:
                DoitSupp = True
                for key in W12:
                    if EtaTilda[VarieCase] == key[0]:
                        if W12[key] != set():
                            DoitSupp = False
                            break
                if DoitSupp:
                    EtaTildaCopy.remove(EtaTilda[VarieCase])
                VarieCase = VarieCase + 1
        # The last inspected node survives only if it reaches the target node.
        DoitPasSupp = ((EtaTilda[VarieCase], EtaTilda[len(EtaTilda)-1]) in W12.keys()) and (W12[(EtaTilda[VarieCase], EtaTilda[len(EtaTilda)-1])]!= set())
        DoitSupp = not(DoitPasSupp)
        if DoitSupp :
            EtaTildaCopy.remove(EtaTilda[VarieCase])
    else:
        # Different node counts: the only possible common content is an identical
        # full-span Concatenate formula.
        if (0,len(EtaTilda1)-1) in W1.keys() and (0,len(EtaTilda2)-1) in W2.keys():
            baseformule1 = list(W1[(0,len(EtaTilda1)-1)])
            baseformule2 = list(W2[(0,len(EtaTilda2)-1)])
            indice1 = [i for i in range(len(baseformule1)) if baseformule1[i].startswith('Concatenate')]
            indice2 = [i for i in range(len(baseformule2)) if baseformule2[i].startswith('Concatenate')]
            if indice1!=[] and indice2!=[] :
                elt = baseformule1[indice1[0]]
                elt2 = baseformule2[indice2[0]]
                if elt == elt2:
                    W12[((0,0),(len(EtaTilda1)-1,len(EtaTilda2)-1))] = set([elt])
                    EtaTilda.append((0,0))
                    EtaTilda.append((len(EtaTilda1)-1,len(EtaTilda2)-1))
    if len(EtaTildaCopy) == len(EtaTilda) and EtaTilda != []:
        # All candidate nodes survived: relabel and keep only edges that lie on
        # some source-to-target path (enumerated via Graph.printAllPaths into the
        # global MesNode).
        EtaTilda, W12 = RenomageDag(EtaTildaCopy,W12)
        g = Graph(len(EtaTilda))
        for elt in list(W12.keys()):
            g.addEdge(elt[0], elt[1])
        s = EtaTilda[0]
        d = EtaTilda[-1]
        global MesNode
        global valit
        MesNode = {}
        valit = 0
        g.printAllPaths(s, d)
        WK = {}
        for elt in MesNode:
            for i in range(len(MesNode[elt])-1):
                if (MesNode[elt][i],MesNode[elt][i+1]) not in WK.keys():
                    WK[(MesNode[elt][i],MesNode[elt][i+1])] = W12[(MesNode[elt][i],MesNode[elt][i+1])]
        return WK , EtaTilda
    else:
        if EtaTilda != [] :
            # Some nodes were removed: the intersection survives only as the
            # direct source-to-target edge, if it carries a non-empty label.
            VerifCas2 = ((EtaTilda[0], EtaTilda[len(EtaTilda)-1]) in W12.keys()) and (W12[(EtaTilda[0], EtaTilda[len(EtaTilda)-1])]!= set()) and (W12[(EtaTilda[0], EtaTilda[len(EtaTilda)-1])]!= set(['']))
            if VerifCas2:
                EtaTildaNew = [0,1]
                W12New={}
                W12New[(0,1)] = W12[(EtaTilda[0], EtaTilda[len(EtaTilda)-1])]
                return W12New , EtaTildaNew
            else:
                return {},[]
        else:
            return {},[]
########################################Fin Etape 7#################################################
#Etape 8) Definition des primitives de permetant implementation de GeneratePartition
def Comp(EtaTilda1, W1, EtaTilda2, W2):
    """
    Compatibility test between two DAGs (trace-expression sets), each given as a
    node list and a transition table: they are compatible iff their intersection
    is non-empty. Empty tables are never compatible.
    """
    if not W1 or not W2:
        return False
    intersected_table, _ = IntersectDag(EtaTilda1, W1, EtaTilda2, W2)
    return intersected_table != {}
def z(EtaTilda1, w1, EtaTilda2, w2, EtaTilda3, w3):
    """
    Indicator (0/1) that the two coupled trace-expression sets behave consistently
    with respect to a third set: both agree on compatibility with set 3 AND their
    own intersection is compatible with set 3.
    """
    if not w1 or not w2:
        return 0
    w12, eta12 = IntersectDag(EtaTilda1, w1, EtaTilda2, w2)
    agree = Comp(EtaTilda1, w1, EtaTilda3, w3) == Comp(EtaTilda2, w2, EtaTilda3, w3)
    return 1 if agree and Comp(eta12, w12, EtaTilda3, w3) else 0
def CS1(EtaTilda1, w1, EtaTilda2, w2, p1, p2, T):
    """
    Partition-concordance score: count how many of the OTHER entries of T (the
    list of (sigma-set, (W, EtaTilda)) tuples, indices p1/p2 excluded) behave
    consistently with the pair of DAGs, as measured by z().
    """
    # T = {({sigma1}, (w1, EtaTilda))}
    return sum(z(EtaTilda1, w1, EtaTilda2, w2, T[i][1][1], T[i][1][0])
               for i in range(len(T)) if i not in (p1, p2))
def CS2(EtaTilda1, w1, EtaTilda2, w2):
    """
    Size-based compatibility score: size of the intersection DAG divided by the
    size of the larger of the two input DAGs. 0 when either table is empty or
    the intersection is empty.
    """
    if not w1 or not w2:
        return 0
    w12, eta12 = IntersectDag(EtaTilda1, w1, EtaTilda2, w2)
    if w12 == {}:
        return 0
    numerator = SizeDag(eta12[-1], w12)
    denominator = max(SizeDag(EtaTilda1[-1], w1), SizeDag(EtaTilda2[-1], w2))
    return numerator / denominator
def CS(EtaTilda1, w1, EtaTilda2, w2, p1, p2, T):
    """
    Full compatibility score of two DAGs located at positions p1 and p2 of T
    (T is the GenerateStr result converted to a list). Returns
    ((cs1, cs2), (p1, p2)); the indices let callers recover the matching inputs.
    """
    score_pair = (CS1(EtaTilda1, w1, EtaTilda2, w2, p1, p2, T),
                  CS2(EtaTilda1, w1, EtaTilda2, w2))
    return (score_pair, (p1, p2))
def ComparaisonCS(CSa, CSb):
    """
    Compare two compatibility scores, each of the form ((cs1, cs2), (p1, p2)).
    CSa wins when its cs1 is strictly larger, or cs1 ties and its cs2 is strictly
    larger; in every other case (including full ties) CSb is returned.
    """
    score_a, score_b = CSa[0], CSb[0]
    wins = score_a[0] > score_b[0] or (score_a[0] == score_b[0] and score_a[1] > score_b[1])
    return CSa if wins else CSb
def TestCompatibilite(T):
    """
    Return True when at least one pair of entries of T (list of
    (sigma-set, (W, EtaTilda)) tuples) is compatible, per Comp().
    """
    return any(Comp(T[i][1][1], T[i][1][0], T[j][1][1], T[j][1][0])
               for i in range(len(T))
               for j in range(i + 1, len(T)))
def LargestCS(T):
    """
    Return the score tuple ((cs1, cs2), (p1, p2)) of the pair of entries of T
    with the largest compatibility score. Ties favor the later pair, matching
    ComparaisonCS's tie-breaking. Assumes T has at least two entries.
    """
    scores = [CS(T[i][1][1], T[i][1][0], T[j][1][1], T[j][1][0], i, j, T)
              for i in range(len(T))
              for j in range(i + 1, len(T))]
    best = scores[0]
    for candidate in scores[1:]:
        best = ComparaisonCS(best, candidate)
    return best
def TestCompatibiliteNew(T):
    """
    Alternative compatibility check: look for the first pair (i, j) of entries of
    T whose formula lists on side 0 (or, failing that, side 1) have the same
    length and fully intersect as sets.
    Returns (True, i, j, (label, formula_list)) on success,
    (False, '', '', '') otherwise.
    """
    for i in range(len(T)):
        for j in range(i + 1, len(T)):
            first = list(T[i][1])
            second = list(T[j][1])
            shared0 = set(first[1][0]).intersection(set(second[1][0]))
            shared1 = set(first[1][1]).intersection(set(second[1][1]))
            if len(first[1][0]) == len(second[1][0]) == len(shared0):
                return True, i, j, (first[0][0], first[1][0])
            if len(first[1][1]) == len(second[1][1]) == len(shared1):
                return True, i, j, (first[0][1], first[1][1])
    return False, '', '', ''
def ConvertListOfDickToString(L):
    """
    Serialize each dict of L to a JSON string and join them with commas.
    Returns "" for an empty list.
    """
    return ",".join(json.dumps(item) for item in L)
########################################Fin Etape 8#################################################
#Etape 9) Implementation de GeneratePartition
def GeneratePartition(T):
    """
    Partition the input set into groups that require the same treatment, i.e.
    that will satisfy the same boolean formula, to reduce computational cost.

    T is the GenerateStr result: a sequence of tuples whose left element is the
    input (or a list of already-merged inputs) and whose right element is the
    (W, EtaTilda) DAG. Repeatedly merges the most compatible pair until no two
    entries are compatible.

    Returns the partitioned list of ([inputs...], (W12, EtaTilda12)) tuples.
    """
    T = list(T)
    while TestCompatibilite(T):
        ScoreMax = LargestCS(T)
        first_idx, second_idx = ScoreMax[1]
        partition = [T[first_idx][0], T[second_idx][0]]
        intersect = IntersectDag(T[first_idx][1][1], T[first_idx][1][0],
                                 T[second_idx][1][1], T[second_idx][1][0])
        # Bug fix: remove the two merged entries by INDEX. The previous
        # value-based filter (`item != T[first_idx]`) would also delete any
        # unrelated duplicate entries elsewhere in T.
        T = [item for idx, item in enumerate(T) if idx not in (first_idx, second_idx)]
        # Append the newly formed partition.
        T.append((partition, intersect))
    return T # list of tuples: ([merged inputs], (W of the merged DAG, its EtaTilda))
########################################Fin Etape 9#################################################
#Etape 10) Definition des primitives de permetant implementation de GenerateBoolClassifier
def PredicatContruction(SigmaSet1, SigmaSet2):
    """
    Take two sets of inputs (JSON-encoded state dicts) and build, for each element
    of their union, the predicate that matches it: a list with one
    (regex, tokens) pair per column. Complement predicates are not returned here;
    they are handled during the CSP score computation.
    Returns a list of (state_dict, predicate_list) tuples.
    """
    SigmaSet = SigmaSet1.union(SigmaSet2)
    Preds = []
    r = [] # regex parts for one input, kept as a list while being built
    for elt in SigmaSet:
        elt = json.loads(elt)
        r = []
        for key in elt:
            express = GenerateRegularExpressionLeft(elt[key],len(elt[key]))
            if express != []:
                # Keep the regex string and its token description for this column.
                r.append((express[0][1] , express[0][3]))
        Preds.append((elt , r))
    return Preds
def MachingPredicat(SigmaElement, Predicat):
    """
    Decide whether the predicate matches the input state.
    SigmaElement is a JSON-encoded dict; Predicat is (state_dict, [(regex, tokens), ...])
    with one regex per column, in the same order as the state's keys.
    Returns False when the column counts differ or any column fails its regex.
    """
    state = json.loads(SigmaElement)
    if len(state) != len(Predicat[0]):
        return False
    for position, column in enumerate(state):
        pattern = re.compile(Predicat[1][position][0])
        if re.search(pattern, state[column]) is None:
            return False
    return True
def CSP(Predicat, SigmaSet1, SigmaSet2):
    """
    Classification score of a predicate against two input sets.

    csp1 = elements of SigmaSet1 that satisfy the predicate;
    csp2 = elements of SigmaSet2 that do NOT satisfy it.
    Returns (csp, NonCsp): csp = |csp1| * |csp2| scores the predicate itself,
    NonCsp = |SigmaSet1 - csp1| * |SigmaSet2 - csp2| scores its complement.
    """
    csp1 = set()
    csp2 = set()
    for elt in SigmaSet1:
        if MachingPredicat(elt, Predicat):
            # Bug fix: the original `csp1.union(elt)` unioned the CHARACTERS of
            # the JSON string instead of adding the string itself (compare the
            # `set([elt])` idiom used elsewhere in this file), which corrupted
            # both counts below.
            csp1.add(elt)
    for elt in SigmaSet2:
        if not MachingPredicat(elt, Predicat):
            csp2.add(elt)
    csp = len(csp1) * len(csp2)
    NonCsp = len(SigmaSet1 - csp1) * len(SigmaSet2 - csp2)
    return csp, NonCsp
def LargestCSP(Preds, SigmaSet1, SigmaSet2):
    """
    Among the predicates of Preds and their complements, return the one with the
    highest classification score. A complement is encoded as [predicate, '#'].
    Ties favor the later entry (stable ascending sort, last element taken).
    """
    scored = []
    for predicate in Preds:
        direct_score, complement_score = CSP(predicate, SigmaSet1, SigmaSet2)
        scored.append((predicate, direct_score))
        # Complement of the predicate, tagged with '#'.
        scored.append(([predicate, '#'], complement_score))
    scored.sort(key=lambda pair: pair[1])
    return scored[-1][0]
def PresentInBoolClassier(entree, B):
    """
    Look up the key of B whose stored concatenate expression matches that of
    `entree`; return '' when none matches.
    NOTE(review): when the two expressions have the same key count but different
    keys, expr1[elt2] may raise KeyError — confirm inputs always share keys.
    """
    expr1 = ExpressionConcatenateOfStringSigma(entree)
    for elt in B:
        expr2 = B[elt][0][0]
        booleen = True
        if len(expr2.keys()) == len(expr1.keys()):
            for elt2 in expr2:
                if expr2[elt2] != expr1[elt2]:
                    booleen = False
                    break
            if booleen:
                return elt
    return ''
########################################Fin Etape 10#################################################
#Etape 11) Implementation de GenerateBoolClassifier
def GenerateBoolClassifier(SigmaSet1, SigmaSet2):
    """
    Generate the boolean classification scheme for a partition: build a boolean
    condition that holds for every element of SigmaSet1 but for none of SigmaSet2.
    Returns a nested list b = [False, [True, pred, ...], ...] (disjunction of
    conjunctions of predicates), or the string 'FAIL' when no progress can be
    made (a fixpoint is reached without separating the sets).
    """
    SigmaSet11 = SigmaSet1
    b = [False]
    # Outer loop: cover SigmaSet1 clause by clause.
    while SigmaSet11 != set():
        OldSigmaSet1 = SigmaSet11
        SigmaSet22 = SigmaSet2
        SigmaSet111 = SigmaSet11
        d = [True]
        # Inner loop: refine one conjunction d until it rejects all of SigmaSet2.
        while SigmaSet22 != set() :
            OldSigmaSet2 = SigmaSet22
            Preds = PredicatContruction(SigmaSet111, SigmaSet22)
            # Take the predicate with the highest classification score.
            LargestPredicat = LargestCSP(Preds,SigmaSet111, SigmaSet22)
            d.append(LargestPredicat) # grow the conjunction d
            SigmaSet111Rest = set() # elements of SigmaSet111 NOT kept by LargestPredicat
            SigmaSet2Rest = set() # elements of SigmaSet22 NOT kept by LargestPredicat
            if LargestPredicat[-1] =='#':
                # Complement predicate: an element is "kept" when it does NOT match.
                for elt in SigmaSet111 :
                    if MachingPredicat(elt,LargestPredicat[0:-1][0]):
                        SigmaSet111Rest = SigmaSet111Rest .union(set([elt]))
                for elt in SigmaSet22 :
                    if MachingPredicat(elt,LargestPredicat[0:-1][0]):
                        SigmaSet2Rest = SigmaSet2Rest .union(set([elt]))
            else:
                for elt in SigmaSet111 :
                    if not(MachingPredicat(elt,LargestPredicat)):
                        SigmaSet111Rest = SigmaSet111Rest .union(set([elt]))
                for elt in SigmaSet22 :
                    if not(MachingPredicat(elt,LargestPredicat)):
                        SigmaSet2Rest = SigmaSet2Rest .union(set([elt]))
            SigmaSet111 = SigmaSet111 - SigmaSet111Rest
            SigmaSet22 = SigmaSet22 - SigmaSet2Rest
            if OldSigmaSet2 == SigmaSet22:
                # No element of SigmaSet2 was rejected: no progress, give up.
                return 'FAIL'
        SigmaSet11 = SigmaSet11 - SigmaSet111
        b.append(d)
        if OldSigmaSet1 == SigmaSet11:
            # No element of SigmaSet1 was covered by this clause: give up.
            return 'FAIL'
    return b
def MathStringexpression(Listformular):
    """
    Pretty-print a flattened boolean-classifier clause as a mathematical formula:
    "<head> V [¬]( Match(vi, TokenSeq(...), 1) ∧ ... )".
    A trailing '#' in Listformular marks a negated (complement) clause.
    NOTE(review): the `i = i + 2` below has NO effect — reassigning the loop
    variable of a for-range does not skip iterations. The intended skip-by-2
    behavior is not what runs; only the "v" substring guard prevents duplicate
    Match(...) emissions. Confirm the intended output before changing it.
    """
    maformule = ""
    maformule = str(Listformular[0])
    maformule = maformule + " V "
    if Listformular[-1] == "#":
        maformule = maformule + "¬ ( "
        for i in range(1,(len(Listformular)-1)):
            # Elements containing "v" are treated as column names (v1, v2, ...).
            if "v" in str(Listformular[i]):
                maformule = maformule + "Match( " + Listformular[i] + "," + "TokenSeq("+ Listformular[i+2] +")," + "1)"
                i = i + 2
                if i < (len(Listformular)-1):
                    maformule = maformule + " ∧ "
        maformule = maformule + " ) "
    else:
        maformule = maformule + "( "
        for i in range(1,(len(Listformular)-1)):
            if type(Listformular[i]) ==str and "v" in Listformular[i]:
                maformule = maformule + "Match( " + Listformular[i] + "," + "TokenSeq("+ Listformular[i+2] +")," + "1)"
                i = i + 2
                if i < (len(Listformular)-1):
                    maformule = maformule + " ∧ "
        maformule = maformule + " ) "
    return maformule
def ListOfCancatenateExpression(dagExpression):
    """
    Render a DAG expression as a "Concatenate( expr1,expr2,... )" string.
    For each transition table in dagExpression, picks one expression per
    tuple-keyed edge, flattens the result, and joins with commas. A single
    expression is returned bare (without the Concatenate wrapper).
    """
    CanatenateString = ""
    CanatenateString = "Concatenate( "
    chainlist = []
    for item in dagExpression:
        chain = []
        for elt in item:
            if type(elt) == tuple:
                # elt is an edge key; take the first expression of its label set.
                chain.append(list(item[elt])[0])
        chainlist.append(chain)
    chainlist = list(flatten(chainlist))
    if len(chainlist) > 1:
        CanatenateString = CanatenateString + ",".join(chainlist) + " )"
    else:
        CanatenateString = ",".join(chainlist)
    return CanatenateString
def MathStringexpressionCaseFail(entree):
    """
    Fallback formula used when GenerateBoolClassifier returned 'FAIL': build
    "False V ( Match(vi, TokenSeq(...), 1) ∧ ... )" directly from the token
    decomposition of the input state (a JSON-encoded dict).
    """
    catexp = ExpressionConcatenateOfStringSigma(json.loads(entree))
    maformule = ""
    maformule = str(False)
    maformule = maformule + " V "
    maformule = maformule + "( "
    tokens = ""
    for key in catexp:
        # Drop the leading character, then turn spaces into comma separators.
        tokens = catexp[key][1:len(catexp[key])]
        tokens = tokens.replace(" " , ",")
        maformule = maformule + "Match(" + key + "," + "TokenSeq("+ tokens +")," + "1)"
        maformule = maformule + " ∧ "
    # Strip the trailing " ∧ " appended after the last term.
    return maformule[0:len(maformule)-len(" ∧ ")]
########################################Fin Etape 11#################################################
# Etape 12)Implementation de l'algorithme principale GenerateStringProgramm, qui utilise toutes les fonctions precedentes
def GenerateStringProgram(S): # S is the set of input/output example pairs
    """
    Main synthesis driver: take a set of (input_json, output_string) example
    pairs and return a single "Switch((cond, Concatenate(...)), ...)" program
    string consistent with all examples.

    Pipeline: GenerateStr per example -> GeneratePartition to merge compatible
    examples -> GenerateBoolClassifier per partition -> pretty-print one
    (condition, expression) arm per partition.
    """
    T = []
    SigmaSet = set()
    B = {} # GenerateBoolClassifier result per partition key
    for elt in S:
        GenerateStrResult = GenerateStr(json.loads(elt[0]), elt[1])
        T.append((elt[0], GenerateStrResult))
    T = GeneratePartition(T) # T is now a list of (inputs, DAG) tuples
    for elt in S:
        SigmaSet = SigmaSet.union(set([elt[0]]))
    for elt in T:
        # Flatten the (possibly nested) input side of the partition into a set.
        newSetForme = set()
        if type(elt[0]) == list:
            newSetForme = newSetForme.union(set(list(flatten(elt[0]))))
        else:
            newSetForme = newSetForme.union(set([elt[0]]))
        SigmaMoins = SigmaSet - newSetForme
        valeureDeRetour = GenerateBoolClassifier(newSetForme, SigmaMoins)
        if valeureDeRetour == 'FAIL':
            B[str(newSetForme)] = 'FAIL'
        else:
            B[str(newSetForme)] = list(flatten(valeureDeRetour))
    StringProgram = "" # resulting program text
    StringProgram = StringProgram + "Switch("
    for elt in B:
        # Find the partition of T whose input set produced this classifier key.
        for elt2 in T:
            newSetForme2 = set()
            if type(elt2[0]) == list:
                newSetForme2 = newSetForme2.union(set(list(flatten(elt2[0]))))
            else:
                newSetForme2 = newSetForme2.union(set([elt2[0]]))
            if str(newSetForme2) == elt:
                break
        if B[elt] != 'FAIL':
            StringProgram = StringProgram + "(" + MathStringexpression(B[elt]) + "," + ListOfCancatenateExpression(elt2[1]) + "),"
        else:
            StringProgram = StringProgram + "(" + MathStringexpressionCaseFail(list(newSetForme2)[0]) + "," + ListOfCancatenateExpression(elt2[1]) + "),"
    # (leftover debug print of len(B) removed)
    StringProgram = StringProgram[0:len(StringProgram)-1] + ")"
    return StringProgram
########################################Fin Implementation#################################################
""" ***************** Debut Zone de test et validation des fonctions******************** """
######################Debut test fonction GetclassC################################
# Module-level token-class table consumed by the Pos/regex helpers above.
ClasseC = GetClassC()
#######################Fin test fonction GetclassC###############################
######################Debut test fonction Cpos################################
""" cpos = Cpos("laure", -1)
print(cpos) """
#######################Fin test fonction Cpos###############################
######################Debut test fonction pos################################
""" pos = Pos("-706-7709-", ClasseC['NumTok'],ClasseC['HyphenTok'],-1)
#pos1 = Pos("425-706-7709", ClasseC['NumTok'],ClasseC['HyphenTok'],-2)
print(pos)
#print(pos1)
"""
#######################Fin test fonction poss###############################
######################Debut test fonction GenerateRegularExpressionRigth################################
""" test = GenerateRegularExpressionRigth("425-706-7709",4)
print(test) """
#######################Fin test fonction GenerateRegularExpressionRigth###############################
######################Debut test fonction GenerateRegularExpressionLeft################################
""" test = GenerateRegularExpressionLeft("425-706-7709",4)
print(test) """
#######################Fin test fonction GenerateRegularExpressionLeft###############################
######################Debut test fonction SubStrs###############################
""" test = SubStrs("425-706-7709",{"Pos(HyphenTok,NumTok,1)"},{"cpos(2)","Pos(NumTok, TokenSeq(HyphenTok,NumTok),2)"})
print(test) """
#######################Fin test fonction SubStrs###############################
######################Debut test fonction GenerateSubstring###############################
""" test = GenerateSubstring({"v1":"425-706-7709"},"706") """
#######################Fin test fonction GenerateSubstring###############################
######################Debut test fonction GenerateSubstring###############################
""" w1,EtaTilda1 = GenerateStr({"v1": "-706-7709"}, "70")
print(EtaTilda1)
print(len(w1[(0,2)]))
w2,EtaTilda2 = GenerateStr({"v1": "-706-7709"}, "06")
print(EtaTilda2)
print(len(w2[(0,2)])) """
#######################Fin test fonction GenerateSubstring###############################
######################Debut test fonction IntersectSubstr###############################
""" s1="SubStr(-706-7709,Pos(-706-7709,TokenSeq(HyphenTok),TokenSeq(NumTok,HyphenTok),-1),Pos(-706-7709,TokenSeq(HyphenTok),TokenSeq(NumTok),1))"
s2="SubStr(-706-7709,Pos(-706-7709,TokenSeq(HyphenTok),TokenSeq(NumTok,HyphenTok),-1),Pos(-706-7709,TokenSeq(HyphenTok),TokenSeq(NumTok),1))"
test = IntersectSubstr(s1,s2)
print(test) """
""" test1 =IntersectSubstr("SubStr(-706-7709,Cpos(-706-7709,2),Cpos(-706-7709,-7))","SubStr(-706-7709,Cpos(-706-7709,2),Cpos(-706-7709,-7))")
print("valeur de test1: ",test1) """
#######################Fin test fonction IntersectSubstr###############################
######################Debut test fonction GenerateStr###############################
#W1,EtaTilda1 = GenerateStr({"v1": "Alex", "v2": "Asst."}, "Alex(Asst.)")
# print(EtaTilda1)
# print(len(w1[(0,2)]))
#W2,EtaTilda2 = GenerateStr({"v1": "Jim", "v2": "Manager"}, "Jim(Manager)")
# print(EtaTilda2)
# print(len(w2[(0,2)]))
#######################Fin test fonction GenerateStr###############################
""" x,intersect = IntersectDag(EtaTilda1,W1,EtaTilda2,W2)
print(intersect)
for elt in x:
print(elt,"\n",x[elt],"\n\n")
"""
#PrintDag(x,intersect,{"v1": "-506-7309"}, "506")
""" T = []
T.append((set({"v1": "-706-7709"}),(w1,EtaTilda1)))
T.append((set({"v1": "-706-7709"}),(w2,EtaTilda2)))
print("avant : ", T)
print(GeneratePartition(T))
print(" : ", T) """
""" positions,poslist = GeneratePosition("425-706-7709",6)
print(poslist)
print(positions)
print(len(poslist)) """
""" ** Implementation de GenerateBoolClassifier*** """
#r2 , r3 = ExpressionConcatenate(inputelement,outputelement)
#ExpressionConcatenateOfString("Leorge Ciprian Necula")
#print(Executionfonction({"v1": "Keorge Ciprian paul"}, r2))
#print(Executionfonction({"v1": "Ken McMillan Alpha"}, r2))
#print(ExpressionConstString({"v1": "Alex","v2": "Asst."} ,"Alex(Asst.)" ))
""" x,y = GenerateStr({"v1": "425-706-7709"},"70")
print(x)
GenerateStringProgramm(liste_ensemble_exemples[0]) """
""" ***************** Fin Zone de test et validation des fonctions******************** """
""" ***************** Debut Zone de test et validation du programme principale******************** """
"""
testSize = SizeDag(2,{(0,1):{1,2,3},(0,2):{7,8,9},(1,2):{4,5,6}})
print(testSize) """
""" liste_ensemble_exemples = GetExamples()
S = liste_ensemble_exemples[2]
print(GenerateStringProgram(S))
"""
""" ***************** Fin Zone de test et validation du programme principale******************** """ | {"/QuickFillGUI.py": ["/version1.py"]} |
42,550 | vanes11/QuickFill | refs/heads/master | /QuickFillGUI.py | # -*- coding: utf-8 -*-
from traceback import print_tb
import version1
from kivy.core.window import Window #
from kivy.lang import Builder
import os
import json
from kivymd.app import MDApp
from kivymd.uix.filemanager import MDFileManager
from kivymd.toast import toast
import pandas as pd
from kivy.metrics import dp
from kivymd.uix.menu import MDDropdownMenu
from kivymd.uix.dialog import MDDialog
#from kivymd.uix.label import MDLabel
from kivymd.uix.list import OneLineListItem
import sys
root_kv ='welcome.kv'  # path to the root Kivy layout file
df = {} # dataframe: dict until a CSV is loaded, then a pandas DataFrame
PartitionResult = [] # list of partitions
Examples = [] # set of input/output examples
BoolClassifierResult = {}  # cached GenerateBoolClassifier output
class MainApp(MDApp):
dialog = None
    def __init__(self, **kwargs):
        """Load the root layout, configure the window/file manager, and create
        the four (initially empty) dropdown menus bound to their screen ids."""
        super().__init__(**kwargs)
        self.screen = Builder.load_file(root_kv)
        self.title = "Strings Manipulation"
        Window.maximize()
        Window.bind(on_keyboard=self.events)
        self.manager_open = False
        self.file_manager = MDFileManager(
            exit_manager=self.exit_manager,
            select_path=self.select_path,
            preview=False,
        )
        menu_items = []
        self.menu = MDDropdownMenu(
            caller=self.screen.ids.dropdown_item,
            items=menu_items,
            width_mult=3,
        )
        self.menu.bind()
        self.menu2 = MDDropdownMenu(
            caller=self.screen.ids.dropdown_item2,
            items=menu_items,
            width_mult=3,
        )
        self.menu2.bind() # hook up the menu's events
        self.menu3 = MDDropdownMenu(
            caller=self.screen.ids.dropdown_item3,
            items=menu_items,
            width_mult=3,
        )
        self.menu3.bind()
        self.menu4 = MDDropdownMenu(
            caller=self.screen.ids.dropdown_item4,
            items=menu_items,
            width_mult=3,
        )
        self.menu4.bind()
    def build(self):
        """Return the pre-built root widget loaded in __init__."""
        return self.screen
###### gestionnaire de fichier
    def file_manager_open(self):
        """Open the file manager rooted at this script's directory."""
        self.file_manager.show(os.path.dirname(os.path.abspath(__file__))) # output manager to the screen
        self.manager_open = True
def select_path(self, path):
'''It will be called when you click on the file name
or the catalog selection button.
:type path: str;
:param path: path to the selected directory or file;
'''
self.exit_manager()
file_name = path.split('/')[-1]
file_extend = file_name.split('.')
if((file_extend[-1]).lower()) != 'csv':
self.show_confirmation_dialog("Veuillez charger un .csv")
else:
global df
df = pd.read_csv(path)
NumRows = df.shape[0]
self.screen.ids.box.clear_widgets()
self.screen.ids.montexte.text = ""
self.screen.ids.numPartition.text = ""
self.screen.ids.MyImageGenerateStr.source = ""
self.screen.ids.MyImageGenerateStr.reload()
self.screen.ids.montextesortie.text = ""
self.screen.ids.longueur.text = ""
print(df)
print("******* \n")
print("nombre d'exemples: ",len(df))
def exit_manager(self, *args):
'''Called when the user reaches the root of the directory tree.'''
self.manager_open = False
self.file_manager.close()
def events(self, instance, keyboard, keycode, text, modifiers):
'''Called when buttons are pressed on the mobile device.'''
if keyboard in (1001, 27):
if self.manager_open:
self.file_manager.back()
return True
#### gestion de la page d'accueil du botton SubSrting
def PrintDataItem(self):
if len(df) == 0:
self.show_confirmation_dialog("Veuillez charger votre fichier")
else:
try:
NumRows = df.shape[0]
menu_items = [
{
"text": f"Example {i}",
"viewclass": "OneLineListItem",
"on_release": lambda x=f"Example {i}": self.set_item(x),
} for i in range( NumRows)
]
self.menu = MDDropdownMenu(
caller=self.screen.ids.dropdown_item,
items=menu_items,
width_mult=3,
)
self.menu.bind()
except Exception as e:
self.show_confirmation_dialog("Veuillez charger votre fichier")
def PrintDataItem2(self):
if len(df) == 0:
self.show_confirmation_dialog("Veuillez charger votre fichier")
else:
try:
NumRows = df.shape[0]
menu_items = [
{
"text": f"Example {i}",
"viewclass": "OneLineListItem",
"on_release": lambda x=f"Example {i}": self.set_item2(x),
} for i in range( NumRows)
]
self.menu2 = MDDropdownMenu(
caller=self.screen.ids.dropdown_item2,
items=menu_items,
width_mult=3,
)
self.menu2.bind()
except Exception as e:
self.show_confirmation_dialog("Veuillez charger votre fichier")
""" def PrintDataItem3(self):
if len(df) == 0:
self.show_confirmation_dialog("Veuillez charger votre fichier")
else:
try:
NumRows = len(PartitionResult)
menu_items = [
{
"text": f"Sub set {i}",
"viewclass": "OneLineListItem",
"on_release": lambda x=f"Sub set{i}": self.set_item3(x),
} for i in range( NumRows)
]
self.menu3 = MDDropdownMenu(
caller=self.screen.ids.dropdown_item3,
items=menu_items,
width_mult=3,
)
self.menu3.bind()
except Exception as e:
self.show_confirmation_dialog("Veuillez charger votre fichier") """
""" def PrintDataItem4(self):
if len(PartitionResult) == 0:
self.show_confirmation_dialog("Veuillez creer les partitions avant de classifier")
else:
try:
NumRows = len(PartitionResult)
menu_items = [
{
"text": f"Classifier {i}",
"viewclass": "OneLineListItem",
"on_release": lambda x=f"Classifier {i}": self.set_item4(x),
} for i in range( NumRows)
]
self.menu4 = MDDropdownMenu(
caller=self.screen.ids.dropdown_item4,
items=menu_items,
width_mult=3,
)
self.menu4.bind()
except Exception as e:
self.show_confirmation_dialog("Veuillez creer les partitions avant de classifier") """
##### gestion de la liste deroulante pour le cas de Substring
def set_item(self, text_item):
self.screen.ids.dropdown_item.set_item(text_item)
self.menu.dismiss() # masquer le menu
def set_item2(self, text_item):
self.screen.ids.dropdown_item2.set_item(text_item)
self.menu2.dismiss()
def set_item3(self, text_item):
self.screen.ids.dropdown_item3.set_item(text_item)
self.menu3.dismiss()
def set_item4(self, text_item):
self.screen.ids.dropdown_item4.set_item(text_item)
self.menu4.dismiss()
def show_confirmation_dialog(self,text_to_print):
if not self.dialog:
self.dialog = MDDialog(
title=text_to_print,
md_bg_color=self.theme_cls.error_color,
type="custom",
)#text_color = selft
self.dialog.open()
def GenerateSubstring2(self, item):
#entree,s = BuildExample(item)
if len(df) == 0:
self.show_confirmation_dialog("Veuillez charger votre fichier")
else:
entree ={}
indice = item.split(" ")
exemple = dict(df.iloc[int(indice[-1])])
# construction de l'entree(dictionnaire sigma)
listheads = list(df.columns)
s = str(exemple[listheads[-1]])
i = 0
for elt in range(len(listheads)-1):
elt1 = "v"+str(i)
entree[elt1] = str(exemple[listheads[i]])
result = version1.GenerateSubstring(entree,s)
result = list(result)
if result != []:
for elt in result:
self.screen.ids.box.add_widget(
OneLineListItem(text= elt)
)
elt = "entree = " + str(entree)
self.screen.ids.montexte.text = elt
elt = "s = " + s
self.screen.ids.montextesortie.text = elt
elt = str(len(result))+" manieres d'extraire s dans entree"
self.screen.ids.longueur.text = elt
else:
self.screen.ids.box.clear_widgets()
elt = "entree = " + str(entree)
self.screen.ids.montexte.text = elt
elt = "s = " + s
self.screen.ids.montextesortie.text = elt
elt = " 0 "+" maniere d'extraire s dans entree"
self.screen.ids.longueur.text = elt
def PrintDag2(self, item):
if len(df) ==0:
self.show_confirmation_dialog("Veuillez charger votre fichier")
else:
entree ={}
indice = item.split(" ")
exemple = dict(df.iloc[int(indice[-1])])
# construction de l'entree(dictionnaire sigma)
listheads = list(df.columns)
s = str(exemple[listheads[-1]])
i = 0
for elt in range(len(listheads)-1):
elt1 = "v"+str(i)
entree[elt1] = str(exemple[listheads[i]])
w,EtaTilda = version1.GenerateStr(entree,s)
try:
os.remove("GenerateStr.gv.png")
os.remove("GenerateStr.gv")
version1.PrintDag(EtaTilda, w,entree,s)
self.screen.ids.MyImageGenerateStr.source = "GenerateStr.gv.png"
self.screen.ids.MyImageGenerateStr.reload()
except OSError:
version1.PrintDag(EtaTilda, w,entree,s)
self.screen.ids.MyImageGenerateStr.source = "GenerateStr.gv.png"
self.screen.ids.MyImageGenerateStr.reload()
def PrintPartition(self, item):
if len(PartitionResult) == 0:
self.show_confirmation_dialog("Veuillez au prealable effectuer le partitionnement")
else:
indice = item.split(" ")
if type(PartitionResult[int(indice[-1])][0]) == list:
self.screen.ids.partition.text = str(set(list(version1.flatten(PartitionResult[int(indice[-1])][0]))))
else:
self.screen.ids.partition.text = str(PartitionResult[int(indice[-1])][0])
def PrintClassifier(self, item):
if len(PartitionResult) == 0:
self.show_confirmation_dialog("Veuillez au prealable effectuer le partitionnement")
else:
indice = item.split(" ")
if type(PartitionResult[int(indice[-1])][0]) == list:
self.screen.ids.classifier.text = str(BoolClassifierResult[str(set(list(version1.flatten(PartitionResult[int(indice[-1])][0]))))])
else:
self.screen.ids.classifier.text = str(BoolClassifierResult[str(set([PartitionResult[int(indice[-1])][0]]))])
def GeneratePartition2(self):
if len(df) == 0:
self.show_confirmation_dialog("Veuillez charger votre fichier")
else:
global Examples
# pour structurer les exemples
T = [] # pour le resultat de GenerateStr
entree = {}
listheads = list(df.columns)
for i in range(len(df)):
s = str(df.iloc[i][listheads[-1]])
entree = {}
for elt in range(len(listheads)-1):
elt1 = "v"+str(elt)
entree[elt1] = str(df.iloc[i][listheads[elt]])
Examples.append((entree,s))
for elt in Examples:
dag = version1.GenerateStr(elt[0],elt[1])
T.append((json.dumps(elt[0]),dag))
global PartitionResult
PartitionResult = []
PartitionResult = version1.GeneratePartition(T)
if (len(PartitionResult)) == 0:
print("tous les exemples forment une unique partition")
else:
NumRows = len(PartitionResult)
menu_items = [
{
"text": f"Sub set {i}",
"viewclass": "OneLineListItem",
"on_release": lambda x=f"Sub set {i}": self.set_item3(x),
} for i in range( NumRows)
]
self.menu3 = MDDropdownMenu(
caller=self.screen.ids.dropdown_item3,
items=menu_items,
width_mult=3,
)
self.menu3.bind()
elt = str(len(PartitionResult))+" Partitions"
self.screen.ids.numPartition.text = elt
def PrintStringProgramme(self):
if len(BoolClassifierResult) == 0:
self.show_confirmation_dialog("Veuillez faire BoolClassifier D'abord")
else:
StringProgram = ""
StringProgram = StringProgram + "Switch("
for elt in BoolClassifierResult:
for elt2 in PartitionResult:
newSetForme2 = set()
if type(elt2[0]) == list:
newSetForme2 = newSetForme2.union(set(list(version1.flatten(elt2[0]))))
else:
newSetForme2 = newSetForme2.union(set([elt2[0]]))
if str(newSetForme2) == elt:
break
if BoolClassifierResult[elt] != 'FAIL':
StringProgram = StringProgram + "("+ version1.MathStringexpression(BoolClassifierResult[elt]) +","+ version1.ListOfCancatenateExpression(elt2[1])+"),"
else:
StringProgram = StringProgram + "("+ version1.MathStringexpressionCaseFail(list(newSetForme2)[0])+","+ version1.ListOfCancatenateExpression(elt2[1])+"),"
StringProgram = StringProgram[0:len(StringProgram)-1]+ ")"
self.screen.ids.PrincipalProgram.text = StringProgram
def BoolClassifier(self):
if len(PartitionResult) == 0:
self.show_confirmation_dialog("Veuillez au prealable effectuer le partitionnement")
else:
global Examples
global BoolClassifierResult
SigmaSet = set()
entree = {}
listheads = list(df.columns)
for i in range(len(df)):
s = str(df.iloc[i][listheads[-1]])
entree = {}
for elt in range(len(listheads)-1):
elt1 = "v"+str(elt)
entree[elt1] = str(df.iloc[i][listheads[elt]])
Examples.append((entree,s))
for elt in Examples:
SigmaSet = SigmaSet.union(set([json.dumps(elt[0])]))
for elt in PartitionResult:
newSetForme = set()
if type(elt[0]) == list:
newSetForme = newSetForme.union(set(list(version1.flatten(elt[0]))))
else:
newSetForme = newSetForme.union(set([elt[0]]))
SigmaMoins = SigmaSet - newSetForme
valeureDeRetour = version1.GenerateBoolClassifier(newSetForme,SigmaMoins)
if valeureDeRetour == 'FAIL':
BoolClassifierResult[str(newSetForme)] = 'FAIL'
else:
BoolClassifierResult[str(newSetForme)] = list(version1.flatten(valeureDeRetour))
entrees = [] # ensembles des entrees dans l'ensemble du fichier
elt = str(len(PartitionResult))+ " Clasifiers"
self.screen.ids.numClassifier.text = elt
NumRows = len(PartitionResult)
menu_items = [
{
"text": f"Classifier {i}",
"viewclass": "OneLineListItem",
"on_release": lambda x=f"Classifier {i}": self.set_item4(x),
} for i in range( NumRows)
]
self.menu4 = MDDropdownMenu(
caller=self.screen.ids.dropdown_item4,
items=menu_items,
width_mult=3,
)
self.menu4.bind()
if __name__ == "__main__":
    # Guard the entry point so importing this module no longer starts the
    # GUI as a side effect.
    MainApp().run()
| {"/QuickFillGUI.py": ["/version1.py"]} |
42,558 | laszlocseh/wise.msfd | refs/heads/master | /src/wise/msfd/compliance/main.py | # -*- coding: utf-8 -*-
from wise.msfd.gescomponents import GES_DESCRIPTORS
from .base import BaseComplianceView
# from itertools import chain
class StartComplianceView(BaseComplianceView):
    """Entry-point view of the compliance module."""
    name = 'comp-start'
class DescriptorsView(BaseComplianceView):
    """View exposing the registered GES descriptors."""

    # NOTE(review): same view name as StartComplianceView -- confirm this
    # duplication is intended.
    name = 'comp-start'

    @property
    def descriptors(self):
        """Return the GES descriptors defined in wise.msfd.gescomponents."""
        return GES_DESCRIPTORS
class ViewComplianceModule(BaseComplianceView):
    """Overview of the compliance module.

    Both properties are unimplemented placeholders; the commented-out code
    below is an earlier folder-lookup prototype kept for reference.
    """
    # name = 'comp-start2'

    @property
    def national_descriptors(self):
        # Placeholder: not implemented yet.
        pass

    @property
    def regional_descriptors(self):
        # Placeholder: not implemented yet.
        pass

    # def get_folder_by_id(self, id):
    #     folders = [
    #         x.contentValues()
    #
    #         for x in self.context.contentValues()
    #
    #         if x.portal_type == 'Folder'
    #         and x.id == id
    #     ]
    #     folders = [f for f in chain(*folders)]
    #
    #     return folders
    #
    # @property
    # def regional_descriptors_folders(self):
    #     id = 'regional-descriptors-assessments'
    #     folders = self.get_folder_by_id(id)
    #
    #     return folders
    #
    # @property
    # def national_descriptors_folders(self):
    #     id = 'national-descriptors-assessments'
    #     folders = self.get_folder_by_id(id)
    #
    #     return folders
| {"/src/wise/msfd/compliance/nationaldescriptors/a8esa.py": ["/src/wise/msfd/compliance/nationaldescriptors/data.py"], "/src/wise/msfd/compliance/nationalsummary/main.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/regionalsummary/introduction.py": ["/src/wise/msfd/compliance/nationalsummary/introduction.py"], "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/nationalsummary/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py"], "/src/wise/msfd/compliance/regionalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationalsummary/pdfexport.py", "/src/wise/msfd/compliance/regionalsummary/introduction.py"], "/src/wise/msfd/compliance/regionaldescriptors/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/utils.py", "/src/wise/msfd/compliance/regionaldescriptors/data.py"], "/src/wise/msfd/compliance/landingpage.py": ["/src/wise/msfd/compliance/assessment.py"], "/src/wise/msfd/__init__.py": ["/src/wise/msfd/patch.py"], "/src/wise/msfd/search/a9.py": ["/src/wise/msfd/__init__.py", "/src/wise/msfd/search/base.py"], "/src/wise/msfd/compliance/nationalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py", "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py", "/src/wise/msfd/compliance/nationalsummary/introduction.py"]} |
42,559 | laszlocseh/wise.msfd | refs/heads/master | /src/wise/msfd/compliance/nationaldescriptors/a8esa.py | import logging
from collections import defaultdict
from lxml.etree import fromstring
from sqlalchemy.orm.relationships import RelationshipProperty
from Products.Five.browser.pagetemplatefile import \
ViewPageTemplateFile as Template
from wise.msfd import db, sql # , sql2018
from wise.msfd.data import get_xml_report_data
from wise.msfd.gescomponents import (Criterion, MarineReportingUnit,
get_criterion, get_descriptor)
from wise.msfd.labels import COMMON_LABELS
from wise.msfd.translation import retrieve_translation
from wise.msfd.utils import Item, ItemLabel, ItemList, Node, RawRow, Row
from ..base import BaseArticle2012
from .data import REPORT_DEFS
logger = logging.getLogger('wise.msfd')

# XML namespace prefixes used when querying the reported MSFD XML files.
NSMAP = {
    "w": "http://water.eionet.europa.eu/schemas/dir200856ec",
    "c": "http://water.eionet.europa.eu/schemas/dir200856ec/mscommon",
}
class Descriptor(Criterion):
    """ Override the default Criterion to offer a nicer title
    (doesn't duplicate code)
    """

    @property
    def title(self):
        # Assumes self._title is populated by the Criterion base class --
        # TODO confirm (not visible in this module).
        return self._title
class Article8ESA(BaseArticle2012):
    # TODO not implemented, copy of Article 8
    """ Article 8 implementation for nation descriptors data

    klass(self, self.request, self.country_code, self.descriptor,
          self.article, self.muids, self.colspan)
    """

    template = Template('pt/report-data-a8.pt')
    help_text = ""

    def setup_data(self):
        """Parse the reported XML file and build ``self.rows``: a dict of
        marine reporting unit -> list of table rows."""
        filename = self.context.get_report_filename()
        text = get_xml_report_data(filename)
        root = fromstring(text)

        def xp(xpath, node=root):
            return node.xpath(xpath, namespaces=NSMAP)

        # TODO: should use declared set of marine unit ids
        xml_muids = sorted(set(xp('//w:MarineUnitID/text()')))

        self.rows = [
            Row('Reporting area(s) [MarineUnitID]',
                [', '.join(set(xml_muids))]),
        ]

        report_map = defaultdict(list)
        # NOTE(review): get_report_tags is not among this module's visible
        # imports -- confirm it is supplied elsewhere.
        root_tags = get_report_tags(root)

        # TODO: still unimplemented -- ReportTag is never assigned a real
        # class, so the instantiation below always raises and every node
        # is skipped.
        ReportTag = None

        # basic algorithm to detect what type of report it is
        article = self.article

        # override the default translatable
        fields = REPORT_DEFS[self.context.year][article]\
            .get_translatable_fields()
        self.context.TRANSLATABLES.extend(fields)

        for name in root_tags:
            nodes = xp('//w:' + name)

            for node in nodes:
                # Fix: narrowed the bare ``except:`` and removed a leftover
                # ``pdb.set_trace()`` debugging breakpoint.
                try:
                    rep = ReportTag(node, NSMAP)
                except Exception:
                    # There are some cases when an empty node is reported
                    # and the ReportTag class cannot be initialized because
                    # MarineUnitID element is not present in the node
                    # see ../fi/bal/d5/art8/@@view-report-data-2012
                    # search for node MicrobialPathogens
                    continue

                # TODO for D7(maybe for other descriptors too)
                # find a way to match the node with the descriptor
                # because all reported criterias and indicators are GESOther
                if rep.matches_descriptor(self.descriptor):
                    report_map[rep.marine_unit_id].append(rep)

        descriptor = get_descriptor(self.descriptor)
        ges_crits = [descriptor] + list(descriptor.criterions)

        # a bit confusing code, we have multiple sets of rows, grouped in
        # report_data under the marine unit id key.
        report_data = {}

        # TODO: use reported list of muids per country,from database
        for muid in xml_muids:
            if muid not in report_map:
                logger.warning("MarineUnitID not reported: %s, %s, Article 8",
                               muid, self.descriptor)
                report_data[muid] = []

                continue

            m_reps = report_map[muid]

            if len(m_reps) > 1:
                logger.warning("Multiple report tags for this "
                               "marine unit id: %r", m_reps)
            rows = []

            for i, report in enumerate(m_reps):
                # if i > 0:     # add a splitter row, to separate reports
                #     rows.append(Row('', ''))

                cols = report.columns(ges_crits)

                for col in cols:
                    for name in col.keys():
                        values = []

                        for inner in cols:
                            values.append(inner[name])

                        translated_values = [
                            self.context.translate_value(
                                name, v, self.country_code
                            )
                            for v in values
                        ]
                        row = RawRow(name, translated_values, values)
                        rows.append(row)

                    break       # only need the "first" row, for headers

            report_data[muid] = rows

        res = {}

        muids = {m.id: m for m in self.muids}

        for mid, v in report_data.items():
            mlabel = muids.get(mid)

            if mlabel is None:
                logger.warning("Report for non-defined muids: %s", mid)
                mid = unicode(mid)
                mlabel = MarineReportingUnit(mid, mid)

            res[mlabel] = v

        # self.muids = sorted(res.keys())
        self.rows = res

    def __call__(self):
        self.setup_data()

        return self.template()

    def auto_translate(self):
        """Queue a translation request for every translatable string value
        found in the report rows; returns an empty string."""
        self.setup_data()
        translatables = self.context.TRANSLATABLES
        seen = set()

        for table in self.rows.items():
            muid, table_data = table

            for row in table_data:
                if not row:
                    continue

                if row.title not in translatables:
                    continue

                for value in row.raw_values:
                    if not isinstance(value, basestring):
                        continue
                    if value not in seen:
                        retrieve_translation(self.country_code, value)
                        seen.add(value)

        return ''
| {"/src/wise/msfd/compliance/nationaldescriptors/a8esa.py": ["/src/wise/msfd/compliance/nationaldescriptors/data.py"], "/src/wise/msfd/compliance/nationalsummary/main.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/regionalsummary/introduction.py": ["/src/wise/msfd/compliance/nationalsummary/introduction.py"], "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/nationalsummary/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py"], "/src/wise/msfd/compliance/regionalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationalsummary/pdfexport.py", "/src/wise/msfd/compliance/regionalsummary/introduction.py"], "/src/wise/msfd/compliance/regionaldescriptors/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/utils.py", "/src/wise/msfd/compliance/regionaldescriptors/data.py"], "/src/wise/msfd/compliance/landingpage.py": ["/src/wise/msfd/compliance/assessment.py"], "/src/wise/msfd/__init__.py": ["/src/wise/msfd/patch.py"], "/src/wise/msfd/search/a9.py": ["/src/wise/msfd/__init__.py", "/src/wise/msfd/search/base.py"], "/src/wise/msfd/compliance/nationalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py", "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py", "/src/wise/msfd/compliance/nationalsummary/introduction.py"]} |
42,560 | laszlocseh/wise.msfd | refs/heads/master | /src/wise/msfd/compliance/nationalsummary/main.py |
from ..nationaldescriptors.main import NationalDescriptorsOverview
from .base import BaseNatSummaryView
class NationalSummaryOverview(BaseNatSummaryView, NationalDescriptorsOverview):
    """Overview page for national summaries: reuses the national
    descriptors overview behaviour under a different section id."""
    section = 'national-summaries'
42,561 | laszlocseh/wise.msfd | refs/heads/master | /src/wise/msfd/compliance/regionalsummary/introduction.py |
from sqlalchemy import or_
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile
from wise.msfd import db
from wise.msfd.data import get_text_reports_2018
from wise.msfd.sql2018 import (MarineReportingUnit, ReportingHistory,
t_MarineWaters)
from wise.msfd.utils import ItemList
from ..nationalsummary.introduction import Introduction
from .base import BaseRegSummaryView
from .utils import SimpleTable
class RegionalIntroduction(BaseRegSummaryView, Introduction):
    """ Make National summary code compatible for Regional summary """

    template = ViewPageTemplateFile('pt/introduction.pt')
    # HTML template for a dated link to a reporting envelope.
    rep_date_tpl = u'<a href="{}">{}</a>'

    def default(self):
        # Placeholder column ("-" per country) for values not yet computed.
        return ['-' for _ in self.available_countries]

    @property
    def information_memberstate(self):
        # Static introductory HTML shown above the reporting tables.
        text = u"""
By October 2018, the Member States were due to submit updates of the assessment
of their marine waters (Article 8), the determination of GES (Article 9) and
the setting of environmental targets (Article 10), in accordance with
Article 17 of the Marine Strategy Framework Directive (MSFD, Directive 2008/56/EC).
<br/>The table gives details of when the Member States submitted their reports, in text (usually pdf) and elecronic (xml) format. E-reporting was undertaken in relation to specific geographic areas (Marine Reporting Units) which are specifed in an xml file (4geo.xml) and accompanied by geographic information system (GIS) shapefiles which allow the reported information to be displayed as maps. In cases where the Member State uploaded reports in stages (text reports) or modified previous versions (e-reports), multiple dates are shown.
        """

        return text

    def _get_memberstate_reports(self, data, obligation):
        """Per country: an ItemList of envelope links for *obligation*,
        newest first."""
        res = []

        for country_code, country_name in self.available_countries:
            values = []

            for row in data:
                if row.ReportingObligation != obligation:
                    continue

                if row.CountryCode != country_code:
                    continue

                date_received = row.DateReceived.date()
                # Envelope URL = the file URL with the file name stripped.
                envelope_url = row.LocationURL.replace(row.FileName, '')
                values.append((envelope_url, date_received))

            # Deduplicate, then sort by date, most recent first.
            values = sorted(set(values), key=lambda i: i[1], reverse=True)
            unique_values = [
                self.rep_date_tpl.format(v[0], v[1])
                for v in values
            ]
            res.append(ItemList(unique_values, sort=False))

        return res

    def get_text_reports(self):
        """Per country: an ItemList of links to the 2018 text (pdf)
        report envelopes, newest first."""
        res = []

        for country_code, country_name in self.available_countries:
            text_reports = get_text_reports_2018(country_code)
            values = []

            for row in text_reports:
                file_url = row[0]
                release_date = row[1].date()
                envelope_url = '/'.join(file_url.split('/')[:-1])
                values.append((envelope_url, release_date))

            values = sorted(set(values), key=lambda i: i[1], reverse=True)
            unique_values = [
                self.rep_date_tpl.format(v[0], v[1])
                for v in values
            ]
            res.append(ItemList(unique_values, sort=False))

        return res

    @db.use_db_session('2018')
    def memberstate_reports(self):
        """Render the table of report-submission dates per Member State."""
        header = u"Dates of Member State's reports for 2018 updates of " \
                 u"Articles 8, 9 and 10"
        mc = ReportingHistory
        _, data = db.get_all_records(
            mc,
            # mc.EnvelopeStatus == 'End',
        )
        rows = [
            ("", [x[1] for x in self.available_countries]),
            ("Text reports (pdf)", self.get_text_reports()),
            ("Electronic reports (xml)", self._get_memberstate_reports(
                data, 'MSFD - Articles 8, 9 and 10 - XML data')),
            ("Geographic data (4geo.xml; GIS shapefiles)",
             self._get_memberstate_reports(
                 data, 'MSFD - Article 4 - Spatial data')
             )
        ]
        view = SimpleTable(self, self.request, header, rows)

        return view()

    def _get_marine_water_by_type(self, data, types):
        """Per country: total marine-water area (km2, formatted with
        thousands separators) for the given water *types*, restricted to
        the subregions available for this view."""
        res = []

        for country_id, country_name in self.available_countries:
            values = [
                int(row.Area_km2)
                for row in data
                if (row.Country == country_id and
                    row.Type in types and
                    row.Subregion in self.available_subregions)
            ]
            res.append("{:,}".format(sum(values)))

        return res

    def get_water_seabed_row(self, data):
        # Water column plus seabed/subsoil areas.
        types = ['Water column & seabed/subsoil', 'Marine waters']

        return self._get_marine_water_by_type(data, types)

    def get_seabed_only_row(self, data):
        # Seabed/subsoil-only areas (beyond EEZ or equivalent).
        types = ['Seabed/subsoil']

        return self._get_marine_water_by_type(data, types)

    def get_proportion_row(self, data):
        """Per country: share (in %) of the region's total marine-water
        area."""
        types = ['Water column & seabed/subsoil', 'Marine waters']
        res = []
        # NOTE(review): if no rows match, ``total`` is 0 and the division
        # below raises ZeroDivisionError -- confirm data always present.
        total = sum([
            float(row.Area_km2)
            for row in data
            if (row.Type in types and
                row.Subregion in self.available_subregions)
        ])

        for country_id, country_name in self.available_countries:
            values = [
                float(row.Area_km2)
                for row in data
                if (row.Country == country_id and
                    row.Type in types and
                    row.Subregion in self.available_subregions)
            ]
            country_total = sum(values)
            res.append("{:.1f}%".format(country_total/total * 100))

        return res

    @db.use_db_session('2018')
    def marine_waters(self):
        """Render the coastline/marine-waters statistics table."""
        header = u"Length of coastline and area of marine waters per Member " \
                 u"State (based on GIS data reported for MSFD by each Member " \
                 u"State)"
        column_names = ['Country', 'Subregion', 'Area_km2', 'Type']
        cnt, data = db.get_all_specific_columns(
            [getattr(t_MarineWaters.c, c) for c in column_names]
        )
        rows = [
            ("", [x[1] for x in self.available_countries]),
            ("Length of coastline (km)", self.default()),
            ("Area of marine waters (water column and seabed) (km2)",
             self.get_water_seabed_row(data)),
            ("Area of marine waters (seabed only - beyond EEZ or quivalent) "
             "(km2)", self.get_seabed_only_row(data)),
            ("Proportion of Baltic Sea region per Member State (areal %)",
             self.get_proportion_row(data))
        ]
        view = SimpleTable(self, self.request, header, rows)

        return view()

    @db.use_db_session('2018')
    def get_number_of_mrus(self):
        """Per country: count of Marine Reporting Units having either a
        designation begin date or 4geo data."""
        columns = ['CountryCode', 'MarineReportingUnitId', 'desigBegin',
                   '_4geo']
        cnt, data = db.get_all_specific_columns(
            [getattr(MarineReportingUnit, c)
             for c in columns],
            or_(MarineReportingUnit.desigBegin.isnot(None),
                MarineReportingUnit._4geo.isnot(None))
        )
        res = []

        for country_code, country_name in self.available_countries:
            cnt = [
                x
                for x in data
                if x.CountryCode == country_code
            ]
            res.append(len(cnt))

        return res

    def assessment_areas(self):
        """Render the Marine Reporting Units statistics table."""
        header = u"Reporting areas of the Member States"
        title = u"The table gives details about the Marine Reporting Units " \
                u"used for the 2018 reporting on updates of Articles 8, 9 " \
                u"and 10."
        rows = [
            ("", [x[1] for x in self.available_countries]),
            ("Number of Marine Reporting Units used",
             self.get_number_of_mrus()),
            ("Range of extent of Marine Reporting Units (km2)",
             self.default()),
            ("Average extent of Marine Reporting Units (km2)", self.default())
        ]
        view = SimpleTable(self, self.request, header, rows, title)

        return view()
| {"/src/wise/msfd/compliance/nationaldescriptors/a8esa.py": ["/src/wise/msfd/compliance/nationaldescriptors/data.py"], "/src/wise/msfd/compliance/nationalsummary/main.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/regionalsummary/introduction.py": ["/src/wise/msfd/compliance/nationalsummary/introduction.py"], "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/nationalsummary/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py"], "/src/wise/msfd/compliance/regionalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationalsummary/pdfexport.py", "/src/wise/msfd/compliance/regionalsummary/introduction.py"], "/src/wise/msfd/compliance/regionaldescriptors/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/utils.py", "/src/wise/msfd/compliance/regionaldescriptors/data.py"], "/src/wise/msfd/compliance/landingpage.py": ["/src/wise/msfd/compliance/assessment.py"], "/src/wise/msfd/__init__.py": ["/src/wise/msfd/patch.py"], "/src/wise/msfd/search/a9.py": ["/src/wise/msfd/__init__.py", "/src/wise/msfd/search/base.py"], "/src/wise/msfd/compliance/nationalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py", "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py", "/src/wise/msfd/compliance/nationalsummary/introduction.py"]} |
42,562 | laszlocseh/wise.msfd | refs/heads/master | /src/wise/msfd/compliance/nationaldescriptors/reportdata.py | import logging
from collections import OrderedDict, defaultdict, namedtuple
from datetime import datetime
from HTMLParser import HTMLParser
from io import BytesIO
from lxml.etree import fromstring
from sqlalchemy import or_
from zope.interface import implements
from zope.schema import Choice
from zope.schema.vocabulary import SimpleTerm, SimpleVocabulary
import xlsxwriter
from eea.cache import cache
from plone.memoize import volatile
from Products.Five.browser.pagetemplatefile import \
ViewPageTemplateFile as Template
from Products.statusmessages.interfaces import IStatusMessage
from wise.msfd import db, sql2018 # sql,
from wise.msfd.base import BaseUtil
from wise.msfd.compliance.interfaces import (IReportDataView,
IReportDataViewSecondary)
from wise.msfd.compliance.nationaldescriptors.data import get_report_definition
from wise.msfd.compliance.utils import (group_by_mru,
insert_missing_criterions,
ordered_regions_sortkey)
from wise.msfd.compliance.vocabulary import get_regions_for_country
from wise.msfd.data import (get_all_report_filenames,
get_envelope_release_date, get_factsheet_url,
get_report_file_url, get_report_filename,
get_xml_report_data)
from wise.msfd.gescomponents import get_descriptor, get_features
from wise.msfd.translation import get_translated, retrieve_translation
from wise.msfd.utils import (current_date, items_to_rows, natural_sort_key,
timeit)
from z3c.form.button import buttonAndHandler
from z3c.form.field import Fields
from z3c.form.form import Form
from .a7 import Article7, Article7_2018
from .a8 import Article8
from .a8alternate import Article8Alternate
from .a8esa import Article8ESA
from .a9 import Article9, Article9Alternate
from .a10 import Article10, Article10Alternate
from .a34 import Article34, Article34_2018
from .base import BaseView
from .proxy import Proxy2018
from .utils import consolidate_date_by_mru, consolidate_singlevalue_to_list
# from persistent.list import PersistentList
# from six import string_types
# from .utils import row_to_dict
logger = logging.getLogger('wise.msfd')

# XML namespace prefix used when querying the reported MSFD XML files.
NSMAP = {"w": "http://water.eionet.europa.eu/schemas/dir200856ec"}

# 2012 reporting metadata: when it was reported and by whom.
ReportingInformation = namedtuple('ReportingInformation',
                                  ['report_date', 'reporters'])
# 2018 variant.  NOTE(review): it reuses the typename
# 'ReportingInformation' -- confirm this is intentional.
ReportingInformation2018 = namedtuple(
    'ReportingInformation', ['ReportedFileLink', 'ContactOrganisation',
                             'ReportingDate'])
def get_reportdata_key(func, self, *args, **kwargs):
    """Build the cache key for report-data template rendering.

    ``func`` is the cached method, ``self`` the view instance carrying the
    country / region / descriptor context.
    """
    # Allow explicit cache busting through the ?nocache request parameter.
    if 'nocache' in self.request.form:
        raise volatile.DontCache

    joined_muids = ",".join(m.id for m in self.muids)
    region = getattr(self, 'country_region_code', ''.join(self.regions))

    parts = [
        func.__name__,
        self.report_year,
        getattr(self, 'cache_key_extra', ''),
        self.country_code,
        region,
        self.descriptor,
        self.article,
        joined_muids,
        getattr(self, 'focus_muid', ''),
        current_date(),
    ]
    key = '_cache_' + '_'.join(parts)

    # NOTE: stripping '.'/'-' was disabled: it would make e.g. D1.1
    # collide with D11.
    # key = key.replace('.', '').replace('-', '')

    logger.info("Report data cache key: %s", key)

    return key
def serialize_rows(rows):
    """ Return a cacheable representation of view rows; this is used when
    downloading the report data as excel.

    :param rows: view.rows - either a list of rows, or a dict keyed by MRU
    :return: dict in format {mru : data, ...} where
        'mru': marine unit id, representing the worksheet title
        'data': list of tuples in format [(row_title, raw_data), ...]
        'raw_data' list of unicode values [u'GES 5.1', u'GES 5.2', ...]
    """
    if isinstance(rows, list):
        # normalize the list form to the dict form, under a generic title
        rows = {'Report data': rows}

    # FIX: a single parser instance is enough; unescape() keeps no state,
    # so there is no need to instantiate one parser per cell value
    parser = HTMLParser()

    res = {}

    for mru, data in rows.items():
        raw_data = []

        for row in data:
            title = row.title
            raw_values = []

            for v in row.raw_values:
                if isinstance(v, str):
                    # byte strings carry utf-8 encoded html: decode, then
                    # unescape html entities
                    v = parser.unescape(v.decode('utf-8'))

                if not isinstance(v, basestring):
                    # non-string values: falsy become '', others repr()-ed
                    v = repr(v) if v else ''

                if isinstance(v, str):
                    # repr() returns a byte string on Python 2; decode it
                    v = v.decode('utf-8')

                raw_values.append(unicode(v))

            raw_data.append((title, raw_values))

        res[mru] = raw_data

    return res
class ReportData2012(BaseView, BaseUtil):
    """ Report data view for the 2012 reporting exercise (WIP on
    compliance tables).
    """

    implements(IReportDataView)

    year = report_year = '2012'     # report_year is used by the cache key
    section = 'national-descriptors'
    cache_key_extra = 'base'
@property
def help_text(self):
    """ Help text of the article implementation for the current article """
    return self.article_implementations[self.article].help_text
@property
def article_implementations(self):
    """ Mapping of article id to the view class implementing its report """
    return {
        'Art3': Article34,
        'Art4': Article34,
        'Art7': Article7,
        'Art8esa': Article8ESA,
        'Art8': Article8,
        'Art9': Article9,
        'Art10': Article10,
    }
def get_criterias_list(self, descriptor):
    """ Get the list of criterias for the specified descriptor

    :param descriptor: 'D5'
    :return: (('D5', 'Eutrophication'),
              ('5.1.1', 'D5C1'),
              ('5.2.1', 'D5C2'), ... )

    # TODO: the results here need to be augumented by L_GESComponents
    """
    rows = [(descriptor, self.descriptor_label)]

    for criterion in get_descriptor(descriptor).criterions:
        for alt in criterion.alternatives:
            # alt is (indicator id, title)
            label = '{} ({}) {}'.format(criterion._id or '', alt[0], alt[1])
            rows.append((alt[0], label))

    return rows
def get_report_view(self):
    """ Instantiate the article-specific report view for this context """
    logger.info("Rendering 2012 report for: %s %s %s %s",
                self.country_code, self.descriptor, self.article,
                ",".join(x.id for x in self.muids))

    klass = self.article_implementations[self.article]

    return klass(self, self.request, self.country_code,
                 self.country_region_code, self.descriptor, self.article,
                 self.muids)
@cache(get_reportdata_key, dependencies=['translation'])
def get_report_data(self):
    """ Render the article report view (cached).

    :return: (rendered html, serialized cacheable raw row values)
    """
    report_view = self.get_report_view()
    rendered = report_view()

    return rendered, serialize_rows(report_view.rows)
def get_report_header_data(self, report_by, source_file, factsheet,
                           report_date, multiple_source_files=False):
    """ Build the data rendered in the report header table.

    :return: OrderedDict preserving the display order of the header rows

    FIX: the dict was previously built with keyword arguments, which do
    NOT preserve insertion order on Python < 3.7, so the header rows
    could render in arbitrary order. Build from a list of pairs instead.
    """
    data = OrderedDict([
        ('title', self.report_title),
        ('report_by', report_by),
        ('source_file', source_file),
        ('factsheet', factsheet),
        # TODO: do the report_due by a mapping with article: date
        ('report_due', '2012-10-15'),
        ('report_date', report_date),
        ('help_text', self.help_text),
        ('multiple_source_files', multiple_source_files),
    ])

    return data
def get_report_filename(self, art=None):
    """ Return the reported file name; needed in article report data
    implementations to retrieve the file.

    :param art: optional article override (falls back to self.article)
    """
    return get_report_filename('2012', self.country_code,
                               self.country_region_code,
                               art or self.article, self.descriptor)
@property
def report_title(self):
    """ Title shown in the 2012 report header (spacing around '2012/' is
    kept exactly as before for output compatibility) """
    parts = (self.article, self.descriptor_title,
             self.country_name, self.country_region_name)

    return "Member State report / {} / 2012/ {} / {} / {}".format(*parts)
def data_to_xls(self, data, report_header):
    """ Build an in-memory xlsx workbook from header and report data.

    :param data: dict of {worksheet title: [(row_title, values), ...]}
    :param report_header: OrderedDict of header rows
    :return: BytesIO with the workbook contents
    """
    out = BytesIO()
    workbook = xlsxwriter.Workbook(out, {'in_memory': True})

    # first worksheet: the report header rows
    worksheet = workbook.add_worksheet(unicode('Report header'))

    for row_ix, (wtitle, wdata) in enumerate(report_header.items()):
        label = wtitle.title().replace('_', ' ')

        if isinstance(wdata, tuple):
            # (name, url)-style values: keep the second element
            wdata = wdata[1]

        worksheet.write(row_ix, 0, label)
        worksheet.write(row_ix, 1, wdata)

    # one worksheet per marine unit with the report data
    for wtitle, wdata in data.items():
        if not wdata:
            continue

        # worksheet names are length-limited in xlsx
        worksheet = workbook.add_worksheet(unicode(wtitle)[:30])

        for row_ix, row in enumerate(wdata):
            worksheet.write(row_ix, 0, row[0])

            for col_ix, value in enumerate(row[1]):
                translated = get_translated(value, self.country_code) or value
                worksheet.write(row_ix, col_ix + 1, translated)

    workbook.close()
    out.seek(0)

    return out
def _set_response_header(self, xlsio):
    """ Set xlsx download headers on the response; return the file body """
    set_header = self.request.response.setHeader

    set_header('Content-Type',
               'application/vnd.openxmlformats-officedocument.'
               'spreadsheetml.sheet')

    fname = "-".join([self.country_code, self.country_region_code,
                      self.article, self.descriptor])
    set_header('Content-Disposition',
               'attachment; filename=%s.xlsx' % fname)

    return xlsio.read()
def download(self, report_data, report_header):
    """ Stream the report data as an xlsx download """
    return self._set_response_header(
        self.data_to_xls(report_data, report_header))
@db.use_db_session('2012')
def __call__(self):
    """ Render the 2012 report page.

    Handles the 'translate' (trigger auto-translation) and 'download'
    (xlsx export) request parameters.
    """
    # if self.descriptor.startswith('D1.'):      # map to old descriptor
    #     # self._descriptor = 'D1'      # this hardcodes D1.x
    #     # descriptors to D1
    #     assert self.descriptor == 'D1'

    if 'translate' in self.request.form:
        report_view = self.get_report_view()
        report_view.auto_translate()

        messages = IStatusMessage(self.request)
        messages.add(u"Auto-translation initiated, please refresh "
                     u"in a couple of minutes", type=u"info")

    print("Will render report for: %s" % self.article)

    self.filename = filename = self.get_report_filename()
    factsheet = None
    source_file = ('File not found', None)
    multiple_source_files = False

    if filename:
        if isinstance(filename, tuple):
            # Art 3/4 report multiple files (one per region)
            multiple_source_files = True

            try:
                source_file = [
                    (f, get_report_file_url(f) + '/manage_document')
                    for f in filename
                ]
            # FIX: was a bare 'except' (also caught SystemExit /
            # KeyboardInterrupt); keep the best-effort behavior
            except Exception:
                # FIX: removed stray ')' from the log message
                logger.exception("Error in getting HTML Factsheet URL")
        else:
            url = get_report_file_url(filename)

            if url:
                try:
                    factsheet = get_factsheet_url(url)
                except Exception:
                    logger.exception("Error in getting HTML Factsheet URL %s",
                                     url)
            else:
                logger.warning("No factsheet url, filename is: %r", filename)

            source_file = (filename, url + '/manage_document')

    rep_info = self.get_reporting_information()

    report_header_data = self.get_report_header_data(
        rep_info.reporters, source_file, factsheet, rep_info.report_date,
        multiple_source_files
    )
    report_header = self.report_header_template(**report_header_data)

    try:
        report_data, report_data_rows = self.get_report_data()
    except Exception:
        # FIX: keep the page rendering on failure as before, but log the
        # error instead of silently swallowing it
        logger.exception("Error in rendering report data")
        report_data, report_data_rows = 'Error in rendering report', []

    trans_edit_html = self.translate_view()()
    self.report_html = report_header + report_data + trans_edit_html

    if 'download' in self.request.form:
        return self.download(report_data_rows, report_header_data)

    return self.index()
def _get_reporting_info(self, root):
    """ Extract (reporters, dates) text values from a reported XML root """
    reporters = root.xpath(
        '//w:ReportingInformation/w:Organisation/text()', namespaces=NSMAP
    )
    dates = root.xpath('//w:ReportingInformation/w:ReportingDate/text()',
                       namespaces=NSMAP)

    # always return at least a placeholder date
    return reporters, (dates or ['-'])
def get_reporting_information(self, filename=None):
    """ Return ReportingInformation(report_date, reporters) for a report.

    The MSFD<ArtN>_ReportingInformation tables are not reliable (8b is
    empty), so we try to get the information from the reported XML files.
    """
    if not filename:
        f = self.filename
        # self.filename may be a tuple of files (Art 3/4): use the first
        filename = isinstance(f, tuple) and f[0] or f

    # fallback values used when there is no reported file at all
    default = ReportingInformation('2013-04-30', 'Member State')

    if not filename:
        return default

    text = get_xml_report_data(filename)
    root = fromstring(text)

    reporters, date = self._get_reporting_info(root)

    try:
        # reported dates use DD-MM-YYYY; normalize to ISO format
        date_obj = datetime.strptime(date[0], '%d-%m-%Y')
        date_final = date_obj.date().isoformat()
        res = ReportingInformation(date_final, ', '.join(set(reporters)))
    except Exception:
        logger.exception('Could not parse date for %s, %s, %s',
                         self.article, self.descriptor, self.country_code
                         )
        # keep the raw (unparsed) date string as a best effort
        res = ReportingInformation(date[0], ', '.join(set(reporters)))

    return res
class ReportData2012Secondary(ReportData2012):
    """ 2012 report data for the 'secondary' articles (3, 4, 7, 8 ESA),
    which are not linked to a specific descriptor or region.
    """

    implements(IReportDataViewSecondary)

    # secondary articles are not bound to a descriptor / region
    descriptor = 'Not linked'
    country_region_code = 'No region'
@property
def report_title(self):
    """ Header title for secondary articles (no descriptor / region) """
    tpl = "Member State report / {} / {} / 2012"

    return tpl.format(self.article, self.country_name)
def _get_reporting_info_art_34(self, root):
    """ Reporting info taken from the Art 3/4 XML root attributes """
    attribs = root.attrib

    return [attribs['Organisation']], [attribs['ReportingDate']]
def _get_reporting_info_art_7(self, root):
    """ Reporting info from the Art 7 XML root attributes (optional) """
    missing = u'Not available'

    return ([root.attrib.get('GeneratedBy', missing)],
            [root.attrib.get('CreationDate', missing)])
def _get_reporting_info(self, root):
    """ Dispatch reporting-info extraction based on the current article """
    handlers = {
        'Art3': self._get_reporting_info_art_34,
        'Art4': self._get_reporting_info_art_34,
        'Art7': self._get_reporting_info_art_7,
        'Art8esa': self._get_reporting_info_art_34,
    }

    return handlers[self.article](root)
def data_to_xls_art7(self, data):
    """ Build an xlsx workbook for the Art 3/4/7 downloads.

    :param data: list of (region, rows-dict, report_header) tuples, one
        entry per reported region
    :return: BytesIO with the workbook contents
    """
    out = BytesIO()
    workbook = xlsxwriter.Workbook(out, {'in_memory': True})

    for region, wdata, report_header in data:
        if not wdata:
            continue

        # per-region worksheet with the report header rows
        worksheet = workbook.add_worksheet(
            u'Report header for {}'.format(region)
        )

        for row_ix, (rtitle, rdata) in enumerate(report_header.items()):
            label = rtitle.title().replace('_', ' ')

            if isinstance(rdata, tuple):
                # (name, url)-style values: keep the second element
                rdata = rdata[1]

            worksheet.write(row_ix, 0, label)
            worksheet.write(row_ix, 1, rdata)

        # per-region worksheet with the report data rows
        worksheet = workbook.add_worksheet(
            u'Report data for {}'.format(region)
        )

        for row_ix, row in enumerate(wdata['Report data']):
            worksheet.write(row_ix, 0, row[0])

            for col_ix, value in enumerate(row[1]):
                translated = get_translated(value, self.country_code) or value
                worksheet.write(row_ix, col_ix + 1, translated)

    workbook.close()
    out.seek(0)

    return out
def download_art7(self, report_data):
    """ Stream the Art 3/4/7 report data as an xlsx download """
    return self._set_response_header(self.data_to_xls_art7(report_data))
def __call__(self):
    """ Article 3 & 4 reports are separated per regions

    This means we can have more than one report xml for a country,
    one for each region.

    Merge the data from each region, and display it in one table.
    """
    # we treat Art 3 & 4 different because of multiple report files
    if self.article not in ('Art3', 'Art4'):
        return super(ReportData2012Secondary, self).__call__()

    template = Template('pt/report-data-view-art34.pt')
    report_header_template = Template('pt/report-data-header-art34.pt')

    regions = get_regions_for_country(self.country_code)

    # one (region, region_name, filename) entry per region
    filenames = [
        (r[0], r[1], get_report_filename('2012', self.country_code, r[0],
                                         self.article, self.descriptor))
        for r in regions
    ]
    filenames = sorted(filenames,
                       key=lambda i: ordered_regions_sortkey(i[0]))

    trans_edit_html = self.translate_view()()
    reports = []
    report_data = []

    for region, region_name, filename in filenames:
        if not filename:
            # no report submitted for this region
            continue

        url = get_report_file_url(filename)
        source_file = (filename, url + '/manage_document')
        factsheet = get_factsheet_url(url)

        view = Article34(self, self.request, self.country_code,
                         region, self.descriptor, self.article,
                         self.muids, filename)
        rendered_view = view()

        rep_info = self.get_reporting_information(filename=filename)
        report_header_data = self.get_report_header_data(
            rep_info.reporters, source_file, factsheet,
            rep_info.report_date
        )
        report_header = report_header_template(self, self.request,
                                               region=region_name,
                                               **report_header_data)

        # concatenate header + data + translation edit ui per region
        reports.append(report_header + rendered_view + trans_edit_html)
        report_data.append((region, serialize_rows(view.rows),
                            report_header_data))

    self.reports = reports

    if 'download' in self.request.form:
        return self.download_art7(report_data)

    return template(self, self.request)
class ReportData2012Like2018(ReportData2012):
    """ An alternative implementation, mapping data like the 2018 views
    """

    cache_key_extra = 'like2018'

    @property
    def article_implementations(self):
        """ 2018-style alternate article implementations """
        return {
            'Art8': Article8Alternate,
            'Art9': Article9Alternate,
            'Art10': Article10Alternate,
        }
class SnapshotSelectForm(Form):
    """ Inline form to select a stored data snapshot (date of harvest)
    and to trigger harvesting of new data from the database.
    """

    template = Template('../pt/inline-form.pt')
    _updated = False        # guard: run update() only once per request

    @property
    def fields(self):
        """ Build the 'sd' (snapshot date) choice field from the stored
        snapshots; defaults to the most recent snapshot date.
        """
        snaps = getattr(self.context.context, 'snapshots', [])

        if snaps:
            default = snaps[-1][0]
        else:
            default = None

        dates = [SimpleTerm(x[0], x[0].isoformat(), x[0]) for x in snaps]

        field = Choice(
            title=u'Date of harvest',
            __name__='sd',
            vocabulary=SimpleVocabulary(dates),
            required=False,
            default=default
        )

        return Fields(field)

    def update(self):
        if not self._updated:
            Form.update(self)
            self._updated = True

    @buttonAndHandler(u'View snapshot', name='view')
    def apply(self, action):
        # the selected date is read via extractData() in
        # ReportData2018.get_report_data(); nothing to do here
        return

    # TODO: make a condition for this button
    @buttonAndHandler(u'Harvest new data', name='harvest')
    def harvest(self, action):
        """ Harvest fresh db data, store it as a new snapshot, reload """
        data = self.context.get_data_from_db()
        self.context.context.snapshots.append((datetime.now(), data))

        self.request.response.redirect('./@@view-report-data-2018')
class ReportData2018(BaseView):
    """ Report data view for the 2018 reporting exercise """

    implements(IReportDataView)

    report_year = '2018'        # used by cache key
    year = '2018'       # used in report definition and translation

    section = 'national-descriptors'

    # per-article explanation of the data source, shown as help text in
    # the report header
    help_texts = {
        'Art8': """
The data is retrieved from the MSFD2018_production.V_ART8_GES_2018 database
view, filtered by country code and ges component ids. If the current Descriptor
starts with 'D1.', we also append the 'D1' descriptor to the GES Component ids.
We use this table for the list of GES Components and the descriptor that they
belong to:
https://raw.githubusercontent.com/eea/wise.msfd/master/src/wise/msfd/data/ges_terms.csv
""",
        'Art9': """
The data is retrieved from the MSFD2018_production.V_ART9_GES_2018 database
view, filtered by country code and ges component ids. If the current Descriptor
starts with 'D1.', we also append the 'D1' descriptor to the GES Component ids.
We use this table for the list of GES Components and the descriptor that they
belong to:
https://raw.githubusercontent.com/eea/wise.msfd/master/src/wise/msfd/data/ges_terms.csv
""",
        'Art10': """
The data is retrieved from the MSFD2018_production.V_ART10_Targets_2018
database view. Because the GESComponent column is not reliable (the Netherlands
reported using the 1.1.3 GESComponent for all their records), we filter the
data using the Parameters and Features available for the current descriptor.
We use this file for the Descriptor to Parameters and Features association
table:
https://svn.eionet.europa.eu/repositories/Reportnet/Dataflows/MarineDirective/MSFD2018/Webforms/msfd2018-codelists.json
""",
        'Art3': "To be completed...",
        'Art4': "To be completed...",
        'Art7': "To be completed..."
    }
@property
def help_text(self):
    """ Data-source explanation for the current article """
    return self.help_texts[self.article]
# per-article rendering templates, looked up by name in get_template()
Art8 = Template('pt/report-data-multiple-muid.pt')
Art9 = Template('pt/report-data-multiple-muid.pt')
Art10 = Template('pt/report-data-multiple-muid.pt')

# Art9 = Template('pt/report-data-single-muid.pt')

subform = None      # used for the snapshot selection form
@property
def all_descriptor_ids(self):
    """ All GES component ids for the current descriptor; the generic
    'D1' id is included for D1.x sub-descriptors """
    ids = list(get_descriptor(self.descriptor).all_ids())

    if self.descriptor.startswith('D1.'):
        ids.append('D1')

    return set(ids)
def _get_order_cols_Art8(self, descr):
    """ Ordering columns for Art 8 data, chosen per descriptor group.

    :param descr: descriptor id, e.g. 'D5' or 'D5.1' (only the part
        before the '.' is used for the lookup)
    """
    main_descr = descr.split('.')[0]

    criteria_priority = ('MarineReportingUnit', 'GESComponent', 'Criteria',
                         'Feature', 'Element', 'Element2', 'Element2Code',
                         'IntegrationRuleTypeParameter')
    default = ('MarineReportingUnit', 'GESComponent', 'Feature',
               'Element', 'Element2', 'Element2Code', 'Criteria',
               'IntegrationRuleTypeParameter',)
    d5_order = ('MarineReportingUnit', 'GESComponent', 'Feature',
                'Criteria', 'Element', 'Element2', 'Element2Code',
                'IntegrationRuleTypeParameter',
                )

    order_by = {
        'D2': criteria_priority,
        'D4': criteria_priority,
        'D5': d5_order,
        'D6': default,
        'D7': criteria_priority,
        'D8': criteria_priority,
        'D11': criteria_priority,
    }

    return order_by.get(main_descr, default)
def _get_order_cols_Art10(self):
    """ Ordering columns for Art 10 data """
    return ('TargetCode', 'Features', 'Element')
def get_data_from_view_Art8(self):
    """ Query the V_ART8_GES_2018 db view for the current country,
    region and descriptor.

    :return: a SQLAlchemy query, or a feature-filtered list of rows for
        the countries in `country_filters`
    """
    sess = db.session()
    t = sql2018.t_V_ART8_GES_2018

    descr_class = get_descriptor(self.descriptor)
    all_ids = list(descr_class.all_ids())

    if self.descriptor.startswith('D1.'):
        # D1.x data can also be reported under the generic 'D1' component
        all_ids.append('D1')

    # muids = [x.id for x in self.muids]
    conditions = [
        t.c.CountryCode == self.country_code,
        # t.c.Region == self.country_region_code,
        # t.c.MarineReportingUnit.in_(muids),     #
        t.c.GESComponent.in_(all_ids)
    ]

    # Handle the case of Romania that submitted duplicate data,
    # where Element is empty, but Criteria has data
    if self.country_code != 'RO':
        conditions.append(
            or_(t.c.Element.isnot(None),
                t.c.Criteria.isnot(None))
        )
    else:
        conditions.append(
            t.c.Element.isnot(None)
        )

    if self.country_code != 'DK':
        conditions.insert(
            1, t.c.Region == self.country_region_code
        )
    else:
        # Handle the case of Denmark that have submitted a lot of
        # information under the DK-TOTAL MRU, which doesn't have a region
        # attached.
        conditions.insert(1,
                          or_(t.c.Region == 'NotReported',
                              t.c.Region == self.country_region_code
                              )
                          )

    orderby = [
        getattr(t.c, x) for x in self._get_order_cols_Art8(self.descriptor)
    ]

    # groupby IndicatorCode
    q = sess\
        .query(t)\
        .filter(*conditions)\
        .order_by(*orderby)\
        .distinct()

    # For the following countries filter data by features
    # for other countries return all data
    country_filters = ('BE', )

    if self.country_code not in country_filters:
        return q

    ok_features = set([f.name for f in get_features(self.descriptor)])
    out = []

    for row in q:
        if not self.descriptor.startswith('D1.'):
            # only D1.x rows are feature-filtered
            out.append(row)
            continue

        feats = set((row.Feature, ))

        if feats.intersection(ok_features):
            out.append(row)

    return out
def get_data_from_view_Art10(self):
    """ Query the V_ART10_Targets_2018 db view and filter the rows for
    the current country, region and descriptor.

    :return: list of filtered rows
    """
    t = sql2018.t_V_ART10_Targets_2018

    conditions = [t.c.CountryCode == self.country_code]

    if self.country_code != 'DK':
        conditions.insert(
            1, t.c.Region == self.country_region_code
        )
    else:
        # Handle the case of Denmark that have submitted a lot of
        # information under the DK-TOTAL MRU, which doesn't have a region
        # attached.
        conditions.insert(1,
                          or_(t.c.Region == 'NotReported',
                              t.c.Region == self.country_region_code
                              )
                          )

    count, res = db.get_all_records_ordered(
        t,
        self._get_order_cols_Art10(),
        *conditions
    )

    out = []

    # GESComponents contains multiple values separated by comma
    # filter rows by splitting GESComponents
    for row in res:
        ges_comps = getattr(row, 'GESComponents', ())
        ges_comps = set([g.strip() for g in ges_comps.split(',')])

        if ges_comps.intersection(self.all_descriptor_ids):
            out.append(row)

    if not self.descriptor.startswith('D1.'):
        return out

    # conditions = []
    # params = get_parameters(self.descriptor)
    # p_codes = [p.name for p in params]
    # conditions.append(t.c.Parameter.in_(p_codes))

    # Filtering results based on FeaturesSmart and other conditions
    # I don't think this code should be kept. Probably the edge case should
    # be documented. It makes it fragile and dependent on correct
    # definitions in FeaturesSmart. I think it's trying to avoid showing
    # too many results when the GESComponent has been incorectly reported
    # on the <Target> records.
    ok_features = set([f.name for f in get_features(self.descriptor)])
    out_filtered = []

    for row in out:
        # Because some Features are missing from FeaturesSmart
        # we consider 'D1' descriptor valid for all 'D1.x'
        # and we keep the data if 'D1' is present in the GESComponents
        # countries_filter = for these countries DO NOT filter by features
        ges_comps = getattr(row, 'GESComponents', ())
        countries_filter = ('RO', 'DK', 'CY', 'MT')

        if 'D1' in ges_comps and self.country_code not in countries_filter:
            out_filtered.append(row)
            continue

        feats = set(row.Features.split(','))

        if feats.intersection(ok_features):
            out_filtered.append(row)

    return out_filtered
def get_data_from_view_Art9(self):
    """ Query the V_ART9_GES_2018 db view for the current country,
    region and descriptor, filtering D1.x rows by feature.

    :return: list of filtered rows
    """
    t = sql2018.t_V_ART9_GES_2018

    descriptor = get_descriptor(self.descriptor)
    all_ids = list(descriptor.all_ids())

    if self.descriptor.startswith('D1.'):
        # D1.x data can also be reported under the generic 'D1' component
        all_ids.append('D1')

    conditions = [
        t.c.CountryCode == self.country_code,
        t.c.GESComponent.in_(all_ids)
    ]

    if self.country_code != 'DK':
        # NULL regions are kept: see the justification-for-delay note below
        conditions.insert(
            1, or_(t.c.Region == self.country_region_code,
                   t.c.Region.is_(None))
        )
    else:
        # Handle the case of Denmark that have submitted a lot of
        # information under the DK-TOTAL MRU, which doesn't have a region
        # attached.
        conditions.insert(1,
                          or_(t.c.Region == 'NotReported',
                              t.c.Region == self.country_region_code,
                              t.c.Region.is_(None)
                              )
                          )

    count, q = db.get_all_records_ordered(
        t,
        ('GESComponent', ),
        *conditions
    )

    ok_features = set([f.name for f in get_features(self.descriptor)])
    out = []

    # There are cases when justification for delay is reported
    # for a ges component. In these cases region, mru, features and
    # other fields are empty. Justification for delay should be showed
    # for all regions, mrus
    for row in q:
        if not row.Features:
            out.append(row)
            continue

        if not self.descriptor.startswith('D1.'):
            out.append(row)
            continue

        feats = set(row.Features.split(','))

        if feats.intersection(ok_features):
            out.append(row)

    return out
def get_data_from_view(self, article):
    """ Dispatch to the article-specific view query method """
    handler = getattr(self, 'get_data_from_view_' + article)

    return handler()
@db.use_db_session('2018')
@timeit
def get_data_from_db(self):
    """ Query the 2018 database, wrap the rows and group them by marine
    reporting unit.

    :return: list of (mru, rows) tuples, sorted by natural mru order
    """
    data = self.get_data_from_view(self.article)
    data = [Proxy2018(row, self) for row in data]

    if self.request.form.get('split-mru') and (len(data) > 2000):
        # large datasets can be split and browsed per marine unit
        if self.muids:
            if getattr(self, 'focus_muid', None) is None:
                self.focus_muid = self.muids[0].name

            self.focus_muids = self._get_muids_from_data(data)

    if self.article == 'Art8':
        order = self._get_order_cols_Art8(self.descriptor)
        data = consolidate_singlevalue_to_list(data,
                                               'IndicatorCode',
                                               order,
                                               )
        data_by_mru = group_by_mru(data)

    if self.article == 'Art10':
        # data_by_mru = group_by_mru(data)
        order = self._get_order_cols_Art10()
        data_by_mru = consolidate_singlevalue_to_list(
            data, 'MarineReportingUnit', order
        )

        # Art 10/9 data is shown in a single (untitled) table
        if data_by_mru:
            data_by_mru = {"": data_by_mru}
        else:
            data_by_mru = {}

    if self.article == 'Art9':
        # data_by_mru = consolidate_date_by_mru(data_by_mru)
        data_by_mru = consolidate_singlevalue_to_list(
            data, 'MarineReportingUnit'
        )

        if data_by_mru:
            data_by_mru = {"": data_by_mru}
        else:
            data_by_mru = {}

    insert_missing_criterions(data_by_mru, self.descriptor_obj)

    res = []
    fields = get_report_definition(self.article).get_fields()

    for mru, rows in data_by_mru.items():
        _rows = items_to_rows(rows, fields)
        res.append((mru, _rows))

    # resort the results by marine reporting unit
    res_sorted = sorted(
        res, key=lambda r: natural_sort_key(r[0].__repr__()))

    return res_sorted
def get_snapshots(self):
    """ Returns all snapshots, in the chronological order they were created
    """
    # TODO: fix this. I'm hardcoding it now to always use generated data
    db_data = self.get_data_from_db()
    snapshot = (datetime.now(), db_data)

    return [snapshot]

    # snapshots = getattr(self.context, 'snapshots', None)
    #
    # if snapshots is None:
    #     self.context.snapshots = PersistentList()
    #
    #     db_data = self.get_data_from_db()
    #     snapshot = (datetime.now(), db_data)
    #
    #     self.context.snapshots.append(snapshot)
    #     self.context.snapshots._p_changed = True
    #
    #     self.context._p_changed = True
    #
    #     return self.context.snapshots
    #
    # return snapshots
def get_report_data(self):
    """ Returns the data to display in the template

    Returns a list of "rows (tuples of label: data)"
    """
    snapshots = self.get_snapshots()

    # read the snapshot date selected in the inline subform
    self.subform.update()
    fd, errors = self.subform.extractData()
    date_selected = fd['sd']

    # default to the most recent snapshot
    data = snapshots[-1][1]

    if hasattr(self, 'focus_muid'):
        # filter the data based on selected muid
        # this is used to optmize display of really long data
        data = [t for t in data if t[0].name == self.focus_muid]

    if date_selected:
        filtered = [x for x in snapshots if x[0] == date_selected]

        if filtered:
            date, data = filtered[0]
        else:
            raise ValueError("Snapshot doesn't exist at this date")

    return data
def _get_muids_from_data(self, data):
    """ Sorted list of distinct marine reporting units in the data """
    seen = set()

    for proxy in data:
        # '__o' is the wrapped original row (getattr avoids name mangling)
        wrapped = getattr(proxy, '__o')
        seen.add(wrapped.MarineReportingUnit)

    return sorted(seen)
# def get_muids_from_data(self, data):
# # TODO: this shouldn't exist anymore
# if isinstance(data[0][0], (unicode, str)):
# all_muids = sorted(set([x[0] for x in data]))
#
# return ', '.join(all_muids)
#
# all_muids = [x[0] for x in data]
# seen = []
# muids = []
#
# for muid in all_muids:
# name = muid.name
#
# if name in seen:
# continue
#
# seen.append(name)
# muids.append(muid)
#
# return ItemList(rows=muids)
@db.use_db_session('2018')
@timeit
def get_report_metadata(self):
    """ Returns metadata about the reported information: the latest
    ReportedInformation row for this country and article schema.
    """
    t = sql2018.ReportedInformation

    schema_by_article = {
        'Art8': 'ART8_GES',
        'Art9': 'ART9_GES',
        'Art10': 'ART10_Targets',
    }

    count, item = db.get_item_by_conditions(
        t,
        'ReportingDate',
        t.CountryCode == self.country_code,
        t.Schema == schema_by_article[self.article],
        reverse=True,
    )

    return item
@property
def report_header_title(self):
    """ Title shown in the 2018 report header """
    parts = (self.article, self.descriptor_title,
             self.country_name, self.country_region_name)

    return "Member State report / {} / 2018 / {} / {} / {}".format(*parts)
def get_report_header(self):
    """ Render the report header from the latest report metadata """
    report = self.get_report_metadata()

    link = report_by = report_date = None

    if report:
        link = report.ReportedFileLink
        # display only the file name, keep the full link as target
        link = (link.rsplit('/', 1)[1], link)
        report_by = report.ContactOrganisation
        report_date = report.ReportingDate

    return self.report_header_template(
        title=self.report_header_title,
        factsheet=None,
        # TODO: find out how to get info about who reported
        report_by=report_by,
        source_file=link,
        report_due='2018-10-15',
        report_date=report_date,
        help_text=self.help_text,
        multiple_source_files=False
    )
@cache(get_reportdata_key, dependencies=['translation'])
def render_reportdata(self):
    """ Query the 2018 db and render the report data template (cached) """
    logger.info("Quering database for 2018 report data: %s %s %s %s",
                self.country_code, self.country_region_code, self.article,
                self.descriptor)

    data = self.get_report_data()
    report_header = self.get_report_header()
    template = self.get_template(self.article)

    return template(data=data, report_header=report_header)
def data_to_xls(self, data):
    """ Build an xlsx workbook from [(worksheet title, rows), ...].

    :return: BytesIO with the workbook contents
    """
    out = BytesIO()
    workbook = xlsxwriter.Workbook(out, {'in_memory': True})

    for sheet_ix, (wtitle, wdata) in enumerate(data):
        # prefix with an index to keep sheet names unique and short
        sheet_name = '{}_{}'.format(sheet_ix + 1, unicode(wtitle)[:28])
        worksheet = workbook.add_worksheet(sheet_name)

        for row_ix, (row_label, row_values) in enumerate(wdata):
            worksheet.write(row_ix, 0, row_label.title)

            for col_ix, value in enumerate(row_values):
                value = unicode(value) or ''
                translated = get_translated(value, self.country_code) or value
                worksheet.write(row_ix, col_ix + 1, translated)

    workbook.close()
    out.seek(0)

    return out
def download(self):
    """ Stream the current report data as an xlsx download """
    xlsio = self.data_to_xls(self.get_report_data())

    set_header = self.request.response.setHeader
    set_header('Content-Type',
               'application/vnd.openxmlformats-officedocument.'
               'spreadsheetml.sheet')

    fname = "-".join([self.country_code, self.country_region_code,
                      self.article, self.descriptor])
    set_header('Content-Disposition',
               'attachment; filename=%s.xlsx' % fname)

    return xlsio.read()
def auto_translate(self, data=None):
    """ Send the distinct translatable values of the report data to the
    translation service, then redirect back to the report view.
    """
    if not data:
        data = self.get_report_data()

    # report_def = REPORT_DEFS[self.year][self.article]
    # translatables = report_def.get_translatable_fields()
    translatables = self.TRANSLATABLES
    seen = set()

    for table in data:
        muid, table_data = table

        for row in table_data:
            field, cells = row

            if field.name in translatables:
                for value in cells:
                    if value not in seen:
                        # request each distinct value only once
                        retrieve_translation(self.country_code, value)
                        seen.add(value)

    messages = IStatusMessage(self.request)
    messages.add(u"Auto-translation initiated, please refresh "
                 u"in a couple of minutes", type=u"info")

    url = self.context.absolute_url() + '/@@view-report-data-2018'

    return self.request.response.redirect(url)
def get_template(self, article):
    """ Look up the rendering template attribute for the given article """
    return getattr(self, article, None)
@timeit
def __call__(self):
    """ Render the 2018 report page; handles the 'focus_muid',
    'download' and 'translate' request parameters.
    """
    # allow focusing on a single muid if the data is too big
    if 'focus_muid' in self.request.form:
        self.focus_muid = self.request.form['focus_muid'].strip()
        # self.focus_muid = 'BAL-AS-EE-ICES_SD_29'

    self.content = ''
    template = self.get_template(self.article)

    if not template:
        return self.index()

    self.subform = self.get_form()

    if ('download' in self.request.form):       # and report_data
        return self.download()

    if 'translate' in self.request.form and self.can_view_assessment_data:
        return self.auto_translate()

    trans_edit_html = self.translate_view()()

    # FIX: parenthesized print, consistent with the other print call in
    # this module and valid under both Python 2 and 3
    print("will render report")
    report_html = self.render_reportdata()

    self.report_html = report_html + trans_edit_html

    @timeit
    def render_html():
        # wrapped so the final template render is timed separately
        return self.index()

    return render_html()
def get_form(self):
    """ Return the snapshot selection subform, creating it on first use """
    if not self.subform:
        self.subform = SnapshotSelectForm(self, self.request)

    return self.subform
class ReportData2018Secondary(ReportData2018):
    """ 2018 report data for the secondary articles (3, 4, 7), rendered
    from the reported XML files on CDR instead of the database.
    """

    implements(IReportDataViewSecondary)

    # secondary articles are not bound to a descriptor / region
    descriptor = 'Not linked'
    country_region_code = 'No region'

    Art3 = Template('pt/report-data-secondary-2018.pt')
    Art4 = Template('pt/report-data-secondary-2018.pt')
    Art7 = Template('pt/report-data-secondary-2018.pt')
def article_name(self):
    """ Display name of the current article; Articles 3 & 4 are reported
    together, so they are shown as a combined name.
    """
    # NOTE(review): relies on the base article_name being callable with
    # an optional article argument - confirm against the base class
    get_art_name = super(ReportData2018Secondary, self).article_name

    if self.article not in ('Art3', 'Art4'):
        return get_art_name()

    art_name = '{} & {}'.format(get_art_name('Art3'),
                                get_art_name('Art4'))

    return art_name
def get_previus_url(self, grouped_urls, url):
    """ Return the url that follows *url* within its region group, or
    None when the url is the last of its group (no previous file), or
    when it belongs to no group.

    :param grouped_urls: {region: [url, ...]} mapping
    :param url: the envelope url to look up
    """
    for group in grouped_urls.values():
        if url not in group:
            # not this region's group
            continue

        if group[-1] == url:
            # last of its group: it has no previous file
            return None

        return group[group.index(url) + 1]
def get_report_metadata_from_view(self, view, filename):
    """ Build ReportingInformation2018 from the reported XML root.

    :param view: article implementation view providing the XML root
    :param filename: the reported file name
    :return: ReportingInformation2018; ContactOrganisation and
        ReportingDate are None when the root lacks those attributes
    """
    fileurl = get_report_file_url(filename)
    root = view.get_report_file_root(filename)

    reporters = date = None

    try:
        reporters = root.attrib['GeneratedBy']
        date = root.attrib['CreationDate']
    # FIX: was a bare 'except'; only missing attributes/keys are
    # expected here, anything else should propagate
    except (AttributeError, KeyError):
        pass

    return ReportingInformation2018(fileurl, reporters, date)
@property
def report_header_title(self):
    """ Header title; Articles 3 & 4 are shown combined as 'Art3-4' """
    if self.article in ('Art3', 'Art4'):
        article = 'Art3-4'
    else:
        article = self.article

    return "Member State report: {} / {}".format(self.country_name, article)
def get_template(self, article):
    """ Look up the template; dashes are stripped from the article id
    before the attribute lookup """
    return getattr(self, article.replace('-', ''), None)
def get_implementation_view(self, filename, prev_filename):
    """ In other articles (8, 9, 10) for 2018 year,
    we get the data from the DB (MSFD2018_production)

    Here instead we will get the data from the report xml from CDR
    by initializing and calling the view's class to setup the data
    """
    klass = {'Art7': Article7_2018,
             'Art3': Article34_2018,
             'Art4': Article34_2018}.get(self.article)

    init_args = [self, self.request, self.country_code,
                 self.country_region_code, self.descriptor, self.article,
                 self.muids, filename]

    if self.article in ['Art3', 'Art4'] and prev_filename:
        # for Art 3/4 also pass the previous file's MRUs, so the view
        # can identify which MRUs were added/removed between versions
        prev_view = klass(
            self, self.request, self.country_code, self.country_region_code,
            self.descriptor, self.article, self.muids, prev_filename
        )
        prev_view.setup_data()
        previous_mrus = prev_view.available_mrus
        init_args.append(previous_mrus)

    view = klass(*init_args)
    view.setup_data()

    return view
def auto_translate(self):
    """ Send the translatable values to the translation service, then
    redirect back to the report view.
    """
    # presumably populates self.translatable_data /
    # self.translatable_extra_data as a side effect - TODO confirm
    self.render_reportdata()

    seen = set()
    all_translatables = (self.translatable_data +
                         self.translatable_extra_data)

    for value in all_translatables:
        if not value:
            continue

        if not isinstance(value, basestring):
            # only string values can be translated
            continue

        if value not in seen:
            # request each distinct value only once
            retrieve_translation(self.country_code, value)
            seen.add(value)

    messages = IStatusMessage(self.request)
    messages.add(u"Auto-translation initiated, please refresh "
                 u"in a couple of minutes", type=u"info")

    url = self.context.absolute_url() + '/@@view-report-data-2018'

    return self.request.response.redirect(url)
def get_translatable_data(self, view):
    """ Collect the distinct raw values of the view's translatable rows """
    values = []

    for row in view.rows:
        if row.title in self.TRANSLATABLES:
            values.extend(row.raw_values)

    return set(values)
def render_reportdata(self):
    """
    1. Get all reported files under Article 7 or 3/4
    2. Render the data separately for all files
    3. Concat the rendered htmls into a single
    :return: rendered html
    """
    # Accumulated across all files, then stored on self at the end so
    # auto_translate() can pick them up
    translatable_extra_data = []
    translatable_data = []
    template = self.get_template(self.article)
    urls = get_all_report_filenames(self.country_code, self.article)
    rendered_results = []
    # identify order of files, grouped by region. If multiple regions are
    # reported in a file, then just sort them by envelope release date.
    # once sorted, create view for each file. Each view can potentially get
    # a reference to the previous file data.
    grouped_urls = defaultdict(list)
    for url in urls:
        # first pass: no previous-file reference, only used for grouping
        view = self.get_implementation_view(url, None)
        regions = "-".join(view.available_regions)
        grouped_urls[regions].append(url)
    for (index, url) in enumerate(urls):
        prev_url = self.get_previus_url(grouped_urls, url)
        # For article 3/4 2018, the data from previous "version" of the
        # file should also be sent. Then it will be possible to identify
        # which MRUs have been added/removed
        view = self.get_implementation_view(url, prev_url)
        translatable_extra_data.extend(view.translatable_extra_data)
        translatable_data.extend(self.get_translatable_data(view))
        report = self.get_report_metadata_from_view(view, url)
        # Report Header
        report_by = None
        report_date = get_envelope_release_date(url)
        if report:
            report_by = report.ContactOrganisation
            # report_date = report.ReportingDate
        res = []
        # (display name, download link) for the source XML file
        source_file = (url.rsplit('/', 1)[-1], url + '/manage_document')
        factsheet = get_factsheet_url(url)
        view() # updates the view
        data = [Proxy2018(row, self) for row in view.cols]
        # Art7 data is grouped per marine reporting unit; Art3/4 are
        # rendered as a single group
        if self.article == 'Art7':
            data_by_mru = group_by_mru(data)
        else:
            data_by_mru = {'no mru': data}
        fields = get_report_definition(self.article).get_fields()
        for mru, rows in data_by_mru.items():
            _rows = items_to_rows(rows, fields)
            res.append((mru, _rows))
        # only the first file gets the page title and the navigation
        report_header = self.report_header_template(
            title=(index == 0 and self.report_header_title or ''),
            factsheet=factsheet,
            # TODO: find out how to get info about who reported
            report_by=report_by,
            source_file=source_file,
            report_due=None,
            report_date=report_date.date(),
            help_text=self.help_text,
            multiple_source_files=False,
            show_navigation=index == 0,
        )
        rendered_results.append(template(data=res,
                                         report_header=report_header,
                                         show_navigation=False))
    self.translatable_extra_data = translatable_extra_data
    self.translatable_data = translatable_data
    res = "<hr/>".join(rendered_results)
    return res or "No data found"
| {"/src/wise/msfd/compliance/nationaldescriptors/a8esa.py": ["/src/wise/msfd/compliance/nationaldescriptors/data.py"], "/src/wise/msfd/compliance/nationalsummary/main.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/regionalsummary/introduction.py": ["/src/wise/msfd/compliance/nationalsummary/introduction.py"], "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/nationalsummary/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py"], "/src/wise/msfd/compliance/regionalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationalsummary/pdfexport.py", "/src/wise/msfd/compliance/regionalsummary/introduction.py"], "/src/wise/msfd/compliance/regionaldescriptors/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/utils.py", "/src/wise/msfd/compliance/regionaldescriptors/data.py"], "/src/wise/msfd/compliance/landingpage.py": ["/src/wise/msfd/compliance/assessment.py"], "/src/wise/msfd/__init__.py": ["/src/wise/msfd/patch.py"], "/src/wise/msfd/search/a9.py": ["/src/wise/msfd/__init__.py", "/src/wise/msfd/search/base.py"], "/src/wise/msfd/compliance/nationalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py", "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py", "/src/wise/msfd/compliance/nationalsummary/introduction.py"]} |
42,563 | laszlocseh/wise.msfd | refs/heads/master | /src/wise/msfd/compliance/nationaldescriptors/data.py | from pkg_resources import resource_filename
from wise.msfd.compliance.utils import ReportDefinition
# Relative paths (within this package) of the XML report definition files
definition_files = {
    '2018': 'data/report_2018_def.xml',
    '2012': 'data/report_2012_def.xml',
}

f_2012 = resource_filename(__package__, definition_files['2012'])
f_2018 = resource_filename(__package__, definition_files['2018'])

# year -> article -> ReportDefinition lookup table.
# Note: 'Art4' deliberately reuses the 'Art3' definition in both years.
REPORT_DEFS = {
    '2018': {
        'Art8': ReportDefinition(f_2018, 'Art8'),
        'Art9': ReportDefinition(f_2018, 'Art9'),
        'Art10': ReportDefinition(f_2018, 'Art10'),
        'Art3': ReportDefinition(f_2018, 'Art3'),
        'Art4': ReportDefinition(f_2018, 'Art3'),
        'Art7': ReportDefinition(f_2018, 'Art7'),
    },
    '2012': {
        'Art8a': ReportDefinition(f_2012, 'Art8a'),
        'Art8b': ReportDefinition(f_2012, 'Art8b'),
        'Art9': ReportDefinition(f_2012, 'Art9'),
        'Art10': ReportDefinition(f_2012, 'Art10'),
        'Art3': ReportDefinition(f_2012, 'Art3'),
        'Art4': ReportDefinition(f_2012, 'Art3'),
        'Art7': ReportDefinition(f_2012, 'Art7'),
        'Art8esa': ReportDefinition(f_2012, 'Art8esa'),
    }
}
def get_report_definition(article, year='2018'):
    """Return the ReportDefinition for *article*.

    Generalized to accept the reporting year: the 2012 definitions in
    REPORT_DEFS were previously unreachable through this accessor.

    :param article: article id, e.g. 'Art8', 'Art9', 'Art10'
    :param year: key into REPORT_DEFS; defaults to '2018' so existing
        single-argument callers keep their behavior
    :raises KeyError: when the year/article combination is not defined
    """
    return REPORT_DEFS[year][article]
| {"/src/wise/msfd/compliance/nationaldescriptors/a8esa.py": ["/src/wise/msfd/compliance/nationaldescriptors/data.py"], "/src/wise/msfd/compliance/nationalsummary/main.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/regionalsummary/introduction.py": ["/src/wise/msfd/compliance/nationalsummary/introduction.py"], "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/nationalsummary/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py"], "/src/wise/msfd/compliance/regionalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationalsummary/pdfexport.py", "/src/wise/msfd/compliance/regionalsummary/introduction.py"], "/src/wise/msfd/compliance/regionaldescriptors/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/utils.py", "/src/wise/msfd/compliance/regionaldescriptors/data.py"], "/src/wise/msfd/compliance/landingpage.py": ["/src/wise/msfd/compliance/assessment.py"], "/src/wise/msfd/__init__.py": ["/src/wise/msfd/patch.py"], "/src/wise/msfd/search/a9.py": ["/src/wise/msfd/__init__.py", "/src/wise/msfd/search/base.py"], "/src/wise/msfd/compliance/nationalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py", "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py", "/src/wise/msfd/compliance/nationalsummary/introduction.py"]} |
42,564 | laszlocseh/wise.msfd | refs/heads/master | /src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py | import logging
from collections import namedtuple
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile
from wise.msfd.compliance.assessment import AssessmentDataMixin
from wise.msfd.compliance.interfaces import (IDescriptorFolder,
INationalDescriptorAssessment,
INationalRegionDescriptorFolder)
from wise.msfd.compliance.scoring import (CONCLUSIONS, get_range_index,
OverallScores)
from wise.msfd.compliance.utils import ordered_regions_sortkey
from wise.msfd.gescomponents import DESCRIPTOR_TYPES
from wise.msfd.utils import (ItemList, TemplateMixin, db_objects_to_dict,
fixedorder_sortkey, timeit)
from ..nationaldescriptors.main import (ARTICLE_WEIGHTS,
get_assessment_data_2012_db,
filter_assessment_data_2012)
from .base import BaseNatSummaryView
from .odt_utils import create_heading, create_table_descr
logger = logging.getLogger('wise.msfd')
# Row model for the descriptor-level assessment overview table: the
# textual summaries, the 2018 phase scores (adequacy / consistency /
# coherence), and the 2012 values with the change between exercises.
DESCRIPTOR_SUMMARY = namedtuple(
    'DescriptorSummary',
    ['assessment_summary', 'progress_assessment', 'recommendations',
     'adequacy', 'consistency', 'coherence', 'overall_score_2018',
     'overall_score_2012', 'change_since_2012', 'coherence_2012',
     'coherence_change_since_2012',]
)
class DescriptorLevelAssessments(BaseNatSummaryView, AssessmentDataMixin):
    """Descriptor-level assessments section of the national summary.

    Builds, per region / descriptor / article, a DESCRIPTOR_SUMMARY row
    combining 2018 phase scores with the corresponding 2012 values, and
    renders them through a page template (HTML) or as ODT elements.
    """

    template = ViewPageTemplateFile('pt/descriptor-level-assessments.pt')

    # NOTE(review): class-level mutable dict, shared by all instances;
    # keyed by (region, descriptor, article) — confirm cross-request
    # sharing is intended
    overall_scores = {}

    article_titles = {
        'Art9': 'Article 9 - GES Determination',
        'Art8': 'Article 8 - Initial Assessment',
        'Art10': 'Article 10 - Environmental Targets'
    }
    descriptor_types = DESCRIPTOR_TYPES

    def get_article_title(self, article):
        """Return the human-readable heading for an article id."""
        return self.article_titles[article]

    def get_assessment_data_2012(self, region_code, country_name,
                                 descriptor, article):
        """Return (int score, conclusion text) from the 2012 assessment.

        Falls back to (0, 'Not found') when the data cannot be
        retrieved or any lookup step fails.
        """
        try:
            db_data_2012 = get_assessment_data_2012_db(
                country_name,
                descriptor,
                article
            )
            assessments_2012 = filter_assessment_data_2012(
                db_data_2012,
                region_code,
                [] # descriptor_criterions,
            )
            if assessments_2012.get(country_name):
                score_2012 = assessments_2012[country_name].score
                conclusion_2012 = assessments_2012[country_name].overall_ass
            else: # fallback: take the first country present in the result
                # NOTE(review): dict.keys()[0] is Python 2 only; under
                # Python 3 this raises and falls into the except below
                ctry = assessments_2012.keys()[0]
                score_2012 = assessments_2012[ctry].score
                conclusion_2012 = assessments_2012[ctry].overall_ass
        # NOTE(review): bare except also swallows KeyboardInterrupt/
        # SystemExit — consider narrowing to Exception
        except:
            logger.exception("Could not get assessment data for 2012")
            score_2012 = 0
            conclusion_2012 = 'Not found'
        __score = int(round(score_2012 or 0))
        return __score, conclusion_2012 or 'Not found'

    def _setup_phase_overall_scores(self, phase_overall_scores, assess_data,
                                    article):
        """Accumulate per-question weighted scores into phase totals and
        derive each phase's conclusion/color.
        """
        # sum up the weighted scores of every answered '*_Score' entry,
        # grouped by the question's phase (its klass)
        for k, score in assess_data.items():
            if '_Score' not in k:
                continue
            if not score:
                continue
            is_not_relevant = getattr(score, 'is_not_relevant', False)
            q_klass = score.question.klass
            weighted_score = getattr(score, 'weighted_score', 0)
            max_weighted_score = getattr(score, 'max_weighted_score', 0)
            if not is_not_relevant:
                p_score = getattr(phase_overall_scores, q_klass)
                p_score['score'] += weighted_score
                p_score['max_score'] += max_weighted_score
        phases = phase_overall_scores.article_weights[article].keys()
        for phase in phases:
            # set the conclusion and color based on the score for each phase
            phase_scores = getattr(phase_overall_scores, phase)
            score_val = phase_overall_scores.get_range_index_for_phase(phase)
            # consistency is not assessed for Art9
            if phase == 'consistency' and article == 'Art9':
                phase_scores['conclusion'] = ('-', 'Not relevant')
                phase_scores['color'] = 0
            else:
                phase_scores['conclusion'] = (score_val,
                                              self.get_conclusion(score_val))
                phase_scores['color'] = self.get_color_for_score(score_val)
        return phase_overall_scores

    def _get_article_data(self, region_code, country_name, descriptor,
                          assess_data, article):
        """Build the DESCRIPTOR_SUMMARY row for one article.

        Each *_score* entry below is a ("Conclusion (value)", color)
        tuple ready for display.
        """
        phase_overall_scores = OverallScores(ARTICLE_WEIGHTS)
        # Get the adequacy, consistency scores from national descriptors
        phase_overall_scores = self._setup_phase_overall_scores(
            phase_overall_scores, assess_data, article)
        # Get the coherence scores from regional descriptors
        phase_overall_scores.coherence = self.get_coherence_data(
            region_code, descriptor, article
        )
        adequacy_score_val, conclusion = \
            phase_overall_scores.adequacy['conclusion']
        # score = phase_overall_scores.get_score_for_phase('adequacy')
        adequacy = ("{} ({})".format(conclusion, adequacy_score_val),
                    phase_overall_scores.adequacy['color'])
        score_val, conclusion = phase_overall_scores.consistency['conclusion']
        # score = phase_overall_scores.get_score_for_phase('consistency')
        consistency = ("{} ({})".format(conclusion, score_val),
                       phase_overall_scores.consistency['color'])
        cscore_val, conclusion = phase_overall_scores.coherence['conclusion']
        # score = phase_overall_scores.get_score_for_phase('coherence')
        coherence = ("{} ({})".format(conclusion, cscore_val),
                     phase_overall_scores.coherence['color'])
        overallscore_val, score = phase_overall_scores.get_overall_score(
            article
        )
        conclusion = self.get_conclusion(overallscore_val)
        overall_score_2018 = (
            "{} ({})".format(conclusion, overallscore_val),
            self.get_color_for_score(overallscore_val)
        )
        # free-text fields entered by the assessors ('-' when missing)
        assessment_summary = (
            assess_data.get('{}_assessment_summary'.format(article)) or '-'
        )
        progress_assessment = (
            assess_data.get('{}_progress'.format(article)) or '-'
        )
        recommendations = (
            assess_data.get('{}_recommendations'.format(article)) or '-'
        )
        score_2012, conclusion_2012 = self.get_assessment_data_2012(
            region_code, country_name, descriptor, article
        )
        overall_score_2012 = ("{} ({})".format(conclusion_2012, score_2012),
                              self.get_color_for_score(score_2012))
        # cache the 2018 overall score for later reuse (class-level dict)
        __key = (region_code, descriptor, article)
        self.overall_scores[__key] = overall_score_2018
        # change is computed against the national adequacy score
        change_since_2012 = int(adequacy_score_val - score_2012)
        reg_assess_2012 = self.get_reg_assessments_data_2012(
            article, region_code, descriptor
        )
        coherence_2012 = ('-', '0')
        coherence_change_since_2012 = '-'
        if reg_assess_2012:
            __score = reg_assess_2012[0].overall_score
            coherence_2012 = ("{} ({})".format(reg_assess_2012[0].conclusion,
                                               __score),
                              self.get_color_for_score(__score))
            coherence_change_since_2012 = int(cscore_val - __score)
        res = DESCRIPTOR_SUMMARY(
            assessment_summary, progress_assessment, recommendations,
            adequacy, consistency, coherence, overall_score_2018,
            overall_score_2012, change_since_2012,
            coherence_2012, coherence_change_since_2012
        )
        return res

    @timeit
    def setup_descriptor_level_assessment_data(self):
        """
        :return: res = [("Baltic Sea", [
                ("D7 - Hydrographical changes", [
                        ("Art8", DESCRIPTOR_SUMMARY),
                        ("Art9", DESCRIPTOR_SUMMARY),
                        ("Art10", DESCRIPTOR_SUMMARY),
                    ]
                ),
                ("D1.4 - Birds", [
                        ("Art8", DESCRIPTOR_SUMMARY),
                        ("Art9", DESCRIPTOR_SUMMARY),
                        ("Art10", DESCRIPTOR_SUMMARY),
                    ]
                ),
            ]
        )]
        """
        res = []
        # locate this country's folder under the national descriptors tree
        country_folder = [
            country
            for country in self._nat_desc_folder.contentValues()
            if country.id == self.country_code.lower()
        ][0]
        self.nat_desc_country_folder = country_folder
        region_folders = self.filter_contentvalues_by_iface(
            country_folder, INationalRegionDescriptorFolder
        )
        region_folders_sorted = sorted(
            region_folders, key=lambda i: ordered_regions_sortkey(i.id.upper())
        )
        for region_folder in region_folders_sorted:
            region_code = region_folder.id
            region_name = region_folder.title
            descriptor_data = []
            descriptor_folders = self.filter_contentvalues_by_iface(
                region_folder, IDescriptorFolder
            )
            for descriptor_folder in descriptor_folders:
                desc_id = descriptor_folder.id.upper()
                desc_name = descriptor_folder.title
                articles = []
                article_folders = self.filter_contentvalues_by_iface(
                    descriptor_folder, INationalDescriptorAssessment
                )
                for article_folder in article_folders:
                    article = article_folder.title
                    assess_data = self._get_assessment_data(article_folder)
                    article_data = self._get_article_data(
                        region_code.upper(), country_folder.title,
                        desc_id, assess_data, article
                    )
                    articles.append((article, article_data))
                # show articles in the canonical order (Art9/8/10...)
                articles = sorted(
                    articles,
                    key=lambda i: fixedorder_sortkey(i[0], self.ARTICLE_ORDER)
                )
                descriptor_data.append(
                    ((desc_id, desc_name), articles)
                )
            res.append((region_name, descriptor_data))
        return res

    def get_odt_data(self, document):
        """Build the ODT heading/table elements for the section.

        Requires __call__ to have run first (it stores the computed
        data on self.descr_assess_data).
        """
        res = []
        h = create_heading(1, u"Descriptor-level assessments")
        res.append(h)
        all_data = self.descr_assess_data
        for region in all_data:
            h = create_heading(2, region[0])
            res.append(h)
            all_descriptors_data = region[1]
            # iterate the fixed descriptor-type grouping, looking up the
            # computed data for each descriptor id
            for descriptor_type in self.descriptor_types:
                h = create_heading(3, descriptor_type[0])
                res.append(h)
                for descriptor in descriptor_type[1]:
                    descriptor_data = [
                        d
                        for d in all_descriptors_data
                        if d[0][0] == descriptor
                    ][0]
                    h = create_heading(4, descriptor_data[0][1])
                    res.append(h)
                    articles = descriptor_data[1]
                    for article in articles:
                        h = create_heading(5, article[0])
                        res.append(h)
                        article_data = article[1]
                        t = create_table_descr(document, article_data)
                        res.append(t)
        return res

    def __call__(self):
        # compute once, keep for get_odt_data, then render the template
        data = self.setup_descriptor_level_assessment_data()
        self.descr_assess_data = data
        return self.template(data=data)
| {"/src/wise/msfd/compliance/nationaldescriptors/a8esa.py": ["/src/wise/msfd/compliance/nationaldescriptors/data.py"], "/src/wise/msfd/compliance/nationalsummary/main.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/regionalsummary/introduction.py": ["/src/wise/msfd/compliance/nationalsummary/introduction.py"], "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/nationalsummary/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py"], "/src/wise/msfd/compliance/regionalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationalsummary/pdfexport.py", "/src/wise/msfd/compliance/regionalsummary/introduction.py"], "/src/wise/msfd/compliance/regionaldescriptors/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/utils.py", "/src/wise/msfd/compliance/regionaldescriptors/data.py"], "/src/wise/msfd/compliance/landingpage.py": ["/src/wise/msfd/compliance/assessment.py"], "/src/wise/msfd/__init__.py": ["/src/wise/msfd/patch.py"], "/src/wise/msfd/search/a9.py": ["/src/wise/msfd/__init__.py", "/src/wise/msfd/search/base.py"], "/src/wise/msfd/compliance/nationalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py", "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py", "/src/wise/msfd/compliance/nationalsummary/introduction.py"]} |
42,565 | laszlocseh/wise.msfd | refs/heads/master | /src/wise/msfd/patch.py | from collective.z3cform.datagridfield import interfaces
from zope.interface import Interface
class IDataGridFieldLayer(Interface):
    """Replacement marker interface for datagridfield's browser layer.

    Used by install() below to monkey-patch
    collective.z3cform.datagridfield.interfaces.IDataGridFieldLayer.
    """
def install():
    # Monkey-patch: replace the datagridfield browser-layer marker
    # interface with the locally defined one.
    interfaces.IDataGridFieldLayer = IDataGridFieldLayer
| {"/src/wise/msfd/compliance/nationaldescriptors/a8esa.py": ["/src/wise/msfd/compliance/nationaldescriptors/data.py"], "/src/wise/msfd/compliance/nationalsummary/main.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/regionalsummary/introduction.py": ["/src/wise/msfd/compliance/nationalsummary/introduction.py"], "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/nationalsummary/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py"], "/src/wise/msfd/compliance/regionalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationalsummary/pdfexport.py", "/src/wise/msfd/compliance/regionalsummary/introduction.py"], "/src/wise/msfd/compliance/regionaldescriptors/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/utils.py", "/src/wise/msfd/compliance/regionaldescriptors/data.py"], "/src/wise/msfd/compliance/landingpage.py": ["/src/wise/msfd/compliance/assessment.py"], "/src/wise/msfd/__init__.py": ["/src/wise/msfd/patch.py"], "/src/wise/msfd/search/a9.py": ["/src/wise/msfd/__init__.py", "/src/wise/msfd/search/base.py"], "/src/wise/msfd/compliance/nationalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py", "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py", "/src/wise/msfd/compliance/nationalsummary/introduction.py"]} |
42,566 | laszlocseh/wise.msfd | refs/heads/master | /src/wise/msfd/compliance/nationaldescriptors/utils.py | from collections import defaultdict
from itertools import chain
from operator import attrgetter
from Products.Five.browser import BrowserView
from wise.msfd.labels import GES_LABELS
from wise.msfd.utils import ItemLabel, ItemList, LabeledItemList, timeit
from .proxy import proxy_cmp
def consolidate_date_by_mru(data):
    """ Takes data (proxies of data) organized by mru and groups them according
    to similarity of data (while ignoring the mru of that proxied row)
    This is used by the A9 2018 report.

    :param data: dict of mru -> list of proxied rows
    :return: dict of LabeledItemList (the MRUs sharing identical data)
        -> list of representative rows
    """
    groups = []
    # Rows without MRU reported
    # This case applies for Art9, when justification for delay is reported
    rows_without_mru = []
    for obj in chain(*data.values()):
        found = False
        for group in groups:
            # compare only with the first object from a group because
            # all objects from a group should contain the same data
            first_from_group = group[0]
            if proxy_cmp(obj, first_from_group):
                group.append(obj)
                found = True
                # fix: stop at the first matching group; without the
                # break the row kept being compared against (and could
                # be appended to) the remaining groups
                break
        if not found:
            groups.append([obj])
    # regroup the data by mru, now that we found identical rows
    regroup = defaultdict(list)
    for batch in groups:
        # TODO: get a proper MarineUnitID object
        mrus = tuple(sorted(set([r.MarineReportingUnit for r in batch])))
        if mrus[0] is None:
            rows_without_mru.append(batch[0])
            continue
        regroup[mrus].append(batch[0])
    out = {}
    # rewrite the result keys to list of MRUs
    for mrus, rows in regroup.items():
        mrus_labeled = tuple([
            ItemLabel(row, u'{} ({})'.format(GES_LABELS.get('mrus', row), row))
            for row in mrus
        ])
        label = LabeledItemList(rows=mrus_labeled)
        # TODO how to explain better?
        # Skip rows from rows_without_mru if the GESComponent exists
        # in rows (we do not insert justification delay/non-use
        # if the GESComponent has reported data)
        # example: ges component D6C3, D6C4
        # .../fi/bal/d6/art9/@@view-report-data-2018
        ges_comps_with_data = set(x.GESComponent.id for x in rows)
        for row_extra in rows_without_mru:
            ges_comp = row_extra.GESComponent.id
            if ges_comp in ges_comps_with_data:
                continue
            rows.append(row_extra)
        # rows.extend(rows_without_mru)
        out[label] = rows
    # nothing had an MRU at all: emit a single placeholder group
    if not regroup and rows_without_mru:
        rows = ItemLabel('No Marine unit ID reported',
                         'No Marine unit ID reported')
        label = LabeledItemList(rows=(rows, ))
        out[label] = rows_without_mru
    return out
@timeit
def consolidate_singlevalue_to_list(proxies, fieldname, order=None):
    """Collapse similar proxies into one, turning *fieldname* into a list.

    Proxies hashing identically (the hash ignores *fieldname*) are
    merged: the first proxy of each group survives and its *fieldname*
    attribute is replaced by an ItemList of the whole group's values
    (only when at least one value is truthy).

    :param order: optional attribute names to sort the result by; the
        regional descriptor callers pass no ordering
    """
    grouped = defaultdict(list)

    for proxy in proxies:
        grouped[proxy.hash(fieldname)].append(proxy)

    res = []

    for similar in grouped.values():
        survivor = similar[0]
        values = [getattr(p, fieldname) for p in similar]

        if any(values):
            setattr(survivor, fieldname, ItemList(rows=values))

        res.append(survivor)

    if order:
        res.sort(key=attrgetter(*order))

    return res
class ViewSavedAssessmentData(BrowserView):
    """ Temporary class for viewing saved assessment data

    Maintenance view: lists national descriptor assessments whose
    saved history contains more than one entry, and (with ?fix in the
    request) trims each history down to its latest entry.
    """

    def get_saved_assessment_data(self):
        """Return (obj, saved_assessment_data) pairs with >1 history entry."""
        catalog = self.context.portal_catalog
        brains = catalog.searchResults(
            portal_type='wise.msfd.nationaldescriptorassessment',
            path={
                "query": "/Plone/marine/assessment-module"
                "/national-descriptors-assessments"
            }
        )
        res = []
        for brain in brains:
            obj = brain.getObject()
            if not hasattr(obj, 'saved_assessment_data'):
                continue
            sad = obj.saved_assessment_data
            if not sad:
                continue
            # single-entry histories need no fixing
            if len(sad) == 1:
                continue
            res.append((obj, obj.saved_assessment_data))
        return res

    def fix_assessment_data(self):
        """Replace each multi-entry history with just its latest entry."""
        # local import to avoid a circular import at module load time
        from wise.msfd.compliance.content import AssessmentData
        for obj, data in self.get_saved_assessment_data():
            last = data.last().copy()
            new_data = AssessmentData()
            new_data._append(last)
            obj.saved_assessment_data = new_data

    def __call__(self):
        # trigger the fix only when explicitly requested via ?fix
        if 'fix' in self.request.form:
            self.fix_assessment_data()
            return 'Done'
        return self.index()
42,567 | laszlocseh/wise.msfd | refs/heads/master | /src/wise/msfd/compliance/nationaldescriptors/main.py | """ Classes and views to implement the National Descriptors compliance page
"""
import re
from collections import namedtuple
from logging import getLogger
from sqlalchemy import or_
from zope.interface import implements
from persistent.list import PersistentList
from plone.api.content import transition
from plone.api.portal import get_tool
from plone.protect import CheckAuthenticator # , protect
from Products.Five.browser.pagetemplatefile import \
ViewPageTemplateFile as Template
from Products.statusmessages.interfaces import IStatusMessage
from wise.msfd import db, sql2018
from wise.msfd.compliance.assessment import (ANSWERS_COLOR_TABLE,
CONCLUSION_COLOR_TABLE,
AssessmentDataMixin)
from wise.msfd.compliance.base import NAT_DESC_QUESTIONS
from wise.msfd.compliance.content import AssessmentData
from wise.msfd.compliance.scoring import (CONCLUSIONS, get_overall_conclusion,
get_range_index, OverallScores)
from wise.msfd.compliance.utils import ordered_regions_sortkey
from wise.msfd.compliance.vocabulary import (REGIONAL_DESCRIPTORS_REGIONS,
SUBREGIONS_TO_REGIONS)
from wise.msfd.data import _extract_pdf_assessments
from wise.msfd.gescomponents import get_descriptor
from wise.msfd.utils import t2rt
from .base import BaseView
from ..interfaces import ICountryStartAssessments, ICountryStartReports
from .interfaces import (INationaldescriptorArticleView,
INationaldescriptorSecondaryArticleView)
logger = getLogger('wise.msfd')
# Matches a trailing parenthesized region, e.g. "Baltic Sea (BAL)"
REGION_RE = re.compile('.+\s\((?P<region>.+)\)$')

# Weights used to combine the adequacy / consistency / coherence phase
# scores into an overall article score; weights per article sum to 1.0.
# Secondary articles (3/4/7/8esa) are scored on adequacy alone.
ARTICLE_WEIGHTS = {
    'Art9': {
        'adequacy': 3/5.0,
        'consistency': 0.0,
        'coherence': 2/5.0
    },
    'Art8': {
        'adequacy': 3/5.0,
        'consistency': 1/5.0,
        'coherence': 1/5.0
    },
    'Art10': {
        'adequacy': 3/5.0,
        'consistency': 1/5.0,
        'coherence': 1/5.0
    },
    'Art3': {
        'adequacy': 1.0,
        'consistency': 0,
        'coherence': 0
    },
    'Art4': {
        'adequacy': 1.0,
        'consistency': 0,
        'coherence': 0
    },
    'Art7': {
        'adequacy': 1.0,
        'consistency': 0,
        'coherence': 0
    },
    'Art8esa': {
        'adequacy': 1.0,
        'consistency': 0,
        'coherence': 0
    }
}

# Row model for a 2012 assessment (per country)
Assessment2012 = namedtuple(
    'Assessment2012', [
        'gescomponents',
        'criteria',
        'summary',
        'overall_ass',
        'score'
    ]
)

Criteria = namedtuple(
    'Criteria', ['crit_name', 'answer']
)

# Full 2018 assessment, as displayed in the assessment overview table
Assessment = namedtuple('Assessment',
                        [
                            'gescomponents',
                            'answers',
                            'assessment_summary',
                            'recommendations',
                            'phase_overall_scores',
                            'overall_score',
                            'overall_conclusion',
                            'overall_conclusion_color'
                        ])

# One answered question row within an Assessment
AssessmentRow = namedtuple('AssessmentRow',
                           [
                               'question',
                               'summary',
                               'conclusion',
                               'conclusion_color',
                               'score',
                               'values'
                           ])

# Per-country workflow status shown on the overview listing
CountryStatus = namedtuple('CountryStatus',
                           ['code', 'name', 'status', 'state_id', 'url'])
@db.use_db_session('2018')
def get_assessment_data_2012_db(*args):
    """ Returns the assessment for 2012, from COM_Assessments_2012 table

    :param args: (country, descriptor, article)
    :return: list of COM_Assessments_2012 rows; rows whose
        OverallAssessment merely says "see Dx" are replaced by the row
        for the referenced descriptor
    """
    articles = {
        'Art8': 'Initial assessment (Article 8)',
        'Art9': 'GES (Article 9)',
        'Art10': 'Targets (Article 10)',
    }
    country, descriptor, article = args
    art = articles.get(article)
    # the table stores top-level descriptors only (D1.4 -> D1)
    descriptor = descriptor.split('.')[0]
    t = sql2018.t_COM_Assessments_2012
    count, res = db.get_all_records(
        t,
        t.c.Country.like('%{}%'.format(country)),
        t.c.Descriptor == descriptor,
        or_(t.c.MSFDArticle == art,
            t.c.MSFDArticle.is_(None))
    )
    # look for rows where OverallAssessment looks like 'see D1'
    # replace these rows with data for the descriptor mentioned in the
    # OverallAssessment
    res_final = []
    descr_reg = re.compile(r'see\s(d\d{1,2})', flags=re.I)
    for row in res:
        overall_text = row.OverallAssessment
        assess = row.Assessment
        # fix: guard against NULL columns before calling .lower() —
        # previously a None OverallAssessment raised AttributeError,
        # although the `not overall_text` case was handled below
        is_reference = (
            (overall_text and 'see' in overall_text.lower())
            or (not overall_text and assess
                and 'see d' in assess.lower())
        )
        if is_reference:
            descr_match = (descr_reg.match(overall_text)
                           or descr_reg.match(assess))
            # NOTE(review): assumes the 'see Dx' pattern always matches
            # here; a non-matching text would still raise on .groups()
            descriptor = descr_match.groups()[0]
            _, r = db.get_all_records(
                t,
                t.c.Country == row.Country,
                t.c.Descriptor == descriptor,
                t.c.AssessmentCriteria == row.AssessmentCriteria,
                t.c.MSFDArticle == row.MSFDArticle
            )
            res_final.append(r[0])
            continue
        res_final.append(row)
    return res_final
@db.use_db_session('2018')
def get_assessment_head_data_2012(article, region, country_code):
    """Return 2012 assessment header info for a country/region/article.

    :return: (report_by, assessors, assess_date,
              (report filename, report url))
    """
    t = sql2018.COMGeneral
    count, res = db.get_all_records(
        t,
        t.CountryCode == country_code,
        t.MSFDArticle == article,
        t.RegionSubregion.startswith(region),
        # t.RegionSubregion == region + country_code,
        t.AssessmentTopic == 'GES Descriptor'
    )
    # NOTE(review): this assert makes the 'Not found' fallback below
    # unreachable and raises when 0 or >1 rows match; it is also
    # stripped under `python -O` — confirm whether a graceful fallback
    # was intended instead
    assert count == 1
    if count:
        # report_by = res[0].ReportBy
        report_by = 'Commission'
        assessors = res[0].Assessors
        assess_date = res[0].DateAssessed
        com_report = res[0].CommissionReport
        return (report_by,
                assessors,
                assess_date,
                (com_report.split('/')[-1], com_report))
    return ['Not found'] * 3 + [('Not found', '')]
class NationalDescriptorsOverview(BaseView):
    """Overview page listing all countries with their workflow status."""

    section = 'national-descriptors'

    def countries(self):
        """Return a CountryStatus tuple per country folder."""
        result = []

        for folder in self.context.contentValues():
            state_id, state_label = self.process_phase(folder)
            result.append(CountryStatus(
                folder.id.upper(),
                folder.Title(),
                state_label,
                state_id,
                folder.absolute_url(),
            ))

        return result
class NationalDescriptorCountryOverview(BaseView):
    """Per-country start page: regions, descriptors and their articles,
    plus workflow helpers used by the assessment process.
    """

    section = 'national-descriptors'

    def get_regions(self, context=None):
        """Return the region folders of *context* (default: self.context),
        sorted in the canonical region order.
        """
        if not context:
            context = self.context
        regions = [
            x for x in context.contentValues()
            if x.portal_type == 'Folder'
        ]
        sorted_regions = sorted(
            regions, key=lambda i: ordered_regions_sortkey(i.id.upper())
        )
        return sorted_regions

    # @protect(CheckAuthenticator)
    def send_to_tl(self):
        """Move every approved assessment back to 'in_work' (returns it
        to the topic lead), then redirect to the country page.
        """
        regions = self.get_regions()
        for region in regions:
            descriptors = self.get_descriptors(region)
            for desc in descriptors:
                assessments = self.get_articles(desc)
                for assessment in assessments:
                    state_id = self.get_wf_state_id(assessment)
                    if state_id == 'approved':
                        transition(obj=assessment, to_state='in_work')
        IStatusMessage(self.request).add(u'Sent to TL', type='info')
        url = self.context.absolute_url()
        return self.request.response.redirect(url)

    def ready_phase2(self, regions=None):
        """True when the current user may edit assessments and every
        assessment in every region/descriptor is 'approved'.
        """
        # roles = self.get_current_user_roles(self.context)
        if not self.can_view_edit_assessment_data(self.context):
            return False
        if not regions:
            regions = self.get_regions()
        for region in regions:
            descriptors = self.get_descriptors(region)
            for desc in descriptors:
                assessments = self.get_articles(desc)
                for assessment in assessments:
                    state_id = self.get_wf_state_id(assessment)
                    # a single unapproved assessment blocks phase 2
                    if state_id != 'approved':
                        return False
        return True

    def get_descriptors(self, region):
        """Return the descriptor folders of *region* in display order."""
        order = [
            'd1.1', 'd1.2', 'd1.3', 'd1.4', 'd1.5', 'd1.6', 'd2', 'd3', 'd4',
            'd5', 'd6', 'd7', 'd8', 'd9', 'd10', 'd11',
        ]
        return [region[d] for d in order]

    def descriptor_for_code(self, code):
        """Return the Descriptor object for a descriptor code."""
        desc = get_descriptor(code.upper())
        return desc

    def get_secondary_articles(self, country):
        """Return the secondary-article folders (7, 3, 4) in display order."""
        order = ['art7', 'art3', 'art4']
        return [country[a] for a in order]

    def __call__(self):
        return self.index()
class NatDescCountryOverviewReports(NationalDescriptorCountryOverview):
    """ Class declaration needed to be able to override HTML head title

    Marker subclass: behavior is inherited unchanged; the
    ICountryStartReports interface only selects a different title.
    """
    implements(ICountryStartReports)
class NatDescCountryOverviewAssessments(NationalDescriptorCountryOverview):
    """ Class declaration needed to be able to override HTML head title

    Marker subclass: behavior is inherited unchanged; the
    ICountryStartAssessments interface only selects a different title.
    """
    implements(ICountryStartAssessments)
def get_crit_val(question, element, descriptor):
    """ Get the criteria value to be shown in the assessment data 2018 table
    """
    mode = question.use_criteria

    # target-based questions show the element title, not the criteria id
    if 'targets' in mode:
        show_title = (mode == 'all-targets'
                      or (mode == '2018-targets' and element.year == '2018'))

        return element.title if show_title else ''

    primary = element.is_primary(descriptor)
    criteria_id = element.id

    # special case for D1.4 A09Ad2 we need to show all crits excluding D1C2
    if (question.id == 'A09Ad2' and descriptor.id == 'D1.4'
            and criteria_id != 'D1C2'):
        return criteria_id

    if mode == 'all':
        return criteria_id

    if (primary, mode) in ((True, 'primary'), (False, 'secondary')):
        return criteria_id

    return ''
def format_assessment_data(article, elements, questions, muids, data,
                           descriptor, article_weights, self):
    """ Builds a data structure suitable for display in a template

    This is used to generate the assessment data overview table for 2018

    TODO: this is doing too much. Need to be simplified and refactored.

    ``self`` is passed explicitly as the last argument: it is the view
    instance, used for the coherence override at the end.
    """
    answers = []
    # NOTE(review): Python 2 idiom -- dict views are not indexable on
    # Python 3.  All articles are assumed to share the same phase names,
    # so any one entry of article_weights will do here.
    phases = article_weights.values()[0].keys()
    phase_overall_scores = OverallScores(article_weights)

    for question in questions:
        values = []
        # answer option index -> answer label
        choices = dict(enumerate(question.answers))
        q_scores = question.scores
        q_klass = question.klass

        if question.use_criteria == 'none':
            # question answered once per article, not per element
            field_name = '{}_{}'.format(article, question.id)
            color_index = 0
            label = 'Not filled in'
            v = data.get(field_name, None)

            if v is not None:
                label = choices[v]
                color_index = ANSWERS_COLOR_TABLE[q_scores[v]]

            value = (label, color_index, u'All criteria')
            values.append(value)
        else:
            # one answer per assessed element (criteria/target)
            for element in elements:
                field_name = '{}_{}_{}'.format(
                    article, question.id, element.id
                )
                color_index = 0
                label = u'{}: Not filled in'.format(element.title)
                v = data.get(field_name, None)

                if v is not None:
                    label = u'{}: {}'.format(element.title, choices[v])

                    try:
                        color_index = ANSWERS_COLOR_TABLE[q_scores[v]]
                    except Exception:
                        logger.exception('Invalid color table')
                        color_index = 0
                        # label = 'Invalid color table'

                value = (
                    label,
                    color_index,
                    get_crit_val(question, element, descriptor)
                )
                values.append(value)

        summary_title = '{}_{}_Summary'.format(article, question.id)
        summary = data.get(summary_title) or ''
        sn = '{}_{}_Score'.format(article, question.id)
        # score may be a Score-like object or the {} default
        score = data.get(sn, {})
        conclusion = getattr(score, 'conclusion', '')
        score_value = getattr(score, 'score_value', 0)
        conclusion_color = CONCLUSION_COLOR_TABLE[score_value]
        weighted_score = getattr(score, 'weighted_score', 0)
        max_weighted_score = getattr(score, 'max_weighted_score', 0)
        is_not_relevant = getattr(score, 'is_not_relevant', False)
        # q_weight = float(question.score_weights.get(descriptor.id, 10.0))

        # is_not_relevant is True if all answered options are 'Not relevant'
        # maximum overall score is incremented if the is_not_relevant is False
        if not is_not_relevant:
            p_score = getattr(phase_overall_scores, q_klass)
            p_score['score'] += weighted_score
            p_score['max_score'] += max_weighted_score

        qr = AssessmentRow(question.definition, summary, conclusion,
                           conclusion_color, score, values)
        answers.append(qr)

    # assessment summary and recommendations
    assess_sum = data.get('%s_assessment_summary' % article)
    recommend = data.get('%s_recommendations' % article)

    for phase in phases:
        # set the conclusion and color based on the score for each phase
        phase_scores = getattr(phase_overall_scores, phase)
        phase_score = phase_overall_scores.get_score_for_phase(phase)

        if phase == 'consistency' and article == 'Art9':
            # consistency is never scored for Art9
            phase_scores['conclusion'] = ('-', 'Not relevant')
            phase_scores['color'] = 0

            continue

        phase_scores['conclusion'] = get_overall_conclusion(phase_score)
        phase_scores['color'] = \
            CONCLUSION_COLOR_TABLE[get_range_index(phase_score)]

    # for national descriptors override the coherence score with the score
    # from regional descriptors
    if self.section == 'national-descriptors':
        phase_overall_scores.coherence = self.get_coherence_data(
            self.country_region_code, self.descriptor, article
        )

    # the overall score and conclusion for the whole article 2018
    overall_score_val, overall_score = phase_overall_scores.\
        get_overall_score(article)
    overall_conclusion = get_overall_conclusion(overall_score)
    overall_conclusion_color = CONCLUSION_COLOR_TABLE[overall_score_val]

    assessment = Assessment(
        elements,
        answers,
        assess_sum or '-',
        recommend or '-',
        phase_overall_scores,
        overall_score,
        overall_conclusion,
        overall_conclusion_color
    )

    return assessment
# TODO: use memoization for old data, needs to be called again to get the
# score, to allow delta compute for 2018
#
# @memoize
def filter_assessment_data_2012(data, region_code, descriptor_criterions):
    """ Filters and formats the raw db data for 2012 assessment data

    Returns a dict of country name -> Assessment2012.  Rows whose
    country's region does not belong to *region_code* are skipped.
    """
    gescomponents = [c.id for c in descriptor_criterions]

    assessments = {}
    criterias = []

    for row in data:
        fields = row._fields

        def col(col):
            # positional access to a named column of the current row
            return row[fields.index(col)]

        country = col('Country')

        # The 2012 assessment data have the region in the country name
        # For example: United Kingdom (North East Atlantic)
        # When we display the assessment data (which we do, right now, based on
        # subregion), we want to match the data according to the "big" region
        if '(' in country:
            region = REGION_RE.match(country).groupdict()['region']

            if region not in SUBREGIONS_TO_REGIONS[region_code]:
                continue

        summary = col('Conclusions')
        score = col('OverallScore')
        overall_ass = col('OverallAssessment')

        criteria = Criteria(
            col('AssessmentCriteria'),
            t2rt(col('Assessment'))
        )

        # TODO test for other countries beside LV
        # Condition changed because of LV report, where score is 0
        # if not score:
        if score is None:
            # criteria rows without a score are collected until the row
            # carrying the score arrives
            criterias.append(criteria)
        elif country not in assessments:
            criterias.insert(0, criteria)
            assessment = Assessment2012(
                gescomponents,
                criterias,
                summary,
                overall_ass,
                score,
            )
            assessments[country] = assessment
        else:
            assessments[country].criteria.append(criteria)

        # if country not in assessments:
        #     assessment = Assessment2012(
        #         gescomponents,
        #         [criteria],
        #         summary,
        #         overall_ass,
        #         score,
        #     )
        #     assessments[country] = assessment
        # else:
        #     assessments[country].criteria.append(criteria)

    if not assessments:
        # Fallback: no row carried a score -- build one assessment from
        # whatever was collected.
        # NOTE(review): if ``data`` is empty, ``summary``/``overall_ass``/
        # ``score``/``country`` are never bound and this raises NameError;
        # confirm callers always pass at least one row
        assessment = Assessment2012(
            gescomponents,
            criterias,
            summary,
            overall_ass,
            score,
        )
        assessments[country] = assessment

    return assessments
class NationalDescriptorRegionView(BaseView):
    """Region-level landing view for the national descriptors section."""

    section = 'national-descriptors'
class NationalDescriptorArticleView(BaseView, AssessmentDataMixin):
    """Commission assessment page for one country / region / descriptor /
    article combination.

    Renders the 2012 assessment (from the MSFD database) next to the 2018
    assessment (saved on the context), including the score change between
    the two exercises.
    """

    implements(INationaldescriptorArticleView)

    section = 'national-descriptors'
    assessment_data_2012_tpl = Template('./pt/assessment-data-2012.pt')
    assessment_data_2018_tpl = Template('./pt/assessment-data-2018.pt')
    year = '2018'       # used by self.muids
    _questions = NAT_DESC_QUESTIONS

    @property
    def title(self):
        # HTML head / page title for this assessment page
        return u"Commission assessment / {} / 2018 / {} / {} / {} ".format(
            self.article,
            self.descriptor_title,
            self.country_title,
            self.country_region_name,
        )

    @property
    def criterias(self):
        return self.descriptor_obj.sorted_criterions()      # criterions

    @property
    def questions(self):
        # assessment questions configured for the current article
        qs = self._questions.get(self.article, [])

        return qs

    def __call__(self):
        # Persist the (possibly multi-valued) assessor form selection
        if 'assessor' in self.request.form:
            assessors = self.request.form['assessor']

            if isinstance(assessors, list):
                assessors = ', '.join(assessors)
            self.context.saved_assessment_data.ass_new = assessors

        # BBB: lazily migrate old contexts to the AssessmentData storage
        context = self.context

        if not hasattr(context, 'saved_assessment_data') or \
                not isinstance(context.saved_assessment_data, PersistentList):
            context.saved_assessment_data = AssessmentData()

        # Assessment data 2012
        descriptor_criterions = get_descriptor(self.descriptor).criterions

        country_name = self._country_folder.title

        try:
            db_data_2012 = get_assessment_data_2012_db(
                country_name,
                self.descriptor,
                self.article
            )
            assessments_2012 = filter_assessment_data_2012(
                db_data_2012,
                self.country_region_code,  # TODO: this will need refactor
                descriptor_criterions,
            )

            self.assessment_data_2012 = self.assessment_data_2012_tpl(
                data=assessments_2012
            )

            if assessments_2012.get(country_name):
                score_2012 = assessments_2012[country_name].score
                conclusion_2012 = assessments_2012[country_name].overall_ass
            else:       # fallback to the first country found
                # NOTE(review): Python 2 idiom, dict.keys() is not
                # indexable on Python 3
                ctry = assessments_2012.keys()[0]
                score_2012 = assessments_2012[ctry].score
                conclusion_2012 = assessments_2012[ctry].overall_ass

            report_by, assessors, assess_date, source_file = \
                get_assessment_head_data_2012(self.article,
                                              self.country_region_code,
                                              self._country_folder.id)
        except:
            # NOTE(review): bare except also hides programming errors and
            # SystemExit/KeyboardInterrupt; consider ``except Exception``
            logger.exception("Could not get assessment data for 2012")
            self.assessment_data_2012 = ''
            score_2012 = 100
            conclusion_2012 = 'Not found'
            report_by, assessors, assess_date, source_file = [
                'Not found'] * 3 + [('Not found', '')]

        # Assessment header 2012
        self.assessment_header_2012 = self.assessment_header_template(
            report_by=report_by,
            assessor_list=[],
            assessors=assessors,
            assess_date=assess_date,
            source_file=source_file,
            show_edit_assessors=False,
        )

        # Assessment data 2018
        data = self.context.saved_assessment_data.last()
        elements = self.questions[0].get_all_assessed_elements(
            self.descriptor_obj,
            muids=self.muids
        )
        article_weights = ARTICLE_WEIGHTS
        assessment = format_assessment_data(
            self.article,
            elements,
            self.questions,
            self.muids,
            data,
            self.descriptor_obj,
            article_weights,
            self
        )
        # coherence comes from the regional descriptors assessment
        assessment.phase_overall_scores.coherence = self.get_coherence_data(
            self.country_region_code, self.descriptor, self.article
        )

        score_2012 = int(round(score_2012))
        conclusion_2012_color = CONCLUSION_COLOR_TABLE.get(score_2012, 0)
        # score evolution since the 2012 exercise
        change = int(
            assessment.phase_overall_scores
            .get_range_index_for_phase('adequacy') - score_2012
        )

        self.assessment_data_2018_html = self.assessment_data_2018_tpl(
            assessment=assessment,
            score_2012=score_2012,
            conclusion_2012=conclusion_2012,
            conclusion_2012_color=conclusion_2012_color,
            change_since_2012=change,
            can_comment=self.can_comment
        )

        # Assessment header 2018
        report_by_2018 = u'Commission'
        # assessors_2018 = self.context.saved_assessment_data.assessors
        assessors_2018 = getattr(
            self.context.saved_assessment_data, 'ass_new', 'Not assessed'
        )
        assess_date_2018 = data.get('assess_date', u'Not assessed')
        source_file_2018 = ('To be addedd...', '.')
        can_edit = self.check_permission('wise.msfd: Edit Assessment')
        show_edit_assessors = self.assessor_list and can_edit

        self.assessment_header_2018_html = self.assessment_header_template(
            report_by=report_by_2018,
            assessor_list=self.assessor_list,
            assessors=assessors_2018,
            assess_date=assess_date_2018,
            source_file=source_file_2018,
            show_edit_assessors=show_edit_assessors,
        )

        return self.index()
class NationalDescriptorSecondaryArticleView(NationalDescriptorArticleView):
    """Assessment page for the secondary articles (Art3/4, Art7), which
    are not linked to a descriptor or a region."""

    assessment_data_2018_tpl = Template(
        './pt/assessment-data-2018-secondary.pt'
    )
    assessment_header_template = Template(
        '../pt/assessment-header-secondary.pt'
    )
    # Parsed once at import time; rows are indexed as
    # (country, article, url) -- see source_pdf_assessment below
    pdf_assessments = _extract_pdf_assessments()

    implements(INationaldescriptorSecondaryArticleView)

    # secondary articles have no descriptor
    _descriptor = 'Not linked'

    @property
    def country_region_code(self):
        return 'No region'

    @property
    def descriptor_obj(self):
        return 'Not linked'

    @property
    def has_assessment(self):
        """ Article 7 will be not assessed, we do not show the 2018 and
            2012 assessment tables
        """
        if self.article == 'Art7':
            return False

        return True

    def source_pdf_assessment(self):
        # Find the PDF assessment URL for the current country + article
        for row in self.pdf_assessments:
            country = row[0]

            if country != self.country_code:
                continue

            article = row[1]

            if article != self.article:
                continue

            url = row[2]

            return url

        return None

    def __call__(self):
        # Persist the (possibly multi-valued) assessor form selection
        if 'assessor' in self.request.form:
            assessors = self.request.form['assessor']

            if isinstance(assessors, list):
                assessors = ', '.join(assessors)
            self.context.saved_assessment_data.ass_new = assessors

        # BBB: lazily migrate old contexts to the AssessmentData storage
        context = self.context

        if not hasattr(context, 'saved_assessment_data') or \
                not isinstance(context.saved_assessment_data, PersistentList):
            context.saved_assessment_data = AssessmentData()

        # Assessment data 2012
        # descriptor_criterions = get_descriptor(self.descriptor).criterions
        descriptor_criterions = []

        country_name = self._country_folder.title

        try:
            db_data_2012 = get_assessment_data_2012_db(
                country_name,
                self.descriptor,
                self.article
            )
            assessments_2012 = filter_assessment_data_2012(
                db_data_2012,
                self.country_region_code,
                descriptor_criterions,
            )

            self.assessment_data_2012 = self.assessment_data_2012_tpl(
                data=assessments_2012
            )

            if assessments_2012.get(country_name):
                score_2012 = assessments_2012[country_name].score
                conclusion_2012 = assessments_2012[country_name].overall_ass
            else:       # fallback to the first country found
                # NOTE(review): Python 2 idiom, dict.keys() is not
                # indexable on Python 3
                ctry = assessments_2012.keys()[0]
                score_2012 = assessments_2012[ctry].score
                conclusion_2012 = assessments_2012[ctry].overall_ass

            report_by, assessors, assess_date, source_file = \
                get_assessment_head_data_2012(self.article,
                                              self.country_region_code,
                                              self._country_folder.id)
        except:
            # NOTE(review): bare except also hides programming errors and
            # SystemExit/KeyboardInterrupt; consider ``except Exception``
            logger.exception("Could not get assessment data for 2012")
            self.assessment_data_2012 = ''
            score_2012 = 100
            conclusion_2012 = 'Not found'
            report_by, assessors, assess_date, source_file = [
                'Not found'] * 3 + [('Not found', '')]

        # Assessment header 2012
        self.assessment_header_2012 = self.assessment_header_template(
            report_by=report_by,
            assessor_list=[],
            assessors=assessors,
            assess_date=assess_date,
            source_file=source_file,
            show_edit_assessors=False,
        )

        # Assessment data 2018 -- elements keyed by country, not by muids
        data = self.context.saved_assessment_data.last()
        elements = self.questions[0].get_all_assessed_elements(
            self.descriptor_obj,
            country_name=self.country_name,
            country_code=self.country_code
        )
        article_weights = ARTICLE_WEIGHTS
        assessment = format_assessment_data(
            self.article,
            elements,
            self.questions,
            self.muids,
            data,
            self.descriptor_obj,
            article_weights,
            self
        )

        score_2012 = int(round(score_2012))
        conclusion_2012_color = CONCLUSION_COLOR_TABLE.get(score_2012, 0)
        # score evolution since the 2012 exercise
        change = int(
            assessment.phase_overall_scores
            .get_range_index_for_phase('adequacy') - score_2012
        )

        self.assessment_data_2018_html = self.assessment_data_2018_tpl(
            assessment=assessment,
            score_2012=score_2012,
            conclusion_2012=conclusion_2012,
            conclusion_2012_color=conclusion_2012_color,
            change_since_2012=change,
            can_comment=self.can_comment
        )

        # Assessment header 2018
        report_by_2018 = u'Commission'
        # assessors_2018 = self.context.saved_assessment_data.assessors
        assessors_2018 = getattr(
            self.context.saved_assessment_data, 'ass_new', 'Not assessed'
        )
        assess_date_2018 = data.get('assess_date', u'Not assessed')
        source_file_2018 = ('To be addedd...', '.')
        can_edit = self.check_permission('wise.msfd: Edit Assessment')
        show_edit_assessors = self.assessor_list and can_edit

        self.assessment_header_2018_html = self.assessment_header_template(
            report_by=report_by_2018,
            assessor_list=self.assessor_list,
            assessors=assessors_2018,
            assess_date=assess_date_2018,
            source_file=source_file_2018,
            show_edit_assessors=show_edit_assessors,
        )

        return self.index()

    @property
    def title(self):
        # Secondary articles have no descriptor/region in the title
        return u"Commission assessment: {} / {} / 2018".format(
            self.country_title,
            self.article,
        )
| {"/src/wise/msfd/compliance/nationaldescriptors/a8esa.py": ["/src/wise/msfd/compliance/nationaldescriptors/data.py"], "/src/wise/msfd/compliance/nationalsummary/main.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/regionalsummary/introduction.py": ["/src/wise/msfd/compliance/nationalsummary/introduction.py"], "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/nationalsummary/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py"], "/src/wise/msfd/compliance/regionalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationalsummary/pdfexport.py", "/src/wise/msfd/compliance/regionalsummary/introduction.py"], "/src/wise/msfd/compliance/regionaldescriptors/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/utils.py", "/src/wise/msfd/compliance/regionaldescriptors/data.py"], "/src/wise/msfd/compliance/landingpage.py": ["/src/wise/msfd/compliance/assessment.py"], "/src/wise/msfd/__init__.py": ["/src/wise/msfd/patch.py"], "/src/wise/msfd/search/a9.py": ["/src/wise/msfd/__init__.py", "/src/wise/msfd/search/base.py"], "/src/wise/msfd/compliance/nationalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py", "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py", "/src/wise/msfd/compliance/nationalsummary/introduction.py"]} |
42,568 | laszlocseh/wise.msfd | refs/heads/master | /src/wise/msfd/compliance/regionaldescriptors/main.py | from collections import namedtuple
from persistent.list import PersistentList
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile as VPTF
from wise.msfd.compliance.assessment import (AssessmentDataMixin,
CONCLUSION_COLOR_TABLE)
from wise.msfd.compliance.base import REG_DESC_QUESTIONS
from wise.msfd.compliance.content import AssessmentData
from wise.msfd.compliance.nationaldescriptors.main import (
format_assessment_data)
from wise.msfd.gescomponents import get_descriptor
from .base import BaseRegComplianceView
# Row model for the regions overview table.
# Fix: the namedtuple typename used to read 'CountryStatus' (copy/paste
# from the national descriptors module); it now matches the bound name,
# so repr()/pickling report the correct type.
RegionStatus = namedtuple(
    'RegionStatus',
    ['name', 'countries', 'status', 'state_id', 'url']
)
# Per-article weighting of the assessment phases.  At regional level only
# the coherence phase contributes to the overall score; every article
# shares the same weights.
_PHASE_WEIGHTS = {
    'adequacy': 0.0,
    'consistency': 0.0,
    'coherence': 1.0,
}

ARTICLE_WEIGHTS = {
    article: dict(_PHASE_WEIGHTS)
    for article in ('Art9', 'Art8', 'Art10')
}
class RegionalDescriptorsOverview(BaseRegComplianceView):
    """Landing page listing every region of the regional-descriptors
    section with its countries and workflow phase."""

    section = 'regional-descriptors'

    def regions(self):
        """Return one RegionStatus row per region folder in the context."""
        rows = []

        for region_folder in self.context.contentValues():
            country_names = ", ".join(
                item[1] for item in region_folder._countries_for_region
            )
            state_id, state_label = self.process_phase(region_folder)
            rows.append(RegionStatus(
                region_folder.Title(),
                country_names,
                state_label,
                state_id,
                region_folder.absolute_url(),
            ))

        return rows
class RegionalDescriptorRegionsOverview(BaseRegComplianceView):
    """Overview page for a single region in the regional-descriptors
    section."""

    section = 'regional-descriptors'

    def get_regions(self):
        """Return the plain Folder children of the context."""
        return [
            child for child in self.context.contentValues()
            if child.portal_type == 'Folder'
        ]

    def get_descriptors(self, region):
        """Return the descriptor folders of *region* in canonical order."""
        ordered_ids = (
            'd1.1', 'd1.2', 'd1.3', 'd1.4', 'd1.5', 'd1.6', 'd2', 'd3',
            'd4', 'd5', 'd6', 'd7', 'd8', 'd9', 'd10', 'd11',
        )

        return [region[descriptor_id] for descriptor_id in ordered_ids]

    def descriptor_for_code(self, code):
        """Look up the Descriptor object for *code* (case-insensitive)."""
        return get_descriptor(code.upper())

    def ready_phase2(self):
        """Phase 2 is never ready at regional level."""
        return False
class RegionalDescriptorArticleView(BaseRegComplianceView,
                                    AssessmentDataMixin):
    """Commission assessment page for one region / descriptor / article
    at regional level: 2012 assessment, 2018 assessment and the national
    (adequacy) assessments side by side."""

    section = 'regional-descriptors'
    assessment_data_2012_tpl = VPTF('pt/assessment-data-2012.pt')
    assessment_data_2018_tpl = VPTF('pt/assessment-data-2018.pt')
    national_assessment_tpl = VPTF('pt/report-data.pt')
    _questions = REG_DESC_QUESTIONS

    @property
    def questions(self):
        # assessment questions configured for the current article
        qs = self._questions[self.article]

        return qs

    @property
    def title(self):
        return u"Commission assessment / {} / 2018 / {} / {}".format(
            self.article,
            self.descriptor_title,
            self.country_region_name,
        )

    # @property
    # def criterias(self):
    #     return self.descriptor_obj.sorted_criterions()      # criterions

    # def get_assessments_data_2012(self, article=None, region_code=None,
    #                               descriptor_code=None):
    #
    #     if not article:
    #         article = self.article
    #
    #     if not region_code:
    #         region_code = self.country_region_code
    #
    #     if not descriptor_code:
    #         descriptor_code = self.descriptor_obj.id
    #
    #     res = []
    #
    #     for x in ASSESSMENTS_2012:
    #         if x.region.strip() != region_code:
    #             continue
    #
    #         if x.descriptor.strip() != descriptor_code.split('.')[0]:
    #             continue
    #
    #         art = x.article.replace(" ", "")
    #
    #         if not art.startswith(article):
    #             continue
    #
    #         res.append(x)
    #
    #     sorted_res = sorted(
    #         res, key=lambda i: int(i.overall_score), reverse=True
    #     )
    #
    #     return sorted_res

    def get_assessment_2012_header_data(self, assessments_2012):
        """Extract header fields (reporter, assessor, date, file) from the
        first 2012 assessment row; empty dict when there is none."""
        res = {}

        if not assessments_2012:
            return res

        assessments_2012 = assessments_2012[0]

        res['report_by'] = assessments_2012.report_by
        res['assessed_by'] = assessments_2012.assessment_by
        res['assess_date'] = assessments_2012.date_assessed.date()
        res['file_name'] = assessments_2012.commission_report.split('/')[-1]
        res['file_url'] = assessments_2012.commission_report

        return res

    def get_elements_for_question(self):
        # Because Art 10 questions are based on targets
        # It is a hack to return something for article 10, to be able to
        # answer the first question
        if self.article == 'Art10':
            return self.descriptor_obj.criterions

        elements = self.questions[0].get_all_assessed_elements(
            self.descriptor_obj,
            muids=[]
        )

        return elements

    def __call__(self):
        # Persist the (possibly multi-valued) assessor form selection
        if 'assessor' in self.request.form:
            assessors = self.request.form['assessor']

            if isinstance(assessors, list):
                assessors = ', '.join(assessors)
            self.context.saved_assessment_data.ass_new = assessors

        # BBB: lazily migrate old contexts to the AssessmentData storage
        context = self.context

        if not hasattr(context, 'saved_assessment_data') or \
                not isinstance(context.saved_assessment_data, PersistentList):
            context.saved_assessment_data = AssessmentData()

        # Assessment 2012
        assessments_2012 = self.get_reg_assessments_data_2012()
        assessment_2012_header_data = self.get_assessment_2012_header_data(
            assessments_2012
        )
        self.assessment_header_2012 = self.assessment_header_template(
            report_by=assessment_2012_header_data.get('report_by', '-'),
            assessor_list=self.assessor_list,
            assessors=assessment_2012_header_data.get('assessed_by', '-'),
            assess_date=assessment_2012_header_data.get('assess_date', '-'),
            source_file=[
                assessment_2012_header_data.get('file_name', '-'),
                assessment_2012_header_data.get('file_url', ''),
            ],
            show_edit_assessors=False,
        )
        self.assessment_data_2012 = self.assessment_data_2012_tpl(
            data=assessments_2012
        )
        score_2012 = (assessments_2012 and assessments_2012[0].overall_score
                      or 0)
        conclusion_2012 = (assessments_2012 and assessments_2012[0].conclusion
                           or 'Not found')
        conclusion_2012_color = CONCLUSION_COLOR_TABLE.get(score_2012, 0)

        # Assessment 2018
        assessors_2018 = getattr(
            self.context.saved_assessment_data, 'ass_new', 'Not assessed'
        )
        data = self.context.saved_assessment_data.last()
        elements = self.get_elements_for_question()
        assess_date_2018 = data.get('assess_date', u'Not assessed')
        source_file_2018 = ('To be addedd...', '.')
        # regional assessments are not bound to marine units
        muids = None
        article_weights = ARTICLE_WEIGHTS
        assessment = format_assessment_data(
            self.article,
            # self.get_available_countries(),
            elements,
            self.questions,
            muids,
            data,
            self.descriptor_obj,
            article_weights,
            self
        )

        # score evolution since the 2012 exercise (coherence phase only)
        change = int(
            assessment.phase_overall_scores
            .get_range_index_for_phase('coherence') - score_2012
        )

        can_edit = self.check_permission('wise.msfd: Edit Assessment')
        show_edit_assessors = self.assessor_list and can_edit

        # national (adequacy) assessments shown below the regional one
        national_assessments_data = self.get_adequacy_assessment_data()
        self.national_assessments_2018 = self.national_assessment_tpl(
            data=national_assessments_data, report_header=""
        )

        self.assessment_header_2018_html = self.assessment_header_template(
            report_by="Member state",
            assessor_list=self.assessor_list,
            assessors=assessors_2018,
            assess_date=assess_date_2018,
            source_file=source_file_2018,
            show_edit_assessors=show_edit_assessors,
        )
        self.assessment_data_2018_html = self.assessment_data_2018_tpl(
            assessment=assessment,
            score_2012=score_2012,
            conclusion_2012=conclusion_2012,
            conclusion_2012_color=conclusion_2012_color,
            change_since_2012=change,
            can_comment=self.can_comment
        )

        return self.index()
| {"/src/wise/msfd/compliance/nationaldescriptors/a8esa.py": ["/src/wise/msfd/compliance/nationaldescriptors/data.py"], "/src/wise/msfd/compliance/nationalsummary/main.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/regionalsummary/introduction.py": ["/src/wise/msfd/compliance/nationalsummary/introduction.py"], "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/nationalsummary/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py"], "/src/wise/msfd/compliance/regionalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationalsummary/pdfexport.py", "/src/wise/msfd/compliance/regionalsummary/introduction.py"], "/src/wise/msfd/compliance/regionaldescriptors/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/utils.py", "/src/wise/msfd/compliance/regionaldescriptors/data.py"], "/src/wise/msfd/compliance/landingpage.py": ["/src/wise/msfd/compliance/assessment.py"], "/src/wise/msfd/__init__.py": ["/src/wise/msfd/patch.py"], "/src/wise/msfd/search/a9.py": ["/src/wise/msfd/__init__.py", "/src/wise/msfd/search/base.py"], "/src/wise/msfd/compliance/nationalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py", "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py", "/src/wise/msfd/compliance/nationalsummary/introduction.py"]} |
42,569 | laszlocseh/wise.msfd | refs/heads/master | /src/wise/msfd/compliance/nationalsummary/reportdata.py | # -*- coding: utf-8 -*-
import logging
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile
from Products.statusmessages.interfaces import IStatusMessage
from wise.msfd.data import get_report_filename
from wise.msfd.translation import get_translated, retrieve_translation
from wise.msfd.utils import (ItemList, TemplateMixin, db_objects_to_dict,
fixedorder_sortkey, timeit)
from ..nationaldescriptors.a7 import Article7
from ..nationaldescriptors.a34 import Article34
from ..nationaldescriptors.base import BaseView
from .base import BaseNatSummaryView
logger = logging.getLogger('wise.msfd')
class Article34Copy(Article34):
    """ Class to override the template """

    # secondary-article rendering of the Art. 3 & 4 report data
    template = ViewPageTemplateFile('pt/report-data-secondary.pt')
    title = "Articles 3 & 4 Marine regions"
class Article7Copy(Article7):
    """ Class to override the template """

    # secondary-article rendering of the Art. 7 report data
    template = ViewPageTemplateFile('pt/report-data-secondary.pt')
    title = "Article 7 Competent authorities"
class ArticleTable(BaseView):
    """Wrapper view rendering the 2012 report-data table for one
    secondary article (Art3/Art4/Art7), prefixed with its title."""

    # article id -> view class implementing its report rendering
    impl = {
        'Art3': Article34Copy,
        'Art4': Article34Copy,
        'Art7': Article7Copy,
    }

    is_translatable = True
    year = '2012'

    def __init__(self, context, request, article):
        super(ArticleTable, self).__init__(context, request)
        self._article = article
        self.klass = self.impl[article]

    @property
    def article(self):
        return self._article

    @property
    def descriptor(self):
        # secondary articles are not tied to a descriptor
        return 'Not linked'

    @property
    def muids(self):
        return []

    @property
    def country_region_code(self):
        return 'No region'

    def get_article_title(self, klass):
        """Return the article title wrapped in an <h4> heading."""
        tmpl = u"<h4>{}</h4>"
        title = klass.title

        return tmpl.format(title)

    def get_report_filename(self, art=None):
        # needed in article report data implementations, to retrieve the file
        # (delegates to the module-level get_report_filename helper)
        return get_report_filename(
            self.year, self.country_code, self.country_region_code,
            art or self.article, self.descriptor
        )

    def __call__(self):
        try:
            self.view = self.klass(
                self, self.request, self.country_code,
                self.country_region_code, self.descriptor, self.article,
                self.muids
            )
            rendered_view = self.view()
        except Exception:
            # Fix: was a bare ``except:`` which also swallowed SystemExit /
            # KeyboardInterrupt and hid the traceback entirely; now log it
            logger.exception("Error getting report for article %s",
                             self._article)
            rendered_view = 'Error getting report'

        return self.get_article_title(self.klass) + rendered_view
class NationalSummaryView(BaseNatSummaryView):
    """Top-level national summary report page for one country."""

    help_text = "HELP TEXT"
    template = ViewPageTemplateFile('pt/report-data.pt')
    year = "2012"
    render_header = True

    # @cache(get_reportdata_key, dependencies=['translation'])
    @timeit
    def render_reportdata(self):
        """Assemble and render the report tables (currently only the
        header; the article tables are disabled)."""
        report_header = self.report_header_template(
            title="National summary report: {}".format(
                self.country_name,
            )
        )

        self.tables = [
            report_header,
            # ArticleTable(self, self.request, 'Art7'),
            # ArticleTable(self, self.request, 'Art3-4'),
            # trans_edit_html,
        ]

        template = self.template

        return template(tables=self.tables)

    def __call__(self):
        # 'edit-data' button: jump straight to the country folder edit form
        if 'edit-data' in self.request.form:
            url = "{}/edit".format(self._country_folder.absolute_url())

            return self.request.response.redirect(url)

        report_html = self.render_reportdata()
        self.report_html = report_html

        # 'translate' button: queue an async translation of every
        # translatable value, then inform the user
        # NOTE(review): self._translatable_values is defined elsewhere
        # (presumably on BaseNatSummaryView) -- confirm
        if 'translate' in self.request.form:
            for value in self._translatable_values:
                retrieve_translation(self.country_code, value)

            messages = IStatusMessage(self.request)
            messages.add(u"Auto-translation initiated, please refresh "
                         u"in a couple of minutes", type=u"info")

        @timeit
        def render_html():
            # wrapped in a function so the render itself gets timed
            return self.index()

        return render_html()
| {"/src/wise/msfd/compliance/nationaldescriptors/a8esa.py": ["/src/wise/msfd/compliance/nationaldescriptors/data.py"], "/src/wise/msfd/compliance/nationalsummary/main.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/regionalsummary/introduction.py": ["/src/wise/msfd/compliance/nationalsummary/introduction.py"], "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/nationalsummary/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py"], "/src/wise/msfd/compliance/regionalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationalsummary/pdfexport.py", "/src/wise/msfd/compliance/regionalsummary/introduction.py"], "/src/wise/msfd/compliance/regionaldescriptors/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/utils.py", "/src/wise/msfd/compliance/regionaldescriptors/data.py"], "/src/wise/msfd/compliance/landingpage.py": ["/src/wise/msfd/compliance/assessment.py"], "/src/wise/msfd/__init__.py": ["/src/wise/msfd/patch.py"], "/src/wise/msfd/search/a9.py": ["/src/wise/msfd/__init__.py", "/src/wise/msfd/search/base.py"], "/src/wise/msfd/compliance/nationalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py", "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py", "/src/wise/msfd/compliance/nationalsummary/introduction.py"]} |
42,570 | laszlocseh/wise.msfd | refs/heads/master | /src/wise/msfd/compliance/admin.py | import logging
from collections import namedtuple
from datetime import datetime
from io import BytesIO
from zope.interface import alsoProvides
import xlsxwriter
from eea.cache import cache
from plone import api
from plone.api import portal
from plone.api.content import get_state, transition
from plone.api.portal import get_tool
from plone.dexterity.utils import createContentInContainer as create
from Products.CMFCore.utils import getToolByName
from Products.CMFDynamicViewFTI.interfaces import ISelectableBrowserDefault
from Products.CMFPlacefulWorkflow.WorkflowPolicyConfig import \
WorkflowPolicyConfig
from Products.Five.browser import BrowserView
from Products.statusmessages.interfaces import IStatusMessage
from wise.msfd import db, sql2018
from wise.msfd.compliance.assessment import AssessmentDataMixin
from wise.msfd.compliance.interfaces import (INationalDescriptorAssessment,
INationalDescriptorAssessmentSecondary)
from wise.msfd.compliance.vocabulary import (get_regions_for_country,
REGIONAL_DESCRIPTORS_REGIONS)
from wise.msfd.compliance.regionaldescriptors.base import COUNTRY
from wise.msfd.gescomponents import (get_all_descriptors, get_descriptor,
get_marine_units)
from wise.msfd.labels import get_indicator_labels
from wise.msfd.translation import Translation, get_detected_lang
from wise.msfd.translation.interfaces import ITranslationsStorage
from . import interfaces
from .base import (_get_secondary_articles, BaseComplianceView,
NAT_DESC_QUESTIONS, REG_DESC_QUESTIONS,
report_data_cache_key)
logger = logging.getLogger('wise.msfd')
# Plone group ids used when granting local roles on assessment folders
CONTRIBUTOR_GROUP_ID = 'extranet-wisemarine-msfd-tl'
REVIEWER_GROUP_ID = 'extranet-wisemarine-msfd-reviewers'
EDITOR_GROUP_ID = 'extranet-wisemarine-msfd-editors'
def get_wf_state_id(context):
    """Return the workflow state id of *context*.

    Falls back to the state name when the workflow definition does not
    declare an explicit id for the state.
    """
    state_name = get_state(context)
    workflow = get_tool('portal_workflow').getWorkflowsFor(context)[0]  # assumes one wf
    state_definition = workflow.states[state_name]

    return state_definition.id or state_name
class ToPDB(BrowserView):
    """Debugging helper view: drops into an interactive pdb session.

    NOTE(review): this blocks the serving thread with a debugger -- it
    must not be reachable in a production deployment.
    """

    def __call__(self):
        import pdb
        pdb.set_trace()

        return 'ok'
class BootstrapCompliance(BrowserView):
""" Bootstrap the compliance module by creating all needed country folders
"""
@property
def debug(self):
    """True unless the request explicitly asks for production mode."""
    form = self.request.form

    return 'production' not in form
@db.use_db_session('2018')
def _get_countries(self):
    """ Get a list of (code, name) countries
    """
    count, res = db.get_all_records(
        sql2018.LCountry
    )
    countries = [(x.Code, x.Country) for x in res]

    if self.debug:
        # development mode: bootstrap only a few countries to keep it fast
        countries = [x for x in countries if x[0] in ('LV', 'NL', 'DE')]

    return countries
@db.use_db_session('2018')
def _get_countries_names(self, country_codes):
    """Return the (code, name) pairs matching *country_codes*, in the
    order the codes are given."""
    all_countries = self._get_countries()

    return [
        country
        for code in country_codes
        for country in all_countries
        if country[0] == code
    ]
def _get_descriptors(self):
    """ Get a list of (code, description) descriptors
    """
    descriptors = get_all_descriptors()

    if self.debug:
        # development mode: restrict to a handful of descriptors
        debug_codes = ('D1.1', 'D4', 'D5', 'D6')
        descriptors = [d for d in descriptors if d[0] in debug_codes]

    return descriptors
@db.use_db_session('2018')
def _get_articles(self):
    # articles = db.get_unique_from_mapper(
    #     sql2018.LMSFDArticle,
    #     'MSFDArticle'
    # )
    # return articles

    # hardcoded: only these articles get assessment folders created
    return ['Art8', 'Art9', 'Art10']
def set_layout(self, obj, name):
    # Set the default display view (layout) of obj to the named view
    ISelectableBrowserDefault(obj).setLayout(name)
def set_policy(self, context, name):
    """Attach the compliance placeful workflow policy to *context*.

    NOTE(review): the ``name`` argument is unused -- the policy ids are
    hardcoded; confirm before relying on it.
    """
    logger.info("Set placeful workflow policy for %s", context.getId())
    config = WorkflowPolicyConfig(
        workflow_policy_in='compliance_section_policy',
        workflow_policy_below='compliance_section_policy',
    )
    context._setObject(config.id, config)
@db.use_db_session('2018')
def get_country_regions(self, country_code):
regions = get_regions_for_country(country_code)
return regions
def get_group(self, code):
if '.' in code:
code = 'd1'
code = code.lower()
return "{}-{}".format(CONTRIBUTOR_GROUP_ID, code)
def create_comments_folder(self, content):
for id, title, trans in [
(u'tl', 'Discussion track with Topic Leads', 'open_for_tl'),
(u'ec', 'Discussion track with EC', 'open_for_ec'),
]:
if id not in content.contentIds():
dt = create(content,
'wise.msfd.commentsfolder',
id=id,
title=title)
transition(obj=dt, transition=trans)
def make_country(self, parent, country_code, name):
if country_code.lower() in parent.contentIds():
cf = parent[country_code.lower()]
else:
cf = create(parent,
'wise.msfd.countrydescriptorsfolder',
title=name,
id=country_code)
for regid, region in self.get_country_regions(country_code):
if regid.lower() in cf.contentIds():
reg = cf[regid.lower()]
else:
reg = create(cf,
'Folder',
title=region,
id=regid.lower())
alsoProvides(reg, interfaces.INationalRegionDescriptorFolder)
self.set_layout(reg, '@@nat-desc-reg-view')
for desc_code, description in self._get_descriptors():
if desc_code.lower() in reg.contentIds():
df = reg[desc_code.lower()]
else:
df = create(reg, 'Folder', title=description, id=desc_code)
alsoProvides(df, interfaces.IDescriptorFolder)
for art in self._get_articles():
if art.lower() in df.contentIds():
nda = df[art.lower()]
else:
nda = create(df,
'wise.msfd.nationaldescriptorassessment',
title=art)
lr = nda.__ac_local_roles__
group = self.get_group(desc_code)
lr[group] = ['Contributor']
logger.info("Created NationalDescriptorAssessment %s",
nda.absolute_url())
self.set_layout(nda, '@@nat-desc-art-view')
self.create_comments_folder(nda)
return cf
def make_region(self, parent, region):
code, name = region.code.lower(), region.title
if code.lower() in parent.contentIds():
rf = parent[code.lower()]
else:
rf = create(parent,
'wise.msfd.regiondescriptorsfolder',
title=name,
id=code)
rf._subregions = region.subregions
rf._countries_for_region = self._get_countries_names(
region.countries
)
self.set_layout(rf, '@@reg-region-start')
alsoProvides(rf, interfaces.IRegionalDescriptorRegionsFolder)
for desc_code, description in self._get_descriptors():
if desc_code.lower() in rf.contentIds():
df = rf[desc_code.lower()]
else:
df = create(rf, 'Folder', title=description, id=desc_code)
alsoProvides(df, interfaces.IDescriptorFolder)
for art in self._get_articles():
if art.lower() in df.contentIds():
rda = df[art.lower()]
else:
rda = create(df,
'wise.msfd.regionaldescriptorassessment',
title=art)
lr = rda.__ac_local_roles__
group = self.get_group(desc_code)
lr[group] = ['Contributor']
logger.info("Created RegionalDescriptorArticle %s",
rda.absolute_url())
self.set_layout(rda, '@@reg-desc-art-view')
alsoProvides(rda, interfaces.IRegionalDescriptorAssessment)
self.create_comments_folder(rda)
return rf
def setup_nationaldescriptors(self, parent):
# National Descriptors Assessments
if 'national-descriptors-assessments' in parent.contentIds():
nda = parent['national-descriptors-assessments']
else:
nda = create(parent,
'Folder', title=u'National Descriptors Assessments')
self.set_layout(nda, '@@nat-desc-start')
alsoProvides(nda, interfaces.INationalDescriptorsFolder)
for code, country in self._get_countries():
self.make_country(nda, code, country)
def setup_regionaldescriptors(self, parent):
# Regional Descriptors Assessments
if 'regional-descriptors-assessments' in parent.contentIds():
rda = parent['regional-descriptors-assessments']
else:
rda = create(parent,
'Folder', title=u'Regional Descriptors Assessments')
self.set_layout(rda, '@@reg-desc-start')
alsoProvides(rda, interfaces.IRegionalDescriptorsFolder)
for region in REGIONAL_DESCRIPTORS_REGIONS:
if not region.is_main:
continue
self.make_region(rda, region)
def setup_nationalsummaries(self, parent):
if 'national-summaries' in parent.contentIds():
ns = parent['national-summaries']
else:
ns = create(parent,
'Folder', title=u'National summaries')
self.set_layout(ns, '@@nat-summary-start')
alsoProvides(ns, interfaces.INationalSummaryFolder)
for code, country in self._get_countries():
if code.lower() in ns.contentIds():
cf = ns[code.lower()]
else:
# national_summary type used for Assessment summary/pdf export
cf = create(ns,
'national_summary',
title=country,
id=code)
self.set_layout(cf, 'assessment-summary')
alsoProvides(cf, interfaces.INationalSummaryCountryFolder)
# self.create_comments_folder(cf)
# create the overview folder
# if 'overview' in cf.contentIds():
# of = cf['overview']
# else:
# of = create(cf,
# 'wise.msfd.nationalsummaryoverview',
# title='National summary overview',
# id='overview')
#
# self.set_layout(of, 'sum-country-start')
# alsoProvides(of, interfaces.INationalSummaryOverviewFolder)
def setup_regionalsummaries(self, parent):
if 'regional-summaries' in parent.contentIds():
ns = parent['regional-summaries']
else:
ns = create(parent,
'Folder',
title=u'Regional summaries')
self.set_layout(ns, 'reg-summary-start')
alsoProvides(ns, interfaces.IRegionalSummaryFolder)
for region in REGIONAL_DESCRIPTORS_REGIONS:
if not region.is_main:
continue
code, name = region.code.lower(), region.title
if code in ns.contentIds():
rf = ns[code]
else:
rf = create(ns,
'wise.msfd.regionalsummaryfolder',
title=name,
id=code)
rf._subregions = region.subregions
rf._countries_for_region = self._get_countries_names(
region.countries
)
self.set_layout(rf, 'assessment-summary')
alsoProvides(rf, interfaces.IRegionalSummaryRegionFolder)
# self.set_layout(rf, '@@sum-region-start')
def setup_secondary_articles(self, parent):
if 'national-descriptors-assessments' not in parent.contentIds():
return
nda_parent = parent['national-descriptors-assessments']
country_ids = nda_parent.contentIds()
for country in country_ids:
country_folder = nda_parent[country]
for article in _get_secondary_articles():
if article.lower() in country_folder.contentIds():
nda = country_folder[article.lower()]
else:
nda = create(country_folder,
'wise.msfd.nationaldescriptorassessment',
title=article)
logger.info("Created NationalDescriptorAssessment %s",
nda.absolute_url())
alsoProvides(
nda,
interfaces.INationalDescriptorAssessmentSecondary
)
self.set_layout(nda, '@@nat-desc-art-view-secondary')
self.create_comments_folder(nda)
def __call__(self):
# if 'compliance-module' in self.context.contentIds():
# self.context.manage_delObjects(['compliance-module'])
if 'assessment-module' in self.context.contentIds():
cm = self.context['assessment-module']
else:
cm = create(self.context, 'Folder', title=u'Compliance Module')
self.set_layout(cm, '@@comp-start')
self.set_policy(cm, 'compliance_section_policy')
alsoProvides(cm, interfaces.IComplianceModuleFolder)
lr = cm.__ac_local_roles__
lr[REVIEWER_GROUP_ID] = [u'Reviewer']
lr[EDITOR_GROUP_ID] = [u'Editor']
# Contributor: TL
# Reviewer: EC
# Editor: Milieu
# self.setup_nationaldescriptors(cm)
DEFAULT = 'regional,nationalsummary,regionalsummary,secondary'
targets = self.request.form.get('setup', DEFAULT)
if targets:
targets = targets.split(',')
else:
targets = DEFAULT
if "regional" in targets:
self.setup_regionaldescriptors(cm)
if "nationalsummary" in targets:
self.setup_nationalsummaries(cm)
if "secondary" in targets:
self.setup_secondary_articles(cm)
if 'regionalsummary' in targets:
self.setup_regionalsummaries(cm)
return cm.absolute_url()
class CleanupCache(BrowserView):
""" Remove the persistent cache that we have saved in objects
"""
def __call__(self):
brains = api.content.find(context=self.context, depth=10000)
for brain in brains:
obj = brain.getObject()
print "For obj", obj
for name in obj.__dict__.keys():
if name.startswith('_cache_'):
logger.info("Cleaning up %r: %s", obj, name)
delattr(obj, name)
return "done"
# Lightweight record used by the admin views to list group members
User = namedtuple('User', ['username', 'fullname', 'email'])
class ComplianceAdmin(BaseComplianceView):
    """ Admin overview: descriptors and the members of each role group """

    name = 'admin'
    section = 'compliance-admin'

    @property
    def get_descriptors(self):
        """ All available GES descriptors """
        return get_all_descriptors()

    def get_users_by_group_id(self, group_id):
        """ Return a list of User tuples for the members of a group """
        groups_tool = getToolByName(self.context, 'portal_groups')
        group = groups_tool.getGroupById(group_id)
        members = group.getGroupMembers()

        if not members:
            return []

        return [
            User(member.getProperty('id'),
                 member.getProperty('fullname'),
                 member.getProperty('email'))
            for member in members
        ]

    # @cache #TODO
    def get_groups_for_desc(self, descriptor):
        """ Members of the contributor group for a descriptor """
        # sub-descriptors (D1.x) share the main descriptor's group
        main_code = descriptor.split('.')[0].lower()
        group_id = '{}-{}'.format(CONTRIBUTOR_GROUP_ID, main_code)

        return self.get_users_by_group_id(group_id)

    @property
    def get_reviewers(self):
        """ Members of the reviewer (EC) group """
        return self.get_users_by_group_id(REVIEWER_GROUP_ID)

    @property
    def get_editors(self):
        """ Members of the editor group """
        return self.get_users_by_group_id(EDITOR_GROUP_ID)
class AdminScoring(BaseComplianceView, AssessmentDataMixin):
    """ Admin view to export, reset and recalculate assessment scores """

    name = 'admin-scoring'
    section = 'compliance-admin'
    questions = NAT_DESC_QUESTIONS
    questions_reg = REG_DESC_QUESTIONS

    def descriptor_obj(self, descriptor):
        return get_descriptor(descriptor)

    def get_available_countries(self, region_folder):
        res = [
            # id, title, definition, is_primary
            COUNTRY(x[0], x[1], "", lambda _: True)
            for x in region_folder._countries_for_region
        ]

        return res

    @cache(report_data_cache_key)
    def muids(self, country_code, country_region_code, year):
        """ Get all Marine Units for a country

        :return: ['BAL- LV- AA- 001', 'BAL- LV- AA- 002', ...]
        """
        return get_marine_units(country_code,
                                country_region_code,
                                year)

    @property
    def get_descriptors(self):
        """Exclude first item, D1 """
        descriptors = get_all_descriptors()

        return descriptors[1:]

    @property
    def ndas(self):
        """ Iterate the primary national descriptor assessment objects """
        catalog = get_tool('portal_catalog')
        brains = catalog.searchResults(
            portal_type='wise.msfd.nationaldescriptorassessment',
        )

        for brain in brains:
            obj = brain.getObject()

            # safety check to exclude secondary articles
            if not INationalDescriptorAssessment.providedBy(obj):
                continue

            # safety check to exclude secondary articles
            obj_title = obj.title.capitalize()

            if obj_title in _get_secondary_articles():
                continue

            # BUGFIX: ('Art3-4') is a plain string, so the original test
            # was a substring check; a one-element tuple makes it a real
            # membership test
            if obj_title in ('Art3-4', ):
                continue

            yield obj

    @property
    def ndas_sec(self):
        """ Iterate the secondary (Art3-4 / Art7) assessment objects """
        catalog = get_tool('portal_catalog')
        brains = catalog.searchResults(
            portal_type='wise.msfd.nationaldescriptorassessment',
        )

        for brain in brains:
            obj = brain.getObject()

            # safety check to exclude primary articles
            if not INationalDescriptorAssessmentSecondary.providedBy(obj):
                continue

            obj_title = obj.title.capitalize()

            if obj_title not in _get_secondary_articles():
                continue

            yield obj

    def reset_assessment_data(self):
        """ Completely erase the assessment data from the system

        TODO: when implementing the regional descriptors, make sure to adjust
        """
        for obj in self.ndas:
            logger.info('Reset assessment data for %s', obj.absolute_url())

            if hasattr(obj, 'saved_assessment_data'):
                del obj.saved_assessment_data
                obj._p_changed = True

    def recalculate_score_for_objects(self, objects, questions):
        """ Re-apply the scoring formula to every saved answer """
        for obj in objects:
            if hasattr(obj, 'saved_assessment_data') \
                    and obj.saved_assessment_data:
                logger.info('recalculating scores for %r', obj)
                data = obj.saved_assessment_data.last()
                new_overall_score = 0
                scores = {k: v for k, v in data.items()
                          if '_Score' in k and v is not None}

                for q_id, score in scores.items():
                    id_ = score.question.id
                    article = score.question.article
                    _question = [
                        x
                        for x in questions.get(article, ())
                        if x.id == id_
                    ]

                    if not _question:
                        continue

                    _question = _question[0]
                    # new_score_weight = _question.score_weights
                    # _question.score_weights = new_score_weight
                    values = score.values
                    descriptor = score.descriptor
                    new_score = _question.calculate_score(descriptor,
                                                          values)
                    data[q_id] = new_score
                    new_overall_score += getattr(new_score,
                                                 'weighted_score', 0)

                data['OverallScore'] = new_overall_score
                obj.saved_assessment_data._p_changed = True

    def recalculate_scores(self):
        self.recalculate_score_for_objects(self.ndas, self.questions)
        self.recalculate_score_for_objects(self.rdas, self.questions_reg)

    def get_data(self, obj):
        """ Get assessment data for a country assessment object

        Yields export rows: (country, region, descriptor, article,
        question, option, answer, score, state, last change)
        """
        if not (hasattr(obj, 'saved_assessment_data')
                and obj.saved_assessment_data):
            return

        state = get_wf_state_id(obj)
        article = obj
        descr = obj.aq_parent
        region = obj.aq_parent.aq_parent
        country = obj.aq_parent.aq_parent.aq_parent
        d_obj = self.descriptor_obj(descr.id.upper())
        muids = self.muids(country.id.upper(), region.id.upper(), '2018')

        data = obj.saved_assessment_data.last()

        for k, val in data.items():
            if not val:
                continue

            if '_Score' in k:
                last_change_name = "{}_{}_Last_update".format(article.title,
                                                              val.question.id)
                last_change = data.get(last_change_name, '')
                last_change = last_change and last_change.isoformat() or ''

                # the option list does not depend on the answer index:
                # compute it once instead of per answered value
                options = ([o.title
                            for o in val.question.get_assessed_elements(
                                d_obj, muids=muids)] or ['All criteria'])

                for i, v in enumerate(val.values):
                    # Possible cause of IndexError: D9C2 was removed and
                    # some old questions have answered it; skip those rows
                    # (the unreachable error-string assignment after the
                    # `continue` was removed)
                    try:
                        option = options[i]
                    except IndexError:
                        continue

                    answer = val.question.answers[v]

                    yield (country.title, region.title, d_obj.id,
                           article.title, val.question.id, option, answer,
                           val.question.scores[v], state, last_change)
            elif '_Summary' in k:
                article_id, question_id, _ = k.split('_')
                last_change_name = "{}_{}_Last_update".format(article_id,
                                                              question_id)
                last_change = data.get(last_change_name, '')
                last_change = last_change and last_change.isoformat() or ''

                yield (country.title, region.title, d_obj.id, article_id,
                       question_id, 'Summary', val, ' ', state, last_change)
            elif '_assessment_summary' in k:
                article_id, _, __ = k.split('_')
                last_change_name = "{}_assess_summary_last_upd".format(
                    article_id
                )
                last_change = data.get(last_change_name, '')
                last_change = last_change and last_change.isoformat() or ''

                yield (country.title, region.title, d_obj.id, article_id,
                       ' ', 'Assessment Summary', val, '', state, last_change)
            elif '_recommendations' in k:
                article_id, _ = k.split('_')
                last_change_name = "{}_assess_summary_last_upd".format(
                    article_id
                )
                last_change = data.get(last_change_name, '')
                last_change = last_change and last_change.isoformat() or ''

                yield (country.title, region.title, d_obj.id, article_id,
                       ' ', 'Recommendations', val, '', state, last_change)
            elif '_progress' in k:
                article_id, _ = k.split('_')
                last_change_name = "{}_assess_summary_last_upd".format(
                    article_id
                )
                last_change = data.get(last_change_name, '')
                last_change = last_change and last_change.isoformat() or ''

                yield (country.title, region.title, d_obj.id, article_id,
                       ' ', 'Progress', val, '', state, last_change)

    def get_data_sec(self, obj):
        """ Get assessment data for a country assessment object
        (secondary articles: no descriptor/region context available)
        """
        if not (hasattr(obj, 'saved_assessment_data')
                and obj.saved_assessment_data):
            return

        state = get_wf_state_id(obj)
        article = obj
        country = obj.aq_parent
        data = obj.saved_assessment_data.last()
        d_obj = 'Not linked'
        muids = []

        for k, val in data.items():
            if not val:
                continue

            if '_Score' in k:
                # invariant per question, hoisted out of the values loop
                options = ([o.title
                            for o in val.question.get_assessed_elements(
                                d_obj, muids=muids)] or ['All criteria'])

                for i, v in enumerate(val.values):
                    # Possible cause of IndexError: D9C2 was removed and
                    # some old questions have answered it
                    try:
                        option = options[i]
                    except IndexError:
                        continue

                    answer = val.question.answers[v]

                    yield (country.title, article.title, val.question.id,
                           option, answer, val.question.scores[v], state)
            elif '_Summary' in k:
                article_id, question_id, _ = k.split('_')

                yield (country.title, article_id, question_id,
                       'Summary', val, ' ', state)
            elif '_assessment_summary' in k:
                article_id, _, __ = k.split('_')

                yield (country.title, article_id, ' ',
                       'Assessment Summary', val, '', state)
            elif '_recommendations' in k:
                article_id, _ = k.split('_')

                yield (country.title, article_id, ' ',
                       'Recommendations', val, '', state)
            elif '_progress' in k:
                article_id, _ = k.split('_')

                yield (country.title, article_id, ' ',
                       'Progress', val, '', state)

    def get_data_rda(self, obj):
        """ Get assessment data for a regional descriptor assessment
        """
        if not (hasattr(obj, 'saved_assessment_data')
                and obj.saved_assessment_data):
            return

        state = get_wf_state_id(obj)
        article = obj
        descr = obj.aq_parent
        region = obj.aq_parent.aq_parent
        d_obj = self.descriptor_obj(descr.id.upper())
        data = obj.saved_assessment_data.last()

        for k, val in data.items():
            if not val:
                continue

            if '_Score' in k:
                # invariant for the whole loop, hoisted out of it
                options = (
                    [o.title for o in self.get_available_countries(region)]
                    or ['All criteria']
                )

                for i, v in enumerate(val.values):
                    # Possible cause of IndexError: D9C2 was removed and
                    # some old questions have answered it
                    try:
                        option = options[i]
                    except IndexError:
                        continue

                    answer = val.question.answers[v]

                    yield (region.title, d_obj.id,
                           article.title, val.question.id, option, answer,
                           val.question.scores[v], state)
            elif '_Summary' in k:
                article_id, question_id, _ = k.split('_')

                yield (region.title, d_obj.id,
                       article_id, question_id, 'Summary', val, ' ', state)
            elif '_assessment_summary' in k:
                article_id, _, __ = k.split('_')

                yield (region.title, d_obj.id,
                       article_id, ' ', 'Assessment Summary', val, '', state)
            elif '_recommendations' in k:
                article_id, _ = k.split('_')

                yield (region.title, d_obj.id,
                       article_id, ' ', 'Recommendations', val, '', state)
            elif '_progress' in k:
                article_id, _ = k.split('_')

                yield (region.title, d_obj.id,
                       article_id, ' ', 'Progress', val, '', state)

    def data_to_xls(self, all_data):
        """ Write the collected rows into an in-memory xlsx workbook

        :param all_data: list of (sheetname, labels, rows-per-object)
        :return: BytesIO positioned at the start of the file
        """
        out = BytesIO()
        workbook = xlsxwriter.Workbook(out, {'in_memory': True})

        for sheetname, labels, data in all_data:
            worksheet = workbook.add_worksheet(sheetname)

            # header row
            for i, label in enumerate(labels):
                worksheet.write(0, i, label)

            x = 0

            for objdata in data:
                for row in objdata:
                    x += 1

                    for iv, value in enumerate(row):
                        worksheet.write(x, iv, value)

        workbook.close()
        out.seek(0)

        return out

    def export_scores(self, context):
        """ Stream an xlsx file with all assessment scores """
        # National descriptors data
        nda_labels = ('Country', 'Region', 'Descriptor', 'Article', 'Question',
                      'Option', 'Answer', 'Score', 'State', 'Last change')
        nda_xlsdata = (self.get_data(nda) for nda in self.ndas)

        # Regional descriptors data
        rda_labels = ('Region', 'Descriptor', 'Article', 'Question',
                      'Option', 'Answer', 'Score', 'State')
        rda_xlsdata = (self.get_data_rda(rda) for rda in self.rdas)

        # Secondary Articles 3 & 4, 7
        sec_labels = ('Country', 'Article', 'Question',
                      'Option', 'Answer', 'Score', 'State')
        sec_xlsdata = (self.get_data_sec(sec) for sec in self.ndas_sec)

        all_data = [
            ('National descriptors', nda_labels, nda_xlsdata),
            ('Regional descriptors', rda_labels, rda_xlsdata),
            ('Articles 3 & 4, 7', sec_labels, sec_xlsdata)
        ]

        xlsio = self.data_to_xls(all_data)
        sh = self.request.response.setHeader
        sh('Content-Type', 'application/vnd.openxmlformats-officedocument.'
           'spreadsheetml.sheet')
        fname = "-".join(['Assessment_Scores',
                          str(datetime.now().replace(microsecond=0))])
        sh('Content-Disposition',
           'attachment; filename=%s.xlsx' % fname)

        return xlsio.read()

    def __call__(self):
        msgs = IStatusMessage(self.request)

        if 'export-scores' in self.request.form:
            return self.export_scores(self.context)

        if 'reset-assessments' in self.request.form:
            self.reset_assessment_data()
            # BUGFIX: fixed the user-facing typo 'reseted'
            msgs.add('Assessments reset successfully!', type='warning')
            logger.info('Reset score finished!')

        if 'recalculate-scores' in self.request.form:
            self.recalculate_scores()
            msgs.add('Scores recalculated successfully!', type='info')
            logger.info('Recalculating score finished!')

        return self.index()
class SetupAssessmentWorkflowStates(BaseComplianceView):
    """ One-off admin view: move assessments with no saved data from the
    'in_work' workflow state back to 'not_started'
    """

    @property
    def ndas(self):
        """ Iterate all national descriptor assessment objects """
        catalog = get_tool('portal_catalog')
        brains = catalog.searchResults(
            portal_type='wise.msfd.nationaldescriptorassessment',
        )

        for brain in brains:
            obj = brain.getObject()

            yield obj

    def __call__(self):
        changed = 0
        not_changed = 0
        logger.info("Changing workflow states to not_started...")

        for nda in self.ndas:
            state = get_wf_state_id(nda)

            # assessments with saved data stay in their current state
            if hasattr(nda, 'saved_assessment_data'):
                data = nda.saved_assessment_data.last()

                if data:
                    not_changed += 1
                    continue

            if state == 'in_work':
                changed += 1
                # idiomatic lazy logging; %r is equivalent to the former
                # explicit nda.__repr__() call
                logger.info("State changing for %r", nda)
                transition(obj=nda, to_state='not_started')

        logger.info("States changed: %s, Not changed: %s",
                    changed, not_changed)

        return "Done"
class TranslateIndicators(BrowserView):
    """ Seed the translation storage with non-English indicator labels

    Each detected non-English label is registered (with an empty
    translation) in the language's store, if a store exists.
    """

    def __call__(self):
        labels = get_indicator_labels().values()
        site = portal.get()
        storage = ITranslationsStorage(site)
        count = 0

        for label in labels:
            lang = get_detected_lang(label)

            if (not lang) or (lang == 'en'):
                continue

            lang = lang.upper()
            langstore = storage.get(lang, None)

            if langstore is None:
                continue

            if label not in langstore:
                langstore[label] = u''
                logger.info('Added %r to translation store for lang %s',
                            label, lang)
                # BUGFIX: was `count = +1`, which always reported 1
                count += 1

        return "Added %s labels" % count
class MigrateTranslationStorage(BrowserView):
    """ One-off migration: wrap every stored translation string in a
    Translation object, marking non-'?'-prefixed texts as approved
    """

    def __call__(self):
        site = portal.get()
        storage = ITranslationsStorage(site)
        count = 0

        for langstore in storage.values():
            for original, translated in langstore.items():
                # BUGFIX: was `count = +1`, which always reported 1
                count += 1

                # already-wrapped entries expose their raw text
                if hasattr(translated, 'text'):
                    translated = translated.text

                translated = Translation(translated, 'original')

                if not translated.text.startswith('?'):
                    translated.approved = True

                langstore[original] = translated

        return "Migrated {} strings".format(count)
| {"/src/wise/msfd/compliance/nationaldescriptors/a8esa.py": ["/src/wise/msfd/compliance/nationaldescriptors/data.py"], "/src/wise/msfd/compliance/nationalsummary/main.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/regionalsummary/introduction.py": ["/src/wise/msfd/compliance/nationalsummary/introduction.py"], "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/nationalsummary/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py"], "/src/wise/msfd/compliance/regionalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationalsummary/pdfexport.py", "/src/wise/msfd/compliance/regionalsummary/introduction.py"], "/src/wise/msfd/compliance/regionaldescriptors/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/utils.py", "/src/wise/msfd/compliance/regionaldescriptors/data.py"], "/src/wise/msfd/compliance/landingpage.py": ["/src/wise/msfd/compliance/assessment.py"], "/src/wise/msfd/__init__.py": ["/src/wise/msfd/patch.py"], "/src/wise/msfd/search/a9.py": ["/src/wise/msfd/__init__.py", "/src/wise/msfd/search/base.py"], "/src/wise/msfd/compliance/nationalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py", "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py", "/src/wise/msfd/compliance/nationalsummary/introduction.py"]} |
42,571 | laszlocseh/wise.msfd | refs/heads/master | /src/wise/msfd/compliance/assessment.py | import logging
from collections import namedtuple
from zope.schema import Text
from plone.api.portal import get_tool
from AccessControl import Unauthorized
from persistent.list import PersistentList
from Products.Five.browser.pagetemplatefile import (PageTemplateFile,
ViewPageTemplateFile)
from wise.msfd.compliance.content import AssessmentData
from wise.msfd.compliance.interfaces import (ICountryDescriptorsFolder,
IEditAssessorsForm,
INationalDescriptorsFolder,
IRegionalDescriptorAssessment,
IRegionalDescriptorRegionsFolder,
IRegionalDescriptorsFolder)
from wise.msfd.compliance.regionaldescriptors.base import BaseRegComplianceView
from wise.msfd.compliance.scoring import (CONCLUSIONS, get_overall_conclusion,
get_range_index, OverallScores)
from wise.msfd.compliance.utils import get_assessors, set_assessors
from wise.msfd.compliance.vocabulary import (REGIONAL_DESCRIPTORS_REGIONS,
SUBREGIONS_TO_REGIONS)
from wise.msfd.gescomponents import get_descriptor # get_descriptor_elements
from z3c.form.button import buttonAndHandler
from z3c.form.field import Fields
from z3c.form.form import Form
from .base import BaseComplianceView
logger = logging.getLogger('wise.msfd')

# This somehow translates the real value in a color, to be able to compress the
# displayed information in the assessment table

# New color table with answer score as keys, color as value
ANSWERS_COLOR_TABLE = {
    '1': 1,  # very good
    '0.75': 2,  # good
    '0.5': 4,  # poor
    '0.25': 5,  # very poor
    '0': 3,  # not reported
    # NOTE(review): '0.25' and '0.250' are deliberately distinct string
    # keys with different meanings -- confirm against the answer sources
    '0.250': 6,  # not clear
    '/': 7  # not relevant
}

# score_value as key, color as value
CONCLUSION_COLOR_TABLE = {
    5: 0,  # not relevant
    4: 1,  # very good
    3: 2,  # good
    2: 4,  # poor
    1: 5,  # very poor
    0: 3  # not reported
}

# change-since-2012 value as key, color as value
CHANGE_COLOR_TABLE = {
    -2: 5,
    -1: 4,
    0: 6,
    1: 3,
    2: 2,
    3: 1,
}

# TODO which question type belongs to which phase?
PHASES = {
    'phase1': ('adequacy', 'consistency'),
    'phase2': ('coherence', ),
    'phase3': (),
}

# mapping of title: field_name
additional_fields = {
    'Summary': u'Summary',
}

# (field name suffix, label) pairs rendered in the summary forms;
# national flavour
summary_fields = (
    ('assessment_summary', u'Assessment summary'),
    ('progress', u'Progress since 2012'),
    ('recommendations', u'Recommendations for Member State'),
)

# regional flavour uses a different 'Recommendations' label
reg_summary_fields = (
    ('assessment_summary', u'Assessment summary'),
    ('progress', u'Progress since 2012'),
    ('recommendations', u'Recommendations'),
)

# TODO not used
progress_fields = (
    ('assessment_summary', u'Assessment summary'),
    ('progress', u'Progress since 2012'),
    ('recommendations', u'Recommendations for Member State'),
)
class EditAssessorsForm(Form, BaseComplianceView):
    """ Assessment settings form, used to edit the assessors list

    /compliance-module/national-descriptors-assessments/edit-assessors
    """

    ignoreContext = True
    name = 'edit-assessors'
    section = 'compliance-admin'
    title = u'Edit assessed by'
    fields = Fields(IEditAssessorsForm)
    template = ViewPageTemplateFile('pt/edit-assessors.pt')

    @buttonAndHandler(u'Save', name='Save')
    def handle_save(self, action):
        # renamed from the misspelled 'hande_save' for consistency with
        # EditAssessmentSummaryForm; the handler is registered through the
        # decorator, so the method name is not part of the public interface
        data, errors = self.extractData()

        if not errors:
            value = data.get('assessed_by', '')
            # the textarea holds one assessor per line; store them
            # comma-separated
            value = ', '.join(value.split('\r\n'))
            set_assessors(value)

    def updateWidgets(self):
        """ Prefill the 'assessed_by' field from the stored annotation """
        super(EditAssessorsForm, self).updateWidgets()

        assessed_by_field = self.fields['assessed_by'].field
        default = assessed_by_field.default
        annot_assessors = get_assessors()
        annot_assessors = '\r\n'.join(annot_assessors.split(', '))

        if annot_assessors and default != annot_assessors:
            assessed_by_field.default = annot_assessors
            self.update()
class ViewAssessmentSummaryForm(BaseComplianceView):
    """ Render the assessment summary, progress assessment
    and recommendations for member state for view
    """

    template = ViewPageTemplateFile("pt/assessment-summary-form-view.pt")

    @property
    def summary_fields(self):
        return summary_fields

    @property
    def summary_data(self):
        """ Pair each field title with its saved text (None if unset) """
        saved_data = self.context.saved_assessment_data.last()

        return [
            (title,
             saved_data.get('{}_{}'.format(self.article, name), None))
            for name, title in self.summary_fields
        ]

    def __call__(self):
        return self.template(fields=self.summary_data)
class ViewAssessmentSummaryFormRegional(BaseRegComplianceView,
                                        ViewAssessmentSummaryForm):
    """ Render the assessment summary, progress assessment
    and recommendations for member state for view

    Wrapper class for regional descriptors
    """

    @property
    def summary_fields(self):
        # regional flavour only swaps the field/label set
        return reg_summary_fields
class EditAssessmentSummaryForm(Form, BaseComplianceView):
    """ Edit the assessment summary

    Fields are: summary, recommendations, progress assessment
    """

    # TODO unused
    title = u"Edit progress assessment"
    template = ViewPageTemplateFile("pt/inline-form.pt")
    _saved = False

    @property
    def fields(self):
        """ Build one Text field per progress field, prefilled from the
        last saved assessment data """
        saved_data = self.context.saved_assessment_data.last()
        text_fields = []

        for name, title in progress_fields:
            field_name = '{}_{}'.format(self.article, name)
            text_fields.append(
                Text(title=title,
                     __name__=field_name,
                     required=False,
                     default=saved_data.get(field_name, None))
            )

        return Fields(*text_fields)

    @buttonAndHandler(u'Save', name='save')
    def handle_save(self, action):
        if self.read_only_access:
            raise Unauthorized

        data, errors = self.extractData()

        if errors:
            return

        context = self.context

        # BBB code, useful in development
        if not hasattr(context, 'saved_assessment_data') or \
                not isinstance(context.saved_assessment_data, PersistentList):
            context.saved_assessment_data = AssessmentData()

        saved_data = self.context.saved_assessment_data.last()

        if not saved_data:
            self.context.saved_assessment_data.append(data)
        else:
            saved_data.update(data)

        self.context.saved_assessment_data._p_changed = True

    def nextURL(self):
        return self.context.absolute_url()

    @property
    def action(self):
        return self.context.absolute_url() + '/@@edit-assessment-summary'

    def render(self):
        # POST: run the normal rendering (form processing), then bounce
        # back to the context; GET: render the inline form
        if self.request.method == 'POST':
            Form.render(self)

            return self.request.response.redirect(self.nextURL())

        return Form.render(self)
class EditAssessmentDataFormMain(Form):
    """ Shared machinery for the assessment-data edit forms """

    @property
    def criterias(self):
        return self.descriptor_obj.sorted_criterions()      # criterions

    @property
    def help(self):
        return render_assessment_help(self.criterias, self.descriptor)

    def is_disabled(self, question):
        """ Returns True if question is not editable
        """
        if self.read_only_access:
            return True

        # Is this still needed?
        state, _ = self.current_phase

        return question.klass not in PHASES.get(state, ())

    @property
    def fields(self):
        if not self.subforms:
            self.subforms = self.get_subforms()

        collected = []

        for subform in self.subforms:
            collected.extend(subform.fields._data_values)

        return Fields(*collected)

    @property  # TODO: memoize
    def descriptor_obj(self):
        return get_descriptor(self.descriptor)

    # TODO: use memoize
    @property
    def questions(self):
        return self._questions[self.article]
# One cell of the rendered assessment-help table: its text plus the
# number of table rows it spans (for merged element/method cells)
Cell = namedtuple('Cell', ['text', 'rowspan'])

help_template = PageTemplateFile('pt/assessment-question-help.pt')
def render_assessment_help(criterias, descriptor):
    """ Render the help table for the assessment questions.

    Builds one row per criterion; element and methodological-standard
    cells shared by several criteria are emitted once with a rowspan.

    :param criterias: criterion objects (with .elements and
        .methodological_standard)
    :param descriptor: descriptor used to label primary/secondary
    :return: rendered page template
    """
    elements = []
    methods = []

    for c in criterias:
        elements.extend([e.id for e in c.elements])
        methods.append(c.methodological_standard.id)

    # count occurrences in a single pass (previously used quadratic
    # list.count() calls per item)
    element_count = {}

    for k in elements:
        element_count[k] = element_count.get(k, 0) + 1

    method_count = {}

    for k in methods:
        method_count[k] = method_count.get(k, 0) + 1

    rows = []
    seen = []

    for c in criterias:
        row = []

        if not c.elements:
            logger.info("Skipping %r from help rendering", c)
            continue

        cel = c.elements[0]     # TODO: also support multiple elements

        if cel.id not in seen:
            seen.append(cel.id)
            rowspan = element_count[cel.id]
            cell = Cell(cel.definition, rowspan)
            row.append(cell)

        prim_label = c.is_primary(descriptor) and 'primary' or 'secondary'
        cdef = u"<strong>{} ({})</strong><br/>{}".format(
            c.id, prim_label, c.definition
        )
        cell = Cell(cdef, 1)
        row.append(cell)

        meth = c.methodological_standard

        if meth.id not in seen:
            seen.append(meth.id)
            rowspan = method_count[meth.id]
            cell = Cell(meth.definition, rowspan)
            row.append(cell)

        rows.append(row)

    return help_template(rows=rows)
class AssessmentDataMixin(object):
    """ Helper class for easier access to the assesment_data for
    national and regional descriptor assessments

    Currently used to get the coherence score from regional descriptors

    TODO: implement a method to get the adequacy and consistency scores
    from national descriptors assessment
    """

    @property
    def _nat_desc_folder(self):
        # The (assumed single) NationalDescriptorsFolder in the site;
        # brains[0] raises IndexError when none is indexed.
        portal_catalog = get_tool('portal_catalog')
        brains = portal_catalog.searchResults(
            object_provides=INationalDescriptorsFolder.__identifier__
        )
        nat_desc_folder = brains[0].getObject()

        return nat_desc_folder

    @property
    def _nat_desc_country_folders(self):
        # Country folders contained in the national descriptors folder
        return self.filter_contentvalues_by_iface(
            self._nat_desc_folder, ICountryDescriptorsFolder
        )

    @property
    def _reg_desc_folder(self):
        # The (assumed single) RegionalDescriptorsFolder in the site
        portal_catalog = get_tool('portal_catalog')
        brains = portal_catalog.searchResults(
            object_provides=IRegionalDescriptorsFolder.__identifier__
        )
        nat_desc_folder = brains[0].getObject()

        return nat_desc_folder

    @property
    def _reg_desc_region_folders(self):
        # Region folders contained in the regional descriptors folder
        return self.filter_contentvalues_by_iface(
            self._reg_desc_folder, IRegionalDescriptorRegionsFolder
        )

    @property
    def rdas(self):
        """Yield every regional descriptor assessment object in the site."""
        catalog = get_tool('portal_catalog')
        brains = catalog.searchResults(
            portal_type='wise.msfd.regionaldescriptorassessment',
        )

        for brain in brains:
            obj = brain.getObject()

            if not IRegionalDescriptorAssessment.providedBy(obj):
                continue

            yield obj

    def get_color_for_score(self, score_value):
        # Map a score index onto its display color
        return CONCLUSION_COLOR_TABLE[score_value]

    def get_conclusion(self, score_value):
        # CONCLUSIONS is indexed in reverse relative to the score scale
        # -- NOTE(review): confirm ordering against the CONCLUSIONS constant
        concl = list(reversed(CONCLUSIONS))[score_value]

        return concl

    def _get_assessment_data(self, article_folder):
        """Return the latest saved assessment data for the folder.

        Returns {} when the folder (possibly None) has no saved data.
        NOTE(review): ``.last()`` might itself return None -- callers
        iterate ``.items()`` on the result; confirm upstream guarantees.
        """
        if not hasattr(article_folder, 'saved_assessment_data'):
            return {}

        return article_folder.saved_assessment_data.last()

    def get_main_region(self, region_code):
        """ Returns the main region (used in regional descriptors)
        for a sub region (used in national descriptors)
        """
        for region in REGIONAL_DESCRIPTORS_REGIONS:
            if not region.is_main:
                continue

            if region_code in region.subregions:
                return region.code

        # not found as a subregion: assume already a main region code
        return region_code

    def get_coherence_data(self, region_code, descriptor, article):
        """ For year 2018

        :return: {'color': 5, 'score': 0, 'max_score': 0,
            'conclusion': (1, 'Very poor')
        }
        """
        article_folder = None

        # locate the regional assessment folder matching
        # descriptor / main-region / article; id layout is
        # region-folder / descriptor-folder / assessment (acquisition)
        for obj in self.rdas:
            descr = obj.aq_parent.id.upper()

            if descr != descriptor:
                continue

            region = obj.aq_parent.aq_parent.id.upper()

            if region != self.get_main_region(region_code):
                continue

            art = obj.title

            if art != article:
                continue

            article_folder = obj
            break

        assess_data = self._get_assessment_data(article_folder)

        res = {
            'score': 0,
            'max_score': 0,
            'color': 0,
            'conclusion': (0, 'Not reported')
        }

        # accumulate weighted scores over every relevant '*_Score' entry
        for k, score in assess_data.items():
            if '_Score' not in k:
                continue

            if not score:
                continue

            is_not_relevant = getattr(score, 'is_not_relevant', False)
            weighted_score = getattr(score, 'weighted_score', 0)
            max_weighted_score = getattr(score, 'max_weighted_score', 0)

            if not is_not_relevant:
                res['score'] += weighted_score
                res['max_score'] += max_weighted_score

        # percentage of achieved vs. achievable score; 0 when nothing scored
        score_percent = int(round(res['max_score'] and (res['score'] * 100)
                                  / res['max_score'] or 0))
        score_val = get_range_index(score_percent)

        res['color'] = self.get_color_for_score(score_val)
        res['conclusion'] = (score_val, self.get_conclusion(score_val))

        return res

    def get_reg_assessments_data_2012(self, article=None, region_code=None,
                                      descriptor_code=None):
        """ Get the regional descriptor assessment 2012 data """
        # local import avoids a circular dependency at module load time
        from .regionaldescriptors.assessment import ASSESSMENTS_2012

        if not article:
            article = self.article

        if not region_code:
            region_code = self.country_region_code

        if not descriptor_code:
            descriptor_code = self.descriptor_obj.id

        res = []

        for x in ASSESSMENTS_2012:
            if x.region.strip() != region_code:
                continue

            # compare only the main descriptor code (e.g. 'D1' of 'D1.1')
            if x.descriptor.strip() != descriptor_code.split('.')[0]:
                continue

            art = x.article.replace(" ", "")

            if not art.startswith(article):
                continue

            res.append(x)

        # best overall score first
        sorted_res = sorted(
            res, key=lambda i: int(i.overall_score), reverse=True
        )

        return sorted_res
42,572 | laszlocseh/wise.msfd | refs/heads/master | /src/wise/msfd/compliance/regionalsummary/reportdata.py | # -*- coding: utf-8 -*-
import logging
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile
from Products.statusmessages.interfaces import IStatusMessage
from wise.msfd import sql, db
from wise.msfd.gescomponents import get_all_descriptors
from wise.msfd.labels import get_label
from wise.msfd.translation import get_translated, retrieve_translation
from wise.msfd.utils import (ItemList, TemplateMixin, db_objects_to_dict,
fixedorder_sortkey, timeit)
from ..regionaldescriptors.assessment import ASSESSMENTS_2012
from .base import BaseRegSummaryView
logger = logging.getLogger('wise.msfd')
#: Registry of report section classes, in render order.
SECTIONS = []


def regionalsection(klass):
    """Class decorator: register *klass* as a regional report section.

    BUG FIX: the decorator previously returned None implicitly, which
    bound every decorated class name to None at module level. It now
    returns the class, as a class decorator must.
    """
    SECTIONS.append(klass)

    return klass
class RegionalDescriptorsSimpleTable(BaseRegSummaryView):
    """Base view rendering a simple titled table.

    Subclasses customize three hooks:

    * ``title`` -- the table caption string
    * ``get_table_headers`` -- list of strings for the header (first) row
    * ``setup_data`` -- list of rows making up the table body
    """

    template = ViewPageTemplateFile("pt/simple-table.pt")
    title = ''

    def setup_data(self):
        # default: an empty table body; subclasses override
        return []

    def get_table_headers(self):
        # default: no header row; subclasses override
        return []

    def __call__(self):
        return self.template(
            title=self.title,
            data=self.setup_data(),
            headers=self.get_table_headers(),
        )
@regionalsection
class Article11CoverageOfActivities(RegionalDescriptorsSimpleTable):
    """Table: per-country coverage of activities by monitoring programmes,
    built from the 2012 MSFD Article 12 overview DB table."""

    features_table = sql.t_MSFD_12_8cOverview
    title = 'Coverage of activities by monitoring programmes'

    @property
    @db.use_db_session('2012')
    def features(self):
        """Unique 'Features & Characteristics' values in the table."""
        table = self.features_table
        features = db.get_unique_from_table(
            table, 'Features & Characteristics'
        )

        return features

    @db.use_db_session('2012')
    def get_db_data(self):
        """Select the needed columns, restricted to this region's subregions."""
        table = self.features_table
        columns_needed = (
            'MemberState', 'Marine region/subregion',
            'Features & Characteristics', 'Found relevant by MS?',
            'Reported by MS?'
        )
        # column names contain spaces/special chars, so getattr on table.c
        columns = [
            getattr(table.c, c)
            for c in columns_needed
        ]
        conditions = [
            getattr(table.c, 'Marine region/subregion').in_(
                self._region_folder._subregions
            ),
        ]

        _, data = db.get_all_specific_columns(
            columns,
            *conditions
        )

        return data

    def get_table_headers(self):
        # first column is the feature label, then one column per country
        countries = [x[1] for x in self.available_countries]

        return ['Activities'] + countries

    def setup_data(self):
        """One row per feature: (label, per-country 'Reported by MS?' values).

        Rows where no country reported anything are dropped.
        """
        db_data = self.get_db_data()
        rows = []

        for feature in self.features:
            values = []

            for country_id, country_name in self.available_countries:
                # distinct 'Reported by MS?' answers for this feature/country
                reported_for_country = set([
                    getattr(r, 'Reported by MS?')
                    for r in db_data
                    if (getattr(r, 'Features & Characteristics')
                        .strip() == feature
                        and r.MemberState.strip() == country_id)
                ])
                values.append("; ".join(reported_for_country))

            feature_label = feature  # get_label(feature, None)

            if any(values):
                rows.append((feature_label, values))

        return rows
@regionalsection
class PressuresActivities(RegionalDescriptorsSimpleTable):
    """Table: pressures and their associated activities per country,
    built from the 2012 MSFD Article 8b pressures DB table."""

    pressures_table = sql.t_MSFD_8b_8bPressures
    title = 'Pressures and associated activities affecting the marine waters'

    @property
    @db.use_db_session('2012')
    def pressures(self):
        """Unique 'Pressure' values in the table."""
        table = self.pressures_table
        pressures = db.get_unique_from_table(table, 'Pressure')

        return pressures

    @db.use_db_session('2012')
    def get_db_data(self):
        """Select pressure/activity rows limited to this region's subregions,
        excluding activities flagged 'NotReported'."""
        table = self.pressures_table
        columns_needed = ('MemberState', 'Marine region/subregion',
                          'Pressure', 'Activity')
        columns = [
            getattr(table.c, c)
            for c in columns_needed
        ]
        conditions = [
            getattr(table.c, 'Marine region/subregion').in_(
                self._region_folder._subregions
            ),
            table.c.Activity != 'NotReported'
        ]

        _, data = db.get_all_specific_columns(
            columns,
            *conditions
        )

        return data

    def get_table_headers(self):
        # first column is the pressure label, then one column per country
        countries = [x[1] for x in self.available_countries]

        return ['Pressures'] + countries

    def setup_data(self):
        """One row per pressure: (label, per-country activity lists).

        Rows where no country reported any activity are dropped.
        """
        db_data = self.get_db_data()
        rows = []

        for pressure in self.pressures:
            values = []

            for country_id, country_name in self.available_countries:
                activities_for_country = [
                    r.Activity
                    for r in db_data
                    if (r.Pressure.strip() == pressure
                        and r.MemberState.strip() == country_id)
                ]
                values.append("; ".join(activities_for_country))

            pressure_label = pressure  # get_label(pressure, None)

            if any(values):
                rows.append((pressure_label, values))

        return rows
@regionalsection
class OverallConclusion2012(RegionalDescriptorsSimpleTable):
    """Table: descriptor-level overall conclusions from the 2012
    regional assessments (one conclusion column per article)."""

    title = "Overall conclusion - descriptor-level"
    # (article id as it appears in ASSESSMENTS_2012, column description)
    articles = [
        ('Art9', 'Article 9: Determination of GES'),
        ('Art8', 'Article 8: Initial assessment'),
        ('Art10', 'Article 10: Environmental targets'),
    ]

    def get_assessment_data(self, region, article, descriptor):
        """Return the ASSESSMENTS_2012 row matching region, article and
        descriptor, or None (implicitly) when nothing matches."""
        data = ASSESSMENTS_2012

        for row in data:
            if row.region.strip() != region:
                continue

            if row.descriptor.strip() != descriptor:
                continue

            if article not in row.article.replace(' ', ''):
                continue

            return row

    def setup_data(self):
        # one table row per descriptor with any conclusion to show
        data = []
        descriptors = get_all_descriptors()

        for desc_code, desc_title in descriptors:
            conclusions = []

            for art_id, art_title in self.articles:
                concl = ''
                # NOTE(review): desc_code may carry a sub-descriptor suffix
                # (e.g. 'D1.1') while ASSESSMENTS_2012 rows may use the base
                # code -- confirm the exact-match comparison is intended.
                assess_data = self.get_assessment_data(
                    self.region_code, art_id, desc_code
                )

                if assess_data:
                    concl = assess_data.conclusion

                conclusions.append(concl)

            if any(conclusions):
                data.append((desc_title, conclusions))

        return data

    def get_table_headers(self):
        h = ['Article'] + [a[0] for a in self.articles]

        return h
class RegionalSummaryView(BaseRegSummaryView):
    """Top-level browser view assembling the regional summary report."""

    help_text = "HELP TEXT"
    template = ViewPageTemplateFile('pt/report-data.pt')
    year = "2012"
    render_header = True

    # @cache(get_reportdata_key, dependencies=['translation'])
    @timeit
    def render_reportdata(self):
        """Build the header plus one table view per registered SECTION
        and render them through the report template."""
        report_header = self.report_header_template(
            title="Regional summary report: {}".format(
                self.region_name,
            ),
            countries=", ".join([x[1] for x in self.available_countries])
        )
        # trans_edit_html = self.translate_view()()

        self.tables = [
            report_header,
            # trans_edit_html,
        ]

        # instantiate each registered section view (rendered lazily
        # by the template)
        for klass in SECTIONS:
            self.tables.append(klass(self, self.request))

        template = self.template

        return template(tables=self.tables)

    def __call__(self):
        # 'edit-data' button: redirect to the edit form instead of rendering
        if 'edit-data' in self.request.form:
            url = "{}/edit".format(self._country_folder.absolute_url())

            return self.request.response.redirect(url)

        report_html = self.render_reportdata()
        self.report_html = report_html

        # 'translate' button: kick off async auto-translation of all
        # translatable values, then render normally with a status message
        if 'translate' in self.request.form:
            for value in self._translatable_values:
                retrieve_translation(self.country_code, value)

            messages = IStatusMessage(self.request)
            messages.add(u"Auto-translation initiated, please refresh "
                         u"in a couple of minutes", type=u"info")

        # wrapped in a local function only so @timeit can measure it
        @timeit
        def render_html():
            return self.index()

        return render_html()
| {"/src/wise/msfd/compliance/nationaldescriptors/a8esa.py": ["/src/wise/msfd/compliance/nationaldescriptors/data.py"], "/src/wise/msfd/compliance/nationalsummary/main.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/regionalsummary/introduction.py": ["/src/wise/msfd/compliance/nationalsummary/introduction.py"], "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/nationalsummary/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py"], "/src/wise/msfd/compliance/regionalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationalsummary/pdfexport.py", "/src/wise/msfd/compliance/regionalsummary/introduction.py"], "/src/wise/msfd/compliance/regionaldescriptors/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/utils.py", "/src/wise/msfd/compliance/regionaldescriptors/data.py"], "/src/wise/msfd/compliance/landingpage.py": ["/src/wise/msfd/compliance/assessment.py"], "/src/wise/msfd/__init__.py": ["/src/wise/msfd/patch.py"], "/src/wise/msfd/search/a9.py": ["/src/wise/msfd/__init__.py", "/src/wise/msfd/search/base.py"], "/src/wise/msfd/compliance/nationalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py", "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py", "/src/wise/msfd/compliance/nationalsummary/introduction.py"]} |
42,573 | laszlocseh/wise.msfd | refs/heads/master | /src/wise/msfd/data.py | import csv
import logging
import os
import tempfile
from collections import defaultdict
from datetime import datetime
import requests
from pkg_resources import resource_filename
import sparql
from eea.cache import cache
from wise.msfd import db, sql, sql_extra
from .utils import current_date, timeit
logger = logging.getLogger('wise.msfd')
# Report XML filenames that cannot be resolved through the 2012 import DB
# tables. Keyed by country code; values are (region, article, filename)
# tuples, where filename may itself be a tuple when a country reported
# several files for the same region/article.
FILENAMES_MISSING_DB_ALL = {
    'PL': (
        ('BAL', 'Art8', 'MSFD8aFeatures_20150225_135158.xml'),
        ('BAL', 'Art9', 'MSFD9GES_20150130_111719.xml'),
        ('BAL', 'Art10', 'MSFD10TI_20160226_150132.xml')
    ),
    'MT': (
        ('MIC', 'Art8', 'MSFD8aFeatures_20131105_121510.xml'),
        ('MIC', 'Art9', 'MSFD9GES_20131105_121546.xml'),
        ('MIC', 'Art10', 'MSFD10TI_20140502_095401.xml')
    ),
    'ES': (
        ('ABI', 'Art8', ('ABIES-NOR_MSFD8aFeatures_20130430.xml',
                         'ABIES-SUD_MSFD8aFeatures_20130513.xml')),
        ('ABI', 'Art9', ('ABIES-NOR_MSFD9GES_20121210.xml',
                         'ABIES-SUD_MSFD9GES_20121210.xml')),
        ('ABI', 'Art10', ('ABIES-NOR_MSFD10TI.xml',
                          'ABIES-SUD_MSFD10TI.xml')),
        ('AMA', 'Art8', 'AMAES_MSFD8aFeatures_20131004.xml'),
        ('AMA', 'Art9', 'AMAES_MSFD9GES_20121210.xml'),
        ('AMA', 'Art10', 'AMAES_MSFD10TI_20130412.xml'),
        ('MWE', 'Art8', ('MWEES-ESAL_MSFD8aFeatures_20130517.xml',
                         'MWEES-LEBA_MSFD8aFeatures_20130624.xml')),
        ('MWE', 'Art9', ('MWEES-ESAL_MSFD9GES_20121210.xml',
                         'MWEES-LEBA_MSFD9GES_20121210.xml')),
        ('MWE', 'Art10', ('MWEES-ESAL_MSFD10TI.xml',
                          'MWEES-LEBA_MSFD10TI.xml')),
    ),
    'HR': (
        ('MAD', 'Art8', 'MADHR_MSFD8aFeatures_20130610.xml'),
        ('MAD', 'Art9', 'MADHR_MSFD9GES_20141014.xml'),
        ('MAD', 'Art10', 'MADHR_MSFD10TI_20141014.xml'),
    ),
}

# Same structure as FILENAMES_MISSING_DB_ALL, but specific to the
# Article 8b pressure reports.
FILENAMES_MISSING_DB_8b = {
    'ES': (
        ('ABI', 'Art8', ('ABIES-NOR_MSFD8bPressures_20130516.xml',
                         'ABIES-SUD_MSFD8bPressures_20130722.xml')),
        ('AMA', 'Art8', 'AMAES_MSFD8bPressures_20121015.xml'),
        ('MWE', 'Art8', ('MWEES-ESAL_MSFD8bPressures_20130726.xml',
                         'MWEES-LEBA_MSFD8bPressures_20121015.xml')),
    ),
    'MT': (
        ('MIC', 'Art8', 'MSFD8bPressures_20140826_082900.xml'),
    ),
    'HR': (
        ('MAD', 'Art8', 'MADHR_MSFD8bPressures_20130610.xml'),
    )
}
def _extract_pdf_assessments():
    """Read the bundled data/pdf_assessments.csv (tab-separated,
    '|'-quoted) and return its rows as a list of lists."""
    csv_f = resource_filename('wise.msfd',
                              'data/pdf_assessments.csv')

    with open(csv_f, 'rb') as csvfile:
        reader = csv.reader(csvfile, delimiter='\t', quotechar='|')
        data = [row for row in reader]

    return data
@db.use_db_session('2012')
def all_regions():
    """ Return a list of region ids
    """
    mapper = sql_extra.MSFD4GeographicalAreaID
    regions = db.get_unique_from_mapper(mapper, 'RegionSubRegions')

    return regions
@db.use_db_session('2012')
def countries_in_region(regionid):
    """ Return a list of (<countryid>, <marineunitids>) pairs
    """
    mapper = sql_extra.MSFD4GeographicalAreaID
    condition = mapper.RegionSubRegions == regionid

    return db.get_unique_from_mapper(mapper, 'MemberState', condition)
@db.use_db_session('2012')
def muids_by_country(regions=None):
    """Group MarineUnitIDs by MemberState.

    :param regions: optional iterable of region codes; when given, only
        records in those regions are included (used in regional
        descriptors A9 2012)
    :return: plain dict {member_state: [marine_unit_id, ...]}
    """
    mapper = sql_extra.MSFD4GeographicalAreaID
    _count, records = db.get_all_records(mapper)

    grouped = defaultdict(list)

    for record in records:
        # filter MUIDs by region, used in regional descriptors A9 2012
        if regions and record.RegionSubRegions not in regions:
            continue

        grouped[record.MemberState].append(record.MarineUnitID)

    return dict(grouped)
@db.use_db_session('2012')
def _get_report_filename_art10_2012(country, region, article, descriptor):
    """Return the imported Article 10 report filename for country/region
    from the 2012 DB; raises ValueError unless exactly one match exists.
    """
    mc = sql.MSFD10Import
    count, item = db.get_item_by_conditions(
        mc,
        'MSFD10_Import_ID',
        mc.MSFD10_Import_ReportingCountry == country,
        mc.MSFD10_Import_ReportingRegion == region
    )

    # TODO: analyse cases when it returns more then one file
    if count != 1:
        # BUG FIX: the format string has four %s placeholders but ``count``
        # was missing from the argument list, so the logging call itself
        # failed with "not enough arguments for format string".
        logger.warning("Could not find precise report (count %s) "
                       "filename for %s %s %s",
                       count, country, region, article)
        raise ValueError
        # return None

    return item.MSFD10_Import_FileName
@db.use_db_session('2012')
def _get_report_filename_art8esa_2012(country, region, article):
    """Look up the imported Article 8 ESA report filename (2012)."""
    mapper = sql.MSFD8cImport
    count, record = db.get_item_by_conditions(
        mapper,
        'MSFD8c_Import_ID',
        mapper.MSFD8c_Import_ReportingCountry == country,
        mapper.MSFD8c_Import_ReportingRegion == region
    )

    if count != 1:
        logger.warning("Could not find report filename for %s %s %s",
                       country, region, article,)

        return None

    return record.MSFD8c_Import_FileName
@db.use_db_session('2012')
def _get_report_filename_art3_4_2012_db(country, region, article, descriptor):
    """ This method is not used anymore, see _get_report_filename_art3_4_2012
    """
    mapper = sql.MSFD4Import
    count, record = db.get_item_by_conditions(
        mapper,
        'MSFD4_Import_ID',
        mapper.MSFD4_Import_ReportingCountry == country,
        # mc.MSFD8c_Import_ReportingRegion == region
    )

    if count != 1:
        logger.warning("Could not find report filename for %s %s %s",
                       country, region, article,)

        return None

    return record.MSFD4_Import_FileName
@db.use_db_session('2012')
def _get_report_filename_art7_2012_db(country, region, article, descriptor):
    """Look up the most recently imported Article 7 competent-authority
    filename for the country (2012 DB)."""
    mapper = sql_extra.MSCompetentAuthority
    count, record = db.get_item_by_conditions(
        mapper,
        'Import_Time',
        mapper.C_CD == country,
        reverse=True
    )

    if count < 1:
        logger.warning("Could not find report filename for %s %s %s",
                       country, region, article,)

        return None

    return record.Import_FileName
@db.use_db_session('2012')
def _get_report_filename_art9_2012(country, region, article, descriptor):
    """Look up the imported Article 9 report filename (2012)."""
    mapper = sql.MSFD9Import
    count, record = db.get_item_by_conditions(
        mapper,
        'MSFD9_Import_ID',
        mapper.MSFD9_Import_ReportingCountry == country,
        mapper.MSFD9_Import_ReportingRegion == region
    )

    # TODO: analyse cases when it returns more then one file
    if count != 1:
        logger.warning("Could not find report filename for %s %s %s",
                       country, region, article,)

        return None

    return record.MSFD9_Import_FileName
def _get_report_filename_art8_2012(country, region, article, descriptor):
    """Look up the imported Article 8 report filename (2012).

    Biodiversity-type descriptors (D1/D4/D6) live in the MSFD8a import
    table; all others in MSFD8b.
    """
    main_descriptor = descriptor.split('.')[0]
    base = 'MSFD8a' if main_descriptor in ['D1', 'D4', 'D6'] else 'MSFD8b'

    mapper = getattr(sql, base + 'Import')
    countrycol = getattr(mapper, base + '_Import_ReportingCountry')
    regcol = getattr(mapper, base + '_Import_ReportingRegion')

    count, record = db.get_item_by_conditions(
        mapper,
        base + '_Import_ID',
        countrycol == country,
        regcol == region
    )

    # TODO: analyse cases when it returns more then one file
    if count != 1:
        logger.warning("Could not find report filename for %s %s %s",
                       country, region, article,)

        return None

    return getattr(record, base + '_Import_FileName')
def get_report_filename(report_version,
                        country, region, article, descriptor):
    """ Return the filename for imported information

    :param report_version: report "version" year: 2012 or 2018
    :param country: country code, like: 'LV'
    :param region: region code, like: 'ANS'
    :param article: article code, like: 'art9'
    :param descriptor: descriptor code, like: 'D5'
    """
    d = descriptor.split('.')[0]

    # some reports are missing from the import DB; use the hardcoded
    # filename tables for those countries first
    if article != 'Art8':
        filenames = FILENAMES_MISSING_DB_ALL
    elif d in ['D1', 'D4', 'D6']:
        filenames = FILENAMES_MISSING_DB_ALL
    else:
        filenames = FILENAMES_MISSING_DB_8b

    if country in filenames:
        filename = [
            x[2]
            for x in filenames[country]
            if x[0] == region and x[1] == article
        ]

        # ROBUSTNESS FIX: previously ``filename[0]`` raised IndexError
        # when the country was listed but had no entry for this
        # region/article; now we fall through to the DB lookup instead.
        if filename:
            return filename[0]

    # 'Art8': '8b',        # TODO: this needs to be redone for descriptor
    mapping = {
        '2012': {
            'Art3': _get_report_filename_art3_4_2012,
            'Art4': _get_report_filename_art3_4_2012,
            'Art7': _get_report_filename_art7_2012,
            'Art8esa': _get_report_filename_art8esa_2012,
            'Art8': _get_report_filename_art8_2012,
            'Art9': _get_report_filename_art9_2012,
            'Art10': _get_report_filename_art10_2012,
        },
        '2018': {
            'Art7': _get_report_filename_art7_2018,
            'Art3': _get_report_filename_art3_4_2018,
            'Art4': _get_report_filename_art3_4_2018,
        }
    }

    handler = mapping[report_version][article]

    return handler(country, region, article, descriptor)
@cache(lambda func, filename: func.__name__ + filename + current_date())
@timeit
def get_report_file_url(filename):
    """ Retrieve the CDR url based on query in ContentRegistry

    Queries the Eionet SPARQL endpoint for the newest file matching
    *filename*; asserts exactly one unambiguous match.
    """
    if 'http' in filename:
        # already a url

        return filename

    # q = """
    # PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
    # PREFIX cr: <http://cr.eionet.europa.eu/ontologies/contreg.rdf#>
    # PREFIX dc: <http://purl.org/dc/dcmitype/>
    # PREFIX dcterms: <http://purl.org/dc/terms/>
    #
    # SELECT ?file
    # WHERE {
    # ?file a dc:Dataset .
    # ?file dcterms:date ?date .
    # FILTER regex(str(?file), '%s')
    # }
    # ORDER BY DESC(?date)
    # LIMIT 1""" % filename

    q = """
PREFIX cr: <http://cr.eionet.europa.eu/ontologies/contreg.rdf#>
PREFIX terms: <http://purl.org/dc/terms/>

SELECT ?file
WHERE {
?file terms:date ?date .
?file cr:mediaType 'text/xml'.
FILTER regex(str(?file), '/%s')
}
ORDER BY DESC(?date)
LIMIT 1""" % filename

    service = sparql.Service('https://cr.eionet.europa.eu/sparql')
    logger.info("Getting filename with SPARQL: %s", filename)

    try:
        req = service.query(q)
        rows = req.fetchall()

        urls = []

        for row in rows:
            url = row[0].value
            splitted = url.split('/')

            # keep only exact filename matches (the regex above is loose)
            filename_from_url = splitted[-1]

            if filename == filename_from_url:
                urls.append(url)

        assert len(urls) == 1
    # FIX: narrowed the bare ``except:`` -- it also swallowed
    # SystemExit/KeyboardInterrupt before re-raising.
    except Exception:
        logger.exception('Got an error in querying SPARQL endpoint for '
                         'filename url: %s', filename)
        raise

    logger.info("Got file with url: %s", urls[0])

    return urls[0]
@cache(lambda func, url: func.__name__ + url + current_date())
def get_factsheet_url(url):
    """ Returns the URL for the conversion that gets the "HTML Factsheet"

    Implicitly returns None when the envelope offers no such converter.
    """
    cdr = "http://cdr.eionet.europa.eu/Converters/run_conversion"\
        "?source=remote&file="
    # the conversion service wants the path relative to the CDR host
    base = url.replace('http://cdr.eionet.europa.eu/', '')
    base = base.replace('https://cdr.eionet.europa.eu/', '')

    resp = requests.get(url + '/get_possible_conversions')
    j = resp.json()
    ids = [x
           for x in j['remote_converters']

           if x['description'] == 'HTML Factsheet']

    if ids:
        return '{}{}&conv={}'.format(cdr, base, ids[0]['convert_id'])
@timeit
def get_xml_report_data(filename):
    """Return the XML report body for *filename* (a name or a full URL).

    Uses a local cache directory ($MSFDXML or the system temp dir);
    downloads from CDR on a cache miss and stores the result.
    """
    if not filename:
        return ""

    url = ''

    if 'http' in filename:      # this is a URL, not a filename
        url = filename
        filename = url.rsplit('/', 1)[-1]

    xmldir = os.environ.get("MSFDXML")

    if not xmldir:
        xmldir = tempfile.gettempdir()

    assert '..' not in filename      # need better security?
    fpath = os.path.join(xmldir, filename)
    text = ''

    # PERF FIX: direct existence test instead of scanning the whole
    # directory listing on every call
    if os.path.exists(fpath):
        with open(fpath) as f:
            text = f.read()

    if not text:
        # TODO: handle this problem:
        # https://cr.eionet.europa.eu/factsheet.action?uri=http%3A%2F%2Fcdr.eionet.europa.eu%2Fro%2Feu%2Fmsfd8910%2Fblkro%2Fenvux97qw%2FRO_MSFD10TI_20130430.xml&page1=http%3A%2F%2Fwww.w3.org%2F1999%2F02%2F22-rdf-syntax-ns%23type

        if not url:
            url = get_report_file_url(filename)

        req = requests.get(url)
        text = req.content
        logger.info("Requesting XML file: %s", fpath)

        # NOTE(review): text is bytes here but was read as text above --
        # Python 2 heritage; revisit for Python 3.
        with open(fpath, 'wb') as f:
            f.write(text)
    else:
        logger.info("Using cached XML file: %s", fpath)

    assert text, "Report data could not be fetched %s" % url

    return text
@db.use_db_session('2012')
def country_ges_components(country_code):
    """ Get the assigned ges components for a country

    :return: deduplicated list of 'Descriptors Criterion Indicators' values
    """
    t = sql.t_MSFD_19a_10DescriptiorsCriteriaIndicators
    count, res = db.get_all_records(
        t,
        t.c.MemberState == country_code,
    )

    # rebuild row tuples as dicts keyed by the table's column names
    cols = t.c.keys()
    recs = [
        {
            k: v for k, v in zip(cols, row)
        } for row in res
    ]

    return list(set([c['Descriptors Criterion Indicators'] for c in recs]))
def _get_report_filename_art3_4_2012(country, region, article, descriptor):
    """Article 3/4 filename lookup, 2012 cycle (obligation 608)."""
    schema = 'http://icm.eionet.europa.eu/schemas/dir200856ec/MSFD4Geo_2p0.xsd'
    obligation = '608'

    return __get_report_filename_art3_4(country, region, schema, obligation)
def _get_report_filename_art3_4_2018(country, region, article, descriptor):
    """Article 3/4 filename lookup, 2018 cycle (obligation 760)."""
    schema = 'http://icm.eionet.europa.eu/schemas/dir200856ec/MSFD4Geo_2p0.xsd'
    obligation = '760'

    return __get_report_filename_art3_4(country, region, schema, obligation)
@cache(lambda func, *args: func.__name__ + "_".join(args) + current_date())
@timeit
def __get_report_filename_art3_4(country, region, schema, obligation):
    """ Retrieve from CDR the latest filename for Article 3/4

    Returns '' when no matching file is found.
    """
    q = """
PREFIX cr: <http://cr.eionet.europa.eu/ontologies/contreg.rdf#>
PREFIX terms: <http://purl.org/dc/terms/>
PREFIX schema: <http://rod.eionet.europa.eu/schema.rdf#>
PREFIX core: <http://www.w3.org/2004/02/skos/core#>

SELECT ?file
WHERE {
?file terms:date ?date .
?file cr:mediaType 'text/xml' .
?file terms:isPartOf ?isPartOf .
?file cr:xmlSchema ?schema .
?isPartOf schema:locality ?locality .
?isPartOf schema:obligation ?obligation .
?obligation core:notation ?obligationNr .
?locality core:notation ?notation .
FILTER (?notation = '%s')
FILTER (?obligationNr = '%s')
FILTER (str(?schema) = '%s')
FILTER regex(str(?file), '%s')
}
ORDER BY DESC(?date)
LIMIT 1
""" % (country.upper(), obligation, schema, region.upper())

    service = sparql.Service('https://cr.eionet.europa.eu/sparql')
    filename = ''

    try:
        req = service.query(q)
        rows = req.fetchall()

        if not rows:
            logger.warning("Filename not found for query: %s", q)

            return filename

        url = rows[0][0].value
        splitted = url.split('/')
        filename = splitted[-1]
    # FIX: narrowed the bare ``except:`` -- it also swallowed
    # SystemExit/KeyboardInterrupt before re-raising.
    except Exception:
        logger.exception('Got an error in querying SPARQL endpoint for '
                         'Article 3/4 country: %s', country)
        raise

    return filename
def _get_report_filename_art7_2012(country, region, article, descriptor):
    """ Retrieve from CDR the latest filename
    for Article 7 competent authorities (2012 schema)
    """
    schema = 'http://water.eionet.europa.eu/schemas/dir200856ec/MSCA_1p0.xsd'

    return __get_report_filename_art7(country, schema)
def _get_report_filename_art7_2018(country, region, article, descriptor):
    """Article 7 competent-authorities filename lookup, 2018 schema."""
    schema = 'http://dd.eionet.europa.eu/schemas/MSFD/MSFDCA_1p0.xsd'

    return __get_report_filename_art7(country, schema)
@cache(lambda func, *args: func.__name__ + "".join(args) + current_date())
@timeit
def __get_report_filename_art7(country, schema):
    """ Retrieve from CDR the latest filename
    for Article 7 competent authorities

    Returns '' when no matching file is found.
    """
    q = """
PREFIX cr: <http://cr.eionet.europa.eu/ontologies/contreg.rdf#>
PREFIX terms: <http://purl.org/dc/terms/>
PREFIX schema: <http://rod.eionet.europa.eu/schema.rdf#>
PREFIX core: <http://www.w3.org/2004/02/skos/core#>

SELECT ?file
WHERE {
?file terms:date ?date .
?file cr:mediaType 'text/xml' .
?file terms:isPartOf ?isPartOf .
?file cr:xmlSchema ?schema .
?isPartOf schema:locality ?locality .
?isPartOf schema:obligation ?obligation .
?obligation core:notation ?obligationNr .
?locality core:notation ?notation .
FILTER (?notation = '%s')
FILTER (?obligationNr = '607')
FILTER (str(?schema) = '%s')
}
ORDER BY DESC(?date)
LIMIT 1
""" % (country.upper(), schema)

    service = sparql.Service('https://cr.eionet.europa.eu/sparql')
    filename = ''

    try:
        req = service.query(q)
        rows = req.fetchall()

        # ROBUSTNESS FIX: guard against an empty result (consistent with
        # __get_report_filename_art3_4); previously rows[0] raised
        # IndexError here.
        if not rows:
            logger.warning("Filename not found for query: %s", q)

            return filename

        url = rows[0][0].value
        splitted = url.split('/')
        filename = splitted[-1]
    # FIX: narrowed the bare ``except:``
    except Exception:
        logger.exception('Got an error in querying SPARQL endpoint for '
                         'Article 7 country: %s', country)
        raise

    return filename
@cache(lambda func, *args: func.__name__ + "".join(args) + current_date())
@timeit
def get_all_report_filenames(country, article):
    """Return all public CDR XML file urls for a country/article,
    newest first, without duplicates.

    :param article: one of 'art3', 'art4', 'art7' (case-insensitive)
    """
    ART3 = 'http://icm.eionet.europa.eu/schemas/dir200856ec/MSFD4Geo_2p0.xsd'
    ART7 = ('http://dd.eionet.europa.eu/schemas/MSFD/MSFDCA_1p0.xsd',
            'http://water.eionet.europa.eu/schemas/dir200856ec/MSCA_1p0.xsd')

    schemas = {
        'art7': "str(?schema) IN %s" % str(ART7),      # tuple hack
        'art3': "str(?schema) = '%s'" % ART3,
        'art4': "str(?schema) = '%s'" % ART3,
    }
    obligations = {
        'art3': "?obligationNr IN ('608', '760')",
        'art4': "?obligationNr IN ('608', '760')",
        'art7': "?obligationNr IN ('607', '608')",
    }

    schema = schemas[article.lower()]
    obligation = obligations[article.lower()]

    q = """
PREFIX cr: <http://cr.eionet.europa.eu/ontologies/contreg.rdf#>
PREFIX terms: <http://purl.org/dc/terms/>
PREFIX schema: <http://rod.eionet.europa.eu/schema.rdf#>
PREFIX core: <http://www.w3.org/2004/02/skos/core#>

SELECT ?file
WHERE {
?file terms:date ?date .
?file cr:mediaType 'text/xml' .
?file terms:isPartOf ?isPartOf .
?file cr:xmlSchema ?schema .
?file schema:restricted ?restricted.
?isPartOf schema:locality ?locality .
?isPartOf schema:obligation ?obligation .
?obligation core:notation ?obligationNr .
?locality core:notation ?notation .
FILTER (str(?restricted) = 'false')
FILTER (?notation = '%s')
FILTER (%s)
FILTER (%s)
}
ORDER BY DESC(?date)
""" % (country.upper(), obligation, schema, )      # region.upper()
    # FILTER regex(str(?file), '%s')

    service = sparql.Service('https://cr.eionet.europa.eu/sparql')
    urls = []

    try:
        req = service.query(q)
        rows = req.fetchall()

        for row in rows:
            url = row[0].value

            if url not in urls:
                urls.append(url)
    # FIX: narrowed the bare ``except:`` -- it also swallowed
    # SystemExit/KeyboardInterrupt before re-raising.
    except Exception:
        logger.exception('Got an error in querying SPARQL endpoint for '
                         '%s country: %s', article, country)
        raise

    return urls
def _to_datetime(date_string):
d = datetime.strptime(date_string, "%Y-%m-%dT%H:%M:%SZ")
return d
# TODO with caching enabled the file url is returned WHY???
# @cache(lambda func, *args: func.__name__ + args[0] + current_date())
@timeit
def get_envelope_release_date(file_url):
    """Return the release datetime of the CDR envelope containing
    *file_url*; raises IndexError when the url is unknown to CDR."""
    q = """
PREFIX cr: <http://cr.eionet.europa.eu/ontologies/contreg.rdf#>
PREFIX terms: <http://purl.org/dc/terms/>
PREFIX schema: <http://rod.eionet.europa.eu/schema.rdf#>

SELECT ?released
WHERE {
?file terms:date ?date .
?file cr:mediaType 'text/xml'.
?file terms:isPartOf ?part .
?part schema:released ?released .
FILTER (str(?file) = '%s')
}
ORDER BY DESC(?date)
LIMIT 1
""" % file_url

    service = sparql.Service('https://cr.eionet.europa.eu/sparql')

    try:
        req = service.query(q)
        rows = req.fetchall()
        released = rows[0][0].value
    # FIX: narrowed the bare ``except:`` -- it also swallowed
    # SystemExit/KeyboardInterrupt before re-raising.
    except Exception:
        logger.exception('Got an error in querying SPARQL endpoint for '
                         'file_url: %s', file_url)
        raise

    release_date = _to_datetime(released)

    return release_date
def get_text_reports_2018(country_code):
    """Return [(file_url, release_datetime), ...] for a country's 2018
    text reports (obligation 761), newest first."""
    q = """
PREFIX cr: <http://cr.eionet.europa.eu/ontologies/contreg.rdf#>
PREFIX terms: <http://purl.org/dc/terms/>
PREFIX schema: <http://rod.eionet.europa.eu/schema.rdf#>
PREFIX core: <http://www.w3.org/2004/02/skos/core#>

SELECT distinct ?file, ?released
WHERE {
?file terms:date ?date .
#?file cr:mediaType 'text/xml' .
?file terms:isPartOf ?isPartOf .
?isPartOf schema:released ?released .
?isPartOf schema:locality ?locality .
?isPartOf schema:obligation ?obligation .
?obligation core:notation ?obligationNr .
?locality core:notation ?notation .
FILTER (?notation = '%s')
FILTER (?obligationNr = '761')
}
ORDER BY DESC(?date)
""" % country_code

    service = sparql.Service('https://cr.eionet.europa.eu/sparql')
    res = []

    try:
        req = service.query(q)
        rows = req.fetchall()

        for row in rows:
            file_url = row[0].value
            release_date = _to_datetime(row[1].value)
            res.append((file_url, release_date))
    # FIX: narrowed the bare ``except:`` -- it also swallowed
    # SystemExit/KeyboardInterrupt before re-raising.
    except Exception:
        logger.exception('Got an error in querying SPARQL endpoint when '
                         'getting text reports for: %s', country_code)
        raise

    return res
42,574 | laszlocseh/wise.msfd | refs/heads/master | /src/wise/msfd/gescomponents.py | # -*- coding: utf-8 -*-
import csv
import logging
import re
from collections import namedtuple
import lxml.etree
from pkg_resources import resource_filename
from wise.msfd import db, sql, sql2018, sql_extra
from wise.msfd.labels import COMMON_LABELS, TERMSLIST
from wise.msfd.utils import (ItemLabel, _parse_files_in_location,
get_element_by_id, natural_sort_key, timeit)
logger = logging.getLogger('wise.msfd')
# GES criterias have been used in 2010/2012 reports and then revamped for 2018
# reports. As such, some exist in 2010 that didn't exist in 2018, some exist
# for 2018 that didn't exist for 2010 and they have changed their ids between
# the two reporting exercises.
Criterion2012 = namedtuple('Criterion2012', ['id', 'title'])
Feature = namedtuple('Feature', ['name', 'label', 'descriptors', 'theme'])
Feature_2018 = namedtuple('Feature', ['name', 'label', 'subject', 'theme'])
Parameter = namedtuple('Parameter', ['name', 'unit', 'criterias'])
DESC_RE = re.compile(r'^D\d(\.\d|\d)?$')
CRIT_2018_RE = re.compile(r'^D\d[0,1]?C\d$') # ex: D10C5
CRIT_2012_RE = re.compile(r'^\d[0,1]?\.\d$') # ex: 4.1
INDICATOR_2012_RE = re.compile(r'^\d[0,1]?\.\d\.\d$') # ex: 10.1.1
NOTHEME = u'No theme'
DESCRIPTOR_TYPES = [
("Pressure-based descriptors", ['D2', 'D5', 'D7', 'D9', 'D10', 'D11']),
("State-based descriptors", ['D1.1', 'D1.2', 'D1.3', 'D1.4', 'D1.5',
'D3', 'D1.6', 'D6', 'D4'])
]
class ElementDefinition:
    """Definition of a criteria element, parsed from an XML node.

    Exposes the node's ``id`` attribute and its stripped text content.
    """

    def __init__(self, node, root):
        # ``root`` is accepted for a uniform *Definition constructor
        # signature, but this class only needs the node itself
        self.id = node.get('id')
        self.definition = node.text.strip()
class DummyMSD:
    """Placeholder methodological-standard definition.

    Used when a criteria has no real methodological standard; carries a
    unique sentinel ``id`` and an empty definition text.
    """

    def __init__(self):
        # a fresh object() gives every dummy a unique, unequal id
        self.id = object()
        self.definition = ''
class MetodologicalStandardDefinition:
    """Methodological standard definition parsed from an XML node.

    NOTE: the class name keeps the historical 'Metodological' spelling,
    since it is referenced elsewhere in the package.
    """

    def __init__(self, node, root):
        # ``root`` is unused; kept for a uniform *Definition constructor
        self.id = node.get('id')
        self.definition = node.text.strip()
class CriteriaAssessmentDefinition:
    """ A criteria assessment definition, parsed from a <criteria> XML node

    Holds the criteria id, its definition text, whether it is primary (and
    for which descriptors), the elements it uses and its methodological
    standard.
    """
    def __init__(self, node, root):
        self.id = node.get('id')
        defn = node.find('definition')
        self.definition = defn.text.strip()
        # 'primary' is either a boolean-like string or a space separated
        # list of descriptor ids the criteria is primary for
        prim = node.get('primary', 'false')
        if prim.lower() in ['false', 'true']:
            # acts as a star
            # NOTE(review): .index(prim) is case sensitive while the
            # membership test above is not - a 'True'/'False' attribute
            # value would raise ValueError here; confirm the XML is always
            # lowercase
            self._primary_for_descriptors = bool(['false', 'true'].index(prim))
        else:
            # parse the primary definition to identify descriptors
            descriptors = prim.split(' ')
            self._primary_for_descriptors = descriptors
        self.elements = []
        for eid in node.xpath('uses-element/@href'):
            el = get_element_by_id(root, eid)
            self.elements.append(ElementDefinition(el, root))
        # a criteria node is expected to reference exactly one
        # methodological standard ([0] raises IndexError otherwise)
        msid = node.xpath('uses-methodological-standard/@href')[0]
        mel = get_element_by_id(root, msid)
        self.methodological_standard = MetodologicalStandardDefinition(
            mel, root
        )
    @property
    def title(self):
        # Ex: u'D5C1 - Primary'. A non-empty descriptor list also counts
        # as primary for the purpose of this label
        if self._primary_for_descriptors:
            primary = True
        else:
            primary = False
        return u"{} - {}".format(self.id,
                                 primary and 'Primary' or 'Secondary')
def parse_elements_file(fpath):
    """ Parse a descriptor ``*_elements.xml`` file.

    Note: this parsing is pretty optimistic that there's a single descriptor
    in the file. Keep that true.

    :param fpath: path to the XML elements file
    :return: tuple of (descriptor id, [CriteriaAssessmentDefinition, ...]),
        or None when the file could not be parsed
    """
    try:
        root = lxml.etree.parse(fpath).getroot()
    except Exception:
        # was a bare ``except:``, which also swallowed SystemExit and
        # KeyboardInterrupt; narrowed to Exception, same logging behavior
        logger.exception('Could not parse file: %s', fpath)
        return None

    desc_id = root.get('id')
    res = [CriteriaAssessmentDefinition(critn, root)
           for critn in root.iterchildren('criteria')]

    return desc_id, res
def get_descriptor_elements(location):
    """ Parse the descriptor elements in a location and build a mapping struct

    The location argument should be a path relative to wise.msfd package.
    The return data is used to build the automatic forms.
    """

    def is_elements_file(fname):
        # only the ``*_elements.xml`` files describe descriptor elements
        return fname.endswith('_elements.xml')

    return _parse_files_in_location(
        location, is_elements_file, parse_elements_file
    )
DESCRIPTOR_ELEMENTS = get_descriptor_elements(
'compliance/nationaldescriptors/data'
)
class Descriptor(ItemLabel):
    """ A descriptor representation

    Holds the descriptor id (ex: 'D5'), its unicode title and the set of
    Criterion objects attached to it.
    """
    def __init__(self, id=None, title=None, criterions=None):
        self.id = id
        self.title = title
        # NOTE: ``unicode`` implies Python 2; also, asserts are stripped
        # under -O, so this is a debugging aid rather than validation
        assert isinstance(self.title, unicode)
        self.name = self.title
        self.criterions = criterions or set()
    def is_descriptor(self):
        # counterpart of Criterion.is_descriptor, used to tell them apart
        return True
    @property
    def template_vars(self):
        # ItemLabel support
        title = self.id
        if title.startswith('D1.'):
            # if D1.1, return "D1-B"
            # assumes the second word of the title starts with the group
            # letter (ex: 'Birds' -> 'B') - TODO confirm against the data
            bits = self.title.split(' ')
            b2 = bits[1].strip()
            major = b2[0].strip()
            # title = u"D1‑" + major[0].upper() # non-breaking hyphen
            title = u"D1-" + major.upper() # plain hyphen (the non-breaking
            # variant is kept commented out above)
        return {
            'title': title,
            'name': self.title,
        }
    def all_ids(self):
        # All ids this descriptor answers to, including its criterions'
        # ids; D6 also answers to 'D6/D1' and D4 to 'D4/D1'
        res = set()
        res.add(self.id)
        if self.id == 'D6':
            res.add('D6/D1')
        if self.id == 'D4':
            res.add('D4/D1')
        # TODO why it is commented
        # if self.id.startswith('D1.'):
        # res.add('D1')
        for crit in self.criterions:
            for cid in crit.all_ids():
                res.add(cid)
        return res
    def sorted_criterions(self):
        # the criterions, grouped/sorted by id (see sorted_by_criterion)
        crits = {c.id: c for c in self.criterions}
        # ids = crits.keys()
        s = sorted_by_criterion(crits.keys())
        return [crits[x] for x in s]
    def __getitem__(self, crit_id):
        # dict-like access to a criterion by id; raises KeyError if missing
        for crit in self.criterions:
            if crit.id == crit_id:
                return crit
        raise KeyError
class Criterion(ItemLabel):
    """ A container for a GES criterion information

    A criterion is a somewhat confusing concept. In 2012 reporting, the
    Criteria were used, which had assigned Indicators. In 2018, the Descriptor
    concept has been introduced, which has Indicators. So, the "virtual"
    hierarchy is Descriptor > Criteria > Indicator.

    A criterion can be either of Descriptor, Criteria, Indicator.

    NOTE: there is a criterion that is 2012 exclusive, we don't include it in
    data: 1.3.2 Population genetic structure
    """
    _id = None # id for the 2018 version
    _title = None # title for the 2018 version
    _alternatives = None # class-level placeholder; instances set
    # .alternatives (a list of Criterion2012 tuples) in __init__
    _main_id = None # the id this criterion was last looked up by
    # (set in get_criterion)
    @property
    def template_vars(self):
        # ItemLabel support
        # title = self._title or self.id
        #
        # if self._main_id and self._main_id != self.id:
        # title = u"{} ({})".format(title, self._main_id)
        return {
            'title': self.id,
            'name': self.title,
        }
    def is_descriptor(self):
        # counterpart of Descriptor.is_descriptor
        return False
    def __init__(self, id, title, descriptor):
        # :param id: 2018 criterion id (ex: 'D5C1')
        # :param title: 2018 title
        # :param descriptor: descriptor id (ex: 'D5'), used to look up the
        #     criteria assessment definitions parsed from the XML files
        self.alternatives = [] # Criterion2012 objects
        self._id = id
        # NOTE(review): alternatives is always empty at this point, so a
        # falsy id would raise IndexError here - confirm ids are non-empty
        self.id = self.name = self._id or self.alternatives[0][0]
        self._title = title
        self.descriptor = descriptor
        # copy over the parsed XML definition for this criteria, if any
        crit_defs = [x
                     for x in DESCRIPTOR_ELEMENTS[self.descriptor]
                     if x.id == self.id]
        if crit_defs:
            self.__dict__.update(crit_defs[0].__dict__)
        else:
            self.elements = []
            self.definition = ''
            self.methodological_standard = DummyMSD()
            self._primary = False
            # self._primary_for_descriptors = []
    def __str__(self):
        return self.title
    def __repr__(self):
        # ascii-safe representation (``encode`` on text implies Python 2)
        title = self.title.encode('ascii', 'replace')
        title = title.replace('?', '-')
        return "<Criterion {}>".format(title)
    def is_2018_exclusive(self):
        # no 2012 alternatives means it only exists in 2018 reporting
        return not self.alternatives
    def is_2012_exclusive(self):
        # no 2018 id means it only exists in 2012 reporting
        return not self._id
    @property
    def title(self):
        # the 2018 title, suffixed with the 2012 alternative ids, if any
        alter = self.alternatives
        if not alter:
            return self._title
            # return u"{} {}".format(self._id, self._title)
        if not self._id:
            # id, title = alter[0]
            return alter[0][1] # u"{} {}".format(id, title)
        # NOTE(review): ``len(alter) == 0`` is always False here (we
        # returned above when empty), so the join branch is always taken;
        # for a single alternative the join yields the same string anyway
        alter_ids = len(alter) == 0 and alter[0][0] \
            or u', '.join(sorted(set([a[0] for a in alter])))
        # if self._main_id and self._main_id != self.id:
        # return u"{} ({})".format(
        # self._title,
        # alter_ids,
        # )
        return u"{} ({})".format(
            self._title,
            alter_ids,
        )
    # def belongs_to_descriptor(self, descriptor_id):
    # for descriptor in self.descriptors:
    # if descriptor.id == descriptor_id:
    # return True
    #
    # return False
    def all_ids(self):
        # the 2018 id plus all 2012 alternative ids
        return set([self.id] + [x[0] for x in self.alternatives])
    def has_alternative(self, id):
        # True if ``id`` matches the id of one of the 2012 alternatives
        return any([x.id == id for x in self.alternatives])
    def is_primary(self, descriptor):
        # Is this criterion primary for the given descriptor (object or id
        # string)?
        # ``_primary`` is only set when no XML definition was found in
        # __init__; when present it takes precedence
        if hasattr(self, '_primary'):
            return self._primary
        if self._primary_for_descriptors in [True, False]:
            # boolean: primary (or not) for every descriptor
            return self._primary_for_descriptors
        else:
            # list: primary only for the listed descriptors
            return getattr(descriptor, 'id', descriptor).lower() in \
                [d.lower() for d in self._primary_for_descriptors]
def parse_ges_extended_format():
    """ Parse data/ges_terms.csv into (descriptors, criterions) mappings

    The file is tab-delimited with rows of up to three columns:
    (ges component id, title, 2012 alternative "id title"). Rows whose id
    looks like a descriptor (starts with 'D', no 'C') start a new
    descriptor; the rows that follow are criteria attached to the most
    recently seen descriptor.

    :return: tuple of ({descriptor id: Descriptor},
                       {criterion id: Criterion})
    """
    csv_f = resource_filename('wise.msfd',
                              'data/ges_terms.csv')
    # binary mode + the .decode('utf-8') below imply Python 2 csv handling
    with open(csv_f, 'rb') as csvfile:
        csv_file = csv.reader(csvfile, delimiter='\t')
        rows = list(csv_file)
        rows = rows[1:] # skip header
    descriptors = {}
    criterions = {}
    descriptor = None
    for row in rows:
        if not row:
            continue
        if not row[0].strip():
            continue
        bits = [b.strip() for b in row]
        # pad short rows up to the expected three columns
        if len(bits) == 1: # allow for editing with vim
            bits.extend(['', ''])
        if len(bits) == 2:
            bits.append('')
        b1, b2, b3 = bits
        if b1.startswith('D') and ('C' not in b1):
            # it's a descriptor label
            descriptor = Descriptor(b1, b2.decode('utf-8'))
            descriptors[descriptor.id] = descriptor
            continue
        # criteria row: reuse an already seen criterion or create one,
        # attaching it to the current descriptor either way
        if b1 in criterions:
            criterion = criterions[b1]
            descriptors[descriptor.id].criterions.add(criterion)
        else:
            criterion = Criterion(id=b1, title=b2, descriptor=descriptor.id)
            criterions[criterion.id] = criterion
            descriptors[descriptor.id].criterions.add(criterion)
        # third column is the 2012 alternative, ex: "5.1 Nutrient levels"
        if b3 and (not criterion.has_alternative(b3)):
            crit = Criterion2012(*b3.split(' ', 1))
            criterion.alternatives.append(crit)
            # criterions[crit.id] = crit
            # descriptors[descriptor.id].criterions.append(criterion)
    return descriptors, criterions
GES_DESCRIPTORS, GES_CRITERIONS = parse_ges_extended_format()
def get_all_descriptors():
    """ Returns all descriptors in the following format

    :return: (('D1', 'D1 - Biodiversity'),
              ('D1.1', 'D1 - Biodiversity – birds'),
              ... )
    """
    pairs = [(desc.id, desc.title) for desc in GES_DESCRIPTORS.values()]

    return sorted(pairs, key=lambda pair: natural_sort_key(pair[0]))
def get_descriptor(descriptor=None):
    """ Returns a Descriptor object, that has criterions attached

    The combined 'D6/D1' and 'D4/D1' ids map to their base descriptors.

    :param descriptor: descriptor id, ex D5
    """
    aliases = {'D6/D1': 'D6', 'D4/D1': 'D4'}
    descriptor = aliases.get(descriptor, descriptor)

    return GES_DESCRIPTORS[descriptor]
def get_criterion(ges_id):
    """ Get the first matched criterion for given ges id

    :param ges_id: criterion id (ex: D1, D5C1 or 5.1.1)
    :return: the matching Criterion (with _main_id set to the requested
        id), or None when there is no match
    """
    for criterion in GES_CRITERIONS.values():
        if ges_id not in criterion.all_ids():
            continue

        # remember which of its ids the caller actually asked for
        criterion._main_id = ges_id

        return criterion

    return None
def get_ges_component(ges_id):
    """ Return the Descriptor or Criterion matching a GES component id

    The combined 'D6/D1' / 'D4/D1' ids are mapped to their base
    descriptors. Returns None (and logs a warning) when nothing matches.
    """
    base = {'D6/D1': 'D6', 'D4/D1': 'D4'}.get(ges_id.upper())

    if base is not None:
        ges_id = base

    if is_descriptor(ges_id):
        return get_descriptor(ges_id)

    crit = get_criterion(ges_id)

    if crit is not None:
        return crit

    logger.warning("Criterion not found: %s", ges_id)

    return None
def parse_parameters():
    """ Build the {name: Parameter} mapping from ReferenceParameter terms

    Rewritten as a single pass over the terms list: the original rescanned
    the whole list for every new parameter name, which is O(n^2). As
    before, the unit comes from the first occurrence of a name and the
    criterias set is collected from all of its occurrences.

    :return: dict of {parameter name: Parameter namedtuple}
    """
    units = {}
    criterias_map = {}

    for par in TERMSLIST['ReferenceParameter']:
        name = par['Parameter']

        if name not in units:
            units[name] = par['Unit']

        criterias_map.setdefault(name, set()).add(par['Criteria'])

    res = {}

    for name, criterias in criterias_map.items():
        res[name] = Parameter(name, units[name], criterias)

    return res
PARAMETERS = parse_parameters()
def get_parameters(descriptor_code=None):
    """ Return Parameter objects, optionally filtered by a descriptor

    :param descriptor_code: descriptor id (ex: D5); all parameters are
        returned when None
    """
    if descriptor_code is None:
        return PARAMETERS.values()

    crit_ids = set(get_descriptor(descriptor_code).all_ids())

    return [param
            for param in PARAMETERS.values()
            if param.criterias.intersection(crit_ids)]
# TODO: move all label related code to labels.py
@db.use_db_session('2012')
def parse_features_from_db_2012():
    """ Build a {code: Feature} mapping from the 2012 MSFD9Feature table """
    table = sql_extra.MSFD9Feature
    count, rows = db.get_all_records(table)

    features = {}

    for record in rows:
        code = record.FeaturesPressuresImpacts
        # 2012 features carry no label/descriptors; theme falls back to
        # 'No theme'
        features[code] = Feature(
            code, '', '', record.FeatureType or NOTHEME)

    return features
FEATURES_DB_2012 = parse_features_from_db_2012()
@db.use_db_session('2018')
def parse_features_from_db_2018():
    """ Build a {code: Feature_2018} mapping from the 2018 LFeature table """
    table = sql2018.LFeature
    count, rows = db.get_all_records(table)

    features = {}

    for record in rows:
        features[record.Code] = Feature_2018(
            record.Code,
            record.Feature,
            record.Subject,
            record.Theme or NOTHEME,
        )

    return features
FEATURES_DB_2018 = parse_features_from_db_2018()
SUBJECT_2018_ORDER = [
'Structure, functions and processes of marine ecosystems',
'Anthropogenic pressures on the marine environment',
'Pressure levels and impacts in marine environment',
'Uses and human activities in or affecting the marine environment',
'Ecosystem services',
]
THEMES_2018_ORDER = [
'Species',
'Habitats',
'Ecosystems, including food webs',
'Biological',
'Physical',
'Physical and hydrological',
'Chemical',
'Substances, litter and energy',
'Nutrition',
'Materials',
'Energy',
'Mediation of waste, toxics and other nuisances',
'Mediation of flows',
'Maintenance of physical, chemical, biological conditions',
'Underpinning and/or enhancing physical and intellectual interactions',
'Underpinning and/or enhancing spiritual, symbolic and other interactions',
'Physical restructuring of rivers, coastline or seabed (water management)',
'Extraction of non-living resources',
'Production of energy',
'Extraction of living resources',
'Cultivation of living resources',
'Transport',
'Urban and industrial uses',
'Tourism and leisure',
'Security/defence',
'Education and research',
'No theme',
]
def parse_features():
    """ Build the {code: Feature} mapping for the features vocabulary

    Combines TERMSLIST['FeaturesSmart'] (code, label, descriptors) with the
    theme taken from the 2018 database features, then adds a couple of
    entries that are missing from FeaturesSmart.
    """
    res = {}
    FEATURES = TERMSLIST['FeaturesSmart'] #
    for fr in FEATURES:
        code = fr['code']
        label = fr['label']
        if code in res:
            continue
        # all descriptors that use this code; the combined D6/D1 and D4/D1
        # ids are normalized to their base descriptor
        # NOTE(review): this rescans FEATURES for every new code (O(n^2))
        descs = set([f['descriptor']
                     .replace('D6/D1', 'D6')
                     .replace('D4/D1', 'D4')
                     for f in FEATURES
                     if f['code'] == code])
        theme = FEATURES_DB_2018[code].theme
        res[code] = Feature(code, label, descs, theme)
    # this is missing from FeaturesSmart
    res['BirdsAll'] = Feature('BirdsAll', 'All birds', set(['D1.1']), NOTHEME)
    res['MamAll'] = Feature('MamAll', 'All mammals', set(['D1.2']), NOTHEME)
    res['FishAll'] = Feature('FishAll', 'All fish', set(['D1.4', 'D3']),
                             NOTHEME)
    res['CephaAll'] = Feature('CephaAll', 'All cephalopods', set(['D1.5']),
                              NOTHEME)
    res['HabPelagAll'] = Feature('HabPelagAll',
                                 'Pelagic habitats', set(['D1.6']), NOTHEME)
    res['HabPelagVarSalinity'] = Feature('HabPelagVarSalinity',
                                         'Variable salinity', set(['D1.6']),
                                         NOTHEME)
    res['HabPelagCoastal'] = Feature('HabPelagCoastal',
                                     'Coastal', set(['D1.6']), NOTHEME)
    res['HabPelagShelf'] = Feature('HabPelagShelf',
                                   'Shelf', set(['D1.6']), NOTHEME)
    res['HabPelagOcean'] = Feature('HabPelagOcean',
                                   'Oceanic/beyond shelf', set(['D1.6']),
                                   NOTHEME)
    # NOTE: the trailing space in the two labels below is in the data as-is
    res['HabOther'] = Feature('HabOther',
                              'Other habitat types ', set(['D1.6', 'D6']),
                              NOTHEME)
    res['HabAll'] = Feature('HabAll',
                            'All habitats ', set(['D1.6', 'D6']), NOTHEME)
    return res
FEATURES = parse_features()
def get_features(descriptor_code=None):
    """ Return Feature objects, optionally filtered by a descriptor id """
    all_features = FEATURES.values()

    if descriptor_code is None:
        return all_features

    return [feat
            for feat in all_features
            if descriptor_code in feat.descriptors]
def is_descriptor(value):
    """ True when ``value`` looks like a descriptor id (ex: D5, D1.1) """
    return DESC_RE.match(value) is not None
def sorted_by_criterion(ids):
    """ Sort/group a list of criterion ids

    Output order: descriptors, 2018 criterias, 2012 criterias/indicators,
    combined '...indicator...' ids, anything else; each group is sorted
    internally. Classification is first-match, so the order of the checks
    below matters.
    """
    descriptors = set() # ex: D1.1
    criterias_2018 = set() # ex: D1C5
    criterias_2012 = set() # ex: 5.1
    indicators = set() # ex: 5.1.1
    criterions = set() # ex: 1.2.1-indicator 5.2B
    others = set()
    for id in ids:
        if id in GES_DESCRIPTORS:
            descriptors.add(id)
            continue
        if CRIT_2018_RE.match(id):
            criterias_2018.add(id)
            continue
        if CRIT_2012_RE.match(id):
            criterias_2012.add(id)
            continue
        if INDICATOR_2012_RE.match(id):
            indicators.add(id)
            continue
        if 'indicator' in id: # TODO: this needs to be normalized
            criterions.add(id)
            continue
        others.add(id)
    res = []
    # NOTE(review): lexicographic sort on the id with 'D' stripped - double
    # digit descriptors ('D10') sort before 'D2'
    res.extend(sorted(descriptors, key=lambda d: d.replace('D', '')))
    res.extend(sorted(criterias_2018)) # TODO: sort for double digit
    # 2012 criterias and their indicators are merged into one sorted group
    criterions_2012 = criterias_2012.union(indicators)
    res.extend(sorted(criterions_2012))
    res.extend(sorted(criterions, key=lambda k: k.replace(' ', '')))
    res.extend(sorted(others))
    # print(res)
    return res
def sorted_criterions(crits):
    """ Given a list of criterias, returns the same list, sorted by criteria id
    """
    by_id = {criterion.id: criterion for criterion in crits}

    return [by_id[cid] for cid in sorted_by_criterion(by_id.keys())]
def criteria_from_gescomponent(text):
    """ Given a ges component id, such as '4.3.1.- indicators 4.3A',
    return the matching indicator or criteria (or even descriptor, if provided
    with a descriptor such as D1.1)
    """
    head = text.partition('-')[0]

    # drop the single trailing dot left over before the '-' separator
    if head.endswith('.'):
        head = head[:-1]

    return head
class MarineReportingUnit(ItemLabel):
    """ A labeled MarineReportingUnit container

    ``id`` and ``name`` are the MRU identifier; ``title`` is the human
    readable label.
    """

    def __init__(self, id, title):
        self.id = id
        self.name = id
        self.title = title
@db.use_db_session('2012')
def _muids_2012(country, region):
    """ Marine reporting units for a country/region, from the 2012 DB

    :return: sorted list of MarineReportingUnit objects
    """
    t = sql.t_MSFD4_GegraphicalAreasID
    count, res = db.get_all_records(
        (t.c.MarineUnitID,
         t.c.MarineUnits_ReportingAreas),
        t.c.MemberState == country,
        t.c.RegionSubRegions == region,
        # There are some MRUs with empty MarineUnits_ReportingAreas
        # we should not exclude these, they are used in the reports
        # ex: ../se/ans/d2/art9/@@view-report-data-2012 ANS-SE-SR-Nordsjon
        # t.c.MarineUnits_ReportingAreas.isnot(None),
    )
    # r[0] = MarineUnitID, r[1] = MarineUnits_ReportingAreas
    # the id is used as the label when the reporting area name is empty
    res = [MarineReportingUnit(r[0], r[1] or r[0]) for r in res]
    return sorted(res)
@db.use_db_session('2018')
def _muids_2018(country, region):
    """ Marine reporting units for a country/region, from the 2018 DB

    :return: sorted list of MarineReportingUnit objects
    """
    # this method needs "raw" access because the shapefile column slows things
    t = sql2018.MarineReportingUnit
    sess = db.session()
    q = sess\
        .query(t.MarineReportingUnitId, t.nameTxtInt, t.Description)\
        .filter(
            t.CountryCode == country,
            t.Region == region,
            # t.MarineReportingUnitId.isnot(None),
            # t.localId.isnot(None), # TODO: this suits NL, check others
        )
    # prefer the international name, fall back to the description
    res = [MarineReportingUnit(m.MarineReportingUnitId,
                               m.nameTxtInt or m.Description)
           for m in q]
    return sorted(res)
@timeit
def get_marine_units(country, region, year=None):
    """ Get a list of ``MarineReportingUnit`` objects

    :param country: country code, ex: 'SE'
    :param region: (sub)region code, ex: 'ANS'
    :param year: '2012' or '2018'; any other value raises
        NotImplementedError
    """
    # the debug ``print`` statement (also Python-3 incompatible) was
    # replaced with proper logging, via the module logger
    logger.info("Get marine units for year: %s", year)

    if year == '2012':
        return _muids_2012(country, region)

    if year == '2018':
        return _muids_2018(country, region)

    raise NotImplementedError
| {"/src/wise/msfd/compliance/nationaldescriptors/a8esa.py": ["/src/wise/msfd/compliance/nationaldescriptors/data.py"], "/src/wise/msfd/compliance/nationalsummary/main.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/regionalsummary/introduction.py": ["/src/wise/msfd/compliance/nationalsummary/introduction.py"], "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/nationalsummary/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py"], "/src/wise/msfd/compliance/regionalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationalsummary/pdfexport.py", "/src/wise/msfd/compliance/regionalsummary/introduction.py"], "/src/wise/msfd/compliance/regionaldescriptors/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/utils.py", "/src/wise/msfd/compliance/regionaldescriptors/data.py"], "/src/wise/msfd/compliance/landingpage.py": ["/src/wise/msfd/compliance/assessment.py"], "/src/wise/msfd/__init__.py": ["/src/wise/msfd/patch.py"], "/src/wise/msfd/search/a9.py": ["/src/wise/msfd/__init__.py", "/src/wise/msfd/search/base.py"], "/src/wise/msfd/compliance/nationalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py", "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py", "/src/wise/msfd/compliance/nationalsummary/introduction.py"]} |
42,575 | laszlocseh/wise.msfd | refs/heads/master | /src/wise/msfd/translation/admin.py | import json
import logging
from datetime import datetime
from zope import event
from eea.cache.event import InvalidateMemCacheEvent
from Products.Five.browser import BrowserView
from . import normalize, save_translation
from .interfaces import ITranslationsStorage
logger = logging.getLogger('wise.msfd.translation')
class TranslationsOverview(BrowserView):
    """ Translations overview page

    Zope browser view used to list, add, edit and approve the stored
    translations (see ITranslationsStorage).
    """
    def languages(self):
        # the language codes available in the translations storage
        return ITranslationsStorage(self.context).keys()
    def available_translations(self):
        # the {original: translation} mapping for the selected language;
        # empty mapping when the language is missing from the storage
        storage = ITranslationsStorage(self.context)
        selected_lang = self.request.form.get('language')
        langstore = storage.get(selected_lang, {})
        return langstore
    def edit_translation(self):
        # Save an edited translation (form fields: language, original,
        # tr-new), invalidate the memcache entries depending on
        # 'translation' and return the new text as JSON
        form = self.request.form
        language = form.get('language')
        original = form.get('original') # .decode('utf-8')
        original = normalize(original)
        # NOTE: .decode() on form values implies Python 2 byte strings
        translated = form.get('tr-new').decode('utf-8')
        save_translation(original, translated, language, approved=True)
        deps = ['translation']
        event.notify(InvalidateMemCacheEvent(raw=True, dependencies=deps))
        logger.info('Invalidate cache for dependencies: %s', ', '.join(deps))
        response = self.request.response
        response.addHeader('Content-Type', 'application/json')
        return json.dumps({'text': translated})
    def add_translation(self):
        # Save a new translation (form fields: language, original,
        # translated) and redirect back to the overview page
        form = self.request.form
        language = form.get('language')
        original = form.get('original') # .decode('utf-8')
        original = normalize(original)
        translated = form.get('translated').decode('utf-8')
        save_translation(original, translated, language, approved=True)
        url = './@@translations-overview?language=' + language
        return self.request.response.redirect(url)
    def approve_translations(self):
        # Mark the checked translations as approved, stripping the leading
        # '?' marker from their text, then redirect to the overview
        form = self.request.form
        language = form.get('language')
        approved = form.get('approved') # .decode('utf-8')
        url = './@@translations-overview?language=' + language
        if not approved:
            return self.request.response.redirect(url)
        # a single checkbox comes in as a string, several as a list
        # (``basestring`` implies Python 2)
        if isinstance(approved, basestring):
            approved = [approved]
        storage = ITranslationsStorage(self.context)
        selected_lang = self.request.form.get('language')
        langstore = storage.get(selected_lang, {})
        for label in approved:
            label = label.decode('utf-8')
            translation = langstore[label]
            # a leading '?' marks an unapproved/automatic translation
            if translation.text.startswith('?'):
                translation.text = translation.text[1:]
            translation.approved = True
            translation.modified = datetime.now()
        return self.request.response.redirect(url)
| {"/src/wise/msfd/compliance/nationaldescriptors/a8esa.py": ["/src/wise/msfd/compliance/nationaldescriptors/data.py"], "/src/wise/msfd/compliance/nationalsummary/main.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/regionalsummary/introduction.py": ["/src/wise/msfd/compliance/nationalsummary/introduction.py"], "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/nationalsummary/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py"], "/src/wise/msfd/compliance/regionalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationalsummary/pdfexport.py", "/src/wise/msfd/compliance/regionalsummary/introduction.py"], "/src/wise/msfd/compliance/regionaldescriptors/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/utils.py", "/src/wise/msfd/compliance/regionaldescriptors/data.py"], "/src/wise/msfd/compliance/landingpage.py": ["/src/wise/msfd/compliance/assessment.py"], "/src/wise/msfd/__init__.py": ["/src/wise/msfd/patch.py"], "/src/wise/msfd/search/a9.py": ["/src/wise/msfd/__init__.py", "/src/wise/msfd/search/base.py"], "/src/wise/msfd/compliance/nationalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py", "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py", "/src/wise/msfd/compliance/nationalsummary/introduction.py"]} |
42,576 | laszlocseh/wise.msfd | refs/heads/master | /src/wise/msfd/compliance/scoring.py |
DEFAULT_RANGES = [
    [76, 100],
    [51, 75],
    [26, 50],
    [1, 25],
    [0, 0]
]

CONCLUSIONS = [
    'Not relevant',
    'Very good',
    'Good',
    'Poor',
    'Very poor',
    'Not reported',
]


def get_range_index(percentage):
    """ Map a percentage to the index of its range, worst range first

    :param percentage: numeric value, truncated to int
    :return: 0 for [0, 0], 1 for [1, 25], ... 4 for [76, 100];
        len(DEFAULT_RANGES) + 1 when the value falls outside every range
    """
    value = int(percentage)

    for index, (low, high) in enumerate(reversed(DEFAULT_RANGES)):
        if low <= value <= high:
            return index

    return len(DEFAULT_RANGES) + 1
def scoring_based(answers, scores):
    """ Collect the numeric scores for the selected answers

    :param answers: iterable of answer keys/indexes into ``scores``
    :param scores: mapping of answer -> score string; '/' marks a
        'Not relevant' option and is skipped
    :return: list of floats
    """
    return [float(scores[answer])
            for answer in answers
            if scores[answer] != '/']
def get_overall_conclusion(concl_score):
    """ Map an overall score to its (range index, conclusion text) pair

    :param concl_score: percentage-like score
    :return: tuple (range index, conclusion label); (1, 'Error') for
        values above 100
    """
    if concl_score > 100:
        return 1, 'Error'

    index = get_range_index(concl_score)

    # conclusions are looked up against the reversed list, worst first
    return index, CONCLUSIONS[::-1][index]
class OverallScores(object):
    """ Class used to store the score for each phase

    One attribute per phase (a dict with score / max_score / conclusion /
    color keys) is created on the instance, for every phase found in
    ``article_weights``.
    """
    def __init__(self, article_weights):
        # :param article_weights: mapping of article -> {phase: weight};
        #     assumes every article defines the same set of phases
        self.article_weights = article_weights
        _init = {
            'score': 0,
            'max_score': 0,
            'conclusion': 'Not reported',
            'color': 0,
        }
        # NOTE: .values()[0] implies Python 2 (dict.values returns a list)
        for phase in article_weights.values()[0].keys():
            d = {}
            d.update(_init)
            setattr(self, phase, d)
    def get_overall_score(self, article):
        """ Overall conclusion art. XX: 2018

        Weighted sum of the per-phase percentages for the given article.

        :return: 80
        """
        overall_score = 0
        weights = self.article_weights[article]
        for phase in weights:
            score = self.get_score_for_phase(phase)
            overall_score += score * weights[phase]
        overall_score = int(overall_score)
        return get_range_index(overall_score), overall_score
    def conclusion(self, phase):
        """ Get the conclusion text from score_value

        :return: string 'Very good'
        """
        score_value = self.get_range_index_for_phase(phase)
        concl = list(reversed(CONCLUSIONS))[score_value]
        return concl
    def get_score_for_phase(self, phase):
        # The phase score as a 0-100 percentage; 0 when max_score is 0
        # max_score ............. 100%
        # score ................. x%
        score = getattr(self, phase)['score']
        max_score = getattr(self, phase)['max_score']
        # NOTE(review): under Python 2, '/' floors when both operands are
        # ints - presumably acceptable since the result is rounded anyway;
        # confirm whether 'score' can be a float here
        return int(round(max_score and (score * 100) / max_score or 0))
    def get_range_index_for_phase(self, phase):
        # index of the range (see DEFAULT_RANGES) the phase score falls in
        score = self.get_score_for_phase(phase)
        return get_range_index(score)
    def score_tooltip(self, phase):
        """ TODO not used """
        score = getattr(self, phase)['score']
        max_score = getattr(self, phase)['max_score']
        final_score = self.get_score_for_phase(phase)
        text = \
            "<b>Score achieved</b>: {} (Sum of the final scores " \
            "from each questions)" \
            "</br><b>Max score</b>: {} (Maximum possible score)" \
            "</br><b>Final score</b>: {} (Final calculated score " \
            "(<b>Score achieved</b> / <b>Max score</b>) * 100)" \
            .format(score, max_score, final_score)
        return text
class Score(object):
    """ Class used to store scores for each question
    """
    def __init__(self, question, descriptor, values):
        """
        :param question: instance of AssessmentQuestionDefinition
        :param descriptor: 'D5'
        :param values: index of the options selected for the question
            ex. [3, 0, 5, 2]
        """
        self.descriptor = descriptor
        self.question = question
        # per-descriptor weight, defaulting to 10
        self.weight = float(question.score_weights.get(descriptor, 10.0))
        self.values = values
        self.scores = question.scores
    @property
    def is_not_relevant(self):
        """ If all options selected are 'Not relevant' return True

        :return: True or False
        """
        if not self.values:
            return False
        answers = [self.scores[answ] for answ in self.values]
        # '/' is the score assigned to a 'Not relevant' option
        return answers.count('/') == len(self.values)
    @property
    def raw_scores(self):
        """ Currently calls scoring_based function, and returns the raw scores
        based on the options selected for the question

        :return: list of floats [1.0, 0.25, 0, 0.75]
        """
        rs = self.question.score_method(self.values, self.scores)
        return rs
    @property
    def score_achieved(self):
        """ Sum of all raw scores

        :return: 1.5
        """
        return sum(self.raw_scores)
    @property
    def max_score(self):
        """ Maximum possible score

        :return: 5
        """
        return len(self.raw_scores)
    @property
    def percentage(self):
        """ Calculate the percentage from raw scores

        # max_score ... 100%
        # raw_score ... x

        :return: float 53.25
        """
        # All answers are 'Not relevant'
        if self.max_score == 0:
            return '-'
        # presumably true division: raw scores are floats, so the sum is a
        # float - TODO confirm score_method never returns all ints
        percentage = (self.score_achieved * 100) / self.max_score
        return float("{0:.1f}".format(percentage))
    @property
    def score_value(self):
        """ Get the score value from percentage, only used to get conclusion

        :return: integer from range 1-4
        """
        # All answers are 'Not relevant'
        if self.percentage == '-':
            # index of 'Not relevant' in the reversed CONCLUSIONS list
            return 5
        sv = get_range_index(self.percentage)
        return sv
    @property
    def conclusion(self):
        """ Get the conclusion text from score_value

        :return: string 'Very good'
        """
        concl = list(reversed(CONCLUSIONS))[self.score_value]
        return concl
    @property
    def weighted_score(self):
        """ Calculate the weighted score

        :return: float 7.5
        """
        ws = self.score_achieved * self.weight
        return ws
    @property
    def max_weighted_score(self):
        """ Calculate the maximum possible weighted score

        :return: float 7.5
        """
        ws = len(self.raw_scores) * self.weight
        return ws
    @property
    def score_tooltip(self):
        """ Explanatory HTML text for the score, shown as a tooltip """
        if self.is_not_relevant:
            return "All selected options are 'Not relevant', therefore " \
                "the question is not accounted when calculating the " \
                "overall scores"
        raw_score = ' + '.join(str(x) for x in self.raw_scores)
        text = \
            '<b>Weight</b>: {}' \
            '</br><b>Max score</b>: {} (number of answered criterias/targets ' \
            'excluding answers where option selected is "Not relevant")' \
            '</br><b>Score achieved</b>: {} (Sum of the scores {})' \
            '</br><b>Max weighted score</b>: {} (Maximum possible weighted ' \
            'score <b>Weight</b> * <b>Max score</b>)' \
            '</br><b>Weighted score</b>: {} (Final calculated score ' \
            '<b>Score achieved</b> * <b>Weight</b>)' \
            .format(self.weight, self.max_score, self.score_achieved,
                    raw_score, self.max_weighted_score, self.weighted_score)
        return text
# A10Ad1 A0810Cy1 A08Ad4 A09Ad1 A09Ad2
| {"/src/wise/msfd/compliance/nationaldescriptors/a8esa.py": ["/src/wise/msfd/compliance/nationaldescriptors/data.py"], "/src/wise/msfd/compliance/nationalsummary/main.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/regionalsummary/introduction.py": ["/src/wise/msfd/compliance/nationalsummary/introduction.py"], "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/nationalsummary/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py"], "/src/wise/msfd/compliance/regionalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationalsummary/pdfexport.py", "/src/wise/msfd/compliance/regionalsummary/introduction.py"], "/src/wise/msfd/compliance/regionaldescriptors/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/utils.py", "/src/wise/msfd/compliance/regionaldescriptors/data.py"], "/src/wise/msfd/compliance/landingpage.py": ["/src/wise/msfd/compliance/assessment.py"], "/src/wise/msfd/__init__.py": ["/src/wise/msfd/patch.py"], "/src/wise/msfd/search/a9.py": ["/src/wise/msfd/__init__.py", "/src/wise/msfd/search/base.py"], "/src/wise/msfd/compliance/nationalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py", "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py", "/src/wise/msfd/compliance/nationalsummary/introduction.py"]} |
42,577 | laszlocseh/wise.msfd | refs/heads/master | /src/wise/msfd/compliance/vocabulary.py | # -*- coding: utf-8 -*-
from collections import defaultdict, namedtuple
from zope.schema.vocabulary import SimpleTerm, SimpleVocabulary
from wise.msfd import db, sql2018
ASSESSED_ARTICLES = (
('Art3', 'Art. 3(1) Marine waters',),
('Art4', 'Art. 4/2017 Decision: Marine regions, subregions, '
'and subdivisions '),
('Art5', '(MRUs)', ),
('Art6', 'Art. 6 Regional cooperation', ),
('Art7', 'Art. 7 Competent authorities', ),
('Art8', 'Art. 8 Initial assessment (and Art. 17 updates)', ),
('Art9', 'Art. 9 Determination of GES (and Art. 17 updates) ', ),
('Art10', 'Art. 10 Environmental targets (and Art. 17 updates)', ),
('Art11', 'Art. 11 Monitoring programmes (and Art. 17 updates)', ),
('Art13', 'Art. 13 Programme of measures (and Art. 17 updates)', ),
('Art14', 'Art. 14 Exceptions (and Art. 17 updates)', ),
('Art18', 'Art. 18 Interim report on programme of measures', ),
('Art19_3', 'Art. 19(3) Access to data', ),
)
# TODO: sort this vocabulary (somehow)
GES_DESCRIPTORS = (
('D1', 'D1 Biodiversity'),
('D1 Birds', 'D1 Biodiversity – birds'),
('D1 Cephalopods', 'D1 Biodiversity – cephalopods'),
('D1 Fish', 'D1 Biodiversity – fish'),
('D1 Mammals', 'D1 Biodiversity – mammals'),
('D1 Pelagic habitats', 'D1 Biodiversity – pelagic habitats'),
('D1 Reptiles', 'D1 Biodiversity – reptiles'),
('D2', 'D2 Non-indigenous species'),
('D3', 'D3 Commercial fish and shellfish'),
('D4/D1', 'D4 - Food webs / D1 Biodiversity - ecosystems'),
('D5', 'D5 Eutrophication'),
('D6/D1', 'D6 - Sea-floor integrity / D1 Biodiversity - benthic habitats'),
('D7', 'D7 Hydrographical changes'),
('D8', 'D8 Contaminants'),
('D9', 'D9 Contaminants in seafood'),
('D10', 'D10 Marine litter'),
('D11', 'D11 Energy, incl. underwater noise'),
)
def vocab_from_pairs(pairs):
    """ Build a zope.schema vocabulary from pairs of (value(token), title)

    :param pairs: iterable of (value, title) tuples; the value doubles as
        the term token
    """
    return SimpleVocabulary([
        SimpleTerm(value, value, title) for value, title in pairs
    ])
def vocab_from_list(values):
    """Create a SimpleVocabulary where each value doubles as its own
    token and title.
    """
    terms = []

    for value in values:
        terms.append(SimpleTerm(value, value, value))

    return SimpleVocabulary(terms)
# Ready-made vocabularies for use in zope.schema fields.
descriptors_vocabulary = vocab_from_pairs(GES_DESCRIPTORS)
articles_vocabulary = vocab_from_pairs(ASSESSED_ARTICLES)
# Marine (sub)region code -> human-readable title.
REGIONS = {
    "ABI": "NE Atlantic: Bay of Biscay & Iberian Coast",
    "ACS": "NE Atlantic: Celtic Seas",
    "AMA": "NE Atlantic: Macaronesia",
    "ANS": "NE Atlantic: Greater North Sea",
    # , incl. Kattegat & English Channel
    "BAL": "Baltic Sea",
    "BLK": "Black Sea",
    "MAD": "Mediterranean: Adriatic Sea",
    "MAL": "Mediterranean: Aegean-Levantine Sea",
    "MIC": "Mediterranean: Ionian Sea & Central Mediterranean Sea",
    "MWE": "Mediterranean: Western Mediterranean Sea"
}
# Simplified region name -> tuple of member subregion codes.
# Note the overlaps: 'ANS' is listed under both 'North East Atlantic'
# and 'North Sea', and 'MWE' is kept separate from 'Mediterranean'.
REGIONS_SIMPLIFIED = {
    'North East Atlantic': ('ABI', 'ACS', 'AMA', 'ANS'),
    'West Mediterranean': ('MWE',),
    'Mediterranean': ('MAD', 'MAL', 'MIC'),  # 'MWE'
    'Baltic Sea': ('BAL',),
    'North Sea': ('ANS',),
    'Black Sea': ('BLK',),
}
# code: region code; title: display name; subregions: codes the region
# covers; countries: reporting member states; is_main: True for the
# four top-level regions, False for subregions.
Region = namedtuple('Region', ['code', 'title', 'subregions',
                               'countries', 'is_main'])

REGIONAL_DESCRIPTORS_REGIONS = [
    # Main regions
    Region('BAL', 'Baltic', ('BAL',),
           ('FI', 'EE', 'LV', 'LT', 'PL', 'DE', 'DK', 'SE'), True),
    Region('ATL', 'North East Atlantic', ('ABI', 'ACS', 'AMA', 'ANS',),
           ('SE', 'DK', 'DE', 'NL', 'BE', 'FR', 'UK', 'IE', 'ES', 'PT'), True),
    Region('MED', 'Mediterranean', ('MAD', 'MAL', 'MIC', 'MWE'),
           ('UK', 'ES', 'FR', 'IT', 'MT', 'SI', 'HR', 'EL', 'CY'), True),
    Region('BLK', 'Black Sea', ('BLK',), ('BG', 'RO'), True),

    # Sub regions
    Region('ANS', 'NE Atlantic: Greater North Sea', ('ANS',),
           ('SE', 'DK', 'DE', 'NL', 'BE', 'FR', 'UK'), False),
    Region('ACS', 'NE Atlantic: Celtic Seas', ('ACS',),
           ('UK', 'IE', 'FR'), False),
    Region('ABI', 'NE Atlantic: Bay of Biscay & Iberian Coast', ('ABI',),
           ('FR', 'ES', 'PT'), False),
    Region('AMA', 'NE Atlantic: Macaronesia', ('AMA',),
           ('ES', 'PT'), False),
    Region('MWE', 'Mediterranean: Western Mediterranean Sea', ('MWE',),
           ('UK', 'ES', 'FR', 'IT'), False),
    Region('MAD', 'Mediterranean: Adriatic Sea', ('MAD',),
           ('IT', 'SI', 'HR', 'EL'), False),
    Region('MIC', 'Mediterranean: Ionian Sea & Central Mediterranean Sea',
           ('MIC',), ('IT', 'MT', 'EL'), False),
    Region('MAL', 'Mediterranean: Aegean-Levantine Sea', ('MAL',),
           ('EL', 'CY'), False),
]
@db.use_db_session('2018')
def get_regions_for_country(country_code):
    """Return [(code, title)] of regions the country reported for.

    Looks up distinct Region values from the 2018 MarineReportingUnit
    table; unknown codes fall back to the code itself as title.
    """
    t = sql2018.MarineReportingUnit
    regions = db.get_unique_from_mapper(
        t,
        'Region',
        t.CountryCode == country_code
    )

    # blacklist main/generic regions as they do not have reported data
    # but they appear in the database. Countries affected: ES, IE, PT, IT
    blacklist = ['NotReported', 'ATL', 'MED']

    return [(code, REGIONS.get(code, code))
            for code in regions
            if code not in blacklist]
def make_subregions(d):
    """Invert a {region: (subregion, ...)} mapping such as
    REGIONS_SIMPLIFIED.

    Returns a defaultdict mapping each subregion code to the *list* of
    region names that contain it, e.g. {'ABI': ['North East Atlantic']}.
    (The original docstring suggested plain string values; the values
    are lists, because a subregion can belong to several regions --
    e.g. 'ANS' is in both 'North East Atlantic' and 'North Sea'.)
    """
    r = defaultdict(list)

    for region, subregions in d.items():
        for subregion in subregions:
            r[subregion].append(region)

    return r
# Inverted lookup: subregion code -> list of simplified region names.
SUBREGIONS_TO_REGIONS = make_subregions(REGIONS_SIMPLIFIED)
| {"/src/wise/msfd/compliance/nationaldescriptors/a8esa.py": ["/src/wise/msfd/compliance/nationaldescriptors/data.py"], "/src/wise/msfd/compliance/nationalsummary/main.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/regionalsummary/introduction.py": ["/src/wise/msfd/compliance/nationalsummary/introduction.py"], "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/nationalsummary/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py"], "/src/wise/msfd/compliance/regionalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationalsummary/pdfexport.py", "/src/wise/msfd/compliance/regionalsummary/introduction.py"], "/src/wise/msfd/compliance/regionaldescriptors/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/utils.py", "/src/wise/msfd/compliance/regionaldescriptors/data.py"], "/src/wise/msfd/compliance/landingpage.py": ["/src/wise/msfd/compliance/assessment.py"], "/src/wise/msfd/__init__.py": ["/src/wise/msfd/patch.py"], "/src/wise/msfd/search/a9.py": ["/src/wise/msfd/__init__.py", "/src/wise/msfd/search/base.py"], "/src/wise/msfd/compliance/nationalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py", "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py", "/src/wise/msfd/compliance/nationalsummary/introduction.py"]} |
42,578 | laszlocseh/wise.msfd | refs/heads/master | /src/wise/msfd/compliance/regionalsummary/pdfexport.py | # -*- coding: utf-8 -*-
from io import BytesIO
from pkg_resources import resource_filename
import logging
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile
from Products.statusmessages.interfaces import IStatusMessage
from wise.msfd.translation import get_translated, retrieve_translation
from wise.msfd.utils import (ItemList, TemplateMixin, db_objects_to_dict,
fixedorder_sortkey, timeit)
import pdfkit
from ..nationalsummary.pdfexport import (AssessmentExportCover,
ProgressAssessment, SummaryAssessment)
from .base import BaseRegSummaryView
from .descriptor_assessments import RegDescriptorLevelAssessments
from .introduction import RegionalIntroduction
logger = logging.getLogger('wise.msfd')
# Reuses the national AssessmentExportCover rendering with the
# regional-summary view helpers mixed in first (MRO priority).
class RegionalAssessmentExportCover(BaseRegSummaryView, AssessmentExportCover):
    """ PDF Assessment cover for regional summaries """
class RegionalSummaryAssessment(BaseRegSummaryView, SummaryAssessment):
    """ Make National summary code compatible for Regional summary """

    def __init__(self, context, request, overall_scores,
                 reg_desc_region_folder):
        # NOTE(review): super() is anchored on SummaryAssessment, not on
        # this class, so the MRO lookup starts *after* SummaryAssessment
        # and its own __init__ is skipped -- presumably intentional to
        # bypass the national-summary initialisation; confirm before
        # changing.
        super(SummaryAssessment, self).__init__(context, request)

        self.overall_scores = overall_scores
        self.reg_desc_region_folder = reg_desc_region_folder

    def setup_data(self):
        """Collect per-descriptor overall scores for the region folder.

        Builds and caches (as self.summary_assess_data) a list shaped
        like [(region_title, [(descriptor_type, descr_rows)])], where
        each descr_row is [descriptor_title, score, score, ...] with one
        score per article folder.
        """
        res = []
        region_folder = self.reg_desc_region_folder
        table_rows = []
        descr_folders = self.get_descr_folders(region_folder)

        for descr_type, descriptors in self.descriptor_types:
            descr_rows = []

            for descr in descriptors:
                # raises IndexError if the descriptor folder is missing
                descr_folder = [
                    d
                    for d in descr_folders
                    if d.id.upper() == descr
                ][0]

                # Remove brackets with text from descriptor title
                # D4 - Food webs/D1 Biodiversity - ecosystems (D4/D1)
                descriptor_title = descr_folder.title.split('(')[0]
                row = [descriptor_title]

                for article_folder in descr_folder.contentValues():
                    score = self.get_overall_score(
                        region_folder.id.upper(), descr_folder.id.upper(),
                        article_folder.title
                    )
                    row.append(score)

                descr_rows.append(row)

            table_rows.append((descr_type, descr_rows))

        res.append((region_folder.title, table_rows))

        self.summary_assess_data = res

        return res
class RegProgressAssessment(BaseRegSummaryView, ProgressAssessment):
    """ Make National summary code compatible for Regional summary """

    # overrides the national ProgressAssessment template with the
    # regional one
    template = ViewPageTemplateFile('pt/progress-assessment.pt')
class AssessmentExportView(BaseRegSummaryView):
    """PDF export view for the regional summary assessment (Art. 12)."""

    help_text = "HELP TEXT"
    template = ViewPageTemplateFile('pt/report-data.pt')
    report_header_template = ViewPageTemplateFile(
        'pt/assessment-export-header.pt'
    )
    # NOTE(review): year says "2012" although the rendered titles
    # reference 2018 -- confirm whether this attribute is still used
    year = "2012"

    render_header = True
    render_recommendations = True

    def _get_css(self):
        """Absolute filesystem paths of the stylesheets embedded in the
        generated PDF."""
        return [
            resource_filename('wise.theme',
                              'static/wise/css/main.css'),
            resource_filename('wise.msfd',
                              'static/wise/dist/css/compliance.css'),
            resource_filename('wise.msfd',
                              'static/wise/dist/css/pdf_export.css'),
        ]

    def _get_cover(self):
        """URL of the cover page, or "" when running on localhost
        (wkhtmltopdf cannot fetch a localhost-only cover page)."""
        absolute_url = self.context.absolute_url()
        cover_url = absolute_url + '/export-cover'

        if 'localhost' in absolute_url:
            return ""

        cover_url = cover_url.replace('localhost:5080',
                                      'office.pixelblaster.ro:4880')

        return cover_url

    def _get_toc(self):
        """Table-of-contents options passed to pdfkit.

        BUGFIX: the original had a trailing comma after the
        resource_filename() call, which turned xsl_file into a 1-tuple.
        pdfkit expects option values to be a plain string (or a
        2-tuple), so the 1-tuple broke the toc option; the value must
        be the path string itself.
        """
        xsl_file = resource_filename('wise.msfd', 'data/pdf_toc.xsl')
        toc = {"xsl-style-sheet": xsl_file}

        return toc

    def download_pdf(self):
        """Render self.report_html to PDF via pdfkit/wkhtmltopdf and
        return it as an 'attachment' response."""
        options = {
            'margin-top': '0.5in',
            'margin-right': '0.5in',
            'margin-bottom': '0.5in',
            'margin-left': '0.5in',
            # 'footer-left': "Page",
            'footer-font-size': '7',
            'footer-right': 'Page [page] of [topage]',
            'encoding': "UTF-8",
        }
        css = self._get_css()
        cover = self._get_cover()
        toc = self._get_toc()
        doc = pdfkit.from_string(
            self.report_html, False, options=options,
            cover=cover,
            toc=toc,
            css=css,
            cover_first=True
        )
        sh = self.request.response.setHeader

        sh('Content-Type', 'application/pdf')
        fname = "{}-Draft".format(self.country_name)
        sh('Content-Disposition',
           'attachment; filename=%s.pdf' % fname)

        return doc

    # @cache(get_reportdata_key, dependencies=['translation'])
    @timeit
    def render_reportdata(self):
        """Assemble the report sections (header, introduction, summary,
        descriptor-level and progress assessments) and render them
        through the report template."""
        report_header = self.report_header_template(
            title="Commission assessment / Art12 / 2018 / {}-summary".format(
                self.country_name,
            )
        )
        # trans_edit_html = self.translate_view()()

        # 3. Descriptor-level assessments; rendering it populates
        # overall_scores / reg_desc_region_folder used by section 2
        descriptor_lvl_assess = RegDescriptorLevelAssessments(self,
                                                              self.request)
        descriptor_lvl_assess()
        overall_scores = descriptor_lvl_assess.overall_scores
        reg_desc_region_folder = descriptor_lvl_assess.reg_desc_region_folder

        # 1. Introduction
        introduction = RegionalIntroduction(self.context, self.request)

        # 2. Summary Assessment
        sum_assess = RegionalSummaryAssessment(
            self, self.request, overall_scores, reg_desc_region_folder
        )

        # 4. Progress Assessment (skipped for the PDF download)
        prog_assess = ""

        if self.render_recommendations:
            prog_assess = RegProgressAssessment(self, self.request)

        self.tables = [
            report_header,
            introduction,
            sum_assess,
            descriptor_lvl_assess,
            prog_assess,
            # ArticleTable(self, self.request, 'Art7'),
            # ArticleTable(self, self.request, 'Art3-4'),
            # trans_edit_html,
        ]

        template = self.template

        return template(tables=self.tables)

    def __call__(self):
        if 'edit-data' in self.request.form:
            url = "{}/edit".format(self._country_folder.absolute_url())

            return self.request.response.redirect(url)

        if 'download_pdf' in self.request.form:
            # strip interactive chrome from the PDF output before
            # rendering; the PDF itself is returned after the render
            self.render_header = False
            self.render_recommendations = False

        report_html = self.render_reportdata()
        self.report_html = report_html

        if 'download_pdf' in self.request.form:
            return self.download_pdf()

        if 'translate' in self.request.form:
            for value in self._translatable_values:
                retrieve_translation(self.country_code, value)

            messages = IStatusMessage(self.request)
            messages.add(u"Auto-translation initiated, please refresh "
                         u"in a couple of minutes", type=u"info")

        @timeit
        def render_html():
            return self.index()

        return render_html()
| {"/src/wise/msfd/compliance/nationaldescriptors/a8esa.py": ["/src/wise/msfd/compliance/nationaldescriptors/data.py"], "/src/wise/msfd/compliance/nationalsummary/main.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/regionalsummary/introduction.py": ["/src/wise/msfd/compliance/nationalsummary/introduction.py"], "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/nationalsummary/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py"], "/src/wise/msfd/compliance/regionalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationalsummary/pdfexport.py", "/src/wise/msfd/compliance/regionalsummary/introduction.py"], "/src/wise/msfd/compliance/regionaldescriptors/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/utils.py", "/src/wise/msfd/compliance/regionaldescriptors/data.py"], "/src/wise/msfd/compliance/landingpage.py": ["/src/wise/msfd/compliance/assessment.py"], "/src/wise/msfd/__init__.py": ["/src/wise/msfd/patch.py"], "/src/wise/msfd/search/a9.py": ["/src/wise/msfd/__init__.py", "/src/wise/msfd/search/base.py"], "/src/wise/msfd/compliance/nationalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py", "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py", "/src/wise/msfd/compliance/nationalsummary/introduction.py"]} |
42,579 | laszlocseh/wise.msfd | refs/heads/master | /src/wise/msfd/compliance/regionaldescriptors/reportdata.py | import logging
from io import BytesIO
from sqlalchemy import or_
import xlsxwriter
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile
from Products.statusmessages.interfaces import IStatusMessage
from wise.msfd import db, sql2018
from wise.msfd.gescomponents import get_features, get_parameters
from wise.msfd.translation import retrieve_translation
from wise.msfd.utils import ItemList, timeit
from ..nationaldescriptors.utils import consolidate_singlevalue_to_list
from .a8 import RegDescA82012, RegDescA82018Row
from .a9 import RegDescA92012, RegDescA92018Row
from .a10 import RegDescA102012, RegDescA102018Row
from .base import BaseRegComplianceView
from .data import get_report_definition
from .proxy import Proxy2018
logger = logging.getLogger('wise.msfd')
class RegReportData2012(BaseRegComplianceView):
    """Regional descriptors report-data view for the 2012 cycle.

    The article name (self.article: 'Art8'/'Art9'/'Art10') selects one
    of the implementation classes below via getattr.
    """

    help_text = "HELP TEXT"
    template = ViewPageTemplateFile('pt/report-data.pt')
    year = "2012"

    cache_key_extra = 'reg-desc-2012'

    Art8 = RegDescA82012
    Art9 = RegDescA92012
    Art10 = RegDescA102012

    @db.use_db_session('2012')
    def get_report_data(self):
        """Instantiate the article implementation and return its rows."""
        impl_class = getattr(self, self.article)
        result = impl_class(self, self.request)

        return result.allrows

    def data_to_xls(self, data):
        """Serialize the compound rows into an in-memory .xlsx stream."""
        # Create a workbook and add a worksheet.
        out = BytesIO()
        workbook = xlsxwriter.Workbook(out, {'in_memory': True})

        wtitle = self.country_region_code
        # worksheet names are capped (xlsx limit is 31 chars)
        worksheet = workbook.add_worksheet(unicode(wtitle)[:30])

        row_index = 0

        for compoundrow in data:
            title = compoundrow.field.title
            rows = compoundrow.rows

            for row in rows:
                sub_title, values = row
                worksheet.write(row_index, 0, title)
                worksheet.write(row_index, 1, unicode(sub_title or ''))

                for j, value in enumerate(values):
                    worksheet.write(row_index, j + 2, unicode(value or ''))

                row_index += 1

        workbook.close()
        out.seek(0)

        return out

    def download(self):
        """Stream the report as an .xlsx attachment."""
        xlsdata = self.get_report_data()
        xlsio = self.data_to_xls(xlsdata)
        sh = self.request.response.setHeader

        sh('Content-Type', 'application/vnd.openxmlformats-officedocument.'
           'spreadsheetml.sheet')
        fname = "-".join(["RegionalDescriptors",
                          self.country_region_code,
                          self.descriptor,
                          self.article,
                          self.year])
        sh('Content-Disposition',
           'attachment; filename=%s.xlsx' % fname)

        return xlsio.read()

    # @cache(get_reportdata_key, dependencies=['translation'])
    def render_reportdata(self):
        """Render the report data plus header through the template."""
        logger.info("Quering database for 2012 report data: %s %s %s",
                    self.country_region_code, self.article,
                    self.descriptor)
        data = self.get_report_data()

        report_header = self.report_header_template(
            title="Member State report / {} / 2012 / {} / {}".format(
                self.article,
                self.descriptor_title,
                self.country_region_name,
            ),
            factsheet=None,
            # TODO: find out how to get info about who reported
            report_by='Member state',
            report_due='2012-10-15',
            help_text=self.help_text,
            use_translation=False
        )

        template = self.template

        return template(data=data, report_header=report_header)

    def __call__(self):
        if 'download' in self.request.form:
            return self.download()

        report_html = self.render_reportdata()
        self.report_html = report_html

        @timeit
        def render_html():
            return self.index()

        return render_html()
class RegReportData2018(BaseRegComplianceView):
    """Regional descriptors report-data view for the 2018 cycle.

    Data is pulled from the 2018 database views; the article name
    (self.article) selects both the data property
    (get_data_from_view_<article>) and the row class below.
    """
    # implements(IReportDataView)

    help_text = "HELP TEXT"
    template = ViewPageTemplateFile('pt/report-data.pt')
    year = "2018"

    cache_key_extra = "reg-desc-2018"

    # Art8 = ViewPageTemplateFile('pt/report-data.pt')
    # Art9 = ViewPageTemplateFile('pt/report-data.pt')
    # Art10 = ViewPageTemplateFile('pt/report-data.pt')

    Art8 = RegDescA82018Row
    Art9 = RegDescA92018Row
    Art10 = RegDescA102018Row

    @property
    def all_descriptor_ids(self):
        """Set of GES component ids for the current descriptor; for
        'D1.x' descriptors the umbrella 'D1' id is also included."""
        all_ids = list(self.descriptor_obj.all_ids())

        if self.descriptor.startswith('D1.'):
            all_ids.append('D1')

        all_ids = set(all_ids)

        return all_ids

    @property
    def get_data_from_view_Art9(self):
        """Rows from V_ART9_GES_2018 for the folder's subregions
        (NULL-region rows included) and relevant GES components,
        ordered by GESComponent."""
        t = sql2018.t_V_ART9_GES_2018
        count, q = db.get_all_records_ordered(
            t,
            ('GESComponent',),
            or_(t.c.Region.in_(self._countryregion_folder._subregions),
                t.c.Region.is_(None)),
            t.c.GESComponent.in_(self.all_descriptor_ids),
        )

        return q

    @property
    def get_data_from_view_Art8(self):
        """Distinct rows from V_ART8_GES_2018 that carry an Element or
        a Criteria, for the folder's subregions."""
        sess = db.session()
        t = sql2018.t_V_ART8_GES_2018

        conditions = [
            t.c.Region.in_(self._countryregion_folder._subregions),
            t.c.GESComponent.in_(self.all_descriptor_ids),
            or_(t.c.Element.isnot(None),
                t.c.Criteria.isnot(None)),
        ]

        # groupby IndicatorCode
        q = sess\
            .query(t)\
            .filter(*conditions)\
            .distinct()

        res = [row for row in q]

        return res

    @property
    def get_data_from_view_Art10(self):
        """Rows from V_ART10_Targets_2018 for the folder's subregions,
        filtered in Python by GESComponents (comma-separated column)
        and, for 'D1.x' descriptors, additionally by feature names."""
        t = sql2018.t_V_ART10_Targets_2018

        # TODO check conditions for other countries beside NL
        # conditions = [t.c.GESComponents.in_(all_ids)]

        count, res = db.get_all_records_ordered(
            t,
            ('Features', 'TargetCode', 'Element'),
            t.c.Region.in_(self._countryregion_folder._subregions),
            # *conditions
        )

        out = []

        # GESComponents contains multiple values separated by comma
        # filter rows by splitting GESComponents
        for row in res:
            ges_comps = getattr(row, 'GESComponents', ())
            ges_comps = set([g.strip() for g in ges_comps.split(',')])

            if ges_comps.intersection(self.all_descriptor_ids):
                out.append(row)

        if not self.descriptor.startswith('D1.'):
            return out

        # NOTE(review): the Parameter condition below is built but never
        # applied to any query -- dead code or a missing filter; confirm.
        conditions = []
        params = get_parameters(self.descriptor)
        p_codes = [p.name for p in params]
        conditions.append(t.c.Parameter.in_(p_codes))

        ok_features = set([f.name for f in get_features(self.descriptor)])
        out_filtered = []

        for row in out:
            feats = set(row.Features.split(','))

            if feats.intersection(ok_features):
                out_filtered.append(row)

        return out_filtered

    @db.use_db_session('2018')
    def get_report_data(self):
        """Assemble one compound row per report-definition field."""
        # TODO check if data is filtered by features for D1
        db_data = getattr(self, 'get_data_from_view_' + self.article, None)
        db_data = [Proxy2018(row, self) for row in db_data]

        if self.article == 'Art8':
            db_data = consolidate_singlevalue_to_list(db_data, 'IndicatorCode')

        countries = self.available_countries
        regions = self._countryregion_folder._subregions
        descriptor_obj = self.descriptor_obj

        fields = get_report_definition('2018', self.article).get_fields()
        impl_class = getattr(self, self.article)

        result = []

        for field in fields:
            row_class = impl_class(self, self.request, db_data, descriptor_obj,
                                   regions, countries, field)
            # fields without a matching row-data method are skipped
            field_data_method = getattr(row_class, field.getrowdata, None)

            if not field_data_method:
                continue

            result.append(field_data_method())

        # result.extend(self.get_adequacy_assessment_data())

        return result

    def data_to_xls(self, data):
        """Serialize the compound rows into an in-memory .xlsx stream."""
        # Create a workbook and add a worksheet.
        out = BytesIO()
        workbook = xlsxwriter.Workbook(out, {'in_memory': True})

        wtitle = self.country_region_code
        # worksheet names are capped (xlsx limit is 31 chars)
        worksheet = workbook.add_worksheet(unicode(wtitle)[:30])

        row_index = 0

        for compoundrow in data:
            title = compoundrow.field.title
            rows = compoundrow.rows

            for row in rows:
                sub_title, values = row
                worksheet.write(row_index, 0, title)
                worksheet.write(row_index, 1, unicode(sub_title or ''))

                for j, value in enumerate(values):
                    worksheet.write(row_index, j + 2, unicode(value or ''))

                row_index += 1

        workbook.close()
        out.seek(0)

        return out

    def download(self):
        """Stream the report as an .xlsx attachment."""
        xlsdata = self.get_report_data()
        xlsio = self.data_to_xls(xlsdata)
        sh = self.request.response.setHeader

        sh('Content-Type', 'application/vnd.openxmlformats-officedocument.'
           'spreadsheetml.sheet')
        fname = "-".join(["RegionalDescriptors",
                          self.country_region_code,
                          self.descriptor,
                          self.article,
                          self.year])
        sh('Content-Disposition',
           'attachment; filename=%s.xlsx' % fname)

        return xlsio.read()

    def auto_translate(self):
        """Queue auto-translation for every translatable value not yet
        seen, flash a status message, then redirect to the 2018 view."""
        data = self.get_report_data()
        translatables = self.TRANSLATABLES
        seen = set()

        for compoundrow in data:
            rows = compoundrow.rows

            for row in rows:
                sub_title, values = row

                if compoundrow.field.name in translatables:
                    for indx, value in enumerate(values):
                        if not value:
                            continue

                        if value not in seen:
                            # column index maps onto the country list
                            country_code = self.available_countries[indx][0]
                            retrieve_translation(country_code, value)
                            seen.add(value)

        messages = IStatusMessage(self.request)
        messages.add(u"Auto-translation initiated, please refresh "
                     u"in a couple of minutes", type=u"info")

        url = self.context.absolute_url() + '/@@view-report-data-2018'

        return self.request.response.redirect(url)

    # @cache(get_reportdata_key, dependencies=['translation'])
    def render_reportdata(self):
        """Render the report data plus header through the template."""
        logger.info("Quering database for 2018 report data: %s %s %s",
                    self.country_region_code, self.article,
                    self.descriptor)
        data = self.get_report_data()

        report_header = self.report_header_template(
            title="Member State report / {} / 2018 / {} / {}".format(
                self.article,
                self.descriptor_title,
                self.country_region_name,
            ),
            factsheet=None,
            # TODO: find out how to get info about who reported
            report_by='Member state',
            report_due='2018-10-15',
            help_text=self.help_text,
            use_translation=True
        )

        # template = getattr(self, self.article, None)
        template = self.template

        return template(data=data, report_header=report_header)

    def __call__(self):
        if 'download' in self.request.form:
            return self.download()

        if 'translate' in self.request.form:
            return self.auto_translate()

        report_html = self.render_reportdata()
        trans_edit_html = self.translate_view()()
        self.report_html = report_html + trans_edit_html

        @timeit
        def render_html():
            return self.index()

        return render_html()
| {"/src/wise/msfd/compliance/nationaldescriptors/a8esa.py": ["/src/wise/msfd/compliance/nationaldescriptors/data.py"], "/src/wise/msfd/compliance/nationalsummary/main.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/regionalsummary/introduction.py": ["/src/wise/msfd/compliance/nationalsummary/introduction.py"], "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/nationalsummary/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py"], "/src/wise/msfd/compliance/regionalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationalsummary/pdfexport.py", "/src/wise/msfd/compliance/regionalsummary/introduction.py"], "/src/wise/msfd/compliance/regionaldescriptors/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/utils.py", "/src/wise/msfd/compliance/regionaldescriptors/data.py"], "/src/wise/msfd/compliance/landingpage.py": ["/src/wise/msfd/compliance/assessment.py"], "/src/wise/msfd/__init__.py": ["/src/wise/msfd/patch.py"], "/src/wise/msfd/search/a9.py": ["/src/wise/msfd/__init__.py", "/src/wise/msfd/search/base.py"], "/src/wise/msfd/compliance/nationalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py", "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py", "/src/wise/msfd/compliance/nationalsummary/introduction.py"]} |
42,580 | laszlocseh/wise.msfd | refs/heads/master | /src/wise/msfd/compliance/nationalsummary/introduction.py | import logging
from collections import defaultdict, namedtuple
from datetime import datetime
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile
from wise.msfd import db, sql2018
from wise.msfd.data import get_text_reports_2018
from wise.msfd.translation import get_translated, retrieve_translation
from wise.msfd.utils import (ItemList, TemplateMixin, db_objects_to_dict,
fixedorder_sortkey, timeit)
from .base import BaseNatSummaryView
from .odt_utils import (create_heading, create_paragraph, create_table,
DOCUMENT_TITLE, STYLES)
logger = logging.getLogger('wise.msfd')
def compoundrow(self, title, rows, show_header=True):
    """Build a CompoundRow for the 2012 report tables.

    The given title is used for both the field's name and its title.
    """
    Field = namedtuple("Field", ["name", "title"])

    return CompoundRow(self, self.request, Field(name=title, title=title),
                       rows, show_header)
class CompoundRow(TemplateMixin):
    """A table row group: one field title spanning several sub-rows."""

    template = ViewPageTemplateFile('pt/compound-row.pt')

    def __init__(self, context, request, field, rows, show_header=True):
        self.context = context
        self.request = request
        self.field = field
        self.rows = rows
        # rowspan lets the template span the title cell over all sub-rows
        self.rowspan = len(rows)
        self.show_header = show_header
class AssessmentAreas2018(BaseNatSummaryView):
    """ Implementation of 1.3 Reporting areas (Marine Reporting Units) """

    template = ViewPageTemplateFile('pt/assessment-areas.pt')

    @db.use_db_session('2018')
    def get_data(self):
        """Return (region id, zone type, MRU id, description,
        translation) rows for the country, queueing auto-translation
        for descriptions that have none yet."""
        mapper_class = sql2018.MRUsPublication
        res = []

        # for better query speed we get only these columns
        col_names = ('Country', 'rZoneId', 'thematicId', 'nameTxtInt',
                     'nameText', 'spZoneType', 'legisSName', 'Area')
        columns = [getattr(mapper_class, name) for name in col_names]

        count, data = db.get_all_specific_columns(
            columns,
            mapper_class.Country == self.country_code
        )

        for row in data:
            description = row.nameTxtInt or row.nameText or ""
            translation = get_translated(description, self.country_code) or ""

            if not translation:
                # fire-and-forget: the translation appears on a later
                # render
                retrieve_translation(self.country_code, description)
                # NOTE(review): _translatable_values is presumably set
                # up by BaseNatSummaryView -- confirm
                self._translatable_values.append(description)

            res.append((row.rZoneId, row.spZoneType, row.thematicId,
                        description, translation))

        return res

    def __call__(self):
        data = self.get_data()
        # kept for the ODT export (see Introduction.get_odt_data)
        self.areas_data = data

        return self.template(data=data)
class ReportingHistoryTable(BaseNatSummaryView):
    """ Implementation for the reporting history table
    """

    template = ViewPageTemplateFile('pt/report-history-compound-table.pt')
    show_header = False

    def __init__(self, context, request):
        super(ReportingHistoryTable, self).__init__(context, request)

        # XML/spatial deliveries from the ReportingHistory table...
        self.data = self.get_reporting_history_data()

        # ...extended with the 2018 text reports, normalised to the
        # same dict keys
        text_reports = get_text_reports_2018(self.country_code)
        data_text = []

        # FileName, LocationURL, DateDue, DateReceived, ReportingDelay
        for row in text_reports:
            _row = {}
            file_url = row[0]
            release_date = row[1]
            file_url_split = file_url.split('/')

            _row['FileName'] = file_url_split[-1]
            _row['LocationURL'] = file_url
            _row['DateDue'] = datetime.strptime('15-10-2018', '%d-%m-%Y')
            _row['DateReceived'] = release_date
            _row['ReportingDelay'] = None

            data_text.append(_row)

        self.data.extend(data_text)

    @db.use_db_session('2018')
    def get_reporting_history_data(self):
        """ReportingHistory rows (as dicts) for the two relevant
        reporting obligations of the country."""
        # obligation = 'MSFD reporting on Initial Assessments (Art. 8), ' \
        #     'Good Environmental Status (Art.9), Env. targets & ' \
        #     'associated indicators (Art.10) & related reporting on ' \
        #     'geographic areas, regional cooperation and metadata.'

        obligations = (
            'MSFD - Article 4 - Spatial data',
            'MSFD - Articles 8, 9 and 10 - XML data'
        )

        mc = sql2018.ReportingHistory
        _, res = db.get_all_records(
            mc,
            mc.CountryCode == self.country_code,
            mc.ReportingObligation.in_(obligations)
        )
        res = db_objects_to_dict(res)

        return res

    def location_url(self, location, filename):
        """Anchor tag pointing at the envelope (the URL minus the
        trailing filename)."""
        tmpl = "<a href={} target='_blank'>{}</a>"
        location = location.replace(filename, '')

        # return location

        return tmpl.format(location, location)

    def format_date(self, date):
        """datetime -> date (falsy values pass through unchanged)."""
        if not date:
            return date

        # formatted = date.strftime('%m/%d/%Y')
        formatted = date.date()

        return formatted

    def headers(self):
        headers = (
            'Files available', 'Access to reports',
            'Report due', 'Report received', 'Difference (days)'
        )

        return headers

    def calculate_reporting_delay(self, reporting_delay, report_due,
                                  report_date):
        """Signed day difference between due date and receipt
        (positive means received before the deadline).

        NOTE(review): the reporting_delay argument is currently unused;
        see the commented-out branch below.
        """
        # if reporting_delay:
        #     return -reporting_delay

        timedelta = report_due - report_date

        return "{:+d}".format(timedelta.days)

    def get_article_row(self, obligations):
        """One row per (envelope, due, received, delay) group, with the
        group's filenames folded into an ItemList."""
        # Group the data by envelope, report due, report date
        # and report delay
        data = [
            row for row in self.data
            if row.get('ReportingObligation') in obligations
        ]

        rows = []
        groups = defaultdict(list)

        for row in data:
            filename = row.get('FileName')
            envelope = self.location_url(row.get('LocationURL'), filename)
            report_due = self.format_date(row.get('DateDue'))
            report_date = self.format_date(row.get('DateReceived'))
            report_delay = self.calculate_reporting_delay(
                row.get('ReportingDelay'), report_due, report_date
            )

            k = (envelope, report_due, report_date, report_delay)
            groups[k].append(filename)

        for _k, filenames in groups.items():
            values = [
                ItemList(rows=filenames),  # Filenames
                _k[0],  # Envelope url
                _k[1],  # Report due
                _k[2],  # Report date
                _k[3]  # Report delay
            ]
            rows.append(values)

        # newest / most delayed first
        sorted_rows = sorted(rows,
                             key=lambda _row: (_row[3], _row[2]),
                             reverse=True)

        return sorted_rows

    def __call__(self):
        data = self.data
        obligations = set([x.get('ReportingObligation') for x in data])

        self.allrows = [
            compoundrow(self, "", self.get_article_row(obligations),
                        show_header=self.show_header)
        ]
        # kept for the ODT export (attribute name 'hystory' is a
        # historical typo relied upon by callers)
        self.report_hystory_data = self.allrows[0].rows

        return self.template(rows=self.allrows)
class ReportedInformationTable(BaseNatSummaryView):
    """ Alternate implementation for the reporting history table
    Reads data from sql2018.ReportedInformation
    """

    template = ViewPageTemplateFile('pt/report-history-compound-table.pt')
    show_header = False

    def __init__(self, context, request):
        super(ReportedInformationTable, self).__init__(context, request)

        self.data = self.get_reporting_history_data()

    @db.use_db_session('2018')
    def get_reporting_history_data(self):
        """All ReportedInformation rows (as dicts) for the country."""
        mc = sql2018.ReportedInformation
        _, res = db.get_all_records(
            mc,
            mc.CountryCode == self.country_code,
        )
        res = db_objects_to_dict(res)

        return res

    def location_url(self, location, filename):
        """Anchor tag pointing at the envelope (the URL minus the
        trailing filename)."""
        tmpl = "<a href={} target='_blank'>{}</a>"
        location = location.replace(filename, '')

        # return location

        return tmpl.format(location, location)

    def format_date(self, date):
        """datetime -> date (falsy values pass through unchanged)."""
        if not date:
            return date

        # formatted = date.strftime('%m/%d/%Y')
        formatted = date.date()

        return formatted

    def headers(self):
        headers = (
            'Files available', 'Access to reports',
            'Report due', 'Report received', 'Difference (days)'
        )

        return headers

    def get_text_and_spacial_files(self):
        """Rows from the legacy ReportingHistoryTable (text + spatial
        deliveries), rendered for their side effect."""
        view = ReportingHistoryTable(self, self.request)
        view()

        return view.report_hystory_data

    def get_article_rows(self):
        """One row per (envelope, due, received, delay) group, merged
        with the legacy text/spatial rows and sorted."""
        # Group the data by envelope, report due, report date and report delay
        data = self.data

        rows = []
        groups = defaultdict(list)

        for row in data:
            filename = row.get('ReportedFileLink').split('/')[-1]
            envelope = self.location_url(row.get('ReportedFileLink'), filename)

            # Article 18 files not relevant for this report, exclude them
            if 'art18' in envelope:
                continue

            report_due = datetime(year=2018, month=10, day=15).date()
            report_date = row.get('ReportingDate')
            # positive delay = received before the deadline
            report_delay = report_due - report_date

            k = (envelope, report_due, report_date, report_delay.days)
            groups[k].append(filename)

        for _k, filenames in groups.items():
            values = [
                ItemList(rows=filenames),  # Filenames
                _k[0],  # Envelope url
                _k[1],  # Report due
                _k[2],  # Report date
                _k[3]  # Report delay
            ]
            rows.append(values)

        text_files = self.get_text_and_spacial_files()
        rows.extend(text_files)

        sorted_rows = sorted(rows,
                             key=lambda _row: (_row[3], _row[1]),
                             reverse=True)

        return sorted_rows

    def __call__(self):
        self.allrows = [
            compoundrow(self, 'Row', self.get_article_rows(),
                        show_header=self.show_header)
        ]
        # kept for the ODT export (attribute name 'hystory' is a
        # historical typo relied upon by callers)
        self.report_hystory_data = self.allrows[0].rows

        return self.template(rows=self.allrows)
class Introduction(BaseNatSummaryView):
    """ Section 1 ('Introduction') of the national summary report """

    template = ViewPageTemplateFile('pt/introduction.pt')

    @property
    def document_title(self):
        """ Title shown at the very top of the exported document """
        return (u"Marine Strategy Framework Directive - Article 12 technical "
                u"assessment of the 2018 updates of Articles 8, 9 and 10")

    @timeit
    def reporting_history(self):
        """ Render the reporting history table; remember its data so the
        ODT export can reuse it without recomputing """
        # view = ReportedInformationTable(self, self.request)
        table = ReportingHistoryTable(self, self.request)
        rendered = table()
        self.report_hystory_data = table.report_hystory_data

        return rendered

    @property
    def information_memberstate(self):
        """ Introductory paragraph on the Member State's 2018 obligations """
        return (u"Between July and October 2018, the Member States were due "
                u"to submit updates of the assessment of their marine waters "
                u"(Article 8), the determination of GES (Article 9) and the "
                u"setting of environmental targets (Article 10), in "
                u"accordance with Article 17 of the Marine Strategy "
                u"Framework Directive (MSFD).")

    @property
    def scope_of_marine_waters(self):
        return self.get_field_value('scope_of_marine_waters')

    @property
    def assessment_methodology(self):
        return self.get_field_value('assessment_methodology')

    @property
    def assessment_areas_title(self):
        """ Caption shown above the Marine Reporting Units table """
        return (u"The table lists the Marine Reporting Units used for the "
                u"2018 reporting on updates of Articles 8, 9 and 10.")

    @property
    @timeit
    def assessment_areas(self):
        """ Render the 2018 assessment areas table; remember its data so the
        ODT export can reuse it """
        areas_view = AssessmentAreas2018(self, self.request)
        rendered = areas_view()
        self.assessment_areas_data = areas_view.areas_data

        return rendered

    def get_odt_data(self, document):
        """ Build the flat list of ODT nodes for the introduction chapter.

        NOTE(review): presumably the HTML view runs first and populates
        report_hystory_data / assessment_areas_data — confirm with callers.
        """
        elements = []
        add = elements.append

        add(create_paragraph(self.document_title,
                             style=STYLES[DOCUMENT_TITLE]))

        # 1. Introduction
        add(create_heading(1, u'Introduction'))

        # 1.1 Reporting history
        add(create_heading(2, u'Information reported by the Member State'))
        add(create_paragraph(self.information_memberstate))

        add(create_heading(3, u"Reporting history"))
        history_headers = (
            'Files available', 'Access to reports',
            'Report due', 'Report received', 'Difference (days)'
        )
        add(create_table(document, self.report_hystory_data,
                         headers=history_headers))

        # 1.2 Marine waters
        add(create_heading(2, u"Member State's marine waters"))
        add(create_paragraph(
            self.get_transformed_richfield_text('scope_of_marine_waters')))

        # 1.3 Marine Unit Ids
        add(create_heading(2, u'Reporting areas (Marine Reporting Units)'))
        add(create_paragraph(self.assessment_areas_title))
        area_headers = ('Region', 'Zone Type', 'MarineUnitID',
                       'Marine Reporting Unit Description',
                       'Marine Reporting Unit Description (Translated)')
        add(create_table(document, self.assessment_areas_data,
                         headers=area_headers))

        # 1.4 Assessment methodology
        add(create_heading(2, u'Assessment methodology'))
        add(create_paragraph(
            self.get_transformed_richfield_text('assessment_methodology')))

        return elements

    def __call__(self):
        # keep the wrapper so the render time is measured by @timeit
        @timeit
        def render_introduction():
            return self.template()

        return render_introduction()
42,581 | laszlocseh/wise.msfd | refs/heads/master | /src/wise/msfd/compliance/convert.py | """ A collection of data transformers to be used to convert 2018 DB data in
some other useful formats. Used when displaying data.
"""
from wise.msfd.gescomponents import get_ges_component
from wise.msfd.labels import GES_LABELS
from wise.msfd.translation import get_translated
from wise.msfd.utils import ItemLabel, ItemList, SimpleTable
def csv_ges_labels_list(field, value, lang):
    """ Turn a comma-separated string into an ItemList of labelled items,
    looking the title up in the field's label collection """
    labelled = [
        ItemLabel(v, GES_LABELS.get(field.label_collection, v))
        for v in set(value.split(','))
    ]

    return ItemList(rows=labelled)
def ges_component(field, value, lang):
    """ Resolve a GES component code; fall back to the raw value when the
    code cannot be resolved """
    criterion = get_ges_component(value)

    return value if criterion is None else criterion
def inverse_label(field, value, lang):
    """ Labelled item with title and value swapped (title first) """
    label = GES_LABELS.get(field.label_collection, value)

    return ItemLabel(label, value)
def ges_component_list(field, value, lang):
    """ Resolve every comma-separated GES component into an ItemList """
    components = []

    for part in value.split(','):
        components.append(ges_component(None, part, lang))

    return ItemList(rows=components)
def csv_ges_labels_inverse_list(field, value, lang):
    """ Like csv_ges_labels_list, but each item has title and value
    swapped (title shown first) """
    labelled = [
        ItemLabel(GES_LABELS.get(field.label_collection, v), v)
        for v in set(value.split(','))
    ]

    return ItemList(rows=labelled)
def csv_ges_labels_inverse_list_indicators(field, value, lang):
    """ Build an ItemList of indicator items (with title, url and
    translation handled by get_indicators) """
    items = [get_indicators(field, v, lang) for v in set(value.split(','))]

    return ItemList(rows=items)
def format_nr(field, value, lang):
    """ Format a number with two decimals; falsy values (None, 0, '')
    pass through unchanged """
    if not value:
        return value

    return "%.2f" % value
def get_indicators(field, value, lang):
    """ Build an ItemLabel for an indicator code.

    The displayed text gets the original title appended when a translation
    exists; the hover title becomes the translation. When a distinct url is
    known for the indicator, the title is wrapped in a help link.
    """
    code = value
    title = GES_LABELS.get('indicators', code)
    url = GES_LABELS.get('indicators_url', code)
    translated = get_translated(title, lang)

    display = code

    if translated:
        display = u"{} ({})".format(code, title)
        title = translated

    if url == code:
        # no dedicated url known for this indicator
        return ItemLabel(display, title)

    template = u'<a style="cursor: help;" target="_blank" href="{}">{}</a>'

    return ItemLabel(display, template.format(url, title))
def format_area(field, value, lang):
    """ Format a surface value as whole square kilometers; falsy values
    pass through unchanged """
    if not value:
        return value

    return "{:.0f} km2".format(value)
def mrus_as_table(field, value, lang):
    """ Render the MRU values as a SimpleTable keyed on the field name """
    return SimpleTable(field.name, value)
| {"/src/wise/msfd/compliance/nationaldescriptors/a8esa.py": ["/src/wise/msfd/compliance/nationaldescriptors/data.py"], "/src/wise/msfd/compliance/nationalsummary/main.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/regionalsummary/introduction.py": ["/src/wise/msfd/compliance/nationalsummary/introduction.py"], "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/nationalsummary/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py"], "/src/wise/msfd/compliance/regionalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationalsummary/pdfexport.py", "/src/wise/msfd/compliance/regionalsummary/introduction.py"], "/src/wise/msfd/compliance/regionaldescriptors/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/utils.py", "/src/wise/msfd/compliance/regionaldescriptors/data.py"], "/src/wise/msfd/compliance/landingpage.py": ["/src/wise/msfd/compliance/assessment.py"], "/src/wise/msfd/__init__.py": ["/src/wise/msfd/patch.py"], "/src/wise/msfd/search/a9.py": ["/src/wise/msfd/__init__.py", "/src/wise/msfd/search/base.py"], "/src/wise/msfd/compliance/nationalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py", "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py", "/src/wise/msfd/compliance/nationalsummary/introduction.py"]} |
42,582 | laszlocseh/wise.msfd | refs/heads/master | /src/wise/msfd/compliance/regionaldescriptors/data.py | from pkg_resources import resource_filename
from wise.msfd.compliance.utils import ReportDefinition
# Package-relative paths of the XML files describing the report structure,
# keyed by reporting year (2012 definitions not available yet)
definition_files = {
    '2018': 'data/report_2018_def.xml',
    # '2012': 'data/report_2012_def.xml',
}

# f_2012 = resource_filename(__package__, definition_files['2012'])
f_2018 = resource_filename(__package__, definition_files['2018'])

# Parsed report definitions per year, then per article
REPORT_DEFS = {
    '2018': {
        'Art8': ReportDefinition(f_2018, 'Art8'),
        'Art9': ReportDefinition(f_2018, 'Art9'),
        'Art10': ReportDefinition(f_2018, 'Art10'),
    },
    '2012': {
        # 'Art8a': ReportDefinition(f_2012, 'Art8a'),
        # 'Art8b': ReportDefinition(f_2012, 'Art8b'),
        # 'Art9': ReportDefinition(f_2012, 'Art9'),
        # 'Art10': ReportDefinition(f_2012, 'Art10'),
    }
}
def get_report_definition(year, article):
    """ Look up the parsed ReportDefinition for a year/article pair """
    year_definitions = REPORT_DEFS[year]

    return year_definitions[article]
| {"/src/wise/msfd/compliance/nationaldescriptors/a8esa.py": ["/src/wise/msfd/compliance/nationaldescriptors/data.py"], "/src/wise/msfd/compliance/nationalsummary/main.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/regionalsummary/introduction.py": ["/src/wise/msfd/compliance/nationalsummary/introduction.py"], "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/nationalsummary/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py"], "/src/wise/msfd/compliance/regionalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationalsummary/pdfexport.py", "/src/wise/msfd/compliance/regionalsummary/introduction.py"], "/src/wise/msfd/compliance/regionaldescriptors/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/utils.py", "/src/wise/msfd/compliance/regionaldescriptors/data.py"], "/src/wise/msfd/compliance/landingpage.py": ["/src/wise/msfd/compliance/assessment.py"], "/src/wise/msfd/__init__.py": ["/src/wise/msfd/patch.py"], "/src/wise/msfd/search/a9.py": ["/src/wise/msfd/__init__.py", "/src/wise/msfd/search/base.py"], "/src/wise/msfd/compliance/nationalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py", "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py", "/src/wise/msfd/compliance/nationalsummary/introduction.py"]} |
42,583 | laszlocseh/wise.msfd | refs/heads/master | /src/wise/msfd/compliance/landingpage.py |
import lxml.etree
from pkg_resources import resource_filename
from Products.Five.browser.pagetemplatefile import (PageTemplateFile,
ViewPageTemplateFile)
from .assessment import AssessmentDataMixin
from .base import BaseComplianceView
# Suffix appended to a landing page cell's CSS class, keyed by the subrow's
# color-class from landingpage.xml ('region'/'row' get no suffix; the '-a'
# variants presumably mark assessment cells — confirm against the CSS)
COLOR_SUFFIX = {
    "country": "-C",
    "country-a": "-CA",
    "region": "",
    "region-a": "-A",
    "row": "",
}
def _parse_landingpage_xml(path='compliance/landingpage.xml'):
    """ Parse landingpage.xml into LandingPageYearDefinition objects,
    one per top-level <year> node """
    xmlfile = resource_filename('wise.msfd', path)
    root = lxml.etree.parse(xmlfile).getroot()

    return [LandingPageYearDefinition(y) for y in root.iterchildren('year')]
class LandingPageYearDefinition(object):
    """ Parsed representation of a <year> node from landingpage.xml.

    A year consists of several rows (one per date/who/article/task) and
    each row holds subrows describing the links: one per country, one per
    region, or a single link spanning the whole article row.
    """

    def __init__(self, year_node):
        """ Flatten the node into self.rows, a list of tuples:
        (date, who, article, task, css_extra, subrows) where each subrow is
        (colspan_type, display_text, color_class, get_method)
        """
        date = year_node.attrib.get('date')
        css_extra = year_node.attrib.get('extra-css-class', '')

        parsed_rows = []

        for element in year_node.iterchildren('element'):
            attribs = element.attrib
            who = attribs.get('who')
            article = attribs.get('article')
            task = attribs.get('task')

            subrows = []

            for sub in element.iterchildren('row'):
                colspan_type = sub.attrib.get('colspan')
                subrows.append((
                    colspan_type,
                    sub.attrib.get('display-text'),
                    # color class defaults to the colspan type
                    sub.attrib.get('color-class', colspan_type),
                    sub.attrib.get('get-method'),
                ))

            parsed_rows.append((date, who, article, task, css_extra, subrows))

        self.rows = parsed_rows
class StartLandingPage(BaseComplianceView):
    """ Landing/Home page for assessment module """

    template = ViewPageTemplateFile("pt/landingpage.pt")
    # parsed once at class definition time from landingpage.xml
    year_defs = _parse_landingpage_xml()
    section = 'compliance-start'

    def __call__(self):
        rendered = [
            BaseLandingPageRow(self, self.request, year_def)()
            for year_def in self.year_defs
        ]

        return self.template(data=rendered)
class BaseLandingPageRow(BaseComplianceView, AssessmentDataMixin):
    """ Base class with all the needed base methods to build the landing page
    structure

    Each rendered cell is a tuple (colspan, text, url, css_class). The
    get-method named in landingpage.xml is resolved dynamically on this
    class (see make_subrow / __init__).
    """

    template = PageTemplateFile('pt/landingpage-row.pt')

    @property
    def _nr_of_countries(self):
        # total country count over all regions; used as the colspan for
        # cells spanning the whole row
        cnt = 0

        for region_folder in self._reg_desc_region_folders:
            available_countries = region_folder._countries_for_region
            cnt += len(available_countries)

        return cnt

    @property
    def regions_and_countries(self):
        # list of (REGION_ID, countries) pairs, region ids upper-cased
        res = []

        for region_folder in self._reg_desc_region_folders:
            region_id = region_folder.id.upper()
            available_countries = region_folder._countries_for_region
            res.append((region_id, available_countries))

        return res

    def _default(self):
        # fallback data provider when the configured get-method is missing
        return {}

    def get_2018_countries_assess(self):
        return self.get_2018_countries(extra_path='assessments')

    def get_2018_countries_reports(self):
        return self.get_2018_countries(extra_path='reports')

    def get_2018_countries(self, extra_path=''):
        # map upper-cased country folder id -> url of its subpage
        data = {}

        for folder in self._nat_desc_country_folders:
            url = "{}/{}".format(folder.absolute_url(), extra_path)
            reg_id = folder.id.upper()
            data[reg_id] = url

        return data

    def get_2018_regions_assess(self):
        return self.get_2018_regions(extra_path='assessments')

    def get_2018_regions_reports(self):
        return self.get_2018_regions(extra_path='reports')

    def get_2018_regions(self, extra_path=''):
        # map upper-cased region folder id -> url of its subpage
        data = {}

        for folder in self._reg_desc_region_folders:
            url = "{}/{}".format(folder.absolute_url(), extra_path)
            reg_id = folder.id.upper()
            data[reg_id] = url

        return data

    def _make_subrow_row(self, text, data, color_class, extra_css_class):
        # single cell spanning every country column
        res = []
        _text = text
        color_suffix = COLOR_SUFFIX.get(color_class, "")
        css_class = extra_css_class + " {}{}"
        res.append((self._nr_of_countries, _text, data.get('ROW', ''),
                    css_class.format('ROW', color_suffix)))

        return res

    def _make_subrow_region(self, text, data, color_class, extra_css_class):
        # one cell per region, spanning that region's countries; the
        # '_region' placeholder text is replaced by the region id
        res = []
        _text = text
        color_suffix = COLOR_SUFFIX.get(color_class, "")
        css_class = extra_css_class + " {}{}"

        for region_id, available_countries in self.regions_and_countries:
            if text == '_region':
                _text = region_id

            res.append((len(available_countries), _text,
                        data.get(region_id, ''),
                        css_class.format(region_id, color_suffix)))

        return res

    def _make_subrow_country(self, text, data, color_class, extra_css_class):
        # one cell per country; the '_country' placeholder text is replaced
        # by the country id
        res = []
        _text = text
        color_suffix = COLOR_SUFFIX.get(color_class, "")
        css_class = extra_css_class + " {}{}"

        for region_id, available_countries in self.regions_and_countries:
            for country in available_countries:
                country_id = country[0]
                country_name = country[1]

                if text == '_country':
                    _text = country_id

                res.append((1, _text, data.get(country_id, ""),
                            css_class.format(region_id, color_suffix)))

        return res

    def make_subrow(self, colspan_type, text, color_class, css_extra, data):
        # dispatch on the colspan type from the XML: row / region / country
        make_method = getattr(self, "_make_subrow_" + colspan_type)
        subrow_final = make_method(text, data, color_class, css_extra)

        return subrow_final

    def __init__(self, context, request, year_def):
        """ Expand a LandingPageYearDefinition into self.data: a list of
        (date, who, article, task, css_extra, subrows) render tuples """
        super(BaseLandingPageRow, self).__init__(context, request)

        data = []

        for row in year_def.rows:
            date = row[0]
            who = row[1]
            art = row[2]
            task = row[3]
            css_extra = row[4]
            subrows = row[5]

            _subrows = []

            for subrow in subrows:
                colspan_type = subrow[0]
                text = subrow[1]
                color_class = subrow[2]
                get_data_method = subrow[3]
                # resolve the data provider named in the XML on this class
                subrow_data = getattr(self, get_data_method, self._default)()
                _subrows.append(
                    self.make_subrow(colspan_type, text, color_class,
                                     css_extra, subrow_data)
                )

            data.append((date, who, art, task, css_extra, _subrows))

        self.data = data

    def __call__(self):
        return self.template(data=self.data)
| {"/src/wise/msfd/compliance/nationaldescriptors/a8esa.py": ["/src/wise/msfd/compliance/nationaldescriptors/data.py"], "/src/wise/msfd/compliance/nationalsummary/main.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/regionalsummary/introduction.py": ["/src/wise/msfd/compliance/nationalsummary/introduction.py"], "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/nationalsummary/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py"], "/src/wise/msfd/compliance/regionalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationalsummary/pdfexport.py", "/src/wise/msfd/compliance/regionalsummary/introduction.py"], "/src/wise/msfd/compliance/regionaldescriptors/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/utils.py", "/src/wise/msfd/compliance/regionaldescriptors/data.py"], "/src/wise/msfd/compliance/landingpage.py": ["/src/wise/msfd/compliance/assessment.py"], "/src/wise/msfd/__init__.py": ["/src/wise/msfd/patch.py"], "/src/wise/msfd/search/a9.py": ["/src/wise/msfd/__init__.py", "/src/wise/msfd/search/base.py"], "/src/wise/msfd/compliance/nationalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py", "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py", "/src/wise/msfd/compliance/nationalsummary/introduction.py"]} |
42,584 | laszlocseh/wise.msfd | refs/heads/master | /src/wise/msfd/__init__.py | #
from .patch import install

# apply runtime patches as soon as the package is imported
install()
| {"/src/wise/msfd/compliance/nationaldescriptors/a8esa.py": ["/src/wise/msfd/compliance/nationaldescriptors/data.py"], "/src/wise/msfd/compliance/nationalsummary/main.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/regionalsummary/introduction.py": ["/src/wise/msfd/compliance/nationalsummary/introduction.py"], "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/nationalsummary/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py"], "/src/wise/msfd/compliance/regionalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationalsummary/pdfexport.py", "/src/wise/msfd/compliance/regionalsummary/introduction.py"], "/src/wise/msfd/compliance/regionaldescriptors/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/utils.py", "/src/wise/msfd/compliance/regionaldescriptors/data.py"], "/src/wise/msfd/compliance/landingpage.py": ["/src/wise/msfd/compliance/assessment.py"], "/src/wise/msfd/__init__.py": ["/src/wise/msfd/patch.py"], "/src/wise/msfd/search/a9.py": ["/src/wise/msfd/__init__.py", "/src/wise/msfd/search/base.py"], "/src/wise/msfd/compliance/nationalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py", "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py", "/src/wise/msfd/compliance/nationalsummary/introduction.py"]} |
42,585 | laszlocseh/wise.msfd | refs/heads/master | /src/wise/msfd/search/a9.py | from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile
from z3c.form.browser.checkbox import CheckBoxFieldWidget
from z3c.form.field import Fields
from .. import db, sql
from ..base import EmbeddedForm, MarineUnitIDSelectForm2012
from ..db import get_all_records
from ..utils import group_query
from .base import ItemDisplayForm
from .interfaces import IA2012GesComponentsArt9
from .utils import data_to_xls, register_form_art9
class A9Form(EmbeddedForm):
    """ Select the MarineUnitID for the Article 9 form
    """

    record_title = 'Article 9 (GES determination)'
    # session_name = '2012'
    mapper_class = sql.MSFD9Descriptor
    fields = Fields(IA2012GesComponentsArt9)
    fields['ges_components'].widgetFactory = CheckBoxFieldWidget

    def get_subform(self):
        # next step in the form chain: MRU selection
        return A9MRUForm(self, self.request)

    def download_results(self):
        """ Export the filtered descriptors and their features as XLS

        Builds one sheet per table: the descriptors matching the selected
        MRUs/GES components, plus the features linked to those descriptors.
        """
        # muids = self.get_marine_unit_ids()
        _, muids = self.subform.get_available_marine_unit_ids()
        ges_comps = self.get_form_data_by_key(self, 'ges_components')

        count, data = get_all_records(
            self.mapper_class,
            self.mapper_class.MarineUnitID.in_(muids),
            self.mapper_class.ReportingFeature.in_(ges_comps)
        )

        # fetch the features belonging to the selected descriptors
        descriptor_ids = [row.MSFD9_Descriptor_ID for row in data]
        t_features = sql.t_MSFD9_Features
        count, data_f = get_all_records(
            t_features,
            t_features.c.MSFD9_Descriptor.in_(descriptor_ids)
        )

        xlsdata = [
            ('MSFD9Descriptor', data),
            ('MSFD9_Features', data_f),
        ]

        return data_to_xls(xlsdata)
class A9MRUForm(MarineUnitIDSelectForm2012):
    """ MarineUnitID selection step for Article 9; leads to the item
    display form """

    mapper_class = sql.MSFD9Descriptor

    def get_subform(self):
        return A9ItemDisplay(self, self.request)
class A9ItemDisplay(ItemDisplayForm):
    """ The implementation for the Article 9 (GES determination) form

    Shows one MSFD9Descriptor record at a time, with its linked features
    as extra data.
    """

    extra_data_template = ViewPageTemplateFile('pt/extra-data-pivot.pt')
    mapper_class = sql.MSFD9Descriptor
    order_field = 'MSFD9_Descriptor_ID'

    # columns used to resolve the report's import date/filename
    reported_date_info = {
        'mapper_class': sql.MSFD9Import,
        'col_import_id': 'MSFD9_Import_ID',
        'col_import_time': 'MSFD9_Import_Time',
        'col_filename': 'MSFD9_Import_FileName'
    }

    def get_import_id(self):
        # import record id of the currently displayed descriptor
        import_id = self.item.MSFD9_Descriptors_Import

        return import_id

    def get_db_results(self):
        """ Fetch the current page's descriptor, filtered by the selected
        MRU and GES components """
        page = self.get_page()
        muid = self.get_marine_unit_id()
        args = [self.mapper_class, self.order_field]

        if muid:
            args.append(self.mapper_class.MarineUnitID == muid)

        ges_comps = self.get_form_data_by_key(self.context, 'ges_components')

        if ges_comps:
            args.append(self.mapper_class.ReportingFeature.in_(ges_comps))

        res = db.get_item_by_conditions(*args, page=page)

        return res

    def get_extra_data(self):
        """ Features linked to the current descriptor, grouped by type """
        if not self.item:
            return {}

        desc_id = self.item.MSFD9_Descriptor_ID
        t = sql.t_MSFD9_Features

        total, res = db.get_table_records(
            [t.c.FeatureType, t.c.FeaturesPressuresImpacts],
            t.c.MSFD9_Descriptor == desc_id
        )
        res = group_query(res, 'FeatureType')

        return [
            ('Feature Types', res)
        ]
| {"/src/wise/msfd/compliance/nationaldescriptors/a8esa.py": ["/src/wise/msfd/compliance/nationaldescriptors/data.py"], "/src/wise/msfd/compliance/nationalsummary/main.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/regionalsummary/introduction.py": ["/src/wise/msfd/compliance/nationalsummary/introduction.py"], "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/nationalsummary/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py"], "/src/wise/msfd/compliance/regionalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationalsummary/pdfexport.py", "/src/wise/msfd/compliance/regionalsummary/introduction.py"], "/src/wise/msfd/compliance/regionaldescriptors/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/utils.py", "/src/wise/msfd/compliance/regionaldescriptors/data.py"], "/src/wise/msfd/compliance/landingpage.py": ["/src/wise/msfd/compliance/assessment.py"], "/src/wise/msfd/__init__.py": ["/src/wise/msfd/patch.py"], "/src/wise/msfd/search/a9.py": ["/src/wise/msfd/__init__.py", "/src/wise/msfd/search/base.py"], "/src/wise/msfd/compliance/nationalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py", "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py", "/src/wise/msfd/compliance/nationalsummary/introduction.py"]} |
42,586 | laszlocseh/wise.msfd | refs/heads/master | /src/wise/msfd/search/base.py | import logging
import re
from collections import defaultdict
from datetime import datetime
from zope.interface import implements
from Products.Five.browser import BrowserView
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile
from wise.msfd.base import BasePublicPage
from wise.msfd.utils import Tab
from z3c.form.button import buttonAndHandler
from z3c.form.field import Fields
from z3c.form.form import Form
from . import interfaces
from ..base import BaseEnhancedForm, BaseUtil, EmbeddedForm
from ..db import (get_item_by_conditions, latest_import_ids_2018,
use_db_session)
from ..interfaces import IMainForm
from .utils import get_registered_form_sections
logger = logging.getLogger('wise.msfd')
class ItemDisplayForm(EmbeddedForm):
    """ Generic form for displaying records

    Shows one database record at a time, with hidden-widget based paging
    via the Prev/Next buttons.
    """

    implements(interfaces.IItemDisplayForm)

    fields = Fields(interfaces.IRecordSelect)

    template = ViewPageTemplateFile('pt/item-display-form.pt')
    data_template = ViewPageTemplateFile('pt/item-display.pt')
    extra_data = None
    extra_data_template = ViewPageTemplateFile('pt/extra-data.pt')

    # set by subclasses:
    mapper_class = None  # This will be used to retrieve the item
    order_field = None  # This will be used to properly page between items

    def update(self):
        """ Resolve the current page, fetch the record, and hide the
        paging buttons at the ends of the result set """
        super(ItemDisplayForm, self).update()

        if not self.get_main_form().reset_page:
            self.data['page'] = self.widgets['page'].value
        else:
            # new filters applied: restart from the first record
            self.widgets['page'].value = 0
            self.data['page'] = 0

        self.count, self.item = self.get_db_results()

        # no 'Next' on the last record, no 'Prev' on the first
        if self.count == (int(self.data['page']) + 1):
            del self.actions['next']

        if int(self.data['page']) == 0:
            del self.actions['prev']

    def updateWidgets(self, prefix=None):
        super(ItemDisplayForm, self).updateWidgets()
        # the page number is carried in a hidden widget
        self.widgets['page'].mode = 'hidden'

    @buttonAndHandler(u'Prev', name='prev')
    def handle_prev(self, action):
        value = int(self.widgets['page'].value)
        self.widgets['page'].value = max(value - 1, 0)

    @buttonAndHandler(u'Next', name='next')
    def handle_next(self, action):
        value = int(self.widgets['page'].value)
        self.widgets['page'].value = value + 1

    def get_extra_data(self):
        # hook for subclasses: extra sections shown below the record
        return []

    def extras(self):
        return self.extra_data_template()

    def get_page(self):
        # current zero-based page number (0 when unset)
        page = self.data.get('page')

        if page:
            return int(page)
        else:
            return 0

    def get_db_results(self):
        """ Return (count, record) for the current page, optionally
        filtered by the selected MarineUnitID """
        page = self.get_page()
        muid = self.get_marine_unit_id()

        args = [self.mapper_class, self.order_field]

        if muid:
            args.append(self.mapper_class.MarineUnitID == muid)

        res = get_item_by_conditions(*args, page=page)

        return res

    @use_db_session('2018')
    def latest_import_ids_2018(self):
        # delegates to the db helper of the same name, inside a 2018 session
        latest_ids = latest_import_ids_2018()

        return latest_ids

    # def item_title(self, item):
    #     state = inspect(item)
    #
    #     if state.identity:
    #         id = state.identity[0]
    #     else:
    #         id = 0
    #
    #     return (item.__class__.__name__, id)
class MultiItemDisplayForm(ItemDisplayForm):
    """ Item display variant rendering several registered sections """

    template = ViewPageTemplateFile('pt/multi-item-display.pt')

    fields = Fields(interfaces.IRecordSelect)

    def get_sections(self):
        """ Instantiate every section view registered for this form """
        return [
            section_class(self, self.request)
            for section_class in get_registered_form_sections(self)
        ]
class ItemDisplay(BrowserView, BaseUtil):
    """ Inline-rendered view for a single database result; not registered
    as a standalone browser view """

    index = ViewPageTemplateFile('pt/simple-item-display.pt')
    data_template = ViewPageTemplateFile('pt/item-display.pt')
    extra_data_template = ViewPageTemplateFile('pt/extra-data.pt')

    data = {}

    def __init__(self, context, request):
        self.__parent__ = self.context = context
        self.request = request

        self.count = 0
        self.item = None

        results = self.get_db_results()

        if results:
            self.count, self.item = results

    def __call__(self):
        # render nothing when there is no record to show
        return self.index() if self.item else ''

    def get_db_results(self):
        # subclasses must supply the (count, item) lookup
        raise NotImplementedError

    def get_page(self):
        page = self.context.data.get('page')

        return int(page) if page else 0

    def get_extra_data(self):
        return []

    def extras(self):
        return self.extra_data_template()
def true(view):
    """ Tab condition predicate that makes the tab always available """
    return True
# Tab definitions for the main search interface. Tab arguments appear to be
# (id, section, title, subtitle, extra, condition) — confirm against
# wise.msfd.utils.Tab; the last argument is an availability predicate.
MAIN_FORMS = (
    Tab('msfd-start', 'msfd-start', 'Start',
        'About <br/>search engine', '', true),
    Tab('msfd-mru', 'msfd-mru', 'Article 4', 'Marine Units', '', true),
    Tab('msfd-rc', 'msfd-rc', 'Article 6', 'Regional cooperation', '', true),
    Tab('msfd-ca', 'msfd-ca', 'Article 7', 'Competent Authorities', '', true),
    Tab('msfd-a8', 'msfd-a8', 'Article 8', 'Assessments', '', true),
    Tab('msfd-a9', 'msfd-a9', 'Article 9', 'GES determinations', '', true),
    Tab('msfd-a10', 'msfd-a10', 'Article 10', 'Targets', '', true),
    Tab('msfd-c2', 'msfd-c2', 'Article 11', 'Monitoring programmes',
        '', true),
    Tab('msfd-c3', 'msfd-c3', 'Articles <br/>13, 14 & 18',
        'Programmes of measures (PoM), exceptions '
        '& progress on the implementation of PoM', '', true),
    # Tab('msfd-c4', 'msfd-c4', 'Articles <br/>8, 9 & 10',
    #     '2018 reporting exercise', '', true),
    # Tab('msfd-c5', 'msfd-c5', 'Article 18',
    #     'Progress on the implementation of PoM', '', true),
    Tab('msfd-c6', 'msfd-c6', 'Article 19.3',
        'Datasets used', '', true),
)
class MainForm(BaseEnhancedForm, BasePublicPage, Form):
    """ The main forms need to inherit from this class

    Drives the chain of subforms and handles the spreadsheet download of
    the filtered results.
    """

    implements(IMainForm)

    template = ViewPageTemplateFile('../pt/mainform.pt')
    ignoreContext = True
    reset_page = False          # True after 'Apply filters': restart paging
    subform = None
    subform_content = None
    should_download = False     # flag that signals download button is hit
    main_forms = MAIN_FORMS
    # method = 'get'

    def __init__(self, context, request):
        Form.__init__(self, context, request)

    @buttonAndHandler(u'Apply filters', name='continue')
    def handle_continue(self, action):
        self.reset_page = True

    @buttonAndHandler(u'Download as spreadsheet', name='download')
    def handle_download(self, action):
        self.should_download = True

    @property
    def title(self):
        # second Tab field for the tab matching this form's name
        return [x[1] for x in self.main_forms if x[0] == self.name][0]

    @property
    def spreadsheet_title(self):
        """ Filename stem for the XLS download; subform titles override
        the tab title, non-alphanumerics become underscores """
        title = [x[2] for x in self.main_forms if x[0] == self.name][0]
        title_from_subforms = self.find_spreadsheet_title()

        if title_from_subforms:
            title = title_from_subforms

        title = re.sub(r"[^a-zA-Z0-9]+", "_", title)

        return title

    def update(self):
        super(MainForm, self).update()
        self.data, self.errors = self.extractData()

        has_values = self.data.values() and all(self.data.values())

        if has_values:
            self.subform = self.get_subform()

            if self.subform:
                # we need to update and "execute" the subforms to be able to
                # discover them, because the decision process regarding
                # discovery is done in the update() method of subforms
                self.subform_content = self.subform()
                # self.subform.update()

    # @cache(request_cache_key)
    def render(self):
        """ Render the page, or stream the XLS when a download was asked
        for and a download action exists in the subform chain """
        download_action = self.find_download_action()

        if download_action in (None, False):
            del self.actions['download']

        if download_action and self.should_download:
            # TODO: need to implement this as xls response
            data = download_action()
            sh = self.request.response.setHeader

            sh('Content-Type', 'application/vnd.openxmlformats-officedocument.'
               'spreadsheetml.sheet')
            # fname = self.subform.get_record_title(cntx='subform') or 'marinedb'
            logger.info("Spreadsheet title: %s", self.spreadsheet_title)
            fname = self.spreadsheet_title or 'marinedb'
            fname = fname + '_' + str(datetime.now().replace(microsecond=0))
            fname = fname.replace(' ', '_').replace('(', '').replace(')', '')\
                .replace('&', '_')
            sh('Content-Disposition', 'attachment; filename=%s.xlsx' % fname)

            return data.read()

        return super(MainForm, self).render()

    def find_download_action(self):
        """ Look for a download method in all subform children
        """
        ctx = self

        while hasattr(ctx, 'subform'):
            if hasattr(ctx, 'download_results'):
                return ctx.download_results
            ctx = ctx.subform

        if hasattr(ctx, 'download_results'):
            return ctx.download_results

    def find_spreadsheet_title(self):
        """ Not used, just an experiment to provide custom spreadsheet titles
        across all articles
        """
        ctx = self.subform

        while hasattr(ctx, 'subform'):
            if hasattr(ctx, 'record_title'):
                return ctx.record_title
            ctx = ctx.subform

        if hasattr(ctx, 'record_title'):
            return ctx.record_title
| {"/src/wise/msfd/compliance/nationaldescriptors/a8esa.py": ["/src/wise/msfd/compliance/nationaldescriptors/data.py"], "/src/wise/msfd/compliance/nationalsummary/main.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/regionalsummary/introduction.py": ["/src/wise/msfd/compliance/nationalsummary/introduction.py"], "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/nationalsummary/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py"], "/src/wise/msfd/compliance/regionalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationalsummary/pdfexport.py", "/src/wise/msfd/compliance/regionalsummary/introduction.py"], "/src/wise/msfd/compliance/regionaldescriptors/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/utils.py", "/src/wise/msfd/compliance/regionaldescriptors/data.py"], "/src/wise/msfd/compliance/landingpage.py": ["/src/wise/msfd/compliance/assessment.py"], "/src/wise/msfd/__init__.py": ["/src/wise/msfd/patch.py"], "/src/wise/msfd/search/a9.py": ["/src/wise/msfd/__init__.py", "/src/wise/msfd/search/base.py"], "/src/wise/msfd/compliance/nationalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py", "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py", "/src/wise/msfd/compliance/nationalsummary/introduction.py"]} |
42,587 | laszlocseh/wise.msfd | refs/heads/master | /src/wise/msfd/compliance/nationalsummary/pdfexport.py | # -*- coding: utf-8 -*-
from io import BytesIO
from pkg_resources import resource_filename
import logging
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile
from Products.statusmessages.interfaces import IStatusMessage
from wise.msfd.compliance.interfaces import INationalSummaryCountryFolder
from wise.msfd.data import get_report_filename
from wise.msfd.gescomponents import DESCRIPTOR_TYPES
from wise.msfd.translation import get_translated, retrieve_translation
from wise.msfd.utils import (ItemList, TemplateMixin, db_objects_to_dict,
fixedorder_sortkey, timeit)
from zope.interface import implements
from lpod.document import odf_new_document
from lpod.toc import odf_create_toc
import pdfkit
from ..nationaldescriptors.a7 import Article7
from ..nationaldescriptors.a34 import Article34
from ..nationaldescriptors.base import BaseView
from .base import BaseNatSummaryView
from .descriptor_assessments import DescriptorLevelAssessments
from .introduction import Introduction
from .odt_utils import (create_heading, create_paragraph,
create_table_summary, setup_document_styles)
# Package-wide logger; shared by the export views in this module.
logger = logging.getLogger('wise.msfd')
class AssessmentExportCover(BaseNatSummaryView):
    """Render the cover page used by the assessment PDF export."""

    template = ViewPageTemplateFile('pt/cover.pt')

    def assess_date(self):
        """Return the formatted assessment date, or '-' when unset."""
        _missing = object()
        raw_date = getattr(self._country_folder, 'date_assessed', _missing)

        if raw_date is _missing:
            return '-'

        return self._format_date(raw_date)

    def __call__(self):
        return self.template(date=self.assess_date())
class SummaryAssessment(BaseNatSummaryView):
    """Section 2 of the export: summary of the assessment, per region."""

    template = ViewPageTemplateFile('pt/summary-assessment.pt')
    descriptor_types = DESCRIPTOR_TYPES

    def __init__(self, context, request, overall_scores,
                 nat_desc_country_folder):
        super(SummaryAssessment, self).__init__(context, request)

        self.overall_scores = overall_scores
        self.nat_desc_country_folder = nat_desc_country_folder

    def get_overall_score(self, region_code, descriptor, article):
        """Return the (conclusion, color) pair stored for the given
        region/descriptor/article combination.
        """
        conclusion, color = self.overall_scores[
            (region_code, descriptor, article)
        ][:2]

        return conclusion, color

    def setup_data(self):
        """Build the nested (region_title, [(descr_type, rows)]) structure
        consumed by both the HTML template and the ODT export.
        """
        result = []
        region_folders = self.get_region_folders(self.nat_desc_country_folder)

        for region_folder in region_folders:
            region_id = region_folder.id.upper()
            descr_folders = self.get_descr_folders(region_folder)
            type_rows = []

            for descr_type, descriptors in self.descriptor_types:
                rows = []

                for descr_id in descriptors:
                    folder = [
                        f
                        for f in descr_folders
                        if f.id.upper() == descr_id
                    ][0]

                    # Remove the bracketed suffix from the descriptor title:
                    # "D4 - Food webs/D1 Biodiversity - ecosystems (D4/D1)"
                    row = [folder.title.split('(')[0]]

                    for art_folder in self.get_article_folders(folder):
                        row.append(self.get_overall_score(
                            region_id, folder.id.upper(), art_folder.title
                        ))

                    rows.append(row)

                type_rows.append((descr_type, rows))

            result.append((region_folder.title, type_rows))

        self.summary_assess_data = result

        return result

    def get_odt_data(self, document):
        """Return the ODT nodes (headings + summary tables) for this
        section; relies on setup_data() having populated
        self.summary_assess_data.
        """
        headers = ('Descriptor', 'Article 9 - GES Determination',
                   'Article 8 - Initial Assessment',
                   'Article 10 - Environmental Targets')

        nodes = [create_heading(1, u"Summary of the assessment")]

        for region_title, table_rows in self.summary_assess_data:
            nodes.append(create_heading(2, region_title))

            # TODO split score , it is a tuple (conclusion, color_value)
            # and somehow color the table cells
            nodes.append(
                create_table_summary(document, table_rows, headers=headers)
            )

        return nodes

    def __call__(self):
        @timeit
        def render_summary_assessment():
            self.setup_data()

            return self.template()

        return render_summary_assessment()
class ProgressAssessment(BaseNatSummaryView):
    """Section 3 of the export: assessment of national progress
    since 2012.
    """

    template = ViewPageTemplateFile('pt/progress-assessment.pt')

    @property
    def progress_recommendations_2012(self):
        """Raw value of the 2012 recommendations field."""
        return self.get_field_value('progress_recommendations_2012')

    @property
    def progress_recommendations_2018(self):
        """Raw value of the 2018 progress field."""
        return self.get_field_value('progress_recommendations_2018')

    def get_odt_data(self, document):
        """Return the ODT nodes (headings + rich-text paragraphs) for
        this section.
        """
        recommendations_2012 = self.get_transformed_richfield_text(
            'progress_recommendations_2012'
        )
        recommendations_2018 = self.get_transformed_richfield_text(
            'progress_recommendations_2018'
        )

        return [
            create_heading(1, "Assessment of national progress since 2012"),
            create_heading(2, "2012 recommendations to Member State"),
            create_paragraph(recommendations_2012),
            create_heading(
                2, "Progress against 2012 recommendations to Member State"
            ),
            create_paragraph(recommendations_2018),
        ]

    def __call__(self):
        @timeit
        def render_progress_assessment():
            return self.template()

        return render_progress_assessment()
class Article34Copy(Article34):
    """Articles 3 & 4 report, re-templated for the summary export."""

    title = "Articles 3 & 4 Marine regions"
    template = ViewPageTemplateFile('pt/report-data-secondary.pt')
class Article7Copy(Article7):
    """Article 7 report, re-templated for the summary export."""

    title = "Article 7 Competent authorities"
    template = ViewPageTemplateFile('pt/report-data-secondary.pt')
class ArticleTable(BaseView):
    """Render a secondary-article (Art 3/4/7) report table for a country.

    Secondary articles are not linked to a descriptor, region or marine
    units, hence the placeholder values returned by the properties below.
    """

    impl = {
        'Art3': Article34Copy,
        'Art4': Article34Copy,
        'Art7': Article7Copy,
    }

    is_translatable = True
    year = '2012'  # reporting cycle used when locating report files

    def __init__(self, context, request, article):
        super(ArticleTable, self).__init__(context, request)
        self._article = article
        # KeyError here means an unsupported article id was requested
        self.klass = self.impl[article]

    @property
    def article(self):
        return self._article

    @property
    def descriptor(self):
        # secondary articles are not linked to any descriptor
        return 'Not linked'

    @property
    def muids(self):
        # no marine units apply to secondary articles
        return []

    @property
    def country_region_code(self):
        return 'No region'

    def get_article_title(self, klass):
        """Return the article title wrapped in an <h4> tag."""
        tmpl = u"<h4>{}</h4>"
        title = klass.title

        return tmpl.format(title)

    def get_report_filename(self, art=None):
        """Locate the reported file for this article.

        Needed by the article report-data implementations to retrieve
        the file.
        """
        return get_report_filename(
            self.year, self.country_code, self.country_region_code,
            art or self.article, self.descriptor
        )

    def __call__(self):
        try:
            self.view = self.klass(
                self, self.request, self.country_code,
                self.country_region_code, self.descriptor, self.article,
                self.muids
            )
            rendered_view = self.view()
        except Exception:
            # Fix: was a bare `except:` which silently swallowed every
            # error, including KeyboardInterrupt/SystemExit. Narrow to
            # Exception and log the traceback so failures are diagnosable;
            # the rendered fallback text is unchanged.
            logger.exception("Error getting report for %s", self._article)
            rendered_view = 'Error getting report'

        return self.get_article_title(self.klass) + rendered_view
class AssessmentExportView(BaseNatSummaryView):
    """Top-level export view for the national summary assessment.

    Renders the full report as HTML and, depending on the request form,
    returns it inline, as an ODT document (via lpod) or as a PDF
    (via pdfkit / wkhtmltopdf).
    """

    implements(INationalSummaryCountryFolder)

    help_text = "HELP TEXT"
    template = ViewPageTemplateFile('pt/report-data.pt')
    report_header_template = ViewPageTemplateFile(
        'pt/assessment-export-header.pt'
    )
    year = "2012"
    render_header = True

    def _get_css(self):
        """Return the CSS file paths handed to wkhtmltopdf."""
        return [
            resource_filename('wise.theme',
                              'static/wise/css/main.css'),
            resource_filename('wise.msfd',
                              'static/wise/dist/css/compliance.css'),
            resource_filename('wise.msfd',
                              'static/wise/dist/css/pdf_export.css'),
        ]

    def _get_cover(self):
        """Return the URL of the PDF cover page, or '' on localhost.

        wkhtmltopdf fetches the cover over HTTP, which does not work for
        a local development instance, so the cover is skipped there.
        """
        absolute_url = self.context.absolute_url()
        cover_url = absolute_url + '/export-cover'

        if 'localhost' in absolute_url:
            return ""

        # NOTE(review): this replace can only match if the public URL still
        # contains 'localhost:5080', which the guard above already excludes
        # -- looks like leftover dev plumbing; confirm intent.
        cover_url = cover_url.replace('localhost:5080',
                                      'office.pixelblaster.ro:4880')

        return cover_url

    def _get_toc(self):
        """Return the table-of-contents options for pdfkit.

        Bug fix: the original assignment ended with a stray trailing
        comma, which made ``xsl_file`` a 1-tuple instead of the string
        path wkhtmltopdf expects for 'xsl-style-sheet'.
        """
        xsl_file = resource_filename('wise.msfd', 'data/pdf_toc.xsl')

        return {"xsl-style-sheet": xsl_file}

    def get_document(self):
        """Build the ODT document from self.tables and return its bytes.

        Requires render_reportdata() to have populated self.tables.
        """
        result = BytesIO()

        document = odf_new_document('text')
        setup_document_styles(document)
        body = document.get_body()

        # Create the Table Of Content
        toc = odf_create_toc()
        # Changing the default "Table Of Content" Title :
        toc.set_title("Table of Content")

        # Do not forget to add every components to the document:
        body.append(toc)

        # Each section that knows how to serialize itself to ODT
        # contributes its nodes here.
        for table in self.tables:
            if hasattr(table, 'get_odt_data'):
                odt_data = table.get_odt_data(document)
                body.extend(odt_data)

        toc.fill()
        document.save(target=result, pretty=True)

        return result.getvalue()

    def download(self):
        """Return the report as an ODT attachment download."""
        doc = self.get_document()
        sh = self.request.response.setHeader

        sh('Content-Type', 'application/vnd.oasis.opendocument.text')
        fname = "{}-Draft".format(self.country_name)
        sh('Content-Disposition',
           'attachment; filename=%s.odt' % fname)

        return doc

    def download_pdf(self):
        """Convert the rendered HTML report to a PDF attachment download.

        Requires self.report_html to be set (done in __call__).
        """
        options = {
            'margin-top': '0.5in',
            'margin-right': '0.5in',
            'margin-bottom': '0.5in',
            'margin-left': '0.5in',
            # 'footer-left': "Page",
            'footer-font-size': '7',
            'footer-right': 'Page [page] of [topage]',
            'encoding': "UTF-8",
        }
        css = self._get_css()
        cover = self._get_cover()
        toc = self._get_toc()

        doc = pdfkit.from_string(
            self.report_html, False, options=options,
            cover=cover,
            toc=toc,
            css=css,
            cover_first=True
        )

        sh = self.request.response.setHeader
        sh('Content-Type', 'application/pdf')
        fname = "{}-Draft".format(self.country_name)
        sh('Content-Disposition',
           'attachment; filename=%s.pdf' % fname)

        return doc

    # @cache(get_reportdata_key, dependencies=['translation'])
    @timeit
    def render_reportdata(self):
        """Instantiate all report sections and render the main template."""
        report_header = self.report_header_template(
            title="Commission assessment / Art12 / 2018 / {}-summary".format(
                self.country_name,
            )
        )
        # trans_edit_html = self.translate_view()()

        # 4. Descriptor-level assessments -- rendered first because the
        # summary section below needs its overall scores
        descriptor_lvl_assess = DescriptorLevelAssessments(self, self.request)
        descriptor_lvl_assess_view = descriptor_lvl_assess()
        overall_scores = descriptor_lvl_assess.overall_scores
        nat_desc_country_folder = descriptor_lvl_assess.nat_desc_country_folder

        # 1. Introduction
        introduction = Introduction(self.context, self.request)

        # 2. Summary Assessment
        sum_assess = SummaryAssessment(self, self.request, overall_scores,
                                       nat_desc_country_folder)

        # 3. Progress Assessment
        prog_assess = ProgressAssessment(self, self.request)

        self.tables = [
            report_header,
            introduction,
            sum_assess,
            prog_assess,
            descriptor_lvl_assess
            # ArticleTable(self, self.request, 'Art7'),
            # ArticleTable(self, self.request, 'Art3-4'),
            # trans_edit_html,
        ]

        template = self.template

        return template(tables=self.tables)

    def __call__(self):
        """Dispatch on the request form: edit / download / translate / view."""
        if 'edit-data' in self.request.form:
            url = "{}/edit".format(self._country_folder.absolute_url())

            return self.request.response.redirect(url)

        if 'download_pdf' in self.request.form:
            # the PDF output gets its header from the cover page instead
            self.render_header = False

        report_html = self.render_reportdata()
        self.report_html = report_html

        if 'download' in self.request.form:
            return self.download()

        if 'download_pdf' in self.request.form:
            return self.download_pdf()

        if 'translate' in self.request.form:
            for value in self._translatable_values:
                retrieve_translation(self.country_code, value)

            messages = IStatusMessage(self.request)
            messages.add(u"Auto-translation initiated, please refresh "
                         u"in a couple of minutes", type=u"info")
            # deliberately falls through to render the page below

        @timeit
        def render_html():
            return self.index()

        return render_html()
| {"/src/wise/msfd/compliance/nationaldescriptors/a8esa.py": ["/src/wise/msfd/compliance/nationaldescriptors/data.py"], "/src/wise/msfd/compliance/nationalsummary/main.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/regionalsummary/introduction.py": ["/src/wise/msfd/compliance/nationalsummary/introduction.py"], "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py": ["/src/wise/msfd/compliance/nationaldescriptors/main.py"], "/src/wise/msfd/compliance/nationalsummary/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py"], "/src/wise/msfd/compliance/regionalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationalsummary/pdfexport.py", "/src/wise/msfd/compliance/regionalsummary/introduction.py"], "/src/wise/msfd/compliance/regionaldescriptors/reportdata.py": ["/src/wise/msfd/compliance/nationaldescriptors/utils.py", "/src/wise/msfd/compliance/regionaldescriptors/data.py"], "/src/wise/msfd/compliance/landingpage.py": ["/src/wise/msfd/compliance/assessment.py"], "/src/wise/msfd/__init__.py": ["/src/wise/msfd/patch.py"], "/src/wise/msfd/search/a9.py": ["/src/wise/msfd/__init__.py", "/src/wise/msfd/search/base.py"], "/src/wise/msfd/compliance/nationalsummary/pdfexport.py": ["/src/wise/msfd/compliance/nationaldescriptors/a34.py", "/src/wise/msfd/compliance/nationalsummary/descriptor_assessments.py", "/src/wise/msfd/compliance/nationalsummary/introduction.py"]} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.