index int64 | repo_name string | branch_name string | path string | content string | import_graph string |
|---|---|---|---|---|---|
65,982 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/pipeline_worker/celery_app.py | """
Main Celery App file
"""
from raven.contrib.celery import register_signal, register_logger_signal
import celery
import raven
from meerkat_abacus.pipeline_worker import celeryconfig
from meerkat_abacus import logger
# class Celery(celery.Celery):
# def on_configure(self):
# if config.sentry_dns:
# client = raven.Client(config.sentry_dns)
# # register a custom filter to filter out duplicate logs
# register_logger_signal(client)
# # hook into the Celery error handler
# register_signal(client)
# Create and configure the Celery application for the pipeline worker.
app = celery.Celery()
app.config_from_object(celeryconfig)

logger.info(celeryconfig.DEVELOPMENT)
if celeryconfig.DEVELOPMENT:
    # Cleans up stale tasks between restarts so a development worker does
    # not replay work queued by a previous run.
    # (Removed a dead `pass` statement that preceded this call.)
    app.control.purge()

# Imported for its side effect: registers the pipeline processing tasks
# with `app`.  Must happen after the app has been configured.
import meerkat_abacus.pipeline_worker.processing_tasks  # noqa: E402,F401

if __name__ == "__main__":
    app.start()
| {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
65,983 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/pipeline_worker/process_steps/to_data_type.py | from meerkat_abacus.pipeline_worker.process_steps import ProcessingStep
from meerkat_abacus import util
from meerkat_abacus.util import data_types
class ToDataType(ProcessingStep):
    """Pipeline step that maps a raw form record onto the configured
    data types, emitting one "data" record per matching type.
    """

    def __init__(self, param_config, session):
        self.step_name = "to_data_type"
        self.config = param_config
        links_path = (param_config.config_directory
                      + param_config.country_config["links_file"])
        self.links_by_type, self.links_by_name = util.get_links(links_path)
        self.session = session

    def run(self, form, data):
        """Return a list of {"form": "data", "data": ...} records, one per
        data type that this (form, data) pair belongs to.

        A record matches a data type either as the type's main form (when
        the type's condition holds) or as one of the type's linked forms.
        """
        results = []
        for data_type in data_types.data_types(param_config=self.config):
            main_form = data_type["form"]
            # Map each linked form name to the link name for this type.
            linked_forms = {
                link["to_form"]: link["name"]
                for link in self.links_by_type.get(data_type["name"], [])
            }
            record = {"type": data_type["name"],
                      "original_form": form}
            if form == main_form:
                if not check_data_type_condition(data_type, data):
                    continue
                record["raw_data"] = data
            elif form in linked_forms:
                record["link_data"] = {linked_forms[form]: [data]}
            else:
                continue
            results.append({"form": "data",
                            "data": record})
        return results
def check_data_type_condition(data_type, data):
    """Return True when *data* satisfies the data type's condition.

    A data type with no ``db_column`` configured (or when *data* is
    empty/None) is accepted unconditionally; otherwise the value stored
    under ``db_column`` must equal the configured ``condition``.
    """
    column = data_type["db_column"]
    if not (column and data):
        # No filter column configured, or nothing to filter on: accept.
        return True
    return data.get(column) == data_type["condition"]
| {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
65,984 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/country_config/demo_links.py |
# Demo country link configuration consumed via util.get_links().
# Each entry describes how records in `to_table` attach to records in
# `from_table` (matched on from_column/to_column), and how fields of the
# linked record are translated into labelled values under "data".
# NOTE(review): column names like "pt./alert_id" use the form-group
# prefix convention of the source forms — confirm against the demo forms.
links = [
    {
        "id": "alert_investigation",
        "name": "Alert Investigation",
        "from_table": "Alerts",
        "from_column": "id",
        "from_date": "date",
        "to_table": "alert",
        "to_column": "pt./alert_id",
        "to_date": "end",
        # "last": when several investigations link to one alert, keep the
        # most recent one (by to_date) — presumably; verify in add_links.
        "which": "last",
        "data": {
            # Alert status derived from the lab-return answer.
            "status": {
                "Ongoing": {"column": "alert_labs./return_lab",
                            "condition": ["", "unsure"]},
                "Confirmed": {"column": "alert_labs./return_lab",
                              "condition": "yes"},
                "Disregarded": {"column": "alert_labs./return_lab",
                                "condition": "no"}
            },
            # Which checklist actions were recorded by the investigator.
            "checklist": {
                "Referral": {"column": "pt./checklist",
                             "condition": "referral"},
                "Case Management": {"column": "pt./checklist",
                                    "condition": "case_management"},
                "Contact Tracing": {"column": "pt./checklist",
                                    "condition": "contact_tracing"},
                "Laboratory Diagnosis": {"column": "pt./checklist",
                                         "condition": "return_lab"},
            },
            # "get_value" copies the raw column value (the device id)
            # rather than testing it against a condition.
            "investigator": {
                "investigator": {"column": "deviceid",
                                 "condition": "get_value"
                                 }
            }
        }
    }
]
| {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
65,985 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/model.py | """
Database model definition
"""
from sqlalchemy import Column, Integer, String, DateTime, DDL, Float, Text
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.dialects.postgresql import JSONB, ARRAY
from sqlalchemy.orm import validates
from sqlalchemy.event import listen
from geoalchemy2 import Geometry
from meerkat_abacus.config import config
# Single declarative base shared by every model in this module.
Base = declarative_base()
# Cache of dynamically generated form-table classes, keyed by table name;
# populated lazily by form_tables().
existing_form_tables = {}
# Convenience alias for the active country configuration.
country_config = config.country_config
def form_tables(param_config):
    """Return ORM classes for every raw-form table in the configuration.

    Classes are generated on first request and cached in the module-level
    ``existing_form_tables`` dict, so repeated calls are cheap and never
    redefine a table. Each generated table gets a GIN index on its JSONB
    ``data`` column, created via an ``after_create`` DDL listener.
    """
    for table_name in param_config.country_config["tables"]:
        if table_name in existing_form_tables:
            continue  # already generated on an earlier call
        attributes = {
            "__tablename__": table_name,
            "id": Column(Integer, primary_key=True),
            "uuid": Column(String, index=True),
            "data": Column(JSONB),
        }
        table_class = type(table_name, (Base,), attributes)
        existing_form_tables[table_name] = table_class
        gin_index = DDL(
            "CREATE INDEX {} ON {} USING gin(data);".format(
                table_name + "_gin", table_name)
        )
        listen(table_class.__table__, 'after_create', gin_index)
    return existing_form_tables
class DownloadDataFiles(Base):
    """Tracks generated data-export files, keyed by export uuid."""
    __tablename__ = 'download_data_files'
    uuid = Column(String, primary_key=True)
    generation_time = Column(DateTime)
    type = Column(String)
    # Float status — presumably a 0..1 progress fraction; confirm with writer.
    status = Column(Float)
    # Integer used as a boolean success flag — TODO confirm.
    success = Column(Integer)
# NOTE(review): class name misspells "Failure"; kept as-is because callers
# reference it by this name (table name is spelled correctly).
class StepFailiure(Base):
    """Records a pipeline step failure together with the offending row."""
    __tablename__ = 'step_failures'
    id = Column(Integer, primary_key=True)
    data = Column(JSONB)       # the row that failed processing
    form = Column(String)      # form the row came from
    step_name = Column(String) # pipeline step that raised
    error = Column(String)     # stringified error message
class Locations(Base):
    """Location hierarchy (country/zone/region/district/clinic).

    Rows form a tree via ``parent_location`` (an id in this same table).
    """
    __tablename__ = 'locations'
    id = Column(Integer, primary_key=True)
    country_location_id = Column(String)
    name = Column(String)
    parent_location = Column(Integer, index=True)   # id of parent row
    point_location = Column(Geometry("POINT"))
    area = Column(Geometry("MULTIPOLYGON"))
    other = Column(JSONB)                           # free-form extra attributes
    deviceid = Column(String)
    clinic_type = Column(String)
    case_report = Column(Integer, index=True)
    level = Column(String, index=True)              # e.g. hierarchy level name
    start_date = Column(DateTime)
    case_type = Column(ARRAY(Text), index=True)
    population = Column(Integer, default=0)
    service_provider = Column(String)

    def __repr__(self):
        return "<Location(name='%s', id='%s', parent_location='%s')>" % (
            self.name, self.id, self.parent_location)
class Devices(Base):
    """Known reporting devices and their associated tags."""
    __tablename__ = 'devices'
    device_id = Column(String, primary_key=True)
    tags = Column(JSONB)
class StepMonitoring(Base):
    """Timing/throughput telemetry for pipeline steps."""
    __tablename__ = 'step_monitoring'
    id = Column(Integer, primary_key=True)
    step = Column(String)      # step name
    n = Column(Integer)        # number of rows processed in this batch
    start = Column(DateTime)
    end = Column(DateTime)
    duration = Column(Float)   # seconds, presumably end - start — confirm
class Data(Base):
    """Main table of fully processed surveillance records.

    Mirrors DisregardedData; the two differ only in which table accepted
    the row after quality control.
    """
    __tablename__ = 'data'
    id = Column(Integer, primary_key=True)
    uuid = Column(String, index=True)
    device_id = Column(String, index=True)
    type = Column(String, index=True)      # data type id (see data_types config)
    type_name = Column(String)
    date = Column(DateTime, index=True)
    epi_week = Column(Integer, index=True)
    epi_year = Column(Integer, index=True)
    submission_date = Column(DateTime, index=True)
    # Location hierarchy ids (foreign keys into locations, by convention).
    country = Column(Integer, index=True)
    zone = Column(Integer, index=True)
    region = Column(Integer, index=True)
    district = Column(Integer, index=True)
    clinic = Column(Integer, index=True)
    clinic_type = Column(String)
    case_type = Column(ARRAY(Text), index=True)
    links = Column(JSONB)
    tags = Column(JSONB, index=True)
    variables = Column(JSONB, index=True)   # coded variables (GIN-indexed below)
    categories = Column(JSONB, index=True)  # category codes (GIN-indexed below)
    geolocation = Column(Geometry("POINT"))

    def __repr__(self):
        return "<Data(uuid='{}', id='{}'>".format(self.uuid, self.id)
# GIN indexes on the JSONB columns of `data`, attached via after_create so
# they are built whenever the table itself is created.
create_index = DDL("CREATE INDEX variables_gin ON data USING gin(variables);")
listen(Data.__table__, 'after_create', create_index)
create_index2 = DDL("CREATE INDEX categories_gin ON data USING gin(categories);")
listen(Data.__table__, 'after_create', create_index2)
class DisregardedData(Base):
    """Rows rejected by quality control, kept with the same shape as Data.

    NOTE(review): unlike Data, epi_week/epi_year are not indexed here —
    presumably intentional for a lower-traffic table; confirm.
    """
    __tablename__ = 'disregarded_data'
    id = Column(Integer, primary_key=True)
    uuid = Column(String, index=True)
    type = Column(String, index=True)
    type_name = Column(String)
    date = Column(DateTime, index=True)
    epi_week = Column(Integer)
    epi_year = Column(Integer)
    submission_date = Column(DateTime, index=True)
    # Location hierarchy ids.
    country = Column(Integer, index=True)
    region = Column(Integer, index=True)
    district = Column(Integer, index=True)
    zone = Column(Integer, index=True)
    clinic = Column(Integer, index=True)
    clinic_type = Column(String)
    case_type = Column(ARRAY(Text), index=True)
    links = Column(JSONB)
    tags = Column(JSONB, index=True)
    variables = Column(JSONB, index=True)
    categories = Column(JSONB, index=True)
    geolocation = Column(Geometry("POINT"))

    def __repr__(self):
        return "<DisregardedData(uuid='%s', id='%s'>" % (
            self.uuid, self.id
        )
# GIN indexes on the JSONB columns of `disregarded_data`, mirroring the
# indexes created for `data`.
create_index3 = DDL("CREATE INDEX disregarded_variables_gin ON disregarded_data USING gin(variables);")
listen(DisregardedData.__table__, 'after_create', create_index3)
create_index4 = DDL("CREATE INDEX disregarded_category_gin ON disregarded_data USING gin(categories);")
listen(DisregardedData.__table__, 'after_create', create_index4)
class Links(Base):
    """A directed link between two records, identified by their uuids."""
    __tablename__ = 'links'
    id = Column(Integer, primary_key=True)
    uuid_from = Column(String, index=True)
    uuid_to = Column(String, index=True)
    type = Column(String, index=True)   # link name/type from the links config
    data_to = Column(JSONB)             # snapshot of the linked-to record's data
class AggregationVariables(Base):
    """Definition of a coded variable loaded from the codes configuration.

    The ``@validates`` hooks coerce empty-string spreadsheet cells to 0 for
    the integer flag columns before they reach the database.
    """
    __tablename__ = 'aggregation_variables'
    id_pk = Column(Integer, primary_key=True)
    id = Column(String)                    # external variable id (not the PK)
    name = Column(String)
    type = Column(String)
    form = Column(String)
    multiple_link = Column(String)
    db_column = Column(String)
    method = Column(String)
    condition = Column(String)
    category = Column(JSONB, index=True)
    alert = Column(Integer, index=True)    # flag: this variable raises alerts
    alert_type = Column(String, index=True)
    alert_message = Column(String)
    calculation = Column(String)
    disregard = Column(Integer)            # flag: matching rows are disregarded
    calculation_group = Column(String)
    calculation_priority = Column(String)
    classification = Column(String)
    classification_casedef = Column(String)
    source = Column(String)
    source_link = Column(String)
    alert_desc = Column(String)
    case_def = Column(String)
    risk_factors = Column(String)
    symptoms = Column(String)
    labs_diagnostics = Column(String)

    def __repr__(self):
        return "<AggregationVariable(name='%s', id='%s'>" % (
            self.name, self.id)

    @validates("alert")
    def alert_setter(self, key, alert):
        # Empty spreadsheet cell -> 0 so the Integer column accepts it.
        if alert == "":
            return 0
        else:
            return alert

    # NOTE(review): there is no `daily` column on this model, so this
    # validator appears to be dead code — confirm before removing.
    @validates("daily")
    def daily_setter(self, key, daily):
        if daily == "":
            return 0
        else:
            return daily

    @validates("disregard")
    def disregard_setter(self, key, disregard):
        if disregard == "":
            return 0
        else:
            return disregard
class CalculationParameters(Base):
    """Named parameter sets (JSON) used by variable calculations."""
    __tablename__ = 'calculation_parameters'
    id = Column(Integer, primary_key=True)
    name = Column(String)
    type = Column(String)
    parameters = Column(JSONB)
| {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
65,986 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/pipeline_worker/process_steps/add_links.py | from sqlalchemy import func
from dateutil.parser import parse
from meerkat_abacus.pipeline_worker.process_steps import ProcessingStep
from meerkat_abacus import model, util
from meerkat_abacus import logger
class AddLinks(ProcessingStep):
    """Pipeline step that attaches related form records ("links") to a row.

    Link definitions come from the country config's links file (loaded via
    ``util.get_links``). Each link describes how a column of one form refers
    to a column of another, and with which matching method
    (``match`` / ``lower_match`` / ``alert_match``).
    """
    def __init__(self, param_config, session):
        """Store config and DB session, and load the link definitions.

        Args:
            param_config: application config object (provides
                ``config_directory`` and ``country_config``).
            session: SQLAlchemy session used for the link lookups.
        """
        super().__init__()
        self.step_name = "add_links"
        self.config = param_config
        self.session = session
        links_file_ = param_config.config_directory \
            + param_config.country_config["links_file"]
        # Two indexes over the same link definitions: by data type and by name.
        self.links_by_type, self.links_by_name = util.get_links(links_file_)
    @property
    def engine(self):
        # Engine is injected by the pipeline after construction, not in
        # __init__ — presumably so workers can swap engines; TODO confirm.
        return self._engine
    @engine.setter
    def engine(self, new_engine):
        self._engine = new_engine
    def run(self, form, data):
        """
        Creates all the links for a given data row

        Rows carrying ``raw_data`` are linked directly; rows carrying only
        ``link_data`` are first resolved back to their originating form
        rows (see ``_get_from_links``) and each of those is then linked.
        Returns a list of ``{"form": "data", "data": ...}`` dicts.
        """
        new_data = []
        if "raw_data" in data:
            new_data = [data]
        elif "link_data" in data:
            new_data = self._get_from_links(data)
        return_data = [{
            "form": "data",
            "data": self._get_to_links(data)
        } for data in new_data]
        return return_data
    def _get_from_links(self, data):
        """Resolve the form rows that a piece of link data points *from*.

        ``data["link_data"]`` must contain exactly one link name. Queries the
        originating form's table for rows whose configured columns match the
        link data, and returns them as new row dicts carrying the link data.
        """
        assert len(data["link_data"].keys()) == 1
        link_name_ = list(data["link_data"].keys())[0]
        link = self.links_by_name[link_name_]
        link_data = data["link_data"][link["name"]][0]
        if link.get("to_condition"):
            # "column:value" filter on the link data itself; if it does not
            # match, this link produces no rows.
            column, condition = link["to_condition"].split(":")
            if link_data.get(column) != condition:
                return []
        # aggregate_condition = link['aggregate_condition'] TODO
        from_form_name_ = link["from_form"]
        from_form = model.form_tables(param_config=self.config)[from_form_name_]
        # link_names.append(link["name"])
        columns = [from_form.uuid, from_form.data]
        conditions = []
        # Multiple column/method triples are ";"-separated in the config.
        from_columns = link["from_column"].split(";")
        to_columns = link["to_column"].split(";")
        methods = link["method"].split(";")
        for from_column, to_column, method in zip(from_columns, to_columns, methods):
            from_column_text = from_form.data[from_column].astext
            expected_value = link_data.get(to_column)
            # Never match rows where the linking column is empty.
            conditions.append(from_column_text != '')
            if method == "match":
                condition_ = from_column_text == expected_value
                conditions.append(condition_)
            elif method == "lower_match":
                # Case-insensitive match with "-" and "_" treated as equal.
                lower_ = func.lower(from_column_text)
                left = func.replace(lower_, "-", "_")
                right = str(expected_value).lower().replace("-", "_")
                conditions.append(left == right)
            elif method == "alert_match":
                # Compare only the trailing alert-id portion of the column.
                # NOTE(review): the magic 42 presumably relates to the stored
                # value's fixed length — confirm before changing.
                id_length = self.config.country_config["alert_id_length"]
                index_ = 42 - id_length
                alert_id = func.substring(from_column_text, index_, id_length)
                conditions.append(alert_id == expected_value)
        try:
            link_query = self.session.query(*columns).filter(*conditions).all()
        except Exception:
            # One retry after rollback, e.g. for a session left in a failed
            # transaction state.
            logger.exception("Failed to execute query. Retrying after rollback.")
            self.session.rollback()
            link_query = self.session.query(*columns).filter(*conditions).all()
        return_data = [{
            "type": data["type"],
            "original_form": from_form_name_,
            "raw_data": base_form_value[1],
            "link_data": {link["name"]: link_data}
        } for base_form_value in link_query]
        return return_data
    def _get_to_links(self, data):
        """Find all linked rows for ``data`` and store them in ``link_data``.

        For every link configured for this row's data type, queries the
        target form's table for matching rows, optionally merges in link data
        already attached to the row, orders multiple matches by the link's
        ``order_by`` spec, and writes the result to ``data["link_data"]``.
        Returns the (mutated) ``data`` dict.
        """
        link_data = {}
        for link in self.links_by_type.get(data["type"], []):
            to_form = model.form_tables(
                param_config=self.config)[link["to_form"]]
            if link.get("from_condition"):
                # "column:value" precondition on this row; skip link if unmet.
                column, expected = link["from_condition"].split(":")
                if data.get(column) != expected:
                    continue
            columns = [to_form.uuid, to_form.data]
            conditions = []
            from_columns = link["from_column"].split(";")
            to_columns = link["to_column"].split(";")
            methods = link["method"].split(";")
            for from_column, to_column, method in zip(from_columns, to_columns, methods):
                try:
                    expected = str(data["raw_data"][from_column])
                    to_column_text = to_form.data[to_column].astext
                except Exception:
                    # Missing source column: log and skip this column pair.
                    logger.error(f'ERROR: {data["raw_data"]}')
                    continue
                if method == "match":
                    conditions.append(to_column_text == expected)
                elif method == "lower_match":
                    # Case-insensitive match, "-" and "_" treated as equal.
                    left = func.replace(func.lower(to_column_text), "-", "_")
                    right = str(expected).lower().replace("-", "_")
                    conditions.append(left == right)
                elif method == "alert_match":
                    # Compare against the trailing alert-id slice of the value.
                    alert_id_ = expected[-self.config.country_config["alert_id_length"]:]
                    conditions.append(to_column_text == alert_id_)
                conditions.append(to_column_text != '')
            # handle the filter condition
            if link.get("to_condition"):
                column, expected = link["to_condition"].split(":")
                condition = to_form.data[column].astext == expected
                conditions.append(condition)
            try:
                link_query = self.session.query(*columns).filter(*conditions).all()
            except Exception:
                # One retry after rollback (mirrors _get_from_links).
                logger.exception("Failed to execute query. Retrying after rollback.")
                self.session.rollback()
                link_query = self.session.query(*columns).filter(*conditions).all()
            if link["name"] in data.get("link_data", {}):
                # Keep link data that already arrived with the row.
                link_query.append((None, data["link_data"][link["name"]]))
            if len(link_query) > 1:
                # Want to correctly order the linked forms
                column, method = link["order_by"].split(";")
                if method == "date":
                    sort_function = lambda x: parse(x[1][column])
                else:
                    sort_function = lambda x: x[1][column]
                link_query = sorted(link_query, key=sort_function)
            if link_query:
                link_data[link["name"]] = [link[1] for link in link_query]
        data["link_data"] = link_data
        return data
| {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
65,987 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/pipeline_worker/tests/__init__.py | """
Meerkat Abacus Test
Unit tests for Meerkat Abacus
"""
from unittest import mock
import random
import unittest
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from meerkat_abacus import model
from meerkat_abacus.pipeline_worker.pipeline import Pipeline
from meerkat_abacus.consumer.database_setup import create_db
from meerkat_abacus.config import config as param_config
class TestPipeline(unittest.TestCase):
    """Tests for the processing Pipeline: construction, chunk processing
    and per-row error handling."""

    def setUp(self):
        """Recreate the database and open a fresh session for each test."""
        create_db(param_config.DATABASE_URL, drop=True)
        # Fix: the original created two identical engines; one suffices for
        # both schema creation and the test session.
        self.engine = create_engine(param_config.DATABASE_URL)
        model.form_tables(param_config)
        model.Base.metadata.create_all(self.engine)
        Session = sessionmaker(bind=self.engine)
        self.session = Session()

    def tearDown(self):
        pass

    def test_setup(self):
        """Pipeline should instantiate one step per configured entry."""
        param_config.country_config["pipeline"] = ["quality_control",
                                                   "write_to_db",
                                                   "quality_control"]
        engine = mock.MagicMock()
        session = mock.MagicMock()
        pipeline = Pipeline(engine, session, param_config)
        self.assertEqual(len(param_config.country_config["pipeline"]),
                         len(pipeline.pipeline))

    def test_process_chunk(self):
        """A pipeline of no-op steps must return the chunk unchanged."""
        param_config.country_config["pipeline"] = ["do_nothing",
                                                   "do_nothing",
                                                   "do_nothing"]
        engine = mock.MagicMock()
        session = mock.MagicMock()
        pipeline = Pipeline(engine, session, param_config)
        for _ in range(30):
            n_rows = random.randint(10, 100)
            data = [{"form": "test-form",
                     "data": {"some-data": 4}} for _ in range(n_rows)]
            after_data = pipeline.process_chunk(data)
            self.assertEqual(data, after_data)

    @mock.patch("meerkat_abacus.pipeline_worker.pipeline.DoNothing")
    def test_error_handling(self, do_nothing_mock):
        """Rows whose step raises are dropped and recorded as StepFailiure."""
        do_nothing_mock.return_value = mock.MagicMock(
            **{"run.side_effect": KeyError("Test Error")})
        param_config.country_config["pipeline"] = ["do_nothing"]
        pipeline = Pipeline(self.engine, self.session, param_config)
        N = random.randint(1, 5)
        data = [{"form": "test-form",
                 "data": {"some-data": 4}} for _ in range(N)]
        after_data = pipeline.process_chunk(data)
        self.assertEqual(after_data, [])
        results = self.session.query(model.StepFailiure).all()
        self.assertEqual(len(results), N)
        self.assertEqual(results[0].form, "test-form")
        self.assertEqual(results[0].error, "KeyError: 'Test Error'")
# Allow running this test module directly (outside a test runner).
if __name__ == "__main__":
    unittest.main()
| {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
65,988 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/util/data_types.py | import csv
from meerkat_abacus.config import config
def data_types(param_config=config):
    """Load the data-type definitions from the country config's types CSV.

    Args:
        param_config: config object providing ``config_directory`` and
            ``country_config["types_file"]``. Defaults to the module config.

    Returns:
        list[dict]: one dict per CSV row, keyed by the CSV header.
    """
    types_path = (param_config.config_directory
                  + param_config.country_config["types_file"])
    with open(types_path, "r", encoding='utf-8', errors="replace") as f:
        # list(DictReader) replaces the redundant identity comprehension;
        # the local also gets a lowercase name (it is not a module constant).
        return list(csv.DictReader(f))
def data_types_for_form_name(form_name, param_config=config):
    """Return only the data types whose ``form`` field equals *form_name*."""
    matching = []
    for type_definition in data_types(param_config=param_config):
        if type_definition['form'] == form_name:
            matching.append(type_definition)
    return matching
DATA_TYPES_DICT = data_types() | {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": 
["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", 
"/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
65,989 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/pipeline_worker/processing_tasks.py | import cProfile
from sqlalchemy.orm import sessionmaker, scoped_session
from sqlalchemy import create_engine
from meerkat_abacus.pipeline_worker.pipeline import Pipeline
from meerkat_abacus.config import config as config_
from meerkat_abacus.pipeline_worker.celery_app import app
from meerkat_abacus import logger
# Lazily initialised by configure_worker() on first task execution.
pipeline = None
def configure_worker():
    """Initialise the module-level DB engine, session and pipeline.

    Run lazily from the first task invocation so that every worker
    process builds its own database connections.
    """
    global engine, session, pipeline
    logger.info("Worker setup")
    # pool_pre_ping was considered here but is left disabled.
    engine = create_engine(config_.DATABASE_URL)
    session_factory = sessionmaker(autocommit=False,
                                   autoflush=False,
                                   bind=engine)
    session = scoped_session(session_factory)
    logger.info(session)
    pipeline = Pipeline(engine, session, config_)
@app.task(bind=True, name="processing_tasks.process_data")
def process_data(self, data_rows):
    """Celery task: run one chunk of data rows through the pipeline.

    Args:
        data_rows: list of row dicts to process.
    """
    # Lazy per-worker initialisation of engine/session/pipeline.
    if pipeline is None:
        configure_worker()
    logger.info("STARTING task")
    # Drop pooled connections before use — presumably to avoid reusing
    # connections inherited across process forks; confirm.
    engine.dispose()
    pipeline.process_chunk(data_rows)
    logger.info("ENDING task")
@app.task(name="processing_tasks.test_up")
def test_up():
    """Health-check task: returns True to show the worker is reachable."""
    return True
| {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
65,990 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/consumer/consumer.py | import celery
from celery import Celery
from celery.task.control import inspect
import time
import backoff
from meerkat_abacus.consumer import celeryconfig
from meerkat_abacus.consumer import database_setup
from meerkat_abacus.consumer import get_data
from meerkat_abacus.config import config
from meerkat_abacus import util, model, logger
from meerkat_abacus.util import create_fake_data
# Celery producer app used only to enqueue work for the pipeline worker.
app = Celery()
app.config_from_object(celeryconfig)
app.conf.task_default_queue = 'abacus'
# Timestamp used to report total setup duration later in this script.
start_time = time.time()
# set_up_database(False, True, config) -- presumably (no drop, do create);
# NOTE(review): confirm the meaning of the two boolean flags.
session, engine = database_setup.set_up_database(False, True, config)
@backoff.on_exception(backoff.expo,
                      (celery.exceptions.TimeoutError,
                       AttributeError, OSError),
                      max_tries=10,
                      max_value=30)
def wait_for_celery_runner():
    """Ping the pipeline worker and return its reply.

    Retries with exponential backoff (at most 10 tries, delay capped at
    30s) until the worker answers the ``test_up`` task within 1 second.
    """
    probe = app.send_task('processing_tasks.test_up')
    return probe.get(timeout=1)
# Block startup until the pipeline worker answers a ping.
wait_for_celery_runner()

# Initial Setup
database_setup.unlogg_tables(config.country_config["tables"], engine)
logger.info("Starting initial setup")

# Pick the reader for the initial (stationary) data based on the
# configured source. FAKE_DATA first writes CSV files and then reads
# them back with the normal CSV reader.
if config.initial_data_source == "AWS_S3":
    get_data.download_data_from_s3(config)
    get_function = util.read_csv_file
elif config.initial_data_source == "LOCAL_CSV":
    get_function = util.read_csv_file
elif config.initial_data_source == "FAKE_DATA":
    get_function = util.read_csv_file
    create_fake_data.create_fake_data(session,
                                      config,
                                      write_to="file")
elif config.initial_data_source in ["AWS_RDS", "LOCAL_RDS"]:
    get_function = util.get_data_from_rds_persistent_storage
else:
    raise AttributeError(f"Invalid source {config.initial_data_source}")
number_by_form = get_data.read_stationary_data(get_function, config, app)
database_setup.logg_tables(config.country_config["tables"], engine)

# Wait for initial setup to finish: poll the worker until it has
# reserved tasks, i.e. it has started pulling work off the queue.
celery_inspect = inspect()
for i in range(5):
    celery_queues = celery_inspect.reserved()
    inspect_result = celery_queues.get("celery@abacus", [])
    if len(inspect_result) > 0:
        break
    # Bug fix: "Avaiable" -> "Available" in the log message.
    logger.info(f"Available celery queues: {inspect_result}")
    time.sleep(20)
else:
    setup_time = round(time.time() - start_time)
    logger.error(f"Failed to wait for message queue after {setup_time} seconds.")

setup_time = round(time.time() - start_time)
logger.info(f"Finished setup in {setup_time} seconds")
failures = session.query(model.StepFailiure).all()
if failures:
    N_failures = len(failures)
    # Bug fix: missing space after "were" in the original message.
    logger.error(f"There were {N_failures} records that failed in the pipeline, see the step_failures database table for more information")

# Real-time streaming: dispatch on the configured stream source forever.
run_dict = {
    "AWS_S3": get_data.real_time_s3,
    "FAKE_DATA": get_data.real_time_fake,
    "AWS_SQS": get_data.real_time_sqs
}

sds = config.stream_data_source
logger.info(f"Starting real time for {sds} config.")
# Bug fix: resolve the handler once. Previously `except KeyError` around
# the call would also misreport a KeyError raised *inside* the handler
# as "Unsupported data source".
handler = run_dict.get(sds)
if handler is None:
    raise RuntimeError(f"Unsupported data source {sds}.")
while True:
    try:
        number_by_form = handler(app, config, session, number_by_form)
    except Exception:
        # Bug fix: was a bare `except:` which also swallowed
        # KeyboardInterrupt/SystemExit, making the loop unstoppable.
        # Log and keep the real-time loop alive.
        logger.exception("Error in real time", exc_info=True)
| {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
65,991 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/codes/variable.py | """
Definition of the Variable class
"""
from dateutil.parser import parse
from functools import partial
from datetime import datetime, timedelta
from meerkat_abacus.config import config
country_config = config.country_config
class Variable():
    """
    A class for variables such that one can check if a row of data
    matches the variable.

    The variable's ``method`` string alternates test types with boolean
    operators, e.g. ``"match and between"``. Supported test types:
    match, sub_match, between, value, not_null, calc. Conditions and
    columns are ';'-separated (one entry per test) and each entry may
    itself be ','-separated into a list.

    NOTE: ``calculation`` strings from the codes file are compiled and
    ``eval``-ed, so the codes file must come from a trusted source.
    """

    def __init__(self, variable):
        """
        Set up variable class. We prepare the conditions/boundaries
        and determine the correct test function.

        Args:
            variable: model.AggregationVariable object

        Raises:
            NameError: on an unknown test type or boolean operator, or
                when "value"/"calc" is combined with other test types.
            TypeError: when the number of conditions does not equal the
                number of test types.
        """
        self.variable = variable
        self.column = variable.db_column
        self.operations = []
        self.test_types = []
        i = 0
        # Build a boolean expression over the per-test results, e.g.
        # 'res_dict["a"]&res_dict["c"]' for "X and Y"; evaluated in
        # test_many().
        self.bool_expression = ""
        self.bool_variables = []
        bool_trans = {"and": "&", "or": "|"}
        for term in variable.method.split(" "):
            if i % 2 == 0:
                # Even terms are test types ...
                if term in ["match", "sub_match", "between", "value",
                            "not_null", "calc"]:
                    self.test_types.append(term)
                else:
                    raise NameError(
                        "{} has wrong test type".format(variable.id)
                    )
                var = chr(97 + i)  # 'a', 'c', 'e', ... one name per test
                self.bool_expression += 'res_dict["' + var + '"]'
                self.bool_variables.append(var)
            else:
                # ... odd terms are boolean operators.
                # Bug fix: "not" used to be accepted here but had no
                # entry in bool_trans, producing an obscure KeyError.
                # It is now rejected with the intended NameError.
                if term in ["and", "or"]:
                    self.operations.append(term)
                    self.bool_expression += bool_trans[term]
                else:
                    raise NameError("Wrong logic type")
            i += 1
        # Pre-compile the boolean combination for fast evaluation.
        self.bool_expression = compile(self.bool_expression, "<string>",
                                       "eval")
        self.conditions = []
        for condition in variable.condition.split(";"):
            if "," in condition:
                c = [c.strip() for c in condition.split(",")]
                if '' in c:
                    # An empty entry means a missing value also matches.
                    c.append(None)
            else:
                c = [condition]
            self.conditions.append(c)
        self.columns = []
        for column in variable.db_column.split(";"):
            if "," in column:
                c = [c.strip() for c in column.split(",")]
            else:
                c = column
            self.columns.append(c)
        if len(self.conditions) != len(self.test_types):
            raise TypeError("Need same number of conditions as test types, {}".
                            format(variable))
        self.test_functions = {
            "match": self.test_match,
            "sub_match": self.test_sub_match,
            "between": self.test_calc_between,
            "not_null": self.test_not_null
        }

        if "value" in self.test_types:
            if len(self.test_types) > 1:
                raise NameError("Value must be only test type")
            self.test_type = self.test_value
            self.calculation = variable.calculation
        elif "calc" in self.test_types:
            if len(self.test_types) > 1:
                raise NameError("calc must be only test_type")
            # Rewrite column names in the calculation to row lookups and
            # pre-compile the resulting expression.
            self.calculation = variable.calculation
            if not isinstance(self.columns[0], list):
                self.columns[0] = [self.columns[0]]
            for c in self.columns[0]:
                self.calculation = self.calculation.replace(
                    c, 'row["' + c + '"]')
            self.calculation = compile(self.calculation, "<string>", "eval")
            self.test_type = self.test_calc
        elif len(self.test_types) == 1:
            # Single test: bind columns/conditions up front with partial.
            tt = self.test_types[0]
            if tt == "match":
                self.test_type = partial(self.test_match, self.columns[0],
                                         self.conditions[0])
            elif tt == "sub_match":
                self.test_type = partial(self.test_sub_match, self.columns[0],
                                         self.conditions[0])
            elif tt == "between":
                if not isinstance(self.columns[0], list):
                    self.columns[0] = [self.columns[0]]
                self.calculation = variable.calculation
                for c in self.columns[0]:
                    self.calculation = self.calculation.replace(
                        c, 'row["' + c + '"]'
                    )
                self.calculation = compile(
                    self.calculation, "<string>", "eval"
                )
                self.test_type = partial(self.test_calc_between,
                                         self.columns[0], self.conditions[0],
                                         self.calculation)
            elif tt == "not_null":
                self.test_type = partial(self.test_not_null, self.columns[0])
            else:
                self.test_type = self.test_functions[self.test_types[0]]
        else:
            # Multiple tests combined with boolean operators; "between"
            # entries get their own compiled calculation.
            if hasattr(variable, "calculation") and variable.calculation:
                self.calculation = []
                for i, calc in enumerate(variable.calculation.split(";")):
                    self.calculation.append(None)
                    if self.test_types[i] == "between":
                        if not isinstance(self.columns[i], list):
                            self.columns[i] = [self.columns[i]]
                        for c in self.columns[i]:
                            calc = calc.replace(
                                c, 'row["' + c + '"]')
                        calc = compile(calc, "<string>", "eval")
                        self.calculation[i] = calc
            self.test_type = self.test_many
        if hasattr(variable, "calculation_priority"):
            self.calculation_priority = variable.calculation_priority

    def test(self, row):
        """
        Tests the condition defined in codes file for this variable.

        Args:
            row: a form row under test
        Returns:
            dict with keys:
                applicable: bool; for "calc" a result of 0 still counts
                    as applicable (0 is a proper value, not a failure).
                value: the raw return value of the underlying test
                    (0 when the test reported "not_applicable").
        """
        applicable = self.test_type(row)
        value = applicable
        if self.test_types[0] == "calc":
            # For calc, 0 is a legitimate computed value.
            if applicable == 0:
                applicable = True
        if applicable == "not_applicable":
            value = 0
            applicable = False
        return {"applicable": bool(applicable),
                "value": value}

    def test_many(self, row):
        """Run every configured test and combine the results with the
        pre-compiled boolean expression."""
        res_dict = {}
        for i in range(len(self.test_types)):
            tt = self.test_types[i]
            if tt == "match":
                res = self.test_match(self.columns[i], self.conditions[i], row)
            elif tt == "sub_match":
                res = self.test_sub_match(self.columns[i], self.conditions[i],
                                          row)
            elif tt == "between":
                if not isinstance(self.columns[i], list):
                    self.columns[i] = [self.columns[i]]
                res = self.test_calc_between(self.columns[i],
                                             self.conditions[i],
                                             self.calculation[i], row)
            elif tt == "not_null":
                res = self.test_not_null(self.columns[i], row)
            else:
                res = self.test_functions[self.test_types[i]](row)
            res_dict[self.bool_variables[i]] = res
        return eval(self.bool_expression)

    def test_match(self, column, condition, row):
        """Test if value is in condition list; 0 on missing/unusable data."""
        try:
            return row[column] in condition
        # Bug fix: narrowed from a bare except (which also caught
        # KeyboardInterrupt/SystemExit).
        except Exception:
            return 0

    def test_sub_match(self, column, condition, row):
        """
        We first test if value is in the list, if not we check
        if value is a substring of any element in the list.
        """
        add = 0
        try:
            if row[column] in condition:
                add = 1
            else:
                for c in condition:
                    if row[column] and c in row[column]:
                        add = 1
                        break
        # Bug fix: narrowed from a bare except.
        except Exception:
            pass
        return add

    def test_not_null(self, column, row):
        """Return True when the column exists and is not '', None or 0."""
        if column not in row:
            return 0
        value = row[column]
        # Bug fix: use equality, not identity. ``value is not ""`` /
        # ``is not 0`` relied on interpreter literal interning and emits
        # SyntaxWarning on modern CPython.
        return value != "" and value is not None and value != 0

    def test_value(self, row):
        """Return the column's value (or its ISO date when the variable's
        calculation is "date"); 0 when missing/empty/unparsable."""
        if self.columns[0] not in row:
            return 0
        value = row[self.columns[0]]
        # Bug fix: equality instead of identity comparison with literals.
        if value != "" and value is not None and value != 0:
            if self.calculation == "date":
                if value:
                    try:
                        return parse(value).isoformat()
                    except ValueError:
                        print(value)
                        return 0
            else:
                return value
        else:
            return 0

    def test_calc_between(self,
                          columns,
                          condition,
                          calc,
                          old_row):
        """
        self.calc should be an expression with column names
        from the row and mathematical expression understood by python.
        We then replace all column names with their numerical values
        and evaluate the resulting expression, testing it against the
        half-open interval [condition[0], condition[1]).
        """
        row = {}
        for c in columns:
            # Any missing/empty column makes the test fail outright.
            if c not in old_row or old_row[c] == '' or old_row[c] is None:
                return 0
            # Keep non-numeric values as-is (e.g. datestrings handled by
            # the compiled calc via to_date).
            try:
                row[c] = float(old_row[c])
            except ValueError:
                row[c] = old_row[c]
        try:
            result = float(eval(calc))
            greater = float(condition[0]) <= result
            less = float(condition[1]) > result
            return greater & less
        except ZeroDivisionError:
            return 0
        except ValueError as e:
            print("Value error while testing for code ", self.variable.id)
            raise e

    def test_calc(self, old_row):
        """
        self.calc should be an expression with column names from
        the row and mathematical expression understood by python.
        We then replace all column names with their numerical values
        and evaluate the resulting expression.

        Missing columns make the row "not_applicable"; empty values are
        treated as 0.
        """
        row = {}
        for c in self.columns[0]:
            if c not in old_row:
                return "not_applicable"
            if old_row[c] == '' or old_row[c] is None:
                row[c] = 0
            else:
                try:
                    row[c] = float(old_row[c])
                except ValueError:
                    row[c] = old_row[c]
        try:
            return float(eval(self.calculation))
        except ZeroDivisionError:
            return 0

    @staticmethod
    def to_date(element):
        """
        Converts a row element date string to a number, if the element
        conforms to one of the specified date formats. If the specified
        row element is a datestring, this function calculates the number
        of seconds between that datetime and the epi week start after
        the epoch, i.e. in Jordan the first Sunday after 1st January
        1970. If the element doesn't conform to an acceptable date
        string form, it is returned unchanged.
        """
        # If element isn't even a string, just return it instantly.
        if type(element) is not str:
            return element
        # Try each accepted format in order; first successful parse wins.
        for i, date_format in enumerate(allowed_formats):
            try:
                date = parse_date(element, date_format)
                # Want to calc using secs from the epi week start after
                # epoch ("epiepoch"). Epoch was on Thurs 1/1/1970, so
                # (4 + epi_week_start_day) % 7 = days between epoch and
                # epiepoch.
                if isinstance(country_config['epi_week'], str):
                    epi_offset = (4 + int(country_config['epi_week'][4:])) % 7
                else:
                    year = date.year
                    epi_offset = (
                        4 + country_config["epi_week"].get(year, datetime(year, 1, 1)).weekday()
                    ) % 7
                # Time since epiepoch = date - (epoch + epi_offset).
                since_epi_epoch = date - (datetime(1970, 1, 1) +
                                          timedelta(days=epi_offset))
                return since_epi_epoch.total_seconds()
            # Failed parse (or missing epi-week config key): try the
            # next acceptable date format.
            except (ValueError, KeyError):
                pass
        # The element didn't conform to any date format.
        return element
# Datestring formats accepted by Variable.to_date, tried in this order.
allowed_formats = [
    '%b %d, %Y',
    '%d-%b-%Y',
    '%Y-%m-%d',
    '%d-%b-%Y %I:%M:%S',
    '%d-%b-%Y %H:%M:%S',
    '%b %d, %Y %I:%M:%S %p',
    '%Y-%m-%dT%H:%M:%S.%f',
    '%Y-%m-%dT%H:%M:%S.%fZ',
    '%Y-%m-%dT%H:%M:%S'
]

# Month abbreviation -> month number, used by parse_date for the
# '%b %d, %Y' format.
months = {abbreviation: number
          for number, abbreviation in enumerate(
              ("Jan", "Feb", "Mar", "Apr", "May", "Jun",
               "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"), start=1)}
def parse_date(string, date_format):
    """Parse *string* according to *date_format* into a datetime.

    The '%b %d, %Y' format is handled manually via the module-level
    ``months`` table; every other format is delegated to strptime.
    """
    if date_format != '%b %d, %Y':
        return datetime.strptime(string, date_format)
    year = int(string[-4:])
    month_and_day = string[:-6]
    month = months[month_and_day[:3]]
    day = int(month_and_day[3:])
    return datetime(year, month, day)
| {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
65,992 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/consumer/get_data.py | import boto3
import time
import json
from celery.task.control import inspect
from meerkat_abacus.util import create_fake_data
from meerkat_abacus import util, logger
def read_stationary_data(get_function, param_config, celery_app, N_send_to_task=15000,
                         previous_number_by_form=None):
    """
    Read stationary data using the get_function to determine the source.

    Records are batched and dispatched to the processing task queue in
    chunks of at most ``N_send_to_task``.

    Args:
        get_function: callable(form_name, param_config=...) yielding rows
        param_config: application config (uses country_config["tables"])
        celery_app: Celery app used to dispatch processing tasks
        N_send_to_task: batch size per dispatched task
        previous_number_by_form: per-form index of the last record seen
            on an earlier run; earlier records are skipped.

    Returns:
        dict mapping form name to the index of the last record seen.
    """
    # Bug fix: mutable default argument replaced by a None sentinel.
    if previous_number_by_form is None:
        previous_number_by_form = {}
    celery_inspect = inspect()
    number_by_form = {}
    for form_name in param_config.country_config["tables"]:
        start = previous_number_by_form.get(form_name, 0)
        logger.info(f"Start processing data for form {form_name}")
        data = []
        # Bug fix: initialise i so an empty source no longer raises
        # NameError in the log/bookkeeping lines below.
        i = -1
        for i, element in enumerate(get_function(form_name, param_config=param_config)):
            if i < start:
                # Bug fix: this used to be the bare expression ``next``
                # (a no-op), so already-processed rows were re-sent.
                continue
            data.append({"form": form_name,
                         "data": dict(element)})
            if i % N_send_to_task == 0:
                logger.info(f"Processed {i} records")
                send_task(data, celery_app, celery_inspect)
                data = []
        if data:
            send_task(data, celery_app, celery_inspect)
        logger.info("Finished processing data.")
        logger.info(f"Processed {i} records")
        number_by_form[form_name] = i
    return number_by_form
def get_N_tasks(inspect, name):
    """Return the number of registered + reserved tasks for worker *name*.

    Falls back to 0 when the inspector cannot reach the worker (the
    inspect calls return None or the worker key is missing).
    """
    try:
        registered = len(inspect.registered()[name])
        reserved = len(inspect.reserved()[name])
    # Bug fix: narrowed from a bare except, which also swallowed
    # KeyboardInterrupt/SystemExit.
    except Exception:
        registered = 0
        reserved = 0
    logger.info(f"registered {registered}, reserved {reserved}")
    return reserved + registered
def send_task(data, celery_app, inspect, N=15):
    """
    Send *data* to the processing queue, first waiting until at most *N*
    tasks are pending on the worker (simple back-pressure).
    """
    while get_N_tasks(inspect, "celery@abacus") > N:
        # Bug fix: the message claimed 5 seconds but the sleep is 60.
        logger.info("There were too many reserved tasks so waiting 60 seconds")
        time.sleep(60)
    logger.info("Sending data")
    celery_app.send_task("processing_tasks.process_data", [data])
def download_data_from_s3(config):
    """
    Download the csv file for every configured form from the S3 bucket
    ``config.s3_bucket`` (under the ``data/`` prefix) into
    ``config.data_directory``.

    Needs to be authenticated with AWS to run.

    Args:
        config: application config providing country_config["tables"],
            s3_bucket and data_directory.
            NOTE(review): data_directory is assumed to already end with
            a path separator -- confirm.
    """
    s3 = boto3.resource('s3')
    for form_name in config.country_config["tables"]:
        file_name = form_name + ".csv"
        s3_key = "data/" + file_name
        destination_path = config.data_directory + file_name
        s3.meta.client.download_file(config.s3_bucket, s3_key, destination_path)
# Real time
def real_time_s3(app, config, session, number_by_form=None):
    """Download data from S3 and enqueue any new rows from the CSV files.

    Returns the updated per-form record counts so the caller can pass
    them back in on the next polling iteration.
    """
    # Bug fix: mutable default argument replaced by a None sentinel.
    if number_by_form is None:
        number_by_form = {}
    logger.info("Starting read from S3")
    download_data_from_s3(config)
    # Bug fix: read_stationary_data requires the celery app as its third
    # positional argument (previously omitted -> TypeError) and returns
    # the updated counts (previously discarded).
    number_by_form = read_stationary_data(util.read_csv_file, config, app,
                                          previous_number_by_form=number_by_form)
    logger.info("Finishing read from S3")
    time.sleep(config.data_stream_interval)
    return number_by_form
def real_time_fake(app, config, session, *args):
    """Create a batch of fake data (10 rows per form) and feed it into
    the system, either directly to the processing queue (INTERNAL) or
    through SQS (SEND_TO_SQS), then sleep for the configured interval."""
    logger.info("Sending fake data")
    new_data = []
    for form in config.country_config["tables"]:
        data = create_fake_data.get_new_fake_data(form=form,
                                                  session=session,
                                                  N=10,
                                                  param_config=config,
                                                  dates_is_now=True)
        # NOTE(review): get_new_fake_data appears to yield tuples whose
        # first element is the row dict -- confirm.
        new_data += [{"form": form, "data": d[0]} for d in data]
    if config.fake_data_generation == "INTERNAL":
        # Send straight to the local processing queue.
        app.send_task('processing_tasks.process_data', [new_data])
    elif config.fake_data_generation == "SEND_TO_SQS":
        # Push each record to SQS, then drain the queue a few times so
        # the fake data flows through the normal SQS path.
        # These names are locals; they do not touch the module-level
        # sqs_client/sqs_queue_url cache used by real_time_sqs.
        sqs_client, sqs_queue_url = util.subscribe_to_sqs(config.fake_data_sqs_endpoint,
                                                          config.fake_data_sqs_queue.lower())
        for d in new_data:
            d["formId"] = d["form"]
            sqs_client.send_message(
                QueueUrl=sqs_queue_url,
                MessageBody=json.dumps(d))
        for i in range(4):
            real_time_sqs(app, config)
    else:
        raise NotImplementedError("Not yet implemented")
    logger.info("Sleeping")
    time.sleep(config.fake_data_interval)
# Cached SQS connection, initialised lazily by real_time_sqs and reused
# across polling iterations.
sqs_client = None
sqs_queue_url = None
def real_time_sqs(app, config, *args):
    """Poll AWS SQS once and forward any received records to the pipeline.

    Connects lazily on first call; on a connection or receive error the
    function logs and returns so the caller's loop can retry later.
    """
    global sqs_client
    global sqs_queue_url
    if sqs_client is None:
        try:
            logger.info(f"Subscribing to SQS endpoint: {config.SQS_ENDPOINT}.")
            logger.info(f"Subscribing to SQS queue: {config.sqs_queue.lower()}.")
            sqs_client, sqs_queue_url = util.subscribe_to_sqs(config.SQS_ENDPOINT,
                                                              config.sqs_queue.lower())
        except Exception as e:
            logger.exception("Error in reading message", exc_info=True)
            return
    try:
        logger.info("Getting messages from queue " + str(sqs_queue_url))
        # Long poll (up to 19s) for up to 10 messages at a time.
        messages = sqs_client.receive_message(QueueUrl=sqs_queue_url,
                                              WaitTimeSeconds=19,
                                              MaxNumberOfMessages=10)
    except Exception as e:
        logger.error(str(e) + ", retrying...")
        return
    if "Messages" in messages:
        messages_to_send = []
        for message in messages["Messages"]:
            logger.info("Message %s", message)
            receipt_handle = message["ReceiptHandle"]
            logger.debug("Deleting message %s", receipt_handle)
            try:
                message_body = json.loads(message["Body"])
                form = message_body["formId"]
                form_data = message_body["data"]
                messages_to_send.append({"form": form, "data": form_data})
                # Delete only after the record is queued locally, so a
                # failure above leaves the message on SQS for redelivery.
                sqs_client.delete_message(QueueUrl=sqs_queue_url,
                                          ReceiptHandle=receipt_handle)
            except Exception as e:
                logger.exception("Error in reading message", exc_info=True)
        app.send_task("processing_tasks.process_data", [messages_to_send])
| {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
65,993 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/pipeline_worker/tests/test_add_links.py | import unittest
from unittest.mock import patch
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from meerkat_abacus.config import config
from meerkat_abacus import model
from meerkat_abacus.pipeline_worker.process_steps import add_links
from meerkat_abacus.consumer.database_setup import create_db
class TestAddLinks(unittest.TestCase):
    """Integration tests for the AddLinks pipeline step.

    Each test mocks ``util.get_links`` to supply a link definition,
    inserts form rows directly into the test database and then checks
    the records returned by ``AddLinks.run``.
    """

    def setUp(self):
        """Recreate the test database and open a session to it."""
        create_db(config.DATABASE_URL, drop=True)
        engine = create_engine(config.DATABASE_URL)
        model.form_tables(config)
        model.Base.metadata.create_all(engine)
        # This engine was only needed for schema creation; dispose it so it
        # does not keep idle connections open for the whole test run.
        engine.dispose()
        self.engine = create_engine(config.DATABASE_URL)
        Session = sessionmaker(bind=self.engine)
        self.session = Session()

    def tearDown(self):
        """Empty the form tables and release all database resources.

        The previous implementation leaked the connection, the session
        and the engine between tests.
        """
        con = self.engine.connect()
        table = model.form_tables(config)["demo_case"]
        con.execute(table.__table__.delete())
        table = model.form_tables(config)["demo_alert"]
        con.execute(table.__table__.delete())
        con.close()
        self.session.close()
        self.engine.dispose()

    # (links-by-type, links-by-name) tuple in the shape util.get_links returns.
    test_links = (
        {"Case": [{
            "name": "alert_investigation",
            "to_form": "demo_alert",
            "from_form": "demo_case",
            "from_column": "alert_id",
            "to_column": "alert_id",
            "method": "match",
            "order_by": "visit_data;date",
            "uuid": "meta/instanceID"
        }]},
        {"alert_investigation": {
            "name": "alert_investigation",
            "to_form": "demo_alert",
            "from_form": "demo_case",
            "from_column": "alert_id",
            "to_column": "alert_id",
            "method": "match",
            "order_by": "visit_data;date",
            "uuid": "meta/instanceID"
        }})

    @patch.object(add_links.util, 'get_links',
                  return_value=test_links)
    def test_add_to_links(self, get_links_mock):
        """Data arriving on the to-form picks up the matching from-form row."""
        al = add_links.AddLinks(config, self.session)
        existing_data = [
            {
                "uuid": "a",
                "data": {
                    "visit_date": "2017-01-14T05:38:33.482144",
                    "icd_code": "A01",
                    "patientid": "1",
                    "alert_id": "a1",
                    "module": "ncd",
                    "intro./visit": "new",
                    "id": "1"
                }
            },
            {
                "uuid": "b",
                "data": {
                    "visit_date": "2017-01-14T05:38:33.482144",
                    "icd_code": "A01",
                    "patientid": "1",
                    "alert_id": "a2",
                    "module": "ncd",
                    "intro./visit": "new",
                    "id": "2"
                }
            }
        ]
        table = model.form_tables(config)["demo_case"]
        con = self.engine.connect()
        con.execute(table.__table__.insert(), existing_data)
        con.close()
        test_data = {"type": "Case",
                     "original_form": "demo_alert",
                     "link_data": {"alert_investigation": [{"alert_id": "a1"}]}}
        results = al.run("data", test_data)
        self.assertEqual(len(results), 1)
        # Only the row with alert_id == "a1" should have been linked in.
        self.assertEqual(results[0]["data"]["raw_data"],
                         existing_data[0]["data"])
        self.assertEqual(results[0]["data"]["link_data"],
                         test_data["link_data"])

    # Same link definition again so the second test's mock is independent.
    test_links2 = (
        {"Case": [{
            "name": "alert_investigation",
            "to_form": "demo_alert",
            "from_form": "demo_case",
            "from_column": "alert_id",
            "to_column": "alert_id",
            "method": "match",
            "order_by": "visit_data;date",
            "uuid": "meta/instanceID"
        }]},
        {"alert_investigation": {
            "name": "alert_investigation",
            "to_form": "demo_alert",
            "from_form": "demo_case",
            "from_column": "alert_id",
            "to_column": "alert_id",
            "method": "match",
            "order_by": "visit_data;date",
            "uuid": "meta/instanceID"
        }})

    @patch.object(add_links.util, 'get_links',
                  return_value=test_links2)
    def test_add_from_links(self, get_links_mock):
        """Data arriving on the from-form picks up the matching to-form row."""
        config.country_config["alert_id_length"] = 1
        al = add_links.AddLinks(config, self.session)
        existing_data = [
            {
                "uuid": "a",
                "data": {
                    "alert_id": "1",
                }
            }
        ]
        table = model.form_tables(config)["demo_alert"]
        con = self.engine.connect()
        con.execute(table.__table__.insert(), existing_data)
        con.close()
        test_data = {"type": "Case",
                     "original_form": "demo_case",
                     "raw_data": {"alert_id": "1",
                                  "intro./visit": "new"}}
        results = al.run("data", test_data)
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0]["data"]["raw_data"],
                         test_data["raw_data"])
        self.assertEqual(results[0]["data"]["link_data"],
                         {"alert_investigation": [existing_data[0]["data"]]})

    # Self-link (demo_case -> demo_case) using a case-insensitive match.
    test_links3 = (
        {"Case": [{
            "name": "return_visit",
            "to_form": "demo_case",
            "from_form": "demo_case",
            "from_column": "link_id",
            "to_column": "link_id",
            "method": "lower_match",
            "order_by": "visit_date;date",
            "uuid": "meta/instanceID",
            "to_condition": "visit:return"
        }]},
        {"return_visit": {
            "name": "return_visit",
            "to_form": "demo_case",
            "from_form": "demo_case",
            "from_column": "link_id",
            "to_column": "link_id",
            "method": "lower_match",
            "order_by": "visit_date;date",
            "uuid": "meta/instanceID",
            "to_condition": "visit:return"
        }})

    @patch.object(add_links.util, 'get_links',
                  return_value=test_links3)
    def test_self_link_lower_match(self, get_links_mock):
        """Self-links match case-insensitively and are sorted by order_by."""
        config.country_config["alert_id_length"] = 1
        al = add_links.AddLinks(config, self.session)
        existing_data = [
            {
                "uuid": "a",
                "data": {
                    "visit_date": "2017-01-14T05:38:33.482144",
                    "icd_code": "A01",
                    "patientid": "1",
                    "alert_id": "aa",
                    "module": "ncd",
                    "intro./visit": "new",
                    "id": "1"
                }
            },
            {
                "uuid": "b",
                "data": {
                    "visit_date": "2017-01-17T05:38:33.482144",
                    "link_id": "AA",
                    "visit": "return",
                    "id": "2"
                }
            }
        ]
        table = model.form_tables(config)["demo_case"]
        con = self.engine.connect()
        con.execute(table.__table__.insert(), existing_data)
        con.close()
        test_data = {"type": "Case",
                     "original_form": "demo_case",
                     "link_data": {"return_visit": [{
                         "link_id": "Aa",
                         "visit": "return",
                         "id": "3",
                         "visit_date": "2017-01-16T05:38:33.482144"}]}}
        results = al.run("data", test_data)
        self.assertEqual(len(results), 1)
        self.assertEqual(len(results[0]["data"]["link_data"]["return_visit"]),
                         2)
        # Make sure they are in right order
        self.assertEqual(results[0]["data"]["link_data"]["return_visit"][0]["id"],
                         "3")
        self.assertEqual(results[0]["data"]["link_data"]["return_visit"][1]["id"],
                         "2")
| {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
65,994 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/util/authenticate.py | import logging
import os
import backoff
import requests
from meerkat_libs import authenticate
# Credentials used to authenticate abacus against the auth service; both
# can be overridden via the environment for real deployments.
ABACUS_AUTH_USERNAME = os.environ.get('ABACUS_AUTH_USERNAME', 'abacus-dev-user')
ABACUS_AUTH_PASSWORD = os.environ.get('ABACUS_AUTH_PASSWORD', 'password')
# Module-level cache of the last token; refreshed by abacus_auth_token().
abacus_auth_token_ = ''
def retry_message(details):
    """Log that authentication failed and when the next attempt will run.

    Args:
        details: backoff handler payload. ``backoff.on_exception``'s
            ``on_backoff`` hook passes a details dict whose ``"wait"``
            key is the number of seconds until the next try — the old
            code stringified the whole dict into the log message.
    """
    wait = details.get("wait") if isinstance(details, dict) else details
    logging.info("Failed to authenticate. Retrying in " + str(wait))
@backoff.on_exception(backoff.expo,
                      requests.exceptions.RequestException,
                      on_backoff=retry_message,
                      max_tries=8,
                      max_value=30)
@backoff.on_predicate(backoff.expo,
                      lambda x: x == '',
                      max_tries=10,
                      max_value=30)
def abacus_auth_token():
    """Return the cached auth token, refreshing it via ``authenticate``.

    Retries with exponential backoff on request errors and again while
    the returned token is the empty string.
    """
    global abacus_auth_token_
    refreshed = authenticate(username=ABACUS_AUTH_USERNAME,
                             password=ABACUS_AUTH_PASSWORD,
                             current_token=abacus_auth_token_)
    abacus_auth_token_ = refreshed
    return refreshed
| {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
65,995 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/consumer/database_setup.py | import time
import csv
import json
import os
from dateutil.parser import parse
from geoalchemy2.shape import from_shape
from shapely.geometry import shape, Polygon, MultiPolygon
from sqlalchemy import create_engine
from sqlalchemy import exc
from sqlalchemy.orm import sessionmaker
from sqlalchemy_utils import database_exists, create_database, drop_database
from meerkat_abacus.config import config
from meerkat_abacus import model
from meerkat_abacus import util
from meerkat_abacus import logger
def create_db(url, drop=False):
    """
    Create the application database, retrying on connection errors.

    Args:
        url : the database_url
        drop: drop any existing database before creating it
    Returns:
        Boolean: True
    """
    # Up to five attempts; the db container may still be starting up.
    for _attempt in range(5):
        try:
            if drop and database_exists(url):
                logger.debug('Dropping database.')
                drop_database(url)
            if not database_exists(url):
                logger.debug('Creating database.')
                create_database(url)
            break
        except exc.OperationalError:
            logger.exception('There was an error connecting to the db.', exc_info=True)
            logger.error('Trying again in 5 seconds...')
            time.sleep(5)
    # Postgis is required for the location geometry columns.
    engine = create_engine(url)
    connection = engine.connect()
    connection.execute("CREATE EXTENSION IF NOT EXISTS postgis")
    connection.close()
    return True
def import_variables(session, param_config):
    """
    Import variables from codes csv-file(s).

    Clears the aggregation_variables table and repopulates it from the
    configured coding files.

    Args:
        session: db-session
        param_config: config object carrying country_config and
            config_directory
    """
    session.query(model.AggregationVariables).delete()
    session.commit()
    country_config = param_config.country_config
    # check if the coding_list parameter exists. If not, use the legacy
    # parameter codes_file instead
    if 'coding_list' in country_config.keys():
        codes_files = [
            param_config.config_directory + 'variable_codes/' + name
            for name in country_config['coding_list']
        ]
    else:
        codes_files = [
            param_config.config_directory +
            country_config['codes_file'] + '.csv'
        ]
    for codes_file in codes_files:
        _add_codes_file(codes_file, session)
    session.commit()


def _add_codes_file(codes_file, session):
    """Add every row of one variable-codes csv file to the session."""
    # Column names are invariant across rows; compute them once.
    keys = model.AggregationVariables.__table__.columns._data.keys()
    for row in util.read_csv(codes_file):
        # Drop the artefact column produced by trailing commas in the csv.
        if '' in row.keys():
            row.pop('')
        row = util.field_to_list(row, "category")
        row = {key: row[key] for key in keys if key in row}
        session.add(model.AggregationVariables(**row))
def import_clinics(csv_file, session, country_id, param_config,
                   other_info=None, other_condition=None):
    """
    Import clinics from csv file.

    Args:
        csv_file: path to csv file with clinics
        session: SQLAlchemy session
        country_id: id of the country
        param_config: config object carrying country_config
        other_info: optional list of extra csv columns to store on the
            location's ``other`` dict
        other_condition: optional {column: value} filter; rows whose
            column is present but differs are skipped
    """
    country_config = param_config.country_config

    # Build name -> id lookups for the already-imported parent levels.
    result = session.query(model.Locations)
    regions = {}
    for region in result:
        if region.level == "region":
            regions[region.name] = region.id
    districts = {}
    for district in result:
        if district.level == "district":
            districts[district.name] = district.id

    deviceids = []
    with open(csv_file) as f:
        clinics_csv = csv.DictReader(f)
        for row in clinics_csv:
            if row["deviceid"] and row["clinic"].lower() != "not used" and row[
                    "deviceid"] not in deviceids:
                other_cond = True
                if other_condition:
                    for key in other_condition.keys():
                        if row.get(key, None) and row[key] != other_condition[key]:
                            other_cond = False
                            break
                if not other_cond:
                    continue
                if "case_report" in row.keys():
                    if row["case_report"] in ["Yes", "yes"]:
                        case_report = 1
                    else:
                        case_report = 0
                else:
                    case_report = 0

                # Prepare a device item
                if "device_tags" in row:
                    tags = row["device_tags"].split(",")
                else:
                    tags = []
                session.add(
                    model.Devices(
                        device_id=row["deviceid"], tags=tags))
                deviceids.append(row["deviceid"])

                # If the clinic has a district we use that as
                # the parent_location, otherwise we use the region
                parent_location = 1
                if row["district"].strip():
                    parent_location = districts[row["district"].strip()]
                elif row["region"].strip():
                    parent_location = regions[row["region"].strip()]

                # Add population to the clinic and add it up through
                # all the other locations
                population = 0
                if "population" in row and row["population"]:
                    population = int(row["population"])
                    pop_parent_location = parent_location
                    while pop_parent_location:
                        r = session.query(model.Locations).filter(
                            model.Locations.id == pop_parent_location).first()
                        r.population += population
                        pop_parent_location = r.parent_location
                    session.commit()

                # BUGFIX: the original used `clinic_type is not None`,
                # which is Python identity on the Column object and is
                # always truthy, so the filter never restricted on
                # clinic_type. `.isnot(None)` is the SQL NULL test.
                result = session.query(model.Locations).filter(
                    model.Locations.name == row["clinic"],
                    model.Locations.parent_location == parent_location,
                    model.Locations.clinic_type.isnot(None)
                )

                # Construct other information from config
                other = {}
                if other_info:
                    for field in other_info:
                        other[field] = row.get(field, None)

                # Case type can be a comma separated list.
                case_type = row.get("case_type", "")
                case_type = list(map(str.strip, case_type.split(',')))

                # If two clinics have the same name and the same
                # parent_location, we are dealing with two tablets from the
                # same clinic, so we combine them.
                if len(result.all()) == 0:
                    if row["longitude"] and row["latitude"]:
                        point = "POINT(" + row["longitude"] + " " + row["latitude"] + ")"
                    else:
                        point = None
                    if "start_date" in row and row["start_date"]:
                        start_date = parse(row["start_date"], dayfirst=True)
                    else:
                        start_date = country_config["default_start_date"]
                    session.add(
                        model.Locations(
                            name=row["clinic"],
                            parent_location=parent_location,
                            point_location=point,
                            deviceid=row["deviceid"],
                            clinic_type=row["clinic_type"].strip(),
                            case_report=case_report,
                            case_type=case_type,
                            level="clinic",
                            population=population,
                            other=other,
                            service_provider=row.get("service_provider", None),
                            start_date=start_date,
                            country_location_id=row.get(
                                "country_location_id",
                                None
                            )
                        )
                    )
                else:
                    location = result.first()
                    location.deviceid += "," + row["deviceid"]
                    location.case_type = list(
                        set(location.case_type) | set(case_type)
                    )  # Combine case types with no duplicates
    session.commit()
def import_geojson(geo_json, session):
    """
    Attach area geometries from a geojson file to matching locations.

    Args:
        geo_json: path to the geojson file
        session: db session
    """
    with open(geo_json) as f:
        geometry = json.loads(f.read())
    for feature in geometry["features"]:
        geom = shape(feature["geometry"])
        if geom.geom_type == "Polygon":
            ring = list(geom.exterior.coords)
            if len(ring[0]) == 3:
                # Strip any z-coordinate; only 2D shapes are stored.
                geom = Polygon([pt[0:2] for pt in list(ring)])
            geom = MultiPolygon([geom])
        elif geom.geom_type == "MultiPolygon":
            flattened = []
            for poly in geom.geoms:
                ring = list(poly.exterior.coords)
                flattened.append(Polygon([pt[0:2] for pt in list(ring)]))
            geom = MultiPolygon(flattened)
        else:
            logger.info("shapely_shapes.geom_type : %s", geom.geom_type)
        name = feature["properties"]["Name"]
        location = session.query(model.Locations).filter(
            model.Locations.name == name,
            model.Locations.level.in_(["district",
                                       "region", "country"])).first()
        if location is not None:
            location.area = from_shape(geom)
    session.commit()
def import_regions(csv_file, session, column_name,
                   parent_column_name, level_name):
    """
    Import one level of administrative areas from a csv file.

    Args:
        csv_file: path to csv file with the areas
        session: SQLAlchemy session
        column_name: csv column holding the area name
        parent_column_name: csv column holding the parent area name
        level_name: location level to record (e.g. "region")
    """
    # Map existing location names to their ids for parent lookup.
    name_to_id = {loc.name: loc.id
                  for loc in session.query(model.Locations)}
    with open(csv_file) as f:
        for row in csv.DictReader(f):
            session.add(
                model.Locations(
                    name=row[column_name],
                    parent_location=name_to_id[row[parent_column_name].strip()],
                    level=level_name,
                    population=row.get("population", 0),
                    country_location_id=row.get("country_location_id", None)
                )
            )
    session.commit()
def import_locations(engine, session, param_config):
    """
    Imports all locations from csv-files.

    Args:
        engine: SQLAlchemy connection engine
        session: db session
        param_config: config object carrying country_config and
            config_directory
    """
    country_config = param_config.country_config
    # Start from a clean locations table with ids from 1.
    session.query(model.Locations).delete()
    engine.execute("ALTER SEQUENCE locations_id_seq RESTART WITH 1;")
    session.add(
        model.Locations(
            name=param_config.country_config["country_name"],
            level="country",
            country_location_id="the_country_location_id"
        )
    )
    session.query(model.Devices).delete()
    session.commit()

    locations_dir = param_config.config_directory + "locations/"
    location_files = country_config["locations"]
    regions_file = locations_dir + location_files["regions"]
    districts_file = locations_dir + location_files["districts"]
    clinics_file = locations_dir + location_files["clinics"]
    if "zones" in location_files:
        # With a zone level present, regions hang off zones rather than
        # directly off the country.
        zone_file = locations_dir + location_files["zones"]
        import_regions(zone_file, session, "zone", "country", "zone")
        import_regions(regions_file, session, "region", "zone", "region")
    else:
        import_regions(regions_file, session, "region", "country", "region")
    import_regions(districts_file, session, "district", "region", "district")
    import_clinics(clinics_file, session, 1,
                   other_info=param_config.country_config.get("other_location_information", None),
                   other_condition=param_config.country_config.get("other_location_condition", None),
                   param_config=param_config)
    for geojson_file in param_config.country_config["geojson_files"]:
        import_geojson(param_config.config_directory + geojson_file,
                       session)
def import_parameters(engine, session, param_config):
    """
    Imports additional calculation parameters from csv-files.

    Args:
        engine: SQLAlchemy connection engine
        session: db session
        param_config: config object listing the parameter files
    """
    # Rebuild the table from scratch with ids starting at 1.
    session.query(model.CalculationParameters).delete()
    engine.execute("ALTER SEQUENCE calculation_parameters_id_seq RESTART WITH 1;")
    for file in param_config.country_config.get("calculation_parameters", []):
        logger.debug("Importing parameter file %s", file)
        base_name, extension = os.path.splitext(file)
        if extension == '.json':
            json_path = (param_config.config_directory +
                         "calculation_parameters/" + file)
            with open(json_path) as json_data:
                loaded = json.load(json_data)
            session.add(
                model.CalculationParameters(
                    name=base_name,
                    type=extension,
                    parameters=loaded
                ))
        elif extension == '.csv':
            # TODO: CSV implementation
            pass
    session.commit()
def import_dump(dump_file):
    """
    Load a postgres SQL dump into the meerkat_db database via psql.

    Args:
        dump_file: file name of the dump, relative to
            ``config.db_dump_folder``
    """
    # BUGFIX: subprocess was used but never imported anywhere in this
    # module, so this function raised NameError at runtime. Imported
    # locally as no other function here needs it.
    import subprocess
    path = config.db_dump_folder + dump_file
    logger.info("Loading DB dump: {}".format(path))
    with open(path, 'r') as f:
        command = ['psql', '-U', 'postgres', '-h', 'db', 'meerkat_db']
        proc = subprocess.Popen(command, stdin=f)
        proc.communicate()
        if proc.returncode != 0:
            logger.error("psql exited with code %s", proc.returncode)
def set_up_persistent_database(param_config):
    """
    Sets up the test persistent db if it doesn't exist yet.
    """
    logger.info("Create Persistent DB")
    url = param_config.PERSISTENT_DATABASE_URL
    if database_exists(url):
        # Already provisioned; nothing to do.
        return
    create_db(url, drop=False)
    engine = create_engine(url)
    logger.info("Creating persistent database tables")
    model.form_tables(param_config=param_config)
    model.Base.metadata.create_all(engine)
    engine.dispose()
def set_up_database(leave_if_data, drop_db, param_config):
    """
    Sets up the db and imports static data.

    Args:
        leave_if_data: do nothing if data is there
        drop_db: shall db be dropped before created
        param_config: config object for Abacus in case the function is called in a Celery container

    Returns:
        ``(session, engine)`` after populating, or the bool ``set_up``
        when a db dump was imported instead.
    """
    set_up = True
    if leave_if_data:
        if database_exists(param_config.DATABASE_URL):
            engine = create_engine(param_config.DATABASE_URL)
            Session = sessionmaker(bind=engine)
            session = Session()
            # Any row in the data table means a previous run populated
            # the db; skip the whole setup in that case.
            if len(session.query(model.Data).all()) > 0:
                set_up = False
    if set_up:
        logger.info("Create DB")
        create_db(param_config.DATABASE_URL, drop=drop_db)
        if param_config.db_dump:
            # A dump restore replaces all of the csv-based population
            # below, so return early.
            import_dump(param_config.db_dump)
            return set_up
        engine = create_engine(param_config.DATABASE_URL)
        Session = sessionmaker(bind=engine)
        session = Session()
        logger.info("Populating DB")
        model.form_tables(param_config)
        model.Base.metadata.create_all(engine)
        # Index every form column referenced by a link condition so the
        # link lookups during processing stay fast.
        links, links_by_name = util.get_links(param_config.config_directory +
                                              param_config.country_config["links_file"])
        indexes_already_created = {}
        for link in links_by_name.values():
            to_form = link["to_form"]
            to_condition_column = link["to_condition"].split(":")[0]
            add_index(to_form, to_condition_column, indexes_already_created, engine)
            from_form = link["from_form"]
            from_condition_column = link.get("from_condition", "").split(":")[0]
            add_index(from_form, from_condition_column, indexes_already_created, engine)
        logger.info("Import Locations")
        import_locations(engine, session, param_config)
        logger.info("Import calculation parameters")
        import_parameters(engine, session, param_config)
        logger.info("Import Variables")
        import_variables(session, param_config)
        # Threshold/double alert variables are queried by id against the
        # data JSONB column, so give each one an expression index.
        for alert in session.query(model.AggregationVariables).filter(
                model.AggregationVariables.alert == 1).all():
            alert_type = alert.alert_type.split(":")[0]
            if alert_type in ["threshold", "double"]:
                engine.execute(f"CREATE index on data ((variables->>'{alert.id}'))")
    return session, engine
def unlogg_tables(form_tables, engine):
    """Switch the data tables and the given form tables to UNLOGGED mode."""
    for name in ("data", "disregarded_data", *form_tables):
        engine.execute(f"ALTER TABLE {name} SET UNLOGGED;")
def logg_tables(form_tables, engine):
    """Switch the data tables and the given form tables back to LOGGED mode."""
    for name in ("data", "disregarded_data", *form_tables):
        engine.execute(f"ALTER TABLE {name} SET LOGGED;")
def add_index(form, column, already_created, engine):
    """Create an expression index on form.data->>column, at most once.

    ``already_created`` maps form name -> list of indexed columns and is
    updated in place so repeated calls are no-ops.
    """
    if not column:
        return
    if column in already_created.get(form, []):
        return
    engine.execute(f"CREATE index on {form} ((data->>'{column}'))")
    already_created.setdefault(form, []).append(column)
| {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
65,996 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/pipeline_worker/process_steps/send_alerts.py | from meerkat_abacus.pipeline_worker.process_steps import ProcessingStep
from meerkat_abacus import model
from meerkat_abacus import util
class SendAlerts(ProcessingStep):
    """Pipeline step that dispatches individual alerts for coded records."""

    def __init__(self, param_config, session):
        self.step_name = "send_alerts"
        # Cache every variable flagged as an alert, keyed by variable id.
        alert_query = session.query(model.AggregationVariables).filter(
            model.AggregationVariables.alert == 1)
        self.alert_variables = {}
        for variable in alert_query:
            self.alert_variables[variable.id] = variable
        self.locations = util.all_location_data(session)[0]
        self.config = param_config
        self.session = session

    def run(self, form, data):
        """Send an individual alert for this record if it raised one.

        The record passes through unchanged; when it carries an
        individual alert, an ``alert_id`` (the uuid suffix) is attached
        to its variables and the alert is dispatched via util.send_alert.
        """
        variables = data["variables"]
        is_individual_alert = (
            "alert" in variables and
            variables["alert_type"] == "individual")
        if is_individual_alert:
            id_length = self.config.country_config["alert_id_length"]
            alert_id = data["uuid"][-id_length:]
            variables["alert_id"] = alert_id
            util.send_alert(alert_id, data,
                            self.alert_variables,
                            self.locations, self.config)
        return [{"form": form, "data": data}]
| {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
65,997 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/__init__.py | import logging
from meerkat_abacus.config import config

# Package-level logging setup: the rest of the package logs through this
# shared logger (other modules do ``from meerkat_abacus import logger``).
handler = logging.StreamHandler()
formatter = logging.Formatter(config.LOGGING_FORMAT)
handler.setFormatter(formatter)
# getLevelName maps a level name string (e.g. "DEBUG") to its numeric value.
level = logging.getLevelName(config.LOGGING_LEVEL)
logger = logging.getLogger(config.LOGGER_NAME)
logger.setLevel(level)
logger.addHandler(handler)
# Stop records from also reaching the root logger's handlers, which
# would duplicate every message.
logger.propagate = 0
logger.debug("Config initialised.")
| {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
65,998 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py | from datetime import datetime, timedelta
import pandas as pd
from sqlalchemy import func, or_, and_
from sqlalchemy.sql import text
from meerkat_abacus.pipeline_worker.process_steps import ProcessingStep
from meerkat_abacus.util.epi_week import epi_year_start_date
from meerkat_abacus import model
from meerkat_abacus import util
class AddMultipleAlerts(ProcessingStep):
    """Pipeline step that raises alerts computed over multiple records.

    For every aggregate alert variable (threshold or double-doubling) it
    checks whether the incoming record completes an alert condition. If
    so, one record is marked as the representative alert and the other
    contributing records are linked to it as sub-alerts.
    """

    def __init__(self, param_config, session):
        self.step_name = "add_multiple_alerts"
        # All alert variables that are not individual alerts.
        # NOTE(review): "indivdual" looks like a misspelling of
        # "individual" (send_alerts matches the correctly spelled value)
        # -- confirm which string is actually stored in the DB before
        # changing it.
        self.alerts = session.query(model.AggregationVariables).filter(
            model.AggregationVariables.alert == 1,
            model.AggregationVariables.alert_type != "indivdual").all()
        self.locations = util.all_location_data(session)[0]
        self.config = param_config
        self.session = session

    @property
    def engine(self):
        # Raw DB engine used by the double_double() SQL query; injected
        # by the pipeline through the setter below.
        return self._engine

    @engine.setter
    def engine(self, new_engine):
        self._engine = new_engine

    def start_step(self):
        super(AddMultipleAlerts, self).start_step()
        # uuids already attached to an alert during this step run, so
        # the same record is never emitted twice.
        self.found_uuids = set([])

    def run(self, form, data):
        """
        Checks data to see if it contributes to a multiple alert.

        Currently implemented: double-doubling and thresholds.
        """
        return_data = []
        if data["uuid"] not in self.found_uuids:
            for a in self.alerts:
                var_id = a.id
                if not a.alert_type:
                    continue
                # alert_type is e.g. "threshold:3,5" -- the prefix names
                # the algorithm, the suffix carries its limits.
                alert_type = a.alert_type.split(":")[0]
                if var_id not in data["variables"]:
                    continue
                new_alerts = []
                type_name = None
                if alert_type == "threshold":
                    new_alerts = threshold(
                        var_id,
                        a.alert_type,
                        data["date"],
                        data["clinic"],
                        self.session
                    )
                    type_name = "threshold"
                elif alert_type == "double":
                    new_alerts = double_double(a.id,
                                               data["epi_week"],
                                               data["epi_year"],
                                               data["clinic"],
                                               self.engine)
                    # NOTE(review): double-doubling alerts are also
                    # labelled "threshold", which matches the "type"
                    # field returned by double_double() -- confirm this
                    # labelling is intended.
                    type_name = "threshold"
                return_data += self._handle_new_alerts(new_alerts, a, type_name, form)
        # if len(return_data) == 0:
        #     return_data.append({"form": form,
        #                         "data": data})
        return return_data

    def _handle_new_alerts(self, new_alerts, a, type_name, form):
        """Turn raw alert dicts into updated pipeline records.

        The record with the lowest uuid becomes the representative
        alert; the remaining contributing records are marked as
        sub-alerts pointing at it. Returns the affected records as
        {"form": ..., "data": ...} dicts, skipping any uuid already seen.
        """
        return_data = []
        if new_alerts:
            for new_alert in new_alerts:
                # Choose a representative record for the alert
                uuids = sorted(new_alert["uuids"])
                others = uuids[1:]
                representative = uuids[0]
                form_table = model.form_tables(param_config=self.config)[a.form]
                # Fetch both the coded Data row and the raw form row for
                # every record contributing to this alert.
                records = self.session.query(
                    model.Data, form_table).join(
                        (form_table,
                         form_table.uuid == model.Data.uuid
                         )).filter(model.Data.uuid.in_(new_alert["uuids"]),
                                   model.Data.type == a.type)
                data_records_by_uuid = {}
                form_records_by_uuid = {}
                for r in records.all():
                    data_records_by_uuid[r[0].uuid] = r[0]
                    form_records_by_uuid[r[1].uuid] = r[1]
                new_variables = data_records_by_uuid[representative].variables
                # Update the variables of the representative alert
                new_variables["alert"] = 1
                new_variables["alert_type"] = type_name
                new_variables["alert_duration"] = new_alert["duration"]
                new_variables["alert_reason"] = a.id
                # The alert id is the configured-length suffix of the uuid.
                new_variables["alert_id"] = data_records_by_uuid[
                    representative].uuid[
                        -self.config.country_config["alert_id_length"]:]
                self._add_alert_data(new_variables, form_records_by_uuid[representative],
                                     a.form)
                # Update all the non-representative rows
                for o in others:
                    self._update_other_row(data_records_by_uuid[o],
                                           form_records_by_uuid[o],
                                           representative,
                                           a.form)
                for record in data_records_by_uuid.values():
                    dict_record = row_to_dict(record)
                    if dict_record["uuid"] not in self.found_uuids:
                        return_data.append({"form": form,
                                            "data": dict_record})
                self.found_uuids = self.found_uuids | set(uuids)
        return return_data

    def _update_other_row(self, row, form_record, representative, form):
        """Mark ``row`` as a sub-alert of the representative record."""
        row.variables["sub_alert"] = 1
        row.variables["master_alert"] = representative
        # A sub-alert must not carry its own alert markers.
        if "alert" in row.variables:
            del row.variables["alert"]
        if "alert_id" in row.variables:
            del row.variables["alert_id"]
        self._add_alert_data(row.variables, form_record, form)

    def _add_alert_data(self, variables, form_record, form):
        """Copy the configured alert_data columns of the raw form record
        into the variables dict under ``alert_<name>`` keys."""
        for data_var in self.config.country_config["alert_data"][form].keys():
            data_column = self.config.country_config["alert_data"][form][data_var]
            variables["alert_" + data_var] = form_record.data[data_column]
def row_to_dict(row):
    """Convert a SQLAlchemy row object into a plain dict.

    Every mapped column becomes a key. A non-null ``geolocation`` value
    is replaced by its ``.desc`` attribute so the record is plain data.
    """
    column_names = row.__table__.columns.keys()
    record = {name: getattr(row, name) for name in column_names}
    geolocation = record.get("geolocation")
    if geolocation is not None:
        record["geolocation"] = geolocation.desc
    return record
def threshold(var_id, alert_type, date, clinic, session):
    """
    Calculate threshold alerts based on daily and weekly limits.

    Returns alerts for all days where there are more than limits[0] cases
    of var_id in one clinic or where there are more than limits[1] cases of
    var_id in one clinic for one week. The limits are parsed from
    ``alert_type`` ("threshold:daily,weekly", optionally with two extra
    hospital-specific limits: "threshold:daily,weekly,h_daily,h_weekly").

    Args:
        var_id: variable id for alert
        alert_type: "threshold:..." string carrying the case limits
        date: date of the record that triggered the check
        clinic: clinic id of the record
        session: Db session

    Returns:
        alerts: list of alert dicts, or None if no matching data exists.
    """
    # Only consider this clinic's records within one week of the record.
    conditions = [model.Data.variables.has_key(var_id),
                  model.Data.clinic == clinic,
                  model.Data.date > date - timedelta(days=7),
                  model.Data.date < date + timedelta(days=7)]
    data = pd.read_sql(
        session.query(model.Data.region, model.Data.district,
                      model.Data.clinic, model.Data.date, model.Data.clinic_type,
                      model.Data.uuid, model.Data.variables[var_id].label(var_id)).filter(
                          *conditions).statement, session.bind)
    if len(data) == 0:
        return None
    # Group by clinic and day
    limits = [
        int(x) for x in alert_type.split(":")[1].split(",")]
    hospital_limits = None
    if len(limits) == 4:
        # Separate limits apply to hospitals.
        hospital_limits = limits[2:]
        limits = limits[:2]
    daily = data.groupby(["clinic", pd.Grouper(
        key="date", freq="1D")]).sum()[var_id]
    daily_over_threshold = daily[daily >= limits[0]]
    alerts = []
    for clinic_date in daily_over_threshold.index:
        clinic, date = clinic_date
        data_row = data[(data["clinic"] == clinic) & (data["date"] == date)]
        if len(data_row) == 0:
            continue
        clinic_type = data_row["clinic_type"].iloc[0]
        uuids = list(data_row["uuid"])
        add = False
        # Re-check the case count against the limit appropriate for the
        # clinic type (hospitals may have their own, higher, limits).
        if hospital_limits and clinic_type == "Hospital":
            if len(uuids) >= hospital_limits[0]:
                add = True
        else:
            if len(uuids) >= limits[0]:
                add = True
        if add:
            alerts.append({
                "clinic": clinic,
                "reason": var_id,
                "duration": 1,
                "uuids": uuids,
                "type": "threshold"
            })
    today = datetime.now()
    # Align the weekly grouping frequency with the epi-week start weekday.
    epi_year_weekday = epi_year_start_date(today).weekday()
    freq = ["W-MON", "W-TUE", "W-WED", "W-THU", "W-FRI", "W-SAT",
            "W-SUN"][epi_year_weekday]
    # Group by clinic and epi week
    weekly = data.groupby(["clinic", pd.Grouper(
        key="date", freq=freq, label="left")]).sum()[var_id]
    weekly_over_threshold = weekly[weekly >= limits[1]]
    for clinic_date in weekly_over_threshold.index:
        clinic, date = clinic_date
        cases = data[(data["clinic"] == clinic) & (data["date"] >= date) & (
            data["date"] < date + timedelta(days=7))]
        if len(cases) == 0:
            continue
        clinic_type = cases["clinic_type"].iloc[0]
        uuids = list(cases.sort_values(["date"])["uuid"])
        add = False
        if hospital_limits and clinic_type == "Hospital":
            if len(uuids) >= hospital_limits[1]:
                add = True
        else:
            if len(uuids) >= limits[1]:
                add = True
        if add:
            alerts.append({
                "clinic": clinic,
                "reason": var_id,
                "duration": 7,
                "uuids": uuids,
                "type": "threshold"
            })
    return alerts
def double_double(var_id, week, year, clinic, engine):
    """
    Calculate threshold alerts based on a double doubling of cases.

    We want to trigger an alert for a clinic if there has been a doubling of cases
    in two consecutive weeks. I.e if the case numbers look like: 2, 4, 8. We would
    not trigger an alert for 2, 4, 7 or 2, 3, 8.

    Args:
        var_id: variable id for alert
        week: epi week of the record that triggered the check
        year: epi year of the record
        clinic: clinic id of the record
        engine: Db engine (used for a raw SQL query)

    Returns:
        alerts: list of alerts.
    """
    # Consider a five-week window centred on the record's week. The
    # year-wrap handling below assumes 52-week epi years.
    # NOTE(review): a 53-week epi year would be mishandled -- confirm.
    lower_limit = week - 2
    upper_limit = week + 2
    # `variables ? :var_id` is the Postgres JSONB key-existence operator.
    base_sql = "SELECT epi_week, count(*), string_agg(uuid, ',') from data where clinic = :clinic and variables ? :var_id and (week_where_clause) group by epi_week"
    variables = {
        "clinic": clinic,
        "var_id": var_id
    }
    if lower_limit >= 1 and upper_limit <= 52:
        # Window lies entirely within one epi year.
        week_where_clause = "epi_week >= :lower_limit and epi_week <= :upper_limit and epi_year = :epi_year"
        variables["lower_limit"] = lower_limit
        variables["upper_limit"] = upper_limit
        variables["epi_year"] = year
    elif upper_limit <= 52:
        # Window starts in the previous epi year.
        lower_limit = 52 + lower_limit
        week_where_clause = "(epi_week >= :lower_limit and epi_year = :epi_year_1) or (epi_week <= :upper_limit and epi_year = :epi_year_2)"
        variables["lower_limit"] = lower_limit
        variables["upper_limit"] = upper_limit
        variables["epi_year_1"] = year - 1
        variables["epi_year_2"] = year
    else:
        # Window ends in the next epi year.
        upper_limit = upper_limit - 52
        week_where_clause = "(epi_week >= :lower_limit and epi_year = :epi_year_1) or (epi_week <= :upper_limit and epi_year = :epi_year_2)"
        variables["lower_limit"] = lower_limit
        variables["upper_limit"] = upper_limit
        variables["epi_year_1"] = year
        variables["epi_year_2"] = year + 1
    query = base_sql.replace("week_where_clause", week_where_clause)
    connection = engine.connect()
    data = connection.execute(text(query), **variables).fetchall()
    connection.close()
    counts = {}
    uuids = {}
    s = 0
    for d in data:
        row_week = d[0]
        # Normalise year-wrapped weeks onto the same axis as `week`, so
        # e.g. week 1 of the following year compares as week 53.
        week_diff = row_week - week
        if abs(week_diff) > 2:
            if week_diff > 0:
                row_week = row_week - 52
            else:
                row_week = row_week + 52
        counts[row_week] = d[1]
        s += d[1]
        uuids[row_week] = d[2]
    # Require a minimum total case count in the window before alerting.
    if s < 14:
        return []
    alerts = []
    # The new record can complete a doubling-doubling sequence as its
    # first, middle or last week; check all three positions. An alert's
    # uuids are those of the sequence's final week.
    if counts.get(week, 0) > 1:
        if (counts.get(week + 1, 0) >= 2 * counts.get(week, 0) and
                counts.get(week + 2, 0) >= 2 * counts.get(week + 1, 0)):
            alerts.append({
                "clinic": clinic,
                "reason": var_id,
                "duration": 7,
                "uuids": uuids[week + 2].split(","),
                "type": "threshold"
            })
    if counts.get(week - 1, 0) > 1:
        if (counts.get(week, 0) >= 2 * counts.get(week - 1, 0) and
                counts.get(week + 1, 0) >= 2 * counts.get(week, 0)):
            alerts.append({
                "clinic": clinic,
                "reason": var_id,
                "duration": 7,
                "uuids": uuids[week + 1].split(","),
                "type": "threshold"
            })
    if counts.get(week - 2, 0) > 1:
        if (counts.get(week - 1, 0) >= 2 * counts.get(week - 2, 0) and
                counts.get(week, 0) >= 2 * counts.get(week - 1, 0)):
            alerts.append({
                "clinic": clinic,
                "reason": var_id,
                "duration": 7,
                "uuids": uuids[week].split(","),
                "type": "threshold"
            })
    return alerts
| {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
65,999 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/pipeline_worker/process_steps/to_codes.py | from dateutil.parser import parse
from datetime import datetime
import copy
from meerkat_abacus.pipeline_worker.process_steps import ProcessingStep
from meerkat_abacus import util
from meerkat_abacus.codes import to_codes
from meerkat_abacus.util import data_types
from meerkat_abacus import logger
class ToCodes(ProcessingStep):
    """Pipeline step that turns raw form records into coded data rows.

    For every incoming record the variable definitions for its data type
    are applied and a new row is emitted containing the matched variables,
    categories, location information and epi-week data.
    """

    def __init__(self, param_config, session):
        self.step_name = "to_codes"
        self.config = param_config
        # Link definitions indexed both by form type and by link name.
        self.links_by_type, self.links_by_name = util.get_links(
            param_config.config_directory +
            param_config.country_config["links_file"])
        self.locations = util.all_location_data(session)
        self.data_types = {d["name"]: d for d in
                           data_types.data_types(param_config=self.config)}
        # Pre-load the variables for every data type so run() needs no
        # further db access.
        self.variables = {}
        for type_name, data_type in self.data_types.items():
            self.variables[type_name] = to_codes.get_variables(
                session, match_on_form=data_type["type"])
        self.session = session
        self.alert_id_length = self.config.country_config["alert_id_length"]

    def run(self, form, data):
        """Code one record and return the resulting data rows.

        Args:
            form: name of the originating form (unused here; kept for the
                common ProcessingStep interface)
            data: record with at least "type", "raw_data" and
                "original_form" keys, optionally "link_data"
        Returns:
            list of {"form": ..., "data": ...} dicts, one per produced row;
            rows without location data or a valid date are skipped
        """
        return_rows = []
        data_type = self.data_types[data["type"]]
        rows = self._get_multi_rows(data, data_type)
        for row in rows:
            # Expose linked form data and the raw data under their form
            # names so to_code can match variables against them.
            linked_forms = row.get("link_data", {})
            for key, value in linked_forms.items():
                row[key] = value
            row[row["original_form"]] = row["raw_data"]
            variable_data, category_data, location_data, disregard = to_codes.to_code(
                row,
                self.variables[data_type["name"]],
                self.locations,
                data_type["type"],
                self.config.country_config["alert_data"],
                set(linked_forms),
                data_type["location"]
            )
            if location_data is None:
                logger.warning("Missing loc data")
                continue
            row["uuid"] = row[data_type["form"]][data_type["uuid"]]
            epi_year, week, date = self._get_epi_week(row, data_type)
            if epi_year is None:
                continue
            self._add_additional_variables(variable_data, data_type)
            self._set_alert_id(variable_data, row["uuid"])
            new_data = {
                "date": date,
                "epi_week": week,
                "epi_year": epi_year,
                "submission_date": self._get_submission_date(row, data_type),
                "type": data_type["type"],
                "uuid": row["uuid"],
                "variables": variable_data,
                "categories": category_data,
                # NOTE(review): links are taken from the original record,
                # not the (possibly split) sub row; link_data is copied
                # unchanged into every sub row so this looks equivalent —
                # confirm if sub rows ever get their own links.
                "links": self._get_link_uuids(data),
                "type_name": data_type["name"]
            }
            new_data.update(location_data)
            return_rows.append({"form": self._get_return_form(disregard),
                                "data": new_data})
        return return_rows

    def _set_alert_id(self, variables_data, uuid):
        """Add an alert_id (last N characters of the uuid) to alert rows.

        Bug fix: the parameters were previously declared in the opposite
        order to the one used at the call site, so the "alert" membership
        test ran against the uuid string and the alert_id was never set.
        """
        if "alert" in variables_data and "alert_id" not in variables_data:
            variables_data["alert_id"] = uuid[-self.alert_id_length:]

    def _get_return_form(self, disregard):
        """Disregarded rows are routed to a separate destination."""
        return_form = "data"
        if disregard:
            return_form = "disregardedData"
        return return_form

    def _get_submission_date(self, row, data_type):
        """Parse the form's SubmissionDate field; None when absent."""
        submission_date = None
        if "SubmissionDate" in row[data_type["form"]]:
            submission_date = parse(
                row[data_type["form"]].get("SubmissionDate")).replace(
                    tzinfo=None)
        return submission_date

    def _get_link_uuids(self, data):
        """Return {link_name: [uuid, ...]} for all linked records."""
        links = {}
        for name in data.get("link_data", {}).keys():
            link = self.links_by_name[name]
            links[name] = [x[link["uuid"]] for x in data["link_data"][name]]
        return links

    def _add_additional_variables(self, variable_data, data_type):
        # Every coded row gets the data type's own marker variable plus a
        # data_entry counter.
        variable_data[data_type["var"]] = 1
        variable_data["data_entry"] = 1

    def _get_multi_rows(self, data, data_type):
        """
        Takes a data row and splits it into multiple rows based on the
        config in the data_type.

        The "multiple_row" config is a comma separated list of field
        templates with "$" as the row-number placeholder. Sub row N copies
        the record, fills each template field from the corresponding
        numbered column and gets ":N" appended to its uuid. Splitting
        stops at the first N for which no template column holds a value.
        """
        if not data_type["multiple_row"]:
            return [data]
        fields = data_type["multiple_row"].split(",")
        i = 1
        data_in_row = True
        sub_rows = []
        while data_in_row:
            data_in_row = False
            sub_row = copy.deepcopy(data)
            for f in fields:
                column_name = f.replace("$", str(i))
                sub_row_name = f.replace("$", "")
                value = data["raw_data"].get(column_name, None)
                if value and value != "":
                    sub_row["raw_data"][sub_row_name] = value
                    data_in_row = True
            sub_row["raw_data"][data_type["uuid"]] = sub_row["raw_data"][
                data_type["uuid"]] + ":" + str(i)
            if data_in_row:
                sub_rows.append(sub_row)
            i += 1
        return sub_rows

    def _get_epi_week(self, row, data_type):
        """Return (epi_year, epi_week, date) for the row's date field.

        On a missing or invalid date the corresponding values stay None
        and the caller skips the row.
        """
        epi_year, week, date = None, None, None
        try:
            date = parse(row[data_type["form"]][data_type["date"]])
            # Truncate to midnight so the stored date carries no time part.
            date = datetime(date.year, date.month, date.day)
            epi_year, week = util.epi_week.epi_week_for_date(
                date, param_config=self.config.country_config)
        except KeyError:
            logger.error("Missing Date field %s", data_type["date"])
        except ValueError:
            logger.error(f"Failed to convert date to epi week. uuid: {row.get('uuid', 'UNKNOWN')}")
            logger.debug(f"Faulty row date: {date}.")
        except Exception:
            # Narrowed from a bare except so SystemExit/KeyboardInterrupt
            # still propagate.
            logger.exception("Invalid Date: %s", row[data_type["form"]].get(data_type["date"]))
        return epi_year, week, date
| {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
66,000 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py | from dateutil.parser import parse
from sqlalchemy import and_
from sqlalchemy.exc import OperationalError
from meerkat_abacus.pipeline_worker.process_steps import ProcessingStep
from meerkat_abacus import model, util
from meerkat_abacus import logger
class InitialVisitControl(ProcessingStep):
    """Pipeline step that demotes duplicate "new" visits to return visits.

    When a patient (identified by a configurable key list) already has an
    earlier initial visit stored in the db, all but the earliest visit are
    re-labelled as return visits.
    """

    def __init__(self, param_config, session):
        super().__init__()
        self.step_name = "initial_visit_control"
        self.session = session
        self.param_config = param_config

    @property
    def engine(self):
        return self._engine

    @engine.setter
    def engine(self, new_engine):
        self._engine = new_engine

    def run(self, form, data):
        """
        Configures and corrects the initial visits.

        Returns:
            A list of {"form": ..., "data": ...} dicts: just the incoming
            record when no correction applies, otherwise the record plus
            every previously stored matching visit, with all but the
            earliest visit marked as a return visit.
        """
        param_config = self.param_config
        empty_return = [{"form": form,
                         "data": data}]
        if "initial_visit_control" not in param_config.country_config:
            return empty_return
        form_config = param_config.country_config[
            'initial_visit_control'].get(form)
        if form_config is None:
            return empty_return
        new_visit_value = "new"
        return_visit_value = "return"
        table = model.form_tables(param_config=param_config)[form]
        identifier_key_list = form_config['identifier_key_list']
        current_identifier_values = {}
        for key in identifier_key_list:
            # Records missing any identifier value cannot be matched.
            if data[key] is None:
                return empty_return
            current_identifier_values[key] = data[key]
        visit_type_key = form_config['visit_type_key']
        # Only "new" visits can be duplicates of an initial visit.
        if data[visit_type_key] != new_visit_value:
            return empty_return
        visit_date_key = form_config['visit_date_key']
        module_key = form_config['module_key']
        module_value = form_config['module_value']
        if data[module_key] != module_value:
            return empty_return
        ret_corrected = self.get_initial_visits(self.session, table,
                                                current_identifier_values,
                                                identifier_key_list,
                                                visit_type_key,
                                                visit_date_key,
                                                module_key, module_value)
        if not ret_corrected:
            return empty_return
        # The oldest visit keeps "new"; every later one becomes a return
        # visit.
        combined_data = [data] + [r.data for r in ret_corrected]
        combined_data.sort(key=lambda d: parse(d[visit_date_key]),
                           reverse=False)
        for row in combined_data[1:]:
            row[visit_type_key] = return_visit_value
        return [{"form": form,
                 "data": row} for row in combined_data]

    def get_initial_visits(self, session, table, current_values,
                           identifier_key_list=['patientid', 'icd_code'],
                           visit_type_key='intro./visit',
                           visit_date_key='pt./visit_date',
                           module_key='intro./module', module_value="ncd"):
        """
        Finds cases where a patient has multiple initial visits.

        Args:
            session: db session
            table: table to check for duplicates
            current_values: current values for the identifier keys
            identifier_key_list: list of json keys in the data column that
                should occur only once for an initial visit
            visit_type_key: key of the json column data that defines visit type
            visit_date_key: key of the json column data that stores the
                visit date (unused here; kept for interface compatibility)
            module_key: module to filter the processing to
            module_value: required value of module_key
        Returns:
            list of matching (id, uuid, data) result rows
        """
        new_visit_value = "new"
        empty_values_filter = []
        conditions = []
        for key in identifier_key_list:
            # Match rows with the same identifier values ...
            conditions.append(table.data[key].astext == current_values[key])
            # ... but only where those identifier values are not empty.
            empty_values_filter.append(table.data[key].astext != "")
        result_query = session.query(
            table.id, table.uuid,
            table.data) \
            .filter(table.data[visit_type_key].astext == new_visit_value) \
            .filter(and_(*empty_values_filter)) \
            .filter(table.data[module_key].astext == module_value)\
            .filter(*conditions)
        try:
            results = result_query.all()
        except Exception:
            # A failed transaction leaves the session unusable; roll back
            # and retry once. Narrowed from a bare except so that
            # SystemExit/KeyboardInterrupt still propagate.
            logger.exception("Query failed; rolling back session and retrying")
            session.rollback()
            results = result_query.all()
        return results
| {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
66,001 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/pipeline_worker/process_steps/quality_control.py | """
Main functionality for importing data into abacus
"""
from dateutil.parser import parse
import random
from meerkat_abacus import util, logger
from meerkat_abacus.util import data_types
from meerkat_abacus.pipeline_worker.process_steps import ProcessingStep
from meerkat_abacus.util.epi_week import epi_week_for_date
from meerkat_abacus.codes import to_codes
class QualityControl(ProcessingStep):
    """Pipeline step that filters and cleans raw form rows before coding.

    Rows can be rejected because of: random sub-sampling, an import
    cut-off date, an exclusion list, an unapproved deviceid, clinic start
    dates, a failing date-to-epi-week conversion, or a failing "import"
    variable check flagged as discard.
    """

    def __init__(self, param_config, session):
        """ Prepare arguments for quality_control

        deviceids: if we should only add rows with one of the deviceids
        start_dates: Clinic start dates, we do not add any data submitted
            before these dates
        quality_control: If we are performing quality control on the data.
        exclusion_list: A list of uuid's that are restricted from entering
        fraction: If present imports a randomly selected subset of data.
        """
        self.step_name = "quality_control"
        self.session = session
        config = {}
        for form in param_config.country_config["tables"]:
            deviceids_case = util.get_deviceids(session, case_report=True)
            deviceids = util.get_deviceids(session)
            start_dates = util.get_start_date_by_deviceid(session)
            exclusion_list = set(util.get_exclusion_list(session, form))
            uuid_field = "meta/instanceID"
            if "tables_uuid" in param_config.country_config:
                uuid_field = param_config.country_config["tables_uuid"].get(
                    form, uuid_field)
            # Case-report forms are restricted to case-report deviceids.
            if form in param_config.country_config["require_case_report"]:
                form_deviceids = deviceids_case
            else:
                form_deviceids = deviceids
            if "no_deviceid" in param_config.country_config and form in param_config.country_config["no_deviceid"]:
                form_deviceids = []
            # Map each "import" variable defined for this form to its test
            # function; the tests implement the quality-control checks.
            quality_control = {}
            quality_control_list = []
            if "quality_control" in param_config.country_config:
                if form in param_config.country_config["quality_control"]:
                    (variables, variable_forms, variable_tests,
                     variables_group, variables_match) = to_codes.get_variables(
                         session, "import")
                    if variables:
                        quality_control_list = [
                            variables["import"][x][x]
                            for x in variables["import"].keys()
                            if variables["import"][x][x].variable.form == form]
                    for variable in quality_control_list:
                        quality_control[variable] = variable.test
            allow_enketo = False
            if form in param_config.country_config.get("allow_enketo", []):
                allow_enketo = param_config.country_config["allow_enketo"][form]
            config[form] = {"uuid_field": uuid_field,
                            "deviceids": form_deviceids,
                            "table_name": form,
                            "only_new": True,
                            "start_dates": start_dates,
                            "quality_control": quality_control,
                            "allow_enketo": allow_enketo,
                            "exclusion_list": exclusion_list,
                            "fraction": param_config.import_fraction,
                            "only_import_after_date": param_config.only_import_after_date,
                            "param_config": param_config}
        self.config = config
        self.param_config = param_config

    def run(self, form, row):
        """
        Does quality control to change any needed data
        and to check that data should be added

        Args:
            form: form_name
            row: data_row
        Returns:
            [] when the row is rejected, otherwise
            [{"form": form, "data": row}] with list values flattened.
        """
        config = self.config[form]
        if self._exclude_by_start_date_or_fraction(row, form):
            return []
        if row[config["uuid_field"]] in config["exclusion_list"]:
            return []
        # Apply the configured quality checks; they may blank or replace
        # individual fields, or discard the whole row.
        remove = self._do_quality_control(row, form)
        if remove:
            return []
        if config["deviceids"]:
            if not should_row_be_added(row, form, config["deviceids"],
                                       config["start_dates"],
                                       self.param_config,
                                       allow_enketo=config["allow_enketo"]):
                return []
        flatten_structure(row)
        return [{"form": form,
                 "data": row}]

    def _exclude_by_start_date_or_fraction(self, row, form):
        """True when the row falls outside the random import fraction or
        was submitted before the configured import cut-off date."""
        if self.config[form]["fraction"]:
            if random.random() > self.config[form]["fraction"]:
                return True
        if self.config[form]["only_import_after_date"]:
            submission_date = parse(row["SubmissionDate"]).replace(tzinfo=None)
            if submission_date < self.config[form]["only_import_after_date"]:
                return True
        return False

    def _do_quality_control(self, insert_row, form):
        """Apply the import variable tests to the row (in place).

        A failing test either discards the row (category ["discard"]) or
        blanks the tested column, optionally replacing its value with the
        value of another column named by a "replace:<column>" category.

        Returns:
            True when the row should be discarded.
        """
        remove = False
        quality_control = self.config[form]["quality_control"]
        if quality_control:
            for variable in quality_control:
                try:
                    if not quality_control[variable](insert_row)['value']:
                        if variable.variable.category == ["discard"]:
                            remove = True
                        else:
                            column = variable.column
                            # Multi-column variables: act on the first column.
                            if ";" in column or "," in column:
                                column = column.split(";")[0].split(",")[0]
                            category = variable.variable.category
                            replace_value = None
                            if category and len(category) > 0 and "replace:" in category[0]:
                                replace_column = category[0].split(":")[1]
                                replace_value = insert_row.get(replace_column,
                                                               None)
                            if column in insert_row:
                                insert_row[column] = replace_value
                except Exception:
                    # logger.exception already records the traceback.
                    logger.exception("Quality control error for code %s",
                                     variable.variable.id)
        return remove
def flatten_structure(row):
    """Replace every list value in *row* with a comma separated string.

    Mutates *row* in place; non-list values are left untouched.
    """
    for field in row:
        current = row[field]
        if isinstance(current, list):
            row[field] = ",".join(current)
def should_row_be_added(row, form_name, deviceids, start_dates, param_config,
                        allow_enketo=False):
    """
    Determines if a data row should be added.

    If deviceids is not None, the record needs to have one of the deviceids.
    If start_dates is not None, the record needs to be dated
    after the corresponding start date.

    Args:
        row: row to be added
        form_name: name of form
        deviceids(list): the approved deviceids
        start_dates(dict): Clinic start dates
        param_config: application config (forwarded to the epi-week check)
        allow_enketo: False, or a list of url fragments identifying
            acceptable enketo submissions
    Returns:
        should_add(Bool)
    """
    ret = False
    device_id = row.get("deviceid", None)
    if deviceids is not None:
        if device_id in deviceids:
            ret = True
        elif allow_enketo and device_id:
            # Bug fix: guard against a missing deviceid, which previously
            # raised TypeError on the substring test below.
            for url in allow_enketo:
                if url in device_id:
                    ret = True
                    break
    else:
        ret = True
    if start_dates and device_id in start_dates:
        if not row["SubmissionDate"]:
            ret = False
        elif parse(
                row["SubmissionDate"]).replace(tzinfo=None) < start_dates[row["deviceid"]]:
            ret = False
    if ret:
        ret = _validate_date_to_epi_week_convertion(form_name, row, param_config)
    return ret
def _validate_date_to_epi_week_convertion(form_name, row, param_config):
    """Check that every relevant date column of the row maps to an epi week.

    Builds one filter per data type configured for the form; for each
    filter that has no condition, or whose condition the row fulfils, the
    row's date column must parse and convert to an epi week.

    Returns:
        False if any applicable date check fails, True otherwise.
    """
    form_data_types = data_types.data_types_for_form_name(form_name,
                                                          param_config=param_config)
    if form_data_types:
        filters = []
        for form_data_type in form_data_types:
            filter = __create_filter(form_data_type)
            filters.append(filter)
        # Each date column is only validated once even if several data
        # types share it (validated_dates is mutated by the callee).
        validated_dates = []
        for filter in filters:
            condition_field_name = filter.get('field_name')
            if not condition_field_name or __fulfills_condition(filter, row):
                if __should_discard_row(row, filter, validated_dates,
                                        param_config=param_config):
                    return False
    return True
def __create_filter(form_data_type):
    """Build a date-validation filter dict from a data type definition.

    Conditional data types carry the condition field and value alongside
    the date column name; unconditional ones only carry the date column.
    """
    condition = form_data_type.get('condition')
    if condition:
        return {
            'field_name': form_data_type['db_column'],
            'value': condition,
            'date_field_name': form_data_type['date']
        }
    return {'date_field_name': form_data_type['date']}
def __fulfills_condition(filter, row):
    """True when the row's value in the filter's condition field matches."""
    field = filter['field_name']
    expected = filter['value']
    return row[field] == expected
def __should_discard_row(row, filter, already_validated_dates, param_config):
    """Return True when the row's date column is empty or unparseable.

    Side effect: appends the checked column name to already_validated_dates
    so repeated filters over the same column are only validated once.
    """
    column_with_date_name = filter['date_field_name']
    if "$" in column_with_date_name:
        # Multi-row date columns use "$" as the row-number placeholder;
        # validate against the first numbered column. Bug fix: the
        # replace() result was previously discarded (strings are
        # immutable), so the placeholder column itself was looked up.
        column_with_date_name = column_with_date_name.replace("$", "1")
    if column_with_date_name in already_validated_dates:
        return False
    already_validated_dates.append(column_with_date_name)
    string_date = row[column_with_date_name]
    if not string_date:
        logger.debug(f"Empty value of date column for row with device_id: {row.get('deviceid')}" +
                     f" and submission date: {row.get('SubmissionDate')}")
        return True
    try:
        date_to_check = parse(string_date).replace(tzinfo=None)
        epi_week_for_date(date_to_check, param_config=param_config.country_config)
    except ValueError:
        logger.debug(f"Failed to process date column for row with device_id: {row.get('deviceid')}" +
                     f" and submission date: {row.get('SubmissionDate')}")
        return True
    return False
| {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
66,002 | fjelltopp/meerkat_abacus | refs/heads/development | /meerkat_abacus/codes/to_codes.py | """
Functionality to turn raw data into codes
"""
import meerkat_abacus.model as model
from meerkat_abacus.codes.variable import Variable
from geoalchemy2.shape import from_shape, to_shape
from shapely.geometry import Point
def get_variables(session, restrict=None, match_on_type=None, match_on_form=None):
    """
    Get the variables out of the db and turn them into Variable classes.

    To speed up the next step of the process we group the variables by
    calculation_group. When both match_on_type and match_on_form are given,
    simple "match" variables for that form/type are pulled out into a fast
    value -> codes lookup table instead of becoming Variable objects.

    Args:
        session: db-session
        restrict: if given, only load variables of this type
        match_on_type: variable type eligible for the match fast path
        match_on_form: form eligible for the match fast path

    Returns:
        tuple: (variables, variable_forms, variable_tests, variables_group,
        match_variables) where variables is {type: {group: {id_pk: Variable}}}
        and match_variables is {db_column: {value: [codes_dict, categories_dict]}}
    """
    if restrict:
        result = session.query(model.AggregationVariables).filter(
            model.AggregationVariables.type == restrict)
    else:
        result = session.query(model.AggregationVariables)
    variables = {}
    variable_forms = {}
    variable_tests = {}
    variables_group = {}
    match_variables = {}

    def _register(row, group):
        # Shared bookkeeping for a normal (non-fast-path) variable.
        variables_group.setdefault(group, [])
        variables_group[group].append(row.id_pk)
        variables.setdefault(row.type, {})
        variables[row.type].setdefault(group, {})
        variables[row.type][group][row.id_pk] = Variable(row)
        variable_forms[row.id_pk] = row.form
        variable_tests[row.id_pk] = variables[row.type][group][row.id_pk].test

    for row in result:
        group = row.calculation_group
        if not group:
            group = row.id_pk
        if match_on_form is not None and match_on_type is not None:
            if (row.method == "match"
                    and row.calculation_priority in ["", None]
                    and row.form == match_on_form
                    and row.type == match_on_type):
                col = row.db_column
                match_variables.setdefault(col, {})
                for raw_value in row.condition.split(","):
                    # BUG FIX: the original setdefault-ed the *stripped* value
                    # but then indexed with the raw one, raising KeyError for
                    # conditions like "a, b"; use the stripped key throughout.
                    value = raw_value.strip()
                    match_variables[col].setdefault(value, [{}, {}])
                    match_variables[col][value][0][row.id] = 1
                    if row.alert and row.alert_type == "individual":
                        match_variables[col][value][0]["alert"] = 1
                        match_variables[col][value][0]["alert_reason"] = row.id
                        match_variables[col][value][0]["alert_type"] = "individual"
                    for c in row.category:
                        match_variables[col][value][1][c] = row.id
            else:
                _register(row, group)
        else:
            _register(row, group)
    return (variables, variable_forms, variable_tests,
            variables_group, match_variables)
# Maps a variable's multiple_link method name to the list index used to pick
# which linked record to test: "last" -> datum[-1], "first" -> datum[0].
multiple_method = {"last": -1, "first": 0}
def to_code(row, variables, locations, data_type, alert_data,
            mul_forms, location):
    """
    Takes a row and transforms it into a data row.

    We iterate through each variable and add variable_id: test_outcome to the
    data.variable json dictionary if test_outcome is True. To speed this up
    the variables are divided into groups where only one variable can apply
    to a given record: as soon as one variable in such a group matches, the
    rest of the group is skipped — unless the group uses calculation
    priorities, in which case every member is tested and the best
    (numerically lowest) priority match wins.

    Args:
        row: row of raw data
        variables: 5-tuple as returned by get_variables()
        locations: 6-tuple (locations, locations_by_deviceid, zones, regions,
            districts, devices)
        data_type: type of row data e.g. case
        alert_data: dict of form -> {name: column}; for each alert we store
            row[form][column] under "alert_" + name
        mul_forms: set of linked (multiple) forms for the row
        location: location spec string, either "deviceid[:column[:prefix]]"
            or "in_geometry$lat_field,lng_field"

    Returns:
        tuple: (variable_json, categories, ret_location, disregard), or
        (None, None, None, None) if no location could be resolved.
    """
    main_form = row["original_form"]
    locations, locations_by_deviceid, zones, regions, districts, devices = locations
    if "deviceid" in location:
        # Resolve the clinic through the device registry; the spec may name a
        # different column and an optional prefix ("deviceid:column:prefix").
        column = "deviceid"
        prefix = ""
        if ":" in location:
            splitted = location.split(":")
            column = location.split(":")[1]
            if len(splitted) == 3:
                prefix = location.split(":")[2]
        clinic_id = locations_by_deviceid.get(prefix + row[main_form][column],
                                              None)
        if not clinic_id:
            return (None, None, None, None)
        clinic_gps = None
        if locations[clinic_id].point_location is not None:
            clinic_gps = locations[clinic_id].point_location.desc
        deviceid = row[main_form].get("deviceid")
        ret_location = {
            "clinic": clinic_id,
            "clinic_type": locations[clinic_id].clinic_type,
            "case_type": locations[clinic_id].case_type,
            "tags": devices.get(deviceid),
            "country": 1,
            "device_id": deviceid,
            "geolocation": clinic_gps
        }
        # Walk up the location tree: clinic -> district -> region -> zone.
        if locations[clinic_id].parent_location in districts:
            ret_location["district"] = locations[clinic_id].parent_location
            ret_location["region"] = (
                locations[ret_location["district"]].parent_location)
            ret_location["zone"] = (
                locations[ret_location["region"]].parent_location)
        elif locations[clinic_id].parent_location in regions:
            ret_location["district"] = None
            ret_location["region"] = locations[clinic_id].parent_location
            ret_location["zone"] = (
                locations[ret_location["region"]].parent_location)
        else:
            ret_location["district"] = None
            ret_location["region"] = None
            ret_location["zone"] = None
        # Enrich the raw form with clinic metadata so variables can test it.
        row[main_form]["clinic_type"] = locations[clinic_id].clinic_type
        row[main_form]["service_provider"] = locations[clinic_id].service_provider
        if locations[clinic_id].other:
            for key in locations[clinic_id].other.keys():
                row[main_form][key] = locations[clinic_id].other[key]
    elif "in_geometry" in location:
        # Resolve the location by point-in-polygon against district areas.
        fields = location.split("$")[1].split(",")
        try:
            point = Point(float(row[main_form][fields[0]]),
                          float(row[main_form][fields[1]]))
            found = False
            for loc in locations.values():
                if loc.level == "district":
                    if loc.area is not None and to_shape(loc.area).contains(point):
                        ret_location = {
                            "clinic": None,
                            "clinic_type": None,
                            "case_type": None,
                            "tags": None,
                            "country": 1,
                            "district": loc.id,
                            "region": locations[loc.parent_location].id,
                            "geolocation": from_shape(point).desc
                        }
                        found = True
                        break
            if not found:
                print("Not Found")
                return (None, None, None, None)
        except ValueError:
            print("Value Error in point in polygon location")
            return (None, None, None, None)
    else:
        return (None, None, None, None)
    variables, variable_forms, variable_tests, variables_group, match_variables = variables
    variable_json = {}
    categories = {}
    # Fast path: direct value matches prepared by get_variables().
    for column in match_variables:
        row_value = row[main_form].get(column, None)
        if row_value not in ("", None):
            codes, cats = match_variables[column].get(row_value, [{}, {}])
            variable_json.update(codes)
            categories.update(cats)
    if "alert" in variable_json:
        # A match variable raised an individual alert: attach its data columns.
        # BUG FIX: the original read row[location_form], but no name
        # location_form exists in this function (NameError at runtime); the
        # alert data lives on the main form, exactly as in the loop below.
        for data_var in alert_data[main_form].keys():
            variable_json["alert_" + data_var] = row[
                main_form][alert_data[main_form][data_var]]
    disregard = False
    for group in variables.get(data_type, {}).keys():
        group_vars = variables[data_type][group]
        # Flag for whether the group uses a priority system. A priority system
        # allows values with better priority order to overwrite worse ones.
        # Only the first entry is inspected — the original loop broke out on
        # both branches, so this mirrors that behaviour.
        priority_flag = False
        for v in group_vars:
            if hasattr(group_vars[v], "calculation_priority") and \
               group_vars[v].calculation_priority not in ('', None):
                priority_flag = True
                intragroup_priority = 0  # best priority seen so far (0 = none)
                current_group_variable = None
                break
            else:
                break
        # v is the primary key of the AggregationVariables table, not the
        # string id that the data table refers to variables with.
        for v in variables_group[group]:
            var = group_vars[v]
            form = variable_forms[v]
            datum = row.get(form, None)
            if datum:
                if form in mul_forms:
                    # Linked form with multiple records: combine per the
                    # variable's multiple_link method.
                    method = var.variable.multiple_link
                    if method in ["last", "first"]:
                        data = datum[multiple_method[method]]
                        test_outcome = var.test(data)
                    elif method == "count":
                        test_outcome = {"applicable": 1,
                                        "value": len(datum)}
                    elif method == "any":
                        test_outcome = {"applicable": 0,
                                        "value": 0}
                        for d in datum:
                            test_outcome = var.test(d)
                            if test_outcome:
                                break
                    elif method == "all":
                        # NOTE(review): this sets an int where every other
                        # branch yields a dict, so the ["applicable"] access
                        # below would raise for "all" — verify before relying
                        # on this method. Behaviour preserved as-is.
                        test_outcome = 1
                        for d in datum:
                            t_o = var.test(d)
                            if not t_o:
                                test_outcome = 0
                                break
                else:
                    test_outcome = variable_tests[v](datum)
                if test_outcome["applicable"]:
                    if test_outcome["value"] == 1:
                        # Store an integer rather than a boolean.
                        test_outcome["value"] = 1
                    # String key the data table uses for this variable.
                    variable_string_key = var.variable.id
                    if priority_flag:
                        if intragroup_priority == 0:
                            # First applicable variable in the group.
                            variable_json[variable_string_key] = test_outcome["value"]
                            intragroup_priority = int(var.calculation_priority)
                            current_group_variable = variable_string_key
                        elif intragroup_priority > int(var.calculation_priority):
                            # Better (numerically lower) priority wins:
                            # replace the previously stored group value.
                            del variable_json[current_group_variable]
                            variable_json[variable_string_key] = test_outcome["value"]
                            intragroup_priority = int(var.calculation_priority)
                            current_group_variable = variable_string_key
                        # Otherwise keep the existing, better-priority value.
                    else:
                        variable_json[variable_string_key] = test_outcome["value"]
                    for cat in var.variable.category:
                        categories[cat] = var.variable.id
                    if var.variable.alert:
                        if var.variable.alert_type == "individual":
                            variable_json["alert"] = 1
                            variable_json["alert_type"] = "individual"
                            variable_json["alert_reason"] = var.variable.id
                            for data_var in alert_data[row["original_form"]].keys():
                                variable_json["alert_" + data_var] = row[
                                    main_form].get(alert_data[row["original_form"]][data_var])
                    if var.variable.disregard:
                        disregard = True
                    if not priority_flag:
                        # Group members are mutually exclusive: stop at the
                        # first match. Priority groups test every member.
                        break
    if disregard and variable_json.get("alert_type", None) != "individual":
        disregard = False
    return (variable_json, categories, ret_location, disregard)
| {"/meerkat_abacus/pipeline_worker/tests/test_add_multiple_alerts.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/send_alerts_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/pipeline.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py", "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py", "/meerkat_abacus/pipeline_worker/process_steps/add_links.py", "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py", "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py", "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py", "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py"], "/meerkat_abacus/consul_export.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/util/authenticate.py", "/meerkat_abacus/model.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/util/fake_data_to_form.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/epi_week.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/util/create_fake_data.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_initial_visit_control.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/tests/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/write_to_db.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", 
"/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/__init__.py": ["/meerkat_abacus/__init__.py"], "/meerkat_abacus/util/__init__.py": ["/meerkat_abacus/model.py", "/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_codes_step.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/tests/test_to_data_type.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/tests/variable_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/to_codes_test.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/codes/to_codes.py", "/meerkat_abacus/codes/variable.py"], "/meerkat_abacus/pipeline_worker/tests/test_quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/pipeline_worker/celery_app.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/processing_tasks.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_data_type.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/model.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_links.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/__init__.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/consumer/database_setup.py", "/meerkat_abacus/config.py"], 
"/meerkat_abacus/util/data_types.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/processing_tasks.py": ["/meerkat_abacus/pipeline_worker/pipeline.py", "/meerkat_abacus/config.py", "/meerkat_abacus/pipeline_worker/celery_app.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/consumer/consumer.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/codes/variable.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/consumer/get_data.py": ["/meerkat_abacus/util/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/tests/test_add_links.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/consumer/database_setup.py"], "/meerkat_abacus/consumer/database_setup.py": ["/meerkat_abacus/config.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/send_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/__init__.py": ["/meerkat_abacus/config.py"], "/meerkat_abacus/pipeline_worker/process_steps/add_multiple_alerts.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/to_codes.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/initial_visit_control.py": ["/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/__init__.py"], "/meerkat_abacus/pipeline_worker/process_steps/quality_control.py": ["/meerkat_abacus/__init__.py", "/meerkat_abacus/util/__init__.py", "/meerkat_abacus/pipeline_worker/process_steps/__init__.py", "/meerkat_abacus/util/epi_week.py"], "/meerkat_abacus/codes/to_codes.py": 
["/meerkat_abacus/model.py", "/meerkat_abacus/codes/variable.py"]} |
66,003 | elmohndes/gui-python | refs/heads/master | /face.py | import tkinter as tk
import tkFont
from sys import exit
from add import ADD
from search import SEARCH
class FACE:
    """Main menu window: a welcome label plus "Search" and "Add" buttons."""

    def __init__(self):
        # Build the whole window and block in Tk's event loop.
        self.root1 = tk.Tk()
        self.root1.minsize(500, 200)
        self.root1.title("Abdulrahman")
        # columnconfigure / rowconfigure make the grid cells resizable.
        self.root1.columnconfigure((5, 9), weight=1)
        self.root1.rowconfigure((3, 7), weight=1)
        self.font1 = tkFont.Font(size=20, weight='bold')
        self.label1 = tk.Label(self.root1, text='Welcome to Awlad Elsheikh',
                               font=self.font1)
        self.label1.grid(row=3, column=5,
                         columnspan=10, sticky=tk.E + tk.W)
        self.button1 = tk.Button(self.root1, text="Search",
                                 font=self.font1, command=self.searchFun)
        self.button1.grid(row=7, column=5,
                          columnspan=3, sticky=tk.EW + tk.NS)
        self.button2 = tk.Button(self.root1, text="Add",
                                 font=self.font1, command=self.addFun)
        self.button2.grid(row=7, column=9,
                          columnspan=3, sticky=tk.EW + tk.NS)
        # mainloop() blocks here until the window is closed.
        self.root1.mainloop()

    def searchFun(self):
        # Open the search window (SEARCH runs its own Tk mainloop).
        obj = SEARCH()

    def addFun(self):
        # Open the add-item window (ADD runs its own Tk mainloop).
        obj = ADD()
| {"/main.py": ["/face.py"]} |
66,004 | elmohndes/gui-python | refs/heads/master | /search.py | import tkinter as tk
import tkFont
class SEARCH:
    """Search window: looks up item records in the flat file 'file'.

    Records are tab-separated lines "item\tprice\tquantity"; every match is
    shown in four read-only Text columns (item, price, quantity, total).
    """

    def __init__(self):
        self.root2 = tk.Tk()
        self.root2.title('Search')
        self.root2.minsize(1366, 300)
        # <Return> triggers the same handler as the Search button.
        self.root2.bind('<Return>', self.find)
        self.font2 = tkFont.Font(size=20)
        self.font4 = tkFont.Font(size=10)
        self.entry = tk.Entry(self.root2, font=self.font2, width=150)
        self.entry.grid(row=0, column=0, columnspan=7,
                        sticky=tk.EW + tk.NS)
        self.button = tk.Button(self.root2, text='Search', width=10,
                                font=self.font2, command=self.find)
        self.button.grid(row=0, column=7, columnspan=3,
                         sticky=tk.EW + tk.NS)
        self.root2.columnconfigure((0, 7, 8, 9), weight=1)
        self.text_display()
        self.root2.mainloop()

    def text_display(self):
        """Create the column headers and the four read-only result columns."""
        lab1 = tk.Label(self.root2, text="Items", font=self.font2)
        lab2 = tk.Label(self.root2, text="Price", font=self.font4, width=3)
        lab3 = tk.Label(self.root2, text="Quantity", font=self.font4, width=3)
        lab4 = tk.Label(self.root2, text="Total Price", font=self.font4, width=3)
        lab1.grid(row=1, column=0, columnspan=7, sticky=tk.EW + tk.NS)
        lab2.grid(row=1, column=7, sticky=tk.EW + tk.NS)
        lab3.grid(row=1, column=8, sticky=tk.EW + tk.NS)
        lab4.grid(row=1, column=9, sticky=tk.EW + tk.NS)
        self.text1 = tk.Text(self.root2,
                             takefocus=0, state='disabled', border=0,
                             wrap='none', bg='khaki', font=self.font4)
        self.text1.grid(row=2, column=0, columnspan=7, sticky=tk.EW + tk.NS)
        self.text2 = tk.Text(self.root2,
                             takefocus=0, state='disabled', border=0,
                             wrap='none', bg='khaki')
        self.text2.grid(row=2, column=7, sticky=tk.EW + tk.NS)
        self.text3 = tk.Text(self.root2,
                             takefocus=0, state='disabled', border=0,
                             wrap='none', bg='khaki')
        self.text3.grid(row=2, column=8, sticky=tk.EW + tk.NS)
        self.text4 = tk.Text(self.root2,
                             takefocus=0, state='disabled', border=0,
                             wrap='none', bg='khaki')
        self.text4.grid(row=2, column=9, sticky=tk.EW + tk.NS)

    def find(self, event=None):
        """Search the file for the entered item name and display every match."""
        self.var = 0  # match flag: error() reports "not found" while it is 0
        self.clear()  # here we clear the value of the last operation
        find_type = self.entry.get()  # we get the new inserted value
        # BUG FIX: the original opened the file and never closed it, leaking a
        # handle on every search; the context manager closes it reliably.
        with open('file') as f:
            for line in f:  # we search for the value in the file
                line_content = line.split('\t')
                if find_type == line_content[0]:
                    self.display(line_content)  # if found, display it
                    self.var = 1
        self.error()  # reports an error if nothing was found

    def display(self, show):
        """Append one record [item, price, quantity] to the result columns."""
        self.text1.config(state='normal')
        self.text2.config(state='normal')
        self.text3.config(state='normal')
        self.text4.config(state='normal')
        try:
            show[1] = int(show[1])
            show[2] = int(show[2])
            self.text4.insert(1.0, show[1] * show[2])
        except ValueError:
            self.text1.insert(1.0, 'quantity or price was not inserted')
            # BUG FIX: the original returned here with all four widgets left
            # editable; restore the read-only state before bailing out.
            self.text1.config(state='disabled')
            self.text2.config(state='disabled')
            self.text3.config(state='disabled')
            self.text4.config(state='disabled')
            return
        self.text1.insert(1.0, show[0])
        self.text1.insert(1.0, '\n')
        self.text2.insert(1.0, show[1])
        self.text2.insert(1.0, '\n')
        self.text3.insert(1.0, show[2])
        self.text3.insert(1.0, '\n')
        self.text4.insert(1.0, '\n')
        self.text1.config(state='disabled')
        self.text2.config(state='disabled')
        self.text3.config(state='disabled')
        self.text4.config(state='disabled')

    def clear(self):
        """Empty all four result columns, restoring their read-only state."""
        self.text1.config(state='normal')
        self.text2.config(state='normal')
        self.text3.config(state='normal')
        self.text4.config(state='normal')
        self.text1.delete(1.0, tk.END)
        self.text2.delete(1.0, tk.END)
        self.text3.delete(1.0, tk.END)
        self.text4.delete(1.0, tk.END)
        self.text1.config(state='disabled')
        self.text2.config(state='disabled')
        self.text3.config(state='disabled')
        self.text4.config(state='disabled')

    def error(self):
        """Show a not-found message if the last search had no matches."""
        if self.var == 0:
            self.clear()
            self.text1.config(state='normal')
            self.text1.insert(1.0, "sorry we didn't find the Item you search for")
            self.text1.config(state='disabled')
| {"/main.py": ["/face.py"]} |
66,005 | elmohndes/gui-python | refs/heads/master | /main.py | from face import FACE
# Script entry point: constructing FACE opens the main window and blocks in
# Tk's mainloop until the window is closed.
app = FACE()
| {"/main.py": ["/face.py"]} |
66,006 | elmohndes/gui-python | refs/heads/master | /add.py | import tkinter as tk
import tkFont
import time
class ADD:
    """Window for appending an item (name, price, quantity) to the flat file
    'file' as one tab-separated line."""

    def __init__(self):
        self.root3 = tk.Tk()
        self.root3.title("Add")
        self.root3.minsize(1366, 300)
        # <Return> triggers the same handler as the Add button.
        self.root3.bind('<Return>', self.enter)
        self.font3 = tkFont.Font(size=20)
        self.font5 = tkFont.Font(size=30, weight='bold')
        self.display()
        self.root3.mainloop()

    def display(self):
        """Lay out the three labelled entry fields and the Add button."""
        label1 = tk.Label(self.root3, text="Item", font=self.font3, width=50)
        label2 = tk.Label(self.root3, text="Price", font=self.font3, width=10)
        label3 = tk.Label(self.root3, text="Quantity", font=self.font3, width=11)
        label1.grid(row=2, column=1, columnspan=9, sticky=tk.EW)
        label2.grid(row=2, column=10, sticky=tk.EW)
        label3.grid(row=2, column=11, sticky=tk.EW)
        self.entry1 = tk.Entry(self.root3, font=('calibri', 20), width=58)
        self.entry2 = tk.Entry(self.root3, font=('calibri', 20), width=10)
        self.entry3 = tk.Entry(self.root3, font=('calibri', 20), width=11)
        self.entry1.grid(row=3, column=1, columnspan=9,
                         sticky=tk.EW + tk.NS)
        self.entry2.grid(row=3, column=10, sticky=tk.EW + tk.NS)
        self.entry3.grid(row=3, column=11, sticky=tk.EW + tk.NS)
        self.button = tk.Button(self.root3, text="Add", font=self.font5,
                                width=25, command=self.enter)
        self.button.grid(row=6, column=5, columnspan=5, pady=100,
                         sticky=tk.EW + tk.NS)
        # Original author's note: increment/decrement of quantity and price
        # would require rewriting the whole file, so it is deferred until the
        # storage moves to a real database.

    def enter(self, var=None):
        """Append the three entry values as one tab-separated line, then clear."""
        # Context manager guarantees the handle is closed even if a write fails
        # (the original closed it manually, which leaks on error).
        with open('file', 'a+') as foo:
            foo.write(self.entry1.get())
            foo.write('\t')
            foo.write(self.entry2.get())
            foo.write('\t')
            foo.write(self.entry3.get())
            foo.write('\n')
        # NOTE(review): this blocks the Tk event loop for two seconds —
        # presumably meant as user feedback; consider root3.after() instead.
        time.sleep(2)
        self.clear()

    def clear(self):
        """Empty all three entry fields."""
        self.entry1.delete(0, tk.END)
        self.entry2.delete(0, tk.END)
        self.entry3.delete(0, tk.END)
| {"/main.py": ["/face.py"]} |
66,010 | joereddington/watson | refs/heads/master | /session.py | class Session(object):
project = "Unknown"
start = ""
end = ""
content = ""
def __init__(self, project, start, end, content):
self.project, self.start, self.end = project, start, end
def length(self):
return (self.end-self.start)
def __str__(self):
return " {} to {} ({})".format(
self.start.strftime("%d/%m/%y %H:%M"), self.end.strftime("%H:%M"), str(self.length())[:-3])
| {"/watson.py": ["/timechart.py", "/calendar_helper_functions.py", "/entry.py"], "/history_list.py": ["/entry.py"], "/test_dr_watson.py": ["/calendar_helper_functions.py", "/watson.py", "/command_list.py", "/entry.py"], "/command_list.py": ["/entry.py"]} |
66,011 | joereddington/watson | refs/heads/master | /watson.py | #!/usr/bin/python
import re
import sys
import math
import pytz
import calendar_helper_functions as icalhelper
import glob
import datetime
import argparse
import os
import json
import timechart
from session import Session
from atom import Atom
#Todo:
# (C) log file to atoms should take content rather than a filename
# strptime/strftime layout for Watson timestamps, e.g. "11/07/10 10:00".
__TIME_FORMAT = "%d/%m/%y %H:%M"
max_dist_between_logs = 15 # in minutes TODO these should be arguments for different types of input.
min_session_size = 15 # in minutes; sessions shorter than this are discarded
def setup_argument_list():
    """Build Watson's command-line interface and parse sys.argv."""
    arg_parser = argparse.ArgumentParser(description="manages Watson")
    arg_parser.add_argument(
        "action",
        help="What to do/display: options are 'sort', 'now', and 'sleep'")
    arg_parser.add_argument(
        '-d', nargs="?",
        help="Show only tasks that are at least this many days old")
    arg_parser.add_argument(
        '-v', dest='verbatim', action='store_true', help='Verbose mode')
    arg_parser.set_defaults(verbatim=False)
    return arg_parser.parse_args()
# Summary ######################################################################
def output_sessions_as_projects(sessions):
    # Print one report per distinct project (via projectreport, honouring the
    # global args.verbatim flag) followed by the grand total; returns the
    # total as a timedelta.
    total_time = sum([entry.length()
                      for entry in sessions], datetime.timedelta())
    projects = list(set([entry.project for entry in sessions]))
    for project in projects:
        projectreport(project, sessions, args.verbatim)
    print "Total project time".ljust(45)+str(total_time)
    return total_time
def output_sessions_as_account(sessions):
    # Print "time: project" lines sorted by total time (ascending), followed
    # by the grand total; returns the total as a timedelta.
    total_time = sum([entry.length()
                      for entry in sessions], datetime.timedelta())
    projects = {}
    # Accumulate total time per project name.
    for session in sessions:
        if session.project in projects:
            projects[session.project] += session.length()
        else:
            projects[session.project] = session.length()
    # Sort by (time, name) so the biggest time sinks appear last.
    for key, value in sorted(projects.iteritems(), key=lambda (k,v): (v,k)):
        print "%s: %s" % (value, key)
    print "Total project time".ljust(45)+str(total_time)
    return total_time
def projectreport(name, sessions, verbose):
    # Print a report for one project: a header plus every session when
    # verbose, otherwise a single "name: total" line. Returns the project's
    # total time as a timedelta.
    project_sessions = [ entry for entry in sessions if ( entry.project == name)]
    total_time = sum([entry.length() for entry in project_sessions], datetime.timedelta())
    if verbose:
        # str(total_time)[:-3] drops the ":SS" suffix.
        print "#### {}\n\nTotal Time on this project: {}\n".format(name.strip().ljust(65), str(total_time)[:-3])
        for entry in project_sessions:
            print entry
    else:
        print "{}: {}".format(name.strip().ljust(45), total_time)
    return total_time
def sleep_report(project_sessions):
    # Print every sleep session followed by summary statistics: total time,
    # average length, circular-mean wake time, and the population standard
    # deviation of session length. Returns the total as a timedelta.
    for entry in project_sessions:
        print entry
    total_time = sum([entry.length() for entry in project_sessions], datetime.timedelta())
    average_time = avg_time([entry.length() for entry in project_sessions])
    # Wake times as "HH:MM:SS" strings (the time-of-day part of the session
    # end); mean_time averages them on the clock face so times either side of
    # midnight combine sensibly.
    wake_list = [str(entry.end)[11:] for entry in project_sessions]
    st_dev_length = st_dev([entry.length() for entry in project_sessions])
    print "\n\nTotal Sleep Time: {}".format(str(total_time)[:-3])
    print "Average Sleep Time: {}".format(str(average_time))
    print "Average Wake Time: {}".format(mean_time(wake_list))
    print "ST-dev for average: {}".format(str(st_dev_length))
    return total_time
from cmath import rect, phase
from math import radians, degrees
def mean_angle(deg):
    """Circular mean of *deg* (angles in degrees), returned in degrees."""
    # Average the unit vectors on the complex plane, then take the argument.
    unit_vectors = (rect(1, radians(angle)) for angle in deg)
    return degrees(phase(sum(unit_vectors) / len(deg)))
def mean_time(times):
    """Circular mean of "HH:MM:SS" strings, returned as an "HH:MM:SS" string.

    Each time of day is mapped to an angle on a 24-hour clock face so values
    either side of midnight average correctly (e.g. 23:00 and 01:00 -> 00:00).
    """
    day = 24 * 60 * 60
    angles = []
    for stamp in times:
        h, m, s = stamp.split(':')
        seconds = float(s) + int(m) * 60 + int(h) * 3600
        angles.append(seconds * 360. / day)
    mean_seconds = mean_angle(angles) * day / 360.
    if mean_seconds < 0:
        # phase() returns (-180, 180], so wrap negatives back into the day.
        mean_seconds += day
    h, m = divmod(mean_seconds, 3600)
    m, s = divmod(m, 60)
    return '%02i:%02i:%02i' % (h, m, s)
def avg_time(datetimes):
    """Arithmetic mean of a list of timedeltas, as a timedelta."""
    seconds = [delta.total_seconds() for delta in datetimes]
    return datetime.timedelta(seconds=sum(seconds) / len(seconds))
def st_dev(datetimes):
    """Population standard deviation of a list of timedeltas, as a timedelta."""
    seconds = [delta.total_seconds() for delta in datetimes]
    mean = sum(seconds) / len(seconds)
    # Mean of squared deviations, then the square root.
    variance = sum((value - mean) ** 2 for value in seconds) / len(seconds)
    return datetime.timedelta(seconds=math.sqrt(variance))
def days_old(session):
    """Whole days elapsed since the midnight before *session* started."""
    midnight = session.start.replace(hour=0, minute=0, second=0, microsecond=0)
    return (datetime.datetime.now() - midnight).days
########## Processing ##########
def get_sessions(atoms):
    # Turn a chronological list of Atoms into Sessions. Two phases:
    # 1) group consecutive atoms; 2) keep only groups that are long enough.
    if len(atoms)==0:
        return []
    # Sentinel far in the past so the first atom always starts a new group.
    last= datetime.datetime.strptime( "11/07/10 10:00", __TIME_FORMAT)
    lasttitle=atoms[0].title
    current = atoms[0].get_S()
    grouped_timevalues=[]
    current_group=[]
    # Phase 1: group atoms such that every atom starts within
    # max_dist_between_logs minutes of the *previous* atom's end (not of any
    # end, as originally intended), goes forward in time, and shares a title.
    for current in atoms:
        if ((current.get_S()-last) > datetime.timedelta( minutes=max_dist_between_logs)):
            grouped_timevalues.append(current_group)
            current_group=[current]
        elif (current.get_S() <last): #preventing negative times being approved...
            grouped_timevalues.append(current_group)
            current_group=[current]
        elif (current.title != lasttitle): # a title change also ends the group
            grouped_timevalues.append(current_group)
            current_group=[current]
        last = current.get_E()
        lasttitle=current.title
        # NOTE(review): when a new group was just started with [current], this
        # unconditional append duplicates that first atom in the group. The
        # session start/end times are unaffected (min/max over the group), but
        # Session.content carries the duplicate — confirm before relying on it.
        current_group.append(current)
    grouped_timevalues.append(current_group)
    # Phase 2: keep only groups spanning more than min_session_size minutes.
    sessions=[]
    for i in grouped_timevalues:
        if i:
            if ((get_latest_end(i)-get_earliest_start(i)) >datetime.timedelta(minutes=min_session_size)):
                sessions.append(Session(i[0].title,get_earliest_start(i),get_latest_end(i),i))
    return sessions
def get_latest_end(atoms):
    """Return the latest end time among *atoms* (assumes a non-empty list)."""
    return max(atom.get_E() for atom in atoms)
def get_earliest_start(atoms):
    """Return the earliest start time among *atoms* (assumes a non-empty list).

    BUG FIX: the original stored atom.get_E() (the *end* time) whenever it
    found an earlier start, so it could return an end timestamp instead of the
    earliest start.
    """
    earliest = atoms[0].get_S()
    for atom in atoms:
        if atom.get_S() < earliest:
            earliest = atom.get_S()
    return earliest
def get_atom_clusters(atomsin):
    # Pick out atoms logged in rapid succession (consecutive readings in the
    # same minute — presumably frequent heart-rate samples indicating
    # exercise; TODO confirm against the data source), retitle them
    # "Exercise" (mutating the input atoms) and return them.
    atoms=[]
    lastatom=atomsin[0]
    for atom in atomsin:
        # Compare only when both timestamps share the same "HH:M" prefix —
        # NOTE(review): [:4] covers hour plus the tens digit of the minute;
        # looks like a coarse same-bucket guard, verify intent.
        if atom.start[:4]== lastatom.start[:4]:
            atom_minutes=int(atom.start[0:2])*60+int(atom.start[3:5])
            lastatom_minutes=int(lastatom.start[0:2])*60+int(lastatom.start[3:5])
            difference=atom_minutes-lastatom_minutes
            if difference<1:
                atom.title="Exercise"
                atoms.append(atom)
        lastatom=atom
    return atoms
def make_exercise_file(args,atoms):
    # Tag rapid-reading clusters as "Exercise", group them into sessions and
    # write a timechart graph named "exercise". Returns the sessions.
    sessions=get_sessions(get_atom_clusters(atoms))
    timechart.graph_out(sessions,"exercise")
    return sessions
def make_sleep_file(args, atoms):
    """Group heart-rate atoms into awake sessions and invert them into the
    sleep periods between them.

    Temporarily widens the module-level grouping thresholds (sessions must be
    at least an hour long; gaps up to four hours still count as one session)
    and restores them afterwards.
    """
    global max_dist_between_logs
    global min_session_size
    saved_gap = max_dist_between_logs
    saved_min = min_session_size
    min_session_size = 60  # in minutes
    max_dist_between_logs = 240
    # BUG FIX: restore the module thresholds even if grouping raises, so one
    # failure cannot poison every later get_sessions() call.
    try:
        sessions = get_sessions(atoms)
        sessions = invert_sessions(sessions)
    finally:
        max_dist_between_logs = saved_gap
        min_session_size = saved_min
    return sessions
def make_projects_file(vision_dir, name):
    # Collect atoms from every markdown log under vision_dir, group them into
    # sessions and write a timechart graph with the given name.
    atoms=[]
    for file in glob.glob(vision_dir+"/*.md"):
        atoms.extend(log_file_to_atoms(file))
    sessions=get_sessions(atoms)
    timechart.graph_out(sessions,name)
    return sessions
def cut(atoms, start, end):
    """Return the atoms whose start time falls strictly between *start* and
    *end* (both "DD-Mon-YYYY HH:MM" strings)."""
    TF = "%d-%b-%Y %H:%M"
    window_open = datetime.datetime.strptime(start, TF)
    window_close = datetime.datetime.strptime(end, TF)
    return [atom for atom in atoms
            if window_open < atom.get_S() < window_close]
def invert_sessions(sessions):
    """Return the gaps *between* consecutive sessions as Session objects
    (e.g. the sleep periods between awake sessions).

    BUG FIX: the original also emitted a first "gap" running from
    sessions[0].end back to sessions[0].start — a negative-length interval
    that skewed sleep totals and averages. The result now contains exactly the
    len(sessions) - 1 intervals between consecutive sessions.
    """
    new_sessions = []
    for previous, current in zip(sessions, sessions[1:]):
        new_sessions.append(
            Session(current.project, previous.end, current.start,
                    current.content))
    return new_sessions
########## Input ##########
def log_file_to_atoms(filename, title=None):
    """Parse a "######"-delimited Markdown journal into Atom objects.

    Each entry's header line is "date, optional-title" or just a time;
    entries without a date inherit the most recently seen date.  The file
    may start with a "title: ..." line which overrides the default title
    (the filename).
    """
    if title==None:
        title=filename
    content=icalhelper.get_content(filename)
    if "title" in content[0]:
        # "title: Foo" header — [7:] skips the "title: " prefix.
        title=content[0][7:].strip()
    entries="\n".join(content).split("######")
    atoms=[]
    lastdate="01/01/10"
    date=""
    entries=entries[1:]
    for e in entries:
        atom=Atom()
        lines=e.split("\n",1)
        # atom.content="\n".join(lines[1:]).strip()+"\n"
        atom.content=lines[1]
        atom.title=title
        datetitle= e.split("\n")[0]
        date= datetitle.split(",")[0]
        # An optional per-entry title follows the first comma.
        if(len( datetitle.split(","))>1):
            postitle= datetitle.split(",")[1]
            if len(postitle)>2:
                atom.title=postitle
        # Normalise ISO-ish years and strip seconds / timezone suffixes
        # that some exports include.
        date=date.replace("2016-","16 ")
        date=date.replace("2017-","17 ")
        date=re.sub(r":[0-9][0-9] GMT","",date)
        date=re.sub(r":[0-9][0-9] BST","",date)
        date=re.sub(r"to [0-9][0-9]/../..","to",date)
        if date.find("/")>0: #Then we have both date and time.
            newdate=date[:9].strip()
            atom.start=date[9:9+15].strip()
            atom.date=newdate
            lastdate=newdate
        else:
            atom.start=date.strip()
            atom.date=lastdate
        if "to" in atom.start:
            #Then it was a 'to' construct and has a start and end time
            atom.end = atom.start[9:]
            atom.start = atom.start[:5]
        else:
            atom.end=atom.start
            atom.start=atom.start[:5]
        # Truncate to "HH:MM" in all cases.
        atom.end=atom.end[:5]
        atoms.append(atom)
    return atoms
def heartrate_to_atoms(filename):
    """Parse a heart-rate CSV into Atom objects.

    Row format: "01-May-2017 23:46,01-May-2017 23:46,69.0".  Relies on the
    module-level ``args``: when ``args.d`` is set, only roughly the most
    recent ``args.d`` days (about 1500 readings per day) are kept.  The
    first parsed row is discarded afterwards (the CSV header).
    """
    TF = "%d-%b-%Y %H:%M"
    timestamplength=len("01-May-2017 23:46")
    datelength=len("01-May-2017")
    content=icalhelper.get_content(filename)
    # Bug fix: the original tested args.d twice in a row
    # (`if (args.d): if args.d:`); a single check is behaviour-identical.
    if args.d:
        index=int(args.d)*1500
        content=content[len(content)-index:]
    atoms=[]
    for a in content:
        start=a[datelength+1:timestamplength]
        date=a[:datelength]
        end=a[timestamplength+1+datelength+1:(timestamplength*2)+1]
        # Labeling every reading "Sleep" is deliberate: the later session
        # inversion relies on the shared title.
        atoms.append(Atom(start,end,date,"Sleep","Alive",TF))
    atoms.pop(0)
    return atoms
def desktop_tracking_file_to_atoms(filename,tag="mail"):
    """Parse a desktop-tracking log, keeping only lines containing *tag*.

    Line layout (by column): [2:4]=yy [5:7]=mm [8:10]=dd [11:16]=HH:MM,
    [19:] = window title / content.

    Bug fix: the Atom title was hard-coded to "mail" even when a different
    *tag* was requested; the tag is now used as the title.  Callers using
    the default tag are unaffected.
    """
    content=icalhelper.get_content(filename)
    matchingcontent= [line for line in content if (tag in line )]
    TF = "%d/%m/%y %H:%M"
    atoms=[]
    for line in matchingcontent:
        content=line[19:]
        start=line[11:16]
        end=line[11:16]
        date=line[8:10]+"/"+line[5:7]+"/"+line[2:4]
        atoms.append(Atom(start,end,date,tag,content,TF))
    return atoms
def commandline_file_to_atoms(filename):
    """Parse a timestamped shell-history log into Atom objects.

    Line layout (by column): [7:9]=dd [10:12]=mm [13:15]=yy [16:21]=HH:MM,
    [25:] = the command itself.
    """
    filecontent=icalhelper.get_content(filename)
    TF = "%d/%m/%y %H:%M"
    atoms=[]
    for line in filecontent:
        content=line[25:].strip()
        start=line[16:21]
        end=line[16:21]
        date=line[7:9]+"/"+line[10:12]+"/"+line[13:15]
        atoms.append(Atom(start,end,date,"Command line"," "+ content,TF))
    return atoms
# A stray module-level `pass` that followed this function was dead code
# and has been removed.
def camera_uploads_to_atoms(targetdir=r"/Users/josephreddington/Dropbox/Camera Uploads/"):
    """Turn every file in *targetdir* into an image Atom, sorted by start time.

    Each atom's timestamp comes from the file's last-modified time and its
    content embeds the image as an HTML <img> tag.
    """
    TF = "%d/%m/%y %H:%M"
    import os.path, time
    atoms = []
    for file in glob.glob(targetdir + "*"):
        modified_date = datetime.datetime.fromtimestamp(os.path.getmtime(file))
        markup = '\n\n<img alt="Imported Image" src="{}" height=160/></p>\n\n'.format(file)
        atoms.append(Atom(modified_date.strftime("%H:%M"),
                          modified_date.strftime("%H:%M"),
                          modified_date.strftime("%d/%m/%y"),
                          "Image", markup, TF))
    atoms.sort(key=lambda a: a.get_S())
    return atoms
# Output
def calendar_output(filename,sessions, matchString=None):
    """Write *sessions* to an .ics file; when *matchString* is given, only
    sessions whose project equals it are included."""
    cal = icalhelper.get_cal()
    for session in sessions:
        wanted = (matchString == None) or (matchString == session.project)
        if wanted:
            icalhelper.add_event(cal, session.project, session.start, session.end)
    icalhelper.write_cal(filename, cal)
def print_original(atoms):
    # Debug helper: dump atoms back in the "###### date start to end" log
    # format followed by their content.
    for atom in atoms:
        print "###### "+atom.date+ " "+ atom.start+ " to "+atom.end
        print "{}".format(atom.content)
def atoms_to_text(atoms):
    """Serialise atoms back into the "######" log format.

    The date is emitted only when it differs from the previous atom's
    date; single-instant atoms emit just a start time, ranged atoms
    "start to end".
    """
    pieces = []
    previous_date = ""
    for atom in atoms:
        if previous_date == atom.date:
            date_part = ""
        else:
            date_part = " " + atom.date
            previous_date = atom.date
        if atom.start == atom.end:
            header = "######" + date_part + " " + atom.start + ","
        else:
            header = "######" + date_part + " " + atom.start + " to " + atom.end + ","
        pieces.append(header)
        pieces.append("{}".format(atom.content))
    return "".join(pieces)
# Driver files.
def pink_slime(config_file='/config.json'):
    """Experimental driver: merge journal, command-line and photo atoms for
    a single day (2018-01-01) into sessions.

    NOTE(review): paths are hard-coded to one machine, the config_file
    argument is unused, and the computed sessions are never returned.
    """
    # print "Hello"
    cwd=os.path.dirname(os.path.abspath(__file__))
    atoms=[]
    atoms.extend(log_file_to_atoms("/Users/josephreddington/Dropbox/git/flow/gromit/journal_2018-01-01.md"))
    atoms.extend(commandline_file_to_atoms(cwd+'/testinputs/commandline.txt'))
    atoms.extend(camera_uploads_to_atoms())
    atoms=cut(atoms,"01-Jan-2018 00:00","01-Jan-2018 23:59")
    temp=sorted(atoms,key=lambda x: x.get_S(), reverse=False)
    sessions=get_sessions(temp)
def full_detect(config_file='/config.json'):
    """Main driver: load every data source named in the config, build
    sessions, write charts and calendars, and report per ``args.action``.

    Returns the value produced by the selected report ("sleep", "sort" or
    "account"), or 0 when args.action matches none of them.  Relies on the
    module-level ``args`` namespace (action, d).
    """
    cwd=os.path.dirname(os.path.abspath(__file__))
    config = json.loads(open(cwd+config_file).read())
    vision_dir = config["projects"]
    gromit_dir = config["journals"]
    # "now" just prints a ready-to-paste journal header and exits.
    if args.action == "now":
        print datetime.datetime.now(pytz.timezone("Europe/London")).strftime("###### "+__TIME_FORMAT)
        return
    sessions=[]
    pacesetter_sessions=get_sessions(log_file_to_atoms(config["pacesetter"]))
    email_sessions=get_sessions(desktop_tracking_file_to_atoms(config["desktop"]))
    watch_atoms=heartrate_to_atoms(config['heart'])
    exercise_sessions=make_exercise_file(args,watch_atoms)
    sleep_sessions=make_sleep_file(args,watch_atoms)
    delores_sessions=get_sessions(log_file_to_atoms(config["delores"]))
    projects_sessions=make_projects_file(vision_dir, "projects")
    gromit_sessions=make_projects_file(gromit_dir, "Journals")
    timechart.graph_out(email_sessions,"email")
    # timechart.graph_out(pacesetter_sessions,"Pacesetter")
    timechart.graph_out(delores_sessions,"DELORES")
    timechart.graph_out(gromit_sessions,"journals")
    sessions.extend(pacesetter_sessions)
    sessions.extend(delores_sessions)
    sessions.extend(email_sessions)
    sessions.extend(exercise_sessions)
    sessions.extend(projects_sessions)
    sessions.extend(gromit_sessions)
    # Optionally restrict everything to the last args.d days.
    if args.d:
        sessions = [i for i in sessions if days_old(i)<int(args.d)]
        sleep_sessions = [i for i in sleep_sessions if days_old(i)<int(args.d)]
    time =0
    if args.action == "sleep":
        time= sleep_report(sleep_sessions)
    if args.action == "sort":
        time= output_sessions_as_projects(sessions)
    if args.action == "account":
        time=output_sessions_as_account(sessions)
    # calendar_output(cwd+"/calendars/pacesetter.ics",pacesetter_sessions)
    calendar_output(cwd+"/calendars/email.ics",email_sessions)
    calendar_output(cwd+"/calendars/projects.ics",projects_sessions)
    calendar_output(cwd+"/calendars/Exercise.ics",exercise_sessions)
    calendar_output(cwd+"/calendars/Sleep.ics",sleep_sessions)
    calendar_output(cwd+"/calendars/gromit.ics",gromit_sessions)
    return time
| {"/watson.py": ["/timechart.py", "/calendar_helper_functions.py", "/entry.py"], "/history_list.py": ["/entry.py"], "/test_dr_watson.py": ["/calendar_helper_functions.py", "/watson.py", "/command_list.py", "/entry.py"], "/command_list.py": ["/entry.py"]} |
66,012 | joereddington/watson | refs/heads/master | /oyster.py | from icalendar import Calendar, Event
import sys
import glob
import datetime
import sys
from pytz import UTC # timezone
import calendar_helper_functions as icalhelper
# Watson is really only designed to parse formats and output them as
# calendar events. The inputs should know, for example, their start and
# end times already...
def processOyster(content):
    """Convert Oyster-card CSV journey rows into an iCalendar Calendar.

    Header rows (containing "Date" or "Start") are skipped.  Bus journeys
    carry no recorded end time, so a fixed 20-minute duration is assumed;
    other journeys parse "HH:MM - HH:MM" from the second column and use
    the third column as the event summary.
    """
    __TIME_FORMAT = "%d/%m/%Y %H:%M"
    cal = icalhelper.get_cal()
    for x in content:
        print x
        if "Date" in x:
            pass
        elif "Start" in x:
            pass
        else:
            journey = x.split(',')
            date=journey[0]
            if "Bus Journey" in x:
                journeytime = datetime.datetime.strptime( "{} {}".format(journey[0], journey[1]), __TIME_FORMAT)
                icalhelper.add_event(
                    cal,
                    "Bus Journey",
                    journeytime,
                    journeytime +
                    datetime.timedelta(
                        minutes=20))
            else:
                # journey[1] looks like "HH:MM - HH:MM": start, then end.
                starttime=journey[1][:5]
                endtime=journey[1][8:]
                journeytime = datetime.datetime.strptime( "{} {}".format(journey[0], starttime), __TIME_FORMAT)
                journeyendtime = datetime.datetime.strptime(
                    "{} {}".format(journey[0], endtime), __TIME_FORMAT)
                icalhelper.add_event(
                    cal, journey[2], journeytime, journeyendtime)
    return cal
def process_hours(content):
    """Build a Calendar of the 2016 "Sleep" rows from a clock-in/out CSV.

    Rows containing "Clocked" are treated as headers and skipped; each
    kept row has quoted start and end timestamps in columns 2 and 3.
    """
    __TIME_FORMAT = "%d/%m/%Y %H:%M"
    cal = icalhelper.get_cal()
    for x in content:
        print "XX:"+x
        if "Clocked" in x:
            pass
        else:
            if "Sleep" in x:
                if "2016" in x:
                    journey = x.split(',')
                    #print datetime.date.today().strftime(__TIME_FORMAT)
                    #print x
                    journeytime = datetime.datetime.strptime(
                        journey[1].replace('"', ''), __TIME_FORMAT)
                    endtime = datetime.datetime.strptime(
                        journey[2].replace('"', ''), __TIME_FORMAT)
                    icalhelper.add_event(
                        cal, "Sleep", journeytime, endtime)
                    print "event added"+x
    print "returning with calendar"
    return cal
if __name__ == "__main__":
    # Reads a single exported Oyster-card CSV and writes the journeys out
    # as an iCalendar file.
    # location="oyster/*.csv"
    # content=[]
    # for file in glob.glob(location):
    #
    # content.extend(icalhelper.get_content(file))
    content= icalhelper.get_content("/Users/josephreddington/Dropbox/git/flow/watson/oyster/all.csv")
    icalhelper.write_cal("Oyster.ics",processOyster(content))
    #write_cal("Sleep.ics", process_hours(get_content("inputfiles/sleep.csv")))
| {"/watson.py": ["/timechart.py", "/calendar_helper_functions.py", "/entry.py"], "/history_list.py": ["/entry.py"], "/test_dr_watson.py": ["/calendar_helper_functions.py", "/watson.py", "/command_list.py", "/entry.py"], "/command_list.py": ["/entry.py"]} |
66,013 | joereddington/watson | refs/heads/master | /timechart.py | import datetime
import os
# Running mean/Moving average
def get_running_mean(l, N):
    """Return the running mean of *l* with window size *N*.

    The first N entries use the cumulative mean over the prefix seen so
    far; from index N onward a sliding window of exactly N values is used.
    """
    window_total = 0
    out = [0] * len(l)
    for i in range(N):
        window_total += l[i]
        out[i] = window_total / (i + 1)
    for i in range(N, len(l)):
        window_total += l[i] - l[i - N]
        out[i] = window_total / N
    return out
def graph_out(sessions,slug):
    """Total per-day minutes for the last 26 days (oldest first), compute a
    7-day running mean, and write both to javascript/<slug>.js."""
    DAY_COUNT = 26
    per_day = []
    for offset in range(DAY_COUNT):
        day = datetime.datetime.today() - datetime.timedelta(days=offset)
        todays = [s for s in sessions if s.start.date() == day.date()]
        total = sum([s.length() for s in todays], datetime.timedelta())
        minutes = int(total.total_seconds() / 60)
        # Prepend so the list ends up oldest-day-first.
        per_day.insert(0, minutes)
    running_mean = get_running_mean(per_day, 7)
    write_to_javascript(per_day, running_mean, slug)
def write_to_javascript(total_time,running_mean,slug):
    """Emit javascript/<slug>.js defining <slug>sessions and
    <slug>running_mean as JS arrays.

    NOTE(review): the file is opened 'wb' but written with str data —
    fine under Python 2 (this codebase), would need mode 'w' on Python 3.
    """
    f = open(os.path.dirname(os.path.abspath(__file__))+"/javascript/"+slug+".js", 'wb')
    f.write(slug+"sessions=["+",".join(str(x) for x in total_time)+"];\n")
    f.write(slug+"running_mean=["+",".join(str(x) for x in running_mean)+"]")
    f.close()
| {"/watson.py": ["/timechart.py", "/calendar_helper_functions.py", "/entry.py"], "/history_list.py": ["/entry.py"], "/test_dr_watson.py": ["/calendar_helper_functions.py", "/watson.py", "/command_list.py", "/entry.py"], "/command_list.py": ["/entry.py"]} |
66,014 | joereddington/watson | refs/heads/master | /processphp.py | from icalendar import Calendar, Event
import datetime
import sys
from pytz import UTC # timezone
def addEvent(cal, summary, start, end):
    """Append an icalendar Event with the given summary and times to *cal*.

    The uid is derived from summary+start+end so re-importing the same
    event replaces rather than duplicates it.
    """
    event = Event()
    event.add('summary', summary)
    event.add('dtstart', start)
    event.add('dtend', end)
    event.add('dtstamp', end)
    event['uid'] = summary+str(start)+str(end)
    event.add('priority', 5)
    cal.add_component(event)
def getCal():
    """Return a fresh icalendar Calendar with the standard prodid/version."""
    cal = Calendar()
    cal.add('prodid', '-//My calendar product//mxm.dk//')
    cal.add('version', '2.0')
    return cal
def write_cal(outfilename, cal):
    """Serialise *cal* to *outfilename* (to_ical emits bytes, hence 'wb')."""
    print "Writing calendar"
    f = open(outfilename, 'wb')
    f.write(cal.to_ical())
    f.close()
def get_content(infilename):
    """Return all lines of *infilename*, newline characters included."""
    with open(infilename) as handle:
        return handle.readlines()
def processOyster(content):
    """Convert Oyster-card CSV rows (dd-Mon-yy date format) into a Calendar.

    Bus journeys get an assumed 20-minute duration; other journeys use the
    start (col 2) and end (col 3) times with column 4 as the summary.
    """
    __TIME_FORMAT = "%d-%b-%y %H:%M"
    cal = getCal()
    for x in content:
        if "Start" in x:
            pass
        else:
            journey = x.split(',')
            journeytime = datetime.datetime.strptime(
                "{} {}".format(journey[0], journey[1]), __TIME_FORMAT)
            if "Bus Journey" in x:
                addEvent(
                    cal,
                    "Bus Journey",
                    journeytime,
                    journeytime +
                    datetime.timedelta(
                        minutes=20))
            else:
                journeyendtime = datetime.datetime.strptime(
                    "{} {}".format(journey[0], journey[2]), __TIME_FORMAT)
                addEvent(
                    cal, journey[3], journeytime, journeyendtime)
    return cal
def process_hours(tag, content):
    """Build a Calendar of the rows matching *tag* from a clock-in/out CSV.

    Only rows mentioning a 2016-2018 year are kept; columns 2 and 3 hold
    the quoted start and end timestamps.
    """
    __TIME_FORMAT = "%d/%m/%y %H:%M"
    cal = getCal()
    print "Tag:"+tag
    for x in content:
        if "Clocked" in x:
            pass
        else:
            if tag in x:
                if any(year in x for year in ['17','16','18']):
                    print "XX:"+x
                    journey = x.split(',')
                    #print datetime.date.today().strftime(__TIME_FORMAT)
                    #print x
                    journeytime = datetime.datetime.strptime(
                        journey[1].replace('"', ''), __TIME_FORMAT)
                    endtime = datetime.datetime.strptime(
                        journey[2].replace('"', ''), __TIME_FORMAT)
                    print "{} {} {}".format(tag, journeytime, endtime)
                    addEvent( cal, tag, journeytime, endtime)
                    print "event added"+x
    print "returning with calendar"
    return cal
def process_email(content):
    """Build one "Processing Email" event per day from a window-title log.

    Keeps only October 2016 lines mentioning Gmail/Airmail, buckets them
    by date ([:10]), and emits an event spanning the first to the last
    sighting of that day.
    """
    __TIME_FORMAT = "%Y-%m-%d%H:%M:%S"
    cal = getCal()
    content = [x for x in content if "2016-10" in x]
    # (date, time, window-title) triples for mail-related lines only.
    breakdown = [(x[:10], x[11:19], x[19:]) for x in content if any(
        a in x[19:] for a in ["Gmail", "irmail"])]
    day_bucket = {}
    for thing in breakdown:
        day_bucket.setdefault(
            thing[0], []).append(
            (thing[1], thing[2]))
    for key in day_bucket.keys():
        print "{} {} {}".format(key, day_bucket[key][0][0], day_bucket[key][-1][0])
        journeytime = datetime.datetime.strptime(
            key+day_bucket[key][0][0], __TIME_FORMAT)
        endtime = datetime.datetime.strptime(
            key+day_bucket[key][-1][0], __TIME_FORMAT)
        addEvent(cal, "Processing Email", journeytime, endtime)
    return cal
#write_cal("Sleep.ics", process_hours(get_content("inputfiles/sleep.csv")))
# NOTE(review): these statements run at import time, reading test.txt from
# the current directory and writing three .ics files as a side effect; they
# would be better placed under an `if __name__ == "__main__":` guard.
content=get_content("test.txt")
write_cal("Sleep.ics", process_hours("Sleep",content))
write_cal("Climbing.ics", process_hours("Climbing",content))
write_cal("Swimming.ics", process_hours("Swimming",content))
#content= sys.argv[1].split("hope")
#write_cal("calendars/Sleep.ics", process_hours("Sleep",content))
#write_cal("calendars/Climbing.ics", process_hours("Climbing",content))
#write_cal("calendars/Swimming.ics", process_hours("Swimming",content))
| {"/watson.py": ["/timechart.py", "/calendar_helper_functions.py", "/entry.py"], "/history_list.py": ["/entry.py"], "/test_dr_watson.py": ["/calendar_helper_functions.py", "/watson.py", "/command_list.py", "/entry.py"], "/command_list.py": ["/entry.py"]} |
66,015 | joereddington/watson | refs/heads/master | /calendar_helper_functions.py | from icalendar import Calendar, Event
def add_event(cal, summary, start, end):
    """Append an icalendar Event with the given summary and times to *cal*.

    The uid is derived from summary+start+end so identical events collapse
    on re-import rather than duplicating.
    """
    event = Event()
    event.add('summary', summary)
    event.add('dtstart', start)
    event.add('dtend', end)
    event.add('dtstamp', end)
    event['uid'] = summary+str(start)+str(end)
    event.add('priority', 5)
    cal.add_component(event)
def get_cal():
    """Return a fresh icalendar Calendar with the standard prodid/version."""
    cal = Calendar()
    cal.add('prodid', '-//My calendar product//mxm.dk//')
    cal.add('version', '2.0')
    return cal
def write_cal(outfilename, cal):
    """Serialise *cal* to *outfilename* as binary iCalendar data."""
    with open(outfilename, 'wb') as out:
        out.write(cal.to_ical())
def get_content(infilename):
    """Read *infilename* and return its lines, newlines included."""
    with open(infilename) as source:
        lines = source.readlines()
    return lines
| {"/watson.py": ["/timechart.py", "/calendar_helper_functions.py", "/entry.py"], "/history_list.py": ["/entry.py"], "/test_dr_watson.py": ["/calendar_helper_functions.py", "/watson.py", "/command_list.py", "/entry.py"], "/command_list.py": ["/entry.py"]} |
66,016 | joereddington/watson | refs/heads/master | /test_watson.py | from unittest import TestCase
import unittest
import watson
import urllib
import json
import os
import atom
import session
import datetime
from urllib2 import urlopen, Request
class watsonTest(TestCase):
    """Regression tests for watson's parsers, session builders and outputs.

    Most tests depend on fixture files under testinputs/ and expected
    outputs under testoutputs/; several patch module-level state
    (watson.args, watson.max_dist_between_logs) before running and restore
    it afterwards.
    """
    def test_fast_strptime(self):
        # atom.fastStrptime must agree with datetime.strptime for the
        # desktop-log time format.
        test1="02/07/17 15:22"
        TIME_FORMAT = "%d/%m/%y %H:%M"
        result=atom.fastStrptime(test1,TIME_FORMAT)
        otherresult=datetime.datetime.strptime(test1,TIME_FORMAT)
        self.assertEqual(result,otherresult)
    def test_fast_strptime_from_watch(self):
        # Same agreement for the heart-rate watch format.
        test1="01-Jan-2018 07:22"
        TIME_FORMAT = "%d-%b-%Y %H:%M"
        result=atom.fastStrptime(test1,TIME_FORMAT)
        otherresult=datetime.datetime.strptime(test1,TIME_FORMAT)
        self.assertEqual(result,otherresult)
    def test_download_repo_json(self):
        # Placeholder test.
        self.assertEqual(3,3)
    def test_log_file_to_atoms(self):
        atoms=watson.log_file_to_atoms("testinputs/regressions/livenotes.md")
        self.assertEqual(len(atoms),582)
    def test_log_file_to_atoms_inline(self):
        atoms=watson.log_file_to_atoms("testinputs/regressions/livenotesinline.md")
        self.assertEqual(len(atoms),582)
    def test_log_file_to_atoms_inline_wrong(self):
        atoms=watson.log_file_to_atoms("testinputs/wrong.md")
        print atoms[0]
        sessions=watson.get_sessions(atoms)
        self.assertEqual(len(atoms),1)
    def test_commandline_file_to_atoms(self):
        atoms=watson.commandline_file_to_atoms("testinputs/commandline.txt")
        self.assertEqual(len(atoms),6475)
    def test_log_file_to_atoms_problem(self):
        atoms=watson.log_file_to_atoms("testinputs/problem.md")
        sessions=watson.get_sessions(atoms)
        self.assertEqual(len(sessions),0)
    def test_split_on_title(self):
        atoms=watson.log_file_to_atoms("testinputs/splitontitle.md")
        sessions=watson.get_sessions(atoms)
        self.assertEqual(len(sessions),9)
    def test_read_desktop_log_file(self):
        atoms=watson.desktop_tracking_file_to_atoms("testinputs/desktop.md")
        self.assertEqual(len(atoms),66)
    def test_make_email_sessions(self):
        atoms=watson.desktop_tracking_file_to_atoms("testinputs/desktop.md")
        sessions=watson.get_sessions(atoms)
        self.assertEqual(len(sessions),2)
    def test_get_sessions_works_with_no_atoms(self):
        # Empty input must not crash the session builder.
        atoms=[]
        sessions=watson.get_sessions(atoms)
        self.assertEqual(len(sessions),0)
    def test_log_file_to_atoms_blanktitle(self):
        # Without a "title:" header the filename is used as the title.
        atoms=watson.log_file_to_atoms("testinputs/regressions/livenotes.md")
        self.assertEqual(atoms[0].title,"testinputs/regressions/livenotes.md")
    def test_log_file_to_atoms_proper_title(self):
        atoms=watson.log_file_to_atoms("testinputs/regressions/bug-with-markdown-links.md")
        self.assertEqual(atoms[0].title,"Bug with markdown links")
    def test_make_sessions(self):
        atoms=watson.log_file_to_atoms("testinputs/regressions/livenotes.md")
        sessions=watson.get_sessions(atoms)
        self.assertEqual(len(sessions),36)
    def test_read_heartrate_file(self):
        atoms=watson.heartrate_to_atoms("testinputs/heart.csv")
        self.assertEqual(len(atoms),164866)
    def test_count_awake_sessions(self):
        # watson.args is normally set by the CLI; fake it here.
        watson.args =lambda:None
        setattr(watson.args, 'action', 'sort')
        setattr(watson.args, 'd',None)
        setattr(watson.args, 'verbatim',None)
        TF = "%d-%b-%Y %H:%M"
        pre=watson.max_dist_between_logs
        watson.max_dist_between_logs=90
        atoms=watson.heartrate_to_atoms("testinputs/heartshort.csv")
        sessions=watson.get_sessions(atoms)
        watson.max_dist_between_logs=pre
        projects = list(set([entry.project for entry in sessions]))
        # for project in projects:
        # watson.projectreport(project, sessions, True)
        self.assertEqual(len(sessions),140)
    def test_invert_sessions(self):
        pre=watson.max_dist_between_logs
        watson.max_dist_between_logs=90
        atoms=watson.heartrate_to_atoms("testinputs/heartshort.csv")
        sessions=watson.get_sessions(atoms)
        #print "XXX{}".format(sessions[0])
        sessions=watson.invert_sessions(sessions)
        watson.max_dist_between_logs=pre
        projects = list(set([entry.project for entry in sessions]))
        # for project in projects:
        # watson.projectreport(project, sessions, True)
        self.assertEqual(len(sessions),140)
    def test_get_exercise_atoms(self):
        TF = "%d-%b-%Y %H:%M"
        atoms=watson.heartrate_to_atoms("testinputs/heartshort.csv")
        atoms=watson.get_atom_clusters(atoms)
        self.assertEqual(len(atoms),33064)
    def test_get_image_atoms(self):
        TF = "%d-%b-%Y %H:%M"
        atoms=watson.camera_uploads_to_atoms("testinputs/images/")
        self.assertEqual(len(atoms),5)
    def test_output_image_atoms(self):
        #Sorting is based on last modified time, which on macs is done to the minute, event if the filename is done to the second, hence this can look like it' in the wrong order.
        TF = "%d-%b-%Y %H:%M"
        atoms=watson.camera_uploads_to_atoms("testinputs/images/")
        image_text=watson.atoms_to_text(atoms)
        image_text=image_text.replace("\n\n","\n")
        self.maxDiff = None
        print image_text
        self.assertMultiLineEqual(open('testoutputs/image.md').read().strip(),image_text.strip())
        self.assertEqual(len(atoms),5)
    def test_combination(self):
        # Image atoms and journal atoms interleave correctly when sorted.
        TF = "%d-%b-%Y %H:%M"
        atoms=watson.camera_uploads_to_atoms("testinputs/images/")
        atoms.extend(watson.log_file_to_atoms("testinputs/augment1.md"))
        sorted_atoms=sorted(atoms,key=lambda x: x.get_S(), reverse=False)
        image_text=watson.atoms_to_text(sorted_atoms)
        image_text=image_text.replace("\n\n","\n")
        self.maxDiff = None
        print image_text
        self.assertMultiLineEqual(open('testoutputs/augment1result.md').read().strip(),image_text.strip())
    def test_get_exercise_sessions(self):
        TF = "%d-%b-%Y %H:%M"
        atoms=watson.heartrate_to_atoms("testinputs/heartshort.csv")
        atoms=watson.get_atom_clusters(atoms)
        sessions=watson.get_sessions(atoms)
        self.assertEqual(len(sessions),58)
    def test_calendar_write(self):
        watson.args =lambda:None
        setattr(watson.args, 'action', 'sort')
        setattr(watson.args, 'd',None)
        setattr(watson.args, 'verbatim',None)
        TF = "%d-%b-%Y %H:%M"
        atoms=watson.heartrate_to_atoms("testinputs/heartshort.csv")
        atoms=watson.get_atom_clusters(atoms)
        sessions=watson.get_sessions(atoms)
        self.assertEqual(len(sessions),58)
        watson.calendar_output('testoutputs/exercise.ics',sessions)
        self.maxDiff = None
        self.assertMultiLineEqual(open('testoutputs/exercise.ics').read().strip(),open('testinputs/exercise.ics').read().strip(),)
    def test_sleep_regression(self):
        watson.args =lambda:None
        setattr(watson.args, 'action', 'sort')
        setattr(watson.args, 'd',None)
        setattr(watson.args, 'verbatim',None)
        TF = "%d-%b-%Y %H:%M"
        atoms=watson.heartrate_to_atoms("testinputs/heart.csv")
        atoms=watson.get_atom_clusters(atoms)
        sessions=watson.get_sessions(atoms)
        watson.calendar_output('testoutputs/sleepregression.ics',sessions)
        self.maxDiff = None
        self.assertMultiLineEqual(open('testoutputs/sleepregression.ics').read().strip(),open('testinputs/sleepregression.ics').read().strip(),)
    def test_selective_calendar_write(self):
        # calendar_output's matchString filter keeps only Exercise sessions.
        TF = "%d-%b-%Y %H:%M"
        atoms=watson.heartrate_to_atoms("testinputs/heartshort.csv")
        atoms=watson.get_atom_clusters(atoms)
        sessions=watson.get_sessions(atoms)
        email_atoms=watson.desktop_tracking_file_to_atoms("testinputs/desktop.md")
        email_sessions=watson.get_sessions(email_atoms)
        sessions.extend(email_sessions)
        watson.calendar_output('testoutputs/exerciseSelective.ics',sessions, 'Exercise')
        self.maxDiff = None
        self.assertMultiLineEqual(open('testoutputs/exerciseSelective.ics').read().strip(),open('testinputs/exercise.ics').read().strip(),)
    def test_fullregression2018_01_01(self):
        watson.args =lambda:None
        setattr(watson.args, 'action', 'sort')
        setattr(watson.args, 'd',None)
        setattr(watson.args, 'verbatim',None)
        self.assertEqual(watson.full_detect('/testinputs/full2018-01-01/config.json'),datetime.timedelta(53, 18600))
    def test_fullcoverage2018_01_01(self):
        # Smoke-test all three actions of the full driver.
        watson.args =lambda:None
        setattr(watson.args, 'action', 'sort')
        setattr(watson.args, 'd',30000000)
        setattr(watson.args, 'verbatim',None)
        watson.full_detect('/testinputs/full2018-01-01/config.json')
        setattr(watson.args, 'action', 'sleep')
        watson.full_detect('/testinputs/full2018-01-01/config.json')
        setattr(watson.args, 'action', 'now')
        watson.full_detect('/testinputs/full2018-01-01/config.json')
    def test_time_split(self):
        TF = "%d-%b-%Y %H:%M"
        atoms=watson.heartrate_to_atoms("testinputs/heartshort.csv")
        start="02-Jan-2017 12:27"
        end="02-Jan-2017 16:27"
        atoms=watson.cut(atoms,start,end)
        self.assertEqual(len(atoms),1036)
    def test_journal_bug(self):
        atoms=watson.log_file_to_atoms("testinputs/strange.md")
        sessions=watson.get_sessions(atoms)
        self.assertEqual(len(sessions),1)
    def test_midnight_bug(self):
        atoms=watson.log_file_to_atoms("testinputs/midnight.md")
        sessions=watson.get_sessions(atoms)
        self.assertEqual(len(sessions),1)
    def test_print_original_identity(self):
        # Parsing and re-serialising a journal should round-trip.
        atoms=watson.log_file_to_atoms("testinputs/strange.md")
        strange_text=watson.atoms_to_text(atoms)
        strange_text=strange_text.replace("\n\n","\n")
        self.maxDiff = None
        print strange_text
        self.assertMultiLineEqual(open('testinputs/strange.md').read().strip(),strange_text)
if __name__=="__main__":
    # Allow running this test module directly.
    unittest.main()
| {"/watson.py": ["/timechart.py", "/calendar_helper_functions.py", "/entry.py"], "/history_list.py": ["/entry.py"], "/test_dr_watson.py": ["/calendar_helper_functions.py", "/watson.py", "/command_list.py", "/entry.py"], "/command_list.py": ["/entry.py"]} |
66,017 | joereddington/watson | refs/heads/master | /archive/desktoptrackingprocess.py | #!/usr/bin/python
"Module for compiling tracking data in bar chart"
from __future__ import division
import datetime
import time
from icalendar import Calendar, Event
import numpy as np
import matplotlib.pyplot as plt
from datetime import datetime, date, timedelta
# First! I want Columns to identify a period of an hour of NO readings and
# overlay it as black! Then I want to use that as a split....
# So we're going to add some aspects to this - chief amoung them the ability to split days into several parts.
# The first stage is to be able to do it for an individual day - and the
# easy way of doing that is to give each day a start and end time and then
# put another graph ontop - see how that looks. Ideally, of course, I'd also
# like this to export to my calendar.
# Chart configuration: number of days shown and the output location.
__DAY_COUNT = 7
__HOME_DIR = "/Users/josephreddington/Dropbox/Dreamhost/joereddington.com/stress/"
__OUTPUT_FILE = __HOME_DIR + 'columns.png'
weekdays = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'] * __DAY_COUNT * 2
def addEvent(cal, summary, start,end):
    """Append an icalendar Event with the given summary and times to *cal*."""
    event = Event()
    event.add('summary', summary)
    event.add('dtstart', start)
    event.add('dtend', end)
    event.add('dtstamp', end)
    event['uid'] = summary+str(start)+str(end)
    event.add('priority', 5)
    cal.add_component(event)
def getCal():
    """Return a fresh icalendar Calendar with the standard prodid/version."""
    cal = Calendar()
    cal.add('prodid', '-//My calendar product//mxm.dk//')
    cal.add('version', '2.0')
    return cal
def write_cal(outfilename,cal):
    """Serialise *cal* to *outfilename* as binary iCalendar data."""
    with open(outfilename, 'wb') as out:
        out.write(cal.to_ical())
class FullRotation(object):
    """One day's worth of desktop-tracking activity.

    Loads the window-title log for a single date and builds a list of
    ActivityRecorder objects describing when each activity was first and
    last seen that day.
    """
    WINDOW_TITLE_FILE = "/Users/josephreddington/" + "Dropbox/git/DesktopTracking/output/results.txt"
    __WAKE_FILE = "/Users/josephreddington/" + "Dropbox/git/Columns/chestopenings.txt"
    # NOTE: class-level defaults; __init__ rebinds both per instance.
    dayactivities = []
    date = ""
    def __init__(self, single_date):
        self.date = single_date
        self.dayactivities = self.get_activity_list_for_date(
            self.WINDOW_TITLE_FILE, single_date)
    def __str__(self):
        return weekdays[self.date.weekday(
        )] + " " + str(self.date) + "\n" + '\n'.join(str(item) for item in self.dayactivities)
    def get_activity_list_for_date(self, filename, single_date):
        "returns a filled in activity list for the given date"
        datestring = time.strftime("%Y-%m-%d", single_date.timetuple())
        item_list = self.construct_activity_list(datestring)
        self.process_logfile(filename, datestring, item_list)
        normalise_activity_list(item_list)
        return item_list
    def process_logfile(self, filename, datestring, item_list):
        "compares every line in a file with the triggers in a list of activity recorders"
        # NOTE(review): log_file is never closed; a with-statement would fix.
        log_file = open(filename)
        line = log_file.readline()
        while line:
            if datestring in line:
                seconds_since_midnight = (datetime.strptime(
                    line[11:19], '%H:%M:%S') - get_first()).total_seconds()
                # 13000 s is roughly 03:36 — presumably skips early-morning
                # noise before the day "really" starts; TODO confirm.
                if seconds_since_midnight > 13000:
                    for item in item_list:
                        item.examine(line)
            line = log_file.readline()
        return item_list
    def construct_activity_list(self, datestring):
        "constructs the activity list and also processes the wake file"
        item_list = []
        item_list.append(
            ActivityRecorder(
                "email", ["firefox:Inbox", "Gmail", "Airmail"],
                "red"))
        return item_list
class ActivityRecorder(object):
    "For each activity tracked we record the set of triggers and the relevant meta data"
    # NOTE: class-level defaults; __init__ rebinds name/search_strings/color
    # per instance, while first_seen/last_seen are set lazily by examine().
    search_strings = []
    seconds_first, seconds_last = 0, 0
    name, color, first_seen, last_seen = "", "", "", ""
    def __init__(self, name, search_string, color):
        self.name, self.search_strings, self.color = name, search_string, color
    def __str__(self):
        return self.name.ljust(10) + "%s (%d) until %s (%d)" % (
            self.first_seen, self.seconds_first, self.last_seen, self.seconds_last)
    def examine(self, line):
        "checks an individual line for triggers"
        if any(s in line for s in self.search_strings):
            # Bug fix: `self.first_seen is ""` compared object identity,
            # which only works by accident of CPython string interning
            # (and raises a SyntaxWarning on modern Pythons); use equality.
            if self.first_seen == "":
                self.first_seen = line[11:19]
            self.last_seen = line[11:19]
def get_first():
    "helper function for readability"
    # Midnight as a datetime, used as the zero point for seconds-since-midnight.
    return datetime.strptime('00:00:00', '%H:%M:%S')
def convert(in_time):
    """Convert an '%H:%M:%S' string to seconds since midnight; '' maps to 0.

    Bug fix: the empty-string check used identity (`in_time is ""`), which
    relies on CPython string interning and raises a SyntaxWarning on modern
    Pythons; it now uses equality.
    """
    if in_time == "":
        return 0
    return (datetime.strptime(in_time, '%H:%M:%S') -
            get_first()).total_seconds()
def normalise_activity_list(item_list):
    """Fill in seconds_first/seconds_last on each activity from its
    first_seen/last_seen '%H:%M:%S' strings."""
    for activity in item_list:
        activity.seconds_first = convert(activity.first_seen)
        activity.seconds_last = convert(activity.last_seen)
def mark_section(ind, main_list, index):
    "places the indexed item of the main_list onto the chart using the activity color"
    # Bar height is (last - first) seconds for each day; bars are offset to
    # begin at the first-seen time so they span the active period.
    top = [main_list[i][index].seconds_last - main_list[i]
           [index].seconds_first for i in range(__DAY_COUNT)]
    start = [main_list[i][index].seconds_first for i in range(__DAY_COUNT)]
    plt.bar(ind, top, 0.35, color=main_list[0][index].color, bottom=start)
def get_average_sleep_time(main_list):
    "outputs information on the average sleep/wake time, days without a sleep time aren't counted"
    # filter(None, ...) drops days whose seconds_first is 0 (no readings).
    wake_times = filter(None,
                        [main_list[i][0].seconds_first
                         for i in range(__DAY_COUNT)])
    try:
        average_seconds = sum(wake_times) / len(wake_times)
        m, s = divmod(average_seconds, 60)
        h, m = divmod(m, 60)
    except ZeroDivisionError: # because the chest file might be blank for a start
        h, m, s = (0, 0, 0)
    return "Average boot time: %d:%02d:%02d" % (h, m, s)
# credit to http://stackoverflow.com/a/775075
# credit to http://stackoverflow.com/a/775075
def run():
    cal=getCal()
    "the main run function, heart of the program"
    activity_recorder_list = []
    for single_date in (date.today() - timedelta(days=n)
                        for n in range(__DAY_COUNT)):
        day = FullRotation(single_date)
        print day.date
        print day.dayactivities[0].name
        # NOTE(review): `is ""` is an identity comparison that only works by
        # interning accident; should be `== ""`.
        if day.dayactivities[0].first_seen is "":
            print "Not today!"
        else:
            # NOTE(review): `day.dayactivities` is a list, so `.first_seen`
            # raises AttributeError here, and `enddate` is referenced but
            # never defined — this branch cannot run successfully as written.
            startdate=datetime.strptime("{} {}".format(str(day.date), day.dayactivities.first_seen), '%H:%M:%S')
            addEvent(cal, "Processing Emails", startdate,enddate)
            print "here!"
        activity_recorder_list.insert(0, day.dayactivities)
    plt.savefig(__OUTPUT_FILE, dpi=200)
| {"/watson.py": ["/timechart.py", "/calendar_helper_functions.py", "/entry.py"], "/history_list.py": ["/entry.py"], "/test_dr_watson.py": ["/calendar_helper_functions.py", "/watson.py", "/command_list.py", "/entry.py"], "/command_list.py": ["/entry.py"]} |
66,018 | joereddington/watson | refs/heads/master | /atom.py | import datetime
m = { 'Jan': 1, 'Feb': 2, 'Mar': 3, 'Apr':4, 'May':5, 'Jun':6, 'Jul':7, 'Aug':8, 'Sep':9, 'Oct':10, 'Nov':11, 'Dec':12 }
def fastStrptime(val, format):
    # edited from http://ze.phyr.us/faster-strptime/
    """Fast-path datetime parsing for the two formats this codebase uses.

    Hand-slices "%d/%m/%y %H:%M" (desktop logs) and "%d-%b-%Y %H:%M" (the
    watch), falling back to datetime.strptime for anything else.
    """
    try:
        l = len(val)
        if format == '%d/%m/%y %H:%M' and (l == 14):
            temp= datetime.datetime(
                2000+int(val[6:8]), # %Y
                int(val[3:5]), # %m
                int(val[0:2]), # %d
                int(val[9:11]), # %H
                int(val[12:14]), # %M
                0, # %s
                0, # %f
            )
            return temp
        # The watch
        if format == "%d-%b-%Y %H:%M" and (l == 17):
            temp= datetime.datetime(
                int(val[7:11]), # %Y
                m[val[3:6]], # %m
                int(val[0:2]), # %d
                int(val[12:14]), # %H
                int(val[15:17]), # %M
                0, # %s
                0, # %f
            )
            return temp
        # Default to the native strptime for other formats.
        print "Warning: falling through {} {} {}".format(val, format, l)
        return datetime.datetime.strptime(val, format)
    except ValueError:
        # NOTE(review): re-raising a fresh ValueError discards the original
        # message; a bare `raise` would preserve it.
        print "Exception for this:"
        print val
        raise ValueError
class Atom(object):
    """A single logged moment: a titled time range on a date plus content.

    start/end are "HH:MM" strings, date is a date string matching *TF*
    (default "%d/%m/%y"); get_S()/get_E() lazily parse and memoise the
    combined datetimes.
    """
    def __init__(self, start="",end="", date="",title="", content="", TF="%d/%m/%y %H:%M"):
        self.content=content
        self.start=start
        self.title=title
        self.end=end
        self.date=date
        self.TF=TF
        # Memoised parsed start/end datetimes.
        self.s=None
        self.e=None
    def get_S(self):
        """Return the start as a datetime, parsing (and caching) on first use."""
        try:
            total_date=self.date+" "+self.start
            if not self.s:
                self.s= fastStrptime(total_date,self.TF)
            return self.s
        except ValueError:
            # NOTE(review): returns None after printing — callers get no
            # signal that parsing failed.
            print "Exception:"
            print self
    def get_E(self):
        """Return the end as a datetime, parsing (and caching) on first use."""
        try:
            total_date=self.date+" "+self.end
            if not self.e:
                self.e= fastStrptime(total_date,self.TF)
            # Defensive re-parse if a non-datetime somehow got cached.
            types=str(type(self.e))
            if "date" not in types:
                self.e= fastStrptime(total_date,self.TF)
            # self.e= datetime.datetime.strptime(total_date,self.TF)
            #print self.e
            return self.e
        except ValueError:
            print "Exception in E:"
            print self
    def __str__(self):
        return "{}, from {} to {} on {}".format(self.title,self.start,self.end,self.date)
| {"/watson.py": ["/timechart.py", "/calendar_helper_functions.py", "/entry.py"], "/history_list.py": ["/entry.py"], "/test_dr_watson.py": ["/calendar_helper_functions.py", "/watson.py", "/command_list.py", "/entry.py"], "/command_list.py": ["/entry.py"]} |
66,022 | mhavasi/MPhil_Project | refs/heads/master | /code/DeepGPs/models/AbstractModel.py | import abc
class AbstractModel(object):
    """Abstract interface for incrementally-updatable regression models.

    Bug fix: the abstract method signatures omitted ``self``, so they did
    not match the concrete overrides (e.g. GPlib) and any ``super()`` call
    would fail.  They are stubs, so adding ``self`` is backward-compatible.

    NOTE(review): ``__metaclass__`` is the Python 2 mechanism; under
    Python 3 it has no effect and abstractness is not enforced (use
    ``class AbstractModel(metaclass=abc.ABCMeta)`` there).
    """
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def __init__(self, X, Y, modelParams):
        """Build the model from training inputs X, targets Y and params."""
        pass

    @abc.abstractmethod
    def addPoint(self, x, y):
        """Incorporate a single new observation (x, y)."""
        pass

    @abc.abstractmethod
    def predictBatch(self, X):
        """Predict outputs for a batch of inputs X."""
        pass
66,023 | mhavasi/MPhil_Project | refs/heads/master | /code/DeepGPs/models/GP/GPlib.py | from ..AbstractModel import AbstractModel
import GPy
import time
import tensorflow as tf
import numpy as np
class GPlib(AbstractModel):
    """Wrapper around GPy regression: one independent GP per output column."""

    def __init__(self, training_data, training_targets, modelParams):
        # training_targets are the real-valued Y's, one column per output.
        self.training_data = training_data
        self.training_targets = training_targets
        self.n_points = training_data.shape[0]
        self.input_d = training_data.shape[1]
        self.output_d = training_targets.shape[1]
        # Kernel choice: 'rbf' (default) or 'matern'.
        self.kern = modelParams['kern'] if 'kern' in modelParams else 'rbf'
        self.reset()

    def reset(self):
        """(Re)build and optimize one GPy regression model per output column."""
        self.models = []
        for out_idx in range(self.output_d):
            if self.kern == 'matern':
                kernel = GPy.kern.Matern52(input_dim=self.input_d, ARD=True)
            else:
                kernel = GPy.kern.RBF(input_dim=self.input_d, ARD=True)
            target_col = self.training_targets[:, out_idx:out_idx + 1]
            gp = GPy.models.GPRegression(self.training_data, target_col, kernel)
            gp.optimize_restarts(num_restarts=10)
            gp.optimize(messages=False)
            #print(kernel)
            self.models.append(gp)

    def addPoint(self, x, y):
        """Prepend one observation and retrain all GPs from scratch."""
        self.training_data = np.vstack((x, self.training_data))
        self.training_targets = np.vstack((y, self.training_targets))
        self.reset()

    def predictBatch(self, test_data):
        """Return (means, vars): one predictive mean/variance column per GP."""
        n_rows = test_data.shape[0]
        means = np.array([[]] * n_rows)
        vars = np.array([[]] * n_rows)
        for gp in self.models:
            mu, sigma2 = gp.predict(test_data, full_cov=False)
            means = np.concatenate((means, mu), axis=1)
            vars = np.concatenate((vars, sigma2.reshape((-1, 1))), axis=1)
        return means, vars
| {"/code/DeepGPs/models/GP/GPlib.py": ["/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/models/DeepGP/layers/GPLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py"], "/code/DeepGPs/models/DeepGP/layers/OutputLayerRegression.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/OutputNodeRegression.py"], "/code/DeepGPs/models/Random/RandomModel.py": ["/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/optimizer/aquisition/SMSego.py": ["/code/DeepGPs/optimizer/aquisition/AbstractAquisition.py"], "/code/DeepGPs/models/DeepGP/GPNetwork.py": ["/code/DeepGPs/models/DeepGP/layers/InputLayer.py", "/code/DeepGPs/models/DeepGP/layers/OutputLayerRegression.py", "/code/DeepGPs/models/DeepGP/layers/NoisyLayer.py", "/code/DeepGPs/models/DeepGP/layers/GPLayer.py", "/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/models/DeepGP/layers/InputLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/InputNode.py"], "/code/DeepGPs/models/DeepGP/layers/NoisyLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/NoisyNode.py"], "/code/DeepGPs/models/DeepGP/GPNetworkDJ.py": ["/code/DeepGPs/models/AbstractModel.py", "/code/DeepGPs/models/DeepGP/GPNetwork.py"], "/code/DeepGPs/optimizer/Optimizer.py": ["/code/DeepGPs/optimizer/aquisition/SMSego.py"]} |
66,024 | mhavasi/MPhil_Project | refs/heads/master | /code/DeepGPs/models/DeepGP/layers/GPLayer.py | from .BaseLayer import *
from ..nodes.GPNode import *
import tensorflow as tf
import numpy as np
class GPLayer(BaseLayer):
    """A layer of independent sparse GP nodes sharing the same inputs."""

    def __init__(self,
                 n_points,
                 n_inducing_points,
                 n_nodes, input_means,
                 input_vars,
                 set_for_training,
                 initial=None):
        BaseLayer.__init__(self, input_means, input_vars)
        self.nodes = []
        self.output_means_list = []
        self.output_vars_list = []
        for _ in range(n_nodes):
            if initial is None:
                node_init = None
            else:
                # Each node draws its own random subset of rows to
                # initialize the inducing inputs.
                picked = np.random.choice(initial.shape[0],
                                          size=n_inducing_points)
                node_init = initial[picked, :]
            node = GPNode(input_means,
                          input_vars,
                          n_points,
                          n_inducing_points,
                          set_for_training,
                          initial=node_init)
            node_mean, node_var = node.getOutput()
            self.output_means_list.append(node_mean)
            self.output_vars_list.append(node_var)
            self.nodes.append(node)
        # Stack the per-node columns into (batch, n_nodes) tensors.
        self.output_means = tf.concat(self.output_means_list, 1)
        self.output_vars = tf.concat(self.output_vars_list, 1)
        # Layer energy is the sum of each node's contribution.
        self.energy = tf.add_n([node.getEnergyContribution()
                                for node in self.nodes])

    def getEnergyContribution(self):
        return self.energy

    def getOutput(self):
        return self.output_means, self.output_vars
| {"/code/DeepGPs/models/GP/GPlib.py": ["/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/models/DeepGP/layers/GPLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py"], "/code/DeepGPs/models/DeepGP/layers/OutputLayerRegression.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/OutputNodeRegression.py"], "/code/DeepGPs/models/Random/RandomModel.py": ["/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/optimizer/aquisition/SMSego.py": ["/code/DeepGPs/optimizer/aquisition/AbstractAquisition.py"], "/code/DeepGPs/models/DeepGP/GPNetwork.py": ["/code/DeepGPs/models/DeepGP/layers/InputLayer.py", "/code/DeepGPs/models/DeepGP/layers/OutputLayerRegression.py", "/code/DeepGPs/models/DeepGP/layers/NoisyLayer.py", "/code/DeepGPs/models/DeepGP/layers/GPLayer.py", "/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/models/DeepGP/layers/InputLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/InputNode.py"], "/code/DeepGPs/models/DeepGP/layers/NoisyLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/NoisyNode.py"], "/code/DeepGPs/models/DeepGP/GPNetworkDJ.py": ["/code/DeepGPs/models/AbstractModel.py", "/code/DeepGPs/models/DeepGP/GPNetwork.py"], "/code/DeepGPs/optimizer/Optimizer.py": ["/code/DeepGPs/optimizer/aquisition/SMSego.py"]} |
66,025 | mhavasi/MPhil_Project | refs/heads/master | /code/DeepGPs/models/DeepGP/layers/OutputLayerRegression.py | from .BaseLayer import *
from ..nodes.OutputNodeRegression import *
import tensorflow as tf
import numpy as np
class OutputLayerRegression(BaseLayer):
    """Final layer for regression: wraps a single OutputNodeRegression."""

    def __init__(self, target_placeholder, input_means, input_vars):
        BaseLayer.__init__(self, input_means, input_vars)
        node = OutputNodeRegression(target_placeholder,
                                    input_means,
                                    input_vars)
        self.output_node = node
        self.output_means, self.output_vars = node.getOutput()

    def getEnergyContribution(self):
        """Likelihood term contributed by the output node."""
        return self.output_node.getEnergyContribution()

    def getOutput(self):
        return self.output_means, self.output_vars
| {"/code/DeepGPs/models/GP/GPlib.py": ["/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/models/DeepGP/layers/GPLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py"], "/code/DeepGPs/models/DeepGP/layers/OutputLayerRegression.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/OutputNodeRegression.py"], "/code/DeepGPs/models/Random/RandomModel.py": ["/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/optimizer/aquisition/SMSego.py": ["/code/DeepGPs/optimizer/aquisition/AbstractAquisition.py"], "/code/DeepGPs/models/DeepGP/GPNetwork.py": ["/code/DeepGPs/models/DeepGP/layers/InputLayer.py", "/code/DeepGPs/models/DeepGP/layers/OutputLayerRegression.py", "/code/DeepGPs/models/DeepGP/layers/NoisyLayer.py", "/code/DeepGPs/models/DeepGP/layers/GPLayer.py", "/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/models/DeepGP/layers/InputLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/InputNode.py"], "/code/DeepGPs/models/DeepGP/layers/NoisyLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/NoisyNode.py"], "/code/DeepGPs/models/DeepGP/GPNetworkDJ.py": ["/code/DeepGPs/models/AbstractModel.py", "/code/DeepGPs/models/DeepGP/GPNetwork.py"], "/code/DeepGPs/optimizer/Optimizer.py": ["/code/DeepGPs/optimizer/aquisition/SMSego.py"]} |
66,026 | mhavasi/MPhil_Project | refs/heads/master | /code/DeepGPs/models/Random/RandomModel.py | from ..AbstractModel import AbstractModel
import numpy as np
class RandomModel(AbstractModel):
    """Baseline model: ignores the data and predicts uniform random values."""

    def __init__(self, training_data, training_targets, modelParams):
        # Only the output dimensionality is needed to shape predictions.
        self.dim = training_targets.shape[1]

    def addPoint(self, x, y):
        # Nothing to update: predictions are random regardless of data.
        pass

    def predictBatch(self, X_):
        """Return random (means, variances), each of shape (len(X_), dim)."""
        shape = (X_.shape[0], self.dim)
        return np.random.rand(*shape), np.random.rand(*shape)
| {"/code/DeepGPs/models/GP/GPlib.py": ["/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/models/DeepGP/layers/GPLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py"], "/code/DeepGPs/models/DeepGP/layers/OutputLayerRegression.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/OutputNodeRegression.py"], "/code/DeepGPs/models/Random/RandomModel.py": ["/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/optimizer/aquisition/SMSego.py": ["/code/DeepGPs/optimizer/aquisition/AbstractAquisition.py"], "/code/DeepGPs/models/DeepGP/GPNetwork.py": ["/code/DeepGPs/models/DeepGP/layers/InputLayer.py", "/code/DeepGPs/models/DeepGP/layers/OutputLayerRegression.py", "/code/DeepGPs/models/DeepGP/layers/NoisyLayer.py", "/code/DeepGPs/models/DeepGP/layers/GPLayer.py", "/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/models/DeepGP/layers/InputLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/InputNode.py"], "/code/DeepGPs/models/DeepGP/layers/NoisyLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/NoisyNode.py"], "/code/DeepGPs/models/DeepGP/GPNetworkDJ.py": ["/code/DeepGPs/models/AbstractModel.py", "/code/DeepGPs/models/DeepGP/GPNetwork.py"], "/code/DeepGPs/optimizer/Optimizer.py": ["/code/DeepGPs/optimizer/aquisition/SMSego.py"]} |
66,027 | mhavasi/MPhil_Project | refs/heads/master | /code/DeepGPs/models/DeepGP/nodes/OutputNodeRegression.py | from .BaseNode import *
import tensorflow as tf
import numpy as np
class OutputNodeRegression(BaseNode):
    """Gaussian likelihood node: scores targets under N(input_means, input_vars)."""

    def __init__(self, target_placeholder, input_means, input_vars):
        BaseNode.__init__(self, input_means, input_vars)
        self.target_placeholder = target_placeholder
        self.input_means = input_means
        self.input_vars = input_vars
        # This node passes its inputs straight through as outputs.
        self.output_means = input_means
        self.output_vars = input_vars

    def getEnergyContribution(self):
        """Sum of Gaussian log-densities of the targets, as a 1x1 tensor."""
        residual = self.target_placeholder - self.input_means
        log_norm = -0.5 * tf.log(2.0 * np.pi * self.input_vars)
        quad = -0.5 * tf.square(residual) / self.input_vars
        per_point = tf.reduce_sum(log_norm + quad, 1, keep_dims=True)
        return tf.reduce_sum(per_point, 0, keep_dims=True)

    def getOutput(self):
        return self.output_means, self.output_vars
| {"/code/DeepGPs/models/GP/GPlib.py": ["/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/models/DeepGP/layers/GPLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py"], "/code/DeepGPs/models/DeepGP/layers/OutputLayerRegression.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/OutputNodeRegression.py"], "/code/DeepGPs/models/Random/RandomModel.py": ["/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/optimizer/aquisition/SMSego.py": ["/code/DeepGPs/optimizer/aquisition/AbstractAquisition.py"], "/code/DeepGPs/models/DeepGP/GPNetwork.py": ["/code/DeepGPs/models/DeepGP/layers/InputLayer.py", "/code/DeepGPs/models/DeepGP/layers/OutputLayerRegression.py", "/code/DeepGPs/models/DeepGP/layers/NoisyLayer.py", "/code/DeepGPs/models/DeepGP/layers/GPLayer.py", "/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/models/DeepGP/layers/InputLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/InputNode.py"], "/code/DeepGPs/models/DeepGP/layers/NoisyLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/NoisyNode.py"], "/code/DeepGPs/models/DeepGP/GPNetworkDJ.py": ["/code/DeepGPs/models/AbstractModel.py", "/code/DeepGPs/models/DeepGP/GPNetwork.py"], "/code/DeepGPs/optimizer/Optimizer.py": ["/code/DeepGPs/optimizer/aquisition/SMSego.py"]} |
66,028 | mhavasi/MPhil_Project | refs/heads/master | /code/DeepGPs/optimizer/aquisition/SMSego.py | from .AbstractAquisition import AbstractAquisition
import numpy as np
class SMSego(AbstractAquisition):
    """SMS-EGO acquisition for multi-objective Bayesian optimization.

    Scores candidates by the hypervolume gain of their optimistic
    (lower-confidence-bound) prediction over the current frontier, with a
    penalty for candidates epsilon-dominated by a frontier point.
    Objectives appear to be minimized (LCB = mean - gain*std, volume is
    measured from `reference` downwards).
    """
    def __init__(self, aquisitionParams):
        # Gain on the predictive std-dev in the optimistic estimate.
        self.gain = 1.0
        if ('gain' in aquisitionParams):
            self.gain = aquisitionParams['gain']
        # Tolerance for the epsilon-dominance test.
        self.epsilon = 1e-6
        if ('epsilon' in aquisitionParams):
            self.epsilon = aquisitionParams['epsilon']
        # Number of objectives.
        self.n_dim = 2
        if ('n_dim' in aquisitionParams):
            self.n_dim = aquisitionParams['n_dim']
        # Hypervolume reference point (worst acceptable objective values).
        self.reference = np.array([20, 40])
        if ('reference' in aquisitionParams):
            self.reference = aquisitionParams['reference']
    def getGoalName(self):
        """Name of the quantity this acquisition tries to improve."""
        return 'Hypervolume'
    def getGoalValue(self, frontier):
        """Hypervolume dominated by the frontier w.r.t. the reference point."""
        return self.getVolume(frontier)
    # This function is exponential in the number of dimensions
    def getVolume(self, Y):
        """Hypervolume of the region dominated by the rows of Y."""
        return self.getVolumeRecursive(Y, 0)
    def getVolumeRecursive(self, Y, dim):
        """Sweep-line hypervolume over dimension `dim`.

        Sorts the points by the current dimension (descending) and sums
        slab volumes between successive sweep positions, recursing into the
        remaining dimensions for each slab.
        """
        if (dim == Y.shape[1]-1):
            # Base case: length of the interval in the last dimension.
            return self.reference[dim] - min(Y[:, dim])
        sortedY = np.array(sorted(Y, key=lambda Y_entry: -Y_entry[dim]))
        accumulator = 0.0
        sweep = self.reference[dim]
        while (sortedY.shape[0] > 0):
            # Slab width in this dimension times the (dim+1)-volume of the
            # points still in play.
            accumulator += (sweep - sortedY[0, dim]) \
                * self.getVolumeRecursive(sortedY, dim+1)
            sweep = sortedY[0, dim]
            sortedY = sortedY[1:, :]
        return accumulator
    def getAquisitionBatch(self, X, model, frontier):
        """Score each row of X.

        Non-dominated candidates score their hypervolume improvement;
        epsilon-dominated candidates score a negative penalty based on how
        far they lie beyond the dominating frontier point.
        """
        n_points = X.shape[0]
        means, vars = model.predictBatch(X)
        # Optimistic (lower confidence bound) estimate of the objectives.
        pot_sol = means - self.gain * np.sqrt(vars)
        hv_frontier = self.getVolume(frontier)
        aquisitions = np.ones((n_points))
        for i in range(0, n_points):
            penalty = 0.0
            for k in range(0, frontier.shape[0]):
                if np.all(frontier[k, :] <= pot_sol[i, :] + self.epsilon):
                    # Candidate i is epsilon-dominated by frontier point k.
                    p = -1 + np.prod(1 +
                        np.maximum(pot_sol[i, :] - frontier[k, :],
                                   np.zeros_like(pot_sol[i, :]))
                        )
                    penalty = np.maximum(penalty, p)
            if (penalty == 0.0):
                # Non-dominated: hypervolume with the candidate added,
                # minus the current frontier's hypervolume.
                hv_pot = self.getVolume(np.vstack((pot_sol[i, :], frontier)))
                aquisitions[i] = -hv_frontier + hv_pot
            else:
                aquisitions[i] = -penalty
        return aquisitions
| {"/code/DeepGPs/models/GP/GPlib.py": ["/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/models/DeepGP/layers/GPLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py"], "/code/DeepGPs/models/DeepGP/layers/OutputLayerRegression.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/OutputNodeRegression.py"], "/code/DeepGPs/models/Random/RandomModel.py": ["/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/optimizer/aquisition/SMSego.py": ["/code/DeepGPs/optimizer/aquisition/AbstractAquisition.py"], "/code/DeepGPs/models/DeepGP/GPNetwork.py": ["/code/DeepGPs/models/DeepGP/layers/InputLayer.py", "/code/DeepGPs/models/DeepGP/layers/OutputLayerRegression.py", "/code/DeepGPs/models/DeepGP/layers/NoisyLayer.py", "/code/DeepGPs/models/DeepGP/layers/GPLayer.py", "/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/models/DeepGP/layers/InputLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/InputNode.py"], "/code/DeepGPs/models/DeepGP/layers/NoisyLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/NoisyNode.py"], "/code/DeepGPs/models/DeepGP/GPNetworkDJ.py": ["/code/DeepGPs/models/AbstractModel.py", "/code/DeepGPs/models/DeepGP/GPNetwork.py"], "/code/DeepGPs/optimizer/Optimizer.py": ["/code/DeepGPs/optimizer/aquisition/SMSego.py"]} |
66,029 | mhavasi/MPhil_Project | refs/heads/master | /code/DeepGPs/generate_likelihood_table.py | from tabulate import tabulate
import numpy as np
import re
names = ['dgps', 'dgps_shallow', 'dgps_disjoint', 'gplib', 'gplib_matern']

# Training-set sizes; one group of columns per metric (acc, eng, ll).
SIZES = ['20', '50', '100', '200', '300']

# Build one LaTeX table per results flavour ('' and '_rmse').
for ext in ['', '_rmse']:
    table = [['model'] + [20, 50, 100, 200, 300] * 3]
    for name in names:
        # Per-size value lists for each metric.
        ll = {s: [] for s in SIZES}   # log-likelihood
        acc = {s: [] for s in SIZES}  # accuracy / RMSE
        eng = {s: [] for s in SIZES}  # energy
        for run in range(1, 100):
            with open('./predictor_rmse/{}{}_{}.txt'.format(name, ext, run), 'r') as f:
                content = f.readlines()
            for row in content:
                split = row.split(' ')
                # Collect every numeric token on the line; split[0] is the
                # training-set-size key.
                lik = []
                for term in split:
                    try:
                        lik.append(float(term))
                    except ValueError:  # non-numeric token: skip
                        pass
                # Drop diverged runs (|log-likelihood| >= 40).
                if lik[1] < 40.0 and lik[1] > -40.0:
                    ll[split[0]].append(lik[1])
                    acc[split[0]].append(lik[2])
                    eng[split[0]].append(lik[3])
        line = [name]
        for metric in [acc, eng, ll]:
            for x in SIZES:
                #print(len(metric[x]))
                vals = metric[x]
                # NOTE(review): assumes >= 2 surviving runs per cell, as the
                # original code did (division by len-1 below).
                mean = sum(vals) / len(vals)
                # Unbiased sample variance.
                sigmas = sum((mean - v) ** 2 for v in vals) / (len(vals) - 1.0)
                # 1.984 ~ two-sided 95% t-value for ~100 samples.
                line.append('{0:.2f} +- {1:.2f}'.format(
                    mean, 1.984 * np.sqrt(sigmas / len(vals))))
        table.append(line)
    table = np.transpose(np.array(table))
    print(table)
    print(tabulate(table, tablefmt="latex"))
| {"/code/DeepGPs/models/GP/GPlib.py": ["/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/models/DeepGP/layers/GPLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py"], "/code/DeepGPs/models/DeepGP/layers/OutputLayerRegression.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/OutputNodeRegression.py"], "/code/DeepGPs/models/Random/RandomModel.py": ["/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/optimizer/aquisition/SMSego.py": ["/code/DeepGPs/optimizer/aquisition/AbstractAquisition.py"], "/code/DeepGPs/models/DeepGP/GPNetwork.py": ["/code/DeepGPs/models/DeepGP/layers/InputLayer.py", "/code/DeepGPs/models/DeepGP/layers/OutputLayerRegression.py", "/code/DeepGPs/models/DeepGP/layers/NoisyLayer.py", "/code/DeepGPs/models/DeepGP/layers/GPLayer.py", "/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/models/DeepGP/layers/InputLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/InputNode.py"], "/code/DeepGPs/models/DeepGP/layers/NoisyLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/NoisyNode.py"], "/code/DeepGPs/models/DeepGP/GPNetworkDJ.py": ["/code/DeepGPs/models/AbstractModel.py", "/code/DeepGPs/models/DeepGP/GPNetwork.py"], "/code/DeepGPs/optimizer/Optimizer.py": ["/code/DeepGPs/optimizer/aquisition/SMSego.py"]} |
66,030 | mhavasi/MPhil_Project | refs/heads/master | /code/DeepGPs/models/DeepGP/GPNetwork.py | from .layers.InputLayer import *
from .layers.OutputLayerRegression import *
from .layers.OutputLayerRegressionMultioutput import *
from .layers.NoisyLayer import *
from .layers.GPLayer import *
from ..AbstractModel import AbstractModel
import time
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
class GPNetwork(AbstractModel):
    """Deep Gaussian Process regression network (TensorFlow 1.x graph style).

    Stacks an input layer, a configurable sequence of GP / noisy layers, and
    a Gaussian-likelihood output layer, then trains the variational
    objective ("energy") with Adam on minibatches.
    """
    # The training_targets are the Y's which are real numbers
    def __init__(self,
                 training_data,
                 training_targets,
                 modelParams):
        """Build the TF graph for the given data and run an initial training.

        modelParams keys (all optional): maxiter, layer_types, layer_nodes,
        minibatch_size, learning_rate, retrain, early_stopping, decay_lr.
        """
        self.training_data = training_data
        self.training_targets = training_targets
        self.n_points = training_data.shape[0]
        self.input_d = training_data.shape[1]
        self.output_d = training_targets.shape[1]
        # Training epochs per call to train().
        self.maxiter = 1500
        if ('maxiter' in modelParams):
            self.maxiter = modelParams['maxiter']
        # Hidden-layer kinds, in order: 'gp' or 'noisy'.
        self.layer_types = ['gp', 'gp']
        if ('layer_types' in modelParams):
            self.layer_types = modelParams['layer_types']
        # Number of nodes (output dims) per hidden layer.
        self.layer_nodes = [self.input_d, self.output_d]
        if ('layer_nodes' in modelParams):
            self.layer_nodes = modelParams['layer_nodes']
        self.minibatch_size = 500
        if ('minibatch_size' in modelParams):
            self.minibatch_size = modelParams['minibatch_size']
        self.learning_rate = 0.01
        if ('learning_rate' in modelParams):
            self.learning_rate = modelParams['learning_rate']
        # Rebuild the graph from scratch every `retrain` calls to train()
        # (see train(); 0 leaves the counter never matching after the first call).
        self.retrain = 0
        if ('retrain' in modelParams):
            self.retrain = modelParams['retrain']
        self.retrain_counter = 0
        self.early_stopping = False
        if ('early_stopping' in modelParams):
            self.early_stopping = modelParams['early_stopping']
        # Use a piecewise-constant decaying learning rate instead of a fixed one.
        self.decay_lr = False
        if ('decay_lr' in modelParams):
            self.decay_lr = modelParams['decay_lr']
        self.resetGraph()
        print("Start training")
        self.train()
    def addPoint(self, x, y):
        """Prepend one observation and continue training on the grown set."""
        self.training_data = np.vstack((x, self.training_data))
        self.training_targets = np.vstack((y, self.training_targets))
        self.n_points += 1
        # Keep the in-graph point count in sync (consumed by the GP layers).
        self.session.run(self.n_points_tf.assign(self.n_points))
        self.train()
    def predictBatch(self, test_data):
        """Return (means, vars) evaluated at test_data in prediction mode."""
        self.session.run(self.set_for_training.assign(0.0))
        fd = {self.data_placeholder:test_data}
        return self.session.run((self.output_mean, self.output_var),
                                feed_dict=fd)
    def addInputLayer(self):
        # Must be the very first layer.
        assert len(self.layers) == 0
        self.layers.append(InputLayer(self.data_placeholder))
    def addNoisyLayer(self):
        # Feeds on the previous layer's (means, vars) output.
        assert len(self.layers) != 0
        means, vars = self.layers[-1].getOutput()
        new_layer = NoisyLayer(means, vars)
        self.layers.append(new_layer)
    def addGPLayer(self, n_inducing_points, n_nodes=1, initial=None):
        """Append a GP layer fed by the previous layer's output.

        `initial`, if given, provides candidate rows for inducing-point
        initialization.
        """
        assert len(self.layers) != 0
        means, vars = self.layers[-1].getOutput()
        new_layer = GPLayer(self.n_points_tf,
                            n_inducing_points,
                            n_nodes,
                            means,
                            vars,
                            self.set_for_training,
                            initial)
        self.layers.append(new_layer)
    def addOutputLayerRegression(self):
        # Final Gaussian-likelihood layer; exposes the network's predictions.
        assert len(self.layers) != 0
        means, vars = self.layers[-1].getOutput()
        new_layer = OutputLayerRegression(self.target_placeholder,
                                          means,
                                          vars)
        self.layers.append(new_layer)
        self.output_mean, self.output_var = new_layer.getOutput()
    def resetGraph(self):
        """(Re)build the whole TF graph, optimizer and session from scratch."""
        tf.reset_default_graph()
        print('Initializing computation graphs')
        # Number of training points as a non-trainable graph variable.
        self.n_points_tf = tf.Variable(self.n_points,
                                       trainable=False,
                                       dtype=tf.float32)
        # 1.0 during training, 0.0 for prediction (switches layer behavior).
        self.set_for_training = tf.Variable(1.0,
                                            trainable=False,
                                            dtype=tf.float32)
        self.data_placeholder = tf.placeholder(tf.float32,
                                               [None, self.input_d])
        self.target_placeholder = tf.placeholder(tf.float32,
                                                 [None, self.output_d])
        self.layers = []
        self.addInputLayer()
        for l in range(0, len(self.layer_types)):
            print('Layer {0}'.format(l))
            # Inducing points: 10% of the data, but at least 5.
            gp_points = max(int(np.ceil(0.1 * self.n_points)), 5)
            if (l == 0):
                # First hidden layer is always a GP layer initialized from
                # the training inputs (regardless of layer_types[0]).
                self.addGPLayer(gp_points,
                                self.layer_nodes[l],
                                initial=self.training_data)
            elif (self.layer_types[l] == 'gp'):
                self.addGPLayer(gp_points, self.layer_nodes[l])
            elif (self.layer_types[l] == 'noisy'):
                self.addNoisyLayer()
        self.addOutputLayerRegression()
        # Total objective = sum of per-layer energy contributions.
        layer_energies = [l.getEnergyContribution() for l in self.layers]
        self.energy = tf.add_n(layer_energies)
        if (self.decay_lr):
            global_step = tf.Variable(0, trainable=False)
            # Step-wise learning-rate decay schedule (per optimizer step).
            boundaries = [1500, 3000, 6000]
            values = [0.01, 0.003, 0.001, 0.0003]
            self.actual_learning_rate = tf.train.piecewise_constant(global_step, boundaries, values)
            adam = tf.train.AdamOptimizer(self.actual_learning_rate)
            # Minimizing -energy == maximizing the variational bound.
            self.optimizer = adam.minimize(-self.energy, global_step=global_step)
        else:
            adam = tf.train.AdamOptimizer(self.learning_rate)
            self.optimizer = adam.minimize(-self.energy)
        print("Initializing variables")
        init_op = tf.global_variables_initializer()
        self.session = tf.Session()
        self.session.run(init_op)
    def train(self):
        """Run up to maxiter epochs of minibatch Adam on the energy."""
        self.retrain_counter += 1
        print('{0} iterations until retrain'.format(self.retrain - self.retrain_counter))
        if (self.retrain_counter == self.retrain):
            # Periodically rebuild the graph (re-initializes all variables).
            self.retrain_counter = 0
            self.resetGraph()
        if (self.decay_lr):
            print('Learning rate: %f' % (self.session.run(self.actual_learning_rate)))
        self.session.run(self.set_for_training.assign(1.0))
        n_batches = int(np.ceil(1.0 * self.n_points / self.minibatch_size))
        last_energy = 0.0
        failed_to_improve = False
        for iter in range(self.maxiter):
            # Reshuffle the data each epoch.
            suffle = np.random.permutation(self.n_points)
            training_data = self.training_data[ suffle, : ]
            training_targets = self.training_targets[ suffle, : ]
            start_epoch = time.time()
            epoch_energy = 0.0
            for i in range(n_batches):
                start_i = i * self.minibatch_size
                end_i = min((i + 1) * self.minibatch_size, self.n_points)
                minibatch_data = training_data[start_i : end_i, : ]
                minibatch_targets = training_targets[start_i : end_i, : ]
                fd = {
                    self.data_placeholder:minibatch_data,
                    self.target_placeholder:minibatch_targets
                }
                _, e = self.session.run((self.optimizer, self.energy),
                                        feed_dict=fd)
                epoch_energy += e
            if (iter % 50 == 0):
                print('Epoch: {}, - Energy: {} Time: {}'
                      .format(iter, epoch_energy, time.time() - start_epoch))
            # Stop after two consecutive epochs without energy improvement,
            # but only when early_stopping is enabled.
            if (last_energy >= epoch_energy and failed_to_improve and self.early_stopping):
                print('Early stopping')
                break
            else:
                failed_to_improve = (last_energy >= epoch_energy)
                last_energy = epoch_energy
| {"/code/DeepGPs/models/GP/GPlib.py": ["/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/models/DeepGP/layers/GPLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py"], "/code/DeepGPs/models/DeepGP/layers/OutputLayerRegression.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/OutputNodeRegression.py"], "/code/DeepGPs/models/Random/RandomModel.py": ["/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/optimizer/aquisition/SMSego.py": ["/code/DeepGPs/optimizer/aquisition/AbstractAquisition.py"], "/code/DeepGPs/models/DeepGP/GPNetwork.py": ["/code/DeepGPs/models/DeepGP/layers/InputLayer.py", "/code/DeepGPs/models/DeepGP/layers/OutputLayerRegression.py", "/code/DeepGPs/models/DeepGP/layers/NoisyLayer.py", "/code/DeepGPs/models/DeepGP/layers/GPLayer.py", "/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/models/DeepGP/layers/InputLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/InputNode.py"], "/code/DeepGPs/models/DeepGP/layers/NoisyLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/NoisyNode.py"], "/code/DeepGPs/models/DeepGP/GPNetworkDJ.py": ["/code/DeepGPs/models/AbstractModel.py", "/code/DeepGPs/models/DeepGP/GPNetwork.py"], "/code/DeepGPs/optimizer/Optimizer.py": ["/code/DeepGPs/optimizer/aquisition/SMSego.py"]} |
66,031 | mhavasi/MPhil_Project | refs/heads/master | /code/DeepGPs/models/DeepGP/layers/InputLayer.py | from .BaseLayer import BaseLayer
from ..nodes.InputNode import *
import tensorflow as tf
import numpy as np
class InputLayer(BaseLayer):
    """First layer: wraps the data placeholder as a zero-variance input."""

    def __init__(self, data_placeholder):
        means = data_placeholder
        # Observed inputs are treated as exact, i.e. zero variance.
        variances = tf.zeros_like(data_placeholder)
        self.input_means = means
        self.input_vars = variances
        BaseLayer.__init__(self, means, variances)
        node = InputNode(means, variances)
        self.input_node = node
        self.output_means, self.output_vars = node.getOutput()

    def getEnergyContribution(self):
        return self.input_node.getEnergyContribution()

    def getOutput(self):
        return self.output_means, self.output_vars
| {"/code/DeepGPs/models/GP/GPlib.py": ["/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/models/DeepGP/layers/GPLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py"], "/code/DeepGPs/models/DeepGP/layers/OutputLayerRegression.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/OutputNodeRegression.py"], "/code/DeepGPs/models/Random/RandomModel.py": ["/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/optimizer/aquisition/SMSego.py": ["/code/DeepGPs/optimizer/aquisition/AbstractAquisition.py"], "/code/DeepGPs/models/DeepGP/GPNetwork.py": ["/code/DeepGPs/models/DeepGP/layers/InputLayer.py", "/code/DeepGPs/models/DeepGP/layers/OutputLayerRegression.py", "/code/DeepGPs/models/DeepGP/layers/NoisyLayer.py", "/code/DeepGPs/models/DeepGP/layers/GPLayer.py", "/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/models/DeepGP/layers/InputLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/InputNode.py"], "/code/DeepGPs/models/DeepGP/layers/NoisyLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/NoisyNode.py"], "/code/DeepGPs/models/DeepGP/GPNetworkDJ.py": ["/code/DeepGPs/models/AbstractModel.py", "/code/DeepGPs/models/DeepGP/GPNetwork.py"], "/code/DeepGPs/optimizer/Optimizer.py": ["/code/DeepGPs/optimizer/aquisition/SMSego.py"]} |
66,032 | mhavasi/MPhil_Project | refs/heads/master | /code/DeepGPs/dgplot.py | from models.DeepGP.GPNetwork import GPNetwork
import numpy as np
import pylab
import matplotlib.pyplot as plt
range_ = 1.0
def modelplot(model, node, xx, yy, name):
    """Plot one GP node's predictive mean over the (xx, yy) grid, with its
    inducing points, and save the figure under figs/<name>.{eps,pdf,png}."""
    rows, cols = xx.shape
    # Flatten the mesh into an (N, 2) array of input points (row-major).
    data = np.array([[xx[i, j], yy[i, j]]
                     for i in range(rows)
                     for j in range(cols)])
    # Switch the network to prediction mode before evaluating the node.
    model.session.run(model.set_for_training.assign(0.0))
    feed = {node.input_means: data,
            node.input_vars: np.zeros_like(data)}
    pred, inducing = model.session.run((node.output_means, node.z),
                                       feed_dict=feed)
    # Un-flatten the predictions back onto the grid, in the same order.
    zz = pylab.zeros(xx.shape)
    flat_idx = 0
    for i in range(rows):
        for j in range(cols):
            zz[i, j] = pred[flat_idx, 0]
            flat_idx += 1
    pylab.figure()
    pylab.pcolor(xx, yy, zz, cmap='RdBu', vmin=-range_, vmax=range_)
    pylab.colorbar()
    # Overlay the node's inducing-point locations.
    plt.scatter(inducing[:, 0], inducing[:, 1], c='k')
    plt.xlim(-range_, range_)
    plt.ylim(-range_, range_)
    plt.savefig(r'figs/{0}.eps'.format(name))
    plt.savefig(r'figs/{0}.pdf'.format(name))
    plt.savefig(r'figs/{0}.png'.format(name))
def prettyplot(f):
    """Plot the target function f, train a DGP on a 25x25 grid sampled from
    it, plot the model's predictions, and plot selected internal GP nodes.

    NOTE(review): indentation was flattened in this extract; the statements
    below (which use the parameter `f` and the locals `xx`, `yy`) belong to
    this function's body -- confirm against the original file.
    """
    # Fine 100x100 evaluation grid over [-range_, range_]^2.
    xx, yy = pylab.meshgrid(pylab.linspace(-range_,range_, 100),
                            pylab.linspace(-range_,range_, 100))
    zz = pylab.zeros(xx.shape)
    for i in range(xx.shape[0]):
        for j in range(xx.shape[1]):
            zz[i,j] = f(np.array([xx[i,j], yy[i,j]]))
    # Plot the raw target function.
    pylab.pcolor(xx,yy,zz, cmap='RdBu', vmin=-range_, vmax=range_)
    pylab.colorbar()
    plt.xlim(-1, 1)
    plt.ylim(-1, 1)
    plt.xlabel(r'$x_1$')
    plt.ylabel(r'$x_2$')
    plt.title('Training data')
    plt.savefig('figs/orig_data.eps')
    plt.savefig('figs/orig_data.pdf')
    plt.savefig('figs/orig_data.png')
    # Coarser 25x25 grid used as training data for the DGP.
    trainx, trainy = pylab.meshgrid(pylab.linspace(-1,1, 25),
                                    pylab.linspace(-1,1, 25))
    modelParams = {'model':'dgp',
                   'maxiter': 300,
                   'minibatch_size': 300,
                   'layer_types': ['gp', 'noisy', 'gp', 'noisy'],
                   'layer_nodes': [2, 1, 2, 1],
                   'early_stopping': False}
    training_data = []
    training_targets = []
    for i in range(trainx.shape[0]):
        for j in range(trainx.shape[1]):
            training_data.append([trainx[i, j], trainy[i, j]])
            training_targets.append(f(np.array([trainx[i,j], trainy[i,j]])).flatten())
    model = GPNetwork(np.array(training_data), np.array(training_targets), modelParams)
    # Evaluate the trained model on the fine grid, in prediction mode.
    zz = pylab.zeros(xx.shape)
    data = []
    for i in range(xx.shape[0]):
        for j in range(xx.shape[1]):
            data.append([xx[i,j], yy[i,j]])
    data = np.array(data)
    model.session.run(model.set_for_training.assign(0.0))
    fd = {model.data_placeholder: data}
    pred = model.session.run((model.output_mean),
                             feed_dict=fd)
    # Un-flatten predictions back onto the grid (same row-major order).
    k = 0
    for i in range(xx.shape[0]):
        for j in range(xx.shape[1]):
            zz[i, j] = pred[k, 0]
            k += 1
    pylab.figure()
    pylab.pcolor(xx,yy,zz, cmap='RdBu', vmin=-range_, vmax=range_)
    pylab.colorbar()
    plt.xlim(-1, 1)
    plt.ylim(-1, 1)
    plt.title('DGP model')
    plt.xlabel(r'$x_1$')
    plt.ylabel(r'$x_2$')
    plt.savefig(r'figs/dgp_model.eps')
    plt.savefig(r'figs/dgp_model.pdf')
    plt.savefig(r'figs/dgp_model.png')
    # Also plot selected internal GP nodes: (layer index, node index).
    for i, k in [(1, 0), (1, 1), (3, 0)]:
        modelplot(model, model.layers[i].nodes[k], xx, yy, 'layer{0}node{1}'.format(i, k))
| {"/code/DeepGPs/models/GP/GPlib.py": ["/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/models/DeepGP/layers/GPLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py"], "/code/DeepGPs/models/DeepGP/layers/OutputLayerRegression.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/OutputNodeRegression.py"], "/code/DeepGPs/models/Random/RandomModel.py": ["/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/optimizer/aquisition/SMSego.py": ["/code/DeepGPs/optimizer/aquisition/AbstractAquisition.py"], "/code/DeepGPs/models/DeepGP/GPNetwork.py": ["/code/DeepGPs/models/DeepGP/layers/InputLayer.py", "/code/DeepGPs/models/DeepGP/layers/OutputLayerRegression.py", "/code/DeepGPs/models/DeepGP/layers/NoisyLayer.py", "/code/DeepGPs/models/DeepGP/layers/GPLayer.py", "/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/models/DeepGP/layers/InputLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/InputNode.py"], "/code/DeepGPs/models/DeepGP/layers/NoisyLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/NoisyNode.py"], "/code/DeepGPs/models/DeepGP/GPNetworkDJ.py": ["/code/DeepGPs/models/AbstractModel.py", "/code/DeepGPs/models/DeepGP/GPNetwork.py"], "/code/DeepGPs/optimizer/Optimizer.py": ["/code/DeepGPs/optimizer/aquisition/SMSego.py"]} |
66,033 | mhavasi/MPhil_Project | refs/heads/master | /code/DeepGPs/models/DeepGP/layers/NoisyLayer.py | from .BaseLayer import *
from ..nodes.NoisyNode import *
import tensorflow as tf
import numpy as np
class NoisyLayer(BaseLayer):
    """Network layer that wraps a single NoisyNode.

    The node's output moments are computed once at construction time and
    served from cached attributes afterwards.
    """

    def __init__(self, input_means, input_vars):
        BaseLayer.__init__(self, input_means, input_vars)
        self.noisy_node = NoisyNode(input_means, input_vars)
        # Cache the node's output so repeated getOutput() calls are free.
        cached_moments = self.noisy_node.getOutput()
        self.output_means, self.output_vars = cached_moments

    def getOutput(self):
        """Return the cached (means, vars) pair produced by the node."""
        return self.output_means, self.output_vars

    def getEnergyContribution(self):
        """Delegate the energy term to the wrapped node."""
        return self.noisy_node.getEnergyContribution()
| {"/code/DeepGPs/models/GP/GPlib.py": ["/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/models/DeepGP/layers/GPLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py"], "/code/DeepGPs/models/DeepGP/layers/OutputLayerRegression.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/OutputNodeRegression.py"], "/code/DeepGPs/models/Random/RandomModel.py": ["/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/optimizer/aquisition/SMSego.py": ["/code/DeepGPs/optimizer/aquisition/AbstractAquisition.py"], "/code/DeepGPs/models/DeepGP/GPNetwork.py": ["/code/DeepGPs/models/DeepGP/layers/InputLayer.py", "/code/DeepGPs/models/DeepGP/layers/OutputLayerRegression.py", "/code/DeepGPs/models/DeepGP/layers/NoisyLayer.py", "/code/DeepGPs/models/DeepGP/layers/GPLayer.py", "/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/models/DeepGP/layers/InputLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/InputNode.py"], "/code/DeepGPs/models/DeepGP/layers/NoisyLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/NoisyNode.py"], "/code/DeepGPs/models/DeepGP/GPNetworkDJ.py": ["/code/DeepGPs/models/AbstractModel.py", "/code/DeepGPs/models/DeepGP/GPNetwork.py"], "/code/DeepGPs/optimizer/Optimizer.py": ["/code/DeepGPs/optimizer/aquisition/SMSego.py"]} |
66,034 | mhavasi/MPhil_Project | refs/heads/master | /code/DeepGPs/models/DeepGP/GPNetworkDJ.py | from ..AbstractModel import AbstractModel
from .GPNetwork import GPNetwork
import time
import tensorflow as tf
import numpy as np
class GPNetworkDJ(AbstractModel):
    """Disjoint deep-GP model: one independent GPNetwork per output column.

    The training targets Y are real-valued; column i of Y is modelled by
    ``self.models[i]``.
    """

    def __init__(self,
                 training_data,
                 training_targets,
                 modelParams):
        self.training_data = training_data
        self.training_targets = training_targets
        self.n_points = training_data.shape[0]
        self.input_d = training_data.shape[1]
        self.output_d = training_targets.shape[1]
        # One single-output network per target column.
        self.models = [
            GPNetwork(self.training_data,
                      self.training_targets[:, dim:(dim + 1)],
                      modelParams)
            for dim in range(self.output_d)
        ]

    def addPoint(self, x, y):
        """Feed the new observation to every per-output sub-model."""
        for dim, sub_model in enumerate(self.models):
            sub_model.addPoint(x, y[:, dim:(dim + 1)])

    def predictBatch(self, test_data):
        """Predict all outputs; returns (means, vars) with one column per model."""
        n_rows = test_data.shape[0]
        # Start from (n_rows, 0) arrays so column-wise concatenation works
        # regardless of the number of sub-models.
        means = np.array([[]] * n_rows)
        variances = np.array([[]] * n_rows)
        for sub_model in self.models:
            mean_col, var_col = sub_model.predictBatch(test_data)
            means = np.concatenate((means, mean_col), axis=1)
            variances = np.concatenate((variances, var_col), axis=1)
        return means, variances
| {"/code/DeepGPs/models/GP/GPlib.py": ["/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/models/DeepGP/layers/GPLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py"], "/code/DeepGPs/models/DeepGP/layers/OutputLayerRegression.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/OutputNodeRegression.py"], "/code/DeepGPs/models/Random/RandomModel.py": ["/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/optimizer/aquisition/SMSego.py": ["/code/DeepGPs/optimizer/aquisition/AbstractAquisition.py"], "/code/DeepGPs/models/DeepGP/GPNetwork.py": ["/code/DeepGPs/models/DeepGP/layers/InputLayer.py", "/code/DeepGPs/models/DeepGP/layers/OutputLayerRegression.py", "/code/DeepGPs/models/DeepGP/layers/NoisyLayer.py", "/code/DeepGPs/models/DeepGP/layers/GPLayer.py", "/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/models/DeepGP/layers/InputLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/InputNode.py"], "/code/DeepGPs/models/DeepGP/layers/NoisyLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/NoisyNode.py"], "/code/DeepGPs/models/DeepGP/GPNetworkDJ.py": ["/code/DeepGPs/models/AbstractModel.py", "/code/DeepGPs/models/DeepGP/GPNetwork.py"], "/code/DeepGPs/optimizer/Optimizer.py": ["/code/DeepGPs/optimizer/aquisition/SMSego.py"]} |
66,035 | mhavasi/MPhil_Project | refs/heads/master | /code/DeepGPs/main.py | import matplotlib
matplotlib.use('Agg')
from optimizer.Optimizer import optimize
from dgplot import prettyplot
import numpy as np
# import matplotlib.pyplot as plt
import sys
def fcliff(x):
    """Cliff test objective: a radial Gaussian bump whose sign flips at x[0] == 0.

    Parameters
    ----------
    x : 1-D array-like
        Input point.

    Returns
    -------
    numpy.ndarray of shape (1, 1)
        ``-0.9*exp(-3*||x||^2)`` when ``x[0] <= 0``, the positive value otherwise.
    """
    # Fix: dropped the unused local `result = []` from the original.
    magnitude = 0.9 * np.exp(-3.0 * np.dot(x, x))
    # The discontinuity ("cliff") sits on the hyperplane x[0] == 0.
    if x[0] <= 0.0:
        return np.array([[-magnitude]])
    return np.array([[magnitude]])
# prettyplot(fcliff)
def f(x):
    """Two-objective toy function: squared distances of x from +0.5 and -0.5."""
    first_objective = (x - 0.5) ** 2
    second_objective = (x + 0.5) ** 2
    return np.array([first_objective, second_objective])
from sklearn import preprocessing
from scipy.stats import multivariate_normal
from models.DeepGP.GPNetwork import GPNetwork
from models.DeepGP.GPNetworkDJ import GPNetworkDJ
from models.GP.GaussianProcess import GaussianProcess
from models.GP.GaussianProcessDJ import GaussianProcessDJ
from models.Random.RandomModel import RandomModel
from models.dgps.dgps_net import Dgps_net
from models.dgps.dgps_net_dj import Dgps_netDJ
from models.GP.GPlibDJ import GPlibDJ
# Load the precomputed random evaluations: each line holds 13 input features
# followed by 2 objective values, separated by single spaces.
# NOTE(review): the handle name `re` shadows the stdlib `re` module here.
with open("/home/hava/MPhil_Project/code/DeepGPs/random_evaluations.txt", "r") as re:
    re_proc = []
    iss = []  # raw input rows (13 floats each)
    oss = []  # raw objective rows (2 floats each)
    for line in re:
        line_split = line.split(' ')
        i = [float(x) for x in line_split[0:13]]
        o = [float(x) for x in line_split[13:15]]
        iss.append(i)
        oss.append(o)
    iss = np.array(iss)
    oss = np.array(oss)
    # Standardise the inputs to zero mean / unit variance; objectives are
    # deliberately left unscaled.
    iss = preprocessing.scale(iss)
    #oss = preprocessing.scale(oss)
    # Pair each scaled input with its objectives for lookup by fre().
    for i in range(0, iss.shape[0]):
        re_proc.append((iss[i, :], oss[i, :]))
    re_proc = np.array(re_proc)
    print(re_proc)
def fre(x):
    """Look up the precomputed objective values for input point *x*.

    Scans the global ``re_proc`` table for an input row matching *x* within
    tolerance and returns the associated objective vector.

    Raises
    ------
    LookupError
        If no stored evaluation matches *x*.
    """
    for stored_input, stored_output in re_proc:
        if np.allclose(stored_input, x, atol=1e-08):
            return stored_output
    # Bug fix: the original ended with `assert(True)`, a no-op that let the
    # function fall through and silently return None on a failed lookup.
    raise LookupError("no precomputed evaluation found for input {0}".format(x))
def freinv(x):
    """Inverse lookup: return the stored input whose objectives equal *x*.

    Scans the global ``re_proc`` table for an objective row matching *x*
    within tolerance and returns the associated input vector.

    Raises
    ------
    LookupError
        If no stored evaluation has objectives matching *x*.
    """
    for stored_input, stored_output in re_proc:
        if np.allclose(stored_output, x, atol=1e-08):
            return stored_input
    # Bug fix: the original ended with `assert(True)`, a no-op that let the
    # function fall through and silently return None on a failed lookup.
    raise LookupError("no precomputed evaluation with objectives {0}".format(x))
# Model / acquisition configuration for the 1-D demo run.
# Bug fix: optimize() dispatches on 'gp' (-> GPlib); the original key
# 'gplib' is not recognised and left `model` undefined inside optimize().
# maxiter/retrain mirror the 'gp' configuration used in single_experiment.py
# -- TODO confirm these defaults against GPlib.
modelParams = {'model': 'gp',
               'maxiter': 40000,
               'retrain': 1}
aquisitionParams = {'aquisition':'SMSego',
                    'gain': 2.0}
# Bug fix: optimize() takes a mandatory output_dir argument; the original
# call omitted it (TypeError).  Pass None to disable checkpoint files.
frontier, curve = optimize(f, np.array([[i] for i in np.linspace(-1, 1, 500)]),
                           modelParams, aquisitionParams, 5, 1, None)
# plt.figure(3)
# ax = plt.gca()
# plt.gca().grid(True)
# ind = frontier[:, 0].argsort()
# frontier = frontier[ind, :]
# plt.plot(frontier[:, 0], frontier[:, 1], 'gs')
# flist = []
# current_point = [0, 10]
# for i in range(0, len(frontier)):
# current_point[0] = frontier[i, 0]
# flist.append(current_point)
# flist.append(frontier[i, :])
# current_point = current_point.copy()
# current_point[1] = frontier[i, 1]
# current_point[0] = 10
# flist.append(current_point)
# flist = np.array(flist)
# plt.plot(flist[:, 0], flist[:, 1], 'g-')
# ax.fill_between(flist[:, 0], 10, flist[:, 1], facecolor='green', alpha=0.5, hatch='//')
# plt.xlabel('Obj 1')
# plt.ylabel('Obj 2')
# plt.title('Pareto frontier')
# plt.draw()
# plt.figure()
# plt.gca().grid(True)
# plt.plot(range(0, len(curve)), curve, 'bo-')
# plt.ylabel('Hypervolume')
# plt.xlabel('Iterations')
# plt.title('Increasing hypervolume over the iterations')
# plt.savefig('curve.eps')
# used = np.array(used)
# plt.figure()
# plt.gca().grid(True)
# plt.scatter(frontier[:, 0], frontier[:, 1], c='k', marker='s')
# plt.scatter(used[:, 0], used[:, 1], c='k', marker='x')
# plt.xlabel('Accuracy')
# plt.ylabel('Power consumption')
# plt.title('Evaluated points')
# plt.savefig('points.eps')
# plt.show()
| {"/code/DeepGPs/models/GP/GPlib.py": ["/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/models/DeepGP/layers/GPLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py"], "/code/DeepGPs/models/DeepGP/layers/OutputLayerRegression.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/OutputNodeRegression.py"], "/code/DeepGPs/models/Random/RandomModel.py": ["/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/optimizer/aquisition/SMSego.py": ["/code/DeepGPs/optimizer/aquisition/AbstractAquisition.py"], "/code/DeepGPs/models/DeepGP/GPNetwork.py": ["/code/DeepGPs/models/DeepGP/layers/InputLayer.py", "/code/DeepGPs/models/DeepGP/layers/OutputLayerRegression.py", "/code/DeepGPs/models/DeepGP/layers/NoisyLayer.py", "/code/DeepGPs/models/DeepGP/layers/GPLayer.py", "/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/models/DeepGP/layers/InputLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/InputNode.py"], "/code/DeepGPs/models/DeepGP/layers/NoisyLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/NoisyNode.py"], "/code/DeepGPs/models/DeepGP/GPNetworkDJ.py": ["/code/DeepGPs/models/AbstractModel.py", "/code/DeepGPs/models/DeepGP/GPNetwork.py"], "/code/DeepGPs/optimizer/Optimizer.py": ["/code/DeepGPs/optimizer/aquisition/SMSego.py"]} |
66,036 | mhavasi/MPhil_Project | refs/heads/master | /code/DeepGPs/models/DeepGP/nodes/InputNode.py | from .BaseNode import *
import tensorflow as tf
import numpy as np
class InputNode(BaseNode):
    """Leaf node that feeds raw data into the network.

    The output mean is the data placeholder itself and the output variance
    is identically zero, so inputs are treated as noise-free.
    """

    def __init__(self, input_means, input_vars):
        BaseNode.__init__(self, input_means, input_vars)
        self.data_placeholder = input_means

    def getOutput(self):
        """Return (data, zero-variance) tensors."""
        zero_variance = tf.zeros_like(self.data_placeholder)
        return self.data_placeholder, zero_variance

    def getEnergyContribution(self):
        """Input nodes contribute nothing to the model energy."""
        return tf.constant(0.0, tf.float32, [1, 1])
66,037 | mhavasi/MPhil_Project | refs/heads/master | /code/DeepGPs/optimizer/Optimizer.py | from models.DeepGP.GPNetwork import GPNetwork
from models.DeepGP.GPNetworkDJ import GPNetworkDJ
from models.GP.GPlib import GPlib
from models.Random.RandomModel import RandomModel
from .aquisition.SMSego import SMSego
import numpy as np
import time
import matplotlib.pyplot as plt
import sys
import os.path
def optimize(f,
             candidates,
             modelParams,
             aquisitionParams,
             init_eval,
             max_eval,
             output_dir,
             plots=False):
    """Run multi-objective Bayesian optimisation over a finite candidate set.

    Parameters
    ----------
    f : callable
        Objective function; maps one candidate row to its objective vector.
    candidates : numpy.ndarray
        Pool of candidate points, one per row; evaluated points are removed.
    modelParams : dict
        Surrogate configuration; ``modelParams['model']`` selects among
        'dgp', 'dgp_dj', 'gp' and 'rnd'.
    aquisitionParams : dict
        Acquisition configuration; ``aquisitionParams['aquisition']`` must
        be 'SMSego'.
    init_eval : int
        Number of random initial evaluations (ignored when resuming).
    max_eval : int
        Number of optimisation iterations.
    output_dir : str or None
        Directory used to checkpoint/resume state; None disables file output.
    plots : bool, optional
        If True, draw diagnostic plots every iteration.

    Returns
    -------
    (numpy.ndarray, numpy.ndarray)
        The final Pareto frontier and the goal-value (hypervolume) curve.

    Raises
    ------
    ValueError
        On an unrecognised model or acquisition name.
    """
    print('Initializing models')
    # Resume from a previous run if checkpoint files exist in output_dir.
    if os.path.isfile('{0}/points.txt'.format(output_dir)):
        with open('{0}/points.txt'.format(output_dir)) as file:
            init_points = []
            for line in file:  # read rest of lines
                init_points.append([float(x) for x in line.split()])
            init_points = np.array(init_points)
        with open('{0}/candidates.txt'.format(output_dir)) as file:
            candidates = []
            for line in file:  # read rest of lines
                candidates.append([float(x) for x in line.split()])
            candidates = np.array(candidates)
    else:
        # Fresh start: sample init_eval random candidates (possibly with
        # repeats) and remove them from the pool.
        init_index = np.random.randint(0, candidates.shape[0], (init_eval))
        init_points = candidates[init_index, :]
        candidates = np.delete(candidates, init_index, 0)
    print('Initital Points')
    init_values = []
    for point in init_points:
        init_values.append(f(point))
    init_values = np.reshape(np.array(init_values), (len(init_values), -1))
    print(init_values)
    # Surrogate model selection.
    if modelParams['model'] == 'dgp':
        model = GPNetwork(init_points, init_values, modelParams)
    elif modelParams['model'] == 'dgp_dj':
        model = GPNetworkDJ(init_points, init_values, modelParams)
    elif modelParams['model'] == 'gp':
        model = GPlib(init_points, init_values, modelParams)
    elif modelParams['model'] == 'rnd':
        model = RandomModel(init_points, init_values, modelParams)
    else:
        # Bug fix: previously this branch only printed a message, which led
        # to a confusing NameError on `model` a few lines later.
        raise ValueError('Unspecified model name')
    # Acquisition-function selection.
    if aquisitionParams['aquisition'] == 'SMSego':
        aquisition_function = SMSego(aquisitionParams)
    else:
        # Bug fix: same silent-print problem as the model dispatch above.
        raise ValueError('Unspecified aquisition function')
    # Iteration state: current Pareto frontier and goal-value curve.
    frontier = find_frontier(init_values)
    if os.path.isfile('{0}/curve.txt'.format(output_dir)):
        curve = []
        with open('{0}/curve.txt'.format(output_dir)) as file:
            for line in file:  # read rest of lines
                curve.append(float(line))
    else:
        curve = [aquisition_function.getGoalValue(frontier)]
    for iteration in range(0, max_eval):
        iter_start = time.time()
        print('Predicting')
        pred_means, pred_vars = model.predictBatch(candidates)
        #print(pred_vars)
        pred_vars = np.sqrt(pred_vars)  # variances -> standard deviations
        if plots:
            # Diagnostic plot 1: per-objective predictions with +/- 1 std bands.
            plt.figure(1)
            plt.clf()
            plt.gca().grid(True)
            print('Plotting')
            ind = candidates[:, 0].argsort()
            plt.plot(candidates[ind], pred_means[ind, 0], 'b-', label='Obj 1')
            plt.plot(candidates[ind], pred_means[ind, 0] - pred_vars[ind, 0], 'b--')
            plt.plot(candidates[ind], pred_means[ind, 0] + pred_vars[ind, 0], 'b--')
            plt.plot(candidates[ind], pred_means[ind, 1], 'g-', label='Obj 2')
            plt.plot(candidates[ind], pred_means[ind, 1] - pred_vars[ind, 1], 'g--')
            plt.plot(candidates[ind], pred_means[ind, 1] + pred_vars[ind, 1], 'g--')
            plt.xlabel('x')
            plt.ylabel('Objective')
            plt.title('Model predictions')
            plt.legend()
            plt.show()
        aquisition_values = aquisition_function.getAquisitionBatch(candidates,
                                                                   model,
                                                                   frontier)
        max_aquisition_index = np.argmax(aquisition_values)
        if plots:
            # Diagnostic plot 2: acquisition values (`ind` comes from the
            # prediction plot above; both live in the same `plots` branch).
            plt.figure(2)
            plt.clf()
            plt.gca().grid(True)
            plt.plot(candidates[ind], aquisition_values[ind], 'r-')
            plt.xlabel('x')
            plt.ylabel('Aquisition value')
            plt.title('Aquisition function')
            plt.draw()
            # Diagnostic plot 3: current Pareto frontier with the dominated
            # region shaded up to the reference point.
            plt.figure(3)
            ax = plt.gca()
            plt.gca().grid(True)
            ind = frontier[:, 0].argsort()
            frontier = frontier[ind, :]
            plt.plot(frontier[:, 0], frontier[:, 1], 'gs')
            flist = []
            reference_point = [10, 10]
            if 'reference' in aquisitionParams:
                reference_point = aquisitionParams['reference']
            current_point = [0, reference_point[1]]
            for i in range(0, len(frontier)):
                current_point[0] = frontier[i, 0]
                if (i > 0 or frontier[i, 1] > reference_point[1]):
                    flist.append(current_point)
                flist.append(frontier[i, :])
                current_point = current_point.copy()
                current_point[1] = frontier[i, 1]
            if current_point[0] < reference_point[0]:
                current_point[0] = reference_point[0]
            flist.append(current_point)
            flist = np.array(flist)
            plt.plot(flist[:, 0], flist[:, 1], 'g-')
            ax.fill_between(flist[:, 0], reference_point[1], flist[:, 1], where=reference_point[1] >= flist[:, 1], facecolor='green', alpha=0.5, hatch='//')
            plt.xlabel('Obj 1')
            plt.ylabel('Obj 2')
            plt.title('Pareto frontier')
            plt.draw()
            plt.show()
        # Evaluate the best candidate, move it from the pool into the model,
        # and refresh the frontier.  (Removed the unused local that cached
        # aquisition_values[max_aquisition_index].)
        new_point = candidates[max_aquisition_index]
        init_points = np.vstack((new_point, init_points))
        new_point_value = np.reshape(np.array(f(new_point)), (1, -1))
        print('New point at {0}'.format(new_point_value))
        candidates = np.delete(candidates, max_aquisition_index, 0)
        model.addPoint(new_point, new_point_value)
        frontier = find_frontier(np.vstack((new_point_value, frontier)))
        print('Iter {0}, {1} improved to {2} in {3} time'
              .format(iteration,
                      aquisition_function.getGoalName(),
                      aquisition_function.getGoalValue(frontier),
                      time.time() - iter_start)
              )
        curve.append(aquisition_function.getGoalValue(frontier))
        if output_dir is not None:
            # Bug fix: the checkpoint files were opened without ever being
            # closed; use context managers so data is flushed every iteration.
            with open('{0}/frontier.txt'.format(output_dir), 'w+') as frontierfile:
                for item in frontier:
                    frontierfile.write("%s\n" % item)
            with open('{0}/curve.txt'.format(output_dir), 'w+') as curvefile:
                for item in np.array(curve):
                    curvefile.write("%s\n" % item)
            np.savetxt('{0}/points.txt'.format(output_dir), init_points)
            np.savetxt('{0}/candidates.txt'.format(output_dir), candidates)
    return frontier, np.array(curve)
def find_frontier(init_values):
    """Return the Pareto frontier (non-dominated rows) of an objective matrix.

    A row is dominated when some other row is strictly smaller in every
    objective (minimisation).  A row never dominates itself, and ties do
    not dominate.

    Parameters
    ----------
    init_values : numpy.ndarray, shape (n, d)

    Returns
    -------
    numpy.ndarray
        The non-dominated rows, in their original order.
    """
    frontier_ind = []
    for i in range(init_values.shape[0]):
        dominated = False
        for j in range(init_values.shape[0]):
            # Strictly worse in every objective => dominated.
            # (Fix: the original wrapped this in a redundant double np.all.)
            if np.all(init_values[i, :] > init_values[j, :]):
                dominated = True
                break  # no need to keep scanning once dominated
        if not dominated:
            frontier_ind.append(i)
    # dtype=int keeps the index array valid even when no rows survive
    # (an empty default-float array cannot be used as an index).
    return init_values[np.array(frontier_ind, dtype=int), :]
| {"/code/DeepGPs/models/GP/GPlib.py": ["/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/models/DeepGP/layers/GPLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py"], "/code/DeepGPs/models/DeepGP/layers/OutputLayerRegression.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/OutputNodeRegression.py"], "/code/DeepGPs/models/Random/RandomModel.py": ["/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/optimizer/aquisition/SMSego.py": ["/code/DeepGPs/optimizer/aquisition/AbstractAquisition.py"], "/code/DeepGPs/models/DeepGP/GPNetwork.py": ["/code/DeepGPs/models/DeepGP/layers/InputLayer.py", "/code/DeepGPs/models/DeepGP/layers/OutputLayerRegression.py", "/code/DeepGPs/models/DeepGP/layers/NoisyLayer.py", "/code/DeepGPs/models/DeepGP/layers/GPLayer.py", "/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/models/DeepGP/layers/InputLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/InputNode.py"], "/code/DeepGPs/models/DeepGP/layers/NoisyLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/NoisyNode.py"], "/code/DeepGPs/models/DeepGP/GPNetworkDJ.py": ["/code/DeepGPs/models/AbstractModel.py", "/code/DeepGPs/models/DeepGP/GPNetwork.py"], "/code/DeepGPs/optimizer/Optimizer.py": ["/code/DeepGPs/optimizer/aquisition/SMSego.py"]} |
66,038 | mhavasi/MPhil_Project | refs/heads/master | /code/DeepGPs/optimizer/aquisition/AbstractAquisition.py | import abc
class AbstractAquisition(abc.ABC):
    """Interface for acquisition functions used by the optimizer.

    Bug fix: the original declared ``__metaclass__ = abc.ABCMeta``, the
    Python 2 protocol, which is ignored under Python 3 -- the abstract
    method was not actually enforced.  Deriving from ``abc.ABC`` restores
    the intended contract (subclasses must implement getAquisitionBatch).
    """

    @abc.abstractmethod
    def getAquisitionBatch(self, X, model, existingY):
        """Score every candidate row of X given the model and known values.

        (Fix: added the missing ``self`` parameter to match how concrete
        implementations are invoked.)
        """
        pass
| {"/code/DeepGPs/models/GP/GPlib.py": ["/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/models/DeepGP/layers/GPLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py"], "/code/DeepGPs/models/DeepGP/layers/OutputLayerRegression.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/OutputNodeRegression.py"], "/code/DeepGPs/models/Random/RandomModel.py": ["/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/optimizer/aquisition/SMSego.py": ["/code/DeepGPs/optimizer/aquisition/AbstractAquisition.py"], "/code/DeepGPs/models/DeepGP/GPNetwork.py": ["/code/DeepGPs/models/DeepGP/layers/InputLayer.py", "/code/DeepGPs/models/DeepGP/layers/OutputLayerRegression.py", "/code/DeepGPs/models/DeepGP/layers/NoisyLayer.py", "/code/DeepGPs/models/DeepGP/layers/GPLayer.py", "/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/models/DeepGP/layers/InputLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/InputNode.py"], "/code/DeepGPs/models/DeepGP/layers/NoisyLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/NoisyNode.py"], "/code/DeepGPs/models/DeepGP/GPNetworkDJ.py": ["/code/DeepGPs/models/AbstractModel.py", "/code/DeepGPs/models/DeepGP/GPNetwork.py"], "/code/DeepGPs/optimizer/Optimizer.py": ["/code/DeepGPs/optimizer/aquisition/SMSego.py"]} |
66,039 | mhavasi/MPhil_Project | refs/heads/master | /code/DeepGPs/models/DeepGP/layers/BaseLayer.py | import abc
class BaseLayer(abc.ABC):
    """Abstract base class for deep-GP network layers.

    Bug fix: the original declared ``__metaclass__ = abc.ABCMeta``, the
    Python 2 protocol, which is ignored under Python 3 -- the abstract
    methods were not actually enforced.  Deriving from ``abc.ABC``
    restores the intended contract.
    """

    def __init__(self, input_means, input_vars):
        # Moments of the distribution fed into this layer.
        self.input_means = input_means
        self.input_vars = input_vars

    @abc.abstractmethod
    def getEnergyContribution(self):
        """Return this layer's contribution to the model energy."""
        return 0.0

    @abc.abstractmethod
    def getOutput(self):
        """Return the (means, vars) pair produced by this layer."""
        return 0.0, 0.0
| {"/code/DeepGPs/models/GP/GPlib.py": ["/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/models/DeepGP/layers/GPLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py"], "/code/DeepGPs/models/DeepGP/layers/OutputLayerRegression.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/OutputNodeRegression.py"], "/code/DeepGPs/models/Random/RandomModel.py": ["/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/optimizer/aquisition/SMSego.py": ["/code/DeepGPs/optimizer/aquisition/AbstractAquisition.py"], "/code/DeepGPs/models/DeepGP/GPNetwork.py": ["/code/DeepGPs/models/DeepGP/layers/InputLayer.py", "/code/DeepGPs/models/DeepGP/layers/OutputLayerRegression.py", "/code/DeepGPs/models/DeepGP/layers/NoisyLayer.py", "/code/DeepGPs/models/DeepGP/layers/GPLayer.py", "/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/models/DeepGP/layers/InputLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/InputNode.py"], "/code/DeepGPs/models/DeepGP/layers/NoisyLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/NoisyNode.py"], "/code/DeepGPs/models/DeepGP/GPNetworkDJ.py": ["/code/DeepGPs/models/AbstractModel.py", "/code/DeepGPs/models/DeepGP/GPNetwork.py"], "/code/DeepGPs/optimizer/Optimizer.py": ["/code/DeepGPs/optimizer/aquisition/SMSego.py"]} |
66,040 | mhavasi/MPhil_Project | refs/heads/master | /code/DeepGPs/time_comp.py |
import numpy as np
import argparse
from sklearn import preprocessing
from scipy.stats import multivariate_normal
import time
import math
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from models.DeepGP.GPNetwork import GPNetwork
from models.DeepGP.GPNetworkDJ import GPNetworkDJ
from models.GP.GaussianProcess import GaussianProcess
from models.GP.GaussianProcessDJ import GaussianProcessDJ
from models.Random.RandomModel import RandomModel
from models.dgps.dgps_net import Dgps_net
from models.dgps.dgps_net_dj import Dgps_netDJ
from models.GP.GPlibDJ import GPlibDJ
# Load the precomputed random evaluations: each line holds 13 input features
# followed by 2 objective values, separated by single spaces.
# NOTE(review): the handle name `re` shadows the stdlib `re` module here.
with open("/home/hava/MPhil_Project/code/DeepGPs/random_evaluations.txt", "r") as re:
    re_proc = []
    iss = []  # raw input rows (13 floats each)
    oss = []  # raw objective rows (2 floats each)
    for line in re:
        line_split = line.split(' ')
        i = [float(x) for x in line_split[0:13]]
        o = [float(x) for x in line_split[13:15]]
        iss.append(i)
        oss.append(o)
    iss = np.array(iss)
    oss = np.array(oss)
    # Standardise inputs; objectives are deliberately left unscaled.
    iss = preprocessing.scale(iss)
    #oss = preprocessing.scale(oss)
    # Pair each scaled input with its objectives for lookup by fre().
    for i in range(0, iss.shape[0]):
        re_proc.append((iss[i, :], oss[i, :]))
    re_proc = np.array(re_proc)
    print(re_proc)
def fre(x):
    """Look up the precomputed objective values for input point *x*.

    Scans the global ``re_proc`` table for an input row matching *x* within
    tolerance and returns the associated objective vector.

    Raises
    ------
    LookupError
        If no stored evaluation matches *x*.
    """
    for stored_input, stored_output in re_proc:
        if np.allclose(stored_input, x, atol=1e-08):
            return stored_output
    # Bug fix: the original ended with `assert(True)`, a no-op that let the
    # function fall through and silently return None on a failed lookup.
    raise LookupError("no precomputed evaluation found for input {0}".format(x))
def freinv(x):
    """Inverse lookup: return the stored input whose objectives equal *x*.

    Raises
    ------
    LookupError
        If no stored evaluation has objectives matching *x*.
    """
    for stored_input, stored_output in re_proc:
        if np.allclose(stored_output, x, atol=1e-08):
            return stored_input
    # Bug fix: the original ended with `assert(True)`, a no-op that let the
    # function fall through and silently return None on a failed lookup.
    raise LookupError("no precomputed evaluation with objectives {0}".format(x))
# Configuration for the dgps baseline model (constructor call is commented
# out further below).
modelParams2 = {'model':'dgps'}
# Deep-GP configuration: alternating gp/noisy layers, trained one optimiser
# step at a time (maxiter=1) so per-step progress can be logged.
modelParams1 = {'model':'dgp',
                'maxiter': 1,
                'layer_types': ['gp', 'noisy', 'gp', 'noisy'],
                'layer_nodes': [2, 1, 2, 1],
                'early_stopping': False}
# doms[i] = number of evaluations that strictly dominate evaluation i in
# both objectives (minimisation), i.e. 0 means Pareto-optimal.
doms = []
for i in range(0, iss.shape[0]):
    current = 0
    for j in range(0, iss.shape[0]):
        if (oss[i, 0] > oss[j, 0] and oss[i, 1] > oss[j, 1]):
            current += 1
    doms.append(current)
doms = np.array(doms)
# Indices ordered from least- to most-dominated evaluation.
dom_inds = doms.argsort()
print(doms[dom_inds])
# For each training-set size, fit a deep GP and track train/test
# log-likelihood and model energy over 2000 training steps.
for n_points in [300]:
    #with open('random_points.txt') as file:
    #    init_points = []
    #    for line in file: # read rest of lines
    #        init_points.append([float(x) for x in line.split()])
    #    init_points = np.array(init_points)
    # Training set: n_points sampled (with replacement) from the 1200
    # least-dominated evaluations.
    train_ind = np.random.choice(dom_inds[0:1200], size=n_points)
    init_points = iss[train_ind, :]
    #with open('random_frontier.txt') as file:
    #    frontier = []
    #    for line in file: # read rest of lines
    #        line = line[1:-2]
    #        frontier.append([float(x) for x in line.split()])
    #    frontier_points = np.array([freinv(np.array(l)) for l in frontier])
    # Test set: the remaining least-dominated evaluations.
    test_ind = [x for x in dom_inds[0:1200] if x not in train_ind]
    frontier_points = iss[test_ind, :]
    frontier = oss[test_ind, :]
    init_values = []
    for point in init_points:
        init_values.append(fre(point))
    init_values = np.reshape(np.array(init_values), (len(init_values), -1))
    # Model construction (timed; the theano baseline is commented out).
    start = time.time()
    model = GPNetwork(init_points, init_values, modelParams1)
    tftime = time.time() - start
    #start = time.time()
    #model2 = Dgps_net(init_points, init_values, modelParams2)
    #theanotime = time.time() - start
    ll_train = []
    ll_test = []
    energy = []
    # NOTE(review): the inner loops below rebind `i`, shadowing this
    # training-step counter; harmless for `range`, but confusing.
    for i in range(0, 2000):
        # Test-set log-likelihood under the current model.
        means, vars = model.predictBatch(frontier_points)
        #if (np.isnan(vars).any() or (vars <= 0.0).any()):
        #    for asd in range(0, 12):
        #        print('PROBLEM')
        #        model.train()
        llsum = 0.0
        llsep = [0.0, 0.0]
        rmsesum = 0.0
        rmsesep = [0.0, 0.0]
        for i in range(0, frontier_points.shape[0]):
            for j in range(0, 2):
                # Per-point Gaussian log-density of the true objective under
                # the predicted mean/variance.
                llsum += multivariate_normal.logpdf(frontier[i, j], mean=means[i, j], cov=vars[i, j])
                rmsesum += (means[i, j] - frontier[i, j])**2
                #llsep[j] += multivariate_normal.logpdf(frontier[i, j], mean=means[i, j], cov=vars[i, j])
                #rmsesep[j] += (means[i, j] - frontier[i, j])**2
                # file.write("{0} mean {1}, var {2}, actual {3}\n".format(labels[j], means[i, j], vars[i, j], frontier[i, j]))
        print("After {0} points, the avg log likelihood is {1}".format(n_points, 0.5 * llsum / frontier_points.shape[0]))
        ll_test.append(0.5 * llsum / frontier_points.shape[0])
        # Training-set log-likelihood under the current model.
        means, vars = model.predictBatch(init_points)
        llsum = 0.0
        llsep = [0.0, 0.0]
        rmsesum = 0.0
        rmsesep = [0.0, 0.0]
        for i in range(0, init_points.shape[0]):
            for j in range(0, 2):
                llsum += multivariate_normal.logpdf(init_values[i, j], mean=means[i, j], cov=vars[i, j])
                rmsesum += (means[i, j] - init_values[i, j])**2
                llsep[j] += multivariate_normal.logpdf(init_values[i, j], mean=means[i, j], cov=vars[i, j])
                rmsesep[j] += (means[i, j] - init_values[i, j])**2
                # file.write("{0} mean {1}, var {2}, actual {3}\n".format(labels[j], means[i, j], vars[i, j], frontier[i, j]))
        ll_train.append(0.5 * llsum / init_points.shape[0])
        print("After {0} points, the avg log likelihood is {1}".format(n_points, 0.5 * llsum / init_points.shape[0]))
        energy.append(model.get_energy())
        # One optimiser step per outer iteration (modelParams1 maxiter=1).
        model.train()
    # Plot the learning curves and model energy for this n_points setting.
    plt.figure()
    plt.plot(np.linspace(1, 2000, num=2000), ll_train, label='Training log-likelihood')
    plt.plot(np.linspace(1, 2000, num=2000), ll_test, label='Test log-likelihood')
    plt.ylabel('Log-likelihood')
    plt.ylim(-4, 0)
    plt.xlabel('Iterations')
    plt.title('Training and Test log-likelihoods')
    plt.legend()
    # Random serial keeps repeated runs from overwriting each other's figures.
    serial = np.random.randint(100000)
    plt.savefig('figs/ll_{}_{}.eps'.format(n_points, serial))
    plt.savefig('figs/ll_{}_{}.pdf'.format(n_points, serial))
    plt.figure()
    plt.plot(np.linspace(1, 2000, num=2000), np.reshape(energy, (-1)))
    plt.ylabel('Energy')
    plt.ylim(-1500, 500)
    plt.xlabel('Iterations')
    plt.title('Model energy')
    plt.savefig('figs/en_{}_{}.eps'.format(n_points, serial))
    plt.savefig('figs/en_{}_{}.pdf'.format(n_points, serial))
    #with open("times.txt", "a") as myfile:
    #    myfile.write("{} tf time: {}, thenao time: {}".format(n_points, tftime, theanotime))
66,041 | mhavasi/MPhil_Project | refs/heads/master | /code/DeepGPs/single_experiment.py | import matplotlib
matplotlib.use('Agg')
from optimizer.Optimizer import optimize
import numpy as np
import argparse
from sklearn import preprocessing
# Load the precomputed random evaluations: each line holds 13 input features
# followed by 2 objective values, separated by single spaces.
# NOTE(review): the handle name `re` shadows the stdlib `re` module here.
with open("/home/hava/MPhil_Project/code/DeepGPs/random_evaluations.txt", "r") as re:
    re_proc = []
    iss = []  # raw input rows (13 floats each)
    oss = []  # raw objective rows (2 floats each)
    for line in re:
        line_split = line.split(' ')
        i = [float(x) for x in line_split[0:13]]
        o = [float(x) for x in line_split[13:15]]
        iss.append(i)
        oss.append(o)
    iss = np.array(iss)
    oss = np.array(oss)
    # Unlike the other scripts, inputs are NOT standardised here.
    #iss = preprocessing.scale(iss)
    #oss = preprocessing.scale(oss)
    # Pair each input with its objectives for lookup by fre().
    for i in range(0, iss.shape[0]):
        re_proc.append((iss[i, :], oss[i, :]))
    re_proc = np.array(re_proc)
    print(re_proc)
def fre(x):
    """Look up the precomputed objective values for input point *x*.

    Scans the global ``re_proc`` table for an input row matching *x* within
    tolerance and returns the associated objective vector.

    Raises
    ------
    LookupError
        If no stored evaluation matches *x*.
    """
    for stored_input, stored_output in re_proc:
        if np.allclose(stored_input, x, atol=1e-08):
            return stored_output
    # Bug fix: the original ended with `assert(True)`, a no-op that let the
    # function fall through and silently return None on a failed lookup.
    raise LookupError("no precomputed evaluation found for input {0}".format(x))
# Command-line interface: model name, output directory and iteration budget,
# plus optional random seed and retrain frequency.
parser = argparse.ArgumentParser(description='Process some integers.')
parser.add_argument('model', help='shallow/deep_joint/deep_disjoint')
parser.add_argument('output_dir')
parser.add_argument('iterations', type=int)
# Bug fix: np.random.random_integers is deprecated (and removed in modern
# NumPy); randint(1, 5001) draws from the same inclusive range 1..5000.
parser.add_argument('-rs', dest='seed', type=int, default=np.random.randint(1, 5001),
                    help='random seed')
parser.add_argument('-rt', dest='retrain', type=int, default=1,
                    help='retrain frequency')
args = parser.parse_args()

# Seed both RNGs so runs are reproducible from the -rs flag.
import random
random.seed(args.seed)
np.random.seed(args.seed)

# Map the CLI model name onto the optimizer's model configuration.
# NOTE(review): optimize() dispatches only on 'dgp', 'dgp_dj', 'gp' and
# 'rnd' -- confirm the 'dgps' and 'gp_dj' configurations are supported.
if (args.model == 'shallow'):
    print('Shallow GP')
    modelParams = {'model':'dgp',
                   'maxiter': 15000,
                   'layer_types': ['gp'],
                   'layer_nodes': [2],
                   'retrain': args.retrain}
elif (args.model == 'shallow_disjoint'):
    print('Shallow disjoint GP')
    modelParams = {'model':'dgp_dj',
                   'maxiter': 15000,
                   'layer_types': ['gp'],
                   'layer_nodes': [1],
                   'retrain': args.retrain}
elif (args.model == 'deep_joint'):
    print('Deep joint GP')
    modelParams = {'model':'dgp',
                   'maxiter': 15000,
                   'layer_types': ['gp', 'gp'],
                   'layer_nodes': [2, 2],
                   'retrain': args.retrain}
elif (args.model == 'deep_disjoint'):
    print('Deep disjoint GP')
    modelParams = {'model':'dgp_dj',
                   'maxiter': 15000,
                   'layer_types': ['gp', 'gp'],
                   'layer_nodes': [2, 1],
                   'retrain': args.retrain}
elif (args.model == 'dgps'):
    print('dgps model')
    modelParams = {'model':'dgps'}
elif (args.model == 'gp'):
    print('GP')
    modelParams = {'model':'gp',
                   'maxiter': 40000,
                   'retrain': args.retrain}
elif (args.model == 'gp_disjoint'):
    print('Disjoint GP')
    modelParams = {'model':'gp_dj',
                   'maxiter': 40000,
                   'retrain': args.retrain}
elif (args.model == 'random'):
    print('Random')
    modelParams = {'model':'rnd'}
else:
    # Bug fix: an unrecognised model name previously fell through and caused
    # a NameError on modelParams below; fail fast with a clear message.
    raise ValueError('Unknown model name: {0}'.format(args.model))
aquisitionParams = {'aquisition':'SMSego',
                    'gain': 2.0}
# Run the optimisation over the stored random-evaluation inputs, starting
# from 50 random initial evaluations.
frontier, curve = optimize(fre, np.array([i for i, o in re_proc]), modelParams, aquisitionParams, 50, args.iterations, args.output_dir)
| {"/code/DeepGPs/models/GP/GPlib.py": ["/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/models/DeepGP/layers/GPLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py"], "/code/DeepGPs/models/DeepGP/layers/OutputLayerRegression.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/OutputNodeRegression.py"], "/code/DeepGPs/models/Random/RandomModel.py": ["/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/optimizer/aquisition/SMSego.py": ["/code/DeepGPs/optimizer/aquisition/AbstractAquisition.py"], "/code/DeepGPs/models/DeepGP/GPNetwork.py": ["/code/DeepGPs/models/DeepGP/layers/InputLayer.py", "/code/DeepGPs/models/DeepGP/layers/OutputLayerRegression.py", "/code/DeepGPs/models/DeepGP/layers/NoisyLayer.py", "/code/DeepGPs/models/DeepGP/layers/GPLayer.py", "/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/models/DeepGP/layers/InputLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/InputNode.py"], "/code/DeepGPs/models/DeepGP/layers/NoisyLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/NoisyNode.py"], "/code/DeepGPs/models/DeepGP/GPNetworkDJ.py": ["/code/DeepGPs/models/AbstractModel.py", "/code/DeepGPs/models/DeepGP/GPNetwork.py"], "/code/DeepGPs/optimizer/Optimizer.py": ["/code/DeepGPs/optimizer/aquisition/SMSego.py"]} |
66,042 | mhavasi/MPhil_Project | refs/heads/master | /code/DeepGPs/likelihood_test.py |
import numpy as np
import argparse
from sklearn import preprocessing
from scipy.stats import multivariate_normal
import math
import matplotlib
matplotlib.use('Agg')
from models.DeepGP.GPNetwork import GPNetwork
from models.DeepGP.GPNetworkDJ import GPNetworkDJ
from models.GP.GaussianProcess import GaussianProcess
from models.GP.GaussianProcessDJ import GaussianProcessDJ
from models.Random.RandomModel import RandomModel
from models.dgps.dgps_net import Dgps_net
from models.dgps.dgps_net_dj import Dgps_netDJ
from models.GP.GPlibDJ import GPlibDJ
with open("/home/hava/MPhil_Project/code/DeepGPs/random_evaluations.txt", "r") as re:
re_proc = []
iss = []
oss = []
for line in re:
line_split = line.split(' ')
i = [float(x) for x in line_split[0:13]]
o = [float(x) for x in line_split[13:15]]
iss.append(i)
oss.append(o)
iss = np.array(iss)
oss = np.array(oss)
iss = preprocessing.scale(iss)
#oss = preprocessing.scale(oss)
for i in range(0, iss.shape[0]):
re_proc.append((iss[i, :], oss[i, :]))
re_proc = np.array(re_proc)
print(re_proc)
def fre(x):
    """Look up the recorded objective values for input point *x*.

    Scans the module-level ``re_proc`` array of (input, output) pairs
    loaded from random_evaluations.txt and returns the output whose
    input matches *x* within tolerance.
    """
    for i, o in re_proc:
        if (np.allclose(i, x, atol=1e-08)):
            return o
    # Bug fix: the original `assert(True)` could never fire, so unknown
    # points fell through and silently returned None.
    assert False, "fre: no recorded evaluation matches the given input"
def freinv(x):
    """Inverse lookup of fre(): return the recorded input point whose
    objective values match *x* within tolerance (uses module-level
    ``re_proc``).
    """
    for i, o in re_proc:
        if (np.allclose(o, x, atol=1e-08)):
            return i
    # Bug fix: the original `assert(True)` could never fire, so unknown
    # values fell through and silently returned None.
    assert False, "freinv: no recorded evaluation matches the given output"
# Command-line interface and global random seeding.
parser = argparse.ArgumentParser(description='Predictive likelihood test for (deep) GP models.')
parser.add_argument('model', help='shallow/deep_joint/deep_disjoint')
# Bug fix: the help text for "dir" was copy-pasted from "model"; it is
# actually the directory the result files are written to (see the
# filename construction below).
parser.add_argument('dir', help='output directory for the result files')
parser.add_argument('-rs', dest='seed', type=int, default=422,
                    help='random seed')
args = parser.parse_args()
import random
# Seed both the stdlib and numpy RNGs so runs are reproducible.
random.seed(args.seed)
np.random.seed(args.seed)
n_prints = 1
deep_maxiter = 15000
# Map the CLI model name onto the modelParams dict consumed by the
# model-construction code further down.
if (args.model == 'shallow'):
    print('Shallow GP')
    modelParams = {'model':'dgp',
                   'maxiter': 2000,
                   'layer_types': ['gp', 'noisy'],
                   'layer_nodes': [2, 1]}
elif (args.model == 'shallow_disjoint'):
    print('Shallow disjoint GP')
    modelParams = {'model':'dgp_dj',
                   'maxiter': 2000,
                   'layer_types': ['gp', 'noisy'],
                   'layer_nodes': [2, 1]}
elif (args.model == 'deep_joint'):
    print('Deep joint GP')
    modelParams = {'model':'dgp',
                   'maxiter': 2000,
                   'layer_types': ['gp', 'noisy', 'gp', 'noisy'],
                   'layer_nodes': [2, 1, 2, 1]}
elif (args.model == 'deep_disjoint'):
    print('Deep disjoint GP')
    modelParams = {'model':'dgp_dj',
                   'maxiter': 2000,
                   'layer_types': ['gp', 'noisy', 'gp', 'noisy'],
                   'layer_nodes': [2, 1, 2, 1]}
elif (args.model == 'dgps'):
    print('dgps model')
    modelParams = {'model':'dgps'}
elif (args.model == 'dgps_disjoint'):
    print('dj dgps model')
    modelParams = {'model':'dgps_dj'}
elif (args.model == 'dgps_shallow'):
    print('shallow dgps model')
    modelParams = {'model':'dgps',
                   'shallow': True}
elif (args.model == 'gp'):
    print('GP')
    modelParams = {'model':'gp',
                   'maxiter': 40000,
                   # Bug fix: this script's parser defines no 'retrain'
                   # argument, so args.retrain raised AttributeError as
                   # soon as model == 'gp'.  Default to False.
                   'retrain': getattr(args, 'retrain', False)}
elif (args.model == 'gplib'):
    print('GPlib')
    modelParams = {'model':'gplib'}
elif (args.model == 'gplib_matern'):
    print('GPlib matern')
    modelParams = {'model':'gplib',
                   'kern': 'matern'}
elif (args.model == 'gp_disjoint'):
    print('Disjoint GP')
    modelParams = {'model':'gp_dj',
                   'maxiter': 5000}
elif (args.model == 'random'):
    print('Random')
    modelParams = {'model':'rnd'}
# Create/truncate the two per-run output files (log-likelihood and RMSE).
filename = '{}/{}_{}.txt'.format(args.dir, args.model, args.seed)
with open(filename, 'w+') as file:
    pass
filename_rmse = '{}/{}_rmse_{}.txt'.format(args.dir, args.model, args.seed)
with open(filename_rmse, 'w+') as file:
    pass
# For every point i, count the points j that i strictly exceeds on both
# objectives (presumably a Pareto-dominance count used to rank points;
# TODO confirm the intended direction of dominance).  O(n^2) scan.
doms = []
for i in range(0, iss.shape[0]):
    current = 0
    for j in range(0, iss.shape[0]):
        if (oss[i, 0] > oss[j, 0] and oss[i, 1] > oss[j, 1]):
            current += 1
    doms.append(current)
doms = np.array(doms)
# Indices of points sorted by ascending dominance count.
dom_inds = doms.argsort()
print(doms[dom_inds])
# Fit the selected model on n_points training points and evaluate its
# predictive log-likelihood / RMSE on the remaining points.
for n_points in [0]:
    #with open('random_points.txt') as file:
    #    init_points = []
    #    for line in file: # read rest of lines
    #        init_points.append([float(x) for x in line.split()])
    #    init_points = np.array(init_points)
    # Training points are drawn from the 1200 best-ranked points
    # (with n_points == 0 the model is fitted to no data at all).
    train_ind = np.random.choice(dom_inds[0:1200], size=n_points)
    init_points = iss[train_ind, :]
    #with open('random_frontier.txt') as file:
    #    frontier = []
    #    for line in file: # read rest of lines
    #        line = line[1:-2]
    #        frontier.append([float(x) for x in line.split()])
    #    frontier_points = np.array([freinv(np.array(l)) for l in frontier])
    # Everything not selected for training becomes the evaluation set.
    test_ind = [x for x in dom_inds[0:1200] if x not in train_ind]
    frontier_points = iss[test_ind, :]
    frontier = oss[test_ind, :]
    init_values = []
    for point in init_points:
        init_values.append(fre(point))
    init_values = np.reshape(np.array(init_values), (len(init_values), -1))
    # Model
    if (modelParams['model'] == 'dgp'):
        model = GPNetwork(init_points, init_values, modelParams)
    elif (modelParams['model'] == 'dgp_dj'):
        model = GPNetworkDJ(init_points, init_values, modelParams)
    elif (modelParams['model'] == 'gp'):
        model = GaussianProcess(init_points, init_values, modelParams)
    elif (modelParams['model'] == 'gp_dj'):
        model = GaussianProcessDJ(init_points, init_values, modelParams)
    elif (modelParams['model'] == 'rnd'):
        model = RandomModel(init_points, init_values, modelParams)
    elif (modelParams['model'] == 'dgps'):
        model = Dgps_net(init_points, init_values, modelParams)
    elif (modelParams['model'] == 'dgps_dj'):
        model = Dgps_netDJ(init_points, init_values, modelParams)
    elif (modelParams['model'] == 'gplib'):
        model = GPlibDJ(init_points, init_values, modelParams)
    else:
        print('Unspecified model name')
    #for x in range(0, n_prints):
    # Accumulate per-point predictive log-likelihood and squared error,
    # both summed over both objectives and tracked separately per objective.
    means, vars = model.predictBatch(frontier_points)
    llsum = 0.0
    llsep = [0.0, 0.0]
    rmsesum = 0.0
    rmsesep = [0.0, 0.0]
    for i in range(0, frontier_points.shape[0]):
        for j in range(0, 2):
            llsum += multivariate_normal.logpdf(frontier[i, j], mean=means[i, j], cov=vars[i, j])
            rmsesum += (means[i, j] - frontier[i, j])**2
            llsep[j] += multivariate_normal.logpdf(frontier[i, j], mean=means[i, j], cov=vars[i, j])
            rmsesep[j] += (means[i, j] - frontier[i, j])**2
    #        file.write("{0} mean {1}, var {2}, actual {3}\n".format(labels[j], means[i, j], vars[i, j], frontier[i, j]))
    print("Accuracy {0}, Power {1}".format(llsep[0] / frontier_points.shape[0], llsep[1] / frontier_points.shape[0]))
    print("After {0} points, the avg log likelihood is {1}".format(n_points, 0.5 * llsum / frontier_points.shape[0]))
    # model.train()
    # Append one result row per run: n_points, combined metric, per-objective metrics.
    with open(filename, 'a') as file:
        file.write("{0} {1} {2} {3} \n".format(n_points, 0.5 * llsum / frontier_points.shape[0], llsep[0] / frontier_points.shape[0], llsep[1] / frontier_points.shape[0]))
    with open(filename_rmse, 'a') as file:
        file.write("{0} {1} {2} {3} \n".format(n_points, np.sqrt(rmsesum * 0.5 / frontier_points.shape[0]), np.sqrt(rmsesep[0] / frontier_points.shape[0]), np.sqrt(rmsesep[1] / frontier_points.shape[0])))
| {"/code/DeepGPs/models/GP/GPlib.py": ["/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/models/DeepGP/layers/GPLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py"], "/code/DeepGPs/models/DeepGP/layers/OutputLayerRegression.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/OutputNodeRegression.py"], "/code/DeepGPs/models/Random/RandomModel.py": ["/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/optimizer/aquisition/SMSego.py": ["/code/DeepGPs/optimizer/aquisition/AbstractAquisition.py"], "/code/DeepGPs/models/DeepGP/GPNetwork.py": ["/code/DeepGPs/models/DeepGP/layers/InputLayer.py", "/code/DeepGPs/models/DeepGP/layers/OutputLayerRegression.py", "/code/DeepGPs/models/DeepGP/layers/NoisyLayer.py", "/code/DeepGPs/models/DeepGP/layers/GPLayer.py", "/code/DeepGPs/models/AbstractModel.py"], "/code/DeepGPs/models/DeepGP/layers/InputLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/InputNode.py"], "/code/DeepGPs/models/DeepGP/layers/NoisyLayer.py": ["/code/DeepGPs/models/DeepGP/layers/BaseLayer.py", "/code/DeepGPs/models/DeepGP/nodes/NoisyNode.py"], "/code/DeepGPs/models/DeepGP/GPNetworkDJ.py": ["/code/DeepGPs/models/AbstractModel.py", "/code/DeepGPs/models/DeepGP/GPNetwork.py"], "/code/DeepGPs/optimizer/Optimizer.py": ["/code/DeepGPs/optimizer/aquisition/SMSego.py"]} |
66,043 | mhavasi/MPhil_Project | refs/heads/master | /code/DeepGPs/models/DeepGP/nodes/NoisyNode.py | from .BaseNode import *
import tensorflow as tf
import numpy as np
class NoisyNode(BaseNode):
    """Pass-through node that adds learned Gaussian noise to its input.

    Output means are the input means unchanged; output variances are the
    input variances plus exp(log_noise), where log_noise holds one
    trainable parameter per input dimension.
    """

    def __init__(self, input_means, input_vars):
        BaseNode.__init__(self, input_means, input_vars)
        self.input_means = input_means
        self.input_vars = input_vars
        # One trainable log-variance per input dimension, initialised to
        # zero (i.e. unit noise variance before training).
        n_dims = input_means.get_shape().as_list()[1]
        self.log_noise = tf.Variable(tf.zeros([1, n_dims]))
        # Means pass straight through; the noise only widens the variances.
        self.output_means = self.input_means
        self.output_vars = input_vars + tf.exp(self.log_noise)

    def getEnergyContribution(self):
        """A noisy node contributes nothing to the network energy."""
        return tf.constant(0.0, tf.float32, [1, 1])

    def getOutput(self):
        """Return the (means, variances) output tensors of this node."""
        return self.output_means, self.output_vars
66,044 | kasbah/kiur | refs/heads/master | /kiur/urls.py | from django.conf.urls import patterns, include, url
from haystack.forms import ModelSearchForm
from haystack.query import SearchQuerySet
from haystack.views import SearchView, search_view_factory
import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
#sqs = SearchQuerySet() #
# URL routes for the site (old-style Django patterns() with string view
# references).
urlpatterns = patterns('',
    url(r"^$", "kiur.views.index"),
    #url(r'^search/$', include('haystack.urls')),
    url(r"^search/$", "kiur.views.search"),
    # Django's bundled comments framework.
    (r'^comments/', include('django.contrib.comments.urls')),
    #url(r"^(?P<libmod_type>components)/$", "kiur.views.SearchWithRequest",name='haystack_search'),
    #url(r"^(?P<libmod_type>footprints)/$", "kiur.views.SearchWithRequest",name='haystack_search'),
    #url(r'^$', search_view_factory(
    #    view_class=SearchView,
    #    template='index.html',
    #    searchqueryset=sqs,
    #    form_class=ModelSearchForm
    #), name='haystack_search'),
    # Uncomment the admin/doc line below to enable admin documentation:
    url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
    # Uncomment the next line to enable the admin:
    url(r'^admin/', include(admin.site.urls)),
    url(r'^login/$', "kiur.views.login"),
    url(r'^logout/$', "django.contrib.auth.views.logout"),
    # Multi-step library submission wizard; <step> is a single digit.
    url(r"submit/(?P<step>\d)/$", "libmods.views.submit"),
    #url(r"^libs/", include("libmods.urls")),
    # Detail pages: the captured name is URL-unquoted inside the views.
    url(r"^components/(?P<url_cmp_name>[\w\W]+)/$", "libmods.views.cmp_detail"),
    url(r"^footprints/(?P<url_ftp_name>[\w\W]+)/$", "libmods.views.ftp_detail"),
    url(r"^modify_basket/$", "kiur.views.modify_basket"),
    url(r"^download/$", "kiur.views.download"),
    #url(r"(?P<url_cmp_name>[\w|\W]+)/$", "cmp_detail"),
)
# Serve uploaded media directly from Django in development only.
if settings.DEBUG:
    # files (images, css, javascript, etc.)
    urlpatterns += patterns('',
        (r'^media/(?P<path>.*)$', 'django.views.static.serve', {
            'document_root': settings.MEDIA_ROOT}))
| {"/tools/templatetags/tools_extras.py": ["/libmods/models.py"], "/libmods/admin.py": ["/libmods/models.py"], "/libmods/views.py": ["/kiur/haystack_forms.py", "/kiur/views.py", "/libmods/parse.py", "/libmods/models.py", "/libmods/forms.py"], "/libmods/search_indexes.py": ["/libmods/models.py"]} |
66,045 | kasbah/kiur | refs/heads/master | /tools/templatetags/tools_extras.py | from django import template
from libmods.models import Component, Footprint
register = template.Library()
@register.inclusion_tag("tools/basket.html", takes_context=True)
def render_basket(context):
    """Render the basket summary widget (tools/basket.html).

    Counts the Component and Footprint instances in the session basket
    (taken from the template context) and chooses singular/plural
    suffixes for the template.  Python 2: filter() returns a list here,
    so len() works.
    """
    cps = len(filter(lambda x: type(x) == Component, context["basket"]))
    fps = len(filter(lambda x: type(x) == Footprint, context["basket"]))
    if cps == 1:
        cplural = ""
    else:
        cplural = "s"
    if fps == 1:
        fplural = ""
    else:
        fplural = "s"
    is_empty = (cps + fps) == 0
    return {"is_empty":is_empty, "cplural": cplural, "components": cps, "fplural": fplural, "footprints": fps}
@register.filter
def basket_to_numbers(basket):
    """Return a readable basket summary, e.g. "2 components and 1 footprint".

    Counts only exact Component/Footprint instances, matching the
    type(x) == ... test used elsewhere in this module.
    """
    # sum() over a generator works on both Python 2 and 3, unlike
    # len(filter(...)) which fails on Python 3's lazy filter.
    cps = sum(1 for x in basket if type(x) == Component)
    fps = sum(1 for x in basket if type(x) == Footprint)
    # Bug fix: both branches previously assigned the plural
    # " components and ", so a single component was mislabelled.
    if cps == 1:
        l = " component and "
    else:
        l = " components and "
    if fps == 1:
        m = " footprint"
    else:
        m = " footprints"
    return str(cps) + l + str(fps) + m
| {"/tools/templatetags/tools_extras.py": ["/libmods/models.py"], "/libmods/admin.py": ["/libmods/models.py"], "/libmods/views.py": ["/kiur/haystack_forms.py", "/kiur/views.py", "/libmods/parse.py", "/libmods/models.py", "/libmods/forms.py"], "/libmods/search_indexes.py": ["/libmods/models.py"]} |
66,046 | kasbah/kiur | refs/heads/master | /libmods/urls.py | from django.conf.urls import patterns, url
# Component detail route.  NOTE(review): kiur/urls.py has commented out
# its include("libmods.urls") line -- this module is presumably
# superseded by the routes defined there; confirm before removing.
urlpatterns = patterns("libmods.views",
    url(r"(?P<url_cmp_name>[\w|\W]+)/$", "cmp_detail"),
    #url(r"footprints/(?P<url_ftp_name>[\w|\W]+)/$", "ftp_detail"),
)
| {"/tools/templatetags/tools_extras.py": ["/libmods/models.py"], "/libmods/admin.py": ["/libmods/models.py"], "/libmods/views.py": ["/kiur/haystack_forms.py", "/kiur/views.py", "/libmods/parse.py", "/libmods/models.py", "/libmods/forms.py"], "/libmods/search_indexes.py": ["/libmods/models.py"]} |
66,047 | kasbah/kiur | refs/heads/master | /libmods/admin.py | from libmods.models import Footprint, Component
from django.contrib import admin
from django.contrib.contenttypes import generic
from custom_comments.models import CommentWithFlag
class ProblemsInline(generic.GenericStackedInline):
    """Inline editor for problem-report comments generically attached to
    a library object (see LibMod.problems_reported)."""
    model = CommentWithFlag
    # Names of the content-type / object-id fields on CommentWithFlag
    # that hold the generic relation back to the library object.
    ct_field = "problem_object_type"
    ct_fk_field = "problem_object_id"
class LibModAdmin(admin.ModelAdmin):
    """Shared admin for Footprint and Component, with problem reports inline."""
    inlines = [
        ProblemsInline,
    ]
admin.site.register(Footprint, LibModAdmin)
admin.site.register(Component, LibModAdmin)
| {"/tools/templatetags/tools_extras.py": ["/libmods/models.py"], "/libmods/admin.py": ["/libmods/models.py"], "/libmods/views.py": ["/kiur/haystack_forms.py", "/kiur/views.py", "/libmods/parse.py", "/libmods/models.py", "/libmods/forms.py"], "/libmods/search_indexes.py": ["/libmods/models.py"]} |
66,048 | kasbah/kiur | refs/heads/master | /kiur/views.py | from django.shortcuts import render, get_object_or_404
from django.template.context import RequestContext
from django.http import HttpResponseRedirect
from django.http import HttpResponseNotAllowed
from django.http import HttpResponse
import ast
from haystack.views import SearchView, search_view_factory
from haystack.query import SearchQuerySet
#from haystack.forms import HighlightedModelSearchForm, ModelSearchForm, FacetedSearchForm
from kiur.haystack_forms import CustomSearchForm
from django.contrib.auth.views import login as djlogin
from libmods.models import Component, Footprint
import json
def get_session_form(request):
    """Return a CustomSearchForm bound to the user's last search.

    Falls back to an unbound form when no search is stored in the session.
    """
    try:
        form = CustomSearchForm(request.session["last_search"])
    except KeyError:
        form = CustomSearchForm()
    return form
def get_session_basket(request):
    """Return the basket stored in the session, or a fresh empty list."""
    return request.session.get("basket", [])
def get_session_context(request):
    """Assemble the template context shared by most views: the last
    search form, the session basket and the current user."""
    return {
        "form": get_session_form(request),
        "basket": get_session_basket(request),
        "user": request.user,
    }
def index(request):
    """Render the landing page with an empty search form and the
    current session basket."""
    context = {"form": CustomSearchForm(), "basket": get_session_basket(request)}
    return render(request, "index.html", context)
def _modify_basket(request, libmod):
    """Toggle *libmod*'s membership in the session basket."""
    basket = get_session_basket(request)
    if libmod in basket:
        basket.remove(libmod)
    else:
        basket.append(libmod)
    # De-duplicate defensively before storing back in the session.
    request.session["basket"] = list(set(basket))
class CustomSearchView(SearchView):
    """Haystack SearchView that also exposes the selected model filter
    and the session basket to the template."""
    def __name__(self):
        # NOTE(review): defined as an instance method rather than a class
        # attribute -- presumably to satisfy haystack's view factory;
        # confirm this is intentional.
        return "CustomSearchView"
    def extra_context(self):
        extra = super(CustomSearchView, self).extra_context()
        # The "models" GET parameter selects which model(s) were searched.
        extra["models"] = self.request.GET.get("models", "")
        extra["basket"] = get_session_basket(self.request)
        return extra
def modify_basket(request):
    """Add or remove a library object from the session basket.

    POST params: ``libmod`` (object name) and ``_type`` ("Component" or
    "Footprint").  AJAX requests get a JSON status payload; plain POSTs
    are redirected back to the last search.  A non-POST AJAX request
    returns the current basket counts.
    """
    if request.POST:
        p = request.POST
        try:
            if p["_type"] == "Component":
                libmod = Component.objects.get(name=p["libmod"])
            elif p["_type"] == "Footprint":
                libmod = Footprint.objects.get(name=p["libmod"])
            else:
                print p["_type"]
                raise TypeError
            _modify_basket(request, libmod)
        except:
            # NOTE(review): bare except -- any lookup/toggle failure is
            # reported as success=False for AJAX, but silently ignored
            # for non-AJAX requests; confirm intended.
            if request.is_ajax():
                data = {"success":False}
                return HttpResponse(json.dumps(data), mimetype="application/json")
        if request.is_ajax():
            basket = request.session["basket"]
            data = {}
            data["in_basket"] = libmod in basket
            data["name"] = p["libmod"]
            data["_type"] = p["_type"]
            data["libs"] = len(filter(lambda x: isinstance(x, Component), basket))
            data["mods"] = len(filter(lambda x: isinstance(x, Footprint), basket))
            data["success"] = True
            return HttpResponse(json.dumps(data), mimetype="application/json")
        else:
            # Rebuild the last-search URL from the session, piece by piece.
            try:
                q = request.session["last_search"]["q"]
            except:
                q = ""
            try:
                models = request.session["last_search"]["models"]
            except:
                models = ""
            try:
                page = request.session["last_search"]["page"]
            except:
                page = "1"
            return HttpResponseRedirect("/search/?q="+ q +"&models=" + models + "&page=" + page)
    else:
        #XXX should check for request.GET but it doesn't seem to work..
        if request.is_ajax():
            basket = request.session["basket"]
            data = {}
            data["libs"] = len(filter(lambda x: type(x) is Component, basket))
            data["mods"] = len(filter(lambda x: type(x) is Footprint, basket))
            data["success"] = True
            return HttpResponse(json.dumps(data), mimetype="application/json")
        return HttpResponseNotAllowed(request)
from django.core.servers.basehttp import FileWrapper
import os
import tempfile
from django.core.files.temp import NamedTemporaryFile
def download(request):
    """Serve a basket item as a downloadable KiCAD library file.

    POST params: ``libmod`` (object name) and ``_type`` ("Component" or
    "Footprint").  Components are sent as ``<name>.lib`` prefixed with
    an EESchema version header; footprints as ``<name>.mod``.
    """
    #TODO make a file in /tmp/ and let apache serve it
    if request.POST:
        p = request.POST
        f = tempfile.NamedTemporaryFile()
        if p["_type"] == "Component":
            libmod = get_object_or_404(Component, name=p["libmod"])
            name = p["libmod"] + ".lib"
            # ki_version is stored as the string repr of a list of
            # version parts (see LibMod.ki_version).
            version = ast.literal_eval(libmod.ki_version)
            f.write("EESchema-LIBRARY Version "+ ".".join(version) + "\n")
        elif p["_type"] == "Footprint":
            name = p["libmod"] + ".mod"
            # Bug fix: the lookup value must be passed as the name=
            # keyword; a bare positional string is not a valid filter.
            libmod = get_object_or_404(Footprint, name=p["libmod"])
        # Bug fix: write the library text for components too -- it was
        # previously only written in the Footprint branch, so a
        # component download contained just the version header.
        f.write(libmod.ki_text)
        f.seek(0)
        response = HttpResponse(FileWrapper(f), content_type='text/plain')
        response['Content-Disposition'] = "attachment; filename=" + name
        return response
def search(request):
    """Run a haystack search and render the results page.

    The raw GET parameters are kept in the session so other views
    (e.g. modify_basket) can send the user back to the same search.
    """
    request.session["last_search"] = request.GET
    # content_auto is the EdgeNgram document field defined in
    # libmods.search_indexes, enabling prefix matching.
    sqs = SearchQuerySet().filter(content_auto=request.GET.get('q', ''))
    request.session["sqs"] = sqs
    view = search_view_factory(
        view_class=CustomSearchView,
        template="search.html",
        searchqueryset=sqs,
        form_class=CustomSearchForm,
        context_class=RequestContext,
    )
    return view(request)
def login(request):
    """Wrap Django's login view, adding the shared session context."""
    return djlogin(request, extra_context=get_session_context(request))
| {"/tools/templatetags/tools_extras.py": ["/libmods/models.py"], "/libmods/admin.py": ["/libmods/models.py"], "/libmods/views.py": ["/kiur/haystack_forms.py", "/kiur/views.py", "/libmods/parse.py", "/libmods/models.py", "/libmods/forms.py"], "/libmods/search_indexes.py": ["/libmods/models.py"]} |
66,049 | kasbah/kiur | refs/heads/master | /kiur/models.py | #from django.contrib.auth.models import User
#
#class UserProfile(models.Model):
# # This field is required.
# user = models.OneToOneField(User)
#
# # Other fields here
# last_search = models.CharField(max_length=200)
| {"/tools/templatetags/tools_extras.py": ["/libmods/models.py"], "/libmods/admin.py": ["/libmods/models.py"], "/libmods/views.py": ["/kiur/haystack_forms.py", "/kiur/views.py", "/libmods/parse.py", "/libmods/models.py", "/libmods/forms.py"], "/libmods/search_indexes.py": ["/libmods/models.py"]} |
66,050 | kasbah/kiur | refs/heads/master | /libmods/forms.py | from django import forms
# Forms for the multi-step submission wizard (see libmods.views.submit):
# UploadFormOne takes the first file; the remaining forms pair the text of
# the already-uploaded file with an upload field for its companion file
# (presumably .lib<->.dcm and .mod<->.wrl -- confirm against templates).
class UploadFormOne(forms.Form):
    # Any one of the four supported KiCAD file types.
    lib_mod_dcm_or_wrl = forms.FileField()
class UploadFormLib(forms.Form):
    lib = forms.CharField()
    dcm = forms.FileField()
class UploadFormDcm(forms.Form):
    lib = forms.FileField()
    dcm = forms.CharField()
class UploadFormMod(forms.Form):
    mod = forms.CharField()
    wrl = forms.FileField()
class UploadFormWrl(forms.Form):
    mod = forms.FileField()
    dcm = forms.CharField()
| {"/tools/templatetags/tools_extras.py": ["/libmods/models.py"], "/libmods/admin.py": ["/libmods/models.py"], "/libmods/views.py": ["/kiur/haystack_forms.py", "/kiur/views.py", "/libmods/parse.py", "/libmods/models.py", "/libmods/forms.py"], "/libmods/search_indexes.py": ["/libmods/models.py"]} |
66,051 | kasbah/kiur | refs/heads/master | /kiur/haystack_forms.py | from django import forms
from haystack import site as haystack_site
from haystack.forms import SearchForm, model_choices
from django.utils.text import capfirst
from django.utils.encoding import smart_unicode
from django.utils.translation import ugettext_lazy as _
#from haystack.forms import model_choices
from django.db import models
class CustomSearchForm(SearchForm):
    """Haystack search form with an extra "Search In" choice field.

    The ``models`` ChoiceField lets the user restrict the search to a
    single indexed model ("app_label.module_name") or to everything
    (the empty choice).
    """
    def __init__(self, *args, **kwargs):
        super(CustomSearchForm, self).__init__(*args, **kwargs)
        self.fields['models'] = forms.ChoiceField(choices=self.model_choices(), required=False, label=('Search In'))#, widget=forms.RadioSelect(attrs={"onclick": "this.form.submit();"}))
    def get_models(self):
        """Return a list of model classes in the index."""
        search_models = []
        if self.is_valid():
            if not (self.cleaned_data["models"] == ""):
                # A specific "app_label.module_name" choice was selected.
                search_models.append(models.get_model(*self.cleaned_data["models"].split('.')))
            else:
                # Empty choice means "everything": include all indexed models.
                for item in self.all_choices:
                    search_models.append(models.get_model(*item.split(".")))
        return search_models
    def search(self):
        # Narrow the default haystack queryset to the chosen model(s).
        sqs = super(CustomSearchForm, self).search()
        return sqs.models(*self.get_models())
    def model_choices(self,site=None):
        """Build the (value, label) choices for the ``models`` field.

        Side effect: caches the full list of "app.model" strings on
        ``self.all_choices`` for use by get_models().
        """
        if site is None:
            site = haystack_site
        choices = []
        self.all_choices = []
        for m in site.get_indexed_models():
            choices.append(("%s.%s" % (m._meta.app_label, m._meta.module_name), smart_unicode(m._meta.verbose_name_plural)))
            self.all_choices.append("%s.%s" % (m._meta.app_label, m._meta.module_name))
        #I want these in component then footprint order
        choices.sort()
        choices.reverse()
        choices.append(("", "everything"))
        return reversed(choices)
| {"/tools/templatetags/tools_extras.py": ["/libmods/models.py"], "/libmods/admin.py": ["/libmods/models.py"], "/libmods/views.py": ["/kiur/haystack_forms.py", "/kiur/views.py", "/libmods/parse.py", "/libmods/models.py", "/libmods/forms.py"], "/libmods/search_indexes.py": ["/libmods/models.py"]} |
66,052 | kasbah/kiur | refs/heads/master | /libmods/views.py | from django.shortcuts import render, get_object_or_404
from django.template.context import RequestContext
from urllib import unquote
from kiur.haystack_forms import CustomSearchForm
from haystack.views import SearchView, search_view_factory
from django import forms
from kiur.views import get_session_context
from django.contrib.auth.decorators import login_required
from django.http import HttpResponseRedirect, Http404
import libmods.parse as parse
from libmods.models import Footprint, Component
from libmods.forms import UploadFormOne, UploadFormLib, UploadFormDcm, UploadFormWrl, UploadFormMod
def cmp_detail(request, url_cmp_name):
    """Component detail page; the component is looked up by its
    URL-unquoted name, 404ing when it does not exist."""
    context = get_session_context(request)
    context["component"] = get_object_or_404(Component, name=unquote(url_cmp_name))
    # Post-login redirect target pointing back at this page.
    context["next"] = "/components/" + url_cmp_name + "/"
    return render(request, "libmods/cmp_detail.html", context)
def ftp_detail(request, url_ftp_name):
    """Footprint detail page; the footprint is looked up by its
    URL-unquoted name, 404ing when it does not exist."""
    extra_context = get_session_context(request)
    extra_context["footprint"] = get_object_or_404(Footprint, name=unquote(url_ftp_name))
    # Bug fix: the post-login redirect target pointed at /components/,
    # but this view is routed under /footprints/ (see kiur/urls.py).
    extra_context["next"] = "/footprints/" + url_ftp_name + "/"
    return render(request, "libmods/ftp_detail.html", extra_context)
@login_required
def submit(request, step):
    """Multi-step library submission wizard.

    step 1: show the initial single-file upload form.
    step 2: validate the upload, parse it and show results or errors.
    step 3: not implemented yet.
    """
    step = int(step)
    extra_context = get_session_context(request)
    if step == 1:
        extra_context["upload_form"] = UploadFormOne()
        extra_context["redirect"] = "/submit/2/"
        return render(request, "submit.html", extra_context)
    elif step == 2:
        form = UploadFormOne(request.POST, request.FILES)
        if form.is_valid():
            try:
                parse_context = parse.parse_uploaded_file(request)
                extra_context.update(parse_context)
            except parse.ParseFailed as e:
                # Parsing failed: show the upload form again with the error.
                extra_context["upload_form"] = UploadFormOne()
                extra_context["redirect"] = "/submit/2/"
                extra_context["error_message"] = e
                return render(request, "submit.html", extra_context)
            else:
                extra_context["redirect"] = "/submit/3/"#?t=" + form.kind
                return render(request, "submit.html", extra_context)
        else:
            extra_context["upload_form"] = UploadFormOne()
            extra_context["redirect"] = "/submit/2/"
            extra_context["error_message"] = "Invalid upload file"
            return render(request, "submit.html", extra_context)
    elif step == 3:
        # NOTE(review): step 3 falls through and returns None -- confirm
        # whether this is unfinished or intentional.
        pass
    else:
        raise Http404
| {"/tools/templatetags/tools_extras.py": ["/libmods/models.py"], "/libmods/admin.py": ["/libmods/models.py"], "/libmods/views.py": ["/kiur/haystack_forms.py", "/kiur/views.py", "/libmods/parse.py", "/libmods/models.py", "/libmods/forms.py"], "/libmods/search_indexes.py": ["/libmods/models.py"]} |
66,053 | kasbah/kiur | refs/heads/master | /libmods/parse.py | #import magic
import re
import warnings
from django.utils import timezone
from django.contrib.auth.models import User
from libmods.models import Footprint, Component
from libmods.forms import UploadFormOne, UploadFormLib, UploadFormDcm, UploadFormWrl, UploadFormMod
MAX_UPLOAD_SIZE = 2621440 # 2.5MB
def parse_uploaded_file(request):
    ''' Determine the type of an uploaded KiCAD file from its first line,
    run the matching parser/prepare the matching follow-up form, and
    return a context dictionary for the submit view.

    Raises ParseFailed for oversized or unrecognised files.
    '''
    f = request.FILES["lib_mod_dcm_or_wrl"]
    if f.size > MAX_UPLOAD_SIZE:
        raise ParseFailed("File too Large. The maximum file size allowed is %.1f MB." % (MAX_UPLOAD_SIZE/1048576.0))
    # The first line identifies the KiCAD file type.
    first_line = f.readline()
    parsed = {}
    #for chunk in f.chunks():
    #    break
    #if (magic.Magic(mime=True).from_buffer(chunk) != "text/plain"):
    #    raise ParseFailed("File is not text/plain type.")
    if "EESchema-LIBRARY" in first_line:
        parsed["upload_form"] = UploadFormLib()
        # Collect ProblemInUp warnings raised while parsing so they can
        # be shown to the submitter.
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            parsed["saved"], parsed["db_duplicates"] = ParseLib(f, first_line, request)
            parsed["up_duplicates"] = filter(lambda i: issubclass(i.category, ProblemInUp), w)
    elif "EESchema-DOCLIB" in first_line:
        # Bug fix: these pre-filled follow-up forms were built into a
        # local variable and dropped, so the returned context was empty
        # for .dcm/.mod/.wrl uploads.
        parsed["upload_form"] = UploadFormDcm(initial={"dcm":f})
    elif "PCBNEW-LibModule-V1" in first_line:
        parsed["upload_form"] = UploadFormMod(initial={"mod":f})
    elif "VRML" in first_line:
        parsed["upload_form"] = UploadFormWrl(initial={"wrl":f})
    else:
        raise ParseFailed("Not a valid KiCAD file.")
    return parsed
def ParseLib(f, first_line, request):
    '''
    Parser for .lib file. Will save and return a list of saved components
    and a list of components whos name clashes with one in the
    database. Will raise a warning if there are duplicate names or other
    problems within an uploaded .lib itself.
    '''
    # The EESchema header carries the file-format version, e.g. "2.3".
    try:
        ki_version = re.match(r".*(\d\.\d)", first_line).group(1).split(".")
    except:
        raise ParseFailed("File identified as EESchema Library but cannot determine version.")
    # lib_open tracks whether we are inside a DEF...ENDDEF block;
    # components_text accumulates the raw text of each definition by name.
    lib_open = False
    components_text = {}
    duplicates = []
    saved = []
    for line in f:
        if line[0] == '#':
            # Comment line in the .lib file -- skipped.
            pass
        elif line[:3] == "DEF":
            if lib_open:
                # Previous definition never saw its ENDDEF: close it
                # synthetically and warn the submitter.
                components_text[name] += "ENDDEF\n"
                warnings.warn("The component definition for %s is not closed properly. There may be a problem parsing this component." % name, ProblemInUp)
                lib_open = False
            try:
                name = re.match(r"DEF ([\/\-_\w]+) ", line).group(1)
            except:
                warnings.warn("Problem determining name of definition beginning with:\n\t" + line, ProblemInUp)
            else:
                if name in components_text:
                    # Later duplicate definitions overwrite earlier ones.
                    warnings.warn ("Duplicate component %s in uploaded .lib" % name, ProblemInUp)
                components_text[name] = line
                lib_open = True
        elif (line[:6] == "ENDDEF") and lib_open:
            components_text[name] += line
            lib_open = False
        elif lib_open:
            components_text[name] += line
    if lib_open:
        # File ended inside a definition: close it synthetically.
        components_text[name] += "ENDDEF\n"
        warnings.warn("A component definition for %s is not closed properly. There may be a problem parsing this component." % name, ProblemInUp)
    # Save each parsed component, collecting name clashes with the DB.
    for name, text in components_text.iteritems():
        print name
        try:
            lib = Component.objects.get(name=name)
        except Component.DoesNotExist:
            lib = Component()
            lib.name = name
            lib.description = ""
            lib.submitter = request.user
            lib.maintainer = request.user
            lib.revision = 1
            lib.votes = 0
            lib.date_added = timezone.now()
            lib.ki_version = ki_version
            lib.ki_text = text
            lib.save()
            saved.append(lib)
        except Component.MultipleObjectsReturned:
            # NOTE(review): `lib` is not assigned when .get() raises
            # MultipleObjectsReturned, so this append can hit an unbound
            # (or stale previous-iteration) name -- confirm and fix.
            warnings.warn("There are already multiple components with the name %s. This really shouldn't have happend. Please contact the administrator." % name, ProblemInUp)
            duplicates.append({
                "component" : lib
                ,"new_text": text
                })
        else:
            # Name already exists: record the clash instead of saving.
            duplicates.append({
                "component" : lib
                ,"new_text": text
                })
    return saved, duplicates
class ProblemInUp(UserWarning):
    """Warning category for recoverable problems found in an uploaded file."""
class ParseFailed(Exception):
    """Raised when an uploaded file cannot be parsed as a KiCAD library."""

    def __init__(self, msg=None):
        # Fall back to a generic message when none is supplied.
        super(ParseFailed, self).__init__(
            "Parsing of uploaded file failed." if msg is None else msg)
| {"/tools/templatetags/tools_extras.py": ["/libmods/models.py"], "/libmods/admin.py": ["/libmods/models.py"], "/libmods/views.py": ["/kiur/haystack_forms.py", "/kiur/views.py", "/libmods/parse.py", "/libmods/models.py", "/libmods/forms.py"], "/libmods/search_indexes.py": ["/libmods/models.py"]} |
66,054 | kasbah/kiur | refs/heads/master | /libmods/search_indexes.py | import datetime
from haystack.indexes import RealTimeSearchIndex
from haystack import site, indexes
from libmods.models import Component, Footprint
class LibModIndex(RealTimeSearchIndex):
    """Shared haystack index registered for both Component and Footprint.

    Uses an EdgeNgram document field so word prefixes match, enabling
    autocomplete-style searching.
    """
    #the templates are templates/search/indexes/libmods/component_content_auto.txt
    #and footprint_content_auto.txt
    content_auto = indexes.EdgeNgramField(document=True, use_template=True)
    #content_auto = CharField(document=True, use_template=True)
    #def index_queryset(self):
    #    return Component.objects.filter(date_added__lte=datetime.datetime.now())
site.register(Component, LibModIndex)
site.register(Footprint, LibModIndex)
| {"/tools/templatetags/tools_extras.py": ["/libmods/models.py"], "/libmods/admin.py": ["/libmods/models.py"], "/libmods/views.py": ["/kiur/haystack_forms.py", "/kiur/views.py", "/libmods/parse.py", "/libmods/models.py", "/libmods/forms.py"], "/libmods/search_indexes.py": ["/libmods/models.py"]} |
66,055 | kasbah/kiur | refs/heads/master | /tools/forms.py | from django import forms
from django.template.defaultfilters import filesizeformat
from django.utils.translation import ugettext_lazy as _
import magic
class ContentTypeRestrictedFileField(forms.FileField):
"""
Same as FileField, but you can specify:
* content_types - list containing allowed content_types.
Example: ['application/pdf', 'image/jpeg']
* max_upload_size - a number indicating the maximum file
size allowed for upload.
2.5MB - 2621440
5MB - 5242880
10MB - 10485760
20MB - 20971520
50MB - 5242880
100MB 104857600
250MB - 214958080
500MB - 429916160
"""
def __init__(self, *args, **kwargs):
self.content_types = kwargs.pop("content_types")
self.max_upload_size = kwargs.pop("max_upload_size")
super(ContentTypeRestrictedFileField, self).__init__(*args, **kwargs)
def clean(self, *args, **kwargs):
data = super(ContentTypeRestrictedFileField, self).clean(*args, **kwargs)
#we just want the first chunk for the header
#should be a better way...
for chunk in data.chunks():
break
content_type = magic.Magic(mime=True).from_buffer(chunk)
try:
if content_type in self.content_types:
if data._size > self.max_upload_size:
raise forms.ValidationError(_('Please keep filesize under'
'%s. Current filesize %s')
% (filesizeformat(self.max_upload_size), filesizeformat(data._size)))
else:
raise forms.ValidationError(_('Filetype not supported.'))
except AttributeError:
print "attr error"
pass
return data
| {"/tools/templatetags/tools_extras.py": ["/libmods/models.py"], "/libmods/admin.py": ["/libmods/models.py"], "/libmods/views.py": ["/kiur/haystack_forms.py", "/kiur/views.py", "/libmods/parse.py", "/libmods/models.py", "/libmods/forms.py"], "/libmods/search_indexes.py": ["/libmods/models.py"]} |
66,056 | kasbah/kiur | refs/heads/master | /libmods/models.py | from django.db import models
from django.utils import encoding
from django.contrib.auth.models import User
from easy_thumbnails.signals import saved_file
from easy_thumbnails.signal_handlers import generate_aliases_global
saved_file.connect(generate_aliases_global)
from custom_comments.models import CommentWithFlag
from django.contrib.contenttypes import generic
class LibMod(models.Model):
    ''' This is the abstract base model for all library types '''
    # Display name and short description shown in listings.
    name = models.CharField(max_length=200)
    description = models.CharField(max_length=200)
    # Optional preview image; thumbnails generated via easy_thumbnails.
    image = models.ImageField(upload_to="libmodimages", null=True, blank=True)
    revision = models.IntegerField()
    date_added = models.DateTimeField("date added")
    #the CommaSeparatedIntegerField may not be a good match for this data
    ki_version = models.CommaSeparatedIntegerField(max_length=20)
    # Raw KiCad library text for this item.
    ki_text = models.TextField()
    part_of_ki = models.BooleanField()
    votes = models.IntegerField()
    #We associate the comments with the object when they report a problem.
    #It is a generic relationship as it is a one-to-many relationship which
    #is stored in this model rather than the comments.
    problems_reported = generic.GenericRelation(CommentWithFlag,
        content_type_field="problem_object_type",
        object_id_field="problem_object_id")
    #just a convenience function used during development to print all fields
    def get_fields(self):
        """Return (field_name, value_as_string) pairs for every model field."""
        return [(field.name, field.value_to_string(self)) for field in self._meta.fields]
    class Meta:
        # Abstract: no table of its own; concrete subclasses add the rest.
        abstract = True
    def __unicode__(self):
        # Python 2 string representation (used by the admin and shell).
        return self.name
class Footprint(LibMod):
    """A PCB footprint library item."""
    #The submitter and maintainer have different related names for different types
    #of libraries so that we can keep the associated libraries separate when
    #we look at the maintainer.
    submitter = models.ForeignKey(User, related_name="fp_submitter")
    maintainer = models.ForeignKey(User, related_name="fp_maintainer")
class Component(LibMod):
    """A schematic component library item."""
    submitter = models.ForeignKey(User, related_name="cp_submitter")
    maintainer = models.ForeignKey(User, related_name="cp_maintainer")
    #components may have many footprints associated with them and vice versa
    #but we simply define this in the component model
    footprints = models.ManyToManyField(Footprint, null=True, blank=True)
from django.contrib.comments.signals import comment_was_posted
from django.dispatch import receiver
@receiver(comment_was_posted)
def comment_posted_callback(sender, comment, **kwargs):
    ''' When a comment is posted, we check if a problem is reported too.'''
    # If the commenter flagged a problem, link the comment to the commented
    # object through the generic "problems_reported" relation and persist it.
    if comment.report_problem:# and (comment.content_object.problem_reported is None):
        comment.problem_object = comment.content_object
        comment.save()
| {"/tools/templatetags/tools_extras.py": ["/libmods/models.py"], "/libmods/admin.py": ["/libmods/models.py"], "/libmods/views.py": ["/kiur/haystack_forms.py", "/kiur/views.py", "/libmods/parse.py", "/libmods/models.py", "/libmods/forms.py"], "/libmods/search_indexes.py": ["/libmods/models.py"]} |
66,075 | jpurplefox/pokemon_battles | refs/heads/master | /tests/integration/test_mongo_repository.py | from pokemon_battles.domain import models
from pokemon_battles.adapters import repositories
from ..random_refs import random_team_name
def test_get_team_by_name(mongo_database):
    """Teams stored through the repository can be fetched back by name."""
    repo = repositories.MongoTeamRepository(mongo_database)
    first_name, second_name = random_team_name(), random_team_name()
    pikachu = models.Pokemon(
        'Spark',
        models.known_species['Pikachu'],
        level=20,
        moves=[models.known_moves['Thunder Shock']],
    )
    squirtle = models.Pokemon(
        'Bubble',
        models.known_species['Squirtle'],
        level=20,
        moves=[models.known_moves['Bubble']],
    )
    teams = {
        first_name: models.Team(first_name, pokemons=[pikachu]),
        second_name: models.Team(second_name, pokemons=[squirtle]),
    }
    for team in teams.values():
        repo.add(team)
    for name, team in teams.items():
        assert repo.get(name) == team
def test_update_team(mongo_database):
    """Changes made after ``add`` are persisted by ``repo.update``."""
    repo = repositories.MongoTeamRepository(mongo_database)
    name = random_team_name()
    team = models.Team(name)
    repo.add(team)
    assert repo.get(name) == team
    newcomer = models.Pokemon(
        'Spark',
        models.known_species['Pikachu'],
        level=20,
        moves=[models.known_moves['Thunder Shock']],
    )
    team.add_pokemon(newcomer)
    repo.update(team)
    assert repo.get(name) == team
| {"/tests/integration/test_mongo_repository.py": ["/tests/random_refs.py"], "/tests/integration/test_redis_repository.py": ["/tests/random_refs.py"], "/tests/e2e/test_api.py": ["/tests/random_refs.py"]} |
66,076 | jpurplefox/pokemon_battles | refs/heads/master | /src/pokemon_battles/service_layer/user_messagebus.py | import abc
from flask_socketio import SocketIO
from pokemon_battles.domain import user_events
class AbstractUserMessagebus(abc.ABC):
    """Interface for publishing user-facing events to connected clients."""
    pass  # NOTE(review): stray statement, redundant with the method below -- confirm original layout

    @abc.abstractmethod
    def emit(self, event: user_events.UserEvent):
        """Deliver *event* to the interested clients. Subclasses must override."""
        raise NotImplementedError
class FlaskSocketIOUserMessagebus(AbstractUserMessagebus):
    """Publishes user-facing battle events to browsers via Flask-SocketIO.

    The SocketIO instance only talks to the message queue (e.g. redis), so
    any worker process can emit to the room named after the battle ref.
    """
    def __init__(self, message_queue):
        self.socketio = SocketIO(message_queue=message_queue)

    def emit(self, event: user_events.UserEvent):
        """Translate *event* into a socket.io event name + payload and emit
        it to the battle's room.

        BUG FIX: the original non-exclusive ``if`` chain left ``event_name``
        and ``data`` unbound for unknown event types, dying with an
        UnboundLocalError; an explicit ValueError is raised instead.
        """
        if isinstance(event, user_events.BattleReady):
            event_name = 'battle_ready'
            data = {'battle_ref': event.battle_ref}
        elif isinstance(event, user_events.BattleFinished):
            event_name = 'battle_finished'
            data = {'winner': event.winner}
        elif isinstance(event, user_events.PokemonUsedMove):
            event_name = 'move'
            data = {'pokemon': event.pokemon, 'move': event.move}
        elif isinstance(event, user_events.PokemonChanged):
            event_name = 'pokemon_changed'
            data = {'player': event.player, 'pokemon_nickname': event.pokemon_nickname}
        elif isinstance(event, user_events.TurnReady):
            event_name = 'turn_ready'
            data = {'battle_ref': event.battle_ref}
        elif isinstance(event, user_events.PokemonFainted):
            event_name = 'pokemon_fainted'
            data = {'pokemon': event.pokemon}
        else:
            raise ValueError(f'Unknown user event: {event!r}')
        self.socketio.emit(event_name, data, room=event.battle_ref)
| {"/tests/integration/test_mongo_repository.py": ["/tests/random_refs.py"], "/tests/integration/test_redis_repository.py": ["/tests/random_refs.py"], "/tests/e2e/test_api.py": ["/tests/random_refs.py"]} |
66,077 | jpurplefox/pokemon_battles | refs/heads/master | /src/pokemon_battles/domain/events.py | from dataclasses import dataclass
class Event:
    """Base class for internal domain events dispatched by the messagebus."""
    pass


@dataclass(frozen=True)
class TurnReady(Event):
    # Both players have registered an action; the turn can be processed.
    battle_ref: str


@dataclass(frozen=True)
class TurnFinished(Event):
    # All queued actions for the turn have been applied.
    battle_ref: str


@dataclass(frozen=True)
class MovePerformed(Event):
    # A pokemon's queued move should now be executed.
    battle_ref: str
    player: str
    pokemon_nickname: str
    move_name: str


@dataclass(frozen=True)
class OpponentMovePerformed(Event):
    battle_ref: str


@dataclass(frozen=True)
class PokemonChanged(Event):
    # A queued pokemon switch should now be applied for *player*.
    battle_ref: str
    player: str
    pokemon_nickname: str
66,078 | jpurplefox/pokemon_battles | refs/heads/master | /tests/conftest.py | import pytest
from pymongo import MongoClient
from redis import Redis
from pokemon_battles import config
@pytest.fixture
def mongo_database():
    """A pymongo database handle pointed at the configured Mongo server."""
    client = MongoClient(config.get_mongo_uri())
    return client.test_database


@pytest.fixture
def redis_client():
    """A redis client connected to the configured Redis server."""
    return Redis.from_url(url=config.get_redis_uri())
| {"/tests/integration/test_mongo_repository.py": ["/tests/random_refs.py"], "/tests/integration/test_redis_repository.py": ["/tests/random_refs.py"], "/tests/e2e/test_api.py": ["/tests/random_refs.py"]} |
66,079 | jpurplefox/pokemon_battles | refs/heads/master | /src/pokemon_battles/config.py | import os
def get_api_url():
    """Base URL of the HTTP API; the ``API_URL`` env var overrides the default."""
    default = 'http://localhost:5000'
    return os.environ.get('API_URL', default)
def get_mongo_uri():
    """MongoDB connection URI from ``MONGO_HOST``/``MONGO_PORT`` env vars."""
    location = (
        os.environ.get('MONGO_HOST', 'localhost'),
        os.environ.get('MONGO_PORT', '27017'),
    )
    return 'mongodb://%s:%s/' % location
def get_redis_uri():
    """Redis connection URI from ``REDIS_HOST``/``REDIS_PORT`` env vars."""
    location = (
        os.environ.get('REDIS_HOST', 'localhost'),
        os.environ.get('REDIS_PORT', '6379'),
    )
    return 'redis://%s:%s/' % location
| {"/tests/integration/test_mongo_repository.py": ["/tests/random_refs.py"], "/tests/integration/test_redis_repository.py": ["/tests/random_refs.py"], "/tests/e2e/test_api.py": ["/tests/random_refs.py"]} |
66,080 | jpurplefox/pokemon_battles | refs/heads/master | /src/pokemon_battles/domain/commands.py | from dataclasses import dataclass, field
class Command:
    """Base class for messagebus commands (imperative, single-handler)."""
    pass


@dataclass(frozen=True)
class AddTeam(Command):
    """Create a new empty team called *name*."""
    name: str


@dataclass(frozen=True)
class AddPokemonToTeam(Command):
    """Add one pokemon (species name, level, move names) to a team."""
    team_name: str
    nickname: str
    species: str
    lvl: int
    move_names: list = field(default_factory=list)


@dataclass(frozen=True)
class HostBattle(Command):
    """Open a new battle hosted by *team_name*; handler returns the battle ref."""
    team_name: str


@dataclass(frozen=True)
class JoinBattle(Command):
    """Join an existing battle as the opponent."""
    battle_ref: str
    team_name: str


@dataclass(frozen=True)
class RegisterUseMove(Command):
    """Queue a move for *player* ('host' or 'opponent') in the current turn."""
    battle_ref: str
    player: str
    move_name: str


@dataclass(frozen=True)
class RegisterChangePokemon(Command):
    """Queue a pokemon switch for *player* in the current turn."""
    battle_ref: str
    player: str
    pokemon_nickname: str
| {"/tests/integration/test_mongo_repository.py": ["/tests/random_refs.py"], "/tests/integration/test_redis_repository.py": ["/tests/random_refs.py"], "/tests/e2e/test_api.py": ["/tests/random_refs.py"]} |
66,081 | jpurplefox/pokemon_battles | refs/heads/master | /src/pokemon_battles/domain/models.py | import math
from dataclasses import dataclass, field
from typing import List, Set
from . import events, user_events
@dataclass(frozen=True)
class Species:
    """Immutable base stats for one pokemon species."""
    name: str
    hp: int
    attack: int
    defense: int
    sp_attack: int
    sp_defense: int
    speed: int


# Base stats per species, in the order (hp, attack, defense, sp_attack,
# sp_defense, speed); expanded into Species objects right below.
_SPECIES_BASE_STATS = {
    'Squirtle': (44, 48, 65, 50, 64, 43),
    'Pikachu': (35, 55, 30, 50, 40, 90),
    'Caterpie': (45, 30, 35, 20, 20, 45),
    'Ninetales': (73, 76, 75, 81, 100, 100),
}
known_species = {
    name: Species(name, *stats) for name, stats in _SPECIES_BASE_STATS.items()
}


@dataclass(frozen=True)
class Move:
    """An attack a pokemon can use; ``power`` feeds the damage formula."""
    name: str
    power: int


known_moves = {
    name: Move(name, power)
    for name, power in (
        ('Thunder Shock', 40),
        ('Bubble', 40),
        ('Flamethrower', 90),
        ('Tackle', 40),
    )
}


@dataclass
class Pokemon:
    """A trained pokemon: a species plus nickname, level and learned moves."""
    nickname: str
    species: Species
    level: int
    moves: Set[Move] = field(default_factory=set)

    def to_dict(self):
        """Serialize to plain JSON-able types; inverse of :meth:`from_dict`."""
        payload = {
            'nickname': self.nickname,
            'species': self.species.name,
            'level': self.level,
        }
        payload['moves'] = [move.name for move in self.moves]
        return payload

    @classmethod
    def from_dict(cls, data):
        """Rebuild a pokemon from a ``to_dict``-style mapping, resolving
        species and move names via the ``known_*`` registries."""
        return cls(
            data['nickname'],
            known_species[data['species']],
            data['level'],
            [known_moves[name] for name in data.get('moves', [])],
        )

    def _calculate_stats(self, base):
        # Simplified level scaling shared by every stat except hp.
        return math.floor(5 + base * 2 * self.level / 100)

    @property
    def max_hp(self):
        """Hit points get a flat +10 and a +level bonus over regular stats."""
        return math.floor(10 + self.level + 2 * self.species.hp * self.level / 100)

    @property
    def attack(self):
        return self._calculate_stats(self.species.attack)

    @property
    def defense(self):
        return self._calculate_stats(self.species.defense)

    @property
    def sp_attack(self):
        return self._calculate_stats(self.species.sp_attack)

    @property
    def sp_defense(self):
        return self._calculate_stats(self.species.sp_defense)

    @property
    def speed(self):
        return self._calculate_stats(self.species.speed)
@dataclass
class Team:
    """A named, ordered roster of pokemons."""
    name: str
    pokemons: List[Pokemon] = field(default_factory=list)

    def add_pokemon(self, pokemon: Pokemon):
        """Append *pokemon* to the roster."""
        self.pokemons.append(pokemon)

    def to_dict(self):
        """Serialize to plain types; inverse of :meth:`from_dict`."""
        roster = [member.to_dict() for member in self.pokemons]
        return {'name': self.name, 'pokemons': roster}

    @classmethod
    def from_dict(cls, data):
        """Rebuild a team from a ``to_dict``-style mapping."""
        members = [Pokemon.from_dict(raw) for raw in data.get('pokemons', [])]
        return cls(data['name'], members)
@dataclass
class BattlingPokemon:
    """A pokemon taking part in a battle, with current hp and active flag."""
    pokemon: Pokemon
    hp: int = 0
    is_active: bool = False

    def to_dict(self):
        """Serialize battle-time state; inverse of :meth:`from_dict`."""
        return {
            'pokemon': self.pokemon.to_dict(),
            'hp': self.hp,
            'is_active': self.is_active,
        }

    @classmethod
    def from_dict(cls, data):
        """Rebuild from a ``to_dict``-style mapping."""
        return cls(Pokemon.from_dict(data['pokemon']), data['hp'], data['is_active'])

    def receive_damage(self, damage):
        """Subtract *damage* from current hp (hp may go negative)."""
        self.hp -= damage

    def perform_move_against(self, move, other_pokemon):
        """Apply *move* to *other_pokemon* and return the damage dealt.

        Simplified damage formula scaled by level and the attack/defense
        ratio of the two pokemons.
        """
        level_factor = 2 + 2 * self.pokemon.level / 5
        ratio = self.pokemon.attack / other_pokemon.pokemon.defense
        dealt = 2 + math.floor(level_factor * move.power * ratio / 50)
        other_pokemon.receive_damage(dealt)
        return dealt

    @property
    def is_fainted(self):
        """True once hp has dropped to zero or below."""
        return not self.hp > 0

    def set_active(self, value):
        """Mark this pokemon as the (in)active one for its side."""
        self.is_active = value
class Action:
    """Base class for the two per-turn battle actions."""

    @staticmethod
    def from_dict(data):
        """Deserialize any action dict via the ``actions`` registry; falsy
        input yields None."""
        if data:
            action_cls = actions[data['action_type']]
            return action_cls.from_dict(data['action_data'])
        return None


@dataclass
class ActionChangePokemon(Action):
    """Swap the player's active pokemon for *pokemon_nickname* this turn."""
    pokemon_nickname: str

    def to_dict(self):
        payload = {'pokemon_nickname': self.pokemon_nickname}
        return {'action_type': 'change_pokemon', 'action_data': payload}

    @classmethod
    def from_dict(cls, data):
        return cls(data['pokemon_nickname'])


@dataclass
class ActionUseMove(Action):
    """Have the active pokemon *pokemon_nickname* use *move* this turn."""
    pokemon_nickname: str
    move: str

    def to_dict(self):
        payload = {'pokemon_nickname': self.pokemon_nickname, 'move': self.move}
        return {'action_type': 'use_move', 'action_data': payload}

    @classmethod
    def from_dict(cls, data):
        return cls(data['pokemon_nickname'], data['move'])


# Registry used by Action.from_dict to map the serialized 'action_type'
# tag back to the concrete class.
actions = {
    'change_pokemon': ActionChangePokemon,
    'use_move': ActionUseMove,
}
@dataclass
class Battle:
    """Aggregate root for one battle between a 'host' and an 'opponent'.

    Internal domain events accumulate in ``events`` and user-facing
    notifications in ``user_events``; both are drained by the unit of work
    after commit and are excluded from repr/equality.
    """
    ref: str
    host_pokemons: List[BattlingPokemon]
    opponent_pokemons: List[BattlingPokemon] = field(default_factory=list)
    host_action: Action = None
    opponent_action: Action = None
    events: list = field(default_factory=list, repr=False, compare=False)
    user_events: list = field(default_factory=list, repr=False, compare=False)

    def to_dict(self):
        """Serialize persistent state (pending events are not persisted)."""
        return {
            'ref': self.ref,
            'host_pokemons': [pokemon.to_dict() for pokemon in self.host_pokemons],
            'opponent_pokemons': [pokemon.to_dict() for pokemon in self.opponent_pokemons],
            'host_action': self.host_action.to_dict() if self.host_action else None,
            'opponent_action': self.opponent_action.to_dict() if self.opponent_action else None,
        }

    @classmethod
    def from_dict(cls, data):
        """Inverse of :meth:`to_dict`."""
        return cls(
            ref=data['ref'],
            host_pokemons=[
                BattlingPokemon.from_dict(pokemon) for pokemon in data.get('host_pokemons', [])
            ],
            opponent_pokemons=[
                BattlingPokemon.from_dict(pokemon) for pokemon in data.get('opponent_pokemons', [])
            ],
            host_action=Action.from_dict(data.get('host_action')),
            opponent_action=Action.from_dict(data.get('opponent_action')),
        )

    @classmethod
    def host(cls, ref: str, host_team: Team):
        """Open a battle with *host_team*; its first pokemon starts active."""
        battle = cls(
            ref,
            [BattlingPokemon(pokemon, pokemon.max_hp) for pokemon in host_team.pokemons],
        )
        battle.host_pokemons[0].is_active = True
        battle.events = []
        battle.user_events = []
        return battle

    def join(self, opponent_team):
        """Register the opponent's team and notify users (BattleReady)."""
        self.opponent_pokemons = [
            BattlingPokemon(pokemon, pokemon.max_hp) for pokemon in opponent_team.pokemons
        ]
        self.opponent_pokemons[0].is_active = True
        self.user_events.append(user_events.BattleReady(self.ref))

    @property
    def active_host_pokemon(self):
        # Exactly one pokemon per side is active at any time.
        return next(pokemon for pokemon in self.host_pokemons if pokemon.is_active)

    @property
    def active_opponent_pokemon(self):
        return next(pokemon for pokemon in self.opponent_pokemons if pokemon.is_active)

    def register_use_move(self, player: str, move_name: str):
        """Queue a 'use move' action for *player* ('host'/'opponent'); once
        both players have queued an action, emit TurnReady."""
        if player == 'host':
            self.host_action = ActionUseMove(self.active_host_pokemon.pokemon.nickname, move_name)
        if player == 'opponent':
            self.opponent_action = ActionUseMove(self.active_opponent_pokemon.pokemon.nickname, move_name)
        if self.host_action and self.opponent_action:
            self.events.append(events.TurnReady(self.ref))

    def register_change_pokemon(self, player: str, pokemon_nickname: str):
        """Queue a 'change pokemon' action; see :meth:`register_use_move`."""
        if player == 'host':
            self.host_action = ActionChangePokemon(pokemon_nickname)
        if player == 'opponent':
            self.opponent_action = ActionChangePokemon(pokemon_nickname)
        if self.host_action and self.opponent_action:
            self.events.append(events.TurnReady(self.ref))

    def change_pokemon(self, player: str, pokemon_nickname: str):
        """Make *pokemon_nickname* the single active pokemon for *player*
        and notify users. (Leftover debug print calls removed.)"""
        if player == 'host':
            pokemons = self.host_pokemons
        if player == 'opponent':
            pokemons = self.opponent_pokemons
        [pokemon.set_active(False) for pokemon in pokemons]
        [pokemon.set_active(True) for pokemon in pokemons if pokemon.pokemon.nickname == pokemon_nickname]
        self.user_events.append(user_events.PokemonChanged(self.ref, player, pokemon_nickname))

    def process_turn(self):
        """Expand both queued actions into ordered domain events and clear
        them; finishes with TurnFinished."""
        if isinstance(self.host_action, ActionUseMove):
            self.events.append(events.MovePerformed(
                self.ref,
                'host',
                self.active_host_pokemon.pokemon.nickname,
                self.host_action.move,
            ))
        if isinstance(self.host_action, ActionChangePokemon):
            self.events.append(
                events.PokemonChanged(self.ref, 'host', self.host_action.pokemon_nickname)
            )
        if isinstance(self.opponent_action, ActionUseMove):
            self.events.append(events.MovePerformed(
                self.ref,
                'opponent',
                self.active_opponent_pokemon.pokemon.nickname,
                self.opponent_action.move,
            ))
        if isinstance(self.opponent_action, ActionChangePokemon):
            self.events.append(
                events.PokemonChanged(self.ref, 'opponent', self.opponent_action.pokemon_nickname)
            )
        self.host_action = None
        self.opponent_action = None
        self.events.append(events.TurnFinished(self.ref))

    def finish_turn(self):
        """Emit end-of-turn user events: a faint ends the battle, otherwise
        the next turn is announced.

        NOTE(review): only the currently-active pokemons are checked; a
        faint immediately finishes the whole battle (no forced switch).
        """
        if self.active_host_pokemon.is_fainted:
            self.user_events.append(
                user_events.PokemonFainted(self.ref, self.active_host_pokemon.pokemon.species.name)
            )
            self.user_events.append(user_events.BattleFinished(self.ref, 'opponent'))
        elif self.active_opponent_pokemon.is_fainted:
            self.user_events.append(
                user_events.PokemonFainted(self.ref, self.active_opponent_pokemon.pokemon.species.name)
            )
            self.user_events.append(user_events.BattleFinished(self.ref, 'host'))
        else:
            self.user_events.append(user_events.TurnReady(self.ref))

    def _perform_move(self, pokemon: BattlingPokemon, move: Move, opponent: BattlingPokemon):
        # Apply *move* only if the attacker is still the active pokemon,
        # then notify users of the damage dealt.
        if pokemon.is_active:
            damage = pokemon.perform_move_against(move, opponent)
            user_event = user_events.PokemonUsedMove(
                self.ref,
                pokemon.pokemon.species.name,
                move.name,
                damage,
            )
            self.user_events.append(user_event)
        pokemon.next_move = None  # NOTE(review): 'next_move' is never read in this module

    def perform_move(self, player: str, pokemon_nickname: str, move_name):
        """Resolve a MovePerformed event: the given player's active pokemon
        attacks the other side's active pokemon."""
        if player == 'host':
            pokemon_that_moved = self.active_host_pokemon
            pokemon_that_receive_move = self.active_opponent_pokemon
        if player == 'opponent':
            pokemon_that_moved = self.active_opponent_pokemon
            pokemon_that_receive_move = self.active_host_pokemon
        move = known_moves[move_name]
        self._perform_move(pokemon_that_moved, move, pokemon_that_receive_move)

    def get_possible_moves(self, player):
        """Move names the player's active pokemon can use this turn."""
        if player == 'host':
            moves = self.active_host_pokemon.pokemon.moves
        if player == 'opponent':
            moves = self.active_opponent_pokemon.pokemon.moves
        return [move.name for move in moves]

    def get_inactive_pokemons(self, player):
        """Nicknames the player could switch to (everything not active)."""
        if player == 'host':
            pokemons = self.host_pokemons
        if player == 'opponent':
            pokemons = self.opponent_pokemons
        return [pokemon.pokemon.nickname for pokemon in pokemons if not pokemon.is_active]
| {"/tests/integration/test_mongo_repository.py": ["/tests/random_refs.py"], "/tests/integration/test_redis_repository.py": ["/tests/random_refs.py"], "/tests/e2e/test_api.py": ["/tests/random_refs.py"]} |
66,082 | jpurplefox/pokemon_battles | refs/heads/master | /tests/integration/test_redis_repository.py | import uuid
from pokemon_battles.domain import models
from pokemon_battles.adapters import repositories
from ..random_refs import random_team_name
def test_get_battle_by_ref(redis_client):
    """Battles survive both an add/get and an update/get round trip."""
    repo = repositories.RedisBattleRepository(redis_client)
    ref = str(uuid.uuid4())
    pikachu = models.Pokemon(
        'Spark',
        models.known_species['Pikachu'],
        level=20,
        moves=[models.known_moves['Thunder Shock']],
    )
    squirtle = models.Pokemon(
        'Bubble',
        models.known_species['Squirtle'],
        level=20,
        moves=[models.known_moves['Bubble']],
    )
    caterpie = models.Pokemon(
        'Buggy',
        models.known_species['Caterpie'],
        level=20,
        moves=[models.known_moves['Tackle']],
    )
    hosting_team = models.Team('Host', pokemons=[pikachu])
    joining_team = models.Team('Opponent', pokemons=[squirtle, caterpie])
    battle = models.Battle.host(ref, hosting_team)
    repo.add(battle)
    assert repo.get(ref) == battle
    battle.join(joining_team)
    battle.host_action = models.ActionUseMove('Squirtle', 'Bubble')
    battle.opponent_action = models.ActionChangePokemon('Buggy')
    repo.update(battle)
    assert repo.get(ref) == battle
| {"/tests/integration/test_mongo_repository.py": ["/tests/random_refs.py"], "/tests/integration/test_redis_repository.py": ["/tests/random_refs.py"], "/tests/e2e/test_api.py": ["/tests/random_refs.py"]} |
66,083 | jpurplefox/pokemon_battles | refs/heads/master | /src/setup.py | from setuptools import setup
# Minimal packaging metadata; install with ``pip install -e src``.
setup(
    name='pokemon_battles',
    version='0.1',
    packages=['pokemon_battles'],
)
| {"/tests/integration/test_mongo_repository.py": ["/tests/random_refs.py"], "/tests/integration/test_redis_repository.py": ["/tests/random_refs.py"], "/tests/e2e/test_api.py": ["/tests/random_refs.py"]} |
66,084 | jpurplefox/pokemon_battles | refs/heads/master | /tests/random_refs.py | import uuid
def random_suffix():
    """Return six random lowercase hex characters (drawn from a UUID4)."""
    token = uuid.uuid4().hex
    return token[:6]


def random_team_name(name=''):
    """Unique test team name of the form ``team-<name>-<suffix>``."""
    return '-'.join(('team', name, random_suffix()))
| {"/tests/integration/test_mongo_repository.py": ["/tests/random_refs.py"], "/tests/integration/test_redis_repository.py": ["/tests/random_refs.py"], "/tests/e2e/test_api.py": ["/tests/random_refs.py"]} |
66,085 | jpurplefox/pokemon_battles | refs/heads/master | /src/pokemon_battles/adapters/repositories.py | import abc
import json
from pokemon_battles.domain import models
class AbstractTeamRepository(abc.ABC):
    """Base team repository.

    Every team added or fetched is tracked in ``seen`` so the unit of work
    can later persist changes to all touched aggregates.
    """

    def __init__(self):
        self.seen = list()

    def add(self, team: models.Team):
        """Persist *team* and remember it in ``seen``."""
        self._add(team)
        self.seen.append(team)

    def get(self, name: str):
        """Fetch a team by name; found teams are tracked in ``seen``."""
        team = self._get(name)
        if team:
            self.seen.append(team)
        return team
class MongoTeamRepository(AbstractTeamRepository):
    """Team repository backed by the MongoDB ``teams`` collection."""

    def __init__(self, database):
        super().__init__()
        self.database = database

    @property
    def collection(self):
        """The pymongo collection holding team documents."""
        return self.database.teams

    def update(self, team: models.Team):
        """Overwrite the stored document keyed by team name."""
        self.collection.replace_one({'name': team.name}, team.to_dict())

    def _add(self, team: models.Team):
        self.collection.insert_one(team.to_dict())

    def _get(self, name: str):
        document = self.collection.find_one({'name': name})
        return models.Team.from_dict(document) if document else None
class AbstractBattleRepository(abc.ABC):
    """Base battle repository; mirrors AbstractTeamRepository's ``seen``
    tracking for battles."""

    def __init__(self):
        self.seen = list()

    def add(self, battle: models.Battle):
        """Persist *battle* and remember it in ``seen``."""
        self._add(battle)
        self.seen.append(battle)

    def get(self, battle_ref: str):
        """Fetch a battle by ref; found battles are tracked in ``seen``."""
        battle = self._get(battle_ref)
        if battle:
            self.seen.append(battle)
        return battle
class RedisBattleRepository(AbstractBattleRepository):
    """Battle repository backed by redis, one JSON blob per battle.

    NOTE: the ``team`` parameter names are kept for interface compatibility
    but these methods actually receive ``models.Battle`` instances.
    """

    def __init__(self, client):
        super().__init__()
        self.client = client

    def get_key(self, ref):
        """Redis key under which a battle ref is stored."""
        return f'battle-{ref}'

    def update(self, team: models.Battle):
        self.client.set(self.get_key(team.ref), json.dumps(team.to_dict()))

    def _add(self, team: models.Battle):
        self.client.set(self.get_key(team.ref), json.dumps(team.to_dict()))

    def _get(self, ref: str):
        # BUG FIX: redis returns None for a missing key; the original fed
        # that straight to json.loads and raised TypeError instead of
        # returning None as AbstractBattleRepository.get expects.
        raw_data = self.client.get(self.get_key(ref))
        if raw_data is None:
            return None
        return models.Battle.from_dict(json.loads(raw_data))
| {"/tests/integration/test_mongo_repository.py": ["/tests/random_refs.py"], "/tests/integration/test_redis_repository.py": ["/tests/random_refs.py"], "/tests/e2e/test_api.py": ["/tests/random_refs.py"]} |
66,086 | jpurplefox/pokemon_battles | refs/heads/master | /tests/unit/test_models.py | from pokemon_battles.domain import models
def test_pokemon_calculates_stats_properly():
    """Level-20 Pikachu derived stats match the simplified stat formula."""
    pikachu = models.Pokemon('Spark', models.known_species['Pikachu'], level=20)
    expected = {
        'max_hp': 44,
        'attack': 27,
        'defense': 17,
        'sp_attack': 25,
        'sp_defense': 21,
        'speed': 41,
    }
    for stat, value in expected.items():
        assert getattr(pikachu, stat) == value
| {"/tests/integration/test_mongo_repository.py": ["/tests/random_refs.py"], "/tests/integration/test_redis_repository.py": ["/tests/random_refs.py"], "/tests/e2e/test_api.py": ["/tests/random_refs.py"]} |
66,087 | jpurplefox/pokemon_battles | refs/heads/master | /src/pokemon_battles/service_layer/unit_of_work.py | import abc
from pymongo import MongoClient
from redis import Redis
from pokemon_battles import config
from pokemon_battles.adapters import repositories
from . import messagebus, user_messagebus
class AbstractUnitOfWork(abc.ABC):
    """Unit of work: context manager tying repositories, commit and event
    publication together."""

    def __enter__(self):
        return self

    def __exit__(self, *args):
        # Always roll back on exit; the success path calls commit() first.
        self.rollback()

    def commit(self):
        """Persist changes, then publish internal and user-facing events."""
        self._commit()
        self.publish_events()
        self.publish_user_events()

    def publish_events(self):
        # Drain internal events from every battle touched by this uow and
        # feed them back through the messagebus (handlers may enqueue more).
        # NOTE(review): only battles are drained -- teams emit no events here.
        for battle in self.battles.seen:
            while battle.events:
                event = battle.events.pop(0)
                messagebus.handle(event, uow=self)

    def publish_user_events(self):
        # Forward user-facing events to the socket.io user messagebus.
        for battle in self.battles.seen:
            while battle.user_events:
                event = battle.user_events.pop(0)
                self.user_messagebus.emit(event)

    @abc.abstractmethod
    def _commit(self):
        raise NotImplementedError

    @abc.abstractmethod
    def rollback(self):
        raise NotImplementedError

    def init_repositories(self, team_repository, battle_repository):
        """Called by subclasses to install the concrete repositories."""
        self._teams = team_repository
        self._battles = battle_repository

    @property
    def teams(self):
        return self._teams

    @property
    def battles(self):
        return self._battles
class UnitOfWork(AbstractUnitOfWork):
    """Production unit of work: teams in MongoDB, battles in redis, user
    events published through Flask-SocketIO's redis message queue."""

    def __init__(self):
        mongo_database = MongoClient(config.get_mongo_uri()).pokemon
        redis_client = Redis.from_url(url=config.get_redis_uri())
        self.init_repositories(
            repositories.MongoTeamRepository(mongo_database),
            repositories.RedisBattleRepository(redis_client)
        )
        self.user_messagebus = user_messagebus.FlaskSocketIOUserMessagebus(config.get_redis_uri())

    def _commit(self):
        # Write back every aggregate touched during this unit of work.
        for team in self._teams.seen:
            self._teams.update(team)
        for battle in self._battles.seen:
            self._battles.update(battle)

    def rollback(self):
        # No transactional backend available; rollback is a no-op.
        pass
| {"/tests/integration/test_mongo_repository.py": ["/tests/random_refs.py"], "/tests/integration/test_redis_repository.py": ["/tests/random_refs.py"], "/tests/e2e/test_api.py": ["/tests/random_refs.py"]} |
66,088 | jpurplefox/pokemon_battles | refs/heads/master | /src/pokemon_battles/service_layer/messagebus.py | import logging
from typing import Union
from pokemon_battles.domain import commands, events
from . import handlers
logger = logging.getLogger(__name__)

# A message is either a command (single handler, returns a value) or an
# event (zero or more handlers, fire-and-forget).
Message = Union[commands.Command, events.Event]


def handle(message: Message, uow):
    """Dispatch *message* to its handler(s); command results are returned."""
    if isinstance(message, commands.Command):
        return handle_command(message, uow)
    if isinstance(message, events.Event):
        handle_event(message, uow)
        return None
    raise Exception(f'{message} was not an Event or Command')
def handle_event(event: events.Event, uow):
    """Run every registered handler for *event*.

    Exceptions are logged and re-raised; a failing handler aborts the
    remaining handlers for this event.
    """
    for handler in EVENT_HANDLERS[type(event)]:
        try:
            logger.info('handling event %s with handler %s', event, handler)
            handler(event, uow=uow)
        except Exception:
            # E722 fix: was a bare ``except:`` -- only log-and-reraise real
            # errors, letting KeyboardInterrupt/SystemExit pass untouched.
            logger.exception('Exception handling event %s', event)
            raise
def handle_command(command, uow):
    """Dispatch *command* to its single handler and return the result."""
    logger.debug('handling command %s', command)
    try:
        return COMMAND_HANDLERS[type(command)](command, uow=uow)
    except Exception:
        logger.exception('Exception handling command %s', command)
        raise
# Maps each event type to the list of handlers invoked for it.
EVENT_HANDLERS = {
    events.MovePerformed: [handlers.move_performed],
    events.PokemonChanged: [handlers.pokemon_changed],
    events.TurnReady: [handlers.turn_ready],
    events.TurnFinished: [handlers.turn_finished],
}
# Maps each command type to its single handler.
COMMAND_HANDLERS = {
    commands.AddPokemonToTeam: handlers.add_pokemon_to_team,
    commands.AddTeam: handlers.add_team,
    commands.HostBattle: handlers.host_battle,
    commands.JoinBattle: handlers.join_battle,
    commands.RegisterUseMove: handlers.register_use_move,
    commands.RegisterChangePokemon: handlers.register_change_pokemon,
}
| {"/tests/integration/test_mongo_repository.py": ["/tests/random_refs.py"], "/tests/integration/test_redis_repository.py": ["/tests/random_refs.py"], "/tests/e2e/test_api.py": ["/tests/random_refs.py"]} |
66,089 | jpurplefox/pokemon_battles | refs/heads/master | /tests/e2e/test_api.py | import requests
from pokemon_battles import config
from ..random_refs import random_team_name
def post_to_add_team(name):
    """POST /add_team and assert the team was created (201)."""
    url = config.get_api_url()
    r = requests.post(
        f'{url}/add_team',
        json={'name': name}
    )
    assert r.status_code == 201


def post_to_add_pokemon_to_team(team_name, nickname, species, level, moves):
    """POST /add_pokemon to extend *team_name* with one pokemon."""
    url = config.get_api_url()
    r = requests.post(
        f'{url}/add_pokemon',
        json={
            'team_name': team_name,
            'nickname': nickname,
            'species': species,
            'level': level,
            'moves': moves,
        }
    )
    assert r.status_code == 200


def post_to_host_a_battle(team_name):
    """POST /host_battle; returns the newly created battle's ref."""
    url = config.get_api_url()
    r = requests.post(
        f'{url}/host_battle',
        json={'team_name': team_name}
    )
    data = r.json()
    assert r.status_code == 201
    assert 'battle_ref' in data
    return data['battle_ref']


def get_battle(battle_ref):
    """GET the serialized state of a battle."""
    url = config.get_api_url()
    r = requests.get(f'{url}/battle/{battle_ref}')
    assert r.status_code == 200
    return r.json()


def get_actions(battle_ref, player):
    """GET the moves and pokemon switches available to *player*."""
    url = config.get_api_url()
    r = requests.get(f'{url}/battle/{battle_ref}/actions', {'player': player})
    assert r.status_code == 200
    return r.json()


def post_to_join_a_battle(battle_ref, team_name):
    """POST /join_battle to enter an existing battle as the opponent."""
    url = config.get_api_url()
    r = requests.post(
        f'{url}/join_battle',
        json={'battle_ref': battle_ref, 'team_name': team_name}
    )
    assert r.status_code == 200


def post_to_register_a_move(battle_ref, player, move_name):
    """POST /register_a_move queueing a move for *player* this turn."""
    url = config.get_api_url()
    r = requests.post(
        f'{url}/register_a_move',
        json={'battle_ref': battle_ref, 'player': player, 'move_name': move_name}
    )
    assert r.status_code == 200


def post_to_register_a_pokemon_change(battle_ref, player, pokemon_nickname):
    """POST /register_a_pokemon_change queueing a switch for *player*."""
    url = config.get_api_url()
    r = requests.post(
        f'{url}/register_a_pokemon_change',
        json={'battle_ref': battle_ref, 'player': player, 'pokemon_nickname': pokemon_nickname}
    )
    assert r.status_code == 200
def test_team_creation_happy_path():
    """A team can be created and a pokemon added through the API."""
    team_name = random_team_name()
    post_to_add_team(team_name)
    post_to_add_pokemon_to_team(team_name, 'Spark', 'Pikachu', 20, ['Thunder Shock'])


def test_battle_happy_path():
    """End-to-end: create two teams, host/join a battle, inspect available
    actions and queue one action per player."""
    host_team_name, opponent_team_name = random_team_name(), random_team_name()
    post_to_add_team(host_team_name)
    post_to_add_pokemon_to_team(host_team_name, 'Spark', 'Pikachu', 20, ['Thunder Shock'])
    post_to_add_team(opponent_team_name)
    post_to_add_pokemon_to_team(opponent_team_name, 'Bubble', 'Squirtle', 20, ['Bubble'])
    post_to_add_pokemon_to_team(opponent_team_name, 'Buggy', 'Caterpie', 20, ['Tackle'])
    battle_ref = post_to_host_a_battle(host_team_name)
    post_to_join_a_battle(battle_ref, opponent_team_name)
    battle_data = get_battle(battle_ref)
    assert battle_data is not None
    # The host has one pokemon (no switch options); the opponent can switch
    # to its benched Caterpie.
    assert get_actions(battle_ref, 'host') == {'moves': ['Thunder Shock'], 'pokemons': []}
    assert get_actions(battle_ref, 'opponent') == {'moves': ['Bubble'], 'pokemons': ['Buggy']}
    post_to_register_a_move(battle_ref, 'host', 'Thunder Shock')
    post_to_register_a_pokemon_change(battle_ref, 'opponent', 'Buggy')
| {"/tests/integration/test_mongo_repository.py": ["/tests/random_refs.py"], "/tests/integration/test_redis_repository.py": ["/tests/random_refs.py"], "/tests/e2e/test_api.py": ["/tests/random_refs.py"]} |
66,090 | jpurplefox/pokemon_battles | refs/heads/master | /src/pokemon_battles/domain/user_events.py | from dataclasses import dataclass
class UserEvent:
    """Base class for notifications pushed to clients over socket.io; every
    concrete event carries the battle_ref used as the room name."""
    pass


@dataclass
class PokemonUsedMove(UserEvent):
    battle_ref: str
    pokemon: str  # species name of the attacker
    move: str
    damage: int


@dataclass
class BattleReady(UserEvent):
    # The opponent joined; the battle can start.
    battle_ref: str


@dataclass
class BattleFinished(UserEvent):
    battle_ref: str
    winner: str  # 'host' or 'opponent'


@dataclass
class TurnReady(UserEvent):
    # The previous turn resolved; players may queue new actions.
    battle_ref: str


@dataclass
class PokemonFainted(UserEvent):
    battle_ref: str
    pokemon: str  # species name of the fainted pokemon


@dataclass
class PokemonChanged(UserEvent):
    battle_ref: str
    player: str
    pokemon_nickname: str
66,091 | jpurplefox/pokemon_battles | refs/heads/master | /src/pokemon_battles/endpoints/flask_app.py | from flask import Flask, jsonify, render_template, request
from flask_socketio import SocketIO, send, join_room
import eventlet
from pokemon_battles import config
from pokemon_battles.domain import commands
from pokemon_battles.service_layer import messagebus, unit_of_work
# Patch the stdlib for cooperative (green-thread) I/O before anything else
# creates sockets; required by the eventlet-based socket.io server.
eventlet.monkey_patch()
app = Flask(__name__)
app.config['SECRET_KEY'] = 'secret!'  # NOTE(review): hard-coded secret -- move to config/env for production
socketio = SocketIO(app, cors_allowed_origins='*', message_queue=config.get_redis_uri())
@app.route('/')
def index():
    """Serve the single-page battle UI."""
    return render_template('index.html')
@app.route('/add_team', methods=['POST'])
def add_team():
    """Create an empty team. Body: ``{"name": str}``. Returns 201."""
    cmd = commands.AddTeam(request.json['name'])
    uow = unit_of_work.UnitOfWork()
    messagebus.handle(cmd, uow)
    return jsonify({'status': 'OK'}), 201


@app.route('/add_pokemon', methods=['POST'])
def add_pokemon():
    """Add one pokemon to a team. Body: team_name/nickname/species/level/moves."""
    cmd = commands.AddPokemonToTeam(
        request.json['team_name'],
        request.json['nickname'],
        request.json['species'],
        request.json['level'],
        request.json['moves'],
    )
    uow = unit_of_work.UnitOfWork()
    messagebus.handle(cmd, uow)
    return jsonify({'status': 'OK'}), 200
@app.route('/host_battle', methods=['POST'])
def host_battle():
    """Open a new battle hosted by the given team; return its reference."""
    cmd = commands.HostBattle(
        request.json['team_name'],
    )
    uow = unit_of_work.UnitOfWork()
    # handle() returns the new battle's reference generated by the handler.
    battle_ref = messagebus.handle(cmd, uow)
    return jsonify({'battle_ref': battle_ref}), 201
@app.route('/join_battle', methods=['POST'])
def join_battle():
    """Join an existing battle with the given team.

    Expects JSON: battle_ref, team_name. Returns {"status": "OK"} / 200.
    """
    cmd = commands.JoinBattle(
        request.json['battle_ref'],
        request.json['team_name'],
    )
    uow = unit_of_work.UnitOfWork()
    # Fix: the handler's return value was bound to an unused local.
    messagebus.handle(cmd, uow)
    return jsonify({'status': 'OK'}), 200
@app.route('/battle/<ref>', methods=['GET'])
def get_battle(ref):
    """Return the serialized state of battle *ref* as JSON."""
    uow = unit_of_work.UnitOfWork()
    with uow:
        battle = uow.battles.get(ref)
        return jsonify(battle.to_dict()), 200
@app.route('/battle/<ref>/actions', methods=['GET'])
def get_actions(ref):
    """List the available moves and benched pokemons for ?player=... in battle *ref*."""
    uow = unit_of_work.UnitOfWork()
    player = request.args.get('player')
    with uow:
        battle = uow.battles.get(ref)
        moves = battle.get_possible_moves(player)
        pokemons = battle.get_inactive_pokemons(player)
        return jsonify({'moves': moves, 'pokemons': pokemons}), 200
@app.route('/register_a_move', methods=['POST'])
def register_a_move():
    """Record a player's chosen move for the current turn.

    Expects JSON: battle_ref, player, move_name. Returns 200.
    """
    cmd = commands.RegisterUseMove(
        request.json['battle_ref'],
        request.json['player'],
        request.json['move_name'],
    )
    uow = unit_of_work.UnitOfWork()
    # Fix: the handler's return value was bound to an unused local.
    messagebus.handle(cmd, uow)
    return jsonify({'status': 'OK'}), 200
@app.route('/register_a_pokemon_change', methods=['POST'])
def register_a_pokemon_change():
    """Record a player's pokemon swap for the current turn.

    Expects JSON: battle_ref, player, pokemon_nickname. Returns 200.
    """
    cmd = commands.RegisterChangePokemon(
        request.json['battle_ref'],
        request.json['player'],
        request.json['pokemon_nickname'],
    )
    uow = unit_of_work.UnitOfWork()
    # Fix: the handler's return value was bound to an unused local.
    messagebus.handle(cmd, uow)
    return jsonify({'status': 'OK'}), 200
@socketio.on('connect')
def test_connect():
    """Acknowledge a new websocket connection."""
    print('Connected')
    send('Connected')
@socketio.on('join')
def on_join(message):
    """Subscribe this socket to the room named in message['room']."""
    print('Joining a room')
    join_room(message['room'])
@socketio.on('disconnect')
def test_disconnect():
    """Log a websocket disconnection."""
    print('Disconnected')
    send('Disconnected')
@socketio.on('message')
def handle_message(message):
    """Echo plain text messages back to the sender."""
    print('received message: ' + message)
    send('received message: ' + message)
| {"/tests/integration/test_mongo_repository.py": ["/tests/random_refs.py"], "/tests/integration/test_redis_repository.py": ["/tests/random_refs.py"], "/tests/e2e/test_api.py": ["/tests/random_refs.py"]} |
66,092 | jpurplefox/pokemon_battles | refs/heads/master | /tests/unit/test_handlers.py | from pokemon_battles.adapters import repositories
from pokemon_battles.domain import commands, user_events
from pokemon_battles.service_layer import messagebus
from pokemon_battles.service_layer.unit_of_work import AbstractUnitOfWork
from pokemon_battles.service_layer.user_messagebus import AbstractUserMessagebus
class FakeTeamRepository(repositories.AbstractTeamRepository):
    """In-memory, list-backed team repository for unit tests."""
    def __init__(self):
        super().__init__()
        self._teams = []
    def _add(self, team):
        self._teams.append(team)
    def _get(self, name):
        # Raises StopIteration when no team matches — tests expect presence.
        return next(team for team in self._teams if team.name == name)
class FakeBattleRepository(repositories.AbstractBattleRepository):
    """In-memory, list-backed battle repository for unit tests."""
    def __init__(self):
        super().__init__()
        self._battles = []
    def _add(self, battle):
        self._battles.append(battle)
    def _get(self, ref):
        # Raises StopIteration when no battle matches — tests expect presence.
        return next(battle for battle in self._battles if battle.ref == ref)
class FakeUserMessagebus(AbstractUserMessagebus):
    """Collects emitted user events in a list so tests can assert on them."""
    def __init__(self):
        self.events = []
    def emit(self, event):
        self.events.append(event)
class FakeUnitOfWork(AbstractUnitOfWork):
    """Unit of work backed by the in-memory fakes; records whether commit ran."""
    def __init__(self):
        self.init_repositories(FakeTeamRepository(), FakeBattleRepository())
        self.user_messagebus = FakeUserMessagebus()
        # NOTE: 'commited' (sic) — the misspelled attribute is asserted on by tests.
        self.commited = False
    def _commit(self):
        self.commited = True
    def rollback(self):
        pass
def test_add_team():
    """AddTeam stores the team and commits the unit of work."""
    uow = FakeUnitOfWork()
    messagebus.handle(commands.AddTeam('My team'), uow)
    assert uow.teams.get('My team') is not None
    assert uow.commited
def test_add_pokemon_to_team():
    """AddPokemonToTeam attaches one pokemon to an existing team."""
    uow = FakeUnitOfWork()
    messagebus.handle(commands.AddTeam('My team'), uow)
    messagebus.handle(
        commands.AddPokemonToTeam('My team', 'Spark', 'Pikachu', lvl=20, move_names=['Thunder Shock']),
        uow=uow
    )
    assert len(uow.teams.get('My team').pokemons) == 1
def create_a_battle(uow):
    """Fixture helper: build two one-pokemon teams and a joined battle.

    Returns the new battle's reference.
    """
    messagebus.handle(commands.AddTeam('Host team'), uow)
    messagebus.handle(commands.AddTeam('Opponent team'), uow)
    messagebus.handle(
        commands.AddPokemonToTeam('Host team', 'Spark', 'Pikachu', lvl=20, move_names=['Thunder Shock']),
        uow=uow
    )
    messagebus.handle(
        commands.AddPokemonToTeam('Opponent team', 'Bubble', 'Squirtle', lvl=20, move_names=['Bubble']),
        uow=uow
    )
    battle_ref = messagebus.handle(commands.HostBattle('Host team'), uow)
    messagebus.handle(commands.JoinBattle(battle_ref, 'Opponent team'), uow)
    return battle_ref
def test_host_and_join_a_battle():
    """Hosting + joining leaves a retrievable battle in the repository."""
    uow = FakeUnitOfWork()
    battle_ref = create_a_battle(uow)
    assert uow.battles.get(battle_ref) is not None
def test_a_battle_turn_is_successfully_complete():
    """After both players pick a move, the expected user events are emitted in order."""
    uow = FakeUnitOfWork()
    battle_ref = create_a_battle(uow)
    messagebus.handle(commands.RegisterUseMove(battle_ref, 'host', 'Thunder Shock'), uow)
    messagebus.handle(commands.RegisterUseMove(battle_ref, 'opponent', 'Bubble'), uow)
    battle = uow.battles.get(battle_ref)
    expected_events = [
        user_events.BattleReady(battle_ref),
        user_events.PokemonUsedMove(battle_ref, 'Pikachu', 'Thunder Shock', 8),
        user_events.PokemonUsedMove(battle_ref, 'Squirtle', 'Bubble', 13),
        user_events.TurnReady(battle_ref),
    ]
    assert uow.user_messagebus.events == expected_events
def test_opponent_can_choose_first_next_turn_move():
    """A turn resolves regardless of which player registers a move first."""
    uow = FakeUnitOfWork()
    battle_ref = create_a_battle(uow)
    messagebus.handle(commands.RegisterUseMove(battle_ref, 'opponent', 'Bubble'), uow)
    assert user_events.TurnReady(battle_ref) not in uow.user_messagebus.events
    messagebus.handle(commands.RegisterUseMove(battle_ref, 'host', 'Thunder Shock'), uow)
    assert user_events.TurnReady(battle_ref) in uow.user_messagebus.events
def test_a_battle_finishes():
    """A decisive move faints the last opposing pokemon and ends the battle."""
    uow = FakeUnitOfWork()
    messagebus.handle(commands.AddTeam('Host team'), uow)
    messagebus.handle(commands.AddTeam('Opponent team'), uow)
    # Extreme level gap makes the outcome deterministic.
    messagebus.handle(
        commands.AddPokemonToTeam('Host team', 'Flame', 'Ninetales', lvl=100, move_names=['Flamethrower']),
        uow=uow
    )
    messagebus.handle(
        commands.AddPokemonToTeam('Opponent team', 'Buggy', 'Caterpie', lvl=5, move_names=['Tackle']),
        uow=uow
    )
    battle_ref = messagebus.handle(commands.HostBattle('Host team'), uow)
    messagebus.handle(commands.JoinBattle(battle_ref, 'Opponent team'), uow)
    messagebus.handle(commands.RegisterUseMove(battle_ref, 'host', 'Flamethrower'), uow)
    messagebus.handle(commands.RegisterUseMove(battle_ref, 'opponent', 'Tackle'), uow)
    assert user_events.PokemonFainted(battle_ref, 'Caterpie') in uow.user_messagebus.events
    assert user_events.BattleFinished(battle_ref, 'host') in uow.user_messagebus.events
def test_can_change_active_pokemon():
    """Swapping the active pokemon emits a PokemonChanged user event."""
    uow = FakeUnitOfWork()
    messagebus.handle(commands.AddTeam('Host team'), uow)
    messagebus.handle(commands.AddTeam('Opponent team'), uow)
    messagebus.handle(
        commands.AddPokemonToTeam('Host team', 'Spark', 'Pikachu', lvl=20, move_names=['Flamethrower']),
        uow=uow
    )
    messagebus.handle(
        commands.AddPokemonToTeam('Host team', 'Flame', 'Ninetales', lvl=20, move_names=['Flamethrower']),
        uow=uow
    )
    messagebus.handle(
        commands.AddPokemonToTeam('Opponent team', 'Bubble', 'Squirtle', lvl=20, move_names=['Bubble']),
        uow=uow
    )
    battle_ref = messagebus.handle(commands.HostBattle('Host team'), uow)
    messagebus.handle(commands.JoinBattle(battle_ref, 'Opponent team'), uow)
    messagebus.handle(commands.RegisterChangePokemon(battle_ref, 'host', 'Flame'), uow)
    # NOTE(review): 'Tackle' is not among Squirtle's registered moves
    # ('Bubble') — confirm the domain tolerates unknown move names here.
    messagebus.handle(commands.RegisterUseMove(battle_ref, 'opponent', 'Tackle'), uow)
    assert user_events.PokemonChanged(battle_ref, 'host', 'Flame') in uow.user_messagebus.events
| {"/tests/integration/test_mongo_repository.py": ["/tests/random_refs.py"], "/tests/integration/test_redis_repository.py": ["/tests/random_refs.py"], "/tests/e2e/test_api.py": ["/tests/random_refs.py"]} |
66,093 | jpurplefox/pokemon_battles | refs/heads/master | /src/pokemon_battles/service_layer/handlers.py | import uuid
from pokemon_battles.domain import commands, events, models
def add_team(cmd: commands.AddTeam, uow):
    """Create a new team named *cmd.name* and persist it."""
    with uow:
        uow.teams.add(models.Team(cmd.name))
        uow.commit()
def add_pokemon_to_team(cmd: commands.AddPokemonToTeam, uow):
    """Build a Pokemon from the command's data and attach it to the named team."""
    move_list = [models.known_moves[name] for name in cmd.move_names]
    new_pokemon = models.Pokemon(
        cmd.nickname, models.known_species[cmd.species], cmd.lvl, move_list
    )
    with uow:
        uow.teams.get(cmd.team_name).add_pokemon(new_pokemon)
        uow.commit()
def host_battle(cmd: commands.HostBattle, uow):
    """Open a new battle hosted by *cmd.team_name*; return its fresh UUID ref."""
    battle_ref = str(uuid.uuid4())
    with uow:
        hosting_team = uow.teams.get(cmd.team_name)
        uow.battles.add(models.Battle.host(battle_ref, hosting_team))
        uow.commit()
    return battle_ref
def join_battle(cmd: commands.JoinBattle, uow):
    """Attach *cmd.team_name* as the opponent of battle *cmd.battle_ref*."""
    with uow:
        joining_team = uow.teams.get(cmd.team_name)
        uow.battles.get(cmd.battle_ref).join(joining_team)
        uow.commit()
def register_use_move(cmd: commands.RegisterUseMove, uow):
    """Record that *cmd.player* chose move *cmd.move_name* this turn."""
    with uow:
        uow.battles.get(cmd.battle_ref).register_use_move(cmd.player, cmd.move_name)
        uow.commit()
def register_change_pokemon(cmd: commands.RegisterChangePokemon, uow):
    """Record that *cmd.player* wants to swap to *cmd.pokemon_nickname*."""
    with uow:
        uow.battles.get(cmd.battle_ref).register_change_pokemon(cmd.player, cmd.pokemon_nickname)
        uow.commit()
def move_performed(event: events.MovePerformed, uow):
    """Apply a registered move to the battle state."""
    with uow:
        uow.battles.get(event.battle_ref).perform_move(
            event.player, event.pokemon_nickname, event.move_name
        )
        uow.commit()
def pokemon_changed(event: events.PokemonChanged, uow):
    """Apply a registered pokemon swap to the battle state."""
    with uow:
        uow.battles.get(event.battle_ref).change_pokemon(event.player, event.pokemon_nickname)
        uow.commit()
def turn_ready(event: events.TurnReady, uow):
    """Resolve the turn once both players have registered an action."""
    with uow:
        uow.battles.get(event.battle_ref).process_turn()
        uow.commit()
def turn_finished(event: events.TurnFinished, uow):
    """Run the battle's end-of-turn bookkeeping."""
    with uow:
        uow.battles.get(event.battle_ref).finish_turn()
        uow.commit()
| {"/tests/integration/test_mongo_repository.py": ["/tests/random_refs.py"], "/tests/integration/test_redis_repository.py": ["/tests/random_refs.py"], "/tests/e2e/test_api.py": ["/tests/random_refs.py"]} |
66,105 | frankbx/Volume | refs/heads/master | /recommend.py | from time import ctime, time
import numpy as np
from volumeUtils import *
class RecommendEngine():
    """Placeholder for a future recommendation engine (not yet implemented)."""
    def __init__(self):
        pass
start = time()
print('Start at:', ctime())
# Fix: np.str was a deprecated alias of the builtin str (removed in NumPy
# 1.24); plain str keeps the stock codes as zero-padded strings.
data = pd.read_csv('combo8.csv', dtype={'code': str})
# Ratio of the '1' and '100' combo-count columns; presumably the share of
# 1-day combos over the total period count — TODO confirm against the
# combo generator's column layout.
data['p'] = data['1'] / data['100']
data.sort_values(by=['p'], inplace=True, ascending=False)
# Restrict to rows with a large '100' count before printing the top 20.
print(data[data['100'] > 700].loc[:, ['code', 'p', '1', '2', '3', '4', '5', '100']].head(20))
end = time()
print('End at:', ctime())
print('Duration:', round(end - start, 2), 'seconds')
| {"/recommend.py": ["/volumeUtils.py"], "/dataAnalyzer.py": ["/volumeUtils.py"], "/pair_correlation.py": ["/volumeUtils.py"], "/dataLoader.py": ["/volumeUtils.py"], "/data_convertor.py": ["/volumeUtils.py"], "/dataAcquisition.py": ["/volumeUtils.py"]} |
66,106 | frankbx/Volume | refs/heads/master | /dataAnalyzer.py | import numpy as np
from volumeUtils import *
mkt_overview_columns = ['date', 'market', 'p_change', 'amount', 'up']
def market_overview():
    """Stub: summarise the day's market performance (not yet implemented)."""
    # Get an estimate of market performance of the day
    # Points of interest:
    # 1. SH, SZ change percent
    # 2. SH, SZ volume
    # 3. Number of stocks go up, down and flat
    # 4. Distribution of each group
    pass
def combo_counter(seq, counter):
    """Tally consecutive runs in *seq* into *counter*.

    *seq* is a Series of strictly increasing integers (e.g. day indices).
    For every maximal run of consecutive values of length n, counter[k] is
    incremented n - k + 1 times for each k in 1..n (the number of length-k
    windows inside the run). Processes the leading run, then recurses on
    the remainder. Note: reassigns seq.index in place, as before.
    """
    length = len(seq)
    expected = pd.Series(range(min(seq), min(seq) + length))
    seq.index = range(0, length)  # align positionally with `expected`
    offsets = seq - expected
    # Elements with zero offset form the leading consecutive run.
    run_len = len(offsets[offsets == 0])
    for size in range(run_len, 0, -1):
        counter[size] = counter.get(size, 0) + 1
        for smaller in range(size - 1, 0, -1):
            counter[smaller] = counter.get(smaller, 0) + 1
    if length - run_len > 0:
        combo_counter(seq[offsets > 0], counter)
class AnalyticsEngine(object):
    """Loads the consolidated K-line dataset and runs batch analyses over it."""

    def __init__(self, ktype='D', force_reload=False):
        """Load ./daily.csv if present, else rebuild it from per-stock files.

        :param ktype: K-line period key (passed to read_data).
        :param force_reload: when True, ignore the consolidated file and
            rebuild from the individual per-stock data files.
        """
        self.ktype = ktype
        # TODO add param to force load from all stock files
        if os.path.exists('./daily.csv') and not force_reload:
            self.big_data = self.load_data_from_consolidated_file()
        else:
            self.big_data = self.load_data_from_files()
        self.algorithms = []
        print(self.big_data.shape, 'data loaded')

    def load_data_from_files(self):
        """Concatenate every per-stock CSV listed in ./basics.csv into one frame."""
        data = []
        # Fix: np.str was removed in NumPy 1.24; plain str keeps the stock
        # codes as zero-padded strings.
        basics = pd.read_csv('./basics.csv', dtype={'code': str})
        for code in basics.code:
            d = read_data(code, self.ktype)
            if d is None:
                # No data file for this code; skip it.
                continue
            d['code'] = code
            data.append(d)
        big_data = pd.concat(data, ignore_index=True)
        return big_data

    def load_data_from_consolidated_file(self):
        """Load the previously saved consolidated dataset from ./daily.csv."""
        data = pd.read_csv('./daily.csv', dtype={'code': str})
        return data

    def save_data(self):
        """Persist the consolidated dataset so future runs can skip the rebuild."""
        self.big_data.to_csv('./daily.csv', index=False)

    # TODO add ktype
    def data_in_period(self, original, start=None, end=None):
        """Return the rows of *original* whose (date) index falls in [start, end].

        Fix: a one-sided range previously fell off the end of the if/elif
        chain and returned None; it now filters on the bound that was given.
        """
        if start is None and end is None:
            return original
        if start is not None and end is not None:
            rng = pd.date_range(start, end)
            mask = pd.DataFrame(None, index=rng)
            data = mask.merge(original, left_index=True, right_index=True)
            return data
        if start is not None:
            return original[original.index >= start]
        return original[original.index <= end]

    def run_combo(self, percentage):
        """Count consecutive runs of days with p_change > *percentage* per stock
        and write the tallies to combo<percentage>.csv.

        NOTE(review): this iterates self.big_data.keys() and indexes by code,
        which matches a dict of DataFrames keyed by code; the loaders return
        one concatenated DataFrame, where keys() yields column names instead —
        verify which structure is intended before relying on this method.
        """
        codes = self.big_data.keys()
        result_list = []
        for code in codes:
            data = self.big_data[code].copy()
            if data is not None:
                # Positional index lets combo_counter detect consecutive days.
                data['intIdx'] = range(0, len(data))
                match = data[data.p_change > percentage].copy()
                if len(match) > 0:
                    counter = {'code': code, 'total': len(data)}
                    combo_counter(match.intIdx, counter)
                    result_list.append(counter)
        df = pd.DataFrame(result_list)
        df.fillna(value=0, inplace=True)
        df.set_index('code', inplace=True)
        df.to_csv('combo' + str(percentage) + '.csv')
# A strategy is to define a set of factors and score all stocks based on certain algorithm
# A strategy then is validated by test using data in specified time frame.
class Strategy(object):
    """Skeleton for a factor-based scoring strategy (parameters only, for now)."""
    def __init__(self, **kwargs):
        print(kwargs)
if __name__ == '__main__':
    start = time()
    print('Start at:', ctime())
    # Rebuild the consolidated dataset from the per-stock files and save it.
    engine = AnalyticsEngine(force_reload=True)
    print(engine.big_data.open, engine.big_data.close)
    engine.save_data()
    # paras = {'name': 'strategy', 'p_change': 5, 'turnover': 1}
    # strategy = Strategy(**paras)
    end = time()
    print('End at:', ctime())
    print('Duration:', round(end - start, 2), 'seconds')
# Typical analysis workflow:
# 1. Turnover rate: select stocks actively traded in the past 3 days
# 2. Get tick data: buy > sell, amount delta
# 3. Get big deals: buy > sell, amount delta
# 4. Cluster analysis
| {"/recommend.py": ["/volumeUtils.py"], "/dataAnalyzer.py": ["/volumeUtils.py"], "/pair_correlation.py": ["/volumeUtils.py"], "/dataLoader.py": ["/volumeUtils.py"], "/data_convertor.py": ["/volumeUtils.py"], "/dataAcquisition.py": ["/volumeUtils.py"]} |
66,107 | frankbx/Volume | refs/heads/master | /canslim.py | # -*- coding: utf8 -*-
import tushare as ts
print(ts.__version__)
# Seed the frame with 2016 Q1, then append every quarter from 2005-2015.
report = ts.get_report_data(2016, 1)
report['year'] = 2016
report['quarter'] = 1
for y in range(2005, 2016):
    for q in range(1, 5):
        print(y, q)
        r = ts.get_report_data(y, q)
        r['year'] = y
        r['quarter'] = q
        # NOTE(review): DataFrame.append was removed in pandas 2.0 — this
        # needs pd.concat on modern pandas; kept as-is for the pinned env.
        report = report.append(r, ignore_index=True)
report.to_csv('report05Q4-16Q1.csv')
| {"/recommend.py": ["/volumeUtils.py"], "/dataAnalyzer.py": ["/volumeUtils.py"], "/pair_correlation.py": ["/volumeUtils.py"], "/dataLoader.py": ["/volumeUtils.py"], "/data_convertor.py": ["/volumeUtils.py"], "/dataAcquisition.py": ["/volumeUtils.py"]} |
66,108 | frankbx/Volume | refs/heads/master | /pair_correlation.py | import math
import numpy as np
from volumeUtils import *
corrlation_results = []
data = {}
def code_2_file(code):
    """Map a stock code to its daily-data CSV path (.SH for 6xx codes, else .SZ)."""
    exchange = '.SH' if code.startswith('6') else '.SZ'
    return './data/daily1/' + code + exchange + '.csv'
def read_data(code, start='2014-12-31'):
    """Load one stock's daily CSV into the module-level `data` cache,
    keeping only rows whose date index is after *start*.

    NOTE(review): shadows volumeUtils.read_data pulled in by the star import.
    """
    global data
    file_name = code_2_file(code)
    d = pd.read_csv(file_name)
    d.index = d.date
    data[code] = d[d.index > start]
    # return data[data.index > start]
def pair_correlation(code1, code2):
    """Compute the Pearson correlation of two stocks' closing prices.

    Reads the pre-loaded frames from the module-level `data` cache and
    appends {'code1', 'code2', 'corr'} to the global results list.
    """
    global corrlation_results
    df1 = data[code1]
    df2 = data[code2]
    x = df1.close - df1.close.mean()
    y = df2.close - df2.close.mean()
    denom = math.sqrt((x * x).sum() * (y * y).sum())
    # Fix: a constant price series makes the denominator zero, which raised
    # ZeroDivisionError inside a worker thread; record NaN instead.
    cor = (x * y).sum() / denom if denom else float('nan')
    corrlation_results.append({'code1': code1, 'code2': code2, 'corr': cor})
def calc_pair_correlation(combinations, params):
    """Worker: compute the correlation for each (code1, code2) pair.

    *params* is unused; it exists to match parallel_processing's
    processing_func(chunk, params) calling convention.
    """
    for pair in combinations:
        code1, code2 = pair
        pair_correlation(code1, code2)
if __name__ == '__main__':
    # print(pair_correlation('600084', '601668'))
    # Fix: np.str was removed in NumPy 1.24; plain str keeps codes zero-padded.
    basics = pd.read_csv('./basics.csv', dtype={'code': str})
    # Keep only stocks that were already listed before 2015.
    basics = basics[basics.timeToMarket < 20141231]
    basics = basics[basics.timeToMarket > 0]
    codes = list(basics.code)
    codes.sort()
    # print(codes)
    # length = ceil(len(codes) / 100)
    length = len(codes)
    print(length)
    combinations = []
    start = time()
    print('Start at:', ctime())
    # Preload every stock's frame into the module-level cache.
    for c in codes:
        read_data(c)
    end = time()
    print('Data loading End at:', ctime())
    print('Duration:', round(end - start, 2), 'seconds')
    # All unordered pairs of codes.
    for i in range(0, length):
        for j in range(i + 1, length):
            combinations.append((codes[i], codes[j]))
    parallel_processing(tasks=combinations, processing_func=calc_pair_correlation, chunck_size=850000)
    df = pd.DataFrame(corrlation_results)
    df.to_csv('results4.csv', index=False)
    # print(len(results))
    end = time()
    print('End at:', ctime())
    print('Duration:', round(end - start, 2), 'seconds')
| {"/recommend.py": ["/volumeUtils.py"], "/dataAnalyzer.py": ["/volumeUtils.py"], "/pair_correlation.py": ["/volumeUtils.py"], "/dataLoader.py": ["/volumeUtils.py"], "/data_convertor.py": ["/volumeUtils.py"], "/dataAcquisition.py": ["/volumeUtils.py"]} |
66,109 | frankbx/Volume | refs/heads/master | /yahoo.py | import datetime
import tushare as ts
try:
from matplotlib.finance import quotes_historical_yahoo_ochl
except ImportError:
# quotes_historical_yahoo_ochl was named quotes_historical_yahoo before matplotlib 1.4
from matplotlib.finance import quotes_historical_yahoo as quotes_historical_yahoo_ochl
start = datetime.datetime(2012, 1, 1)
end = datetime.datetime(2016, 11, 1)
def add_suffix(code):
    """Append Yahoo's exchange suffix: .ss for Shanghai (6xx codes), .sz otherwise."""
    return code + ('.ss' if code.startswith('6') else '.sz')
basics = ts.get_stock_basics()
symbols = [add_suffix(c) for c in basics.index]
# print(symbols)
# symbols = ['601600.ss', '600362.ss']
# Probe Yahoo for each symbol; collect the ones it can actually serve and
# print the ones that fail.
yahoo_symbols =[]
for symbol in symbols:
    try:
        quote = quotes_historical_yahoo_ochl(symbol, start, end, asobject=True)
        yahoo_symbols.append(symbol)
    except Exception as e:
        print(symbol)
        continue
| {"/recommend.py": ["/volumeUtils.py"], "/dataAnalyzer.py": ["/volumeUtils.py"], "/pair_correlation.py": ["/volumeUtils.py"], "/dataLoader.py": ["/volumeUtils.py"], "/data_convertor.py": ["/volumeUtils.py"], "/dataAcquisition.py": ["/volumeUtils.py"]} |
66,110 | frankbx/Volume | refs/heads/master | /dataLoader.py | '''
This script is to load all data files per parameters
'''
import pandas as pd
from time import ctime, time
from volumeUtils import *
class DataLoader:
    """Loads every CSV under the ktype-specific data directory into memory."""
    def __init__(self, start=None, end=None, ktype='D'):
        """Walk DATA_DIR_DICT[ktype] and read each file into a list of frames.

        NOTE(review): *start* and *end* are currently unused — confirm
        whether period filtering was intended here.
        """
        self.big_data = None
        data_dir = DATA_DIR_DICT[ktype]
        print(data_dir)
        d = []
        for parent_dir_name, dir_names, file_names in os.walk(data_dir):
            for filename in file_names:
                print(os.path.join(parent_dir_name, filename))
                data = pd.read_csv(os.path.join(parent_dir_name, filename), encoding='cp936')
                # if self.big_data is None:
                #     self.big_data = [data]
                # else:
                #     self.big_data.append(data)
                d.append(data)
        # self.big_data = pd.concat(d, ignore_index=True)
        self.big_data = d
    def get_data(self):
        """Return the loaded list of per-file DataFrames."""
        return self.big_data
if __name__ == '__main__':
    # Smoke test: load the minute data and show the last file's frame.
    start = time()
    print('Start at:', ctime())
    dl = DataLoader(ktype='m')
    d = dl.get_data()
    print(d[-1])
    end = time()
    print('End at:', ctime())
    print('Duration:', round(end - start, 2), 'seconds')
| {"/recommend.py": ["/volumeUtils.py"], "/dataAnalyzer.py": ["/volumeUtils.py"], "/pair_correlation.py": ["/volumeUtils.py"], "/dataLoader.py": ["/volumeUtils.py"], "/data_convertor.py": ["/volumeUtils.py"], "/dataAcquisition.py": ["/volumeUtils.py"]} |
66,111 | frankbx/Volume | refs/heads/master | /data_convertor.py | import os
import numpy as np
from volumeUtils import *
TDX_MINUTE_DATA_DIRECTORY = 'c:/data/minute/'
TDX_FIVE_MINUTES_DATA_DIRECTORY = 'c:/data/5minutes/'
def transform_tongdaxin_data(original_file, transformed_file):
    """Convert one TDX minute-data export into our CSV layout, incrementally.

    The TDX export has no header and ends with a footer line, which the
    trailing [:-1] slice drops. When the target file already exists, only
    rows with a date strictly newer than its last row are appended.
    """
    # Fix: np.str was removed in NumPy 1.24; plain str keeps e.g. '0931'.
    data = pd.read_csv(original_file,
                       header=None, names=['date', 'time', 'open', 'high', 'low', 'close', 'volume', 'amount'],
                       encoding='cp936', dtype={'time': str})[:-1]
    if os.path.exists(transformed_file):
        existing_data = pd.read_csv(transformed_file, dtype={'time': str})
        r, c = existing_data.shape
        if r > 1:
            latest_date = existing_data.date[r - 1]
            # Append only strictly-newer dates to avoid duplicating rows.
            delta2 = data[data.date > latest_date]
            delta2.to_csv(transformed_file, mode='a', header=None, index=False)
    else:
        r, c = data.shape
        if r > 1:
            # Fix: DataFrame.to_csv() has no `dtype` keyword — passing one
            # raised TypeError on every fresh-file write.
            data.to_csv(transformed_file, index=False, encoding='utf-8')
def transform_parallel(source, target):
    """Transform every TDX export in *source* into *target*, using worker threads."""
    filenames = os.listdir(source)
    if not os.path.exists(target):
        os.mkdir(target)
    parallel_processing(tasks=filenames, processing_func=transform, chunck_size=200,
                        params={'source': source, 'target': target})
def transform(filenames, params):
    """Worker: convert each file in *filenames* from params['source'] to params['target']."""
    source = params['source']
    target = params['target']
    for file in filenames:
        # print(file, 'processed')
        original_file = os.path.join(source, file)
        transformed_file = os.path.join(target, file)
        transform_tongdaxin_data(original_file, transformed_file)
transform_parallel(TDX_FIVE_MINUTES_DATA_DIRECTORY, FIVE_MINUTE_DATA_DIR)
transform_parallel(TDX_MINUTE_DATA_DIRECTORY, MINUTE_DATA_DIR)
| {"/recommend.py": ["/volumeUtils.py"], "/dataAnalyzer.py": ["/volumeUtils.py"], "/pair_correlation.py": ["/volumeUtils.py"], "/dataLoader.py": ["/volumeUtils.py"], "/data_convertor.py": ["/volumeUtils.py"], "/dataAcquisition.py": ["/volumeUtils.py"]} |
66,112 | frankbx/Volume | refs/heads/master | /plotWidget.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
import numpy as np
import qdarkstyle
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from matplotlib.backend_bases import key_press_handler
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.figure import Figure
class PlotWidget(QWidget):
    """Qt widget embedding a matplotlib canvas that renders self.data as an image."""
    def __init__(self):
        super(PlotWidget, self).__init__()
        self.initUI()
        # Demo data: a 4x5 grid of increasing integers.
        self.data = np.arange(20).reshape([4, 5]).copy()
        self.on_draw()
    def initUI(self):
        """Create the matplotlib figure/canvas and lay it out vertically."""
        self.fig = Figure((5.0, 4.0), dpi=50)
        self.canvas = FigureCanvas(self.fig)
        self.canvas.setParent(self)
        self.canvas.setFocusPolicy(Qt.StrongFocus)
        self.canvas.setFocus()
        # self.mpl_toolbar = NavigationToolbar(self.canvas, self)
        #
        # self.canvas.mpl_connect('key_press_event', self.on_key_press)
        vbox = QVBoxLayout()
        vbox.addWidget(self.canvas)  # the matplotlib canvas
        # vbox.addWidget(self.mpl_toolbar)
        self.setLayout(vbox)
    def on_draw(self):
        """Clear the figure and redraw self.data with imshow."""
        self.fig.clear()
        self.axes = self.fig.add_subplot(111)
        # self.axes.plot(self.x, self.y, 'ro')
        self.axes.imshow(self.data, interpolation='nearest')
        # self.axes.plot([1,2,3])
        self.canvas.draw()
    def on_key_press(self, event):
        """Forward matplotlib key events to the default toolbar shortcuts.

        NOTE(review): references self.mpl_toolbar, whose creation is commented
        out in initUI — calling this as wired would raise AttributeError.
        """
        print('you pressed', event.key)
        # implement the default mpl key press events described at
        # http://matplotlib.org/users/navigation_toolbar.html#navigation-keyboard-shortcuts
        key_press_handler(event, self.canvas, self.mpl_toolbar)
if __name__ == '__main__':
    # Standalone demo: show the plot widget inside a dark-styled main window.
    app = QApplication(sys.argv)
    app.setStyleSheet(qdarkstyle.load_stylesheet(pyside=False))
    form = QMainWindow()
    form.setWindowTitle("Plot Demo")
    pltWidget = PlotWidget()
    form.setCentralWidget(pltWidget)
    form.show()
    app.exec_()
| {"/recommend.py": ["/volumeUtils.py"], "/dataAnalyzer.py": ["/volumeUtils.py"], "/pair_correlation.py": ["/volumeUtils.py"], "/dataLoader.py": ["/volumeUtils.py"], "/data_convertor.py": ["/volumeUtils.py"], "/dataAcquisition.py": ["/volumeUtils.py"]} |
66,113 | frankbx/Volume | refs/heads/master | /volumeUtils.py | # -*- coding: utf8 -*-
import os
import pandas as pd
from math import ceil
import threading
from time import ctime, time
DATA_FILE_SUFFIX = {'D': '-D.csv', 'W': '-W.csv', 'M': '-M.csv'}
MINUTE_DATA_DIR = './data/minute/'
FIVE_MINUTE_DATA_DIR = './data/5minutes/'
DAILY_DATA_DIR = './data/daily1/'
WEEKLY_DATA_DIR = './data/weekly/'
K_TYPES = ['m', '5m', 'D', 'W']
DATA_DIR_DICT = {
'D': DAILY_DATA_DIR,
'W': WEEKLY_DATA_DIR,
'm': MINUTE_DATA_DIR,
'5m': FIVE_MINUTE_DATA_DIR
}
def add_suffix(code):
    """Append the exchange suffix: .SH for Shanghai (6xx codes), .SZ otherwise."""
    return code + ('.SH' if code.startswith('6') else '.SZ')
def read_data(code, ktype='D'):
    """Return the DataFrame stored at './data/<code><suffix>' for *ktype*,
    or None when the file does not exist."""
    filename = './data/' + code + DATA_FILE_SUFFIX[ktype]
    if not os.path.exists(filename):
        return None
    return pd.read_csv(filename)  # ,index_col='date',parse_dates=True)
def read_data_from_file(filename):
    """Return the CSV at *filename* as a DataFrame, or None when it is absent."""
    if not os.path.exists(filename):
        return None
    return pd.read_csv(filename)
def split_into_chunck(data_list, chunck_size=100):
    """Split *data_list* into consecutive chunks of at most *chunck_size* items.

    Returns a list of slices; the last chunk may be shorter. (The function
    name keeps its original spelling for compatibility with callers.)
    """
    l = len(data_list)
    n = ceil(l / chunck_size)
    # Python slices clamp at the sequence end, so the original special-case
    # for the final (possibly short) chunk was redundant.
    deck = [data_list[i * chunck_size:(i + 1) * chunck_size] for i in range(n)]
    print('Total length:', l)
    print('Chunck size:', chunck_size)
    print('Number of chuncks:', len(deck))
    return deck
def parallel_processing(tasks, processing_func, chunck_size=100, params=None):
    """Run *processing_func* over chunks of *tasks*, one thread per chunk.

    processing_func is invoked as processing_func(chunk, params); every
    thread is started before any is joined, and all are joined on return.
    """
    chunks = split_into_chunck(tasks, chunck_size)
    workers = [
        threading.Thread(target=processing_func, args=(chunk, params))
        for chunk in chunks
    ]
    start = time()
    print('Start at:', ctime())
    print(len(workers))
    for worker in workers:
        worker.start()
    for worker in workers:
        worker.join()
    end = time()
    print('End at:', ctime())
    print('Duration:', round(end - start, 2))
| {"/recommend.py": ["/volumeUtils.py"], "/dataAnalyzer.py": ["/volumeUtils.py"], "/pair_correlation.py": ["/volumeUtils.py"], "/dataLoader.py": ["/volumeUtils.py"], "/data_convertor.py": ["/volumeUtils.py"], "/dataAcquisition.py": ["/volumeUtils.py"]} |
66,114 | frankbx/Volume | refs/heads/master | /dataAcquisition.py | # -*- coding: utf8 -*-
import numpy as np
import tushare as ts
from volumeUtils import *
print(ts.__version__)
# TODO load all data into single file
# TODO incremental add data
# TODO load tick data
'''
DataCollector is to download below kinds of data from internet:
1. Index: SH, SZ
2. Stock basics
3. Stock K data, including 1 min, 5 min, 15 min, 30 min, 60 min, Daily, Weekly and Monthly
4. Stock tick data
Requirements:
1. All data will be saved to local disk using HDF5 format
2. Data will be incrementally added to existing file
'''
def get_sz_data():
    """Fetch the Shenzhen composite index history since 2000 (not saved to disk)."""
    sz = ts.get_h_data('399106', start='2000-01-01', index=True)
    # sz.to_csv('sz.csv')
    return sz
def get_sh_data():
    """Fetch the Shanghai composite index history since 2000 and save it to sh.csv."""
    sh = ts.get_h_data('000001', start='2000-01-05', index=True)
    sh.to_csv('sh.csv')
    return sh
def get_all_data(ktype='D', ):
    """Download/refresh K-data for every stock listed in basics.csv, in parallel.

    :param ktype: K-line period key; selects the target data directory.
    """
    # Fix: np.str was removed in NumPy 1.24; plain str keeps codes zero-padded.
    df = pd.read_csv('basics.csv', dtype={'code': str})
    directory = DATA_DIR_DICT[ktype]
    if not os.path.exists(directory):
        os.mkdir(directory)
    # Consistency: reuse the shared fan-out helper instead of duplicating the
    # chunk/start/join loop; `params` is forwarded as process()'s ktype.
    parallel_processing(tasks=df.code, processing_func=process, chunck_size=40, params=ktype)
def process(code_list, ktype='D'):
    """Worker: fetch/update data for each code in *code_list*."""
    for code in code_list:
        get_stock_data(code, ktype)
        # sleep(0.2)
def get_stock_data(code, ktype='D'):
    """Download (or incrementally update) one stock's K-data CSV.

    If the per-stock file exists, fetch from its last recorded date and
    append only the new rows; otherwise create it starting at the stock's
    listing date (timeToMarket from basics.csv).
    """
    directory = DATA_DIR_DICT[ktype]
    filename = directory + add_suffix(code) + '.csv'
    if os.path.exists(filename):
        try:
            existing_data = pd.read_csv(filename)
        except pd.errors.EmptyDataError:
            # Fix: the original printed and fell through to use an undefined
            # `existing_data` (NameError). Skip empty/corrupt files instead.
            print(filename)
            return
        row, col = existing_data.shape
        latest_date = existing_data.date[row - 1]
        # Retrieve data starting from the last stored date.
        data = ts.get_k_data(code=code, start=latest_date, retry_count=30, pause=2)
        if data is not None:
            r, c = data.shape
            # Discard the duplicated last stored day; only append if anything new.
            if r > 1:
                # Locate by integer position, not label.
                delta_data = data.iloc[1:r].copy()
                # Keep ascending date order so future appends stay sorted.
                delta_data.sort_index(axis=0, inplace=True)
                delta_data.to_csv(filename, mode='a', header=None, index=False)
                print(code, 'updated')
    else:
        # Fix: np.str was removed in NumPy 1.24; plain str keeps codes padded.
        basics = pd.read_csv('./basics.csv', dtype={'code': str})
        basics.index = basics.code
        # Fix: DataFrame.ix was removed in pandas 1.0; .loc is the
        # label-based equivalent.
        start_date = str(basics.loc[code]['timeToMarket'])
        if start_date != '0':
            # YYYYMMDD int -> 'YYYY-MM-DD'.
            start_date = start_date[0:4] + '-' + start_date[4:6] + '-' + start_date[6:8]
        else:
            start_date = None
        data = ts.get_k_data(code=code, start=start_date, retry_count=20, pause=1)
        # Data can be None/empty if it's a brand-new stock.
        if data is not None and len(data) >= 1:
            data.sort_index(axis=0, inplace=True)
            data.to_csv(filename, index=False)
            print(code, 'created')
def get_stock_basics():
    """Download the full stock list/fundamentals and save it to ./basics.csv."""
    basics = ts.get_stock_basics()
    # basics['date']=
    basics.to_csv("./basics.csv", encoding='utf8')
def get_tick_data(code, start=None, end=None):
    """Stub: download tick-level data for *code* (not yet implemented)."""
    pass
if __name__ == '__main__':
    # Refresh the stock list first, then pull daily K-data for every stock.
    get_stock_basics()
    get_all_data(ktype='D')
    # get_all_data(ktype='W')
| {"/recommend.py": ["/volumeUtils.py"], "/dataAnalyzer.py": ["/volumeUtils.py"], "/pair_correlation.py": ["/volumeUtils.py"], "/dataLoader.py": ["/volumeUtils.py"], "/data_convertor.py": ["/volumeUtils.py"], "/dataAcquisition.py": ["/volumeUtils.py"]} |
66,139 | pawan7697/django | refs/heads/main | /dashbord/urls.py | from django.urls import path
from .import views
# URL routes for the dashboard app.
urlpatterns = [
    # NOTE: 'dashbord' (sic) matches the view name in views.py; keep spelling.
    path('dashbord/', views.dashbord, name='dashbord'),
] | {"/subcategory/views.py": ["/subcategory/models.py"], "/supercategory/views.py": ["/supercategory/models.py"], "/products/views.py": ["/subcategory/models.py", "/supercategory/models.py", "/products/models.py", "/products/forms.py"], "/products/forms.py": ["/products/models.py"], "/supercategory/models.py": ["/subcategory/models.py"], "/subcategory/admin.py": ["/subcategory/models.py"]} |
66,140 | pawan7697/django | refs/heads/main | /supercategory/urls.py | from django.urls import path
from .import views
# URL routes for the supercategory app.
urlpatterns = [
    # NOTE: 'Supercateory' (sic) matches the view name in views.py; keep spelling.
    path('addSupercateory/', views.addSupercateory, name='addSupercateory'),
    path('ajaxsubcategory/', views.ajaxsubcategory, name='ajaxsubcategory'),
    path('SupercategorySubmit/', views.SupercategorySubmit, name='SupercategorySubmit'),
    path('SupercategoryView/', views.SupercategoryView, name='SupercategoryView'),
] | {"/subcategory/views.py": ["/subcategory/models.py"], "/supercategory/views.py": ["/supercategory/models.py"], "/products/views.py": ["/subcategory/models.py", "/supercategory/models.py", "/products/models.py", "/products/forms.py"], "/products/forms.py": ["/products/models.py"], "/supercategory/models.py": ["/subcategory/models.py"], "/subcategory/admin.py": ["/subcategory/models.py"]} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.