|
|
|
|
|
from __future__ import print_function |
|
|
from flask import Flask, render_template,request,flash , json, url_for,g , redirect , jsonify , send_file ,make_response |
|
|
import json |
|
|
import fitz |
|
|
from PIL import Image |
|
|
import cv2 |
|
|
import numpy as np |
|
|
import pilecaps_adr |
|
|
import base64 |
|
|
from db import dropbox_connect |
|
|
import cv2 |
|
|
import pandas as pd |
|
|
import time |
|
|
from io import BytesIO, StringIO |
|
|
import urllib |
|
|
import tempfile |
|
|
from flask import Flask, Response |
|
|
from werkzeug.wsgi import wrap_file |
|
|
import tameem3_2 |
|
|
import pypdfium2 as pdfium |
|
|
from urllib.parse import urlparse |
|
|
import re |
|
|
import pixelconversion |
|
|
import tameem2_1 |
|
|
import io |
|
|
from urllib.parse import unquote |
|
|
import API |
|
|
import MC_Templates_API |
|
|
import tsadropboxretrieval |
|
|
import doc_search |
|
|
import google_sheet_Legend |
|
|
import dxf__omar3_2 |
|
|
import requests |
|
|
import google_sheet_to_xml |
|
|
from threading import Thread |
|
|
import mainDBAlaa |
|
|
import datetime |
|
|
import doors_fasterrcnn |
|
|
import deploying_3_3 |
|
|
import Doors_Schedule |
|
|
import Code_2_7 |
|
|
import Counting_Columns_2_1 |
|
|
|
|
|
import ezdxf |
|
|
import Azure_api |
|
|
from flask import Flask, render_template, session, redirect, url_for |
|
|
from flask_session import Session |
|
|
import os, traceback |
|
|
from werkzeug.utils import secure_filename |
|
|
import Legend_Detection |
|
|
|
|
|
|
|
|
# Flask application object; every route below registers against this instance.
app = Flask(__name__)

# Server-side sessions stored on the local filesystem (flask_session extension).
app.config["SESSION_TYPE"] = "filesystem"
app.config["SESSION_PERMANENT"] = False
app.config["SESSION_FILE_DIR"] = "./flask_session_files"
# NOTE(review): hard-coded placeholder secret key — should be loaded from
# configuration/environment in production.
app.secret_key = "your_secret_key"

# Bind the server-side session extension to the app.
sess = Session()
sess.init_app(app)

# NOTE(review): `global` at module level is a no-op in Python; these
# statements declare nothing. The names are bound inside the view functions
# below via their own `global` statements.
global colorsused
global pdflink

# Per-process cache of the most recent project/part/section lookups made by
# the /getproject* routes (each entry becomes a pandas DataFrame once filled).
cached_tables = {
    "projects": None,
    "parts": None,
    "sections": None
}

# NOTE(review): no-op at module level (see note above).
global hatched_areas2_7
|
|
|
|
|
|
|
|
def log_error(message, issue_type="backend"):
    """Build, print and return a user-facing error string.

    The contact person is derived from the running file's name prefix, and
    the headline from ``issue_type`` ("connection", "frontend", anything
    else is reported as a backend error).
    """
    current_file = os.path.basename(__file__)

    # File-name prefix decides which ADR team member owns this module.
    if current_file.startswith(('2.6', '2.8', '2.1')):
        contact = 'Tameem'
    elif current_file.startswith(('2.7', '3.2', '3.3')):
        contact = 'Omar'
    else:
        contact = 'Marthe'

    # Map the issue category to its headline; unknown types fall back to backend.
    headlines = {
        "connection": "Connection issue detected",
        "frontend": "Frontend issue detected",
    }
    headline = headlines.get(issue_type, "Backend error detected")

    full_message = f"{headline}. {message}. Please contact {contact} from the ADR Team."
    print(full_message)
    return full_message
|
|
|
|
|
@app.route("/", methods=["GET", "POST"])
def getInfotoMeasure():
    """Serve the landing page (gui2.html)."""
    try:
        return render_template("gui2.html")
    except Exception:
        # Rendering failed — report through the shared error helper.
        details = traceback.format_exc()
        return jsonify({"error": log_error(details)}), 500
|
|
|
|
|
@app.route("/password", methods=["GET", "POST"])
def password_page():
    """Serve the page containing the password prompt (gui2.html)."""
    try:
        return render_template("gui2.html")
    except Exception:
        # Rendering failed — report through the shared error helper.
        details = traceback.format_exc()
        return jsonify({"error": log_error(details)}), 500
|
|
|
|
|
@app.route("/check_password", methods=["POST"])
def check_password():
    """Validate the submitted password and mark the session as authenticated."""
    try:
        submitted = request.form.get("password")

        # NOTE(review): plaintext, hard-coded password — consider moving this
        # to configuration and comparing a hash.
        if submitted == "c900":
            session["authenticated"] = True
            return jsonify({"authenticated": True}), 200
        return jsonify({"authenticated": False}), 200
    except Exception:
        details = traceback.format_exc()
        return jsonify({"error": log_error(details)}), 500
|
|
|
|
|
|
|
|
@app.route("/mainGUI", methods=["GET", "POST"])
def main_gui():
    """Serve the main console; unauthenticated users are sent to /password."""
    try:
        # Missing or falsy "authenticated" flag means not logged in.
        if not session.get("authenticated"):
            return redirect(url_for("password_page"))
        return render_template("proposed-GUI.html")
    except Exception:
        details = traceback.format_exc()
        return jsonify({"error": log_error(details)}), 500
|
|
|
|
|
|
|
|
@app.route("/measurementConsole",methods=["GET", "POST"])
def measurementConsoleFn():
    """Serve the measurement console page (proposed-GUI.html)."""
    try:
        return render_template("proposed-GUI.html")
    except Exception:
        details = traceback.format_exc()
        return jsonify({"error": log_error(details)}), 500
|
|
|
|
|
|
|
|
@app.route('/WordSearch',methods=["GET", "POST"])
def searchDocument():
    """Serve the document word-search page.

    NOTE(review): a second view (getInfo2toMeasure, below) registers this same
    '/WordSearch' rule under a different endpoint — only one of the two can
    ever be dispatched; confirm which is intended and remove the other.
    """
    try:
        return render_template('wordSearch.html')
    except Exception:
        # Added for consistency with every other route in this module:
        # log the traceback and return a JSON 500 instead of crashing.
        error_details = traceback.format_exc()
        error_msg = log_error(error_details)
        return jsonify({"error": error_msg}), 500
|
|
|
|
|
@app.route("/WordSearch",methods=["GET", "POST"])
def getInfo2toMeasure():
    """Serve the word-search page.

    NOTE(review): this registers the same '/WordSearch' rule as
    searchDocument() above under a different endpoint name, so only one of
    the two view functions can ever be dispatched — confirm which handler is
    intended and remove the duplicate.
    """
    try:

        return render_template("wordSearch.html")
    except Exception as e:

        # Render failure: log via the shared helper and return a JSON 500.
        error_details = traceback.format_exc()
        error_msg = log_error(error_details)
        return jsonify({"error": error_msg}), 500
|
|
|
|
|
@app.route('/searchInDocs',methods=["GET", "POST"])
def getSearchinDocs():
    """Search the selected projects for a keyword; return table HTML + page images."""
    try:
        payload = request.get_json()
        keyword = payload.get('keyword')
        projects = payload.get('listofprojs')
        print(keyword, projects)

        df, img_list = doc_search.search_docs(keyword, projects)

        # Encode every matched page image as a base64 PNG for the frontend.
        encoded = []
        for image in img_list:
            _, png = cv2.imencode('.png', image)
            encoded.append(base64.b64encode(png).decode('utf-8'))

        return jsonify([df.to_html(index=False, escape=False), encoded])
    except Exception:
        details = traceback.format_exc()
        return jsonify({"error": log_error(details)}), 500
|
|
|
|
|
@app.route('/searchInFolder',methods=["GET", "POST"])
def getSearchinFolder():
    """Slow keyword search inside one project's folder; return table HTML + images."""
    try:
        payload = request.get_json()
        keyword = payload.get('keyword')
        project_name = payload.get('ProjectName')

        df, img_list = doc_search.slow_search(keyword=keyword, project=project_name)

        # Encode every matched page image as a base64 PNG for the frontend.
        encoded = []
        for image in img_list:
            _, png = cv2.imencode('.png', image)
            encoded.append(base64.b64encode(png).decode('utf-8'))

        return jsonify([df.to_html(index=False, escape=False), encoded])
    except Exception:
        details = traceback.format_exc()
        return jsonify({"error": log_error(details)}), 500
|
|
|
|
|
|
|
|
|
|
|
@app.route('/getsunburst',methods=["GET", "POST"])
def getSunburst():
    """Return the folder-tree sunburst structure as JSON."""
    try:
        return jsonify(doc_search.prepare_sunburst().to_dict())
    except Exception:
        details = traceback.format_exc()
        return jsonify({"error": log_error(details)}), 500
|
|
|
|
|
|
|
|
@app.route('/test')
def test_route():
    """Deliberately failing endpoint used to exercise the error-reporting path."""
    try:
        value = 10 / 0  # always raises ZeroDivisionError on purpose
        return jsonify({"result": value})
    except Exception:
        details = traceback.format_exc()
        return jsonify({"error": log_error(details)}), 500
|
|
|
|
|
|
|
|
|
|
|
@app.route('/getprojectnames/', methods=['GET'])
def getprjnamesfromTestAPI():
    """Fetch project names/ids from the API, cache them, and return both lists."""
    try:
        names, ids = API.getPrjNames()

        # Cache the lookup so later routes can cross-reference it.
        global cached_tables
        cached_tables["projects"] = pd.DataFrame({
            "ProjectName": names,
            "ProjectId": ids,
        })

        return jsonify([names, ids])

    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500

    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(traceback.format_exc(), issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/getprojectparts/<jsdata>', methods=['GET'])
def getprjpartsfromTestAPI(jsdata):
    """Fetch the parts of project ``jsdata``, cache them, and return both lists."""
    try:
        parts, part_ids = API.getprjParts(jsdata)

        # Cache the lookup; the project id column is repeated per part row.
        global cached_tables
        cached_tables["parts"] = pd.DataFrame({
            "ProjectPart": parts,
            "ProjectPartId": part_ids,
            "ProjectId": [int(jsdata)] * len(parts),
        })

        return jsonify([parts, part_ids])

    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500

    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(traceback.format_exc(), issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
|
|
|
@app.route('/getprojectsections/<jsdata>', methods=['GET'])
def getprjSectionsfromTestAPI(jsdata):
    """Fetch sections for a JSON-encoded [project_id, part_id] pair, cache and return."""
    try:
        data = json.loads(jsdata)

        if data[0] == 'testing':
            # Test mode: pull all sections with no project/part filtering.
            sections = API.getprjSections()
            project_id = data[0]
            project_part_id = None
        else:
            project_id = data[0]
            # Part id is optional in the payload.
            project_part_id = data[1] if len(data) > 1 else None
            sections = API.getprjSections(project_id, project_part_id)

        # Cache the lookup; project/part ids are repeated per section row.
        global cached_tables
        cached_tables["sections"] = pd.DataFrame({
            "ProjectSection": sections,
            "ProjectId": [project_id] * len(sections),
            "ProjectPartId": [project_part_id] * len(sections),
        })

        return jsonify(sections)
    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500

    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(traceback.format_exc(), issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
|
|
|
@app.route('/getmethod/<jsdata>', methods=['GET'])
def get_javascript_data(jsdata):
    """Stream project-retrieval progress to the browser as server-sent events.

    ``jsdata`` identifies the project selection. The response is a
    ``text/event-stream`` whose data lines are progress percentages followed
    by a final JSON payload with the retrieved document lists.
    """

    # Progress values pushed by the retrieval callback below.
    progress_updates = []

    def generate_progress():
        try:

            # Fixed initial ticks so the UI shows activity immediately.
            yield f"data: 5\n\n"
            yield f"data: 10\n\n"

            documentsToMeasure, RelevantDocuments, extracted_path = tsadropboxretrieval.retrieveProjects(
                jsdata,
                progress_callback=lambda p: progress_updates.append(p)
            )

            # NOTE(review): retrieveProjects blocks until complete, so the
            # queued callback values are only flushed afterwards — the client
            # receives them in one burst rather than live. Confirm whether
            # live streaming was intended.
            while progress_updates:
                progress = progress_updates.pop(0)
                yield f"data: {progress}\n\n"

            yield f"data: 100\n\n"
            # Final event: the actual result payload serialized as JSON.
            result = json.dumps([documentsToMeasure, RelevantDocuments, extracted_path])
            yield f"data: {result}\n\n"

        except (ConnectionError, TimeoutError) as e:

            error_msg = log_error(str(e), issue_type="connection")

            # Emit an SSE 'error' event that the frontend can listen for.
            yield f"event:error\ndata:{json.dumps({'error': error_msg})}\n\n"

        except Exception:

            error_details = traceback.format_exc()
            error_msg = log_error(error_details, issue_type="backend")

            yield f"event:error\ndata:{json.dumps({'error': error_msg})}\n\n"

    return Response(generate_progress(), content_type='text/event-stream')
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/tableDetection',methods=["GET", "POST"])
def TableDetection():
    """Serve the table-detection page."""
    try:
        return render_template('tableDetection.html')
    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            return jsonify({"error": log_error(str(e), issue_type="connection")}), 500
    except Exception:
        from flask import current_app
        with current_app.app_context():
            return jsonify({"error": log_error(traceback.format_exc(), issue_type="backend")}), 500
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/savedetectedtables/',methods=["POST"])
def table():
    """Run Azure table detection on the selected PDF(s) and return an .xlsx.

    The frontend posts ``answers`` as a Python-literal string whose first
    element is either a direct (Dropbox) URL or a list of PDF names to fetch
    from Dropbox. Returns 204 when no tables were detected, 500 on failure.
    """
    try:
        import ast

        # SECURITY: the payload comes from the client — parse it as a Python
        # literal instead of eval() so arbitrary expressions cannot execute.
        tt = ast.literal_eval(request.form.get('answers'))
        print("Value of tt = ", tt)
        print("Value of tt[0] = ", tt[0])

        pdflist = []
        pdfnames = []

        if tt[0].startswith('http'):
            # Direct link: force the Dropbox direct-download flag, then fetch.
            pdf_path = tt[0]
            if pdf_path and ('http' in pdf_path or 'dropbox' in pdf_path):
                pdf_path = pdf_path.replace('dl=0', 'dl=1')

            response = requests.get(pdf_path)
            # BUGFIX: the previous check tested a BytesIO wrapper, which is
            # always truthy — test the downloaded bytes themselves instead.
            if not response.content:
                raise ValueError("No valid PDF content found.")

            excel_io = Azure_api.detect_tables([response.content], ['pdfname.pdf'])
        else:
            # List of PDF names: download each one from Dropbox.
            for name in tt[0]:
                print("Value of tt[0][i] = ", name)
                pdfpath, _ = tsadropboxretrieval.getPathtoPDF_File(nameofPDF=name)
                dbx = tsadropboxretrieval.ADR_Access_DropboxTeam('user')
                _, res = dbx.files_download(path=pdfpath)
                pdflist.append(res.content)
                pdfnames.append(pdfpath)

            print("pdflist = ", pdflist)
            excel_io = Azure_api.detect_tables(pdflist, pdfnames)

        if excel_io is None:
            # Nothing detected — reply "no content".
            return ('', 204)

        return send_file(
            excel_io,
            as_attachment=True,
            download_name='detected_tables.xlsx',
            mimetype='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        )
    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500

    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(traceback.format_exc(), issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
|
|
|
@app.route('/TestingMeasurement',methods=["GET", "POST"])
def TestingMeasurement():
    """Serve the measurement-testing page."""
    try:
        return render_template('TestingMeasurement.html')
    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            return jsonify({"error": log_error(str(e), issue_type="connection")}), 500
    except Exception:
        from flask import current_app
        with current_app.app_context():
            return jsonify({"error": log_error(traceback.format_exc(), issue_type="backend")}), 500
|
|
|
|
|
|
|
|
|
|
|
@app.route('/FindInitialMarkups',methods=["GET", "POST"])
def FindInitialMarkups():
    """Serve the initial-markups finder page."""
    try:
        return render_template('FindInitialMarkups.html')
    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            return jsonify({"error": log_error(str(e), issue_type="connection")}), 500
    except Exception:
        from flask import current_app
        with current_app.app_context():
            return jsonify({"error": log_error(traceback.format_exc(), issue_type="backend")}), 500
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Module-level state shared by the /view-pdf and /api/process-data routes.
pdf_content = None       # last annotated PDF bytes (rebound by the routes below)
pageNumTextFound = 0     # page index used in the /view-pdf download filename
# Base URL of the companion NBS-link service — not referenced in this chunk;
# presumably used elsewhere in the file (TODO confirm).
BASE_URL = "https://marthee-nbslink.hf.space"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/view-pdf', methods=['GET'])
def download_pdf():
    """Annotate the PDF at ``pdfLink`` with ``keyword`` highlights and stream it.

    Query parameters (URL-encoded): ``pdfLink`` (Dropbox/HTTP link) and
    ``keyword`` (a JSON list of phrases, or a single phrase).
    Returns 400 when a parameter is missing, 404 when annotation produced
    nothing, otherwise an inline PDF response.
    """
    try:
        # Parse the raw query string explicitly (keyword values are
        # double-encoded by the frontend and unquoted again below).
        full_query_string = request.query_string.decode()
        parsed_params = urllib.parse.parse_qs(full_query_string)

        pdf_link = parsed_params.get('pdfLink', [None])[0]
        keyword = parsed_params.get('keyword', [None])[0]

        if not pdf_link or not keyword:
            return "Missing required parameters.", 400

        pdf_link = urllib.parse.unquote(pdf_link)
        keyword = urllib.parse.unquote(keyword)

        # Accept either a JSON list of keywords or a bare string.
        try:
            keyword = json.loads(keyword)
        except json.JSONDecodeError:
            keyword = [keyword]

        print("Extracted PDF Link:", pdf_link)
        print("Extracted Keywords:", keyword)

        # NOTE(review): Find_Hyperlinking_text is not imported anywhere in
        # this module — this call raises NameError until the import is added.
        # (Removed the unused local `createDF` that used to sit here.)
        pdf_content = Find_Hyperlinking_text.annotate_text_from_pdf([pdf_link], keyword)[0]
        if pdf_content is None:
            return "PDF content not found.", 404

        pdf_bytes = BytesIO(pdf_content)
        return send_file(
            pdf_bytes,
            mimetype='application/pdf',
            as_attachment=False,
            download_name=f"annotated_page_{pageNumTextFound}.pdf"
        )
    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500

    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_details = traceback.format_exc()
            error_msg = log_error(error_details, issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
@app.route('/api/process-data', methods=['POST'])
def receive_pdf_data():
    """Annotate a PDF with keywords, upload result + summary to Dropbox, return links.

    Currently sources its (pdfLink, keyword) input from the hard-coded
    finddata() helper rather than the request body.
    """
    try:
        global pdf_content, pageNumTextFound

        # Hard-coded test input — see finddata() below.
        pdfLink, keyword = finddata()

        if not pdfLink or not keyword:
            return jsonify({"error": "Both 'pdfLink' and 'keyword' must be provided."}), 400

        try:
            print(pdfLink, keyword)

            # NOTE(review): Find_Hyperlinking_text is not imported anywhere
            # in this module — confirm the missing import.
            pdfbytes, pdf_document , df ,tablepdfoutput= Find_Hyperlinking_text.annotate_text_from_pdf([pdfLink], keyword)
            dbxTeam= tsadropboxretrieval.ADR_Access_DropboxTeam('user')

            # Resolve the original filename from the shared-link metadata.
            metadata = dbxTeam.sharing_get_shared_link_metadata(pdfLink)
            dbPath='/TSA JOBS/ADR Test/FIND/'
            pdflink= tsadropboxretrieval.uploadanyFile(doc=pdf_document,path=dbPath,pdfname=metadata.name)
            print('LINKS0',pdflink)

            # Upload the companion "Markup Summary" table PDF next to it.
            dbPath='/TSA JOBS/ADR Test/FIND/'
            tablepdfLink=tsadropboxretrieval.uploadanyFile(doc=tablepdfoutput,path=dbPath,pdfname=metadata.name.rsplit(".pdf", 1)[0] +' Markup Summary'+'.pdf')
            print(f"PDF successfully uploaded to Dropbox at")
            print('LINKS1',tablepdfLink)
            return jsonify({
                "message": "PDF processed successfully.",
                "PDF_MarkedUp": pdflink,
                'Table_PDF_Markup_Summary': tablepdfLink
            })

        except Exception as e:
            # NOTE(review): this inner catch-all also swallows
            # ConnectionError/TimeoutError, so the outer connection handler
            # below can only fire for errors raised outside this inner block.
            return jsonify({"error": str(e)}), 500
    except (ConnectionError, TimeoutError) as e:

        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500

    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_details = traceback.format_exc()
            error_msg = log_error(error_details, issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
def finddata():
    """Return a hard-coded (pdf_link, keywords) pair used for manual testing."""
    link = (
        'https://www.dropbox.com/scl/fi/hnp4mqigb51a5kp89kgfa/'
        '00801-ARC-20-ZZ-S-A-0002.pdf?rlkey=45abeoebzqw4qwnslnei6dkd6&st=m4yrcjm2&dl=1'
    )
    phrases = ['115 INTEGRATED MRI ROOM LININGS', '310 ACCURACY']
    return link, phrases
|
|
|
|
|
|
|
|
|
|
|
@app.route('/legends',methods=["GET", "POST"])
def legendDirectory():
    """Serve the legend directory page."""
    try:
        return render_template('legendDirectory.html')
    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            return jsonify({"error": log_error(str(e), issue_type="connection")}), 500
    except Exception:
        from flask import current_app
        with current_app.app_context():
            return jsonify({"error": log_error(traceback.format_exc(), issue_type="backend")}), 500
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/getdrivelinks/<jsdata>',methods=["GET", "POST"])
def getlinkscreated(jsdata):
    """List legend spreadsheets on Drive with their path metadata and timestamps.

    ``jsdata`` is accepted for URL compatibility but is not used.
    Each entry is [title, dropbox_path, {createdTime, modifiedTime}, sheet_id].
    """
    try:
        spreadsheet_service, drive_service, gc = google_sheet_Legend.authorizeLegend()
        ids = gc.spreadsheet_ids()
        titles = gc.spreadsheet_titles()
        allpaths = []
        print('HEREEEEEEEEEE')

        for title, sheet_id in zip(titles, ids):
            print('titles', title)
            # Skip the API/Dropbox bookkeeping sheets.
            if title.startswith(('API', 'Dropbox')):
                continue

            ws = gc.open(title)
            path_metadata = ws.get_developer_metadata('path')
            print(path_metadata)
            # Reuse the metadata already fetched (previously this issued a
            # second identical get_developer_metadata call per sheet).
            timestamps = drive_service.files().get(
                fileId=sheet_id, fields="createdTime, modifiedTime"
            ).execute()
            allpaths.append([title, path_metadata[0].value, timestamps, sheet_id])

        return jsonify(allpaths)
    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500

    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(traceback.format_exc(), issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
|
|
|
@app.route('/RetrieveMCTNames/',methods=['GET'])
def CallAPIforMCTNames():
    """Return the measurement-console template dictionary from the MC API."""
    try:
        templates = MC_Templates_API.RetrieveMC_Templates_API()
        print('here')
        return jsonify(templates)
    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500
    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(traceback.format_exc(), issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def hexRGB(color):
    """Convert a hex colour string ('#RRGGBB' or 'RRGGBB') to an [R, G, B] list.

    Raises ValueError on malformed input. (The previous version returned a
    Flask ``(jsonify(...), 500)`` tuple from this non-route helper on error,
    which callers expecting a colour list could never handle — and jsonify
    outside a request context raises anyway.)
    """
    stripped = color.lstrip('#')
    if len(stripped) != 6:
        raise ValueError(f"Expected a 6-digit hex colour, got {color!r}")
    # Two hex digits per channel: R, G, B.
    return [int(stripped[i:i + 2], 16) for i in (0, 2, 4)]
|
|
|
|
|
|
|
|
|
|
|
@app.route('/updatepreviewimg/<jsdata>',methods=["GET", "POST"])
def getfromdropboxImg(jsdata):
    """Download a measured-plan PDF from Dropbox; return page 1 as base64 PNG.

    ``jsdata`` is a Python-literal string: [[project, part, section], pdfname].
    """
    try:
        import ast

        # SECURITY: jsdata arrives in the URL — parse it as a Python literal
        # rather than eval() so arbitrary code cannot execute.
        jsdata = ast.literal_eval(jsdata)
        print('pdfnameeee==', jsdata)

        # Build the Dropbox path to the measured plan.
        dbPath = ('/TSA JOBS/ADR Test/' + jsdata[0][0] + '/' + jsdata[0][1] + '/'
                  + jsdata[0][2] + '/Measured Plan/' + jsdata[1])
        print(dbPath)

        dbxTeam = tsadropboxretrieval.ADR_Access_DropboxTeam('user')
        md, res = dbxTeam.files_download(path=dbPath)
        data = res.content

        # Rasterize the first page and return it as a base64-encoded PNG.
        doc = fitz.open("pdf", data)
        page = doc[0]
        pix = page.get_pixmap()
        pl = Image.frombytes('RGB', [pix.width, pix.height], pix.samples)
        img = np.array(pl)
        img = cv2.cvtColor(img, cv2.COLOR_RGB2BGR)
        _, buffer = cv2.imencode('.png', img)
        return base64.b64encode(buffer).decode('utf-8')
    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500

    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(traceback.format_exc(), issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
|
|
|
@app.route('/savebase64Img/',methods=["POST"])
def stringToRGB():
    """Dispatch a measurement job based on the posted form data.

    The 'answers' form field is a Python-literal list ``vv`` where vv[0] is a
    PDF link or PDF name, vv[5] (link case) or vv[5][2] (name case) selects
    the section/code path (1.0 / 2.1 / 2.6 / 2.7 / 2.8 / 3.2 / 3.3), and the
    remaining slots carry section-specific parameters. Returns the
    measurement result as JSON.

    SECURITY NOTE(review): eval() on client-supplied form data can execute
    arbitrary code — should be ast.literal_eval.
    """
    try:
        vv = eval(request.form.get('answers'))

        # Some frontends wrap the link/name in a one-element list.
        if type(vv[0]) == list:
            vv[0] = vv[0][0]

        if vv[0].startswith('http'):
            # --- Direct-link input: section selector is the string vv[5] ---
            if ( vv[5].startswith('3.2') or vv[5].startswith('3.3') or vv[5].startswith('2.7')) :
                print('3.2 section')

                # Normalize the Dropbox share link into a direct download.
                link = urllib.parse.unquote(vv[0].strip('"'))
                if link and ('http' in link or 'dropbox' in link):
                    if 'dl=0' in link:
                        link = link.replace('dl=0', 'dl=1')
                    elif 'www.dropbox.com' in link and '?dl=1' not in link:
                        link += '?dl=1'

                response = requests.get(link)

                pdf_content = BytesIO(response.content)
                # NOTE(review): BytesIO objects are always truthy, so this
                # empty-download guard can never fire.
                if not pdf_content:
                    raise ValueError("No valid PDF content found.")
                if 'file' not in request.files:
                    print('error, No file part in the request')
                else:
                    # The uploaded DXF accompanies the PDF for 3.x measuring.
                    file = request.files['file']
                    print('file done, measuring')
                    arr=measureproject(result=vv,dxffile=file,pdf_content=pdf_content)

                # NOTE(review): if no 'file' part was uploaded, `arr` is
                # unbound here and this raises (caught by the handler below).
                return jsonify(arr)

            if vv[5].startswith('2.8') or vv[5].startswith('2.6'):
                # Same link normalization as above.
                link = urllib.parse.unquote(vv[0].strip('"'))
                if link and ('http' in link or 'dropbox' in link):
                    if 'dl=0' in link:
                        link = link.replace('dl=0', 'dl=1')
                    elif 'www.dropbox.com' in link and '?dl=1' not in link:
                        link += '?dl=1'

                response = requests.get(link)

                pdf_content = BytesIO(response.content)
                doc = fitz.open(stream=pdf_content, filetype="pdf")

                # Rasterize page 1 upright at 300 dpi for measuring.
                page=doc[0]
                if page.rotation!=0:
                    page.set_rotation(0)
                pix = page.get_pixmap(dpi=300)
                pl=Image.frombytes('RGB', [pix.width,pix.height],pix.samples)
                if 'file' not in request.files:
                    print('error, No file part in the request')
                else:
                    csvfile = request.files['csvfile']
                    print('csvfile done, measuring')
                    arr=measureproject(result=vv,img=pl,pdf_content=pdf_content,csvfile=csvfile)

        else:
            # --- Name input: fetch from Dropbox; selector is vv[5][2] ---
            if ( vv[5][2].startswith('3.2') or vv[5][2].startswith('3.3') or vv[5][2].startswith('2.7')) :
                print('3.2 section')
                pdfpath,pdflink=tsadropboxretrieval.getPathtoPDF_File(nameofPDF=vv[0])
                dbxTeam= tsadropboxretrieval.ADR_Access_DropboxTeam('user')
                md, res =dbxTeam.files_download(path=pdfpath)
                dataDoc = res.content
                if 'file' not in request.files:
                    print('error, No file part in the request')
                else:
                    file = request.files['file']
                    print('file done, measuring')
                    arr=measureproject(vv,dataDoc,0,file)
                return jsonify(arr)

            if vv[5][2].startswith('1.0'):
                # 1.0: render the plan to an image; when vv[1] == 220 use the
                # client-supplied base64 image instead of the rendered one.
                opencv_img,dataDoc = plan2img( str(vv[0]) )
                if vv[1]==220:
                    imgdata = base64.b64decode(vv[6])
                    img=Image.open(io.BytesIO(imgdata))
                    opencv_img= cv2.cvtColor(np.array(img), cv2.COLOR_RGB2BGR)
                arr=measureproject(vv,dataDoc,opencv_img)

            if vv[5][2].startswith('2.1'):
                # 2.1: the frontend sends the segmented image as a data URL.
                data_url = vv[6]
                header, b64 = data_url.split(',', 1)
                segmented_img_bytes = base64.b64decode(b64)

                pdfpath,pdflink=tsadropboxretrieval.getPathtoPDF_File(nameofPDF= str(vv[0]))
                dbxTeam= tsadropboxretrieval.ADR_Access_DropboxTeam('user')
                md, res =dbxTeam.files_download(path=pdfpath)
                dataDoc = res.content
                # NOTE(review): opencv_img is computed but never used below.
                opencv_img,_ = convert2img2_1(str(vv[0]))

                arr=measureproject(vv,dataDoc,segmented_img_bytes)

            if vv[5][2].startswith('2.8') or vv[5][2].startswith('2.6'):
                # 2.8/2.6 batch mode: vv[0] is a list of PDF names.
                vv = eval(request.form.get('answers'))
                print(f"el mawgood fe vv[0]: {vv[0]}")

                arr_s = []
                dataDocs = []
                pls = []
                for v in vv[0]:
                    pdfpath,pdflink=tsadropboxretrieval.getPathtoPDF_File(nameofPDF= str(v))
                    dbxTeam= tsadropboxretrieval.ADR_Access_DropboxTeam('user')
                    md, res =dbxTeam.files_download(path=pdfpath)
                    dataDoc = res.content
                    dataDocs.append(dataDoc)
                    doc = fitz.open("pdf",dataDoc)
                    page=doc[0]
                    if page.rotation!=0:
                        page.set_rotation(0)
                    pix = page.get_pixmap(dpi=300)
                    pl=Image.frombytes('RGB', [pix.width,pix.height],pix.samples)
                    pls.append(pl)
                arr=measureproject(vv,dataDocs,pls)

        return jsonify(arr)
    except (ConnectionError, TimeoutError) as e:

        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500

    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_details = traceback.format_exc()
            error_msg = log_error(error_details, issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
|
|
|
@app.route('/2.1Trial/',methods=["POST"])
def measure2_1():
    """Run the measurement pipeline on values posted as JSON ('allvalues')."""
    try:
        payload = request.get_json()
        all_values = payload.get('allvalues')
        return measureproject(all_values)
    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500
    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(traceback.format_exc(), issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
|
|
|
def measureproject(result,dataDoc=0,img=0,dxffile=0,pdf_content=0,csvfile=0):
    """Dispatch a measurement job to the pipeline matching the project section.

    Args:
        result: positional list sent by the client. Usage observed here:
            result[0] = PDF name/URL, result[5] = section id (a plain string
            when pdf_content is truthy, otherwise a list of Dropbox path words
            whose third entry is the section id); the remaining slots carry
            per-section parameters (colors, ratios, search arrays, canvas
            coordinates, schedule file names, ...).
        dataDoc: raw bytes of the plan PDF (0 when not supplied).
        img: pre-rendered plan image used by the 1.0 pipeline (0 when unused).
        dxffile: uploaded DXF file object for the 2.7 / 3.2 / 3.3 pipelines.
        pdf_content: truthy when the plan came from a direct link rather than
            the Dropbox folder tree.
        csvfile: CSV upload flag/object used by the 2.8 / 2.6 schedule branch.

    Returns:
        A list whose shape depends on the section branch (annotated image as
        base64 PNG, measurement tables, spreadsheet ids/links, export
        artifacts), or a (jsonify, 500) tuple when an exception was logged.
    """
    try:
        colorarr=[]
        global pdflink
        pdfpath='/'
        # Resolve the section id and the Dropbox sub-path for this plan.
        if pdf_content:
            section=result[5]
            pdfpath+='testinglink/'
        else:
            # result[5] is a list of path words; its third entry names the section.
            section=result[5][2]
            for word in result[5]:
                pdfpath+=word +'/'
        arr=[]
        if section.startswith('1.0'):
            # 1.0: contour measurement driven by a list of hex colors.
            for item in result[2]:
                c=hexRGB(item)
                colorarr.append(c)
            print('RATIOS=',result[3], result[4])
            imgPerimeter1,image_new1,SimilarAreaDictionary , colorsUsed,spreadsheet_url, spreadsheetId,list1, pdflink, areas_Perimeters, namepathArr =pilecaps_adr.drawAllContours(dataDoc,img,result[1],colorarr, result[3], result[4], result[0],pdfpath)
            _, buffer = cv2.imencode('.png', image_new1)
            arr=[base64.b64encode(buffer).decode('utf-8'),SimilarAreaDictionary.to_dict(),spreadsheet_url , spreadsheetId,colorsUsed,list1.to_dict(), pdflink, areas_Perimeters, namepathArr]
        elif section.startswith('3.3') or section.startswith('3.2'):
            print('code of 3.3 and 3.2')
            # Persist the uploaded DXF to a temp file for the DXF parsers.
            # NOTE(review): delete=False means the temp file is never removed.
            dxfpath=dxffile.read()
            with tempfile.NamedTemporaryFile(suffix='.dxf', delete=False) as temp_file:
                temp_file.write(dxfpath)
                temp_filename = temp_file.name
            print(temp_filename)
            CorrectionRatio=result[8]
            print("result = ",result)
            SearchArray=result[6]
            parsed_url = urlparse(result[0])
            filename = parsed_url.path.split('/')[-1]
            print(filename)
            nameofpdf=filename
            # Annotate the source PDF with the user's canvas polygons.
            points_Of_drawing_Canvas=drawonpdf(nameofpdf,result[7])
            # Same pipeline signature in all four arms; only the module
            # (3.3 vs 3.2) and the trailing pdf_content argument differ.
            if pdf_content:
                if section.startswith('3.3'):
                    doc,outputimg, SimilarAreaDictionary ,spreadsheetId, spreadsheet_url , namepathArr , list1,hatched_areas , bax_pretty_xml, column_xml=deploying_3_3.mainFunctionDrawImgPdf(dataDoc,temp_filename,result[4] ,SearchArray,CorrectionRatio,points_Of_drawing_Canvas,pdfpath,result[0],pdf_content)
                else:
                    doc,outputimg, SimilarAreaDictionary ,spreadsheetId, spreadsheet_url , namepathArr , list1,hatched_areas, bax_pretty_xml, column_xml=dxf__omar3_2.mainFunctionDrawImgPdf(dataDoc,temp_filename,result[4] ,SearchArray,CorrectionRatio,points_Of_drawing_Canvas,pdfpath,result[0],pdf_content)
            else:
                if section.startswith('3.3'):
                    doc,outputimg, SimilarAreaDictionary ,spreadsheetId, spreadsheet_url , namepathArr , list1,hatched_areas , bax_pretty_xml, column_xml=deploying_3_3.mainFunctionDrawImgPdf(dataDoc,temp_filename,result[4] ,SearchArray,CorrectionRatio,points_Of_drawing_Canvas,pdfpath,result[0])
                else:
                    doc,outputimg, SimilarAreaDictionary ,spreadsheetId, spreadsheet_url , namepathArr , list1,hatched_areas, bax_pretty_xml, column_xml=dxf__omar3_2.mainFunctionDrawImgPdf(dataDoc,temp_filename,result[4] ,SearchArray,CorrectionRatio,points_Of_drawing_Canvas,pdfpath,result[0])
            # Cache the color list for the later /mctnametoGoogleSheet call.
            global colorsused
            colorsused=list(SimilarAreaDictionary['Color'])
            dbPath='/TSA JOBS/ADR Test'+pdfpath+'Measured Plan/'
            print(dbPath,result[0])
            # Upload the annotated PDF plus the BAX/XML exports to Dropbox.
            pdflink= tsadropboxretrieval.uploadanyFile(doc=doc,path=dbPath,pdfname=nameofpdf)
            _, buffer = cv2.imencode('.png', outputimg)
            bax_b64 = base64.b64encode(bax_pretty_xml.encode("utf-8")).decode("ascii")
            xml_b64 = base64.b64encode(column_xml.encode("utf-8")).decode("ascii")
            bax_link= tsadropboxretrieval.upload_string_file(content_str=bax_pretty_xml,filename="baxfile.bax",path=dbPath)
            xml_link= tsadropboxretrieval.upload_string_file(content_str=column_xml,filename="customCols.xml",path=dbPath)
            arr=[ base64.b64encode(buffer).decode('utf-8'),SimilarAreaDictionary.to_dict(), spreadsheet_url,spreadsheetId,[],list1.to_dict(),pdflink,hatched_areas,namepathArr ,bax_b64,xml_b64, bax_link,xml_link]
        elif section.startswith('2.7') :
            print('code of 2.7')
            # Persist the uploaded DXF to a temp file for the 2.7 parser.
            dxfpath=dxffile.read()
            with tempfile.NamedTemporaryFile(suffix='.dxf', delete=False) as temp_file:
                temp_file.write(dxfpath)
                temp_filename = temp_file.name
            print(temp_filename)
            # NOTE: 2.7 packs its parameters differently from 3.x —
            # correction ratio at index 10, canvas coords at index 9.
            CorrectionRatio=result[10]
            SearchArray=result[6]
            CollectedColors=result[7]
            print("CollectedColors in app.py = ",CollectedColors)
            Thickness=result[8]
            print("result[9] = ",result[9])
            parsed_url = urlparse(result[0])
            filename = parsed_url.path.split('/')[-1]
            print(filename)
            nameofpdf=filename
            points_Of_drawing_Canvas=drawonpdf(nameofpdf,result[9])
            print("result for 2.7 = ",result)
            print("SearchArray = ",SearchArray)
            global hatched_areas2_7
            if pdf_content:
                doc,outputimg, SimilarAreaDictionary ,spreadsheetId, spreadsheet_url , namepathArr , list1,hatched_areas,bax_pretty_xml,column_xml=Code_2_7.mainFunctionDrawImgPdf(dataDoc,temp_filename,result[4],SearchArray,CorrectionRatio,CollectedColors,points_Of_drawing_Canvas,Thickness, pdfpath,result[0],pdf_content)
            else:
                doc,outputimg, SimilarAreaDictionary ,spreadsheetId, spreadsheet_url , namepathArr , list1,hatched_areas,bax_pretty_xml,column_xml=Code_2_7.mainFunctionDrawImgPdf(dataDoc,temp_filename,result[4],SearchArray,CorrectionRatio,CollectedColors,points_Of_drawing_Canvas,Thickness, pdfpath,result[0])
            # Stash the hatched areas in a module global for later requests.
            hatched_areas2_7=hatched_areas
            colorsused=list(SimilarAreaDictionary['Color'])
            dbPath='/TSA JOBS/ADR Test'+pdfpath+'Measured Plan/'
            print(dbPath,result[0])
            pdflink= tsadropboxretrieval.uploadanyFile(doc=doc,path=dbPath,pdfname=nameofpdf)
            _, buffer = cv2.imencode('.png', outputimg)
            bax_b64 = base64.b64encode(bax_pretty_xml.encode("utf-8")).decode("ascii")
            xml_b64 = base64.b64encode(column_xml.encode("utf-8")).decode("ascii")
            bax_link= tsadropboxretrieval.upload_string_file(content_str=bax_pretty_xml,filename="baxfile.bax",path=dbPath)
            xml_link= tsadropboxretrieval.upload_string_file(content_str=column_xml,filename="customCols.xml",path=dbPath)
            arr=[ base64.b64encode(buffer).decode('utf-8'),SimilarAreaDictionary.to_dict(), spreadsheet_url,spreadsheetId,[],list1.to_dict(),pdflink,[],namepathArr,bax_b64,xml_b64, bax_link,xml_link]
        elif section.startswith('2.8') or section.startswith('2.6'):
            # 2.8 / 2.6: doors-schedule matching. The schedule source is
            # either a direct link (pdf_content), uploaded CSV files, or
            # schedule PDFs fetched from Dropbox.
            imgss=[]
            dpxlinks=[]
            legendLinks=[]
            listofmarkups=[]
            SearchArray=result[7]
            print('searchhh:',SearchArray)
            print('csv',csvfile)
            CSV_UPLOAD_DIR = os.path.join(os.path.dirname(__file__), "uploaded_csv")
            if pdf_content:
                # result[6] is a (possibly quoted/url-encoded) share link;
                # force a direct Dropbox download by switching to dl=1.
                link = urllib.parse.unquote(result[6].strip('"'))
                if link and ('http' in link or 'dropbox' in link):
                    if 'dl=0' in link:
                        link = link.replace('dl=0', 'dl=1')
                    elif 'www.dropbox.com' in link and '?dl=1' not in link:
                        link += '?dl=1'
                response = requests.get(link)
                pdf_contentSched = BytesIO(response.content)
                annotatedimgs, pdf_document , list1, repeated_labels , not_found, bax_pretty_xml, column_xml =Doors_Schedule.mainRun(pdf_contentSched, dataDoc, SearchArray,pdf_content,pdf_contentSched)
            else:
                sch_csv_pdf = False
                file_names = result[6]
                if not file_names:
                    raise ValueError("No schedule files provided in result[6].")
                first_name = str(file_names[0]).lower()
                if first_name.endswith(".csv"):
                    # CSV schedules: persist the uploads locally and hand the
                    # saved paths to the schedule pipeline.
                    os.makedirs(CSV_UPLOAD_DIR, exist_ok=True)
                    uploaded_csvs = request.files.getlist("csvFilename")
                    saved_paths = []
                    for f in uploaded_csvs:
                        if not f.filename:
                            continue
                        safe_name = secure_filename(f.filename)
                        save_path = os.path.join(CSV_UPLOAD_DIR, safe_name)
                        f.save(save_path)
                        saved_paths.append(save_path)
                    annotatedimgs, pdf_document, list1, repeated_labels, not_found, bax_pretty_xml, column_xml = Doors_Schedule.mainRun(
                        saved_paths,
                        dataDoc,
                        SearchArray,
                        sch_csv_pdf
                    )
                else:
                    # PDF schedules: download each named schedule from Dropbox.
                    dataDocDoorsSchedule = []
                    sch_csv_pdf = True
                    for r in result[6]:
                        pdfpathDoors,_=tsadropboxretrieval.getPathtoPDF_File(nameofPDF= r)
                        dbxTeam= tsadropboxretrieval.ADR_Access_DropboxTeam('user')
                        md, resDoors =dbxTeam.files_download(path=pdfpathDoors)
                        dataDocDoorsSchedule.append(resDoors.content)
                    annotatedimgs, pdf_document , list1, repeated_labels , not_found, bax_pretty_xml, column_xml =Doors_Schedule.mainRun(dataDocDoorsSchedule, dataDoc, SearchArray, sch_csv_pdf)
            dbPath='/TSA JOBS/ADR Test'+pdfpath+'Measured Plan/'
            pdflink= tsadropboxretrieval.uploadanyFile(doc=pdf_document,path=dbPath,pdfname="combined_output.pdf")
            # Empty string means "nothing to report" on the client side.
            repeatedLabelsReturn=''
            NotFoundReturn=''
            if len(repeated_labels)>0:
                repeatedLabelsReturn=repeated_labels
            if len(not_found)>0:
                NotFoundReturn=not_found
            annotatedimgsBuffered=[]
            for b in annotatedimgs:
                _, buffer = cv2.imencode('.png', b)
                b64_str = base64.b64encode(buffer).decode('utf-8')
                annotatedimgsBuffered.append(b64_str)
            bax_b64 = base64.b64encode(bax_pretty_xml.encode("utf-8")).decode("ascii")
            xml_b64 = base64.b64encode(column_xml.encode("utf-8")).decode("ascii")
            dbPath='/TSA JOBS/ADR Test'+pdfpath+'Measured Plan/'
            bax_link= tsadropboxretrieval.upload_string_file(content_str=bax_pretty_xml,filename="baxfile.bax",path=dbPath)
            xml_link= tsadropboxretrieval.upload_string_file(content_str=column_xml,filename="customCols.xml",path=dbPath)
            arr = [
                annotatedimgsBuffered,
                pdflink,
                list1.to_dict(),
                str(repeatedLabelsReturn),
                str(NotFoundReturn),
                bax_b64,
                xml_b64,
                bax_link,
                xml_link
            ]
        elif section.startswith('2.1'):
            # 2.1: column counting on a client-segmented image delivered as a
            # data URL ("data:image/png;base64,...").
            imgss=[]
            dpxlinks=[]
            legendLinks=[]
            listofmarkups=[]
            data_url = result[6]
            header, b64 = data_url.split(',', 1)
            segmented_img_bytes = base64.b64decode(b64)
            output_dict = Counting_Columns_2_1.mainfun(dataDoc, segmented_img_bytes)
            arr = output_dict
        return arr
    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500
    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_details = traceback.format_exc()
            error_msg = log_error(error_details, issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
|
|
|
|
|
|
def drawonpdf(nameofpdf, coords):
    """Annotate page 1 of a Dropbox PDF with polygons/polylines from the canvas.

    Downloads the named PDF, opens it with fitz, and adds one red annotation
    per valid shape in *coords* (each shape is a dict with a "coordinates"
    list of {"x", "y"} points; anything else is skipped).

    Args:
        nameofpdf: file name used to resolve the Dropbox path.
        coords: list of shape dicts drawn on the client canvas.

    Returns:
        The fitz.Point list of the LAST shape processed (empty list when no
        valid shape was found), or a (jsonify, 500) tuple on logged errors.
        NOTE(review): the annotated `doc` is never saved or returned here —
        confirm whether the annotations are intentionally discarded.
    """
    try:
        pdfpath, pdflink = tsadropboxretrieval.getPathtoPDF_File(nameofPDF=nameofpdf)
        dbxTeam = tsadropboxretrieval.ADR_Access_DropboxTeam('user')
        md, res = dbxTeam.files_download(path=pdfpath)
        data = res.content
        doc = fitz.open("pdf", data)
        page = doc[0]
        # BUG FIX: initialize before the loop — previously `return points`
        # raised NameError when coords was empty or every shape was skipped.
        points = []
        for shape in coords:
            if not isinstance(shape, dict):
                continue
            points_list = shape.get("coordinates", [])
            if not isinstance(points_list, list) or len(points_list) < 2:
                continue
            vertices = [(p["x"], p["y"]) for p in points_list if "x" in p and "y" in p]
            # Map canvas coordinates through the page's derotation matrix so
            # the annotation lands correctly on rotated pages.
            points = [fitz.Point(x, y) * page.derotation_matrix for x, y in vertices]
            if len(points) > 2:
                annot = page.add_polygon_annot(points)
            else:
                annot = page.add_polyline_annot(points)
            annot.set_colors(stroke=(1, 0, 0))  # red outline
            annot.set_border(width=1)
            annot.update()
        return points
    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500
    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_details = traceback.format_exc()
            error_msg = log_error(error_details, issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@app.route("/wallsLegend/", methods=["POST"])
def callOmarLegend():
    """Detect legend colors for a walls plan from an uploaded DXF.

    Expects form fields 'nameofPDF' and 'groupedValues' (JSON) plus a 'file'
    upload (DXF). Downloads the matching PDF from Dropbox and runs
    Legend_Detection on the pair, returning the detected colors as JSON.
    """
    # CONSISTENCY FIX: this was the only route without the file's standard
    # try/except + log_error handling; an unhandled failure here previously
    # surfaced as a bare 500 with no log entry.
    try:
        nameofPDF = request.form.get("nameofPDF")
        print('nameofPDF', nameofPDF)
        groupedValues = json.loads(request.form.get("groupedValues"))
        print('groupedValues', groupedValues)
        if "file" not in request.files:
            return jsonify({"error": "No file received"}), 400
        file = request.files["file"]
        # Persist the DXF upload to a temp file for the detector.
        # NOTE(review): delete=False means the temp file is never removed.
        with tempfile.NamedTemporaryFile(suffix=".dxf", delete=False) as temp_file:
            temp_file.write(file.read())
            temp_filename = temp_file.name
        pdfpath, pdflink = tsadropboxretrieval.getPathtoPDF_File(nameofPDF=nameofPDF)
        dbxTeam = tsadropboxretrieval.ADR_Access_DropboxTeam("user")
        md, res = dbxTeam.files_download(path=pdfpath)
        dataDoc = res.content
        colorsArray = Legend_Detection.Legend_Detection(dataDoc, temp_filename, groupedValues)
        print('colorsArray', colorsArray)
        return jsonify(colorsArray)
    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500
    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_details = traceback.format_exc()
            error_msg = log_error(error_details, issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
|
|
|
@app.route("/canvaspdftoimgBackground/<jsdata>",methods=["GET", "POST"])
def pdftoimgCanvas(jsdata):
    """Render page 1 of the named PDF (via plan2img) and return
    [png_base64, height, width] as JSON for the canvas background."""
    try:
        rendered = plan2img(jsdata)[0]
        png_bytes = cv2.imencode('.png', rendered)[1]
        encoded = base64.b64encode(png_bytes).decode('utf-8')
        height, width = rendered.shape[0], rendered.shape[1]
        return jsonify([encoded, height, width])
    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500
    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_details = traceback.format_exc()
            error_msg = log_error(error_details, issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
|
|
|
@app.route("/canvaspdftoimg/<jsdata>",methods=["GET", "POST"])
def pdftoimgCanvas2(jsdata):
    """Render page 1 of the named PDF (via convert2img2_1, which also caches
    it in img_cv2) and return [png_base64, height, width] as JSON."""
    try:
        rendered = convert2img2_1(jsdata)[0]
        png_bytes = cv2.imencode('.png', rendered)[1]
        encoded = base64.b64encode(png_bytes).decode('utf-8')
        height, width = rendered.shape[0], rendered.shape[1]
        return jsonify([encoded, height, width])
    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500
    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_details = traceback.format_exc()
            error_msg = log_error(error_details, issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
|
|
|
def plan2img(nameofpdf):
    """Download the named PDF from Dropbox and rasterize its first page.

    Returns:
        (bgr_image, pdf_bytes): the page as an OpenCV BGR numpy array at the
        default fitz resolution, plus the raw PDF bytes.
    """
    try:
        dropbox_path, _share_link = tsadropboxretrieval.getPathtoPDF_File(nameofPDF=nameofpdf)
        team_client = tsadropboxretrieval.ADR_Access_DropboxTeam('user')
        _meta, response = team_client.files_download(path=dropbox_path)
        pdf_bytes = response.content
        document = fitz.open("pdf", pdf_bytes)
        first_page = document[0]
        pixmap = first_page.get_pixmap()
        pil_img = Image.frombytes('RGB', [pixmap.width, pixmap.height], pixmap.samples)
        # OpenCV expects BGR channel order.
        bgr_image = cv2.cvtColor(np.array(pil_img), cv2.COLOR_RGB2BGR)
        return bgr_image, pdf_bytes
    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500
    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_details = traceback.format_exc()
            error_msg = log_error(error_details, issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
|
|
|
|
|
|
# Shared mutable state for the interactive HSV segmentation endpoints
# (/click and /update_mask).
# NOTE(review): module-level globals are shared across all requests and
# workers — confirm this tool is only used by one user at a time.
img_cv2 = None        # last page rendered by convert2img2_1, as a BGR numpy array
segmented_img = None  # last masked image produced by /update_mask
# Current HSV threshold bounds (OpenCV ranges: H 0-179, S/V 0-255).
current_hsv = {
    'h_min': 0, 'h_max': 179,
    's_min': 0, 's_max': 255,
    'v_min': 0, 'v_max': 255
}
|
|
|
|
|
def convert2img2_1(nameofpdf):
    """Download the named PDF from Dropbox, rasterize page 1 with pdfium,
    cache the result in the module-global img_cv2 (used by /click and
    /update_mask), and return (bgr_image, pdf_bytes)."""
    try:
        global img_cv2
        dropbox_path, _share_link = tsadropboxretrieval.getPathtoPDF_File(nameofPDF=nameofpdf)
        team_client = tsadropboxretrieval.ADR_Access_DropboxTeam('user')
        _meta, response = team_client.files_download(path=dropbox_path)
        pdf_bytes = response.content
        document = pdfium.PdfDocument(pdf_bytes)
        first_page = document.get_page(0)
        rgb_array = np.array(first_page.render().to_pil())
        bgr_image = cv2.cvtColor(rgb_array, cv2.COLOR_RGB2BGR)
        img_cv2 = bgr_image
        return bgr_image, pdf_bytes
    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500
    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_details = traceback.format_exc()
            error_msg = log_error(error_details, issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
def convert2img(path):
    """Rasterize the first page of the PDF at *path* (or bytes/stream accepted
    by pdfium) and return it as an OpenCV BGR numpy array."""
    try:
        document = pdfium.PdfDocument(path)
        first_page = document.get_page(0)
        rgb_array = np.array(first_page.render().to_pil())
        return cv2.cvtColor(rgb_array, cv2.COLOR_RGB2BGR)
    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500
    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_details = traceback.format_exc()
            error_msg = log_error(error_details, issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/click', methods=['POST'])
def click():
    """Sample the pixel the user clicked on the cached image and seed the HSV
    thresholds (min == max == the clicked pixel's HSV) for /update_mask.

    Expects JSON {"x": int, "y": int}; returns the pixel's RGB/HSV info and
    the new threshold bounds.
    """
    try:
        global img_cv2, current_hsv
        if img_cv2 is None:
            return jsonify({'error': 'No image loaded'}), 400
        data = request.json
        x, y = int(data['x']), int(data['y'])
        # ROBUSTNESS FIX: an out-of-bounds click previously raised IndexError
        # and surfaced as a 500; reject it as a client error instead.
        img_h, img_w = img_cv2.shape[:2]
        if not (0 <= x < img_w and 0 <= y < img_h):
            return jsonify({'error': 'Click coordinates outside image bounds'}), 400
        b, g, r = img_cv2[y, x]
        hsv = cv2.cvtColor(np.uint8([[[b, g, r]]]), cv2.COLOR_BGR2HSV)[0][0]
        h, s, v = hsv.tolist()
        # Seed a degenerate range at the clicked color; the sliders widen it.
        current_hsv = {
            'h_min': h, 'h_max': h,
            's_min': s, 's_max': s,
            'v_min': v, 'v_max': v
        }
        return jsonify({
            'info': f'RGB: ({r}, {g}, {b}) - HSV: ({h}, {s}, {v})',
            'hMin': current_hsv['h_min'], 'hMax': current_hsv['h_max'],
            'sMin': current_hsv['s_min'], 'sMax': current_hsv['s_max'],
            'vMin': current_hsv['v_min'], 'vMax': current_hsv['v_max']
        })
    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500
    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_details = traceback.format_exc()
            error_msg = log_error(error_details, issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
|
|
|
@app.route('/update_mask', methods=['POST'])
def update_mask():
    """Recompute the HSV mask over the cached image from the posted slider
    bounds and return the segmented image as a PNG data URL."""
    try:
        global img_cv2, segmented_img, current_hsv
        if img_cv2 is None:
            return jsonify({'error': 'No image uploaded yet'}), 400
        payload = request.get_json(force=True) or {}
        # Missing fields fall back to the full OpenCV HSV range.
        current_hsv = {
            'h_min': int(payload.get('hMin', 0)),
            'h_max': int(payload.get('hMax', 179)),
            's_min': int(payload.get('sMin', 0)),
            's_max': int(payload.get('sMax', 255)),
            'v_min': int(payload.get('vMin', 0)),
            'v_max': int(payload.get('vMax', 255)),
        }
        hsv_image = cv2.cvtColor(img_cv2, cv2.COLOR_BGR2HSV)
        lower_bound = np.array(
            [current_hsv['h_min'], current_hsv['s_min'], current_hsv['v_min']],
            dtype=np.uint8)
        upper_bound = np.array(
            [current_hsv['h_max'], current_hsv['s_max'], current_hsv['v_max']],
            dtype=np.uint8)
        mask = cv2.inRange(hsv_image, lower_bound, upper_bound)
        segmented_img = cv2.bitwise_and(img_cv2, img_cv2, mask=mask)
        ok, png_buffer = cv2.imencode('.png', segmented_img)
        if not ok:
            return jsonify({'error': 'PNG encode failed'}), 500
        encoded = base64.b64encode(png_buffer).decode('utf-8')
        return jsonify({'image_data': 'data:image/png;base64,' + encoded})
    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500
    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_details = traceback.format_exc()
            error_msg = log_error(error_details, issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
|
|
|
@app.route('/img_to_click', methods=['POST'])
def img_to_click():
    """Rasterize page 1 of a posted PDF and return it as base64 PNG text.

    BUG FIX (signature): the handler previously declared a `blob` parameter,
    but the route rule has no `<blob>` placeholder, so Flask raised TypeError
    on every request. The PDF bytes are now read from the request itself
    (a 'file' upload if present, otherwise the raw request body).

    BUG FIX (API mix-up): the body called fitz.open(...) but then used the
    pypdfium2 API (`get_page(0).render().to_pil()`), which fitz documents do
    not provide. It now uses pdfium end-to-end, matching the sibling
    convert2img helpers.
    """
    try:
        if "file" in request.files:
            blob = request.files["file"].read()
        else:
            blob = request.get_data()
        pdf = pdfium.PdfDocument(blob)
        page = pdf.get_page(0)
        pil_image = page.render().to_pil()
        pl1 = np.array(pil_image)
        img = cv2.cvtColor(pl1, cv2.COLOR_RGB2BGR)
        _, buffer = cv2.imencode('.png', img)
        return base64.b64encode(buffer).decode('utf-8')
    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500
    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_details = traceback.format_exc()
            error_msg = log_error(error_details, issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@app.route("/mctnametoGoogleSheet/<jsdata>",methods=["GET", "POST"])
def sendmctnametoLegend(jsdata):
    """Map the posted MCT name data onto the legend Google Sheet, using the
    colors and PDF link cached by measureproject; returns [summary_sheet_id]."""
    try:
        name_mapping = json.loads(jsdata)
        print(name_mapping)
        global pdflink
        summary_id = google_sheet_Legend.mapnametoLegend(name_mapping, colorsused, pdflink)
        return jsonify([summary_id])
    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500
    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_details = traceback.format_exc()
            error_msg = log_error(error_details, issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
@app.route("/getguessednamepath/",methods=["GET", "POST"])
def getguessedNames():
    """Return all guessed names from the legend Google Sheet for the
    client-side dropdown."""
    try:
        dropdown_entries = google_sheet_Legend.getallguessednames()
        return jsonify(dropdown_entries)
    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500
    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_details = traceback.format_exc()
            error_msg = log_error(error_details, issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def calcRef(img):
    """Measure a fixed 70x100 white reference rectangle on a black canvas.

    Builds a black canvas the same shape as *img*, draws a filled white
    rectangle from (50, 100) to (120, 200), and returns the contour area,
    perimeter, and the grayscale mask.

    Args:
        img: image whose shape defines the canvas (assumed 3-channel BGR —
            the BGR2GRAY conversion requires it; TODO confirm callers).

    Returns:
        (area, perimeter, mask) — area/perimeter are None if no contour with
        non-zero moments is found (previously this raised NameError).
    """
    try:
        # IDIOM FIX: np.zeros replaces the obfuscated
        # `np.ones(...) * [[[uint8(0)]*3]]` — both produce an all-zero uint8
        # array of img.shape.
        blk = np.zeros(img.shape, dtype="uint8")
        start_point = (50, 100)
        end_point = (120, 200)
        color = (255, 255, 255)
        thickness = -1  # filled rectangle
        blk = cv2.rectangle(blk, start_point, end_point, color, thickness)
        blk = cv2.cvtColor(blk, cv2.COLOR_BGR2GRAY)
        contourzz, hierarchy = cv2.findContours(image=blk, mode=cv2.RETR_EXTERNAL, method=cv2.CHAIN_APPROX_NONE)
        # ROBUSTNESS FIX: initialize so an empty contour set cannot raise
        # NameError at the return (unused centroid locals also removed).
        area = None
        perimeter = None
        for cnt3 in contourzz:
            M = cv2.moments(cnt3)
            if M['m00'] != 0.0:
                area = cv2.contourArea(cnt3)
                perimeter = cv2.arcLength(cnt3, True)
        return area, perimeter, blk
    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500
    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_details = traceback.format_exc()
            error_msg = log_error(error_details, issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
|
|
|
|
|
|
def modifyingcalcRefDynamic(img):
    """Measure a dynamic reference rectangle sized to 10% of the image.

    Draws a filled rectangle anchored at (50, 100) whose width/height are 10%
    of the image dimensions: blue on a copy of *img* (for display) and white
    on a black mask (for measurement).

    Args:
        img: BGR image; its shape sizes the rectangle and canvases.

    Returns:
        (area, perimeter, mask, preview) — area/perimeter are None if no
        contour with non-zero moments is found (previously a NameError).
    """
    try:
        imgcopy = img.copy()
        # IDIOM FIX: np.zeros replaces the obfuscated ones-times-black-pixel
        # construction; result is identical (all-zero uint8 array).
        blk = np.zeros(img.shape, dtype="uint8")
        x = 50
        y = 100
        xD = int(img.shape[1] * 0.10)  # 10% of width
        yD = int(img.shape[0] * 0.10)  # 10% of height
        start_point = (x, y)
        end_point = (x + xD, y + yD)
        blue = (255, 0, 0)
        white = (255, 255, 255)
        thickness = -1  # filled
        imgcopy = cv2.rectangle(imgcopy, start_point, end_point, blue, thickness)
        blk = cv2.rectangle(blk, start_point, end_point, white, thickness)
        blk = cv2.cvtColor(blk, cv2.COLOR_BGR2GRAY)
        contourzz, hierarchy = cv2.findContours(image=blk, mode=cv2.RETR_EXTERNAL, method=cv2.CHAIN_APPROX_NONE)
        # ROBUSTNESS FIX: initialize so an empty contour set cannot raise
        # NameError at the return (unused centroid locals also removed).
        area = None
        perimeter = None
        for cnt3 in contourzz:
            M = cv2.moments(cnt3)
            if M['m00'] != 0.0:
                area = cv2.contourArea(cnt3)
                perimeter = cv2.arcLength(cnt3, True)
        return area, perimeter, blk, imgcopy
    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500
    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_details = traceback.format_exc()
            error_msg = log_error(error_details, issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/downloadPDFfromLink')
def downloadPDFfromLinkFunc():
    """Proxy-download a PDF from a (possibly Dropbox) share link.

    Query param 'url' holds a URL-encoded link; Dropbox links are rewritten
    to direct-download form (dl=1). The PDF is streamed back as an
    attachment named after the link's path component.
    """
    try:
        encoded_url = request.args.get('url')
        # ROBUSTNESS FIX: a missing param previously crashed in unquote(None).
        if not encoded_url:
            return "Missing 'url' query parameter", 400
        link = urllib.parse.unquote(encoded_url)
        if link and ('http' in link or 'dropbox' in link):
            if 'dl=0' in link:
                link = link.replace('dl=0', 'dl=1')
            elif 'www.dropbox.com' in link and '?dl=1' not in link:
                link += '?dl=1'
        try:
            res = requests.get(link)
            res.raise_for_status()
        except Exception as e:
            return f"Error downloading PDF from link: {e}", 400
        pdf_data = res.content
        filename = link.split("/")[-1].split("?")[0] or "downloaded.pdf"
        response = make_response(pdf_data)
        response.headers.set('Content-Type', 'application/pdf')
        # BUG FIX: the header previously embedded a literal placeholder
        # string instead of interpolating the computed filename.
        response.headers.set('Content-Disposition', f'attachment; filename="{filename}"')
        return response
    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500
    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_details = traceback.format_exc()
            error_msg = log_error(error_details, issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
|
|
|
|
|
|
@app.route('/getdropboxurl/<jsdata>',methods=["GET", "POST"])
def calldropboxurl(jsdata):
    """Resolve a PDF name to its Dropbox share link, download it, and stream
    the PDF bytes back to the client."""
    try:
        print('jsdata', jsdata)
        share_url = tsadropboxretrieval.getPathtoPDF_File(nameofPDF=jsdata)[1]
        print('urll', share_url)
        # Switch Dropbox preview links to direct-download form.
        if share_url and ('http' in share_url or 'dropbox' in share_url):
            if 'dl=0' in share_url:
                share_url = share_url.replace('dl=0', 'dl=1')
        print('urll1', share_url)
        download = requests.get(share_url)
        pdf_stream = BytesIO(download.content)
        if pdf_stream is None:
            raise ValueError("No valid PDF content found.")
        document = fitz.open(stream=pdf_stream, filetype="pdf")
        out_buffer = BytesIO()
        document.save(out_buffer)
        return Response(out_buffer.getvalue(), content_type='application/pdf')
    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500
    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_details = traceback.format_exc()
            error_msg = log_error(error_details, issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
@app.route('/refreshDropbox',methods=["GET", "POST"])
def checkdropbox():
    """Compare the live '/TSA JOBS' Dropbox listing against the cached
    parquet index and re-upload the fresh listing when they differ.

    Returns a short human-readable status string.
    """
    try:
        print('checkingggdf')
        dfFromDropbox = tsadropboxretrieval.DropboxItemstoDF("/TSA JOBS")[0]
        dfParquet = tsadropboxretrieval.GetParquetDF()
        dfParquet1 = dfParquet[['name', 'path_display', 'client_modified', 'server_modified']]
        # Rows appearing in exactly one of the two frames (symmetric difference).
        deletedrows = pd.concat([dfFromDropbox, dfParquet1]).drop_duplicates(keep=False)
        deletedrows = deletedrows.reset_index(drop=True)
        deletedrows.columns = ['name', 'path_display', 'client_modified', 'server_modified']
        differences = deletedrows[~deletedrows.isin(dfFromDropbox)].dropna()
        if len(differences) > 0:
            print(differences)
            # Unused assignment removed: dropbox_upload_file's return value
            # was previously bound to a never-read variable.
            tsadropboxretrieval.dropbox_upload_file(dfFromDropbox)
            stringReturned = 'Updated Sucessfully.'
        else:
            stringReturned = 'Nothing to update.'
        # BUG FIX: previously returned the literal string 'stringReturned'
        # instead of the status variable, so the client always saw the same
        # meaningless text.
        return stringReturned
    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500
    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_details = traceback.format_exc()
            error_msg = log_error(error_details, issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
def refreshDropboxRetrievals(extractedPath):
    """Re-list *extractedPath* on Dropbox and fold the fresh entries into the
    cached parquet index, replacing any stale rows under that path.

    Returns a short status string.
    """
    try:
        fresh_listing = tsadropboxretrieval.DropboxItemstoDF(extractedPath)[0]
        cached_index = tsadropboxretrieval.GetParquetDF()
        wanted_columns = ['name', 'path_display', 'client_modified', 'server_modified']
        cached_index = cached_index[wanted_columns]
        fresh_listing = fresh_listing[wanted_columns]
        # Keep only cached rows OUTSIDE the refreshed path, then append the
        # fresh listing for that path.
        unaffected_rows = cached_index[~cached_index['path_display'].str.startswith(extractedPath)]
        updated_index = pd.concat([unaffected_rows, fresh_listing], ignore_index=True)
        tsadropboxretrieval.dropbox_upload_file(updated_index)
        if len(fresh_listing) > 0:
            print("Updated entries:", fresh_listing)
            return 'Updated Successfully.'
        return 'Nothing to update.'
    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500
    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_details = traceback.format_exc()
            error_msg = log_error(error_details, issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
|
|
|
|
|
|
@app.route('/postdropboxprojects/<path:encoded_path>')
def handle_path(encoded_path):
    """Route: refresh the Dropbox cache for a percent-encoded, JSON-quoted path."""
    try:
        # The client sends the path percent-encoded and wrapped as a JSON string.
        raw_path = urllib.parse.unquote(encoded_path)
        target_path = json.loads(raw_path)
        print('path to refresh', target_path)

        status = refreshDropboxRetrievals(target_path)
        print(status)
        return status

    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500

    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_details = traceback.format_exc()
            error_msg = log_error(error_details, issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
|
|
|
@app.route('/get-pdf/<jsdata>')
def get_pdf(jsdata):
    """Route: download the PDF named *jsdata* from Dropbox and return it as an attachment.

    Looks up the Dropbox path for the PDF, downloads its bytes via the team
    client, and returns them with ``application/pdf`` headers.
    """
    try:
        print('pdfname', jsdata)

        pdfpath, pdflink = tsadropboxretrieval.getPathtoPDF_File(nameofPDF=jsdata)
        print('pdfpath', pdfpath)

        dbxTeam = tsadropboxretrieval.ADR_Access_DropboxTeam('user')
        md, res = dbxTeam.files_download(path=pdfpath)
        pdf_data = res.content
        # FIX: the original wrapped pdf_data in io.BytesIO only to call
        # .getvalue() immediately — a pointless extra copy. Use the bytes directly.
        response = make_response(pdf_data)
        response.headers.set('Content-Type', 'application/pdf')
        # NOTE(review): the download filename is hard-coded; presumably it should
        # be derived from *jsdata* — kept as-is to avoid changing client behavior.
        response.headers.set('Content-Disposition', 'attachment', filename='filename.pdf')

        return response

    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500

    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_details = traceback.format_exc()
            error_msg = log_error(error_details, issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
|
|
|
@app.route('/pixeltestingimg')
def pixeltesting():
    """Route (SSE): fetch a PDF from a URL, render a scale document, upload it.

    Reads a percent-encoded ``url`` query parameter, normalizes Dropbox share
    links to direct-download form, then streams "data: <percent>" progress
    events followed by a JSON payload [areaPixel, perimeterPixel, dburl].
    """
    try:
        encoded_url = request.args.get('url')
        link = urllib.parse.unquote(encoded_url.strip('"'))
        if link and ('http' in link or 'dropbox' in link):
            # Force a direct-download Dropbox link (dl=1).
            if 'dl=0' in link:
                link = link.replace('dl=0', 'dl=1')
            elif 'www.dropbox.com' in link and '?dl=1' not in link:
                link += '?dl=1'

        pdf_path = link
        response = requests.get(pdf_path)
        # BUG FIX: the original checked `if not pdf_content:` on a BytesIO
        # instance, which is ALWAYS truthy, so the "no valid PDF" guard could
        # never fire. Validate the raw response body instead.
        if not response.content:
            raise ValueError("No valid PDF content found.")
        pdf_content = BytesIO(response.content)

        progress_updates = []

        def generate_progressPixel():
            # Server-sent-events generator: emits coarse progress milestones.
            yield f"data: 10\n\n"

            doc, areaPixel, perimeterPixel = pixelconversion.drawisrotated(pdf_content=pdf_content)

            yield f"data: 20\n\n"
            dbPath = '/TSA JOBS/ADR Test/' + 'TestingLinks' + '/' + 'Scale Document' + '/'
            dburl = tsadropboxretrieval.uploadanyFile(doc=doc, pdfname=str('testinglink'), path=dbPath)

            yield f"data: 40\n\n"
            outputs = [areaPixel, perimeterPixel, dburl]
            # Drain any progress values queued by callbacks (none are queued in
            # this route today; kept for symmetry with /pixelimg).
            while progress_updates:
                progress = progress_updates.pop(0)
                yield f"data: {progress}\n\n"

            yield f"data: 80\n\n"
            yield f"data: 100\n\n"
            result = json.dumps(outputs)
            yield f"data: {result}\n\n"

        return Response(generate_progressPixel(), content_type='text/event-stream')

    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500

    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_details = traceback.format_exc()
            error_msg = log_error(error_details, issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
|
|
|
@app.route("/pixelimg/<jsdata>",methods=["GET", "POST"])
def getimg(jsdata):
    """Route (SSE): render a scale-conversion document for a stored PDF.

    *jsdata* is a URL path segment holding a list literal
    [project, part, section, pdfname]. Streams "data: <percent>" progress
    events and finally a JSON payload [areaPixel, perimeterPixel, dburl].
    """
    try:
        progress_updates = []
        # SECURITY FIX: the original used eval() on URL-supplied data, which
        # executes arbitrary expressions. ast.literal_eval only parses Python
        # literals (lists/strings/numbers), preserving behavior for the
        # well-formed list inputs this route expects while rejecting code
        # injection.
        import ast
        jsdata = ast.literal_eval(jsdata)
        print('piexxxeell', jsdata)

        def generate_progressPixel():
            yield f"data: 10\n\n"

            # Dropbox lookup reports incremental progress via the callback.
            pdfpath, pdflink = tsadropboxretrieval.getPathtoPDF_File(
                nameofPDF=jsdata[3],
                progress_callback=lambda p: progress_updates.append(p))

            dbxTeam = tsadropboxretrieval.ADR_Access_DropboxTeam('user')
            md, res = dbxTeam.files_download(path=pdfpath)
            data = res.content

            # Section "1.0" drawings are rendered at a fixed 300 dpi.
            if str(jsdata[2]).startswith('1.0'):
                doc, areaPixel, perimeterPixel = pixelconversion.drawisrotated(data=data, dpi=300)
            else:
                doc, areaPixel, perimeterPixel = pixelconversion.drawisrotated(data=data)

            yield f"data: 20\n\n"
            dbPath = '/TSA JOBS/ADR Test/'+jsdata[0]+'/'+jsdata[1]+'/'+jsdata[2]+'/'+'Scale Document' +'/'
            dburl = tsadropboxretrieval.uploadanyFile(doc=doc, pdfname=str(jsdata[3]), path=dbPath)

            yield f"data: 40\n\n"
            outputs = [areaPixel, perimeterPixel, dburl]
            # Flush progress values queued by the Dropbox lookup callback.
            while progress_updates:
                progress = progress_updates.pop(0)
                yield f"data: {progress}\n\n"

            yield f"data: 80\n\n"
            yield f"data: 100\n\n"
            result = json.dumps(outputs)
            yield f"data: {result}\n\n"

        return Response(generate_progressPixel(), content_type='text/event-stream')

    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500

    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_details = traceback.format_exc()
            error_msg = log_error(error_details, issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/_submission',methods=["GET", "POST"])
def getnewlegend():
    """Route: delete markup rows from the legend sheet and return the deleted rows.

    Expects a JSON body with keys ``dict1``, ``path``, ``spreadsheetId`` and
    ``pdfpathpath`` (a stringified list of path segments). Returns the deleted
    rows as a one-element JSON list of dicts.
    """
    try:
        pdfpth = ''
        alljson = request.get_json()
        list1 = alljson.get('dict1')
        print('list1', list1)

        path = alljson.get('path')
        spreadsheetId = alljson.get('spreadsheetId')
        pdfpathpath = alljson.get('pdfpathpath')
        print(pdfpathpath, type(pdfpathpath))
        pdfname = request.args.get('pdfname')

        # SECURITY FIX: eval() on request-supplied data executes arbitrary
        # expressions; ast.literal_eval only parses literals, which is all the
        # stringified segment list requires.
        import ast
        for word in ast.literal_eval(pdfpathpath):
            pdfpth += '/' + word
        pdfpth += '/'
        dbPath = '/TSA JOBS/ADR Test'+pdfpth+'Measured Plan/'
        print(pdfpth)

        deletedrows1 = google_sheet_Legend.deletemarkups(list1=list1, dbPath=dbPath, path=path)
        arr1 = [deletedrows1.to_dict()]
        print('arr,', arr1)
        return jsonify(arr1)

    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500

    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_details = traceback.format_exc()
            error_msg = log_error(error_details, issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
|
|
|
@app.route('/deletemarkupsroute',methods=["GET", "POST"])
def dltmarkupslegend():
    """Route: remove deleted markup rows from the legend and re-sync Google Sheets.

    Expects a JSON body with ``dict`` (similar-area dictionary), ``deletedrows``,
    ``path``, ``spreadsheetId``, ``areaPermArr``, ``section`` and ``pdfpathpath``
    (stringified list of path segments). Dispatches to the section-specific
    legend-deletion helper, then writes the updated legend to Google Sheets
    (retrying once after 20 s on failure). Returns the JSON string 'donee'.
    """
    try:
        print('IN deletemarkupsroute')
        pdfpth = ''
        alljson = request.get_json()
        SimilarAreaDictionary = alljson.get('dict')

        deletedrows = alljson.get('deletedrows')
        print('deletedrowsssssssssssssssssssssssssssssss', deletedrows)

        path = alljson.get('path')
        spreadsheetId = alljson.get('spreadsheetId')

        areaPermArr = alljson.get('areaPermArr')
        print('aaaaaaaaaaaaa', areaPermArr)

        section = alljson.get('section')
        pdfpathpath = alljson.get('pdfpathpath')

        # SECURITY FIX: eval() on request-supplied data executes arbitrary
        # expressions; ast.literal_eval only parses literals, which is all the
        # stringified segment list requires.
        import ast
        for word in ast.literal_eval(pdfpathpath):
            pdfpth += '/' + word
        pdfpth += '/'

        deletedrows = pd.DataFrame(deletedrows)
        print('deletedrows', deletedrows)

        if section.startswith('2.7'):
            # NOTE(review): hatched_areas2_7 is a module global presumably set
            # by an earlier 2.7 request; if that request never ran this raises
            # NameError (caught below) — confirm the intended lifecycle.
            areaPermArr = hatched_areas2_7
        # Section-specific deletion strategy.
        if section.startswith('1.0') or section.startswith('3.2') or section.startswith('3.3'):
            newlgnd = google_sheet_Legend.deletefromlegend(deletedrows=deletedrows, SimilarAreaDictionarycopy=SimilarAreaDictionary, section=section, areaPermArr=areaPermArr)
        elif section.startswith('2.8') or section.startswith('2.6'):
            newlgnd = google_sheet_Legend.deletedoors(deletedrows, SimilarAreaDictionary)
            print('done wit 2.8 in deleting, didnt append yet ')
        else:
            newlgnd = google_sheet_Legend.deletefromlegend(deletedrows=deletedrows, SimilarAreaDictionarycopy=SimilarAreaDictionary, section=section)

        try:
            newlgnd = google_sheet_Legend.legendGoogleSheets(SimilarAreaDictionary=newlgnd, path=path, spreadsheetId=spreadsheetId, pdfpath=pdfpth)
        # FIX: was a bare `except:`, which also swallows SystemExit and
        # KeyboardInterrupt; narrowed to Exception. Retry once after a pause
        # (best-effort against transient Sheets API errors).
        except Exception:
            print("An exception occurred")
            time.sleep(20)
            newlgnd = google_sheet_Legend.legendGoogleSheets(SimilarAreaDictionary=newlgnd, path=path, spreadsheetId=spreadsheetId, pdfpath=pdfpth)

        return jsonify('donee')

    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500

    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_details = traceback.format_exc()
            error_msg = log_error(error_details, issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/getAPITables/',methods=["GET", "POST"])
def returnAPITables():
    """Route: return the three API tables (projects, parts, sections) as JSON dicts."""
    try:
        projects, parts, sections = API.GenerateTables()
        payload = [projects.to_dict(), parts.to_dict(), sections.to_dict()]
        return jsonify(payload)

    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500

    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_details = traceback.format_exc()
            error_msg = log_error(error_details, issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
@app.route('/refreshAPI', methods=["GET", "POST"])
def checktables():
    """Diff freshly fetched API tables against the module-level cache.

    Query params (all optional): ``prjname``, ``prjpart``, ``prjpartid`` narrow
    the comparison to one project's parts or one part's sections.  Returns a
    JSON list describing the first detected change (projects take precedence
    over parts, parts over sections — the elif chain is deliberate), or
    ["No changes detected"].  Always replaces the cache with the fresh tables
    before returning.
    """
    try:
        projectname = request.args.get('prjname')
        projectpart = request.args.get('prjpart')
        projectpartid = request.args.get('prjpartid')

        # Shared cache mutated below; declared global at module level.
        global cached_tables

        # Nothing to diff against until a first fetch populated the cache.
        if cached_tables["projects"] is None or cached_tables["projects"].empty:
            return jsonify(["No cached data available yet. Please fetch projects first."])

        new_projects, new_parts, new_sections = API.GenerateTables()

        returnString = None

        # 1) Project-level diff: set comparison ignores order and duplicates.
        old_names = cached_tables["projects"]["ProjectName"].tolist()
        new_names = new_projects["ProjectName"].tolist()
        if set(old_names) != set(new_names):
            added = list(set(new_names) - set(old_names))
            removed = list(set(old_names) - set(new_names))
            returnString = ["Changes in Projects", "project", {"added": added, "removed": removed}]

        # 2) Part-level diff for the named project (only if projects unchanged).
        elif projectname and cached_tables["parts"] is not None and not cached_tables["parts"].empty:
            # NOTE(review): .values[0] raises IndexError when projectname is not
            # present in the fresh table; that surfaces as the generic 500 below.
            prjid = new_projects.loc[new_projects['ProjectName'] == projectname, 'ProjectId'].values[0]
            old_parts = cached_tables["parts"].loc[cached_tables["parts"]["ProjectId"] == prjid, "ProjectPart"].tolist()
            new_parts_list = new_parts.loc[new_parts["ProjectId"] == prjid, "ProjectPart"].tolist()
            if set(old_parts) != set(new_parts_list):
                added = list(set(new_parts_list) - set(old_parts))
                removed = list(set(old_parts) - set(new_parts_list))
                returnString = ["Changes in Parts", "part", {"added": added, "removed": removed}]

        # 3) Section-level diff for the named project part (only if neither
        #    projects nor the earlier branches matched).
        elif projectname and projectpart and projectpartid and cached_tables["sections"] is not None and not cached_tables["sections"].empty:
            prjid = new_projects.loc[new_projects['ProjectName'] == projectname, 'ProjectId'].values[0]
            # NOTE(review): old sections are not filtered by project/part here,
            # unlike the fresh ones below — confirm that the cache only ever
            # holds the currently selected part's sections.
            old_sections = cached_tables["sections"]["ProjectSection"].tolist()
            new_sections_list = new_sections[
                (new_sections["ProjectId"] == prjid) &
                (new_sections["ProjectPartId"] == int(projectpartid))
            ]["ProjectSection"].tolist()
            if set(old_sections) != set(new_sections_list):
                added = list(set(new_sections_list) - set(old_sections))
                removed = list(set(old_sections) - set(new_sections_list))
                returnString = ["Changes in Sections", "section", {"added": added, "removed": removed}]

        if not returnString:
            returnString = ["No changes detected"]

        # Refresh the cache regardless of whether a change was reported.
        cached_tables["projects"] = new_projects
        cached_tables["parts"] = new_parts
        cached_tables["sections"] = new_sections

        return jsonify(returnString)

    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500

    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_details = traceback.format_exc()
            error_msg = log_error(error_details, issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
@app.route('/refreshAPIAppendNewTables',methods=["GET", "POST"])
def appendNewTables():
    """Route: push the latest API tables into Google Sheets and acknowledge."""
    try:
        # Delegates entirely to the API helper; no payload is consumed.
        API.AppendtablestoSheets()
        return jsonify('appended')

    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500

    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_details = traceback.format_exc()
            error_msg = log_error(error_details, issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
|
|
|
|
|
|
@app.route('/summarytoXML/<jsdata>',methods=["GET", "POST"])
def cvtSummarytoXML(jsdata):
    """Route: export a summary sheet to XML under the job's Dropbox folder.

    *jsdata* is a JSON-encoded pair [path_segments, document_name]. Returns the
    link to the generated XML file as JSON.
    """
    try:
        result = json.loads(jsdata)
        segments, documentname = result[0], result[1]

        # Assemble '/TSA JOBS/ADR Test/<seg1>/<seg2>/.../' from the segments.
        path = '/TSA JOBS/ADR Test/'
        for segment in segments:
            path = path + segment + '/'
        print(path)
        path = path + 'XML/'

        xmllink = google_sheet_to_xml.create_xml(documentname=documentname, dbPath=path)
        return jsonify(xmllink)

    except (ConnectionError, TimeoutError) as e:
        from flask import current_app
        with current_app.app_context():
            error_msg = log_error(str(e), issue_type="connection")
            return jsonify({"error": error_msg}), 500

    except Exception:
        from flask import current_app
        with current_app.app_context():
            error_details = traceback.format_exc()
            error_msg = log_error(error_details, issue_type="backend")
            return jsonify({"error": error_msg}), 500
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def runn():
    """Serve the Flask app on 0.0.0.0:7860 via gevent's WSGI server (blocks forever)."""
    from gevent.pywsgi import WSGIServer

    server = WSGIServer(("0.0.0.0", 7860), app)
    server.serve_forever()
|
|
|
|
|
|
|
|
|
|
|
def keep_alive():
    """Start the web server on a background thread so the main thread stays free."""
    worker = Thread(target=runn)
    worker.start()
|
|
|
|
|
# Module-level scheduler: start the web server on a background thread, then
# poll every 30 minutes and trigger checkdropbox() twice a day (21:00 and
# 09:00 anchor, advancing in 12-hour steps).
dtn = datetime.datetime.now(datetime.timezone.utc)
print(dtn)
# NOTE(review): datetime(y, m, d, 21, 0, 0) is naive, so .astimezone()
# interprets 21:00 in the SERVER'S LOCAL timezone before converting to UTC,
# while the date components come from the UTC clock — confirm this mix is the
# intended trigger time.
next_start = datetime.datetime(dtn.year, dtn.month, dtn.day, 21, 0, 0).astimezone(datetime.timezone.utc)
print(next_start)
keep_alive()

# Infinite polling loop. NOTE(review): this runs at import time, so anything
# below it in the module (including the __main__ guard) is never reached.
while 1:
    dtnNow = datetime.datetime.now(datetime.timezone.utc)
    print(dtnNow)
    if dtnNow >= next_start:
        # Advance the anchor by 12 h and run the Dropbox check (defined
        # elsewhere in this file).
        next_start += datetime.timedelta(hours=12)
        print(next_start)
        checkdropbox()

    # Poll twice an hour; a trigger can therefore fire up to ~30 min late.
    time.sleep(1800)

# NOTE(review): unreachable — the while-loop above never terminates, and
# keep_alive() already started the server on import.
if __name__ == "__main__":
    runn()
|
|
|
|
|
|