Update app.py
Browse files
app.py
CHANGED
|
@@ -727,113 +727,174 @@ with st.sidebar:
|
|
| 727 |
st.markdown("© GeoMate • Advanced geotechnical copilot", unsafe_allow_html=True)
|
| 728 |
|
| 729 |
# 7) Pages implementation
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 730 |
def landing_page():
|
| 731 |
-
|
| 732 |
-
|
|
|
|
|
|
|
|
|
|
| 733 |
|
| 734 |
-
#
|
| 735 |
-
st.markdown(
|
| 736 |
-
<
|
| 737 |
-
|
| 738 |
-
|
| 739 |
-
|
| 740 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 741 |
border-radius: 16px;
|
|
|
|
|
|
|
| 742 |
margin-bottom: 24px;
|
| 743 |
-
|
| 744 |
-
|
| 745 |
-
|
| 746 |
-
|
| 747 |
-
|
| 748 |
-
|
| 749 |
-
|
| 750 |
-
|
| 751 |
-
|
| 752 |
-
|
| 753 |
-
|
| 754 |
-
|
| 755 |
-
|
| 756 |
-
|
| 757 |
-
|
| 758 |
-
|
| 759 |
-
|
| 760 |
-
|
| 761 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 762 |
""", unsafe_allow_html=True)
|
| 763 |
|
| 764 |
-
#
|
| 765 |
-
st.markdown("""
|
| 766 |
-
<div
|
| 767 |
-
<
|
| 768 |
-
|
| 769 |
-
|
| 770 |
-
|
| 771 |
-
|
| 772 |
-
<
|
| 773 |
-
|
| 774 |
-
|
| 775 |
-
|
| 776 |
-
<
|
| 777 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 778 |
</div>
|
| 779 |
</div>
|
| 780 |
</div>
|
| 781 |
""", unsafe_allow_html=True)
|
| 782 |
-
st.markdown("---")
|
| 783 |
|
| 784 |
-
#
|
| 785 |
-
|
| 786 |
-
|
| 787 |
-
|
| 788 |
-
|
| 789 |
-
|
| 790 |
-
|
| 791 |
-
-
|
| 792 |
-
-
|
| 793 |
-
-
|
| 794 |
-
|
| 795 |
-
|
| 796 |
-
|
| 797 |
-
|
| 798 |
-
|
| 799 |
-
|
| 800 |
-
#
|
| 801 |
-
|
| 802 |
-
|
| 803 |
-
|
| 804 |
-
|
| 805 |
-
|
| 806 |
-
|
| 807 |
-
|
| 808 |
-
|
| 809 |
-
|
| 810 |
-
|
| 811 |
-
|
| 812 |
-
|
| 813 |
-
|
| 814 |
-
|
| 815 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 816 |
|
| 817 |
-
# -------------------- SITE SUMMARY --------------------
|
| 818 |
-
with col2:
|
| 819 |
-
site = st.session_state["sites"][st.session_state["active_site"]]
|
| 820 |
-
st.markdown("""
|
| 821 |
-
<div style="
|
| 822 |
-
background: #1e1e1e;
|
| 823 |
-
border-radius:14px;
|
| 824 |
-
padding:20px;
|
| 825 |
-
text-align:center;
|
| 826 |
-
box-shadow:0 4px 14px rgba(0,0,0,0.5);
|
| 827 |
-
">
|
| 828 |
-
<h3 style='color:#FF8C00; margin:0 0 10px'>📍 Live Site Summary</h3>
|
| 829 |
-
""", unsafe_allow_html=True)
|
| 830 |
-
|
| 831 |
-
st.write(f"🏗️ **Site:** {site.get('Site Name')}")
|
| 832 |
-
st.write(f"🧱 **USCS:** {site.get('USCS', '—')}")
|
| 833 |
-
st.write(f"🛣️ **AASHTO:** {site.get('AASHTO', '—')}")
|
| 834 |
-
st.write(f"📊 **GSD saved:** {'✅ Yes' if site.get('GSD') else '❌ No'}")
|
| 835 |
-
|
| 836 |
-
st.markdown("</div>", unsafe_allow_html=True)
|
| 837 |
|
| 838 |
# Soil Classifier page (conversational, step-by-step)
|
| 839 |
def soil_classifier_page():
|
|
@@ -1325,49 +1386,51 @@ def locator_page():
|
|
| 1325 |
pass
|
| 1326 |
|
| 1327 |
# ----------------------------
|
| 1328 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1329 |
m.to_streamlit(height=600, responsive=True)
|
| 1330 |
|
| 1331 |
-
#
|
| 1332 |
-
|
|
|
|
| 1333 |
|
| 1334 |
-
#
|
| 1335 |
-
|
| 1336 |
-
|
| 1337 |
-
roi = m.user_roi
|
| 1338 |
-
st.session_state["roi_geojson"] = roi.toGeoJSONString()
|
| 1339 |
-
st.success("✅ ROI captured from user drawing")
|
| 1340 |
-
except Exception as e:
|
| 1341 |
-
st.warning(f"Could not parse m.user_roi: {e}")
|
| 1342 |
|
| 1343 |
-
|
| 1344 |
-
|
| 1345 |
-
|
| 1346 |
-
|
| 1347 |
-
|
| 1348 |
-
|
| 1349 |
-
|
| 1350 |
-
|
| 1351 |
-
|
|
|
|
|
|
|
|
|
|
| 1352 |
|
| 1353 |
-
#
|
| 1354 |
-
|
|
|
|
| 1355 |
try:
|
| 1356 |
-
|
| 1357 |
-
|
| 1358 |
-
|
| 1359 |
-
|
| 1360 |
except Exception as e:
|
| 1361 |
st.warning(f"Could not restore ROI: {e}")
|
| 1362 |
|
| 1363 |
-
#
|
| 1364 |
if st.button("Compute Summaries"):
|
| 1365 |
if roi is None:
|
| 1366 |
st.error("⚠️ No ROI found. Please draw a polygon/rectangle/circle and try again.")
|
| 1367 |
else:
|
| 1368 |
-
st.success("
|
| 1369 |
-
|
| 1370 |
-
|
| 1371 |
|
| 1372 |
chosen_soil_band = None
|
| 1373 |
if soil_img:
|
|
@@ -1550,6 +1613,7 @@ def rag_page():
|
|
| 1550 |
# -------------------
|
| 1551 |
# Report fields (still needed in reports_page)
|
| 1552 |
# -------------------
|
|
|
|
| 1553 |
REPORT_FIELDS = [
|
| 1554 |
("Load Bearing Capacity", "kPa or psf"),
|
| 1555 |
("Skin Shear Strength", "kPa"),
|
|
@@ -1579,172 +1643,209 @@ from reportlab.lib import colors
|
|
| 1579 |
from reportlab.lib.pagesizes import A4
|
| 1580 |
from reportlab.lib.units import mm
|
| 1581 |
|
| 1582 |
-
#
|
| 1583 |
-
#
|
| 1584 |
-
#
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1585 |
def build_full_geotech_pdf(
|
| 1586 |
site: Dict[str, Any],
|
| 1587 |
filename: str,
|
| 1588 |
include_map_image: Optional[bytes] = None,
|
| 1589 |
ext_refs: Optional[List[str]] = None
|
| 1590 |
):
|
| 1591 |
-
"""
|
| 1592 |
-
Build a professional PDF report using site data + references.
|
| 1593 |
-
"""
|
| 1594 |
styles = getSampleStyleSheet()
|
| 1595 |
-
title_style = ParagraphStyle(
|
| 1596 |
-
|
| 1597 |
-
|
| 1598 |
-
|
| 1599 |
-
h1 = ParagraphStyle(
|
| 1600 |
-
"h1", parent=styles["Heading1"], fontSize=14,
|
| 1601 |
-
textColor=colors.HexColor("#1F4E79"), spaceAfter=6
|
| 1602 |
-
)
|
| 1603 |
body = ParagraphStyle("body", parent=styles["BodyText"], fontSize=10.5, leading=13)
|
| 1604 |
-
bullet = ParagraphStyle("bullet", parent=body, leftIndent=12, bulletIndent=6)
|
| 1605 |
|
| 1606 |
-
doc = SimpleDocTemplate(
|
| 1607 |
-
|
| 1608 |
-
|
| 1609 |
-
topMargin=18*mm, bottomMargin=18*mm
|
| 1610 |
-
)
|
| 1611 |
elems = []
|
| 1612 |
|
| 1613 |
-
#
|
| 1614 |
elems.append(Paragraph("GEOTECHNICAL INVESTIGATION REPORT", title_style))
|
| 1615 |
elems.append(Spacer(1, 12))
|
| 1616 |
-
|
| 1617 |
-
|
| 1618 |
-
elems.append(Paragraph(f"<b>{company}</b>", body))
|
| 1619 |
-
if contact:
|
| 1620 |
-
elems.append(Paragraph(contact, body))
|
| 1621 |
-
elems.append(Spacer(1, 12))
|
| 1622 |
elems.append(Paragraph(f"<b>Project:</b> {site.get('Project Name','-')}", body))
|
| 1623 |
elems.append(Paragraph(f"<b>Site:</b> {site.get('Site Name','-')}", body))
|
| 1624 |
elems.append(Paragraph(f"<b>Date:</b> {datetime.today().strftime('%Y-%m-%d')}", body))
|
| 1625 |
elems.append(PageBreak())
|
| 1626 |
|
| 1627 |
-
#
|
| 1628 |
elems.append(Paragraph("TABLE OF CONTENTS", h1))
|
| 1629 |
toc_items = [
|
| 1630 |
-
"1.0 Introduction",
|
| 1631 |
-
"
|
| 1632 |
-
"
|
| 1633 |
-
"
|
| 1634 |
-
"5.0 Provisional site classification",
|
| 1635 |
-
"6.0 Recommendations",
|
| 1636 |
-
"7.0 LLM Analysis",
|
| 1637 |
-
"8.0 Figures & Tables",
|
| 1638 |
-
"9.0 Appendices & References"
|
| 1639 |
]
|
| 1640 |
-
for i, t in enumerate(toc_items,
|
| 1641 |
elems.append(Paragraph(f"{i}. {t}", body))
|
| 1642 |
elems.append(PageBreak())
|
| 1643 |
|
| 1644 |
-
#
|
| 1645 |
-
elems.append(Paragraph("SUMMARY", h1))
|
| 1646 |
-
|
| 1647 |
-
f"Site: {site.get('Site Name','-')}.",
|
| 1648 |
-
f"General geology: {site.get('Soil Profile','Not provided')}.",
|
| 1649 |
-
f"Key lab tests: {', '.join([r.get('sampleId','') for r in site.get('Laboratory Results',[])]) if site.get('Laboratory Results') else 'No lab results provided.'}",
|
| 1650 |
-
f"Classification: USCS = {site.get('USCS','Not provided')}; AASHTO = {site.get('AASHTO','Not provided')}.",
|
| 1651 |
-
"Primary recommendation: See Recommendations section."
|
| 1652 |
-
]
|
| 1653 |
-
for s in summary_bullets:
|
| 1654 |
-
elems.append(Paragraph(f"• {s}", bullet))
|
| 1655 |
elems.append(PageBreak())
|
| 1656 |
|
| 1657 |
-
|
| 1658 |
-
elems.append(Paragraph(
|
| 1659 |
-
|
| 1660 |
-
elems.append(Paragraph(
|
| 1661 |
-
|
| 1662 |
-
|
| 1663 |
-
elems.append(Paragraph("2.0 SITE DESCRIPTION AND GEOLOGY", h1))
|
| 1664 |
-
site_geo = [
|
| 1665 |
-
f"Topography: {site.get('Topography','Not provided')}",
|
| 1666 |
-
f"Drainage: {site.get('Drainage','Not provided')}",
|
| 1667 |
-
f"Current land use: {site.get('Current Land Use','Not provided')}",
|
| 1668 |
-
f"Regional geology: {site.get('Regional Geology','Not provided')}"
|
| 1669 |
-
]
|
| 1670 |
-
for t in site_geo:
|
| 1671 |
-
elems.append(Paragraph(t, body))
|
| 1672 |
elems.append(PageBreak())
|
| 1673 |
|
| 1674 |
-
|
| 1675 |
-
elems.append(Paragraph(
|
| 1676 |
-
if site.get("Field Investigation"):
|
| 1677 |
-
for item in site["Field Investigation"]:
|
| 1678 |
-
elems.append(Paragraph(f"<b>{item.get('id','Test')}</b> — depth {item.get('depth','-')}", body))
|
| 1679 |
-
for layer in item.get("layers", []):
|
| 1680 |
-
elems.append(Paragraph(f"- {layer.get('depth','')} : {layer.get('description','')}", body))
|
| 1681 |
-
else:
|
| 1682 |
-
elems.append(Paragraph("No field investigation data supplied.", body))
|
| 1683 |
-
|
| 1684 |
-
lab_rows = site.get("Laboratory Results", [])
|
| 1685 |
-
if lab_rows:
|
| 1686 |
-
elems.append(Spacer(1, 6))
|
| 1687 |
-
elems.append(Paragraph("Laboratory Results", h1))
|
| 1688 |
-
data = [["Sample ID","Material","LL","PI","Linear Shrinkage","%Clay","%Silt","%Sand","%Gravel","Expansiveness"]]
|
| 1689 |
-
for r in lab_rows:
|
| 1690 |
-
data.append([
|
| 1691 |
-
r.get("sampleId","-"), r.get("material","-"),
|
| 1692 |
-
str(r.get("liquidLimit","-")), str(r.get("plasticityIndex","-")),
|
| 1693 |
-
str(r.get("linearShrinkage","-")), str(r.get("percentClay","-")),
|
| 1694 |
-
str(r.get("percentSilt","-")), str(r.get("percentSand","-")),
|
| 1695 |
-
str(r.get("percentGravel","-")), r.get("potentialExpansiveness","-")
|
| 1696 |
-
])
|
| 1697 |
-
t = Table(data, repeatRows=1, colWidths=[40*mm,40*mm,18*mm,18*mm,22*mm,20*mm,20*mm,20*mm,20*mm,30*mm])
|
| 1698 |
-
t.setStyle(TableStyle([
|
| 1699 |
-
('BACKGROUND',(0,0),(-1,0),colors.HexColor("#1F4E79")),
|
| 1700 |
-
('TEXTCOLOR',(0,0),(-1,0),colors.white),
|
| 1701 |
-
('GRID',(0,0),(-1,-1),0.4,colors.grey),
|
| 1702 |
-
('BOX',(0,0),(-1,-1),1,colors.HexColor("#FF7A00"))
|
| 1703 |
-
]))
|
| 1704 |
-
elems.append(t)
|
| 1705 |
elems.append(PageBreak())
|
| 1706 |
|
| 1707 |
-
|
| 1708 |
-
elems.append(Paragraph(
|
| 1709 |
-
elems.append(Paragraph(site.get("Evaluation","Evaluation not provided."), body))
|
| 1710 |
-
elems.append(Paragraph("5.0 PROVISIONAL SITE CLASSIFICATION", h1))
|
| 1711 |
-
elems.append(Paragraph(site.get("Provisional Classification","Not provided."), body))
|
| 1712 |
-
elems.append(Paragraph("6.0 RECOMMENDATIONS", h1))
|
| 1713 |
-
elems.append(Paragraph(site.get("Recommendations","Not provided."), body))
|
| 1714 |
-
|
| 1715 |
-
# --- LLM Analysis ---
|
| 1716 |
-
elems.append(Paragraph("7.0 LLM ANALYSIS (GeoMate)", h1))
|
| 1717 |
-
llm_text = site.get("LLM_Report_Text", None)
|
| 1718 |
-
if llm_text:
|
| 1719 |
-
elems.append(Paragraph(llm_text.replace("\n","\n\n"), body))
|
| 1720 |
-
else:
|
| 1721 |
-
elems.append(Paragraph("No LLM analysis saved for this site.", body))
|
| 1722 |
|
| 1723 |
-
|
| 1724 |
-
|
| 1725 |
-
|
| 1726 |
-
tmp = tempfile.NamedTemporaryFile(delete=False, suffix=".png")
|
| 1727 |
-
tmp.write(include_map_image)
|
| 1728 |
-
tmp.flush()
|
| 1729 |
-
elems.append(PageBreak())
|
| 1730 |
-
elems.append(Paragraph("Map Snapshot", h1))
|
| 1731 |
-
elems.append(RLImage(tmp.name, width=160*mm, height=90*mm))
|
| 1732 |
-
except Exception:
|
| 1733 |
-
pass
|
| 1734 |
|
| 1735 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1736 |
elems.append(PageBreak())
|
| 1737 |
-
elems.append(Paragraph("
|
| 1738 |
if ext_refs:
|
| 1739 |
for r in ext_refs:
|
| 1740 |
elems.append(Paragraph(f"- {r}", body))
|
| 1741 |
else:
|
| 1742 |
-
elems.append(Paragraph("
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1743 |
|
| 1744 |
doc.build(elems)
|
| 1745 |
return filename
|
| 1746 |
|
| 1747 |
|
|
|
|
|
|
|
|
|
|
| 1748 |
# -------------------------------
|
| 1749 |
# Reports Page
|
| 1750 |
# -------------------------------
|
|
@@ -1758,31 +1859,20 @@ def reports_page():
|
|
| 1758 |
st.markdown("You have a saved classification for this site.")
|
| 1759 |
if st.button("Generate Classification PDF"):
|
| 1760 |
fname = f"classification_{site['Site Name'].replace(' ','_')}.pdf"
|
| 1761 |
-
|
| 1762 |
-
|
| 1763 |
-
|
| 1764 |
-
elems.append(Paragraph("Soil Classification Report", getSampleStyleSheet()['Title']))
|
| 1765 |
-
elems.append(Spacer(1,6))
|
| 1766 |
-
elems.append(Paragraph(f"Site: {site.get('Site Name')}", getSampleStyleSheet()['Normal']))
|
| 1767 |
-
elems.append(Spacer(1,6))
|
| 1768 |
-
elems.append(Paragraph("Classification result:", getSampleStyleSheet()['Heading2']))
|
| 1769 |
-
elems.append(Paragraph(site.get("classifier_decision","-"), getSampleStyleSheet()['BodyText']))
|
| 1770 |
-
|
| 1771 |
-
# Add FAISS citations if present in rag_history
|
| 1772 |
if "rag_history" in st.session_state and site.get("Site ID") in st.session_state["rag_history"]:
|
| 1773 |
-
refs = []
|
| 1774 |
for h in st.session_state["rag_history"][site["Site ID"]]:
|
| 1775 |
-
if h["who"]=="bot" and "[ref:" in h["text"]:
|
| 1776 |
for m in re.findall(r"\[ref:([^\]]+)\]", h["text"]):
|
| 1777 |
refs.append(m)
|
| 1778 |
-
if refs:
|
| 1779 |
-
elems.append(Spacer(1,12))
|
| 1780 |
-
elems.append(Paragraph("References:", getSampleStyleSheet()['Heading2']))
|
| 1781 |
-
for r in set(refs):
|
| 1782 |
-
elems.append(Paragraph(f"- {r}", getSampleStyleSheet()['Normal']))
|
| 1783 |
|
| 1784 |
-
|
|
|
|
|
|
|
| 1785 |
buffer.seek(0)
|
|
|
|
| 1786 |
st.download_button("Download Classification PDF", buffer, file_name=fname, mime="application/pdf")
|
| 1787 |
else:
|
| 1788 |
st.info("No classification saved for this site yet. Use the Classifier page.")
|
|
@@ -1790,14 +1880,14 @@ def reports_page():
|
|
| 1790 |
# ---------------- Quick Report Form ----------------
|
| 1791 |
st.markdown("### Quick report form (edit values and request LLM analysis)")
|
| 1792 |
with st.form(key="report_quick_form"):
|
| 1793 |
-
cols = st.columns([2,1,1])
|
| 1794 |
cols[0].markdown("**Parameter**")
|
| 1795 |
cols[1].markdown("**Value**")
|
| 1796 |
cols[2].markdown("**Unit / Notes**")
|
| 1797 |
|
| 1798 |
inputs = {}
|
| 1799 |
for (fld, unit) in REPORT_FIELDS:
|
| 1800 |
-
c1, c2, c3 = st.columns([2,1,1])
|
| 1801 |
c1.markdown(f"**{fld}**")
|
| 1802 |
default_val = site.get(fld, "")
|
| 1803 |
inputs[fld] = c2.text_input(fld, value=str(default_val), label_visibility="collapsed", key=f"quick_{fld}")
|
|
@@ -1819,22 +1909,23 @@ def reports_page():
|
|
| 1819 |
"site_summary": {
|
| 1820 |
"USCS": site.get("USCS"), "AASHTO": site.get("AASHTO"), "GI": site.get("GI"),
|
| 1821 |
"Soil Profile": site.get("Soil Profile"),
|
| 1822 |
-
"Key lab results": [r.get("sampleId") for r in site.get("Laboratory Results",[])]
|
| 1823 |
},
|
| 1824 |
-
"inputs": {fld: site.get(fld,"Not provided") for fld,_ in REPORT_FIELDS}
|
| 1825 |
}
|
| 1826 |
prompt = (
|
| 1827 |
"You are GeoMate AI, an engineering assistant. Given the following site context and "
|
| 1828 |
"engineering parameters (some may be 'Not provided'), produce:\n1) short executive summary, "
|
| 1829 |
"2) geotechnical interpretation (classification, key risks), 3) recommended remedial/improvement "
|
| 1830 |
"options and 4) short design notes. Provide any numeric outputs in the format [[FIELD: value unit]].\n\n"
|
| 1831 |
-
f"Context: {json.dumps(context)}
|
| 1832 |
)
|
| 1833 |
-
resp =
|
| 1834 |
-
|
| 1835 |
st.markdown("**GeoMate analysis**")
|
| 1836 |
st.markdown(resp)
|
| 1837 |
|
|
|
|
| 1838 |
matches = re.findall(r"\[\[([A-Za-z0-9 _/-]+):\s*([0-9.+-eE]+)\s*([A-Za-z%\/]*)\]\]", resp)
|
| 1839 |
for m in matches:
|
| 1840 |
field, val, unit = m[0].strip(), m[1].strip(), m[2].strip()
|
|
@@ -1858,16 +1949,16 @@ def reports_page():
|
|
| 1858 |
state = site.get("report_convo_state", -1)
|
| 1859 |
if state >= 0:
|
| 1860 |
st.markdown("Chatbot will ask for missing fields. You can answer or type 'skip' to leave blank.")
|
| 1861 |
-
show_table = [(f, site.get(f, "Not provided")) for f,_ in REPORT_FIELDS]
|
| 1862 |
st.table(show_table)
|
| 1863 |
|
| 1864 |
if state < len(REPORT_FIELDS):
|
| 1865 |
field, unit = REPORT_FIELDS[state]
|
| 1866 |
ans = st.text_input(f"GeoMate — Please provide '{field}' ({unit})", key=f"report_in_{state}")
|
| 1867 |
-
c1, c2 = st.columns([1,1])
|
| 1868 |
with c1:
|
| 1869 |
if st.button("Submit", key=f"report_submit_{state}"):
|
| 1870 |
-
site[field] = ans.strip() if ans.strip() not in ("skip","don't know","dont know","na","n/a","") else "Not provided"
|
| 1871 |
site["report_convo_state"] = state + 1
|
| 1872 |
st.rerun()
|
| 1873 |
with c2:
|
|
@@ -1883,7 +1974,7 @@ def reports_page():
|
|
| 1883 |
faiss_refs = []
|
| 1884 |
if "rag_history" in st.session_state and site.get("Site ID") in st.session_state["rag_history"]:
|
| 1885 |
for h in st.session_state["rag_history"][site["Site ID"]]:
|
| 1886 |
-
if h["who"]=="bot" and "[ref:" in h["text"]:
|
| 1887 |
for m in re.findall(r"\[ref:([^\]]+)\]", h["text"]):
|
| 1888 |
faiss_refs.append(m)
|
| 1889 |
all_refs = list(set(ext_refs + faiss_refs))
|
|
@@ -1895,7 +1986,7 @@ def reports_page():
|
|
| 1895 |
|
| 1896 |
with open(outname, "rb") as f:
|
| 1897 |
st.download_button("Download Full Geotechnical Report", f, file_name=outname, mime="application/pdf")
|
| 1898 |
-
|
| 1899 |
# 8) Page router
|
| 1900 |
if "page" not in st.session_state:
|
| 1901 |
st.session_state["page"] = "Home"
|
|
|
|
| 727 |
st.markdown("© GeoMate • Advanced geotechnical copilot", unsafe_allow_html=True)
|
| 728 |
|
| 729 |
# 7) Pages implementation
|
import streamlit as st
import torch
from torchvision import transforms
from PIL import Image
import os

# -------------------- SESSION INIT --------------------
# Default route must match a branch handled by the page router; the router
# dispatches on "Home" (the sidebar also sets "Home"), so defaulting to
# "Landing" left a blank page on first load.
if "page" not in st.session_state:
    st.session_state["page"] = "Home"
# Minimal site record; other pages fill in USCS / AASHTO / GSD as the user works.
if "sites" not in st.session_state:
    st.session_state["sites"] = [
        {"Site Name": "Default Site", "USCS": None, "AASHTO": None, "GSD": None}
    ]
if "active_site" not in st.session_state:
    st.session_state["active_site"] = 0

# -------------------- MODEL PLACEHOLDER --------------------
# Load soil recognizer model here (replace with your trained PyTorch model)
# model = torch.load("soil_model.pth", map_location="cpu")
# model.eval()

# Placeholder soil classes used until a trained recognizer is wired in.
soil_classes = ["Gravel", "Sand", "Silt", "Clay", "Loam"]
# -------------------- LANDING PAGE --------------------
def _hero_image_src(path: str) -> str:
    """Return a browser-loadable src for *path*.

    Browsers cannot fetch server-local file paths (e.g. "/app/bg1.jpg")
    referenced from Streamlit-rendered HTML, so embed the image bytes as a
    base64 data URI when the file exists; otherwise fall back to the raw
    string (which may be a normal http(s) URL).
    """
    import base64
    try:
        with open(path, "rb") as fh:
            encoded = base64.b64encode(fh.read()).decode("ascii")
        return f"data:image/jpeg;base64,{encoded}"
    except OSError:
        return path


def landing_page():
    """Render the animated landing hero, quick-nav buttons and live site summary."""
    BACKGROUND_IMAGES = [
        "/app/bg1.jpg",  # replace with your images
        "/app/bg2.jpg",
        "/app/bg3.jpg",
    ]
    hero_srcs = [_hero_image_src(p) for p in BACKGROUND_IMAGES]

    # Inject CSS + animations (background cross-fade, overlay fade-in, buttons).
    st.markdown("""
    <style>
    body {background:#000}
    /* Background slider */
    @keyframes bgFade {
      0% {opacity:1;}
      30% {opacity:1;}
      33% {opacity:0;}
      97% {opacity:0;}
      100% {opacity:1;}
    }
    .hero {
      position: relative;
      border-radius: 16px;
      overflow: hidden;
      height: 420px;
      margin-bottom: 24px;
      box-shadow: 0 8px 28px rgba(0,0,0,0.65);
    }
    .hero img {
      position: absolute;
      top:0;left:0;
      width:100%;height:100%;
      object-fit: cover;
      animation: bgFade 15s infinite;
    }
    .hero img:nth-child(2) { animation-delay: 5s; }
    .hero img:nth-child(3) { animation-delay: 10s; }

    /* Overlay content */
    .overlay {
      position: absolute;
      top:0;left:0;
      width:100%;height:100%;
      background: rgba(15,15,15,0.65);
      display:flex;
      flex-direction:column;
      justify-content:center;
      padding: 40px;
      animation: fadeIn 1.5s ease-in-out;
    }
    @keyframes fadeIn {
      from {opacity:0; transform: translateY(20px);}
      to {opacity:1; transform: translateY(0);}
    }
    h1.hero-title {
      color:#FF8C00;
      font-size:44px;
      margin:0;
      font-weight:800;
    }
    p.hero-sub {
      color:#f0f0f0;
      font-size:18px;
      margin-top:12px;
      line-height:1.6;
    }
    .quick-btn {
      display:inline-block;
      margin:10px 8px 0 0;
      padding:12px 22px;
      border-radius:10px;
      background: linear-gradient(135deg,#ff7a00,#ff3a3a);
      color:white;
      font-weight:600;
      text-decoration:none;
      transition: all .3s ease;
    }
    .quick-btn:hover {
      transform: translateY(-3px);
      box-shadow:0 6px 18px rgba(0,0,0,0.4);
    }
    </style>
    """, unsafe_allow_html=True)

    # Hero with background slider + quick navigation buttons.
    # NOTE(review): the onclick postMessage('streamlit:setSessionState', ...)
    # hack is not a documented Streamlit API and may not navigate in current
    # Streamlit versions — confirm, or replace with st.button callbacks.
    st.markdown(f"""
    <div class="hero">
      <img src="{hero_srcs[0]}"/>
      <img src="{hero_srcs[1]}"/>
      <img src="{hero_srcs[2]}"/>

      <div class="overlay">
        <h1 class="hero-title">GeoMate V2</h1>
        <p class="hero-sub">
          AI Geotechnical Copilot — soil recognition, classification,
          locator (EE), RAG-powered Q&A, OCR, and dynamic reports.
        </p>
        <div style="margin-top:18px">
          <a href="#" onclick="window.parent.postMessage({{type: 'streamlit:setSessionState', key:'page', value:'Classifier'}}, '*')" class="quick-btn">🧪 Classifier</a>
          <a href="#" onclick="window.parent.postMessage({{type: 'streamlit:setSessionState', key:'page', value:'GSD'}}, '*')" class="quick-btn">📈 GSD</a>
          <a href="#" onclick="window.parent.postMessage({{type: 'streamlit:setSessionState', key:'page', value:'Locator'}}, '*')" class="quick-btn">🌍 Locator</a>
          <a href="#" onclick="window.parent.postMessage({{type: 'streamlit:setSessionState', key:'page', value:'RAG'}}, '*')" class="quick-btn">🤖 Ask</a>
          <a href="#" onclick="window.parent.postMessage({{type: 'streamlit:setSessionState', key:'page', value:'Reports'}}, '*')" class="quick-btn">📑 Reports</a>
        </div>
      </div>
    </div>
    """, unsafe_allow_html=True)

    # Live site summary card for the currently active site.
    site = st.session_state["sites"][st.session_state["active_site"]]
    st.markdown(f"""
    <div style="
        background:#1e1e1e;
        border-radius:14px;
        padding:20px;
        margin-top:16px;
        text-align:center;
        box-shadow:0 4px 14px rgba(0,0,0,0.5);
        animation: fadeIn 1.2s ease-in-out;
    ">
      <h3 style='color:#FF8C00; margin:0 0 10px'>📍 Live Site Summary</h3>
      <p style='color:#fff'>🏗️ <b>Site:</b> {site.get('Site Name')}</p>
      <p style='color:#fff'>🧱 <b>USCS:</b> {site.get('USCS') or "—"}</p>
      <p style='color:#fff'>🛣️ <b>AASHTO:</b> {site.get('AASHTO') or "—"}</p>
      <p style='color:#fff'>📊 <b>GSD saved:</b> {"✅ Yes" if site.get("GSD") else "❌ No"}</p>
    </div>
    """, unsafe_allow_html=True)
|
# -------------------- MAIN APP --------------------
# NOTE(review): st.set_page_config must be the FIRST Streamlit call in the
# script; placed this far down it raises StreamlitAPIException once anything
# above has rendered, so tolerate that instead of killing the whole app.
try:
    st.set_page_config(page_title="GeoMate V2", layout="wide")
except Exception:
    pass

# Route to the page selected in session state.
# Bug fix: the original read an undefined local name `page` (NameError);
# read the session key with a safe default, and treat the legacy "Landing"
# value as the home page.
_current_page = st.session_state.get("page", "Home")
if _current_page in ("Home", "Landing"):
    landing_page()
elif _current_page == "Soil recognizer":
    soil_recognizer_page()
elif _current_page == "Classifier":
    soil_classifier_page()
elif _current_page == "GSD":
    gsd_page()
elif _current_page == "Locator":
    locator_page()
elif _current_page == "RAG":
    rag_page()
elif _current_page == "Reports":
    reports_page()
else:
    # Unknown value (e.g. stale session) — fall back to the landing page.
    landing_page()
| 897 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 898 |
|
| 899 |
# Soil Classifier page (conversational, step-by-step)
|
| 900 |
def soil_classifier_page():
|
|
|
|
| 1386 |
pass
|
| 1387 |
|
| 1388 |
# ----------------------------
|
| 1389 |
+
from streamlit_folium import st_folium
|
| 1390 |
+
|
| 1391 |
+
# ... (your earlier dataset loading / EE init code) ...
|
| 1392 |
+
|
| 1393 |
+
# Render geemap map
|
| 1394 |
m.to_streamlit(height=600, responsive=True)
|
| 1395 |
|
| 1396 |
+
# Overlay a folium draw capture using st_folium on same map (transparent)
|
| 1397 |
+
# Note: we re-render using st_folium to capture geometry interactions.
|
| 1398 |
+
# We get a “last_active_drawing” from the st_folium output.
|
| 1399 |
|
| 1400 |
+
# Convert your geemap map to a folium map object for st_folium
|
| 1401 |
+
fol_map = m.folium_map # geemap.foliumap.Map wraps a base folium.Map
|
| 1402 |
+
result = st_folium(fol_map, width=700, height=600, returned_objects=["last_active_drawing"])
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1403 |
|
| 1404 |
+
# Attempt to get ROI from drawn feature
|
| 1405 |
+
roi = None
|
| 1406 |
+
if result and "last_active_drawing" in result and result["last_active_drawing"]:
|
| 1407 |
+
feat = result["last_active_drawing"]
|
| 1408 |
+
geom = feat.get("geometry")
|
| 1409 |
+
if geom:
|
| 1410 |
+
try:
|
| 1411 |
+
roi = ee.Geometry(geom)
|
| 1412 |
+
st.session_state["roi_geojson"] = feat
|
| 1413 |
+
st.success("✅ ROI captured!")
|
| 1414 |
+
except Exception as e:
|
| 1415 |
+
st.error(f"Failed to convert drawn geometry to ee.Geometry: {e}")
|
| 1416 |
|
| 1417 |
+
# Restore from session if none drawn now
|
| 1418 |
+
if roi is None and "roi_geojson" in st.session_state:
|
| 1419 |
+
saved = st.session_state["roi_geojson"]
|
| 1420 |
try:
|
| 1421 |
+
geom = saved.get("geometry")
|
| 1422 |
+
if geom:
|
| 1423 |
+
roi = ee.Geometry(geom)
|
| 1424 |
+
st.info("♻️ ROI restored from earlier session")
|
| 1425 |
except Exception as e:
|
| 1426 |
st.warning(f"Could not restore ROI: {e}")
|
| 1427 |
|
| 1428 |
+
# Then your “Compute Summaries” button logic:
|
| 1429 |
if st.button("Compute Summaries"):
|
| 1430 |
if roi is None:
|
| 1431 |
st.error("⚠️ No ROI found. Please draw a polygon/rectangle/circle and try again.")
|
| 1432 |
else:
|
| 1433 |
+
st.success("ROI ready — performing computations...")
|
|
|
|
|
|
|
| 1434 |
|
| 1435 |
chosen_soil_band = None
|
| 1436 |
if soil_img:
|
|
|
|
| 1613 |
# -------------------
|
| 1614 |
# Report fields (still needed in reports_page)
|
| 1615 |
# -------------------
|
| 1616 |
+
|
| 1617 |
REPORT_FIELDS = [
|
| 1618 |
("Load Bearing Capacity", "kPa or psf"),
|
| 1619 |
("Skin Shear Strength", "kPa"),
|
|
|
|
| 1643 |
from reportlab.lib.pagesizes import A4
|
| 1644 |
from reportlab.lib.units import mm
|
| 1645 |
|
| 1646 |
+
# =============================
|
| 1647 |
+
# LLM Helper (Groq API)
|
| 1648 |
+
# =============================
|
| 1649 |
+
import requests, json, os
|
| 1650 |
+
import streamlit as st
|
| 1651 |
+
from datetime import datetime
|
| 1652 |
+
import tempfile
|
| 1653 |
+
from typing import Dict, Any, Optional, List
|
| 1654 |
+
from reportlab.platypus import (
|
| 1655 |
+
SimpleDocTemplate, Paragraph, Spacer, PageBreak, Table, TableStyle, Image as RLImage
|
| 1656 |
+
)
|
| 1657 |
+
from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
|
| 1658 |
+
from reportlab.lib import colors
|
| 1659 |
+
from reportlab.lib.pagesizes import A4
|
| 1660 |
+
from reportlab.lib.units import mm
|
| 1661 |
+
|
| 1662 |
+
|
| 1663 |
+
def groq_llm_analyze(prompt: str, section_title: str,
                     model_name: str = None,
                     max_tokens: int = 500) -> str:
    """
    Query the Groq chat-completions API for a humanized explanation/analysis
    of one report section.

    Uses the session-configured model and GROQ_API_KEY (from st.secrets or
    st.session_state). Never raises: returns a "[LLM unavailable]" stub when
    no key is configured and an "[LLM error ...]" string on request failure,
    so report generation degrades gracefully.
    """
    # Resolve model: explicit argument > session override > default.
    if not model_name:
        model_name = st.session_state.get("llm_model", "groq_compound")

    # Resolve API key. Accessing st.secrets raises FileNotFoundError when no
    # secrets.toml exists (fresh local runs), so guard the membership test.
    key = None
    try:
        if "GROQ_API_KEY" in st.secrets:
            key = st.secrets["GROQ_API_KEY"]
    except Exception:
        key = None
    if not key:
        key = st.session_state.get("GROQ_API_KEY")

    if not key:
        return f"[LLM unavailable] {section_title}: {prompt[:200]}..."

    # Bug fix: Groq exposes its OpenAI-compatible chat endpoint under
    # /openai/v1/ — the bare /v1/ path used previously returns 404.
    url = "https://api.groq.com/openai/v1/chat/completions"
    headers = {"Authorization": f"Bearer {key}", "Content-Type": "application/json"}
    payload = {
        "model": model_name,
        "messages": [
            {"role": "system", "content": "You are GeoMate, a geotechnical engineering assistant. Respond professionally with concise analysis and insights."},
            {"role": "user", "content": f"Section: {section_title}\n\nInput: {prompt}\n\nWrite a professional engineering analysis for this section."}
        ],
        "temperature": 0.2,
        "max_tokens": max_tokens,
    }
    try:
        resp = requests.post(url, headers=headers, json=payload, timeout=60)
        resp.raise_for_status()
        data = resp.json()
        return data["choices"][0]["message"]["content"].strip()
    except Exception as e:
        return f"[LLM error in {section_title}] {e}"
|
| 1702 |
+
|
| 1703 |
+
|
| 1704 |
+
# =============================
|
| 1705 |
+
# Build Full Geotechnical Report
|
| 1706 |
+
# =============================
|
| 1707 |
def build_full_geotech_pdf(
    site: Dict[str, Any],
    filename: str,
    include_map_image: Optional[bytes] = None,
    ext_refs: Optional[List[str]] = None
):
    """
    Assemble the complete geotechnical investigation report as a PDF.

    Args:
        site: Site record (client/project metadata, lab results, classification
            outputs) used to populate every section; each section's content is
            narrated by groq_llm_analyze.
        filename: Output path (or writable file-like object) for reportlab's
            SimpleDocTemplate.
        include_map_image: Optional PNG bytes of a map snapshot to embed.
        ext_refs: Optional list of reference strings for the final section.

    Returns:
        filename, once the PDF has been built.
    """
    styles = getSampleStyleSheet()
    title_style = ParagraphStyle("title", parent=styles["Title"], fontSize=22,
                                 alignment=1, textColor=colors.HexColor("#FF6600"), spaceAfter=12)
    h1 = ParagraphStyle("h1", parent=styles["Heading1"], fontSize=14,
                        textColor=colors.HexColor("#1F4E79"), spaceBefore=10, spaceAfter=6)
    body = ParagraphStyle("body", parent=styles["BodyText"], fontSize=10.5, leading=13)

    doc = SimpleDocTemplate(filename, pagesize=A4,
                            leftMargin=18*mm, rightMargin=18*mm,
                            topMargin=18*mm, bottomMargin=18*mm)
    elems = []

    # Title Page
    elems.append(Paragraph("GEOTECHNICAL INVESTIGATION REPORT", title_style))
    elems.append(Spacer(1, 12))
    elems.append(Paragraph(f"<b>Client:</b> {site.get('Company Name','-')}", body))
    elems.append(Paragraph(f"<b>Contact:</b> {site.get('Company Contact','-')}", body))
    elems.append(Paragraph(f"<b>Project:</b> {site.get('Project Name','-')}", body))
    elems.append(Paragraph(f"<b>Site:</b> {site.get('Site Name','-')}", body))
    elems.append(Paragraph(f"<b>Date:</b> {datetime.today().strftime('%Y-%m-%d')}", body))
    elems.append(PageBreak())

    # TOC — the items already carry their own "N.0" numbers, so do not
    # prefix an enumerate counter (that produced "1. 1.0 Summary").
    elems.append(Paragraph("TABLE OF CONTENTS", h1))
    toc_items = [
        "1.0 Summary", "2.0 Introduction", "3.0 Site Description & Geology",
        "4.0 Field & Laboratory Testing", "5.0 Evaluation of Geotechnical Properties",
        "6.0 Provisional Classification", "7.0 Recommendations",
        "8.0 LLM Analysis", "9.0 Figures & Tables", "10.0 Appendices & References"
    ]
    for t in toc_items:
        elems.append(Paragraph(t, body))
    elems.append(PageBreak())

    # Sections with LLM calls
    elems.append(Paragraph("1.0 SUMMARY", h1))
    elems.append(Paragraph(groq_llm_analyze(json.dumps(site, indent=2), "Summary"), body))
    elems.append(PageBreak())

    elems.append(Paragraph("2.0 INTRODUCTION", h1))
    elems.append(Paragraph(groq_llm_analyze(site.get("Project Description", ""), "Introduction"), body))

    elems.append(Paragraph("3.0 SITE DESCRIPTION & GEOLOGY", h1))
    geology_text = f"Topo: {site.get('Topography')}, Drainage: {site.get('Drainage')}, Land Use: {site.get('Current Land Use')}, Geology: {site.get('Regional Geology')}"
    elems.append(Paragraph(groq_llm_analyze(geology_text, "Geology & Site Description"), body))
    elems.append(PageBreak())

    elems.append(Paragraph("4.0 FIELD & LABORATORY TESTING", h1))
    elems.append(Paragraph(groq_llm_analyze(json.dumps(site.get('Laboratory Results', []), indent=2), "Field & Lab Testing"), body))
    elems.append(PageBreak())

    elems.append(Paragraph("5.0 EVALUATION OF GEOTECHNICAL PROPERTIES", h1))
    elems.append(Paragraph(groq_llm_analyze(json.dumps(site, indent=2), "Evaluation of Properties"), body))

    elems.append(Paragraph("6.0 PROVISIONAL CLASSIFICATION", h1))
    class_text = f"USCS={site.get('USCS')}, AASHTO={site.get('AASHTO')}"
    elems.append(Paragraph(groq_llm_analyze(class_text, "Soil Classification"), body))

    elems.append(Paragraph("7.0 RECOMMENDATIONS", h1))
    elems.append(Paragraph(groq_llm_analyze(json.dumps(site, indent=2), "Recommendations"), body))

    elems.append(Paragraph("8.0 LLM ANALYSIS (GeoMate)", h1))
    elems.append(Paragraph(groq_llm_analyze(json.dumps(site, indent=2), "LLM Insights"), body))

    # Map snapshot — write the bytes to a *closed* temp file so reportlab can
    # re-open it at build time (an open NamedTemporaryFile cannot be reopened
    # on Windows, and leaving it open leaked the handle).
    if include_map_image:
        tmp = tempfile.NamedTemporaryFile(delete=False, suffix=".png")
        try:
            tmp.write(include_map_image)
        finally:
            tmp.close()
        elems.append(PageBreak())
        elems.append(Paragraph("9.0 MAP SNAPSHOT", h1))
        elems.append(RLImage(tmp.name, width=160*mm, height=90*mm))

    # References
    elems.append(PageBreak())
    elems.append(Paragraph("10.0 REFERENCES", h1))
    if ext_refs:
        for r in ext_refs:
            elems.append(Paragraph(f"- {r}", body))
    else:
        elems.append(Paragraph("No external references provided.", body))

    doc.build(elems)
    return filename
|
| 1797 |
+
|
| 1798 |
+
|
| 1799 |
+
# =============================
|
| 1800 |
+
# Build Classification Report
|
| 1801 |
+
# =============================
|
| 1802 |
+
def build_classification_pdf(
    site: Dict[str, Any],
    classification: Dict[str, Any],
    filename: str
):
    """
    Build a standalone soil-classification PDF report.

    Args:
        site: Site record; only the site name is used (cover page).
        classification: Saved classifier output (deterministic results,
            engineering characteristics, USCS/AASHTO decision paths).
        filename: Output path (or writable file-like object) for reportlab.

    Returns:
        filename, once the PDF has been built.
    """
    stylesheet = getSampleStyleSheet()
    title_style = ParagraphStyle("title", parent=stylesheet["Title"], fontSize=18,
                                 textColor=colors.HexColor("#FF6600"), alignment=1)
    h1 = ParagraphStyle("h1", parent=stylesheet["Heading1"], fontSize=12, textColor=colors.HexColor("#1F4E79"))
    body = ParagraphStyle("body", parent=stylesheet["BodyText"], fontSize=10)

    doc = SimpleDocTemplate(filename, pagesize=A4,
                            leftMargin=18*mm, rightMargin=18*mm,
                            topMargin=18*mm, bottomMargin=18*mm)

    # Cover page
    story = [
        Paragraph("SOIL CLASSIFICATION REPORT", title_style),
        Spacer(1, 12),
        Paragraph(f"Site: {site.get('Site Name','Unnamed')}", body),
        Paragraph(f"Date: {datetime.today().strftime('%Y-%m-%d')}", body),
        PageBreak(),
    ]

    # One (heading, LLM prompt, section title, page-break?) tuple per section;
    # the last section is not followed by a page break.
    dp_text = f"USCS path: {classification.get('USCS_decision_path')}, AASHTO path: {classification.get('AASHTO_decision_path')}"
    sections = [
        ("1.0 DETERMINISTIC RESULTS",
         json.dumps(classification, indent=2), "Deterministic Results", True),
        ("2.0 ENGINEERING CHARACTERISTICS",
         json.dumps(classification.get('engineering_characteristics', {}), indent=2),
         "Engineering Characteristics", True),
        ("3.0 DECISION PATHS", dp_text, "Decision Paths", True),
        ("4.0 LLM ANALYSIS",
         json.dumps(classification, indent=2), "LLM Analysis", False),
    ]
    for heading, prompt_text, section_name, wants_break in sections:
        story.append(Paragraph(heading, h1))
        story.append(Paragraph(groq_llm_analyze(prompt_text, section_name), body))
        if wants_break:
            story.append(PageBreak())

    doc.build(story)
    return filename
|
| 1844 |
|
| 1845 |
|
| 1846 |
+
# -------------------------------
|
| 1847 |
+
# Reports Page
|
| 1848 |
+
# -------------------------------
|
| 1849 |
# -------------------------------
|
| 1850 |
# Reports Page
|
| 1851 |
# -------------------------------
|
|
|
|
| 1859 |
st.markdown("You have a saved classification for this site.")
|
| 1860 |
if st.button("Generate Classification PDF"):
|
| 1861 |
fname = f"classification_{site['Site Name'].replace(' ','_')}.pdf"
|
| 1862 |
+
|
| 1863 |
+
# Collect references (rag_history + manual ones later)
|
| 1864 |
+
refs = []
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1865 |
if "rag_history" in st.session_state and site.get("Site ID") in st.session_state["rag_history"]:
|
|
|
|
| 1866 |
for h in st.session_state["rag_history"][site["Site ID"]]:
|
| 1867 |
+
if h["who"] == "bot" and "[ref:" in h["text"]:
|
| 1868 |
for m in re.findall(r"\[ref:([^\]]+)\]", h["text"]):
|
| 1869 |
refs.append(m)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1870 |
|
| 1871 |
+
# Build classification PDF
|
| 1872 |
+
buffer = io.BytesIO()
|
| 1873 |
+
build_classification_pdf(site, site.get("classifier_decision"), buffer)
|
| 1874 |
buffer.seek(0)
|
| 1875 |
+
|
| 1876 |
st.download_button("Download Classification PDF", buffer, file_name=fname, mime="application/pdf")
|
| 1877 |
else:
|
| 1878 |
st.info("No classification saved for this site yet. Use the Classifier page.")
|
|
|
|
| 1880 |
# ---------------- Quick Report Form ----------------
|
| 1881 |
st.markdown("### Quick report form (edit values and request LLM analysis)")
|
| 1882 |
with st.form(key="report_quick_form"):
|
| 1883 |
+
cols = st.columns([2, 1, 1])
|
| 1884 |
cols[0].markdown("**Parameter**")
|
| 1885 |
cols[1].markdown("**Value**")
|
| 1886 |
cols[2].markdown("**Unit / Notes**")
|
| 1887 |
|
| 1888 |
inputs = {}
|
| 1889 |
for (fld, unit) in REPORT_FIELDS:
|
| 1890 |
+
c1, c2, c3 = st.columns([2, 1, 1])
|
| 1891 |
c1.markdown(f"**{fld}**")
|
| 1892 |
default_val = site.get(fld, "")
|
| 1893 |
inputs[fld] = c2.text_input(fld, value=str(default_val), label_visibility="collapsed", key=f"quick_{fld}")
|
|
|
|
| 1909 |
"site_summary": {
|
| 1910 |
"USCS": site.get("USCS"), "AASHTO": site.get("AASHTO"), "GI": site.get("GI"),
|
| 1911 |
"Soil Profile": site.get("Soil Profile"),
|
| 1912 |
+
"Key lab results": [r.get("sampleId") for r in site.get("Laboratory Results", [])]
|
| 1913 |
},
|
| 1914 |
+
"inputs": {fld: site.get(fld, "Not provided") for fld, _ in REPORT_FIELDS}
|
| 1915 |
}
|
| 1916 |
prompt = (
|
| 1917 |
"You are GeoMate AI, an engineering assistant. Given the following site context and "
|
| 1918 |
"engineering parameters (some may be 'Not provided'), produce:\n1) short executive summary, "
|
| 1919 |
"2) geotechnical interpretation (classification, key risks), 3) recommended remedial/improvement "
|
| 1920 |
"options and 4) short design notes. Provide any numeric outputs in the format [[FIELD: value unit]].\n\n"
|
| 1921 |
+
f"Context: {json.dumps(context)}"
|
| 1922 |
)
|
| 1923 |
+
resp = groq_llm_analyze(prompt, section_title="GeoMate Analysis")
|
| 1924 |
+
|
| 1925 |
st.markdown("**GeoMate analysis**")
|
| 1926 |
st.markdown(resp)
|
| 1927 |
|
| 1928 |
+
# extract values like [[Bearing Capacity: 150 kN/m2]]
|
| 1929 |
matches = re.findall(r"\[\[([A-Za-z0-9 _/-]+):\s*([0-9.+-eE]+)\s*([A-Za-z%\/]*)\]\]", resp)
|
| 1930 |
for m in matches:
|
| 1931 |
field, val, unit = m[0].strip(), m[1].strip(), m[2].strip()
|
|
|
|
| 1949 |
state = site.get("report_convo_state", -1)
|
| 1950 |
if state >= 0:
|
| 1951 |
st.markdown("Chatbot will ask for missing fields. You can answer or type 'skip' to leave blank.")
|
| 1952 |
+
show_table = [(f, site.get(f, "Not provided")) for f, _ in REPORT_FIELDS]
|
| 1953 |
st.table(show_table)
|
| 1954 |
|
| 1955 |
if state < len(REPORT_FIELDS):
|
| 1956 |
field, unit = REPORT_FIELDS[state]
|
| 1957 |
ans = st.text_input(f"GeoMate — Please provide '{field}' ({unit})", key=f"report_in_{state}")
|
| 1958 |
+
c1, c2 = st.columns([1, 1])
|
| 1959 |
with c1:
|
| 1960 |
if st.button("Submit", key=f"report_submit_{state}"):
|
| 1961 |
+
site[field] = ans.strip() if ans.strip() not in ("skip", "don't know", "dont know", "na", "n/a", "") else "Not provided"
|
| 1962 |
site["report_convo_state"] = state + 1
|
| 1963 |
st.rerun()
|
| 1964 |
with c2:
|
|
|
|
| 1974 |
faiss_refs = []
|
| 1975 |
if "rag_history" in st.session_state and site.get("Site ID") in st.session_state["rag_history"]:
|
| 1976 |
for h in st.session_state["rag_history"][site["Site ID"]]:
|
| 1977 |
+
if h["who"] == "bot" and "[ref:" in h["text"]:
|
| 1978 |
for m in re.findall(r"\[ref:([^\]]+)\]", h["text"]):
|
| 1979 |
faiss_refs.append(m)
|
| 1980 |
all_refs = list(set(ext_refs + faiss_refs))
|
|
|
|
| 1986 |
|
| 1987 |
with open(outname, "rb") as f:
|
| 1988 |
st.download_button("Download Full Geotechnical Report", f, file_name=outname, mime="application/pdf")
|
| 1989 |
+
|
| 1990 |
# 8) Page router
|
| 1991 |
if "page" not in st.session_state:
|
| 1992 |
st.session_state["page"] = "Home"
|