Delete Text
Browse files
Text
DELETED
|
@@ -1,1800 +0,0 @@
|
|
| 1 |
-
# Part 1/4 of GeoMate V2 app.py
|
| 2 |
-
# -------------------------------------------------------
|
| 3 |
-
# Top: Streamlit config and imports.
|
| 4 |
-
# This part contains:
|
| 5 |
-
# - page config
|
| 6 |
-
# - imports
|
| 7 |
-
# - secrets checks
|
| 8 |
-
# - session_state initialization
|
| 9 |
-
# - sidebar + landing UI + core helpers
|
| 10 |
-
# - page function stubs (detailed implementations follow in Part 2-4)
|
| 11 |
-
# -------------------------------------------------------
|
| 12 |
-
|
| 13 |
-
# NOTE: paste Part1, then Part2, Part3, Part4 in order into a single app.py
|
| 14 |
-
|
| 15 |
-
# IMPORTANT: set_page_config must be the first Streamlit command
|
| 16 |
-
import streamlit as st
|
| 17 |
-
# Page chrome: wide layout with the sidebar open by default; must run before any other st.* call.
st.set_page_config(page_title="GeoMate V2", page_icon="🌍", layout="wide", initial_sidebar_state="expanded")
|
| 18 |
-
|
| 19 |
-
# Standard imports
|
| 20 |
-
import os
|
| 21 |
-
import io
|
| 22 |
-
import json
|
| 23 |
-
import time
|
| 24 |
-
import math
|
| 25 |
-
import base64
|
| 26 |
-
import textwrap
|
| 27 |
-
from typing import Any, Dict, List, Optional, Tuple
|
| 28 |
-
|
| 29 |
-
# Third-party imports (ensure in requirements.txt)
|
| 30 |
-
from streamlit_option_menu import option_menu
|
| 31 |
-
import matplotlib.pyplot as plt
|
| 32 |
-
|
| 33 |
-
# Attempt imports for optional integrations. If missing, app will show instruction in UI.
|
| 34 |
-
try:
|
| 35 |
-
import faiss
|
| 36 |
-
except Exception:
|
| 37 |
-
faiss = None
|
| 38 |
-
|
| 39 |
-
try:
|
| 40 |
-
import reportlab
|
| 41 |
-
from reportlab.lib import colors
|
| 42 |
-
from reportlab.lib.pagesizes import A4
|
| 43 |
-
from reportlab.lib.units import mm
|
| 44 |
-
from reportlab.platypus import SimpleDocTemplate, Paragraph, Spacer, Table, TableStyle, PageBreak, Flowable
|
| 45 |
-
from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
|
| 46 |
-
except Exception:
|
| 47 |
-
reportlab = None
|
| 48 |
-
|
| 49 |
-
try:
|
| 50 |
-
import geemap
|
| 51 |
-
import ee
|
| 52 |
-
except Exception:
|
| 53 |
-
geemap = None
|
| 54 |
-
ee = None
|
| 55 |
-
|
| 56 |
-
try:
|
| 57 |
-
import easyocr
|
| 58 |
-
except Exception:
|
| 59 |
-
easyocr = None
|
| 60 |
-
|
| 61 |
-
# For Groq client - if not installed it will be None and UI will show an instructive error
|
| 62 |
-
try:
|
| 63 |
-
from groq import Groq
|
| 64 |
-
except Exception:
|
| 65 |
-
Groq = None
|
| 66 |
-
|
| 67 |
-
# For sentence-transformers if used locally for embedding fallback
|
| 68 |
-
try:
|
| 69 |
-
from sentence_transformers import SentenceTransformer
|
| 70 |
-
except Exception:
|
| 71 |
-
SentenceTransformer = None
|
| 72 |
-
|
| 73 |
-
# --- Secrets and environment handling ---
|
| 74 |
-
# Hugging Face Spaces: secrets can be stored in Secrets and accessed via os.environ or st.secrets
|
| 75 |
-
def _get_env_secret(key: str) -> Optional[str]:
|
| 76 |
-
# Try environment first, then st.secrets
|
| 77 |
-
val = os.environ.get(key)
|
| 78 |
-
if val:
|
| 79 |
-
return val
|
| 80 |
-
try:
|
| 81 |
-
return st.secrets.get(key)
|
| 82 |
-
except Exception:
|
| 83 |
-
return None
|
| 84 |
-
|
| 85 |
-
# Required secret names (as requested)
REQUIRED_SECRETS = ["GROQ_API_KEY", "SERVICE_ACCOUNT", "EARTH_ENGINE_KEY"]

# Collect every required secret that is absent from both os.environ and st.secrets.
missing = []
for sname in REQUIRED_SECRETS:
    if not _get_env_secret(sname):
        missing.append(sname)

# If secrets missing - show friendly error and halt
if missing:
    # NOTE(review): the injected .secret-error CSS class is never referenced
    # by the markup below — confirm whether it is still needed.
    st.markdown(
        """
        <style>
        .secret-error { background: #200; border-left: 6px solid #FF7A00; padding: 12px; border-radius:8px; }
        </style>
        """, unsafe_allow_html=True
    )
    st.error(
        f"Missing required secrets: {', '.join(missing)}. "
        "Please add them in your Hugging Face Space Secrets or in environment variables and restart the app."
    )
    # st.stop() halts script execution so the rest of the app never runs without secrets.
    st.stop()
|
| 107 |
-
|
| 108 |
-
# If we get here, secrets exist - read into variables
GROQ_API_KEY = _get_env_secret("GROQ_API_KEY")      # Groq LLM API key
SERVICE_ACCOUNT = _get_env_secret("SERVICE_ACCOUNT")  # Earth Engine service-account email
EARTH_ENGINE_KEY = _get_env_secret("EARTH_ENGINE_KEY")  # expected to be JSON content or path
|
| 112 |
-
|
| 113 |
-
# Initialize Groq client (lazy) - we'll construct real client in RAG page when needed
def groq_client():
    """Return a Groq client built from GROQ_API_KEY.

    Raises:
        RuntimeError: if the optional ``groq`` SDK could not be imported.
    """
    if Groq is not None:
        return Groq(api_key=GROQ_API_KEY)
    raise RuntimeError("groq package not installed. Add 'groq' to requirements.txt.")
|
| 118 |
-
|
| 119 |
-
# --- Session state initialization ---
# Shorthand alias used throughout the app.
ss = st.session_state

# Initialize core session keys (only on first run; reruns keep existing values).
if "page" not in ss:
    ss.page = "Landing"
if "llm_model" not in ss:
    # default model choices (user can change in sidebar)
    ss.llm_model = "meta-llama/llama-4-maverick-17b-128e-instruct"
if "sites" not in ss:
    # sites is a list of site dictionaries (max 4)
    ss.sites = []
if "active_site_idx" not in ss:
    ss.active_site_idx = 0
if "faiss_loaded" not in ss:
    ss.faiss_loaded = False
if "faiss_index" not in ss:
    ss.faiss_index = None
if "faiss_meta" not in ss:
    ss.faiss_meta = None
|
| 139 |
-
|
| 140 |
-
# default styling variables
# Central colour palette referenced by every page's inline CSS.
THEME = {
    "bg": "#060606",         # app background
    "panel": "#0b0b0b",      # sidebar / card panels
    "accent": "#FF7A00",     # primary orange accent
    "accent2": "#C62828",    # secondary red accent
    "blue": "#1F4E79",       # cool contrast colour
    "muted": "#9aa7bf",      # secondary / muted text
    "bubble_bg": "#0f1724",  # chat bubble background
}

# helper: cap site count
MAX_SITES = 4
|
| 153 |
-
|
| 154 |
-
# --- Core data structure helpers ---
def new_empty_site(name: str = "Site") -> Dict[str, Any]:
    """Create a new site dict with all required fields pre-populated as None or sensible defaults."""
    site: Dict[str, Any] = {
        "Site Name": name,
        "Site Coordinates": "",
        "lat": None,
        "lon": None,
    }
    # Geotechnical / classification fields: unknown until measured or fetched.
    for field in (
        "Load Bearing Capacity",
        "Skin Shear Strength",
        "Relative Compaction",
        "Rate of Consolidation",
        "Nature of Construction",
        "Soil Profile",
        "Flood Data",
        "Seismic Data",
        "Topography",
        "GSD",
        "USCS",
        "AASHTO",
        "GI",
    ):
        site[field] = None
    # Conversation / UI bookkeeping.
    site.update({
        "classifier_inputs": {},
        "classifier_decision_path": "",
        "chat_history": [],  # list of dicts {"role":"bot"|"user", "text": "..."}
        "report_convo_state": 0,
        "map_snapshot": None,
        "ocr_pending": False,
    })
    return site
|
| 182 |
-
|
| 183 |
-
def get_active_site() -> Dict[str, Any]:
    """Return the active site dict. If none exists, create one."""
    if not ss.sites:
        ss.sites = [new_empty_site("Home")]
        ss.active_site_idx = 0
    # Keep the index inside [0, len-1] even if sites were removed elsewhere.
    ss.active_site_idx = min(max(ss.active_site_idx, 0), len(ss.sites) - 1)
    return ss.sites[ss.active_site_idx]
|
| 194 |
-
|
| 195 |
-
def save_active_site(site_dict: Dict[str, Any]):
    """Save the given dict into the active site slot."""
    if ss.sites:
        ss.sites[ss.active_site_idx] = site_dict
    else:
        # No sites yet: this dict becomes the first (and active) one.
        ss.sites = [site_dict]
        ss.active_site_idx = 0
|
| 202 |
-
|
| 203 |
-
def add_site(name: str):
    """Append a fresh site and make it active, enforcing the MAX_SITES cap."""
    if len(ss.sites) >= MAX_SITES:
        st.warning(f"Maximum of {MAX_SITES} sites reached.")
    else:
        ss.sites.append(new_empty_site(name))
        ss.active_site_idx = len(ss.sites) - 1
|
| 209 |
-
|
| 210 |
-
def remove_site(idx: int):
    """Delete site *idx* (no-op when out of range) and re-clamp the active index."""
    if 0 <= idx < len(ss.sites):
        ss.sites.pop(idx)
        if ss.active_site_idx >= len(ss.sites):
            ss.active_site_idx = max(0, len(ss.sites) - 1)
|
| 216 |
-
|
| 217 |
-
# small helper to pretty-print JSON for the site
def pretty_site_json(site: Dict[str, Any]) -> str:
    """Render *site* as indented JSON; fall back to str() if it cannot be serialized."""
    try:
        rendered = json.dumps(site, indent=2, default=str)
    except Exception:
        rendered = str(site)
    return rendered
|
| 223 |
-
|
| 224 |
-
# --- Sidebar: model selection, site management, nav (option_menu) ---
def sidebar_ui():
    """Render the sidebar: LLM model picker, site add/remove/select, an
    active-site JSON viewer, the page navigation menu and a session-reset
    button. Mutates st.session_state and triggers st.rerun() on navigation."""
    with st.sidebar:
        st.markdown(f"<h3 style='color:{THEME['accent']};margin:6px 0;'>GeoMate V2</h3>", unsafe_allow_html=True)
        # LLM model selector
        # NOTE(review): index=0 means the widget does not restore a previously
        # chosen ss.llm_model on a fresh widget state — confirm this is intended.
        st.markdown("**Select LLM model**")
        model_choice = st.selectbox(
            "Model",
            options=[
                "meta-llama/llama-4-maverick-17b-128e-instruct",
                "llama3-8b-8192",
                "mixtral-8x7b-32768",
                "gemma-7b-it"
            ],
            index=0,
            key="sidebar_model_select"
        )
        ss.llm_model = model_choice

        st.markdown("---")
        st.markdown("**Project Sites**")
        # Site add/remove
        colA, colB = st.columns([2,1])
        with colA:
            # unique key per render to avoid duplicate key error
            new_site_name = st.text_input("New site name", key="new_site_name_input")
        with colB:
            if st.button("➕ Add", key="add_site_btn"):
                # Fall back to an auto-generated name when the field is blank.
                name = new_site_name.strip() or f"Site {len(ss.sites)+1}"
                add_site(name)
                st.success(f"Added site: {name}")
                st.rerun()

        # list sites and active site selector
        if ss.sites:
            names = [s.get("Site Name", f"Site {i+1}") for i,s in enumerate(ss.sites)]
            asel = st.selectbox("Active Site", options=names, index=ss.active_site_idx, key="active_site_select")
            # map selectbox selection to index
            # NOTE(review): names.index() returns the first match, so duplicate
            # site names always resolve to the first site with that name.
            ss.active_site_idx = names.index(asel)
            # remove site button
            if st.button("🗑️ Remove active site", key="remove_site_btn"):
                idx = ss.active_site_idx
                removed_name = ss.sites[idx].get("Site Name","Site")
                remove_site(idx)
                st.success(f"Removed site {removed_name}")
                st.rerun()
        else:
            st.info("No sites yet. Add one above.")

        st.markdown("---")
        # expandable JSON viewer for active site
        with st.expander("Show active site JSON"):
            st.code(pretty_site_json(get_active_site()), language="json")

        st.markdown("---")
        # Navigation menu
        pages = ["Landing", "Soil Recognizer", "Soil Classifier", "GSD Curve", "Locator", "GeoMate Ask", "Reports"]
        icons = ["house", "image", "flask", "bar-chart", "geo-alt", "robot", "file-earmark-text"]
        choice = option_menu(
            menu_title=None,
            options=pages,
            icons=icons,
            menu_icon="cast",
            default_index=pages.index(ss.page) if ss.page in pages else 0,
            orientation="vertical",
            styles={
                "container": {"padding": "6px", "background-color": THEME["panel"]},
                "icon": {"color": THEME["accent"], "font-size": "18px"},
                "nav-link": {"font-size": "14px", "text-align": "left", "margin":"4px"},
                "nav-link-selected": {"background-color": THEME["accent"], "color": "white"},
            }
        )
        # Route to the selected page; rerun so the page body redraws immediately.
        if choice != ss.page:
            ss.page = choice
            st.rerun()

        st.markdown("---")
        if st.button("Reset Session (keep secrets)"):
            # Drop every session key except the current page and model choice.
            for k in list(ss.keys()):
                if k not in ["page", "llm_model"]:
                    del ss[k]
            # reinitialize
            ss.sites = [new_empty_site("Home")]
            ss.active_site_idx = 0
            st.success("Session reset.")
            st.rerun()

        st.markdown(f"<div style='color:{THEME['muted']};font-size:12px;padding-top:6px'>GeoMate V2 — Streamlit • Multi-site • RAG + Groq</div>", unsafe_allow_html=True)
|
| 312 |
-
|
| 313 |
-
# --- Landing page UI ---
def landing_ui():
    """Landing page: hero banner with feature list, quick-navigation buttons
    and a summary card for the active site. Purely presentational except for
    the quick-action buttons, which change ss.page and rerun."""
    st.markdown(
        f"""
        <style>
        .hero {{
            background: linear-gradient(180deg, rgba(255,122,0,0.06), rgba(255,122,0,0.02));
            border-radius: 12px;
            padding: 18px;
            border: 1px solid rgba(255,122,0,0.08);
        }}
        .globe {{
            width:120px;height:120px;border-radius:999px;
            background: conic-gradient({THEME['accent']}, {THEME['accent2']}, {THEME['blue']});
            box-shadow: 0 10px 40px rgba(0,0,0,0.6);
            display:inline-block;margin-right:18px;
        }}
        .cta {{
            background: linear-gradient(90deg, {THEME['accent']}, {THEME['accent2']});
            color: white;padding:10px 18px;border-radius:10px;border: none;
        }}
        </style>
        """
    , unsafe_allow_html=True)

    col1, col2 = st.columns([2,1])
    with col1:
        st.markdown("<div class='hero'>", unsafe_allow_html=True)
        # BUGFIX: these two wrappers were missing unsafe_allow_html=True, so
        # the raw "<div ...>" / "</div>" markup rendered as literal text.
        st.markdown("<div style='display:flex;align-items:center'>", unsafe_allow_html=True)
        st.markdown("<div class='globe'></div>", unsafe_allow_html=True)
        st.markdown("<div><h1 style='margin:0;color:#FF8C00'>GeoMate V2</h1><div style='color:#9aa7bf'>AI copilot for geotechnical engineering</div></div>", unsafe_allow_html=True)
        st.markdown("</div>", unsafe_allow_html=True)
        st.markdown("<hr/>", unsafe_allow_html=True)
        st.markdown("""
        <ul>
        <li><b>Soil Recognizer:</b> Image-based soil detection (upload photos or use OCR).</li>
        <li><b>Classifier:</b> Verbatim USCS & AASHTO logic (chatbot style).</li>
        <li><b>Locator:</b> Draw AOI on map, fetch soil/flood/seismic/topography via Earth Engine.</li>
        <li><b>GeoMate Ask:</b> RAG-enabled LLM (FAISS + Groq) with session memory per site.</li>
        <li><b>Reports:</b> Classification-only & full Geotechnical PDF reports (styled).</li>
        </ul>
        """, unsafe_allow_html=True)
        st.markdown("</div>", unsafe_allow_html=True)

        st.markdown("### Quick actions")
        c1, c2, c3 = st.columns(3)
        if c1.button("🧪 Classifier"):
            ss.page = "Soil Classifier"
            st.rerun()
        if c2.button("📊 GSD Curve"):
            ss.page = "GSD Curve"
            st.rerun()
        if c3.button("🌍 Locator"):
            ss.page = "Locator"
            st.rerun()

    with col2:
        st.markdown("<div style='padding:12px;border-radius:10px;background:#06121a'>", unsafe_allow_html=True)
        active = get_active_site()
        st.markdown(f"<div style='font-size:16px;color:{THEME['accent']}'><b>Active site</b></div>", unsafe_allow_html=True)
        st.markdown(f"<div style='font-size:14px'>{active.get('Site Name','-')}</div>", unsafe_allow_html=True)
        st.markdown("<hr/>", unsafe_allow_html=True)
        st.markdown(f"<div style='color:{THEME['muted']};font-size:13px'>Sites configured: <b>{len(ss.sites)}</b></div>", unsafe_allow_html=True)
        st.markdown(f"<div style='color:{THEME['muted']};font-size:13px'>Saved classifications: <b>{len([s for s in ss.sites if s.get('USCS') or s.get('AASHTO')])}</b></div>", unsafe_allow_html=True)
        st.markdown("</div>", unsafe_allow_html=True)

    st.markdown("---")
    st.info("Tip: Use the sidebar to switch pages or the quick buttons above. All data is stored in this session (up to 4 sites).")
|
| 381 |
-
|
| 382 |
-
# -----------------------------
# Page function stubs (detailed implementations in Parts 2-4)
# These are declared so the script runs as a whole when all parts are concatenated.
# -----------------------------

def soil_recognizer_ui():
    """Placeholder for the image-based soil recognizer (replaced in Part 2)."""
    st.header("Soil Recognizer")
    st.info("Upload an image of soil. OCR and image model will extract features and suggest soil type. (Implemented in Part 2/3)")

def soil_classifier_ui():
    """Placeholder for the chat-style soil classifier (replaced in Part 2)."""
    st.header("Soil Classifier")
    st.info("Chat-style classifier will be displayed here. (Detailed implementation in Part 2)")

def gsd_curve_ui():
    """Placeholder for the grain-size-distribution page (replaced in Part 2)."""
    st.header("GSD Curve")
    st.info("Plot GSD curves, compute D10/D30/D60, Cu, Cc. (Detailed implementation in Part 2)")

def locator_ui():
    """Placeholder for the Earth-Engine map locator (replaced in Part 3)."""
    st.header("Locator")
    st.info("Draw AOI, fetch soil, flood, seismic and topography data. (Implemented in Part 3)")

def rag_ui():
    """Placeholder for the RAG chatbot page (replaced in Part 4)."""
    st.header("GeoMate Ask (RAG + Groq)")
    st.info("RAG-based technical chatbot with memory per site. (Implemented in Part 4)")

def reports_ui():
    """Placeholder for the PDF report generator (replaced in Part 4)."""
    st.header("Reports")
    st.info("Generate Classification-only or Full Geotechnical PDF reports. (Implemented in Part 4)")
|
| 416 |
-
|
| 417 |
-
# -----------------------------
# Main app runner (will route to pages)
# -----------------------------
def main():
    """Entry point: draw the sidebar, then dispatch to the page stored in
    session state; unknown pages fall back to the Landing page."""
    sidebar_ui()
    routes = {
        "Landing": landing_ui,
        "Soil Recognizer": soil_recognizer_ui,
        "Soil Classifier": soil_classifier_ui,
        "GSD Curve": gsd_curve_ui,
        "Locator": locator_ui,
        "GeoMate Ask": rag_ui,
        "Reports": reports_ui,
    }
    current = ss.page if hasattr(ss, "page") else "Landing"
    handler = routes.get(current)
    if handler is None:
        st.warning("Unknown page. Returning to Landing.")
        ss.page = "Landing"
        handler = landing_ui
    handler()
|
| 442 |
-
|
| 443 |
-
# Run main
if __name__ == "__main__":
    # Streamlit re-executes the script top-to-bottom on every rerun; the guard
    # also allows importing this module without drawing the UI.
    main()
|
| 446 |
-
|
| 447 |
-
# End of Part 1/4
|
| 448 |
-
# Part 2/4 of GeoMate V2 app.py
|
| 449 |
-
# -------------------------------------------------------
|
| 450 |
-
# Implements:
|
| 451 |
-
# - Soil Recognizer (OCR + image-based placeholder)
|
| 452 |
-
# - Soil Classifier (chat-style Q&A, USCS + AASHTO logic)
|
| 453 |
-
# - GSD Curve Page (CSV upload + plotting + parameter calc)
|
| 454 |
-
# -------------------------------------------------------
|
| 455 |
-
|
| 456 |
-
def soil_recognizer_page():
    """Soil Recognizer page: classify an uploaded soil photo.

    Runs a local torch model ("soil_best_model.pth") when torch/torchvision and
    the weights file are available; otherwise falls back to a crude mean-colour
    heuristic. The prediction can be saved into the active site's "Soil Profile".
    """
    st.header("🖼️ Soil Recognizer")
    idx = st.session_state["active_site_idx"]

    # BUGFIX: the original referenced undefined names (mk, torch, np, T), which
    # raised NameError on both code paths. Heavy deps are now imported lazily
    # so the page degrades gracefully when they are missing.
    import numpy as np
    try:
        import torch
        import torchvision.transforms as T
    except Exception:
        torch = None
        T = None

    st.write("Upload a soil sample photo. If a trained model is available, it will infer the soil class.")

    uploaded = st.file_uploader(
        "Upload sample photo",
        type=["png", "jpg", "jpeg"],
        key=f"sr_upload_{idx}",  # per-site key so widgets don't collide
    )

    if not uploaded:
        return

    img = Image.open(uploaded).convert("RGB")
    st.image(img, use_column_width=True)

    if torch and T and os.path.exists("soil_best_model.pth"):
        st.info("✅ Model found — running inference (CPU).")

        try:
            # --- Load model safely (CPU only) ---
            model = torch.load("soil_best_model.pth", map_location="cpu")
            if hasattr(model, "eval"):
                model.eval()

            # --- Preprocess: 224x224 + ImageNet normalisation ---
            transform = T.Compose([
                T.Resize((224, 224)),
                T.ToTensor(),
                T.Normalize([0.485, 0.456, 0.406],
                            [0.229, 0.224, 0.225])
            ])
            inp = transform(img).unsqueeze(0)

            with st.spinner("Running model..."):
                logits = model(inp)
                probs = torch.softmax(logits, dim=-1).detach().cpu().numpy()[0]

            # assumes the checkpoint was trained with these 5 classes in this
            # order — TODO confirm against the training script.
            labels = ["Sand", "Silt", "Clay", "Gravel", "Peat"]
            best = labels[int(np.argmax(probs))]
            conf = float(np.max(probs))

            st.success(f"Predicted: **{best}** (confidence {conf:.2%})")

            if st.button("Save to site", key=f"sr_save_btn_{idx}"):
                st.session_state["sites"][idx]["Soil Profile"] = best
                st.success("✅ Saved soil profile to site.")

        except Exception as e:
            st.error(f"❌ Model inference failed: {e}")

    else:
        # --- Heuristic Fallback: mean RGB of a downsampled image ---
        st.warning("⚠️ No trained model file found — running heuristic fallback.")
        arr = np.array(img.resize((50, 50))).mean(axis=(0, 1))
        r, g, b = arr
        # Bright, yellowish images are treated as sand; everything else as silt.
        pred = "Sand" if r > 120 and g > 110 else "Silt"

        st.success(f"Fallback prediction: **{pred}**")

        if st.button("Save fallback to site", key=f"sr_save_fallback_{idx}"):
            st.session_state["sites"][idx]["Soil Profile"] = pred
            st.success("✅ Saved fallback result to site.")
|
| 522 |
-
|
| 523 |
-
# -------------------------------------------------------
|
| 524 |
-
# Soil Classifier
|
| 525 |
-
# -------------------------------------------------------
|
| 526 |
-
|
| 527 |
-
# -------------------------------------------------------
|
| 528 |
-
# Soil Classifier (Chatbot Style, with OCR + LLM)
|
| 529 |
-
# -------------------------------------------------------
|
| 530 |
-
import pytesseract
|
| 531 |
-
import tempfile
|
| 532 |
-
from PIL import Image
|
| 533 |
-
from typing import Dict, Any, Tuple
|
| 534 |
-
|
| 535 |
-
# ---------- Utilities ----------
|
| 536 |
-
def run_ocr_on_image(uploaded_file) -> Dict[str, float]:
    """Run OCR on an uploaded soil test sheet and extract LL, PL and sieve %s.

    Returns a dict with any of the keys "LL", "PL", "P200", "P40" that could
    be parsed from the OCR text. Parsing is keyword-based and intentionally
    lenient; lines whose only number is the sieve label itself yield nothing.
    """
    img = Image.open(uploaded_file).convert("L")  # grayscale helps OCR accuracy
    text = pytesseract.image_to_string(img)
    extracted: Dict[str, float] = {}

    def _first_number(line: str) -> Optional[float]:
        """First numeric token on the line, skipping '#'-prefixed sieve labels."""
        for tok in line.split():
            cleaned = tok.strip("%:=,()")
            if cleaned.startswith("#"):
                continue  # sieve designation like "#200", not a measured value
            try:
                return float(cleaned)
            except ValueError:
                # BUGFIX: the original used bare "except: pass", which also
                # swallowed SystemExit/KeyboardInterrupt; only parse failures
                # should be ignored.
                continue
        return None

    # Very naive keyword parsing - refine later.
    for line in text.splitlines():
        upper = line.upper()
        value = _first_number(line)
        if value is None:
            continue
        if "LL" in upper:
            extracted["LL"] = value
        if "PL" in upper:
            extracted["PL"] = value
        # NOTE(review): substring matching means a line containing "200" or
        # "40" anywhere triggers these keys — tighten if OCR sheets vary.
        if "#200" in line or "200" in line:
            extracted["P200"] = value
        if "#40" in line or "40" in line:
            extracted["P40"] = value
    return extracted
|
| 557 |
-
|
| 558 |
-
|
| 559 |
-
# ---------- Classification Logic ----------
def classify_aashto(inputs: Dict[str, Any]) -> Tuple[str, str, str]:
    """Full AASHTO logic + Group Index + explanation.

    Keys read from *inputs*: P200, P40, P10 (percent passing), LL, PL.
    Returns (group symbol, short description, group index as a string).
    """
    from math import floor
    P2 = inputs.get("P200", 0.0)  # % passing the #200 sieve (fines)
    P4 = inputs.get("P40", 0.0)   # % passing the #40 sieve
    LL = inputs.get("LL", 0.0)
    PL = inputs.get("PL", 0.0)
    PI = LL - PL
    result = "A-0"
    desc = ""

    if P2 <= 35:  # granular materials
        if P2 <= 15 and P4 <= 30 and PI <= 6:
            P1 = inputs.get("P10", 0.0)
            if P1 <= 50:
                result = "A-1-a"; desc = "Granular soil, excellent subgrade."
            else:
                result = "A-1-b"; desc = "Granular soil with fines, still good subgrade."
        elif P2 <= 25 and P4 <= 50 and PI <= 6:
            result = "A-1-b"; desc = "Granular soil with more fines, fair performance."
        elif P2 <= 10 and P4 >= 51 and PI <= 0:
            # BUGFIX: A-3 (clean fine sand) was previously the "else" of a
            # branch guarded by "elif P2 <= 35", which is always true inside
            # the outer "if P2 <= 35" — so A-3 was unreachable. Criteria per
            # AASHTO M 145: <=10% fines, >=51% passing #40, non-plastic.
            result = "A-3"; desc = "Clean sands, excellent highway subgrade."
        else:
            if LL <= 40 and PI <= 10: result = "A-2-4"; desc = "Granular soil with silt, fair subgrade."
            elif LL >= 41 and PI <= 10: result = "A-2-5"; desc = "Granular soil, high LL silt content."
            elif LL <= 40 and PI >= 11: result = "A-2-6"; desc = "Granular soil with clayey fines."
            else: result = "A-2-7"; desc = "Granular soil, poor clayey fines."
    else:  # silt-clay materials (more than 35% fines)
        if LL <= 40 and PI <= 10: result = "A-4"; desc = "Silt, fair to poor subgrade."
        elif LL >= 41 and PI <= 10: result = "A-5"; desc = "Elastic silt, very poor subgrade."
        elif LL <= 40 and PI >= 11: result = "A-6"; desc = "Clay of low plasticity, poor subgrade."
        else:
            if PI <= (LL - 30): result = "A-7-5"; desc = "Clay, high LL, fair plasticity."
            else: result = "A-7-6"; desc = "Clay, high plasticity, very poor subgrade."

    # Group Index per AASHTO M 145: GI = (F-35)[0.2+0.005(LL-40)] + 0.01(F-15)(PI-10),
    # with each partial term clamped to its standard range (the clamps below make
    # the expression equivalent to the bracketed form for non-negative terms).
    a = min(max(P2 - 35, 0), 40)
    b = min(max(P2 - 15, 0), 40)
    c = min(max(LL - 40, 0), 20)
    d = min(max(PI - 10, 0), 20)
    GI = floor(0.2 * a + 0.005 * a * c + 0.01 * b * d)

    return result, desc, str(GI)
|
| 604 |
-
|
| 605 |
-
|
| 606 |
-
def classify_uscs(inputs: Dict[str, Any]) -> Tuple[str, str]:
    """Full USCS logic with Cu, Cc and PI.

    Keys read from *inputs*: organic, P200, P4 (falls back to P40),
    D60/D30/D10, LL, PL. Returns (USCS symbol, short description).
    """
    P2 = inputs.get("P200", 0.0)
    if inputs.get("organic", False):
        return "Pt", "Peat / Organic soil — compressible, poor engineering properties."

    if P2 <= 50:  # Coarse-grained
        # BUGFIX: the classifier chatbot and OCR both store the #40-sieve value
        # under "P40"; the old code read only "P4" (never set anywhere), so
        # every coarse soil fell into the gravel branch. "P40" is used as a
        # stand-in when "P4" is absent — TODO confirm whether a true #4-sieve
        # value should be collected instead (USCS splits gravel/sand at #4).
        P4 = inputs.get("P4", inputs.get("P40", 0.0))
        D60, D30, D10 = inputs.get("D60", 0.0), inputs.get("D30", 0.0), inputs.get("D10", 0.0)
        LL, PL = inputs.get("LL", 0.0), inputs.get("PL", 0.0)
        PI = LL - PL
        Cu, Cc = 0, 0
        if all([D60, D30, D10]):
            Cu = D60 / D10 if D10 else 0
            Cc = (D30 ** 2) / (D10 * D60) if D10 * D60 else 0

        # NOTE(review): GP/SP (poorly graded, clean) are never produced; as in
        # the original logic those cases collapse into GM/GC or SM/SC.
        if P4 <= 50:  # Gravels
            if Cu >= 4 and 1 <= Cc <= 3: return "GW", "Well-graded gravel, excellent foundation material."
            elif PI <= 7: return "GM", "Silty gravel, moderate quality."
            else: return "GC", "Clayey gravel, reduced drainage."
        else:  # Sands
            if Cu >= 6 and 1 <= Cc <= 3: return "SW", "Well-graded sand, excellent engineering soil."
            elif PI <= 7: return "SM", "Silty sand, fair to moderate."
            else: return "SC", "Clayey sand, reduced strength."
    else:  # Fine-grained
        LL, PL = inputs.get("LL", 0.0), inputs.get("PL", 0.0)
        PI = LL - PL
        if LL < 50:
            if PI <= 7: return "ML", "Low plasticity silt."
            else: return "CL", "Low plasticity clay."
        else:
            # A-line check: PI below 0.73*(LL-20) => silt, at/above => clay.
            if PI < 0.73 * (LL - 20): return "MH", "Elastic silt."
            else: return "CH", "High plasticity clay, compressible, weak foundation soil."
|
| 640 |
-
|
| 641 |
-
|
| 642 |
-
# ---------- Main Chatbot ----------
def soil_classifier_ui():
    """Chat-style soil classifier.

    Walks the user through organic/P200/P40/LL/PL questions via a small state
    machine stored on the active site, supports OCR pre-fill from an uploaded
    test sheet, then runs USCS + AASHTO classification and an LLM expansion.
    State encoding: 0=greet, 1..5=question index, -1=finished.
    """
    st.header("🤖 Soil Classifier (Chatbot + OCR + LLM)")
    site = get_active_site()

    # Lazily initialise the per-site conversation state.
    if "classifier_state" not in site:
        site["classifier_state"] = 0
        site["classifier_inputs"] = {}
        site["classifier_chat"] = []

    chat = site["classifier_chat"]

    def add_bot(msg: str):
        chat.append(["bot", msg])

    def add_user(msg: str):
        chat.append(["user", msg])

    # Render chat bubbles (bot: accent border, user: darker background).
    for role, msg in chat:
        bubble_color = THEME["bubble_bg"] if role=="bot" else "#1f2a44"
        border = f"2px solid {THEME['accent']}" if role=="bot" else "1px solid #333"
        st.markdown(f"""
        <div style='margin:6px 0;padding:8px 12px;background:{bubble_color};
        border-radius:14px;border:{border};max-width:80%;'>
        <b>{'🤖' if role=='bot' else '👤'}:</b> {msg}
        </div>
        """, unsafe_allow_html=True)

    state = site["classifier_state"]
    inputs = site["classifier_inputs"]

    # OCR Upload: pre-fills numeric inputs from a photographed test sheet.
    uploaded = st.file_uploader("📄 Upload soil test sheet (OCR)", type=["jpg","png","jpeg"])
    if uploaded:
        ocr_data = run_ocr_on_image(uploaded)
        inputs.update(ocr_data)
        add_bot(f"OCR detected values: {ocr_data}")

    # Initial greeting (only once, before any exchange).
    if state == 0 and not chat:
        add_bot("Hello 👋 I am GeoMate Soil Classifier. Let's begin. Is the soil organic (spongy, dark, odorous)? (y/n)")
        site["classifier_state"] = 1
        save_active_site(site)

    # User input; the widget key embeds the state so it resets each question.
    user_in = st.text_input("Your answer:", key=f"classifier_input_{state}")
    if st.button("➡️", key=f"classifier_submit_{state}"):
        if user_in.strip():
            add_user(user_in.strip())

            # Logic branches
            if state == 1:
                if user_in.lower().startswith("y"):
                    inputs["organic"] = True
                    uscs, desc1 = classify_uscs(inputs)
                    aashto, desc2, gi = classify_aashto(inputs)
                    add_bot(f"Classification complete ✅ USCS={uscs} ({desc1}), AASHTO={aashto} (GI={gi}, {desc2})")
                    # Expand with LLM
                    full_report = query_llm_for_soil(uscs, aashto, desc1, desc2, gi)
                    add_bot(full_report)
                    site["classifier_state"] = -1
                else:
                    inputs["organic"] = False
                    add_bot("What is % passing #200 sieve?")
                    site["classifier_state"] = 2
            elif state == 2:
                # BUGFIX: the original used bare "except:", which also swallowed
                # SystemExit/KeyboardInterrupt; only a failed float() parse
                # should fall back to 0.0.
                try: inputs["P200"] = float(user_in)
                except ValueError: inputs["P200"] = 0.0
                add_bot("What is % passing #40 sieve?")
                site["classifier_state"] = 3
            elif state == 3:
                try: inputs["P40"] = float(user_in)
                except ValueError: inputs["P40"] = 0.0
                add_bot("Enter Liquid Limit (LL):")
                site["classifier_state"] = 4
            elif state == 4:
                try: inputs["LL"] = float(user_in)
                except ValueError: inputs["LL"] = 0.0
                add_bot("Enter Plastic Limit (PL):")
                site["classifier_state"] = 5
            elif state == 5:
                try: inputs["PL"] = float(user_in)
                except ValueError: inputs["PL"] = 0.0
                uscs, desc1 = classify_uscs(inputs)
                aashto, desc2, gi = classify_aashto(inputs)
                add_bot(f"Classification complete ✅ USCS={uscs} ({desc1}), AASHTO={aashto} (GI={gi}, {desc2})")
                full_report = query_llm_for_soil(uscs, aashto, desc1, desc2, gi)
                add_bot(full_report)
                site["classifier_state"] = -1

            save_active_site(site)
            st.rerun()

    if site["classifier_state"] == -1:
        if st.button("📄 Export Classification Report"):
            site["classification_report"] = chat
            st.success("Report saved. Generate full report in Reports Page.")
|
| 739 |
-
|
| 740 |
-
|
| 741 |
-
# ---------- LLM Expansion ----------
|
| 742 |
-
def query_llm_for_soil(uscs_code, aashto_code, desc1, desc2, gi):
    """Ask Groq LLM to expand classification into detailed engineering report.

    Args:
        uscs_code: USCS group symbol (e.g. "SC").
        aashto_code: AASHTO group (e.g. "A-6").
        desc1: human description of the USCS result.
        desc2: human description of the AASHTO result.
        gi: AASHTO group index.

    Returns:
        The LLM's free-text report (whatever groq_chat returns).
    """
    # Prompt embeds both classification results plus the group index.
    prompt = f"""
    Soil Classification Results:
    - USCS: {uscs_code} ({desc1})
    - AASHTO: {aashto_code} ({desc2}), Group Index={gi}

    Provide:
    1. Engineering characteristics (compressibility, permeability, shear strength, settlement, frost susceptibility).
    2. Construction applications (foundations, embankments, pavements).
    3. Typical stabilization or improvement methods.
    4. Warnings or limitations.

    Be detailed but concise, use professional engineering language.
    """
    # groq_chat() is the Groq LLM wrapper defined elsewhere in this file.
    return groq_chat(prompt)  # assumes you have groq_chat() wrapper
|
| 758 |
-
# -------------------------------------------------------
|
| 759 |
-
# GSD Curve Page
|
| 760 |
-
# -------------------------------------------------------
|
| 761 |
-
def gsd_curve_ui():
    """Grain Size Distribution page.

    Plots uploaded sieve data (CSV: size [mm], % passing), interpolates the
    characteristic diameters D10/D30/D60, derives Cu and Cc, and persists the
    results on the active site. Also offers manual D-value entry.
    """
    st.header("📊 Grain Size Distribution (GSD) Curve")
    site = get_active_site()

    st.info("Upload sieve analysis data (CSV: Sieve size [mm], %Passing). Or manually enter D-values.")

    uploaded = st.file_uploader("Upload CSV", type=["csv"], key="gsd_csv")
    data = None
    if uploaded:
        df = pd.read_csv(uploaded)
        st.write(df)
        try:
            # Column 0 = sieve size (mm), column 1 = % passing.
            sizes = df.iloc[:, 0].values
            passing = df.iloc[:, 1].values
            data = (sizes, passing)
        except Exception as e:
            st.error(f"Error parsing CSV: {e}")

    if data is not None:
        sizes, passing = data
        # Semilog plot: grain size on a log axis, % passing linear.
        fig, ax = plt.subplots()
        ax.semilogx(sizes, passing, marker="o", color="orange")
        ax.set_xlabel("Sieve Size (mm, log scale)")
        ax.set_ylabel("% Passing")
        ax.set_title("Grain Size Distribution Curve")
        ax.grid(True, which="both", linestyle="--", linewidth=0.5)
        st.pyplot(fig)

        # Interpolate characteristic diameters from the curve.
        # NOTE(review): np.interp requires ascending x; this assumes %passing
        # decreases down the CSV so the reversed arrays ascend — TODO confirm.
        def interpD(target):
            return np.interp(target, passing[::-1], sizes[::-1])

        D10 = interpD(10)
        D30 = interpD(30)
        D60 = interpD(60)
        # Guard divisions for truncated / gap-graded curves (D10 == 0).
        Cu = D60 / D10 if D10 > 0 else None
        Cc = (D30 ** 2) / (D60 * D10) if D10 > 0 and D60 > 0 else None

        st.write(f"D10={D10:.3f} mm, D30={D30:.3f} mm, D60={D60:.3f} mm")
        # BUGFIX: formatting None with :.2f raised TypeError when D10 == 0;
        # show "n/a" instead of crashing the page.
        cu_txt = f"{Cu:.2f}" if Cu is not None else "n/a"
        cc_txt = f"{Cc:.2f}" if Cc is not None else "n/a"
        st.write(f"Cu={cu_txt}, Cc={cc_txt}")

        site["GSD"] = {"D10": D10, "D30": D30, "D60": D60, "Cu": Cu, "Cc": Cc}
        save_active_site(site)

    with st.expander("Manual entry"):
        c1, c2, c3 = st.columns(3)
        D10 = c1.number_input("D10 (mm)", value=0.0)
        D30 = c2.number_input("D30 (mm)", value=0.0)
        D60 = c3.number_input("D60 (mm)", value=0.0)
        if st.button("Save D-values"):
            site["GSD"] = {"D10": D10, "D30": D30, "D60": D60}
            save_active_site(site)
            st.success("Saved to site.")
|
| 814 |
-
|
| 815 |
-
# End of Part 2/4
|
| 816 |
-
# Part 3/4 of GeoMate V2 app.py
|
| 817 |
-
# -------------------------------------------------------
|
| 818 |
-
# Implements:
|
| 819 |
-
# - Locator Page with EE + geemap
|
| 820 |
-
# - Extracts flood, seismic, soil, topography data
|
| 821 |
-
# -------------------------------------------------------
|
| 822 |
-
|
| 823 |
-
# Locator module — Earth Engine + geemap integration
|
| 824 |
-
# Paste into your Streamlit app file. Requires: streamlit, geemap, earthengine-api, pillow, pyppeteer (optional)
|
| 825 |
-
import os
|
| 826 |
-
import json
|
| 827 |
-
import tempfile
|
| 828 |
-
import traceback
|
| 829 |
-
import base64
|
| 830 |
-
import time
|
| 831 |
-
from math import isnan
|
| 832 |
-
|
| 833 |
-
import streamlit as st
|
| 834 |
-
from PIL import Image
|
| 835 |
-
import io
|
| 836 |
-
|
| 837 |
-
# Optional libs (fail gracefully)
|
| 838 |
-
try:
|
| 839 |
-
import geemap.foliumap as geemap
|
| 840 |
-
except Exception:
|
| 841 |
-
geemap = None
|
| 842 |
-
|
| 843 |
-
try:
|
| 844 |
-
import ee
|
| 845 |
-
except Exception:
|
| 846 |
-
ee = None
|
| 847 |
-
|
| 848 |
-
# Optional headless browser to capture PNG from HTML (pyppeteer)
|
| 849 |
-
try:
|
| 850 |
-
import asyncio
|
| 851 |
-
from pyppeteer import launch as pyppeteer_launch
|
| 852 |
-
_HAS_PYPPETEER = True
|
| 853 |
-
except Exception:
|
| 854 |
-
_HAS_PYPPETEER = False
|
| 855 |
-
|
| 856 |
-
# -------------------------
|
| 857 |
-
# Multi-site state helpers
|
| 858 |
-
# -------------------------
|
| 859 |
-
# Directory for persisted site records; override with GEOMATE_DATA_DIR.
# BUGFIX: the original wrapped the lambda in st.cache_resource, which makes
# DATA_DIR a cached-function wrapper (a callable), not a string — so
# os.makedirs(DATA_DIR) below would raise. A plain env lookup is correct.
DATA_DIR = os.environ.get("GEOMATE_DATA_DIR", "./data")
os.makedirs(DATA_DIR, exist_ok=True)
# All sites are stored together in one JSON file.
SITES_FILE = os.path.join(DATA_DIR, "sites.json")
|
| 862 |
-
|
| 863 |
-
def load_sites():
    """Load the list of site records from SITES_FILE.

    On first run the file does not exist yet: a single default (empty) site
    is written to disk and returned. If the file exists but cannot be
    parsed, an empty list is returned.
    """
    if os.path.exists(SITES_FILE):
        try:
            with open(SITES_FILE, "r") as fh:
                return json.load(fh)
        except Exception:
            # Corrupt or unreadable store — surface as "no sites".
            return []

    # First run: seed the store with one blank site record.
    default = [{
        "Site Name": "Site 1",
        "Coordinates": None,
        "lat": None,
        "lon": None,
        "Load Bearing Capacity": None,
        "Skin Shear Strength": None,
        "Relative Compaction": None,
        "Rate of Consolidation": None,
        "Nature of Construction": None,
        "Soil Profile": None,
        "Flood Data": None,
        "Seismic Data": None,
        "Topography": None,
        "GSD": None,
        "USCS": None,
        "AASHTO": None,
        "GI": None,
        "classifier_inputs": {},
        "classifier_decision_path": "",
        "chat_history": [],
        "report_convo_state": 0,
        "map_snapshot": None
    }]
    with open(SITES_FILE, "w") as fh:
        json.dump(default, fh, indent=2)
    return default
|
| 898 |
-
|
| 899 |
-
def save_sites(sites):
    """Persist the complete sites list to SITES_FILE as indented JSON."""
    with open(SITES_FILE, "w") as fh:
        json.dump(sites, fh, indent=2)
|
| 902 |
-
|
| 903 |
-
def get_active_site_index():
    """Return the session's active site index, clamped to a valid range.

    Falls back to (and stores) index 0 when the saved index is out of
    bounds for the current sites list.
    """
    sites = load_sites()
    idx = st.session_state.get("active_site_index", 0)
    if not (0 <= idx < len(sites)):
        st.session_state["active_site_index"] = 0
        idx = 0
    return idx
|
| 911 |
-
|
| 912 |
-
def get_active_site():
    """Return the record of the currently active site."""
    return load_sites()[get_active_site_index()]
|
| 916 |
-
|
| 917 |
-
def save_active_site(site_obj):
    """Write site_obj into the active slot and persist the whole list."""
    sites = load_sites()
    sites[get_active_site_index()] = site_obj
    save_sites(sites)
|
| 922 |
-
|
| 923 |
-
# -------------------------
|
| 924 |
-
# Earth Engine initialization
|
| 925 |
-
# -------------------------
|
| 926 |
-
# Module-level Earth Engine status flags, maintained by init_earth_engine():
# EE_READY is True once ee.Initialize succeeded; EE_INIT_ERROR holds the
# last failure message (or None).
EE_READY = False
EE_INIT_ERROR = None
|
| 928 |
-
|
| 929 |
-
def init_earth_engine():
    """Initialize the Earth Engine client from Streamlit secrets.

    Expects two secrets:
      - SERVICE_ACCOUNT: the service-account email.
      - EARTH_ENGINE_KEY: the service-account JSON key, supplied as a dict,
        a JSON string, or a filesystem path to a JSON file.

    Sets the module globals EE_READY / EE_INIT_ERROR and returns EE_READY.
    Never raises: all failures are captured into EE_INIT_ERROR.
    """
    global EE_READY, EE_INIT_ERROR
    if ee is None:
        EE_INIT_ERROR = "earthengine-api not installed."
        EE_READY = False
        return EE_READY
    try:
        # Expect secrets: EARTH_ENGINE_KEY (json string or dict) and SERVICE_ACCOUNT (string)
        if "EARTH_ENGINE_KEY" not in st.secrets or "SERVICE_ACCOUNT" not in st.secrets:
            EE_INIT_ERROR = "Missing EARTH_ENGINE_KEY or SERVICE_ACCOUNT in Streamlit secrets."
            EE_READY = False
            return EE_READY

        raw_key = st.secrets["EARTH_ENGINE_KEY"]
        service_account = st.secrets["SERVICE_ACCOUNT"]

        # raw_key might be dict or a JSON string
        if isinstance(raw_key, str):
            try:
                key_json = json.loads(raw_key)
            except Exception:
                # maybe it's already a path
                try:
                    with open(raw_key, "r") as f:
                        key_json = json.load(f)
                except Exception as ex:
                    EE_INIT_ERROR = f"Could not parse EARTH_ENGINE_KEY: {ex}"
                    EE_READY = False
                    return EE_READY
        elif isinstance(raw_key, dict):
            key_json = raw_key
        else:
            EE_INIT_ERROR = "EARTH_ENGINE_KEY must be JSON string or dict."
            EE_READY = False
            return EE_READY

        # Write to temp file (required by ServiceAccountCredentials)
        tmp = tempfile.NamedTemporaryFile(delete=False, suffix=".json")
        tmp.write(json.dumps(key_json).encode("utf-8"))
        tmp.flush()
        tmp.close()
        key_path = tmp.name

        # Initialize
        creds = ee.ServiceAccountCredentials(service_account, key_path)
        ee.Initialize(creds)
        # remove temp file — best-effort; key material should not linger on disk
        try:
            os.remove(key_path)
        except Exception:
            pass
        EE_READY = True
        EE_INIT_ERROR = None
        return True
    except Exception as e:
        EE_INIT_ERROR = str(e)
        EE_READY = False
        return False
|
| 987 |
-
|
| 988 |
-
# Try to init at import
|
| 989 |
-
# Try to init at import time; pages re-attempt later (e.g. locator_ui)
# if this first attempt fails.
if not EE_READY:
    init_earth_engine()
|
| 991 |
-
|
| 992 |
-
# -------------------------
|
| 993 |
-
# Coordinate normalization
|
| 994 |
-
# -------------------------
|
| 995 |
-
def normalize_coords_for_ee(coords):
    """Normalize AOI coordinates into ee.Geometry.Polygon form.

    Accepts a flat ring ([[a, b], ...]) or a GeoJSON-style nested polygon
    ([[[a, b], ...]]), with vertices in either lon/lat or lat/lon order,
    and returns the nested [[ [lon, lat], ... ]] structure Earth Engine
    expects.

    Raises ValueError for empty or malformed input.
    """
    if not coords:
        raise ValueError("Empty coordinates")

    # Unwrap GeoJSON-style nesting: keep only the outer ring.
    ring = coords[0] if isinstance(coords[0][0], (list, tuple)) else coords

    # Probe the first vertex to guess the coordinate ordering.
    head = ring[0]
    try:
        a = float(head[0])
        b = float(head[1])
    except Exception:
        raise ValueError("Invalid coordinate format")

    looks_lonlat = (-180 <= a <= 180) and (-90 <= b <= 90)
    if looks_lonlat:
        # Already lon/lat — just coerce to floats.
        normalized = [[float(p), float(q)] for p, q in ring]
    else:
        # lat/lon ordering (or unrecognized): swap each pair to lon/lat.
        normalized = [[float(q), float(p)] for p, q in ring]

    return [normalized]
|
| 1026 |
-
|
| 1027 |
-
# -------------------------
|
| 1028 |
-
# Earth Engine dataset wrappers
|
| 1029 |
-
# -------------------------
|
| 1030 |
-
def safe_reduce_region(image, geom, scale):
    """Mean-reduce `image` over `geom` at the given scale (metres/pixel).

    Returns the materialized stats dict from getInfo(), or {"error": ...}
    instead of raising — EE calls can fail on quota, auth, or network.
    """
    try:
        stats = image.reduceRegion(reducer=ee.Reducer.mean(), geometry=geom, scale=scale, maxPixels=1e9)
        # getInfo() pulls the server-side ee.Dictionary into a plain dict.
        return stats.getInfo()
    except Exception as e:
        return {"error": str(e)}
|
| 1036 |
-
|
| 1037 |
-
def fetch_flood_data(aoi_geom):
    """
    Uses JRC Global Surface Water MonthlyHistory water band to compute mean water occurrence.
    Returns a dict with mean 'water' value (0..1) representing fraction of months with water.
    """
    try:
        coll = ee.ImageCollection("JRC/GSW1_4/MonthlyHistory").select("water")
        # Temporal mean over the monthly history: per-pixel fraction of
        # observed months with surface water.
        img = coll.mean()
        info = safe_reduce_region(img, aoi_geom, scale=30)
        # Convert water mean (if present) to percentage occurrence
        if info and "water" in info and isinstance(info["water"], (int, float)):
            water_mean = info["water"]
            # clamp and produce percentage
            try:
                water_pct = float(water_mean) * 100.0
            except:
                water_pct = None
            return {"water_mean": water_mean, "water_percent": water_pct}
        # No usable 'water' key — return whatever safe_reduce_region gave
        # (could be an {"error": ...} dict).
        return info
    except Exception as e:
        return {"error": str(e)}
|
| 1058 |
-
|
| 1059 |
-
def fetch_seismic_data(aoi_geom):
    """
    Fetch PGA (Peak ground acceleration) mean from a global PGA dataset.
    """
    try:
        # USGS 2013 global hazard map: PGA with 10% exceedance in 50 years.
        pga_img = ee.Image("USGS/GME/hazards/seismic/2013_PGA_10pct_50yr")
        # Units depend on the dataset; the raw reduced value is returned for
        # the caller (humanize_seismic_info) to interpret.
        return safe_reduce_region(pga_img, aoi_geom, scale=1000)
    except Exception as e:
        return {"error": str(e)}
|
| 1070 |
-
|
| 1071 |
-
def fetch_topography_data(aoi_geom):
    """Mean SRTM elevation statistics over the AOI (90 m scale)."""
    try:
        srtm = ee.Image("USGS/SRTMGL1_003")
        return safe_reduce_region(srtm, aoi_geom, scale=90)
    except Exception as e:
        return {"error": str(e)}
|
| 1078 |
-
|
| 1079 |
-
# Map OpenLandMap soil codes to human text (basic)
# Integer USDA texture-class codes as emitted by the
# OpenLandMap/SOL/SOL_TEXTURE-CLASS_USDA-TT_M/v02 image → readable names.
# Codes absent from this map fall back to "Texture code N" in fetch_soil_data.
_SOIL_CODE_MAP = {
    0: "No data",
    1: "Sand (USDA texture class)",
    2: "Loamy sand",
    3: "Sandy loam",
    4: "Loam",
    5: "Silt loam",
    6: "Silt",
    7: "Silty clay loam",
    8: "Silty clay",
    9: "Clay loam",
    10: "Sandy clay loam",
    11: "Sandy clay",
    12: "Clay",
    # ... extend mapping as needed for your dataset coding
}
|
| 1096 |
-
|
| 1097 |
-
def fetch_soil_data(aoi_geom):
    """Dominant USDA soil-texture class over the AOI.

    Uses the OpenLandMap texture-class image with a modal reducer at 250 m.
    Returns {"mode_code", "description"} on success, the raw stats dict if
    the expected band is missing, or {"error": ...} on failure.
    """
    try:
        img = ee.Image("OpenLandMap/SOL/SOL_TEXTURE-CLASS_USDA-TT_M/v02")
        # BUGFIX: reduceRegion returns a *server-side* ee.Dictionary; it must
        # be materialized with getInfo() before the "b1" membership test and
        # indexing below behave like a plain dict (cf. safe_reduce_region).
        stats = img.reduceRegion(
            reducer=ee.Reducer.mode(), geometry=aoi_geom, scale=250, maxPixels=1e9
        ).getInfo()
        if stats and "b1" in stats and stats["b1"] is not None:
            code = stats["b1"]
            human = _SOIL_CODE_MAP.get(int(code), f"Texture code {code}")
            return {"mode_code": code, "description": human}
        return stats
    except Exception as e:
        return {"error": str(e)}
|
| 1108 |
-
|
| 1109 |
-
# -------------------------
|
| 1110 |
-
# Map snapshot utilities
|
| 1111 |
-
# -------------------------
|
| 1112 |
-
def save_map_html_and_try_png(map_obj, site_name="site"):
    """
    Save map HTML and attempt to render PNG using pyppeteer (headless chromium).
    Returns dict with keys: html (str), png_bytes (bytes | None), error (str | None)

    site_name is currently unused — presumably intended for naming the
    snapshot file; confirm before removing.
    """
    result = {"html": None, "png_bytes": None, "error": None}
    try:
        html = map_obj.to_html()
        result["html"] = html
    except Exception as e:
        # Without HTML there is nothing to screenshot — bail out early.
        result["error"] = f"Failed to generate HTML from map: {e}"
        return result

    # Try to render PNG using pyppeteer if available
    if _HAS_PYPPETEER:
        try:
            # Write HTML to a temp file so chromium can load it via file://.
            tmp_html = tempfile.NamedTemporaryFile(delete=False, suffix=".html")
            tmp_html.write(html.encode("utf-8"))
            tmp_html.flush()
            tmp_html.close()
            tmp_path = tmp_html.name

            async def render():
                browser = await pyppeteer_launch(args=['--no-sandbox'], headless=True)
                page = await browser.newPage()
                await page.setViewport({"width": 1200, "height": 800})
                await page.goto("file://" + tmp_path)
                await asyncio.sleep(1.5)  # let leaflet tiles load
                png = await page.screenshot({'fullPage': True})
                await browser.close()
                return png

            # NOTE(review): asyncio.get_event_loop() is deprecated in modern
            # Python and may fail inside Streamlit's thread — consider
            # asyncio.run(); confirm against the deployed Python version.
            png_bytes = asyncio.get_event_loop().run_until_complete(render())
            result["png_bytes"] = png_bytes
            # cleanup
            try:
                os.remove(tmp_path)
            except:
                pass
            return result
        except Exception as e:
            result["error"] = f"pyppeteer PNG capture failed: {e}"
            # fall through to return HTML-only
            return result
    else:
        result["error"] = "pyppeteer not available to render PNG. HTML saved."
        return result
|
| 1159 |
-
|
| 1160 |
-
# -------------------------
|
| 1161 |
-
# Human-friendly conversions
|
| 1162 |
-
# -------------------------
|
| 1163 |
-
def humanize_flood_info(flood_dict):
    """Turn raw flood-extraction output into a one-line human summary.

    Accepts the dict produced by fetch_flood_data ({'water_mean', ...},
    an {'error': ...} dict, or None) and returns readable text.
    """
    if not flood_dict:
        return "No flood data"
    if "error" in flood_dict:
        return f"Error fetching flood data: {flood_dict['error']}"

    water_mean = flood_dict.get("water_mean")
    water_pct = flood_dict.get("water_percent")

    # Derive the percentage from the mean when only the mean is present.
    if water_pct is None and water_mean is not None:
        try:
            water_pct = float(water_mean) * 100.0
        except Exception:
            water_pct = None

    if water_pct is None:
        # Nothing numeric to interpret — show the raw payload.
        return f"Flood data (raw): {flood_dict}"

    # Rough severity bands on mean historic water occurrence (0-100%).
    if water_pct >= 50:
        severity = "Very high"
    elif water_pct >= 20:
        severity = "High"
    elif water_pct >= 5:
        severity = "Moderate"
    else:
        severity = "Low"
    return f"Historic water occurrence mean: {water_pct:.2f}% → {severity} flood occurrence in AOI."
|
| 1188 |
-
|
| 1189 |
-
def humanize_seismic_info(seis_dict):
    """Summarize seismic (PGA) extraction results as readable text.

    Reports the first numeric entry in the dict with a rough hazard band;
    falls back to echoing the raw dict when nothing numeric is found.
    """
    if not seis_dict:
        return "No seismic data"
    if "error" in seis_dict:
        return f"Error fetching seismic data: {seis_dict['error']}"

    # Find the first value that parses as a number and classify it.
    for key, raw in seis_dict.items():
        try:
            value = float(raw)
        except Exception:
            continue
        # Rough PGA bands (assuming g units; raw value shown regardless).
        if value <= 0.02:
            band = "Low"
        elif value <= 0.05:
            band = "Moderate"
        elif value <= 0.15:
            band = "High"
        else:
            band = "Very high"
        return f"Seismic: {key} = {value:.4f} (approx. {band} PGA)."

    return f"Seismic raw data: {seis_dict}"
|
| 1215 |
-
|
| 1216 |
-
# -------------------------
|
| 1217 |
-
# Locator UI (main)
|
| 1218 |
-
# -------------------------
|
| 1219 |
-
def locator_ui():
    """Locator page: draw an AOI on a geemap map and extract flood, seismic,
    topography and soil data from Earth Engine into the active site.

    Also handles site naming/creation and stores an HTML/PNG map snapshot.
    """
    st.header("🌍 Locator (Earth Engine Powered)")
    sites = load_sites()
    idx = get_active_site_index()
    site = sites[idx]

    # site switching UI
    cols = st.columns([3, 1, 1])
    with cols[0]:
        st.markdown("**Active site:**")
        site_name = st.text_input("Site name", value=site.get("Site Name", f"Site {idx+1}"))
    with cols[1]:
        if st.button("Save site name"):
            site["Site Name"] = site_name
            save_active_site(site)
            st.success("Site name updated.")
    with cols[2]:
        if st.button("New site"):
            # Append a new blank site record and switch to it.
            new_site = {
                "Site Name": f"Site {len(sites)+1}",
                "Coordinates": None,
                "lat": None,
                "lon": None,
                "Load Bearing Capacity": None,
                "Skin Shear Strength": None,
                "Relative Compaction": None,
                "Rate of Consolidation": None,
                "Nature of Construction": None,
                "Soil Profile": None,
                "Flood Data": None,
                "Seismic Data": None,
                "Topography": None,
                "GSD": None,
                "USCS": None,
                "AASHTO": None,
                "GI": None,
                "classifier_inputs": {},
                "classifier_decision_path": "",
                "chat_history": [],
                "report_convo_state": 0,
                "map_snapshot": None
            }
            sites.append(new_site)
            save_sites(sites)
            st.session_state["active_site_index"] = len(sites) - 1
            # FIX: st.experimental_rerun() is deprecated/removed in modern
            # Streamlit; the rest of this file already uses st.rerun().
            st.rerun()

    # Attempt to init EE if not ready
    if not EE_READY:
        init_earth_engine()

    if geemap is None:
        st.error("geemap is not installed in the environment. Install via `pip install geemap` and earthengine-api.")
        return
    if ee is None:
        st.error("earthengine-api not installed. Install it and provide service account key in secrets.")
        return

    # Center map on saved coords if available (default: central India).
    center = [20, 78]
    if site.get("Coordinates"):
        try:
            coords = site.get("Coordinates")
            # Take the first vertex (of the outer ring, if nested).
            if isinstance(coords[0][0], (list, tuple)):
                pt = coords[0][0]
            else:
                pt = coords[0]
            # Detect ordering: geemap centers expect [lat, lon].
            a, b = float(pt[0]), float(pt[1])
            if -90 <= a <= 90 and -180 <= b <= 180:
                center = [a, b]
            else:
                center = [b, a]
        except Exception:
            pass

    m = geemap.Map(center=center, zoom=6, plugin_Draw=True, Draw_export=True, locate_control=True)
    try:
        m.add_basemap("HYBRID")
    except Exception:
        pass

    with st.expander("📌 Locator instructions"):
        st.markdown("""
        - Use the draw tools to mark AOI (polygon/rectangle/circle).
        - Click the crosshair to auto-locate.
        - After drawing, click **Get AOI & Extract Data**.
        - If Earth Engine is unavailable extraction will be skipped but AOI will be saved.
        """)

    # Render the map into Streamlit
    try:
        m.to_streamlit(height=520)
    except Exception as e:
        st.error(f"Map rendering failed: {e}")
        return

    # Button to extract
    if st.button("📥 Get AOI & Extract Data"):
        try:
            # Retrieve drawn bounds / geojson from whichever API is present.
            coords = None
            try:
                coords = m.user_roi_bounds()  # returns list of [lat,lon] or [lon,lat]
            except Exception:
                try:
                    geojson = m.get_drawn_geojson() if hasattr(m, "get_drawn_geojson") else None
                    if geojson and "features" in geojson and len(geojson["features"]) > 0:
                        coords = geojson["features"][0]["geometry"]["coordinates"]
                except Exception:
                    coords = None

            if not coords:
                st.warning("No AOI found. Draw a polygon/rectangle and try again.")
            else:
                # Normalize and form EE polygon
                try:
                    lonlat_poly = normalize_coords_for_ee(coords)
                    aoi = ee.Geometry.Polygon(lonlat_poly)
                except Exception as e:
                    st.error(f"Coordinate normalization failed: {e}")
                    st.stop()

                # Save coordinates to site
                site["Coordinates"] = coords
                save_active_site(site)

                # If EE ready, fetch data
                if EE_READY:
                    with st.spinner("Querying Earth Engine (flood, seismic, topo, soil)..."):
                        flood = fetch_flood_data(aoi)
                        seismic = fetch_seismic_data(aoi)
                        topo = fetch_topography_data(aoi)
                        soil = fetch_soil_data(aoi)

                    # Store raw results on the site record.
                    site["Flood Data"] = flood
                    site["Seismic Data"] = seismic
                    site["Topography"] = topo
                    site["Soil Profile"] = soil
                    save_active_site(site)

                    st.success("✅ Data extracted and saved to site.")
                    st.markdown("### Extracted summary")
                    st.write(humanize_flood_info(flood))
                    st.write(humanize_seismic_info(seismic))
                    st.write(f"Topography (SRTM mean): {topo}")
                    st.write(f"Soil profile (mode): {soil}")

                    # Map snapshot (HTML + optional PNG)
                    snap = save_map_html_and_try_png(m, site.get("Site Name", "site"))
                    if snap.get("html"):
                        site["map_snapshot_html"] = snap["html"]
                    if snap.get("png_bytes"):
                        site["map_snapshot_png"] = base64.b64encode(snap["png_bytes"]).decode("utf-8")
                    save_active_site(site)
                    if snap.get("png_bytes"):
                        st.image(Image.open(io.BytesIO(snap["png_bytes"])), caption="Map snapshot (PNG)", use_column_width=True)
                    else:
                        st.info("Map PNG snapshot not available; HTML snapshot saved in site data.")
                else:
                    st.info("Earth Engine unavailable — AOI saved locally.")
                    st.success("AOI saved to site.")
        except Exception as e:
            st.error(f"Extraction failed: {e}\n{traceback.format_exc()}")

    # Show cached extraction if present
    if site.get("Flood Data") or site.get("Seismic Data") or site.get("Topography") or site.get("Soil Profile"):
        st.markdown("### Cached site dataset (last extraction)")
        st.json({
            "Flood": site.get("Flood Data"),
            "Seismic": site.get("Seismic Data"),
            "Topography": site.get("Topography"),
            "Soil": site.get("Soil Profile")
        })

    # Save back site (name updated)
    site["Site Name"] = site_name
    save_active_site(site)
|
| 1401 |
-
|
| 1402 |
-
# Part 4/4 of GeoMate V2 app.py
|
| 1403 |
-
# -------------------------------------------------------
|
| 1404 |
-
# Implements:
|
| 1405 |
-
# - RAG: FAISS + Groq chat (per-site memory)
|
| 1406 |
-
# - Entity extraction placeholder to auto-save parameters from chat
|
| 1407 |
-
# - Reports: Classification-only PDF + Full Geotechnical Report PDF
|
| 1408 |
-
# - Final UI glue: Reports page and main app routing
|
| 1409 |
-
# -------------------------------------------------------
|
| 1410 |
-
|
| 1411 |
-
# --------------------------
|
| 1412 |
-
# GeoMate Ask (RAG Chat with OCR) UI
|
| 1413 |
-
# --------------------------
|
| 1414 |
-
import pytesseract
|
| 1415 |
-
from PIL import Image
|
| 1416 |
-
import fitz # PyMuPDF for PDF OCR
|
| 1417 |
-
|
| 1418 |
-
def extract_text_from_file(uploaded_file):
    """OCR utility for images and PDFs.

    Images go straight through pytesseract. PDFs are read with PyMuPDF
    page by page; a page with no extractable text layer is rasterized and
    OCR'd instead. Returns "" on unsupported types or failure (after
    surfacing the error in the Streamlit UI).
    """
    try:
        if uploaded_file.type in ["image/png", "image/jpeg", "image/jpg"]:
            img = Image.open(uploaded_file)
            text = pytesseract.image_to_string(img)
            return text.strip()
        elif uploaded_file.type == "application/pdf":
            text_pages = []
            pdf = fitz.open(stream=uploaded_file.read(), filetype="pdf")
            for page in pdf:
                text_pages.append(page.get_text("text"))
                # fallback to OCR if no text
                if not text_pages[-1].strip():
                    pix = page.get_pixmap()
                    img = Image.frombytes("RGB", [pix.width, pix.height], pix.samples)
                    text_pages[-1] = pytesseract.image_to_string(img)
            return "\n".join(text_pages).strip()
        else:
            # Unsupported MIME type: return empty text silently.
            return ""
    except Exception as e:
        st.error(f"OCR failed: {e}")
        return ""
|
| 1441 |
-
|
| 1442 |
-
def rag_ui():
    """RAG chat page: FAISS retrieval + Groq answers, with optional OCR input.

    Chat history is persisted per site inside the site JSON. Relies on
    helpers defined elsewhere in this file: load_faiss_db_from_zip,
    rag_retrieve_and_answer, update_site_description_from_text, and the
    globals `THEME` and `ss` (presumably an alias of st.session_state —
    TODO confirm against Part 1).
    """
    st.header("🤖 GeoMate Ask — RAG + Groq (per-site memory + OCR)")
    site = get_active_site()

    # Older site records may predate the chat feature.
    if "chat_history" not in site:
        site["chat_history"] = []

    st.markdown(
        "**Context:** The RAG uses your FAISS knowledge base (upload .zip in this page), "
        "Groq LLM for answers, and optional OCR from uploaded images/PDFs. "
        "Chat history is saved per site."
    )

    # FAISS DB upload (one-time)
    with st.expander("📂 Upload FAISS DB (zip with index.faiss + meta.pkl)"):
        uploaded = st.file_uploader("Upload faiss_books_db.zip", type=["zip"], key="faiss_db_uploader")
        if uploaded:
            # Spool the zip to disk so the loader can open it by path.
            tmpf = tempfile.NamedTemporaryFile(delete=False, suffix=".zip")
            tmpf.write(uploaded.getvalue())
            tmpf.flush()
            ix, meta = load_faiss_db_from_zip(tmpf.name)
            if ix is not None:
                # NOTE(review): `ss` is assumed to alias st.session_state — confirm.
                ss["faiss_index"] = ix
                ss["faiss_meta"] = meta
                st.success("✅ FAISS DB loaded.")

    # Render chat history as styled HTML bubbles (bot left, user right).
    # NOTE(review): THEME['bubble_bg'] must exist — the THEME defined in the
    # reports section only carries 'accent'; confirm which THEME is in scope.
    for turn in site.get("chat_history", []):
        role, text = turn.get("role"), turn.get("text")
        if role == "bot":
            st.markdown(
                f"<div style='background:{THEME['bubble_bg']};padding:8px;border-radius:12px;border:2px solid {THEME['accent']};'>"
                f"<b>🤖 GeoMate:</b> {text}</div>", unsafe_allow_html=True
            )
        else:
            st.markdown(
                f"<div style='background:#1a2436;color:#fff;padding:8px;border-radius:12px;margin-left:40px;'>"
                f"<b>👤 You:</b> {text}</div>", unsafe_allow_html=True
            )

    # Input + OCR
    user_q = st.text_input("Ask GeoMate:", key="geomate_rag_input")
    uploaded_ocr = st.file_uploader("Optional OCR input (image/pdf)", type=["png","jpg","jpeg","pdf"], key="rag_ocr_uploader")

    if st.button("Ask", key="geomate_rag_button"):
        if not user_q.strip() and not uploaded_ocr:
            st.warning("Please type a question or upload a file.")
        else:
            query_text = user_q.strip()
            if uploaded_ocr:
                with st.spinner("Running OCR..."):
                    ocr_text = extract_text_from_file(uploaded_ocr)
                    # OCR output is appended to the question as extra context.
                    if ocr_text:
                        query_text += "\n\n[OCR Extracted Content]\n" + ocr_text

            # Append user query
            site["chat_history"].append({"role":"user","text":query_text, "time":datetime.utcnow().isoformat()})
            save_active_site(site)

            # Retrieve + call LLM
            with st.spinner("Retrieving context and calling LLM..."):
                answer = rag_retrieve_and_answer(query_text, topk=5)

            # Append bot answer
            site["chat_history"].append({"role":"bot","text":answer, "time":datetime.utcnow().isoformat()})

            # Extract parameters (auto-save values mentioned in the exchange)
            site = update_site_description_from_text(site, query_text + "\n" + answer)
            save_active_site(site)

            st.rerun()

    # Quick buttons
    colA, colB = st.columns(2)
    if colA.button("💾 Save Chat"):
        save_active_site(site)
        st.success("Chat saved into site JSON.")
    if colB.button("🗑️ Clear Chat"):
        site["chat_history"] = []
        save_active_site(site)
        st.success("Cleared history for this site.")
|
| 1523 |
-
# --------------------------
|
| 1524 |
-
# REPORTS: PDF builders
|
| 1525 |
-
# --------------------------
|
| 1526 |
-
import io, os, json
|
| 1527 |
-
from datetime import datetime
|
| 1528 |
-
from reportlab.platypus import SimpleDocTemplate, Paragraph, Spacer, Table, TableStyle, PageBreak, Image as RLImage
|
| 1529 |
-
from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
|
| 1530 |
-
from reportlab.lib.pagesizes import A4
|
| 1531 |
-
from reportlab.lib.units import mm
|
| 1532 |
-
from reportlab.lib import colors
|
| 1533 |
-
import streamlit as st
|
| 1534 |
-
|
| 1535 |
-
# Theme colors used across all PDF builders (brand accent orange).
THEME = {"accent": colors.HexColor("#FF6600")}
|
| 1537 |
-
|
| 1538 |
-
# --------------------------
|
| 1539 |
-
# CLASSIFICATION REPORT BUILDER
|
| 1540 |
-
# --------------------------
|
| 1541 |
-
def build_classification_pdf_bytes(site: dict) -> bytes:
    """Build a classification-only PDF report for a single site.

    Args:
        site: Site dictionary with classifier inputs/results and optional
            "ocr_text" and "GSD" entries.

    Returns:
        The rendered PDF document as raw bytes.
    """
    # ReportLab Paragraph parses its text as mini-XML; raw user/OCR text
    # containing '<', '>' or '&' would raise a parse error, so escape every
    # value that does not come from our own literals.
    from xml.sax.saxutils import escape

    buf = io.BytesIO()
    doc = SimpleDocTemplate(
        buf, pagesize=A4,
        leftMargin=20*mm, rightMargin=20*mm,
        topMargin=20*mm, bottomMargin=20*mm
    )
    styles = getSampleStyleSheet()
    title = ParagraphStyle("title", parent=styles["Title"], fontSize=20, textColor=THEME["accent"], alignment=1)
    h1 = ParagraphStyle("h1", parent=styles["Heading1"], fontSize=14, textColor=THEME["accent"])
    body = ParagraphStyle("body", parent=styles["BodyText"], fontSize=10)

    elems = []
    elems.append(Paragraph("Classification Report — GeoMate V2", title))
    elems.append(Spacer(1, 8))
    elems.append(Paragraph(f"Site: {escape(str(site.get('Site Name', '-')))}", h1))
    elems.append(Paragraph(f"Coordinates: {escape(str(site.get('Coordinates', '-')))}", body))
    elems.append(Spacer(1, 6))

    # Classifier inputs rendered as a two-column parameter/value table.
    inputs = site.get("classifier_inputs", {})
    if inputs:
        data = [["Parameter", "Value"]]
        for k, v in inputs.items():
            data.append([str(k), str(v)])
        t = Table(data, colWidths=[80*mm, 80*mm])
        t.setStyle(TableStyle([
            ("GRID", (0, 0), (-1, -1), 0.5, colors.grey),
            ("BACKGROUND", (0, 0), (-1, 0), THEME["accent"]),
            ("TEXTCOLOR", (0, 0), (-1, 0), colors.white)
        ]))
        elems.append(t)
        elems.append(Spacer(1, 8))

    # Classification results.
    elems.append(Paragraph("Results", h1))
    elems.append(Paragraph(f"USCS: {escape(str(site.get('USCS', 'N/A')))}", body))
    elems.append(Paragraph(
        f"AASHTO: {escape(str(site.get('AASHTO', 'N/A')))} (GI: {escape(str(site.get('GI', 'N/A')))})",
        body
    ))

    # Optional OCR notes (free text — must be escaped).
    if site.get("ocr_text"):
        elems.append(Spacer(1, 8))
        elems.append(Paragraph("OCR Extracted Notes", h1))
        elems.append(Paragraph(escape(str(site["ocr_text"])), body))

    # Optional grain-size-distribution summary plus the cached plot image.
    gsd = site.get("GSD")
    if gsd:
        elems.append(Spacer(1, 8))
        elems.append(Paragraph("Grain Size Distribution (GSD)", h1))
        elems.append(Paragraph(f"D10: {gsd.get('D10')}, D30: {gsd.get('D30')}, D60: {gsd.get('D60')}", body))
        gsd_img_path = "/tmp/geomate_gsd_plot.png"
        if os.path.exists(gsd_img_path):
            elems.append(Spacer(1, 6))
            elems.append(RLImage(gsd_img_path, width=150*mm, height=80*mm))

    elems.append(Spacer(1, 10))
    elems.append(Paragraph("Decision path", h1))
    elems.append(Paragraph(escape(str(site.get("classifier_decision_path", "Not recorded"))), body))

    doc.build(elems)
    pdf = buf.getvalue()
    buf.close()
    return pdf
|
| 1608 |
-
|
| 1609 |
-
# --------------------------
|
| 1610 |
-
# FULL REPORT BUILDER
|
| 1611 |
-
# --------------------------
|
| 1612 |
-
def build_full_geotech_pdf_bytes(sites_list: list, external_refs: list) -> bytes:
    """Build a full geotechnical report covering all selected sites.

    Args:
        sites_list: Site dictionaries to include, one section per site.
        external_refs: Extra reference strings appended to the References page.

    Returns:
        The rendered PDF document as raw bytes.
    """
    # Escape site-supplied free text before handing it to Paragraph, whose
    # mini-XML parser raises on '<', '>' and '&'.
    from xml.sax.saxutils import escape

    buf = io.BytesIO()
    doc = SimpleDocTemplate(
        buf, pagesize=A4,
        leftMargin=20*mm, rightMargin=20*mm,
        topMargin=20*mm, bottomMargin=20*mm
    )
    styles = getSampleStyleSheet()
    title = ParagraphStyle("title", parent=styles["Title"], fontSize=20, textColor=THEME["accent"], alignment=1)
    h1 = ParagraphStyle("h1", parent=styles["Heading1"], fontSize=14, textColor=THEME["accent"])
    body = ParagraphStyle("body", parent=styles["BodyText"], fontSize=10)

    elems = []
    elems.append(Paragraph("Full Geotechnical Investigation Report — GeoMate V2", title))
    elems.append(Spacer(1, 6))
    elems.append(Paragraph(f"Date: {datetime.today().strftime('%Y-%m-%d')}", body))
    elems.append(Spacer(1, 10))

    # One section per selected site, separated by page breaks.
    for s in sites_list:
        elems.append(Paragraph(f"Site: {escape(str(s.get('Site Name', 'Unnamed')))}", h1))
        elems.append(Paragraph(f"Coordinates: {escape(str(s.get('Coordinates', 'Not provided')))}", body))
        elems.append(Spacer(1, 6))

        # OCR notes (free text — escaped).
        if s.get("ocr_text"):
            elems.append(Paragraph("OCR Extracted Notes", h1))
            elems.append(Paragraph(escape(str(s.get("ocr_text"))), body))
            elems.append(Spacer(1, 6))

        # Classification results.
        elems.append(Paragraph("Classification", h1))
        elems.append(Paragraph(f"USCS: {escape(str(s.get('USCS', 'N/A')))}", body))
        elems.append(Paragraph(
            f"AASHTO: {escape(str(s.get('AASHTO', 'N/A')))} (GI: {escape(str(s.get('GI', 'N/A')))})",
            body
        ))

        # Map snapshot captured on the Locator page, if the file still exists.
        if s.get("map_snapshot") and os.path.exists(s["map_snapshot"]):
            elems.append(Spacer(1, 6))
            elems.append(Paragraph("Site Map Snapshot", h1))
            elems.append(RLImage(s["map_snapshot"], width=140*mm, height=80*mm))

        # Grain-size-distribution summary plus the cached plot image.
        gsd = s.get("GSD")
        if gsd:
            elems.append(Spacer(1, 6))
            elems.append(Paragraph("Grain Size Distribution", h1))
            elems.append(Paragraph(
                f"D10: {gsd.get('D10')}, D30: {gsd.get('D30')}, "
                f"D60: {gsd.get('D60')}, Cu: {gsd.get('Cu')}, Cc: {gsd.get('Cc')}",
                body
            ))
            gsd_img = "/tmp/geomate_gsd_plot.png"
            if os.path.exists(gsd_img):
                elems.append(Spacer(1, 6))
                elems.append(RLImage(gsd_img, width=150*mm, height=80*mm))

        # Basic rule-of-thumb recommendations derived from the USCS symbol
        # (C* = clayey soils, everything else treated as granular).
        elems.append(Spacer(1, 8))
        elems.append(Paragraph("Recommendations", h1))
        if s.get("USCS") and str(s["USCS"]).startswith("C"):
            elems.append(Paragraph(" - Clayey soils: check consolidation/settlement. Consider raft or pile foundations.", body))
        else:
            elems.append(Paragraph(" - Granular soils: shallow foundations possible with compaction and drainage.", body))

        elems.append(PageBreak())

    # References: RAG sources collected per site plus manual external refs.
    refs = []
    for s in sites_list:
        if s.get("rag_sources"):
            refs.extend(s["rag_sources"])
    # dict.fromkeys dedupes while keeping first-seen order, so the reference
    # list is deterministic across runs (set() ordering is not).
    refs = list(dict.fromkeys(refs))
    refs.extend(external_refs)

    if refs:
        elems.append(Paragraph("References", h1))
        for r in refs:
            elems.append(Paragraph(escape(str(r)), body))

    doc.build(elems)
    pdf = buf.getvalue()
    buf.close()
    return pdf
|
| 1700 |
-
|
| 1701 |
-
# --------------------------
|
| 1702 |
-
# REPORTS UI
|
| 1703 |
-
# --------------------------
|
| 1704 |
-
def reports_ui():
    """Render the Reports page: classification-only and full geotechnical PDFs.

    Reads the site list from session state (``ss``); offers a per-site
    classification PDF and a multi-site full report with external references.
    """
    st.header("📑 Reports — Classification-only & Full Geotechnical Report")

    # Classification-only report
    st.subheader("Classification-only report")
    sites = ss.get("sites", [])
    if not sites:
        st.warning("No sites available.")
        return

    site_names = [s.get("Site Name", "Unnamed") for s in sites]
    # Clamp the stored active index: sites may have been deleted since it was
    # set, and an out-of-range index makes st.selectbox raise.
    default_idx = min(max(ss.get("active_site_idx", 0), 0), len(site_names) - 1)
    sel_cls = st.selectbox("Select site", site_names, index=default_idx)
    if st.button("Generate Classification PDF"):
        site = ss["sites"][site_names.index(sel_cls)]
        pdf_bytes = build_classification_pdf_bytes(site)
        st.download_button(
            "Download Classification PDF",
            data=pdf_bytes,
            file_name=f"classification_{sel_cls}.pdf",
            mime="application/pdf"
        )

    st.markdown("---")

    # Full report across multiple sites
    st.subheader("Full Geotechnical Report")
    selected = st.multiselect("Sites to include", site_names, default=site_names)
    ext_refs_text = st.text_area("External references (one per line)")
    if st.button("Generate Full Report PDF"):
        if not selected:
            st.error("Select at least one site.")
        else:
            chosen_sites = [ss["sites"][site_names.index(n)] for n in selected]
            ext_refs = [l.strip() for l in ext_refs_text.splitlines() if l.strip()]
            with st.spinner("Building PDF (this may take a few seconds)..."):
                pdf_bytes = build_full_geotech_pdf_bytes(chosen_sites, ext_refs)
            st.download_button(
                "Download Full Geotechnical Report",
                data=pdf_bytes,
                file_name="geomate_full_report.pdf",
                mime="application/pdf"
            )
|
| 1746 |
-
|
| 1747 |
-
# --------------------------
|
| 1748 |
-
# Final UI main function (glue)
|
| 1749 |
-
# --------------------------
|
| 1750 |
-
def ui_main_final():
    """Top-level glue: render the sidebar (model + site management) and route
    to the page selected in session state (``ss['page']``)."""
    # Sidebar: model selection + project-site management.
    with st.sidebar:
        st.markdown(f"<h3 style='color:{THEME['accent']};margin:6px 0;'>GeoMate V2</h3>", unsafe_allow_html=True)
        model = st.selectbox("Select LLM model", ["meta-llama/llama-4-maverick-17b-128e-instruct", "llama3-8b-8192", "gemma-7b-it"], index=0)
        ss["selected_model"] = model

        st.markdown("### Project Sites")
        # Add a new site; fall back to an auto-generated name when blank.
        cols = st.columns([3, 1])
        new_site_name = cols[0].text_input("New site name", key="sidebar_new_site_name")
        if cols[1].button("➕ Add"):
            if new_site_name.strip():
                add_site(new_site_name.strip())
                st.success(f"Added site {new_site_name.strip()}")
            else:
                add_site(f"Site-{len(ss['sites'])+1}")
                st.success("Added new site")

        st.markdown("Active site:")
        # Guard the radio: an empty options list or a stale out-of-range index
        # makes st.radio raise, so only render it when sites exist and clamp
        # the stored index first.
        if ss["sites"]:
            default_idx = min(max(ss.get("active_site_idx", 0), 0), len(ss["sites"]) - 1)
            idx = st.radio(
                "Select active site",
                options=list(range(len(ss["sites"]))),
                format_func=lambda i: ss["sites"][i].get("Site Name", "Site"),
                index=default_idx
            )
            ss["active_site_idx"] = idx

            with st.expander("Show active site JSON"):
                st.json(ss["sites"][ss["active_site_idx"]])
        else:
            st.info("No sites yet — add one above.")

    # Page routing (ss['page'] is set by the option_menu in earlier parts).
    page = ss.get("page", "Landing")
    if page == "Landing":
        landing_ui()
    elif page == "Soil Recognizer":
        soil_recognizer_ui()
    elif page == "Soil Classifier":
        # Classifier UI defined in Part 2.
        soil_classifier_ui()
    elif page == "GSD Curve":
        gsd_curve_ui()
    elif page == "Locator":
        locator_ui()
    elif page == "GeoMate Ask":
        rag_ui()
    elif page == "Reports":
        reports_ui()
    else:
        st.info("Select a page from the sidebar.")
|
| 1795 |
-
|
| 1796 |
-
# Run the app when executed directly (Streamlit also executes the module
# top-to-bottom, so this guard is the single entry point either way).
if __name__ == "__main__":
    ui_main_final()
|
| 1799 |
-
|
| 1800 |
-
# End of Part 4/4
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|