krishnadhulipalla committed on
Commit
7d9d335
·
1 Parent(s): 066f51c

removed data dir path

Browse files
backend/app/config/settings.py CHANGED
@@ -3,7 +3,6 @@ from pydantic import Field
3
  from pydantic_settings import BaseSettings, SettingsConfigDict
4
  import os, tempfile
5
 
6
- # Resolve the repo root when running outside Docker
7
  REPO_ROOT = Path(__file__).resolve().parents[3]
8
 
9
  def _writable_dir(candidates: list[Path]) -> Path:
@@ -13,20 +12,21 @@ def _writable_dir(candidates: list[Path]) -> Path:
13
  t = p / ".write_test"
14
  t.write_text("ok", encoding="utf-8")
15
  t.unlink(missing_ok=True)
16
- return p
17
  except Exception:
18
  continue
19
  raise RuntimeError(f"No writable data dir from: {candidates!r}")
20
 
21
  def _default_data_dir() -> Path:
22
- # 1) Respect env if provided
23
- if os.getenv("DATA_DIR"):
24
- return Path(os.getenv("DATA_DIR")).resolve()
25
- # 2) Prefer /data inside containers (Docker/HF Spaces)
26
- candidates = [Path("/data")]
27
- # 3) Repo local ./data for local dev
28
- candidates.append((REPO_ROOT / "data").resolve())
29
- # 4) Last resort: /tmp
 
30
  candidates.append(Path(tempfile.gettempdir()) / "pulsemaps" / "data")
31
  return _writable_dir(candidates)
32
 
@@ -34,52 +34,40 @@ def _default_frontend_dist() -> Path:
34
  return (REPO_ROOT / "web" / "dist").resolve()
35
 
36
  class Settings(BaseSettings):
37
- model_config = SettingsConfigDict(
38
- env_file=".env",
39
- extra="ignore",
40
- case_sensitive=False,
41
- populate_by_name=True,
42
- )
43
 
44
- # API / Models
45
  OPENAI_API_KEY: str | None = None
46
  OPENAI_MODEL_AGENT: str = "gpt-4o"
47
  OPENAI_MODEL_CLASSIFIER: str = "gpt-4o-mini"
48
 
49
- # Paths
50
  DATA_DIR: Path = Field(default_factory=_default_data_dir)
51
  REPORTS_DB: Path | None = None
52
  SESSIONS_DB: Path | None = None
53
  UPLOADS_DIR: Path | None = None
54
  FRONTEND_DIST: Path = Field(default_factory=_default_frontend_dist)
55
 
56
- # Defaults
57
  DEFAULT_RADIUS_KM: float = 40.0
58
  DEFAULT_LIMIT: int = 10
59
  MAX_AGE_HOURS: int = 48
60
 
61
- # Optional extras you had in .env
62
  firms_map_key: str | None = None
63
  gdacs_rss_url: str | None = "https://www.gdacs.org/xml/rss.xml"
64
  nvidia_api_key: str | None = None
65
 
66
  def ensure_dirs(self) -> None:
67
- # Fill path fields if not set, then ensure they exist
68
  if self.REPORTS_DB is None:
69
- self.REPORTS_DB = (self.DATA_DIR / "pulsemaps_reports.db")
70
  if self.SESSIONS_DB is None:
71
- self.SESSIONS_DB = (self.DATA_DIR / "pulsemap_sessions.db")
72
  if self.UPLOADS_DIR is None:
73
- self.UPLOADS_DIR = (self.DATA_DIR / "uploads")
74
 
75
- # Absolute, existing
76
- self.DATA_DIR = self.DATA_DIR.resolve()
 
77
  self.REPORTS_DB = self.REPORTS_DB.resolve()
78
  self.SESSIONS_DB = self.SESSIONS_DB.resolve()
79
  self.UPLOADS_DIR = self.UPLOADS_DIR.resolve()
80
 
81
- self.DATA_DIR.mkdir(parents=True, exist_ok=True)
82
- self.UPLOADS_DIR.mkdir(parents=True, exist_ok=True)
83
-
84
  settings = Settings()
85
  settings.ensure_dirs()
 
3
  from pydantic_settings import BaseSettings, SettingsConfigDict
4
  import os, tempfile
5
 
 
6
  REPO_ROOT = Path(__file__).resolve().parents[3]
7
 
8
  def _writable_dir(candidates: list[Path]) -> Path:
 
12
  t = p / ".write_test"
13
  t.write_text("ok", encoding="utf-8")
14
  t.unlink(missing_ok=True)
15
+ return p.resolve()
16
  except Exception:
17
  continue
18
  raise RuntimeError(f"No writable data dir from: {candidates!r}")
19
 
20
  def _default_data_dir() -> Path:
21
+ candidates: list[Path] = []
22
+ env = os.getenv("DATA_DIR")
23
+ if env:
24
+ candidates.append(Path(env))
25
+ # Prefer the standard mount on Docker/HF (if writable)
26
+ candidates.append(Path("/data"))
27
+ # Local dev
28
+ candidates.append(REPO_ROOT / "data")
29
+ # Last resort
30
  candidates.append(Path(tempfile.gettempdir()) / "pulsemaps" / "data")
31
  return _writable_dir(candidates)
32
 
 
34
  return (REPO_ROOT / "web" / "dist").resolve()
35
 
36
  class Settings(BaseSettings):
37
+ model_config = SettingsConfigDict(env_file=".env", extra="ignore", case_sensitive=False)
 
 
 
 
 
38
 
 
39
  OPENAI_API_KEY: str | None = None
40
  OPENAI_MODEL_AGENT: str = "gpt-4o"
41
  OPENAI_MODEL_CLASSIFIER: str = "gpt-4o-mini"
42
 
 
43
  DATA_DIR: Path = Field(default_factory=_default_data_dir)
44
  REPORTS_DB: Path | None = None
45
  SESSIONS_DB: Path | None = None
46
  UPLOADS_DIR: Path | None = None
47
  FRONTEND_DIST: Path = Field(default_factory=_default_frontend_dist)
48
 
 
49
  DEFAULT_RADIUS_KM: float = 40.0
50
  DEFAULT_LIMIT: int = 10
51
  MAX_AGE_HOURS: int = 48
52
 
 
53
  firms_map_key: str | None = None
54
  gdacs_rss_url: str | None = "https://www.gdacs.org/xml/rss.xml"
55
  nvidia_api_key: str | None = None
56
 
57
  def ensure_dirs(self) -> None:
 
58
  if self.REPORTS_DB is None:
59
+ self.REPORTS_DB = self.DATA_DIR / "pulsemaps_reports.db"
60
  if self.SESSIONS_DB is None:
61
+ self.SESSIONS_DB = self.DATA_DIR / "pulsemap_sessions.db"
62
  if self.UPLOADS_DIR is None:
63
+ self.UPLOADS_DIR = self.DATA_DIR / "uploads"
64
 
65
+ # Make & resolve
66
+ self.DATA_DIR.mkdir(parents=True, exist_ok=True)
67
+ self.UPLOADS_DIR.mkdir(parents=True, exist_ok=True)
68
  self.REPORTS_DB = self.REPORTS_DB.resolve()
69
  self.SESSIONS_DB = self.SESSIONS_DB.resolve()
70
  self.UPLOADS_DIR = self.UPLOADS_DIR.resolve()
71
 
 
 
 
72
  settings = Settings()
73
  settings.ensure_dirs()
backend/app/data/store.py CHANGED
@@ -7,11 +7,14 @@ from pathlib import Path
7
  from ..config.settings import settings
8
  from .geo import haversine_km
9
 
10
- # Use writable, absolute paths from settings (prefers /data in containers)
11
  DB_PATH: Path = settings.REPORTS_DB
 
12
  DB_PATH.parent.mkdir(parents=True, exist_ok=True)
 
 
 
 
13
 
14
- # Single connection
15
  _CONN = sqlite3.connect(str(DB_PATH), check_same_thread=False)
16
  _CONN.execute("""
17
  CREATE TABLE IF NOT EXISTS reports (
@@ -29,29 +32,18 @@ def _row_to_feature(row: tuple) -> Dict[str, Any]:
29
  _id, lat, lon, text, props_json, created_at = row
30
  props = {"type": "user_report", "text": text, "reported_at": created_at}
31
  if props_json:
32
- try:
33
- props.update(json.loads(props_json))
34
- except Exception:
35
- props["raw_props"] = props_json
36
- return {
37
- "type": "Feature",
38
- "geometry": {"type": "Point", "coordinates": [lon, lat]},
39
- "properties": props,
40
- }
41
 
42
  def add_report(lat: float, lon: float, text: str = "User report", props: dict | None = None):
43
  created_at = datetime.now(timezone.utc).isoformat()
44
  props_json = json.dumps(props or {})
45
- _CONN.execute(
46
- "INSERT INTO reports (lat, lon, text, props_json, created_at) VALUES (?,?,?,?,?)",
47
- (float(lat), float(lon), text, props_json, created_at),
48
- )
49
  _CONN.commit()
50
- return {
51
- "type": "Feature",
52
- "geometry": {"type": "Point", "coordinates": [float(lon), float(lat)]},
53
- "properties": {"type": "user_report", "text": text, "reported_at": created_at, **(props or {})},
54
- }
55
 
56
  def get_feature_collection() -> Dict[str, Any]:
57
  cur = _CONN.execute("SELECT id, lat, lon, text, props_json, created_at FROM reports ORDER BY id DESC")
 
7
  from ..config.settings import settings
8
  from .geo import haversine_km
9
 
 
10
  DB_PATH: Path = settings.REPORTS_DB
11
+ # Create parent and touch the file so we fail here if unwritable
12
  DB_PATH.parent.mkdir(parents=True, exist_ok=True)
13
+ try:
14
+ DB_PATH.touch(exist_ok=True)
15
+ except Exception as e:
16
+ raise RuntimeError(f"Cannot create DB file at {DB_PATH}: {e}")
17
 
 
18
  _CONN = sqlite3.connect(str(DB_PATH), check_same_thread=False)
19
  _CONN.execute("""
20
  CREATE TABLE IF NOT EXISTS reports (
 
32
  _id, lat, lon, text, props_json, created_at = row
33
  props = {"type": "user_report", "text": text, "reported_at": created_at}
34
  if props_json:
35
+ try: props.update(json.loads(props_json))
36
+ except Exception: props["raw_props"] = props_json
37
+ return {"type": "Feature", "geometry": {"type": "Point", "coordinates": [lon, lat]}, "properties": props}
 
 
 
 
 
 
38
 
39
  def add_report(lat: float, lon: float, text: str = "User report", props: dict | None = None):
40
  created_at = datetime.now(timezone.utc).isoformat()
41
  props_json = json.dumps(props or {})
42
+ _CONN.execute("INSERT INTO reports (lat, lon, text, props_json, created_at) VALUES (?,?,?,?,?)",
43
+ (float(lat), float(lon), text, props_json, created_at))
 
 
44
  _CONN.commit()
45
+ return {"type": "Feature", "geometry": {"type": "Point", "coordinates": [float(lon), float(lat)]},
46
+ "properties": {"type": "user_report", "text": text, "reported_at": created_at, **(props or {})}}
 
 
 
47
 
48
  def get_feature_collection() -> Dict[str, Any]:
49
  cur = _CONN.execute("SELECT id, lat, lon, text, props_json, created_at FROM reports ORDER BY id DESC")