Ezhil commited on
Commit
6f7e4e5
·
1 Parent(s): 15c3d68

Deleted a few files

Browse files
__pycache__/logger.cpython-310.pyc DELETED
Binary file (585 Bytes)
 
__pycache__/main.cpython-310.pyc DELETED
Binary file (566 Bytes)
 
logger.py DELETED
@@ -1,51 +0,0 @@
1
- # import logging
2
- # import os
3
-
4
- # # Define absolute path for logs
5
- # LOG_DIR = "/app/logs"
6
- # LOG_FILE = os.path.join(LOG_DIR, "backend.log")
7
-
8
- # # Ensure the logs directory exists
9
- # os.makedirs(LOG_DIR, exist_ok=True)
10
-
11
- # # Configure logging settings
12
- # logging.basicConfig(
13
- # format="%(asctime)s - %(levelname)s - %(message)s",
14
- # level=logging.INFO,
15
- # handlers=[
16
- # logging.FileHandler(LOG_FILE), # Use absolute path
17
- # logging.StreamHandler() # Print logs to the console
18
- # ]
19
- # )
20
-
21
- # logger = logging.getLogger(__name__)
22
-
23
- # if __name__ == "__main__":
24
- # logger.info("Logger is set up correctly!")
25
-
26
- import logging
27
- import os
28
-
29
- # Define relative path for logs
30
- LOG_DIR = "logs"
31
- LOG_FILE = os.path.join(LOG_DIR, "backend.log")
32
-
33
- # Ensure the logs directory exists
34
- os.makedirs(LOG_DIR, exist_ok=True)
35
-
36
- # Configure logging settings
37
- logging.basicConfig(
38
- format="%(asctime)s - %(levelname)s - %(message)s",
39
- level=logging.INFO,
40
- handlers=[
41
- logging.FileHandler(LOG_FILE), # Use relative path
42
- logging.StreamHandler() # Print logs to the console
43
- ]
44
- )
45
-
46
- logger = logging.getLogger(__name__)
47
-
48
- if __name__ == "__main__":
49
- logger.info("Logger is set up correctly!")
50
-
51
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
logs/app.log DELETED
@@ -1,22 +0,0 @@
1
- 2025-02-08 10:29:41,633 - INFO - CSV file loaded successfully.
2
- 2025-02-08 10:29:41,645 - INFO - Started server process [17300]
3
- 2025-02-08 10:29:41,650 - INFO - Waiting for application startup.
4
- 2025-02-08 10:29:41,652 - INFO - Application startup complete.
5
- 2025-02-08 10:30:46,646 - INFO - Shutting down
6
- 2025-02-08 10:30:46,755 - INFO - Waiting for application shutdown.
7
- 2025-02-08 10:30:46,757 - INFO - Application shutdown complete.
8
- 2025-02-08 10:30:46,759 - INFO - Finished server process [17300]
9
- 2025-02-08 10:31:37,797 - INFO - CSV file loaded successfully.
10
- 2025-02-08 10:31:37,819 - INFO - Starting FastAPI application...
11
- 2025-02-08 10:31:37,826 - INFO - Started server process [5848]
12
- 2025-02-08 10:31:37,838 - INFO - Waiting for application startup.
13
- 2025-02-08 10:31:37,842 - INFO - Application startup complete.
14
- 2025-02-08 10:42:11,654 - INFO - Shutting down
15
- 2025-02-08 10:42:11,762 - INFO - Waiting for application shutdown.
16
- 2025-02-08 10:42:11,767 - INFO - Application shutdown complete.
17
- 2025-02-08 10:42:11,772 - INFO - Finished server process [5848]
18
- 2025-02-08 10:42:14,024 - INFO - CSV file loaded successfully.
19
- 2025-02-08 10:42:14,035 - INFO - Starting FastAPI application...
20
- 2025-02-08 10:42:14,039 - INFO - Started server process [13436]
21
- 2025-02-08 10:42:14,043 - INFO - Waiting for application startup.
22
- 2025-02-08 10:42:14,045 - INFO - Application startup complete.
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
logs/backend.log DELETED
@@ -1,5 +0,0 @@
1
- 2025-02-12 11:16:48,494 - INFO - CSV file loaded successfully.
2
- 2025-02-12 11:16:48,817 - INFO - Started server process [19640]
3
- 2025-02-12 11:16:48,820 - INFO - Waiting for application startup.
4
- 2025-02-12 11:16:48,823 - INFO - Application startup complete.
5
- 2025-02-12 11:16:48,825 - INFO - Uvicorn running on http://0.0.0.0:8000 (Press CTRL+C to quit)
 
 
 
 
 
 
models/__pycache__/pydantic_model.cpython-310.pyc DELETED
Binary file (1.41 kB)
 
models/pydantic_model.py DELETED
@@ -1,20 +0,0 @@
1
- from pydantic import BaseModel, Field, field_validator
2
- from typing import List, Union
3
-
4
- # Pydantic Model for Response Validation
5
- class ContinentStats(BaseModel):
6
- continent: str = Field(..., title="Continent Name", example="Asia")
7
- Total_Countries: int = Field(..., title="Total Number of Countries", example=48)
8
- Total_Population: int = Field(..., title="Total Population", example=4600000000)
9
- Average_Population: float = Field(..., title="Average Population Per Country", example=96000000)
10
- Total_Area: int = Field(..., title="Total Land Area (sq km)", example=44679000)
11
- max_population: int = Field(..., title="Highest Country Population", example=1400000000)
12
- min_population: int = Field(..., title="Lowest Country Population", example=100000)
13
- Population_Density: float = Field(..., title="Population Density (people/sq km)", example=103)
14
-
15
- @field_validator("Total_Countries", "Total_Population", "Total_Area", "max_population", "min_population")
16
- @classmethod
17
- def must_be_positive(cls, value):
18
- if value < 0:
19
- raise ValueError("Value must be non-negative")
20
- return value
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
routers/__pycache__/continent.cpython-310.pyc DELETED
Binary file (567 Bytes)
 
routers/__pycache__/population.cpython-310.pyc DELETED
Binary file (1.5 kB)
 
routers/continent.py DELETED
@@ -1,11 +0,0 @@
1
- from fastapi import APIRouter
2
- from services.continent_services import get_continent_data
3
- from models.pydantic_model import ContinentStats
4
-
5
- router = APIRouter()
6
-
7
- @router.get("/{continent}/", response_model=ContinentStats)
8
- def get_continent_stats(continent: str):
9
- return get_continent_data(continent)
10
-
11
-
 
 
 
 
 
 
 
 
 
 
 
 
services/__pycache__/continent_services.cpython-310.pyc DELETED
Binary file (1.45 kB)
 
services/continent_services.py DELETED
@@ -1,49 +0,0 @@
1
- import pandas as pd
2
- # from backend.population_pandas import get_continents, get_continent_data
3
- from Backend.logger import logger
4
-
5
- import os
6
-
7
- # file_path = os.path.join(os.path.dirname(__file__), "../../data/world_population.csv")
8
- file_path = os.path.join(os.path.dirname(__file__), "../data/world_population.csv")
9
- # file_path = os.path.abspath(file_path) # Convert to absolute path
10
-
11
- file_path = os.path.abspath(file_path) # Convert to absolute path
12
- try:
13
- df = pd.read_csv(file_path)
14
- logger.info(f"CSV file loaded successfully from: {file_path}")
15
- except Exception as e:
16
- logger.error(f"Error loading CSV file from {file_path}: {e}")
17
- df = None # Prevent NameError if file loading fails
18
-
19
- if df is not None:
20
- # Perform the aggregations only if df is successfully loaded
21
- continent_stats = df.groupby("Continent").agg(
22
- Total_Countries=('Country', 'count'),
23
- Total_Population=('Population', 'sum'),
24
- Average_Population=('Population', 'mean'),
25
- Total_Area=('Area', 'sum'),
26
- max_population=('Population', 'max'),
27
- min_population=('Population', 'min'),
28
- Country_Max_Population=('Population', lambda x: df.loc[x.idxmax(), 'Country']),
29
- Country_Min_Population=('Population', lambda x: df.loc[x.idxmin(), 'Country'])
30
- ).reset_index()
31
-
32
- # Compute Population Density
33
- continent_stats["Population_Density"] = (
34
- continent_stats["Total_Population"] / continent_stats["Total_Area"]
35
- )
36
-
37
- logger.info("Data processing completed.")
38
-
39
-
40
- def get_continent_data(continent):
41
- """Returns statistics for a specific continent."""
42
- logger.info(f"Fetching data for continent: {continent}")
43
- result = continent_stats[continent_stats["Continent"] == continent].squeeze()
44
-
45
- if result.empty:
46
- logger.warning(f"No data found for continent: {continent}")
47
- return {}
48
-
49
- return result.to_dict()