gbrabbit commited on
Commit
447998b
·
1 Parent(s): 7fe9920

Auto commit at 09-2025-08 0:00:01

Browse files
Dockerfile CHANGED
@@ -37,15 +37,21 @@ RUN python -c "import nltk; nltk.download('punkt'); nltk.download('punkt_tab');
37
  COPY . .
38
 
39
  # 필요한 디렉토리 생성
40
- RUN mkdir -p /app/data /app/logs /app/models /app/uploads /app/vector_stores /app/temp /app/cache/transformers /app/cache/huggingface
41
 
42
- # 권한 설정 - 캐시 디렉토리에 쓰기 권한 부여
43
  RUN chmod +x /app/*.py
44
  RUN chmod -R 777 /app/cache
45
  RUN chmod -R 777 /app/data
46
  RUN chmod -R 777 /app/logs
47
  RUN chmod -R 777 /app/uploads
48
  RUN chmod -R 777 /app/temp
 
 
 
 
 
 
49
 
50
  # Hugging Face ์บ์‹œ ๋””๋ ‰ํ† ๋ฆฌ ํ™˜๊ฒฝ ๋ณ€์ˆ˜ ์„ค์ •
51
  ENV TRANSFORMERS_CACHE=/app/cache/transformers
 
37
  COPY . .
38
 
39
  # 필요한 디렉토리 생성
40
+ RUN mkdir -p /app/data /app/logs /app/models /app/uploads /app/vector_stores /app/temp /app/cache/transformers /app/cache/huggingface /app/cache/easyocr /app/cache/ocr_models
41
 
42
+ # 권한 설정 - 모든 캐시 및 데이터 디렉토리에 쓰기 권한 부여
43
  RUN chmod +x /app/*.py
44
  RUN chmod -R 777 /app/cache
45
  RUN chmod -R 777 /app/data
46
  RUN chmod -R 777 /app/logs
47
  RUN chmod -R 777 /app/uploads
48
  RUN chmod -R 777 /app/temp
49
+ RUN chmod -R 777 /app/cache/easyocr
50
+ RUN chmod -R 777 /app/cache/ocr_models
51
+
52
+ # EasyOCR 및 기타 라이브러리를 위한 환경 변수 설정
53
+ ENV EASYOCR_MODULE_PATH=/app/cache/easyocr
54
+ ENV OCR_MODELS_PATH=/app/cache/ocr_models
55
 
56
  # Hugging Face ์บ์‹œ ๋””๋ ‰ํ† ๋ฆฌ ํ™˜๊ฒฝ ๋ณ€์ˆ˜ ์„ค์ •
57
  ENV TRANSFORMERS_CACHE=/app/cache/transformers
app_huggingface.py CHANGED
@@ -32,12 +32,14 @@ def setup_huggingface_environment():
32
  # 필요한 디렉토리 생성
33
  directories = [
34
  "data", "logs", "models", "uploads",
35
- "vector_stores", "temp", "cache"
 
 
36
  ]
37
 
38
  for dir_name in directories:
39
  dir_path = project_root / dir_name
40
- dir_path.mkdir(exist_ok=True)
41
  logger.info(f"📁 디렉토리 생성: {dir_path}")
42
 
43
  # 환경 변수 설정
@@ -45,6 +47,8 @@ def setup_huggingface_environment():
45
  "TRANSFORMERS_CACHE": str(project_root / "cache" / "transformers"),
46
  "HF_HOME": str(project_root / "cache" / "huggingface"),
47
  "TORCH_HOME": str(project_root / "cache" / "torch"),
 
 
48
  "TOKENIZERS_PARALLELISM": "false",
49
  "OMP_NUM_THREADS": "1",
50
  "MKL_NUM_THREADS": "1"
 
32
  # 필요한 디렉토리 생성
33
  directories = [
34
  "data", "logs", "models", "uploads",
35
+ "vector_stores", "temp", "cache",
36
+ "cache/transformers", "cache/huggingface",
37
+ "cache/easyocr", "cache/ocr_models"
38
  ]
39
 
40
  for dir_name in directories:
41
  dir_path = project_root / dir_name
42
+ dir_path.mkdir(exist_ok=True, parents=True)
43
  logger.info(f"📁 디렉토리 생성: {dir_path}")
44
 
45
  # 환경 변수 설정
 
47
  "TRANSFORMERS_CACHE": str(project_root / "cache" / "transformers"),
48
  "HF_HOME": str(project_root / "cache" / "huggingface"),
49
  "TORCH_HOME": str(project_root / "cache" / "torch"),
50
+ "EASYOCR_MODULE_PATH": str(project_root / "cache" / "easyocr"),
51
+ "OCR_MODELS_PATH": str(project_root / "cache" / "ocr_models"),
52
  "TOKENIZERS_PARALLELISM": "false",
53
  "OMP_NUM_THREADS": "1",
54
  "MKL_NUM_THREADS": "1"
lily_llm_api/models/kanana_1_5_v_3b_instruct.py CHANGED
@@ -24,8 +24,8 @@ class Kanana15V3bInstructProfile:
24
  self.local_path = "./lily_llm_core/models/kanana_1_5_v_3b_instruct"
25
  self.display_name = "Kanana-1.5-v-3b-instruct (로컬)"
26
  else:
27
- self.model_name = "gbrabbit/lily-math-model" # Hugging Face 모델 경로
28
- self.local_path = "./lily_llm_core/models/kanana_1_5_v_3b_instruct"
29
  self.display_name = "Kanana-1.5-v-3b-instruct (서버)"
30
 
31
  self.description = "카카오 멀티모달 모델 (3.6B) - Math RAG 특화"
@@ -146,9 +146,9 @@ class Kanana15V3bInstructProfile:
146
  low_cpu_mem_usage=True,
147
  ).to(DEVICE)
148
  else:
149
- # Hugging Face Hub: 표준 AutoModel 사용
150
- from transformers import AutoModelForCausalLM
151
- model = AutoModelForCausalLM.from_pretrained(
152
  model_path,
153
  trust_remote_code=True,
154
  torch_dtype=torch.float16,
 
24
  self.local_path = "./lily_llm_core/models/kanana_1_5_v_3b_instruct"
25
  self.display_name = "Kanana-1.5-v-3b-instruct (로컬)"
26
  else:
27
+ self.model_name = "gbrabbit/lily-math-model" # Hugging Face Hub 모델 경로
28
+ self.local_path = None # 서버에서는 로컬 경로 사용 안함
29
  self.display_name = "Kanana-1.5-v-3b-instruct (서버)"
30
 
31
  self.description = "카카오 멀티모달 모델 (3.6B) - Math RAG 특화"
 
146
  low_cpu_mem_usage=True,
147
  ).to(DEVICE)
148
  else:
149
+ # Hugging Face Hub: AutoModel 사용 (커스텀 설정 클래스 호환성)
150
+ from transformers import AutoModel
151
+ model = AutoModel.from_pretrained(
152
  model_path,
153
  trust_remote_code=True,
154
  torch_dtype=torch.float16,