SyngyeonTak committed on
Commit
64fa191
·
1 Parent(s): 4915717

repo_type change

Browse files
Files changed (3) hide show
  1. cluster_predictor.py +10 -3
  2. rag_retriever.py +3 -2
  3. region_extractor.py +3 -3
cluster_predictor.py CHANGED
@@ -8,9 +8,16 @@ import json
8
  from huggingface_hub import hf_hub_download
9
 
10
  # Hugging Face dataset repo에서 prompt 파일 로드
11
- PROMPT_PATH = hf_hub_download("Syngyeon/seoulalpha-data", "data/prompt/custom_prompt_eng.txt")
12
- FEWSHOT_PATH = hf_hub_download("Syngyeon/seoulalpha-data", "data/prompt/custom_few_shot_learning_multi_language.txt")
13
-
 
 
 
 
 
 
 
14
  # --- 초기 설정 ---
15
  client = OpenAI(api_key=os.getenv("API_KEY"))
16
 
 
8
  from huggingface_hub import hf_hub_download
9
 
10
  # Hugging Face dataset repo에서 prompt 파일 로드
11
+ PROMPT_PATH = hf_hub_download(
12
+ repo_id="Syngyeon/seoulalpha-data",
13
+ repo_type="dataset", # ✅ 반드시 dataset으로 지정
14
+ filename="data/prompt/custom_prompt_eng.txt"
15
+ )
16
+ FEWSHOT_PATH = hf_hub_download(
17
+ repo_id="Syngyeon/seoulalpha-data",
18
+ repo_type="dataset", # ✅ 반드시 dataset으로 지정
19
+ filename="data/prompt/custom_few_shot_learning_multi_language.txt"
20
+ )
21
  # --- 초기 설정 ---
22
  client = OpenAI(api_key=os.getenv("API_KEY"))
23
 
rag_retriever.py CHANGED
@@ -24,8 +24,9 @@ def _load_resources():
24
  print("1. Hugging Face Hub에서 RAG 리소스를 다운로드합니다...")
25
 
26
  # HF repo에서 파일 다운로드
27
- index_path = hf_hub_download(repo_id=DATA_REPO, filename="data/faiss/faiss_merged_output/merged.index")
28
- metadata_path = hf_hub_download(repo_id=DATA_REPO, filename="data/faiss/faiss_merged_output/merged_metadata.jsonl")
 
29
  # 임베딩 모델 로드
30
  model = SentenceTransformer(MODEL_NAME)
31
 
 
24
  print("1. Hugging Face Hub에서 RAG 리소스를 다운로드합니다...")
25
 
26
  # HF repo에서 파일 다운로드
27
+ index_path = hf_hub_download(repo_id=DATA_REPO, repo_type="dataset", filename="data/faiss/faiss_merged_output/merged.index")
28
+ metadata_path = hf_hub_download(repo_id=DATA_REPO, repo_type="dataset", filename="data/faiss/faiss_merged_output/merged_metadata.jsonl")
29
+
30
  # 임베딩 모델 로드
31
  model = SentenceTransformer(MODEL_NAME)
32
 
region_extractor.py CHANGED
@@ -11,9 +11,9 @@ MODEL_NAME = "jhgan/ko-sbert-nli"
11
  # 로드
12
  def _load_region_index():
13
  try:
14
- index_path = hf_hub_download(repo_id=DATA_REPO, filename="data/faiss/region_db/faiss_region_semantic.index")
15
- metadata_path = hf_hub_download(repo_id=DATA_REPO, filename="data/faiss/region_db/metadata_region_semantic.jsonl")
16
-
17
  index = faiss.read_index(index_path)
18
  model = SentenceTransformer(MODEL_NAME)
19
 
 
11
  # 로드
12
  def _load_region_index():
13
  try:
14
+ index_path = hf_hub_download(repo_id=DATA_REPO, repo_type="dataset", filename="data/faiss/region_db/faiss_region_semantic.index")
15
+ metadata_path = hf_hub_download(repo_id=DATA_REPO, repo_type="dataset", filename="data/faiss/region_db/metadata_region_semantic.jsonl")
16
+
17
  index = faiss.read_index(index_path)
18
  model = SentenceTransformer(MODEL_NAME)
19