Spaces:
Sleeping
Sleeping
| """ | |
| RAG ์์คํ ํ ์คํธ ์คํฌ๋ฆฝํธ | |
| API ์๋ฒ๊ฐ ์คํ ์ค์ผ ๋ ์ฌ์ฉ | |
| """ | |
| import requests | |
| import json | |
| from typing import Dict | |
def test_query(
    question: str,
    top_k: int = 5,
    enable_metacognition: bool = True,
    api_url: str = "http://localhost:8000",
    timeout: float = 60.0,
) -> Dict:
    """Send one question to the running RAG API server and print the result.

    Args:
        question: Question text to submit.
        top_k: Number of documents to retrieve on the server side.
        enable_metacognition: Whether the server should run its
            metacognition loop for this query.
        api_url: Base URL of the API server.
        timeout: Request timeout in seconds. Without it, ``requests.post``
            can block forever if the server hangs (fix for the original
            missing-timeout defect).

    Returns:
        The parsed JSON response on success, or an empty dict when the
        server returns a non-200 status.
    """
    print("=" * 80)
    print(f"์ง๋ฌธ: {question}")
    print("=" * 80)

    # Fire the query; timeout bounds both connect and read phases.
    response = requests.post(
        f"{api_url}/query",
        json={
            "question": question,
            "top_k": top_k,
            "enable_metacognition": enable_metacognition,
        },
        timeout=timeout,
    )

    if response.status_code != 200:
        print(f"โ ์ค๋ฅ: {response.status_code}")
        print(response.text)
        return {}

    result = response.json()

    # Print the answer body.
    print("\n๐ ๋ต๋ณ:")
    print("-" * 80)
    print(result["answer"])
    print("-" * 80)

    # Show at most the top 3 retrieved sources.
    print(f"\n๐ ์ฐธ๊ณ ๋ฌธํ: {len(result['sources'])}๊ฐ")
    for i, source in enumerate(result['sources'][:3], 1):
        print(f"\n[{i}] {source['source_filename']}")
        print(f" ์ ์ฌ๋: {source['similarity']:.3f}")
        print(f" ๋ด์ฉ: {source['text'][:100]}...")

    # Metacognition block is optional in the response payload.
    if result.get('metacognition'):
        print(f"\n๐ง ๋ฉํ์ธ์ง ์ ๋ณด:")
        print(f" ๋ฐ๋ณต ํ์: {result['metacognition']['iterations']}")
        print(f" ์ฌ๊ณ ๊ณผ์ ๋จ๊ณ: {len(result['metacognition']['thinking_history'])}")

    print("\n" + "=" * 80)
    return result
def test_health(api_url: str = "http://localhost:8000", timeout: float = 10.0):
    """Hit the /health endpoint and print server/vector-store status.

    Args:
        api_url: Base URL of the API server.
        timeout: Request timeout in seconds — added so a dead server
            fails fast instead of hanging the script indefinitely.
    """
    print("๐ฅ ํฌ์ค ์ฒดํฌ ์ค...")
    response = requests.get(f"{api_url}/health", timeout=timeout)

    if response.status_code == 200:
        data = response.json()
        print("โ ์๋ฒ ์ ์")
        print(f" Vector Store: {data['vector_store']['total_documents']}๊ฐ ๋ฌธ์")
        print(f" Embedding: {data['embedding_model']['type']} ({data['embedding_model']['dimension']}์ฐจ์)")
    else:
        print(f"โ ์๋ฒ ์ค๋ฅ: {response.status_code}")
if __name__ == "__main__":
    # Verify the server is up before sending any queries.
    test_health()
    print("\n")

    # Canned sample questions exercised first.
    sample_questions = (
        "๊ธ์ต์๊ธฐ์ ์ฃผ์ ์์ธ์ ๋ฌด์์ธ๊ฐ์?",
        "ํฌํธํด๋ฆฌ์ค ๋ค๊ฐํ์ ํจ๊ณผ๋?",
        "์ค์์ํ์ ๊ธ๋ฆฌ ์ ์ฑ ์ด ์์ฅ์ ๋ฏธ์น๋ ์ํฅ์?",
    )
    for sample in sample_questions:
        try:
            test_query(sample, top_k=5, enable_metacognition=True)
            print("\n\n")
        except Exception as exc:
            print(f"โ ์ค๋ฅ: {str(exc)}\n\n")

    # Interactive loop: an empty line ends the session.
    print("\n์ปค์คํ ์ง๋ฌธ์ ์ ๋ ฅํ์ธ์ (Enter๋ฅผ ๋๋ฅด๋ฉด ์ข ๋ฃ):")
    while user_question := input("\n์ง๋ฌธ: ").strip():
        try:
            test_query(user_question, top_k=5, enable_metacognition=True)
        except Exception as exc:
            print(f"โ ์ค๋ฅ: {str(exc)}")