|
|
from flask import Flask, request, jsonify, send_from_directory |
|
|
from flask_cors import CORS |
|
|
from huggingface_hub import InferenceClient |
|
|
import os |
|
|
from dotenv import load_dotenv |
|
|
|
|
|
# Load configuration (MODEL_ID, HUGGINGFACE_API_KEY, ...) from a local .env file.
load_dotenv()

app = Flask(__name__)

# The allowed browser origin is configurable via the FRONTEND_ORIGIN env var;
# the default preserves the previous hard-coded placeholder value, so existing
# deployments behave identically.
_frontend_origin = os.getenv("FRONTEND_ORIGIN", "http://your-frontend-domain")

# Restrict cross-origin access to the known frontend only, for all routes.
CORS(app, resources={
    r"/*": {
        "origins": [_frontend_origin],
        "methods": ["GET", "POST", "OPTIONS"],
        "allow_headers": ["Content-Type", "Accept"]
    }
})
|
|
|
|
|
|
|
|
# Hosted text-generation client used by /api/generate-diary. Both the model id
# and the API token come from the environment (populated by load_dotenv()).
client = InferenceClient(
    model=os.getenv("MODEL_ID"),
    token=os.getenv("HUGGINGFACE_API_KEY")
)

# NOTE(review): a local model load (AutoTokenizer / AutoModelForCausalLM /
# torch for "nlpai-lab/kullm-polyglot-5.8b-v2") was removed here. Those names
# were never imported, so the module raised NameError at import time, and the
# resulting `tokenizer`/`model` objects were never used anywhere in this file:
# all generation goes through the hosted `client` above.
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/')
def serve_frontend():
    """Serve the SPA entry page (index.html) from the sibling frontend dir."""
    frontend_dir = '../frontend'
    return send_from_directory(frontend_dir, 'index.html')
|
|
|
|
|
|
|
|
@app.route('/<path:path>')
def serve_static(path):
    """Serve any other static asset (JS/CSS/images) from the frontend dir."""
    frontend_dir = '../frontend'
    return send_from_directory(frontend_dir, path)
|
|
|
|
|
|
|
|
@app.route('/favicon.ico')
def favicon():
    """Answer favicon requests with an empty 204 so browsers stop retrying."""
    return ('', 204)
|
|
|
|
|
def _build_prompt(keywords):
    """Assemble the long-form Korean diary-writing prompt for *keywords*.

    The prompt deliberately ends with "์ค๋์ " so the model continues the
    diary from that phrase; generate_diary() relies on this when trimming
    the response.
    """
    return f"""๋ค์์ ์ค๋ ์์๋ ์ผ์ ์์ฝ์
๋๋ค. ์ด๊ฒ์ ๋ฐํ์ผ๋ก ์์ํ๊ณ ๊ฐ๋์ ์ธ ์ผ๊ธฐ๋ฅผ ์์ฑํด์ฃผ์ธ์.

[์์ธ ์๊ตฌ์ฌํญ]
1. ๋์
๋ถ:
- ๊ทธ๋ ์ ๋ ์จ๋ ๋ถ์๊ธฐ๋ก ์์
- ์ํฉ๊ณผ ๋ฑ์ฅ์ธ๋ฌผ ์๊ฐ

2. ์ ๊ฐ:
- ๊ตฌ์ฒด์ ์ธ ๋ํ์ ํ๋ ๋ฌ์ฌ
- ์ค๊ฐ์ ์ฌ์ฉํ ์ฅ๋ฉด ๋ฌ์ฌ
- ๋ฑ์ฅ์ธ๋ฌผ๋ค์ ํ์ ๊ณผ ๊ฐ์ ๋ณํ

3. ๊ฐ์ ๊ณผ ์๊ฐ:
- ๋ด๋ฉด์ ๊ฐ์ ์ ์ฌ์ธํ๊ฒ ํํ
- ์ฌ๊ฑด์ ๋ํ ๋์ ์๊ฐ๊ณผ ๊นจ๋ฌ์
- ๋ค๋ฅธ ์ฌ๋๋ค์ ๊ฐ์ ์ ๋ํ ๊ณต๊ฐ

4. ๋ฌธ์ฒด:
- ๋ฌธ์ด์ฒด์ ๊ตฌ์ด์ฒด๋ฅผ ์ ์ ํ ํผ์ฉ
- ๋น์ ์ ์์ ๋ฅผ ํ์ฉํ ํํ
- ๋ฐ๋ณต์ ํผํ๊ณ ๋ค์ํ ์ดํ ์ฌ์ฉ

5. ๋ง๋ฌด๋ฆฌ:
- ๊ทธ๋ ์ ๊ฒฝํ์ด ์ฃผ๋ ์๋ฏธ
- ์์ผ๋ก์ ๊ธฐ๋๋ ๋ค์ง

์์ฝ:
{keywords}

===
์ค๋์ ์ผ๊ธฐ:
์ค๋์ """


@app.route('/api/generate-diary', methods=['POST'])
def generate_diary():
    """Generate a diary entry from keyword summaries posted as JSON.

    Expects a JSON body of the form ``{"keywords": "<summary text>"}``.
    Returns ``{"diary": <text>}`` on success, or ``{"error": <msg>}`` with
    HTTP 400 (bad input) / 500 (generation failure).
    """
    try:
        # silent=True: a missing/invalid JSON body yields None instead of
        # Werkzeug's HTML 400 page, keeping the error contract JSON-only.
        data = request.get_json(silent=True)
        if not data or 'keywords' not in data:
            return jsonify({"error": "ํค์๋๊ฐ ํ์ํฉ๋๋ค"}), 400

        keywords = data.get('keywords', '')
        # Validate the type up front: calling .strip() on e.g. a list would
        # otherwise surface as a 500 from the generic handler below.
        if not isinstance(keywords, str) or not keywords.strip():
            return jsonify({"error": "ํค์๋๊ฐ ๋น์ด์์ต๋๋ค"}), 400
        keywords = keywords.strip()

        prompt = _build_prompt(keywords)

        # Sampling settings tuned for long, varied, non-repetitive prose.
        parameters = {
            "max_new_tokens": 768,
            "temperature": 0.88,
            "top_p": 0.95,
            "repetition_penalty": 1.35,
            "top_k": 50,
            "do_sample": True,
            "num_return_sequences": 1
        }

        response = client.text_generation(
            prompt,
            **parameters
        )

        if not response:
            return jsonify({"error": "์ผ๊ธฐ ์์ฑ์ ์คํจํ์ต๋๋ค"}), 500

        # The prompt ends with "์ค๋์ "; keep only the text after its last
        # occurrence and re-attach the prefix so the diary reads naturally.
        diary_content = "์ค๋์ " + response.split("์ค๋์ ")[-1].strip()

        return jsonify({"diary": diary_content})

    except Exception as e:
        # Boundary handler: log the full traceback, answer with a JSON 500.
        app.logger.exception("Error generating diary")
        return jsonify({"error": f"์ผ๊ธฐ ์์ฑ ์ค ์ค๋ฅ๊ฐ ๋ฐ์ํ์ต๋๋ค: {str(e)}"}), 500
|
|
|
|
|
if __name__ == '__main__':
    # Development entry point only.
    # NOTE(review): debug=True enables the Werkzeug interactive debugger,
    # which allows arbitrary code execution if the port is reachable —
    # confirm this never runs in production; serve via a WSGI server there.
    app.run(debug=True)
|
|
|