# Groq-Inference / groq_flask.py
# (Hugging Face Space file-page header, kept as comments so the file parses:)
# ai-rnd-dev's picture
# Update groq_flask.py
# 2079ac4 verified
from flask import Flask, request, jsonify
import groq_LPU_inference  # project-local module exposing Groq_Inference()
# WSGI application object; routes are registered on it below.
app = Flask(__name__)
@app.route('/chat', methods=['GET'])
def chat():
    """Handle GET /chat: run a Groq LPU inference on the supplied prompt.

    Query-string parameters:
        query       (required) the user prompt text
        model       optional model name, default "mistral"
        max_tokens  optional int, default 300
        temperature optional float, default 0.7
        assistant   optional assistant priming text, default ""
        system      optional system prompt, default a short helpfulness prompt

    Returns:
        200 with JSON ``[{'response': ...}, {'developer_info': ...}]`` on success;
        400 with JSON ``{'error': ...}`` when ``query`` is missing/empty or a
        numeric parameter cannot be parsed.
    """
    # Parameters arrive as URL query-string values (GET), not as a JSON body.
    query = request.args.get('query')
    model = str(request.args.get('model', "mistral"))
    assistant = str(request.args.get('assistant', ""))
    system = str(request.args.get('system', "Be Helpful and Friendly. Keep your response straightforward, short and concise"))  # typo "straightfoward" fixed

    try:
        # Previously an unparseable value (e.g. max_tokens=abc) raised an
        # unhandled ValueError and surfaced as HTTP 500; report 400 instead.
        max_tokens = int(request.args.get('max_tokens', 300))
        temperature = float(request.args.get('temperature', 0.7))
    except ValueError:
        return jsonify({'error': 'max_tokens must be an integer and temperature a number.'}), 400

    # Developer information echoed back alongside every successful response.
    developer_info = {
        'status': True
    }

    if not query:
        # Missing or empty 'query' parameter.
        error_message = {
            'error': 'Oops! Something went wrong.'
        }
        return jsonify(error_message), 400

    response = groq_LPU_inference.Groq_Inference(
        query,
        model=model,
        system=system,
        assistant=assistant,
        temp=temperature,
        max_tokens=max_tokens,
    )
    return jsonify([{'response': response}, {'developer_info': developer_info}])
# Development entry point: serve on Flask's default host/port.
if __name__ == '__main__':
    # NOTE(review): debug=True enables the interactive debugger/reloader and
    # must not be used in production — confirm this script is dev-only.
    app.run(debug=True)