Spaces:
Runtime error
Runtime error
Commit
·
6500a16
1
Parent(s):
ef4b7e8
Add starting init instructions for backend
Browse files- backend/__init__.py +14 -17
backend/__init__.py
CHANGED
|
@@ -4,18 +4,19 @@ from mysql.connector import errorcode
|
|
| 4 |
from fastapi import FastAPI, status
|
| 5 |
from fastapi.exceptions import HTTPException
|
| 6 |
|
| 7 |
-
from
|
|
|
|
| 8 |
|
| 9 |
-
from langchain.llms import CTransformers
|
| 10 |
from langchain.chains import LLMChain
|
| 11 |
-
from langchain import PromptTemplate
|
| 12 |
|
| 13 |
app = FastAPI(title="DocGup-Tea",
|
| 14 |
version="V0.0.1",
|
| 15 |
description="API for automatic code documentation generation!"
|
| 16 |
)
|
| 17 |
|
| 18 |
-
from
|
| 19 |
|
| 20 |
try:
|
| 21 |
dbconnection = DBConnection()
|
|
@@ -24,19 +25,15 @@ try:
|
|
| 24 |
# send prompt codellama-13b-instruct-GGUF model
|
| 25 |
with open("docguptea/utils/prompt.txt",'r') as f:
|
| 26 |
prompt = f.read()
|
| 27 |
-
|
| 28 |
-
|
| 29 |
-
|
| 30 |
-
|
| 31 |
-
|
| 32 |
-
|
| 33 |
-
|
| 34 |
-
|
| 35 |
-
|
| 36 |
-
temperature = 0.25,
|
| 37 |
-
repetition_penalty = 1.13,
|
| 38 |
-
stream=True,
|
| 39 |
-
gpu_layers = 10,
|
| 40 |
)
|
| 41 |
|
| 42 |
llmchain = LLMChain(
|
|
|
|
| 4 |
from fastapi import FastAPI, status
|
| 5 |
from fastapi.exceptions import HTTPException
|
| 6 |
|
| 7 |
+
from docguptea.utils import DBConnection
|
| 8 |
+
from docguptea.core.ConfigEnv import config
|
| 9 |
|
| 10 |
+
from langchain.llms import CTransformers, Clarifai
|
| 11 |
from langchain.chains import LLMChain
|
| 12 |
+
from langchain.prompts import PromptTemplate
|
| 13 |
|
| 14 |
app = FastAPI(title="DocGup-Tea",
|
| 15 |
version="V0.0.1",
|
| 16 |
description="API for automatic code documentation generation!"
|
| 17 |
)
|
| 18 |
|
| 19 |
+
from docguptea import router
|
| 20 |
|
| 21 |
try:
|
| 22 |
dbconnection = DBConnection()
|
|
|
|
| 25 |
# send prompt codellama-13b-instruct-GGUF model
|
| 26 |
with open("docguptea/utils/prompt.txt",'r') as f:
|
| 27 |
prompt = f.read()
|
| 28 |
+
|
| 29 |
+
prompt = PromptTemplate(template=prompt, input_variables=['instruction'])
|
| 30 |
+
|
| 31 |
+
llm = Clarifai(
|
| 32 |
+
pat = config.CLARIFAI_PAT,
|
| 33 |
+
user_id = config.USER_ID,
|
| 34 |
+
app_id = config.APP_ID,
|
| 35 |
+
model_id = config.MODEL_ID,
|
| 36 |
+
model_version_id=config.MODEL_VERSION_ID,
|
|
|
|
|
|
|
|
|
|
|
|
|
| 37 |
)
|
| 38 |
|
| 39 |
llmchain = LLMChain(
|