Spaces:
Running
Running
Update app.py
Browse files
app.py
CHANGED
|
@@ -2,23 +2,19 @@
|
|
| 2 |
import os

from ibm_watson_machine_learning.foundation_models import Model
import gradio as gr

# SECURITY: a watsonx API key and project id were previously hard-coded in
# this file and committed to a public Space. The exposed key must be rotated;
# both values are now read from the environment (Space secrets) instead.
watsonx_API = os.environ.get("WATSONX_API_KEY", "")
project_id = os.environ.get("WATSONX_PROJECT_ID", "")

# Model and project settings
model_id = "meta-llama/llama-2-13b-chat"  # Directly specifying the LLAMA2 model

# Set credentials to use the model.
# BUG FIX: the previous revision deleted `my_credentials`, `space_id` and
# `verify` while the Model(...) call below still referenced them, which
# raises NameError as soon as the module is imported. Restored here.
my_credentials = {
    "url": "https://us-south.ml.cloud.ibm.com",
    # NOTE(review): the SDK expects the API key under "apikey" in this dict;
    # the old revision omitted it entirely — confirm against the watsonx docs.
    "apikey": watsonx_API,
}

# Generation parameters
gen_parms = {
    "max_new_tokens": 512,  # Adjust as needed for the length of the cover letter
    "temperature": 0.7,     # Adjust for creativity
}

space_id = None  # Not deploying into a watsonx deployment space
verify = False   # TLS verification flag passed through to the SDK

# Initialize the model
model = Model(model_id, my_credentials, gen_parms, project_id, space_id, verify)