debug
Browse files
data/models/llama3-1-70b-nvidia.py
CHANGED
|
@@ -2,6 +2,7 @@ import argparse
 from openai import OpenAI
 import os
 import json
+import base64

 # Set up argument parsing
 parser = argparse.ArgumentParser(description="Pass message content to OpenAI API")
@@ -17,7 +18,7 @@ client = OpenAI(
 # Create the completion
 completion = client.chat.completions.create(
     model="meta/llama-3.1-70b-instruct",
-    messages=json.loads(args.message),
+    messages=json.loads(base64.b64decode(args.message).decode('utf-8')),
     temperature=0.2,
     top_p=0.7,
     max_tokens=1024,