Switch hosted model providers: support OpenAI, Groq, and Anthropic
Browse files — hype_pack/utils/nodes.py (+8 −8)
hype_pack/utils/nodes.py
CHANGED
|
@@ -169,18 +169,18 @@ def generate_transcript_node(interview_state: InterviewState, speaker_profile: d
|
|
| 169 |
# model="gpt-4o-mini",
|
| 170 |
# temperature=0.6
|
| 171 |
# ).with_structured_output(HypeCastTranscript)
|
| 172 |
-
llm = ChatGroq(
|
| 173 |
-
|
| 174 |
-
|
| 175 |
-
).with_structured_output(HypeCastTranscript)
|
| 176 |
# llm = ChatFireworks(
|
| 177 |
# model="accounts/fireworks/models/llama-v3p1-70b-instruct",
|
| 178 |
# temperature=0.1
|
| 179 |
# ).with_structured_output(HypeCastTranscript)
|
| 180 |
-
|
| 181 |
-
|
| 182 |
-
|
| 183 |
-
|
| 184 |
|
| 185 |
prompt = ChatPromptTemplate.from_messages([
|
| 186 |
("system", f"""You are speaking directly TO the candidate about why they should be excited about THIS specific opportunity.
|
|
|
|
| 169 |
# model="gpt-4o-mini",
|
| 170 |
# temperature=0.6
|
| 171 |
# ).with_structured_output(HypeCastTranscript)
|
| 172 |
+
# llm = ChatGroq(
|
| 173 |
+
# model="llama-3.1-70b-versatile",
|
| 174 |
+
# temperature=0.6
|
| 175 |
+
# ).with_structured_output(HypeCastTranscript)
|
| 176 |
# llm = ChatFireworks(
|
| 177 |
# model="accounts/fireworks/models/llama-v3p1-70b-instruct",
|
| 178 |
# temperature=0.1
|
| 179 |
# ).with_structured_output(HypeCastTranscript)
|
| 180 |
+
llm = ChatAnthropic(
|
| 181 |
+
model="claude-3-5-haiku-20241022",
|
| 182 |
+
temperature=0.4
|
| 183 |
+
).with_structured_output(HypeCastTranscript)
|
| 184 |
|
| 185 |
prompt = ChatPromptTemplate.from_messages([
|
| 186 |
("system", f"""You are speaking directly TO the candidate about why they should be excited about THIS specific opportunity.
|