File size: 1,457 Bytes
f7a977b
10b0dcf
6c9afb1
10b0dcf
7f209b0
b4a445c
f7a977b
b4a445c
 
f7a977b
10b0dcf
 
 
 
735d690
10b0dcf
 
 
 
 
 
f7a977b
10b0dcf
 
b4a445c
10b0dcf
6c9afb1
10b0dcf
b4a445c
10b0dcf
6c9afb1
10b0dcf
8ac11cf
10b0dcf
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
import os

import requests
from transformers import pipeline

# LangSmith credentials. SECURITY: the API key is read from the environment
# rather than hardcoded — the key previously committed here was exposed in
# source control and must be rotated.
LANGSMITH_API_KEY = os.environ.get('LANGSMITH_API_KEY', '')
LANGSMITH_ENDPOINT = 'https://smith.langchain.com/o/b7d2cb3f-e589-52bb-9b8a-2e8483e4ee8d/tailor'  # Make sure this is the correct endpoint

# Initialize the Hugging Face text generation pipeline with BlenderBot.
# NOTE(review): this downloads/loads a 3B-parameter model at import time —
# heavyweight module-level side effect; confirm that is intended.
conversational_pipeline = pipeline('text-generation', model='facebook/blenderbot-3B')

def tailor_with_langsmith(model_data):
    """POST *model_data* to the LangSmith tailor endpoint and return the parsed JSON body.

    The payload is sent as ``{'model_data': model_data}`` with a Bearer-token
    Authorization header.

    Raises:
        requests.HTTPError: on a non-2xx response (via ``raise_for_status``).
        requests.Timeout: if the request exceeds the timeout below.
    """
    headers = {
        'Authorization': f'Bearer {LANGSMITH_API_KEY}',
        'Content-Type': 'application/json',
    }
    payload = {
        'model_data': model_data
    }
    # timeout= keeps a stalled connection from hanging the caller forever
    # (requests has no default timeout).
    response = requests.post(LANGSMITH_ENDPOINT, json=payload, headers=headers, timeout=30)
    response.raise_for_status()
    return response.json()

def create_custom_conversation(prompt):
    """Generate a reply with the local BlenderBot pipeline, then tailor it via LangSmith.

    Args:
        prompt: User prompt string fed to the text-generation pipeline.

    Returns:
        The tailored reply string from LangSmith, or '' if the response JSON
        has no 'tailored_reply' key.
    """
    # Step 1: Get a draft reply from the Hugging Face model.
    hf_response = conversational_pipeline(prompt, max_length=50)  # Adjust max_length as needed
    hf_reply = hf_response[0]['generated_text']

    # Step 2: Tailor the reply via LangSmith. Pass the raw text:
    # tailor_with_langsmith already wraps its argument as {'model_data': ...},
    # so the previous call here double-nested the payload as
    # {'model_data': {'model_data': hf_reply}}.
    tailored_response = tailor_with_langsmith(hf_reply)
    tailored_reply = tailored_response.get('tailored_reply', '')

    return tailored_reply

if __name__ == '__main__':
    # Demo run: tailor a single canned prompt and show the result.
    demo_prompt = "Tell me about the latest advancements in AI."
    print("Tailored Response:", create_custom_conversation(demo_prompt))