R-Kentaren committed on
Commit
d61277d
·
verified ·
1 Parent(s): d0894f0

Create colab.ipynb

Browse files
Files changed (1) hide show
  1. colab.ipynb +57 -0
colab.ipynb ADDED
@@ -0,0 +1,57 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# -*- coding: utf-8 -*-
"""L3.1-Dark-Reasoning-LewdPlay-evo-Hermes-R1-Uncensored-8B.ipynb

Automatically generated by Colab.

Original file is located at
https://colab.research.google.com/DavidAU/L3.1-Dark-Reasoning-LewdPlay-evo-Hermes-R1-Uncensored-8B.ipynb

# Local Inference on GPU

Model page: https://huggingface.co/DavidAU/L3.1-Dark-Reasoning-LewdPlay-evo-Hermes-R1-Uncensored-8B

"""

# Install transformers library
# This command should be run in a Colab cell, not directly in a Python script.
# !pip install -U transformers

from transformers import pipeline

MODEL_ID = "DavidAU/L3.1-Dark-Reasoning-LewdPlay-evo-Hermes-R1-Uncensored-8B"

# Load the model once at import time.
# BUG FIX: the original assigned this to a variable named `pipeline`, which
# shadowed the imported `pipeline` factory function and made any further
# call to the factory impossible. Renamed to `generator`.
generator = pipeline("text-generation", model=MODEL_ID)


def chat_with_model(prompt, pipe=None):
    """Send a single-turn chat *prompt* to the model and return its reply.

    Parameters
    ----------
    prompt : str
        The user's message.
    pipe : callable, optional
        A text-generation pipeline to use instead of the module-level
        ``generator``. Accepting it as a parameter keeps the default
        behavior unchanged while making the function testable.

    Returns
    -------
    str
        The assistant's reply, or a fallback message when the response
        does not have the expected chat structure.
    """
    if pipe is None:
        pipe = generator
    messages = [
        {"role": "user", "content": prompt},
    ]
    response = pipe(messages)
    # Chat-style pipelines return [{"generated_text": [<message dicts>]}];
    # the last message in the conversation is the assistant's reply.
    # Guard each level because the exact structure can vary across
    # transformers versions — presumably always a list of dicts, but
    # NOTE(review): confirm against the installed transformers version.
    if response and isinstance(response, list):
        generated_text = response[0].get("generated_text")
        if generated_text and isinstance(generated_text, list):
            last_message = generated_text[-1]
            if isinstance(last_message, dict) and "content" in last_message:
                return last_message["content"]
    return "Sorry, I couldn't generate a response."


# Example usage in a Colab cell:
# print(chat_with_model("Hello, how are you?"))
# print(chat_with_model("Tell me a story."))

# To make it interactive in a Colab environment, you would typically use
# input() within a loop in a separate cell, or use Colab's form features.
# For a simple demonstration within the notebook:

for user_query in ("Hello, how are you?", "What can you do?"):
    bot_response = chat_with_model(user_query)
    print(f"User: {user_query}")
    print(f"Bot: {bot_response}")