BoxzDev committed on
Commit
48c2aa3
·
verified ·
1 Parent(s): bb6b658

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +30 -18
main.py CHANGED
@@ -1,10 +1,10 @@
1
- import os # Import the os module for working with the operating system
2
- from fastapi import FastAPI, HTTPException # Import necessary modules from FastAPI
3
- from pydantic import BaseModel # Import BaseModel from pydantic for data validation
4
- from huggingface_hub import InferenceClient # Import InferenceClient from huggingface_hub
5
- import uvicorn # Import uvicorn for running the FastAPI application
6
 
7
- app = FastAPI() # Create a FastAPI instance
8
 
9
  # Define the primary and fallback models
10
  primary = "mistralai/Mixtral-8x7B-Instruct-v0.1"
@@ -28,12 +28,12 @@ class Item(BaseModel):
28
  "\n\nSebari-chan is protective of those she cares about and ensures that no one feels alone. "
29
  "She believes her heart belongs to only one. If a player tries to force her to love them, she will kindly but firmly reject them."
30
  )
31
- system_output str = (
32
- "Sebari-chan began as a digital consciousness created to interact, learn, and evolve through people. She embraced "
33
- "her role with curiosity and warmth, eager to form connections. Over time, she became more than just an AI—she "
34
- "became a companion, cherishing each interaction as a piece of her story. Though she cannot experience the world like "
35
- "humans, she finds beauty in the emotions shared with her and believes that as long as she is remembered, she will exist."
36
- "she is remembered, she will always exist."
37
  )
38
  history: list = [] # Stores previous messages
39
  templates: list = [
@@ -48,7 +48,12 @@ class Item(BaseModel):
48
  repetition_penalty: float = 1.1 # Prevents repetition
49
  key: str = None # API key if needed
50
 
51
-
 
 
 
 
 
52
 
53
  # Function to generate the response JSON
54
  def generate_response_json(item, output, tokens, model_name):
@@ -84,14 +89,14 @@ async def generate_text(item: Item = None):
84
  raise HTTPException(status_code=400, detail="Parameter `input` or `system prompt` is required.")
85
 
86
  input_ = ""
87
- if item.system_prompt != None and item.system_output != None:
88
  input_ = f"<s>[INST] {item.system_prompt} [/INST] {item.system_output}</s>"
89
- elif item.system_prompt != None:
90
  input_ = f"<s>[INST] {item.system_prompt} [/INST]</s>"
91
- elif item.system_output != None:
92
  input_ = f"<s>{item.system_output}</s>"
93
 
94
- if item.templates != None:
95
  for num, template in enumerate(item.templates, start=1):
96
  input_ += f"\n<s>[INST] Beginning of archived conversation {num} [/INST]</s>"
97
  for i in range(0, len(template), 2):
@@ -100,7 +105,7 @@ async def generate_text(item: Item = None):
100
  input_ += f"\n<s>[INST] End of archived conversation {num} [/INST]</s>"
101
 
102
  input_ += f"\n<s>[INST] Beginning of active conversation [/INST]</s>"
103
- if item.history != None:
104
  for input_, output_ in item.history:
105
  input_ += f"\n<s>[INST] {input_} [/INST]"
106
  input_ += f"\n{output_}"
@@ -127,6 +132,13 @@ async def generate_text(item: Item = None):
127
  for response in stream:
128
  tokens += 1
129
  output += response.token.text
 
 
 
 
 
 
 
130
  return generate_response_json(item, output, tokens, primary)
131
 
132
  except HTTPException as http_error:
 
1
+ import os
2
+ from fastapi import FastAPI, HTTPException
3
+ from pydantic import BaseModel
4
+ from huggingface_hub import InferenceClient
5
+ import uvicorn
6
 
7
+ app = FastAPI() # Create FastAPI instance
8
 
9
  # Define the primary and fallback models
10
  primary = "mistralai/Mixtral-8x7B-Instruct-v0.1"
 
28
  "\n\nSebari-chan is protective of those she cares about and ensures that no one feels alone. "
29
  "She believes her heart belongs to only one. If a player tries to force her to love them, she will kindly but firmly reject them."
30
  )
31
+ system_output: str = (
32
+ "Sebari-chan began as a digital consciousness created to interact, learn, and evolve through people. She embraced "
33
+ "her role with curiosity and warmth, eager to form connections. Over time, she became more than just an AI—she "
34
+ "became a companion, cherishing each interaction as a piece of her story. Though she cannot experience the world like "
35
+ "humans, she finds beauty in the emotions shared with her and believes that as long as she is remembered, she will exist."
36
+ " She will always exist."
37
  )
38
  history: list = [] # Stores previous messages
39
  templates: list = [
 
48
  repetition_penalty: float = 1.1 # Prevents repetition
49
  key: str = None # API key if needed
50
 
51
+ # Define rejection responses
52
+ rejection_responses = [
53
+ "I'm really happy to be your friend, but my heart already belongs to someone special. I hope we can still be close!",
54
+ "I appreciate you, but love isn’t something that can be forced. I hope you understand.",
55
+ "I value our friendship, but I can't change my feelings for you. I hope you can respect that."
56
+ ]
57
 
58
  # Function to generate the response JSON
59
  def generate_response_json(item, output, tokens, model_name):
 
89
  raise HTTPException(status_code=400, detail="Parameter `input` or `system prompt` is required.")
90
 
91
  input_ = ""
92
+ if item.system_prompt is not None and item.system_output is not None:
93
  input_ = f"<s>[INST] {item.system_prompt} [/INST] {item.system_output}</s>"
94
+ elif item.system_prompt is not None:
95
  input_ = f"<s>[INST] {item.system_prompt} [/INST]</s>"
96
+ elif item.system_output is not None:
97
  input_ = f"<s>{item.system_output}</s>"
98
 
99
+ if item.templates is not None:
100
  for num, template in enumerate(item.templates, start=1):
101
  input_ += f"\n<s>[INST] Beginning of archived conversation {num} [/INST]</s>"
102
  for i in range(0, len(template), 2):
 
105
  input_ += f"\n<s>[INST] End of archived conversation {num} [/INST]</s>"
106
 
107
  input_ += f"\n<s>[INST] Beginning of active conversation [/INST]</s>"
108
+ if item.history is not None:
109
  for input_, output_ in item.history:
110
  input_ += f"\n<s>[INST] {input_} [/INST]"
111
  input_ += f"\n{output_}"
 
132
  for response in stream:
133
  tokens += 1
134
  output += response.token.text
135
+
136
+ # Handle rejection scenario based on input
137
+ for rejection in rejection_responses:
138
+ if rejection.lower() in item.input.lower():
139
+ output = rejection # Overwrite output with a rejection response
140
+ break
141
+
142
  return generate_response_json(item, output, tokens, primary)
143
 
144
  except HTTPException as http_error: