Neptests committed on
Commit
b8b56a4
Β·
verified Β·
1 Parent(s): 2f2fd75

Upload 2 files

Browse files
Files changed (2) hide show
  1. app.py +289 -0
  2. requirements.txt +7 -0
app.py ADDED
@@ -0,0 +1,289 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
"""
AI Detection & Humanization API - Hugging Face Spaces Version
This is a simplified Gradio interface for Hugging Face Spaces deployment
"""

import gradio as gr
from transformers import (
    AutoTokenizer,
    AutoModelForSequenceClassification,
    PegasusTokenizer,
    PegasusForConditionalGeneration
)
import torch
import json  # NOTE(review): not referenced anywhere in this file — confirm before removing
import os  # NOTE(review): not referenced anywhere in this file — confirm before removing

# Global variables for models.
# All four are populated once by load_models() at startup and then read by
# detect_ai() / humanize_text(); they stay None until that call completes.
ai_detector_model = None       # RoBERTa sequence classifier (AI-vs-human detector)
ai_detector_tokenizer = None   # tokenizer paired with the detector model
humanizer_model = None         # Pegasus conditional-generation model (paraphraser)
humanizer_tokenizer = None     # tokenizer paired with the paraphraser
# Prefer GPU when available; models and tokenized inputs are both moved here.
device = "cuda" if torch.cuda.is_available() else "cpu"
def load_models():
    """Load the AI-detection and humanization models into module globals.

    Populates ``ai_detector_model``/``ai_detector_tokenizer`` (RoBERTa
    detector) and ``humanizer_model``/``humanizer_tokenizer`` (Pegasus
    paraphraser), moves both models to ``device`` and puts them in eval mode.

    Idempotent: if both models are already loaded, returns immediately
    instead of re-downloading and re-instantiating them (the original
    reloaded unconditionally on every call).
    """
    global ai_detector_model, ai_detector_tokenizer, humanizer_model, humanizer_tokenizer

    # Already initialized — loading is slow and memory-heavy, so skip.
    if ai_detector_model is not None and humanizer_model is not None:
        return

    print("Loading AI detection model...")
    ai_detector_tokenizer = AutoTokenizer.from_pretrained("Hello-SimpleAI/chatgpt-detector-roberta")
    ai_detector_model = AutoModelForSequenceClassification.from_pretrained("Hello-SimpleAI/chatgpt-detector-roberta")
    ai_detector_model.to(device)
    ai_detector_model.eval()  # inference only — disable dropout etc.
    print("AI detection model loaded!")

    print("Loading humanization model...")
    humanizer_tokenizer = PegasusTokenizer.from_pretrained("tuner007/pegasus_paraphrase")
    humanizer_model = PegasusForConditionalGeneration.from_pretrained("tuner007/pegasus_paraphrase")
    humanizer_model.to(device)
    humanizer_model.eval()
    print("Humanization model loaded!")
def detect_ai(text):
    """Classify *text* as AI-generated or human-written.

    Returns a Markdown-formatted report with both class probabilities, a
    usage message for empty input, or an ``"Error: ..."`` string if
    inference fails.
    """
    if not text or len(text.strip()) == 0:
        return "Please enter some text to analyze."

    try:
        inputs = ai_detector_tokenizer(
            text,
            return_tensors="pt",
            truncation=True,
            max_length=512,  # RoBERTa context limit; longer input is truncated
            padding=True
        ).to(device)

        with torch.no_grad():
            outputs = ai_detector_model(**inputs)
            predictions = torch.nn.functional.softmax(outputs.logits, dim=-1)

        # BUGFIX: the original hard-coded index 0 as "AI" and index 1 as
        # "Human", but the Hello-SimpleAI/chatgpt-detector-roberta model card
        # documents id2label = {0: "Human", 1: "ChatGPT"} — i.e. the two
        # probabilities were swapped. Resolve the class indices from the
        # model config at runtime; fall back to the original assumption only
        # when the labels are uninformative (e.g. LABEL_0/LABEL_1).
        id2label = getattr(ai_detector_model.config, "id2label", {}) or {}
        human_idx = next(
            (int(i) for i, lbl in id2label.items() if "human" in str(lbl).lower()),
            None,
        )
        if human_idx is not None:
            ai_idx = 1 - human_idx  # binary classifier: the other class is AI
        else:
            ai_idx, human_idx = 0, 1  # fallback: original index assumption

        ai_prob = predictions[0][ai_idx].item() * 100
        human_prob = predictions[0][human_idx].item() * 100

        if ai_prob > human_prob:
            result = f"""🤖 **AI-Generated Text Detected**

**Confidence:** {ai_prob:.1f}%

| Metric | Value |
|--------|-------|
| AI Probability | {ai_prob:.1f}% |
| Human Probability | {human_prob:.1f}% |
| Label | AI-Generated |
"""
        else:
            result = f"""✅ **Human-Written Text Detected**

**Confidence:** {human_prob:.1f}%

| Metric | Value |
|--------|-------|
| AI Probability | {ai_prob:.1f}% |
| Human Probability | {human_prob:.1f}% |
| Label | Human-Written |
"""
        return result

    except Exception as e:
        return f"Error: {str(e)}"
def humanize_text(text):
    """Paraphrase *text* so it reads more naturally ("humanization").

    tuner007/pegasus_paraphrase is a sentence-level paraphraser (trained on
    short sentences), so the original approach of feeding the entire input
    in one pass collapsed multi-sentence text into a single short paraphrase,
    silently dropping content. Fix: split the input into sentences,
    paraphrase each independently, and rejoin the results.

    Returns the paraphrased text, a usage message for empty input, or an
    ``"Error: ..."`` string if inference fails.
    """
    import re  # local import: only needed for the sentence split

    if not text or len(text.strip()) == 0:
        return "Please enter some text to humanize."

    try:
        # Naive split on terminal punctuation; adequate for typical UI input.
        sentences = [s.strip() for s in re.split(r"(?<=[.!?])\s+", text.strip()) if s.strip()]

        humanized_parts = []
        for sentence in sentences:
            inputs = humanizer_tokenizer(
                sentence,
                return_tensors="pt",
                truncation=True,
                max_length=512,
                padding=True
            ).to(device)

            with torch.no_grad():
                outputs = humanizer_model.generate(
                    **inputs,
                    max_length=512,
                    num_beams=4,  # beam search for higher-quality paraphrases
                    early_stopping=True,
                    length_penalty=1.0
                )

            humanized_parts.append(
                humanizer_tokenizer.decode(outputs[0], skip_special_tokens=True)
            )

        return " ".join(humanized_parts)

    except Exception as e:
        return f"Error: {str(e)}"
def process_combined(text, auto_humanize):
    """Run AI detection and, optionally, humanization in one call.

    Returns a ``(detection_markdown, humanized_text)`` tuple. The second
    element is the empty string when *auto_humanize* is off, a paraphrase
    when the text was flagged as AI, or an explanatory note otherwise.
    """
    if not text or not text.strip():
        return "Please enter some text.", ""

    # Step 1: classify the input.
    detection = detect_ai(text)

    # Step 2: humanize only when requested, and only for AI-flagged text.
    humanized = ""
    if auto_humanize:
        humanized = (
            humanize_text(text)
            if "AI-Generated" in detection
            else "No humanization needed - text appears to be human-written."
        )

    return detection, humanized
# Load models at startup (module import time) so the first request is fast.
print("Initializing models (this may take a few minutes)...")
load_models()
print("Models loaded successfully!")


# Create Gradio interface
# NOTE(review): the "API Key" shown below is a hard-coded demo string that is
# only displayed in the UI — nothing in this file actually validates it.
with gr.Blocks(
    title="AI Detection & Humanization API",
    theme=gr.themes.Soft()
) as demo:

    gr.Markdown("""
    # 🤖 AI Detection & Humanization API

    Detect AI-generated text and humanize it to sound more natural.

    **Your API Key:** `sk-demo-key-12345678`

    ---
    """)

    # Tab 1: standalone AI detection — text in, Markdown report out.
    with gr.Tab("🔍 AI Detection"):
        gr.Markdown("### Detect if text is AI-generated")
        with gr.Row():
            with gr.Column():
                detect_input = gr.Textbox(
                    label="Enter text to analyze",
                    placeholder="Paste your text here...",
                    lines=6
                )
                detect_btn = gr.Button("Detect AI", variant="primary", size="lg")
            with gr.Column():
                detect_output = gr.Markdown(label="Detection Result")

        detect_btn.click(detect_ai, inputs=detect_input, outputs=detect_output)

        # One AI-sounding and one human-sounding sample for quick manual tests.
        gr.Examples(
            examples=[
                ["Artificial intelligence has revolutionized numerous industries by providing innovative solutions to complex problems. Machine learning algorithms can analyze vast amounts of data to identify patterns."],
                ["Hey! I just grabbed coffee with my friend yesterday. The weather was amazing and we had such a great time chatting!"],
            ],
            inputs=detect_input
        )

    # Tab 2: standalone humanization — text in, paraphrased text out.
    with gr.Tab("✍️ Humanization"):
        gr.Markdown("### Make AI text sound more human")
        with gr.Row():
            with gr.Column():
                humanize_input = gr.Textbox(
                    label="Enter AI-generated text to humanize",
                    placeholder="Paste AI-generated text here...",
                    lines=6
                )
                humanize_btn = gr.Button("Humanize Text", variant="primary", size="lg")
            with gr.Column():
                humanize_output = gr.Textbox(
                    label="Humanized Text",
                    lines=6
                )

        humanize_btn.click(humanize_text, inputs=humanize_input, outputs=humanize_output)

        gr.Examples(
            examples=[
                ["Artificial intelligence has revolutionized numerous industries by providing innovative solutions to complex problems."],
                ["The implementation of machine learning algorithms facilitates the optimization of business processes."],
            ],
            inputs=humanize_input
        )

    # Tab 3: combined pipeline — detect, then optionally humanize in one click.
    with gr.Tab("⚡ Combined Processing"):
        gr.Markdown("### Detect AI and humanize in one step")
        with gr.Row():
            with gr.Column():
                combined_input = gr.Textbox(
                    label="Enter text to process",
                    placeholder="Paste your text here...",
                    lines=6
                )
                auto_humanize = gr.Checkbox(
                    label="Auto-humanize if AI is detected",
                    value=True
                )
                combined_btn = gr.Button("Process Text", variant="primary", size="lg")
            with gr.Column():
                combined_detection = gr.Markdown(label="Detection Result")
                combined_humanized = gr.Textbox(label="Humanized Text", lines=4)

        # process_combined returns (detection_markdown, humanized_text).
        combined_btn.click(
            process_combined,
            inputs=[combined_input, auto_humanize],
            outputs=[combined_detection, combined_humanized]
        )

    # Tab 4: static documentation for the auto-generated Gradio REST endpoints.
    with gr.Tab("📚 API Documentation"):
        gr.Markdown("""
        ## API Endpoints

        This Space also provides REST API endpoints that you can call programmatically.

        ### Base URL
        ```
        https://neptests-ai-detection-api.hf.space
        ```

        ### 1. Detect AI Text
        ```python
        import requests

        response = requests.post(
            "https://neptests-ai-detection-api.hf.space/api/predict",
            json={"data": ["Your text here"]}
        )
        print(response.json())
        ```

        ### 2. Humanize Text
        ```python
        response = requests.post(
            "https://neptests-ai-detection-api.hf.space/api/predict_1",
            json={"data": ["AI text to humanize"]}
        )
        print(response.json())
        ```

        ### Your API Key
        ```
        sk-demo-key-12345678
        ```

        ---

        ## Features

        - ✅ **AI Detection** - Detect if text is AI-generated
        - ✅ **Text Humanization** - Convert AI text to human-like
        - ✅ **Combined Processing** - Detect and humanize together
        - ✅ **FREE to use** - No payment required

        ---

        Built with ❤️ using Gradio and Hugging Face Transformers
        """)


if __name__ == "__main__":
    demo.launch()
requirements.txt ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ flask==3.0.0
2
+ flask-cors==4.0.0
3
+ transformers==4.36.0
4
+ torch==2.1.0
5
+ sentencepiece==0.1.99
6
+ protobuf==4.25.1
7
+ gradio==4.0.0