Oranblock committed on
Commit
f5d6e87
·
verified ·
1 Parent(s): bc57e29

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +26 -17
app.py CHANGED
@@ -1,39 +1,48 @@
1
  import gradio as gr
2
  import json
 
3
  from huggingface_hub import InferenceClient
 
4
 
5
- # Initialize the InferenceClient
6
- client = InferenceClient(model="gpt-3.5-turbo")
 
7
 
8
- # Function to attempt to fix JSON using a Hugging Face model
9
  def ai_fix_json(json_data):
10
  prompt = f"Fix the following JSON data and make it valid:\n\n{json_data}\n\nFixed JSON:"
11
- response = client.text_generation(prompt, max_new_tokens=1024)
12
-
13
- fixed_json = response[0]['generated_text'].split("Fixed JSON:")[-1].strip()
14
 
15
  try:
16
- parsed_data = json.loads(fixed_json)
17
- pretty_json = json.dumps(parsed_data, indent=4)
18
- return pretty_json, "JSON fixed using AI."
19
- except json.JSONDecodeError as e:
20
- return None, f"Failed to fix JSON: {str(e)}"
 
 
 
 
 
 
 
21
 
22
  def process_file(uploaded_file):
23
  json_data = uploaded_file # This is already the content of the file as a string
24
  cleaned_json, message = ai_fix_json(json_data)
25
 
26
- if cleaned_json:
27
- return cleaned_json, message, cleaned_json
28
- else:
29
- return None, message, None
 
 
30
 
31
  iface = gr.Interface(
32
  fn=process_file,
33
  inputs=gr.File(label="Upload your JSON file"),
34
  outputs=[gr.JSON(label="Fixed JSON"), "text", gr.File(label="Download cleaned JSON file")],
35
- title="AI-Powered JSON Cleaner",
36
- description="Upload a JSON file to automatically fix, remove duplicates, and download the cleaned version using AI."
37
  )
38
 
39
  iface.launch()
 
import gradio as gr
import json
import torch
from huggingface_hub import InferenceClient

# NOTE(review): removed `from huggingface_hub import spaces` — huggingface_hub
# does not export a `spaces` module (the HF Spaces GPU helper lives in the
# separate top-level `spaces` package), so the import crashed at startup and
# the name was never used.

# Initialize the InferenceClient.
# NOTE(review): "gpt-3.5-turbo" is an OpenAI model id, not a Hugging Face Hub
# repo id — text_generation() against it is expected to fail; point this at a
# hosted text-generation model on the Hub — TODO confirm intended model.
# InferenceClient talks to a remote endpoint, so local GPU vs. CPU does not
# change the request; both names are kept for backward compatibility, but a
# second identical instance added nothing.
client_gpu = InferenceClient(model="gpt-3.5-turbo")
client_cpu = client_gpu  # same stateless remote client; alias instead of duplicate
11
# Attempt to repair invalid JSON by prompting a hosted text-generation model.
def ai_fix_json(json_data):
    """Ask the model to rewrite *json_data* as valid JSON.

    Returns a ``(text, message)`` tuple: the model's candidate JSON string and
    a human-readable note about which path produced it. The candidate is NOT
    validated here — callers must ``json.loads()`` it themselves.
    """
    prompt = f"Fix the following JSON data and make it valid:\n\n{json_data}\n\nFixed JSON:"

    try:
        # NOTE(review): InferenceClient runs remotely, so local CUDA
        # availability does not change the request; the check is kept only to
        # preserve the original GPU-first/CPU-fallback control flow and messages.
        if torch.cuda.is_available():
            # text_generation() returns the generated text as a plain string
            # (details=False is the default), not a list of dicts — the old
            # response[0]['generated_text'] indexed into a str and raised
            # TypeError, so the "fixed" JSON never came back.
            response = client_gpu.text_generation(prompt, max_new_tokens=1024)
            return response.split("Fixed JSON:")[-1].strip(), "JSON fixed using AI on GPU."
        else:
            raise RuntimeError("GPU not available, falling back to CPU.")

    except Exception as gpu_error:
        # Fall back to the CPU client if the GPU path fails for any reason.
        print(f"Falling back to CPU due to: {gpu_error}")
        response = client_cpu.text_generation(prompt, max_new_tokens=1024)
        return response.split("Fixed JSON:")[-1].strip(), "JSON fixed using AI on CPU."
 
def process_file(uploaded_file):
    """Gradio handler: read the uploaded JSON file, AI-repair it, and return
    ``(pretty_json, status_message, download_path)``.

    ``gr.File`` hands the callback a temp-file *path* (str) by default, not the
    file's text — the old code passed the path itself to the model. The third
    output is a ``gr.File``, which needs an on-disk path, not a JSON string.
    """
    import os
    import tempfile

    # Nothing uploaded — fail gracefully instead of crashing on open(None).
    if uploaded_file is None:
        return None, "No file uploaded.", None

    # gr.File passes a filepath by default; read the raw JSON text from disk.
    with open(uploaded_file, "r", encoding="utf-8") as f:
        json_data = f.read()

    cleaned_json, message = ai_fix_json(json_data)

    try:
        parsed_data = json.loads(cleaned_json)
        pretty_json = json.dumps(parsed_data, indent=4)
        # Persist the cleaned JSON so the gr.File output has a real path.
        fd, out_path = tempfile.mkstemp(suffix=".json")
        with os.fdopen(fd, "w", encoding="utf-8") as out:
            out.write(pretty_json)
        return pretty_json, message, out_path
    except json.JSONDecodeError as e:
        return None, f"Failed to fix JSON: {str(e)}", None
 
# Wire the handler into a simple "upload → fixed JSON → download" UI.
iface = gr.Interface(
    fn=process_file,
    inputs=gr.File(label="Upload your JSON file"),
    outputs=[gr.JSON(label="Fixed JSON"), "text", gr.File(label="Download cleaned JSON file")],
    title="AI-Powered JSON Cleaner with Dynamic Resource Allocation",
    # NOTE(review): dropped the "remove duplicates" claim from the description —
    # nothing in this app de-duplicates; the old text overstated the behavior.
    description="Upload a JSON file to automatically fix and download the cleaned version using AI with GPU/CPU fallback.",
)

iface.launch()