Oranblock committed on
Commit
29b1b2b
·
verified ·
1 Parent(s): c2ec9aa

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -19
app.py CHANGED
@@ -1,47 +1,33 @@
1
  import gradio as gr
2
  import json
3
- from transformers import pipeline
4
- from huggingface_hub import InferenceApi
5
 
6
- # Set up the Inference API to use a GPU in the Hugging Face Space
7
- inference = InferenceApi(repo_id="gpt-3.5-turbo", task="text-generation", use_gpu=True)
8
 
9
  # Function to attempt to fix JSON using a Hugging Face model with GPU
10
  def ai_fix_json(json_data):
11
- # Prepare a prompt for the AI model to fix the JSON
12
  prompt = f"Fix the following JSON data and make it valid:\n\n{json_data}\n\nFixed JSON:"
13
-
14
- # Generate a response from the model using GPU
15
- response = inference(inputs=prompt, max_length=1024, num_return_sequences=1)
16
-
17
- # Extract the fixed JSON from the response (you may need to fine-tune this depending on the model's output format)
18
  fixed_json = response[0]['generated_text'].split("Fixed JSON:")[-1].strip()
19
 
20
- # Try to load the fixed JSON to ensure it's valid
21
  try:
22
  parsed_data = json.loads(fixed_json)
23
  pretty_json = json.dumps(parsed_data, indent=4)
24
  return pretty_json, "JSON fixed using AI with GPU."
25
-
26
  except json.JSONDecodeError as e:
27
  return None, f"Failed to fix JSON: {str(e)}"
28
 
29
- # Function to handle file uploads and processing
30
  def process_file(uploaded_file):
31
- # Read the content of the uploaded file directly
32
  json_data = uploaded_file # This is already the content of the file as a string
33
-
34
- # Attempt to fix the JSON using AI
35
  cleaned_json, message = ai_fix_json(json_data)
36
 
37
  if cleaned_json:
38
- # Return the fixed JSON for display and as a downloadable file
39
  return cleaned_json, message, cleaned_json
40
-
41
  else:
42
  return None, message, None
43
 
44
- # Gradio interface
45
  iface = gr.Interface(
46
  fn=process_file,
47
  inputs=gr.File(label="Upload your JSON file"),
 
1
  import gradio as gr
2
  import json
3
+ from huggingface_hub import InferenceClient
 
4
 
5
# Hugging Face InferenceClient used for remote text generation.
# NOTE(review): InferenceClient() does not accept a `use_gpu` keyword —
# passing it raises TypeError at import time. The hardware is chosen by
# the hosting backend, not by the client.
# NOTE(review): "gpt-3.5-turbo" is an OpenAI model id, not a model hosted
# on the Hugging Face Inference API — TODO: confirm and replace with a
# hosted text-generation model id.
client = InferenceClient(model="gpt-3.5-turbo")
7
 
8
# Function to attempt to fix JSON using a Hugging Face model with GPU
def ai_fix_json(json_data):
    """Ask the remote model to repair *json_data* and validate the result.

    Parameters
    ----------
    json_data : str
        Raw (possibly malformed) JSON text.

    Returns
    -------
    tuple[str | None, str]
        (pretty-printed valid JSON, status message) on success, or
        (None, error message) when the model output still fails to parse.
    """
    prompt = f"Fix the following JSON data and make it valid:\n\n{json_data}\n\nFixed JSON:"

    # InferenceClient.text_generation() returns the generated text as a
    # plain string (when streaming and details are off), NOT a list of
    # dicts. The original `response[0]['generated_text']` would index the
    # first character of that string and raise TypeError.
    response = client.text_generation(prompt, max_new_tokens=1024)

    # Keep only the text after the "Fixed JSON:" marker, in case the model
    # echoed the prompt back.
    fixed_json = response.split("Fixed JSON:")[-1].strip()

    # Validate by re-parsing, and pretty-print so callers get canonical JSON.
    try:
        parsed_data = json.loads(fixed_json)
        pretty_json = json.dumps(parsed_data, indent=4)
        return pretty_json, "JSON fixed using AI with GPU."
    except json.JSONDecodeError as e:
        return None, f"Failed to fix JSON: {str(e)}"
21
 
 
22
  def process_file(uploaded_file):
 
23
  json_data = uploaded_file # This is already the content of the file as a string
 
 
24
  cleaned_json, message = ai_fix_json(json_data)
25
 
26
  if cleaned_json:
 
27
  return cleaned_json, message, cleaned_json
 
28
  else:
29
  return None, message, None
30
 
 
31
  iface = gr.Interface(
32
  fn=process_file,
33
  inputs=gr.File(label="Upload your JSON file"),