MAS-AI-0000 committed on
Commit
5efd7a3
·
verified ·
1 Parent(s): 516a37d

Update textPreprocess.py

Browse files
Files changed (1) hide show
  1. textPreprocess.py +5 -3
textPreprocess.py CHANGED
@@ -55,7 +55,8 @@ except Exception as e:
55
  @torch.inference_mode()
56
  def predict_text(text: str, max_length: int | None = None):
57
  if model is None or tokenizer is None:
58
- return {"predicted_class": "Human", "confidence": 0.0}
 
59
 
60
  if max_length is None:
61
  max_length = MAX_LEN
@@ -70,13 +71,14 @@ def predict_text(text: str, max_length: int | None = None):
70
  return {"predicted_class": label, "confidence": float(probs[pred_id])}
71
  except Exception as e:
72
  print(f"Error during text prediction: {e}")
73
- return {"predicted_class": "Human", "confidence": 0.0}
74
 
75
  # ── 4) Batch (optional) ────────────────────────────────────────────────────────
76
  @torch.inference_mode()
77
  def predict_batch(texts, batch_size=16):
78
  if model is None or tokenizer is None:
79
- return [{"predicted_class": "Human", "confidence": 0.0} for _ in texts]
 
80
 
81
  results = []
82
  for i in range(0, len(texts), batch_size):
 
55
  @torch.inference_mode()
56
  def predict_text(text: str, max_length: int | None = None):
57
  if model is None or tokenizer is None:
58
+ print("Issue 1")
59
+ return {"predicted_class": "Human", "confidence": -100.0}
60
 
61
  if max_length is None:
62
  max_length = MAX_LEN
 
71
  return {"predicted_class": label, "confidence": float(probs[pred_id])}
72
  except Exception as e:
73
  print(f"Error during text prediction: {e}")
74
+ return {"predicted_class": "Human", "confidence": -100.0}
75
 
76
  # ── 4) Batch (optional) ────────────────────────────────────────────────────────
77
  @torch.inference_mode()
78
  def predict_batch(texts, batch_size=16):
79
  if model is None or tokenizer is None:
80
+ print("Issue 2")
81
+ return [{"predicted_class": "Human", "confidence": -100.0} for _ in texts]
82
 
83
  results = []
84
  for i in range(0, len(texts), batch_size):