alrichardbollans committed on
Commit
e19254b
·
1 Parent(s): ef8459c

Remove multiprocessing as it seems to be breaking

Browse files
Files changed (1) hide show
  1. python_utils/running_model.py +6 -2
python_utils/running_model.py CHANGED
@@ -110,6 +110,7 @@ def process_file(file, predictor_=None):
110
  else:
111
  # otherwise use the passed predictor
112
  prediction = predictor_(im)
 
113
  return {
114
  "filename": file["name"],
115
  "image": img_base64,
@@ -122,10 +123,13 @@ def run_predictions(files):
122
 
123
  ## When using GPU, run single instance
124
  ## Or if not checking many files, as loading multiple models isn't worthwhile
125
- if torch.cuda.is_available() or len(files) < 4:
 
 
 
 
126
  for file in files:
127
  # Run prediction with original BGR image
128
- predictor_ = load_model()
129
  prediction_output = process_file(file, predictor_=predictor_)
130
 
131
  results.append(prediction_output)
 
110
  else:
111
  # otherwise use the passed predictor
112
  prediction = predictor_(im)
113
+ print(prediction)
114
  return {
115
  "filename": file["name"],
116
  "image": img_base64,
 
123
 
124
  ## When using GPU, run single instance
125
  ## Or if not checking many files, as loading multiple models isn't worthwhile
126
+ if True:#torch.cuda.is_available() or len(files) < 4:
127
+ print(f'Using 1 process')
128
+
129
+ predictor_ = load_model()
130
+
131
  for file in files:
132
  # Run prediction with original BGR image
 
133
  prediction_output = process_file(file, predictor_=predictor_)
134
 
135
  results.append(prediction_output)