rosemariafontana committed on
Commit
f25d603
·
verified ·
1 Parent(s): 8b1ba55

Update process_data.py

Browse files
Files changed (1) hide show
  1. process_data.py +8 -0
process_data.py CHANGED
@@ -134,19 +134,26 @@ def generate_json_pieces(input_data, parameters):
134
  specification = input_data["input_text"]
135
  model_version = parameters["model_version"]
136
 
 
 
 
 
137
  if parameters["pre_prompt"] == True:
 
138
  field_data_input = input_data["input_text_pieces"]["field_data_input"]
139
  planting_data_input = input_data["input_text_pieces"]["planting_data_input"]
140
  logs_data_input = input_data["input_text_pieces"]["logs_data_input"]
141
  soil_data_input = input_data["input_text_pieces"]["soil_data_input"]
142
  yield_data_input = input_data["input_text_pieces"]["yield_data_input"]
143
  else:
 
144
  field_data_input = input_data["input_text_pieces"]["pre_processed_pieces"]["field_data_input"]
145
  planting_data_input = input_data["input_text_pieces"]["pre_processed_pieces"]["planting_data_input"]
146
  logs_data_input = input_data["input_text_pieces"]["pre_processed_pieces"]["logs_data_input"]
147
  soil_data_input = input_data["input_text_pieces"]["pre_processed_pieces"]["soil_data_input"]
148
  yield_data_input = input_data["input_text_pieces"]["pre_processed_pieces"]["yield_data_input"]
149
 
 
150
  field_prompt = "Extract the field information."
151
  plant_prompt = "Extract the planting information."
152
  log_prompt = "Extract the log information."
@@ -166,6 +173,7 @@ def generate_json_pieces(input_data, parameters):
166
 
167
  try:
168
  # Call OpenAI API to generate structured output based on prompt
 
169
  field_response = client.beta.chat.completions.parse(
170
  model=model_version, # Use GPT model that supports structured output
171
  messages=[
 
134
  specification = input_data["input_text"]
135
  model_version = parameters["model_version"]
136
 
137
+ print("Specification and Model Version")
138
+ print(specification)
139
+ print(model_version)
140
+
141
  if parameters["pre_prompt"] == True:
142
+ print("Pre prompt is true")
143
  field_data_input = input_data["input_text_pieces"]["field_data_input"]
144
  planting_data_input = input_data["input_text_pieces"]["planting_data_input"]
145
  logs_data_input = input_data["input_text_pieces"]["logs_data_input"]
146
  soil_data_input = input_data["input_text_pieces"]["soil_data_input"]
147
  yield_data_input = input_data["input_text_pieces"]["yield_data_input"]
148
  else:
149
+ print("Pre prompt is false")
150
  field_data_input = input_data["input_text_pieces"]["pre_processed_pieces"]["field_data_input"]
151
  planting_data_input = input_data["input_text_pieces"]["pre_processed_pieces"]["planting_data_input"]
152
  logs_data_input = input_data["input_text_pieces"]["pre_processed_pieces"]["logs_data_input"]
153
  soil_data_input = input_data["input_text_pieces"]["pre_processed_pieces"]["soil_data_input"]
154
  yield_data_input = input_data["input_text_pieces"]["pre_processed_pieces"]["yield_data_input"]
155
 
156
+ print("Setting prompts")
157
  field_prompt = "Extract the field information."
158
  plant_prompt = "Extract the planting information."
159
  log_prompt = "Extract the log information."
 
173
 
174
  try:
175
  # Call OpenAI API to generate structured output based on prompt
176
+ print("Getting all responses in pieces, starting with field response")
177
  field_response = client.beta.chat.completions.parse(
178
  model=model_version, # Use GPT model that supports structured output
179
  messages=[