arcticaurora committed on
Commit
276512c
·
verified ·
1 Parent(s): 248b355

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +66 -67
app.py CHANGED
@@ -142,115 +142,114 @@ def monitor_dump_size():
142
def run_dump(source_conn: str, file_path: str, options: dict):
    """Run pg_dump in a background thread"""
    try:
        # Remove a stale dump file from any previous run.
        if os.path.exists(file_path):
            os.remove(file_path)

        # Child process inherits the current environment (PG* vars etc.).
        env = os.environ.copy()

        # Assemble the pg_dump invocation; "-Fc" custom format by default.
        format_flag = "-F" + options.get("format", "c")
        cmd = ["pg_dump", source_conn, format_flag, "-v", "-f", file_path]
        if options.get("schema"):
            cmd.extend(["-n", options["schema"]])
        if options.get("compression") and options["compression"] != "default":
            cmd.extend(["-Z", options["compression"]])

        log_message(f"Starting database dump to {file_path}", "info", " ".join(cmd))

        # Background watcher that tracks the dump file as it grows.
        threading.Thread(target=monitor_dump_size, daemon=True).start()

        # Publish the "dump started" state before launching the child.
        with migration_lock:
            migration_state["start_time"] = time.time()
            migration_state["running"] = True
            migration_state["operation"] = "dump"
            migration_state["dump_file"] = file_path
            migration_state["dump_completed"] = False
            migration_state["previous_size"] = 0

        # On Unix, start the child in its own session so the killpg logic in
        # stop_current_process can terminate the whole process group.
        # preexec_fn is Unix-only; on Windows this stays None.
        session_fn = os.setsid if hasattr(os, 'setsid') else None

        process = subprocess.Popen(
            cmd,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            env=env,
            text=True,
            bufsize=1,  # line-buffered so verbose output arrives promptly
            universal_newlines=True,
            preexec_fn=session_fn,
        )

        with migration_lock:
            migration_state["process"] = process

        # pg_dump writes its verbose progress to stderr; relay each line to
        # the log and derive per-table progress from it.
        if process.stderr:
            for line in iter(process.stderr.readline, ''):
                line = line.strip()
                if not line:
                    continue
                log_message(line, "info")
                if "dumping contents of table" in line:
                    try:
                        # Table name appears between double quotes.
                        table_name = line.split('"')[1]
                        with migration_lock:
                            migration_state["progress"]["current_table"] = table_name
                            migration_state["progress"]["tables_completed"] += 1
                    except IndexError:
                        logger.warning(f"Could not parse table name from line: {line}")
                # Bail out early if the dump was stopped from elsewhere.
                with migration_lock:
                    if not migration_state["running"]:
                        break

        # Drain remaining output and collect the exit status.
        stdout, stderr = process.communicate()
        exit_code = process.returncode

        with migration_lock:
            # Only record a result if the dump was not stopped manually.
            if migration_state["running"]:
                if exit_code == 0:
                    final_size = get_file_size_mb(file_path)
                    migration_state["dump_file_size"] = final_size
                    migration_state["progress"]["current_size_mb"] = round(final_size, 2)
                    migration_state["dump_completed"] = True
                    migration_state["end_time"] = time.time()
                    total_time = migration_state["end_time"] - migration_state["start_time"]
                    log_message(
                        f"Database dump completed successfully. Size: {round(final_size, 2)} MB. Time: {round(total_time, 2)} seconds",
                        "success"
                    )
                else:
                    error_message = stderr or stdout or "Unknown error during dump"
                    log_message(f"Database dump failed: {error_message}", "error")
            migration_state["running"] = False
            migration_state["process"] = None

        return exit_code == 0

    except Exception as e:
        log_message(f"Error during database dump: {str(e)}", "error")
        with migration_lock:
            migration_state["running"] = False
            migration_state["process"] = None
        return False
255
 
256
  def run_restore(target_conn: str, file_path: str, options: dict):
 
142
def run_dump(source_conn: str, file_path: str, options: dict):
    """Run pg_dump in a background thread.

    Args:
        source_conn: libpq connection string/URI for the source database.
        file_path: Destination path for the dump file (converted to absolute).
        options: Optional settings — "format" (pg_dump -F letter, default "c"),
            "schema" (-n pattern), "compression" (-Z level; "default" omits -Z).

    Returns:
        True when pg_dump exits 0 and the dump file exists, else False.
    """
    try:
        # Convert to absolute path so pg_dump and the size monitor agree on
        # the file location regardless of the current working directory.
        absolute_file_path = os.path.abspath(file_path)

        # Log the path conversion
        log_message(f"Converting path: {file_path} -> {absolute_file_path}", "info")

        # Clear any existing file
        if os.path.exists(absolute_file_path):
            os.remove(absolute_file_path)

        # Set environment variables for connection
        env = os.environ.copy()

        # Build pg_dump command
        format_flag = "-F" + options.get("format", "c")  # Default to custom format
        cmd = ["pg_dump", source_conn, format_flag, "-v", "-f", absolute_file_path]

        # Add schema if specified
        if options.get("schema"):
            cmd.extend(["-n", options["schema"]])

        # Add compression level if specified
        if options.get("compression") and options["compression"] != "default":
            cmd.extend(["-Z", options["compression"]])

        log_message(f"Starting database dump to {absolute_file_path}", "info", " ".join(cmd))

        # Start monitoring thread for file size
        monitor_thread = threading.Thread(target=monitor_dump_size, daemon=True)
        monitor_thread.start()

        # Start the dump process
        with migration_lock:
            migration_state["start_time"] = time.time()
            migration_state["running"] = True
            migration_state["operation"] = "dump"
            migration_state["dump_file"] = absolute_file_path
            migration_state["dump_completed"] = False

        process = subprocess.Popen(
            cmd,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            env=env,
            text=True
        )

        with migration_lock:
            migration_state["process"] = process

        # Process pg_dump's verbose progress output (written to stderr).
        # BUG FIX: pg_dump -v emits lowercase 'dumping contents of table "..."',
        # so the previous check for "Dumping" never matched; its positional
        # split() could also raise IndexError, which would bubble to the outer
        # except and mark a healthy dump as failed. Parse the quoted table
        # name instead, guarded against malformed lines.
        for line in process.stderr:
            line = line.strip()
            if not line:
                continue
            # Relay verbose output so the UI log shows dump activity.
            log_message(line, "info")
            if "dumping contents of table" in line:
                try:
                    table_name = line.split('"')[1]  # table name is quoted
                except IndexError:
                    logger.warning(f"Could not parse table name from line: {line}")
                else:
                    with migration_lock:
                        migration_state["progress"]["current_table"] = table_name
                        migration_state["progress"]["tables_completed"] += 1
                    log_message(f"Dumping table: {table_name}", "info")

        # Wait for process to complete
        stdout, stderr = process.communicate()
        exit_code = process.returncode

        if exit_code == 0:
            # Verify file exists and has content
            if os.path.exists(absolute_file_path):
                final_size = os.path.getsize(absolute_file_path)

                with migration_lock:
                    migration_state["dump_file_size"] = final_size / (1024 * 1024)  # Convert to MB
                    migration_state["progress"]["current_size_mb"] = round(final_size / (1024 * 1024), 2)
                    migration_state["dump_completed"] = True
                    migration_state["end_time"] = time.time()
                    migration_state["running"] = False
                    migration_state["process"] = None
                    total_time = migration_state["end_time"] - migration_state["start_time"]

                log_message(
                    f"Database dump completed successfully. Size: {round(final_size / (1024 * 1024), 2)} MB. Time: {round(total_time, 2)} seconds",
                    "success"
                )
                return True
            else:
                # Exit code 0 but no file on disk — treat as failure.
                log_message(f"Dump completed but file not found: {absolute_file_path}", "error")

                with migration_lock:
                    migration_state["running"] = False
                    migration_state["process"] = None

                return False
        else:
            error_message = stderr or "Unknown error during dump"
            log_message(f"Database dump failed: {error_message}", "error")

            with migration_lock:
                migration_state["running"] = False
                migration_state["process"] = None

            return False

    except Exception as e:
        log_message(f"Error during database dump: {str(e)}", "error")

        with migration_lock:
            migration_state["running"] = False
            migration_state["process"] = None

        return False
254
 
255
  def run_restore(target_conn: str, file_path: str, options: dict):