AmirMoris commited on
Commit
c93cbfc
·
1 Parent(s): 36e01c2

Feat: multiple parameters to optimize the model

Browse files
Files changed (6) hide show
  1. GoogleDrive_API.py +8 -9
  2. Helper_functions.py +86 -0
  3. Kaggle_API.py +33 -95
  4. app.py +82 -25
  5. pix2pix-model.ipynb +1 -1
  6. service_account.json +13 -0
GoogleDrive_API.py CHANGED
@@ -10,7 +10,7 @@ class GoogleDrive_API:
10
  self.PARENT_FOLDER_ID = "1r-MlnEpWHx3b1fxHDnHcZ2-Wh_Y89676"
11
 
12
  self.service = self.authenticate(SERVICE_ACCOUNT_FILE_path)
13
- self.clear_files()
14
 
15
  def authenticate(self, SERVICE_ACCOUNT_FILE):
16
  if len(SERVICE_ACCOUNT_FILE) > 0:
@@ -54,13 +54,16 @@ class GoogleDrive_API:
54
  )
55
  return results.get("files", [])
56
 
57
- def clear_files(self):
 
 
 
58
  items = self.get_files()
59
 
60
  # Delete each file
61
  for item in items:
62
  file_id = item["id"]
63
- self.service.files().delete(fileId=file_id).execute()
64
 
65
  def upload_file(self, file_name: str, file_path: str):
66
  file_metadata = {
@@ -68,11 +71,7 @@ class GoogleDrive_API:
68
  "parents": [self.PARENT_FOLDER_ID],
69
  }
70
  media = MediaFileUpload(file_path)
71
- file = (
72
- self.service.files()
73
- .create(body=file_metadata, media_body=media, fields="id")
74
- .execute()
75
- )
76
  print(rf"{file_path} uploaded.")
77
 
78
  def download_file(self, file_name: str, file_path: str):
@@ -99,4 +98,4 @@ class GoogleDrive_API:
99
  status, done = downloader.next_chunk()
100
  print(f"Download {int(status.progress() * 100)}%.")
101
  else:
102
- print(rf"Fine {file_name} is Not Found")
 
10
  self.PARENT_FOLDER_ID = "1r-MlnEpWHx3b1fxHDnHcZ2-Wh_Y89676"
11
 
12
  self.service = self.authenticate(SERVICE_ACCOUNT_FILE_path)
13
+ self.delete_all_files()
14
 
15
  def authenticate(self, SERVICE_ACCOUNT_FILE):
16
  if len(SERVICE_ACCOUNT_FILE) > 0:
 
54
  )
55
  return results.get("files", [])
56
 
57
+ def delete_file(self, file_id):
58
+ self.service.files().delete(fileId=file_id).execute()
59
+
60
+ def delete_all_files(self):
61
  items = self.get_files()
62
 
63
  # Delete each file
64
  for item in items:
65
  file_id = item["id"]
66
+ self.delete_file(file_id)
67
 
68
  def upload_file(self, file_name: str, file_path: str):
69
  file_metadata = {
 
71
  "parents": [self.PARENT_FOLDER_ID],
72
  }
73
  media = MediaFileUpload(file_path)
74
+ self.service.files().create(body=file_metadata, media_body=media, fields="id").execute()
 
 
 
 
75
  print(rf"{file_path} uploaded.")
76
 
77
  def download_file(self, file_name: str, file_path: str):
 
98
  status, done = downloader.next_chunk()
99
  print(f"Download {int(status.progress() * 100)}%.")
100
  else:
101
+ print(rf"{file_name} is Not Found")
Helper_functions.py ADDED
@@ -0,0 +1,86 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import shutil
2
+ import subprocess
3
+ import os
4
+ import json
5
+ import pytz
6
+ import random
7
+ from datetime import datetime
8
+ from PIL import Image
9
+
10
+
11
def execute_terminal_command(command: str):
    """Run *command* through the shell and return its decoded stdout.

    Args:
        command: The shell command line to execute.

    Returns:
        The command's stdout decoded as UTF-8, or ``None`` when spawning or
        communicating with the process raised.

    NOTE(review): ``shell=True`` passes the string to the shell — only call
    this with trusted command strings.
    """
    try:
        process = subprocess.Popen(
            command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE
        )
        output, error = process.communicate()
        print(rf"Command executed successfully: {command}")
        return output.decode("utf-8")
    except Exception as exc:
        # The original returned a (None, message) tuple here while the
        # success path returned a plain string; normalize the failure path
        # to None so callers always receive a single type.
        print(rf"Command failed: {exc}")
        return None
22
+
23
+
24
def correct_path(path: str):
    """Strip a single leading backslash from *path*, if one is present."""
    if path.startswith("\\"):
        return path[1:]
    return path
26
+
27
+
28
def write_file(data_list: list, file_path: str, file_name: str = ""):
    """Serialize *data_list* as JSON into a file.

    When *file_name* is non-empty it is appended to *file_path* with a
    backslash separator (Windows-style paths, as used elsewhere in this
    project); otherwise *file_path* is used as the full target path.
    """
    target = rf"{file_path}\{file_name}" if file_name else file_path

    # Writing JSON data
    with open(target, "w") as out_handle:
        json.dump(data_list, out_handle)
35
+
36
+
37
def read_file(file_path: str):
    """Load and return the JSON document stored at *file_path*."""
    with open(file_path) as handle:
        return json.load(handle)
41
+
42
+
43
def copy_file(source_path: str, destination_path: str):
    """Copy the file at *source_path* to *destination_path*.

    Raises:
        Whatever ``shutil.copyfile`` raised (e.g. ``FileNotFoundError``,
        ``shutil.SameFileError``); a diagnostic line is printed first.
    """
    try:
        shutil.copyfile(source_path, destination_path)
    except Exception:
        print(rf"Failed to copy {source_path} to {destination_path}")
        # Bare `raise` preserves the original traceback; `raise e`
        # re-anchored it at this line.
        raise
49
+
50
+
51
def read_image(image_path: str):
    """Open *image_path* with Pillow and return the Image, or None on I/O failure."""
    try:
        return Image.open(image_path)
    except IOError:
        print("Unable to load image")
        return None
58
+
59
+
60
def get_current_time():
    """Return the current time in Egypt (Africa/Cairo) as 'YYYY-MM-DD HH:MM:SS TZ'."""
    # stdlib zoneinfo (3.9+) replaces the third-party pytz dependency;
    # the formatted output ("... EET"/"... EEST") is identical.
    from zoneinfo import ZoneInfo

    current_datetime_local = datetime.now(ZoneInfo("Africa/Cairo"))
    return current_datetime_local.strftime("%Y-%m-%d %H:%M:%S %Z")
68
+
69
+
70
def get_random_str(sz: int):
    """Return a string of *sz* random decimal digits."""
    # One randint(0, 9) draw per character, same as the original loop.
    return "".join(str(random.randint(0, 9)) for _ in range(sz))
76
+
77
+
78
def create_folder(path: str, Replace_if_exist = True):
    """Create the directory *path*.

    With Replace_if_exist (the default), an existing directory at *path*
    is removed first. Any failure is reported on stdout rather than
    raised (best-effort, matching the original contract).
    """
    try:
        if Replace_if_exist and os.path.exists(path):
            shutil.rmtree(path)

        os.makedirs(path, exist_ok=False)
        print(f"Folder '{path}' created successfully.")
    except Exception as e:
        print(f"Failed to create folder '{path}'. Error: {e}")
Kaggle_API.py CHANGED
@@ -1,129 +1,77 @@
1
- import os
2
- import shutil
3
- import subprocess
4
- import json
5
  import time
6
- import pytz
7
- from datetime import datetime
8
- from PIL import Image
9
 
10
 
11
  class API_Connection:
12
- def __init__(self, GD_connection, kaggle_username: str = "", kaggle_key: str = ""):
13
  os.environ["KAGGLE_USERNAME"] = kaggle_username
14
  os.environ["KAGGLE_KEY"] = kaggle_key
15
 
16
- self.GoogleDrive_connection = GD_connection
17
 
18
- self.PROJECT_PATH = r""
19
- self.NOTEBOOK_ID = "amirmoris/pix2pix-model"
20
  self.DATASET_NAME = "dataset"
21
 
22
- def execute_terminal_command(self, command: str):
23
- try:
24
- process = subprocess.Popen(
25
- command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE
26
- )
27
- output, error = process.communicate()
28
- output = str(output.decode("utf-8"))
29
- error = str(error.decode("utf-8"))
30
- print(rf"Command executed successfully: {command}")
31
- return output
32
- except Exception as e:
33
- return None, str(e)
34
-
35
- def correct_path(self, path: str):
36
- return path[1:] if path.startswith("\\") else path
37
-
38
- def create_folder(self, path: str):
39
- path = self.correct_path(path)
40
- if os.path.exists(path):
41
- shutil.rmtree(path)
42
- self.execute_terminal_command(rf"mkdir {path}")
43
-
44
  def pull_kaggle_notebook(self, notebook_path: str):
45
  command = rf"kaggle kernels pull {self.NOTEBOOK_ID} -p {notebook_path} -m"
46
- return self.execute_terminal_command(command)
47
 
48
  def push_kaggle_notebook(self, notebook_path: str):
49
  command = rf"kaggle kernels push -p {notebook_path}"
50
- return self.execute_terminal_command(command)
51
 
52
  def get_notebook_status(self):
53
  command = rf"kaggle kernels status {self.NOTEBOOK_ID}"
54
- return self.execute_terminal_command(command)
55
 
56
  def run(self, notebook_path: str):
57
- notebook_path = self.correct_path(notebook_path)
58
  self.pull_kaggle_notebook(notebook_path)
59
  return self.push_kaggle_notebook(notebook_path)
60
 
61
- def write_file(self, data: list, file_path: str, file_name: str = ""):
62
- if len(file_name) > 0:
63
- file_path = rf"{file_path}\{file_name}"
64
-
65
- file_path = self.correct_path(file_path)
66
- # Writing JSON data
67
- with open(file_path, "w") as file:
68
- for idx in range(len(data)):
69
- json_string = json.dumps(data[idx]) + (
70
- "\n" if idx < len(data) - 1 else ""
71
- )
72
- file.write(json_string)
73
-
74
- def read_image(self, image_path: str):
75
- try:
76
- image = Image.open(image_path)
77
- return image
78
- except IOError:
79
- print("Unable to load image")
80
- return None
81
-
82
- def get_notebook_output(self, output_path: str):
83
- output_path = self.correct_path(output_path)
84
- command = rf"kaggle kernels output {self.NOTEBOOK_ID} -p {output_path}"
85
- return self.execute_terminal_command(command)
86
-
87
- def generate_image(
88
- self,
89
- input_image_name: str,
90
- edit_instruction: str,
91
- output_image_name: str,
92
- ):
93
- if len(input_image_name) == 0 or len(edit_instruction) == 0:
94
- return False, rf"Missing Input"
95
 
96
  if len(output_image_name) == 0:
97
- return False, rf"Missing Output"
98
 
99
- current_time = self.get_current_time()
100
  print(rf"Start Time : {current_time}")
101
 
102
- dataset_path = self.correct_path(rf"{self.PROJECT_PATH}\{self.DATASET_NAME}")
103
- notebook_path = self.correct_path(rf"{self.PROJECT_PATH}\notebook")
104
 
105
- self.create_folder(dataset_path)
106
 
107
  # copy image to the dataset
108
- shutil.copyfile(
109
- rf"local_dataset\{input_image_name}",
110
- rf"{dataset_path}\{input_image_name}",
111
- )
112
 
113
  data = [
114
  {
115
  "time": current_time,
116
- "status": "IDLE",
117
- "edit": edit_instruction,
118
  "input_image_path": input_image_name,
119
  "output_image_path": output_image_name,
 
 
 
 
 
120
  }
121
  ]
122
 
123
- self.write_file(data, dataset_path, "data.jsonl")
 
124
  # update dataset
125
  self.GoogleDrive_connection.upload_file(
126
- "data.jsonl", rf"{self.DATASET_NAME}\data.jsonl"
127
  )
128
  self.GoogleDrive_connection.upload_file(
129
  input_image_name, rf"{self.DATASET_NAME}\{input_image_name}"
@@ -150,23 +98,13 @@ class API_Connection:
150
  self.GoogleDrive_connection.download_file(
151
  output_image_name, rf"{dataset_path}\{output_image_name}"
152
  )
153
- output_image = self.read_image(rf"{dataset_path}\{output_image_name}")
154
- # clear input and output
155
 
156
  if output_image is None:
157
  return False, "An error occured while running, no output image found"
158
 
159
  return True, output_image
160
 
161
- def get_current_time(self):
162
- # Get the current datetime in UTC timezone
163
- current_datetime_utc = datetime.now(pytz.utc)
164
- # Convert UTC datetime to Egypt timezone
165
- egypt_timezone = pytz.timezone("Africa/Cairo")
166
- current_datetime_local = current_datetime_utc.astimezone(egypt_timezone)
167
-
168
- return str(current_datetime_local.strftime("%Y-%m-%d %H:%M:%S %Z"))
169
-
170
 
171
  def main():
172
  pass
 
 
 
 
 
1
  import time
2
+
3
+ from Helper_functions import *
 
4
 
5
 
6
  class API_Connection:
7
+ def __init__(self, gd_connection, kaggle_username: str = "", kaggle_key: str = ""):
8
  os.environ["KAGGLE_USERNAME"] = kaggle_username
9
  os.environ["KAGGLE_KEY"] = kaggle_key
10
 
11
+ self.GoogleDrive_connection = gd_connection
12
 
13
+ self.NOTEBOOK_ID = "amirmorris/pix2pix-model"
 
14
  self.DATASET_NAME = "dataset"
15
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
16
  def pull_kaggle_notebook(self, notebook_path: str):
17
  command = rf"kaggle kernels pull {self.NOTEBOOK_ID} -p {notebook_path} -m"
18
+ return execute_terminal_command(command)
19
 
20
  def push_kaggle_notebook(self, notebook_path: str):
21
  command = rf"kaggle kernels push -p {notebook_path}"
22
+ return execute_terminal_command(command)
23
 
24
  def get_notebook_status(self):
25
  command = rf"kaggle kernels status {self.NOTEBOOK_ID}"
26
+ return execute_terminal_command(command)
27
 
28
  def run(self, notebook_path: str):
 
29
  self.pull_kaggle_notebook(notebook_path)
30
  return self.push_kaggle_notebook(notebook_path)
31
 
32
+ def generate_image(self, input_image_name: str, edit_instruction: str, output_image_name: str,
33
+ steps: int, seed: int, cfgtext: float, cfgimage: float, resolution: int
34
+ ):
35
+
36
+ if len(input_image_name) == 0:
37
+ return False, rf"Missing Input: input_image"
38
+
39
+ if len(edit_instruction) == 0:
40
+ return False, rf"Missing Input: edit_instruction"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
41
 
42
  if len(output_image_name) == 0:
43
+ return False, rf"Output Error: Missing output_image path"
44
 
45
+ current_time = get_current_time()
46
  print(rf"Start Time : {current_time}")
47
 
48
+ dataset_path = correct_path(self.DATASET_NAME)
49
+ notebook_path = correct_path("notebook")
50
 
51
+ create_folder(dataset_path)
52
 
53
  # copy image to the dataset
54
+ copy_file(rf"local_dataset\{input_image_name}", rf"{dataset_path}\{input_image_name}")
 
 
 
55
 
56
  data = [
57
  {
58
  "time": current_time,
59
+ "edit_instruction": edit_instruction,
 
60
  "input_image_path": input_image_name,
61
  "output_image_path": output_image_name,
62
+ "steps": steps,
63
+ "seed": seed,
64
+ "cfg-text": cfgtext,
65
+ "cfg-image": cfgimage,
66
+ "resolution": resolution
67
  }
68
  ]
69
 
70
+ write_file(data, dataset_path, "data.json")
71
+
72
  # update dataset
73
  self.GoogleDrive_connection.upload_file(
74
+ "data.json", rf"{self.DATASET_NAME}\data.json"
75
  )
76
  self.GoogleDrive_connection.upload_file(
77
  input_image_name, rf"{self.DATASET_NAME}\{input_image_name}"
 
98
  self.GoogleDrive_connection.download_file(
99
  output_image_name, rf"{dataset_path}\{output_image_name}"
100
  )
101
+ output_image = read_image(rf"{dataset_path}\{output_image_name}")
 
102
 
103
  if output_image is None:
104
  return False, "An error occured while running, no output image found"
105
 
106
  return True, output_image
107
 
 
 
 
 
 
 
 
 
 
108
 
109
  def main():
110
  pass
app.py CHANGED
@@ -1,30 +1,63 @@
1
  import os
2
- import random
3
- from PIL import Image
4
  import gradio as gr
5
 
 
6
  from Kaggle_API import API_Connection
7
  from GoogleDrive_API import GoogleDrive_API
8
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9
 
10
- def generate_button_clicked(input_image: Image = None, edit_instruction: str = ""):
11
  if input_image is None:
12
  raise gr.Error("Missing Input: input_image")
13
- if edit_instruction == 0:
14
  raise gr.Error("Missing Input: edit_instruction")
15
 
16
- # set kaggle-api variables
17
- kaggle_username = os.environ["kaggle_username"]
18
- kaggle_key = os.environ["kaggle_key"]
19
-
20
- GoogleDrive_connection = GoogleDrive_API()
21
  api_connection = API_Connection(GoogleDrive_connection, kaggle_username, kaggle_key)
22
 
23
- api_connection.create_folder(rf"{api_connection.PROJECT_PATH}\local_dataset")
24
 
25
- image_ID = ""
26
- while len(image_ID) < 4:
27
- image_ID += str(random.randint(0, 9))
28
 
29
  input_image_name = rf"input_image_{image_ID}.png"
30
  output_image_name = rf"output_image_{image_ID}.png"
@@ -32,22 +65,17 @@ def generate_button_clicked(input_image: Image = None, edit_instruction: str = "
32
  input_image.save(rf"local_dataset\{input_image_name}")
33
 
34
  status, img = api_connection.generate_image(
35
- input_image_name, edit_instruction, output_image_name
 
36
  )
37
- print(rf"Finish Time : {api_connection.get_current_time()}")
38
- if status == False:
39
- raise gr.Error(img) # img represents the error
40
 
41
  return img
42
 
43
-
44
  def reset_button_clicked():
45
- return None, "", None
46
-
47
-
48
- SCOPES = ["https://www.googleapis.com/auth/drive"]
49
- SERVICE_ACCOUNT_FILE = "service_account.json"
50
- PARENT_FOLDER_ID = "1r-MlnEpWHx3b1fxHDnHcZ2-Wh_Y89676"
51
 
52
 
53
  def main():
@@ -69,17 +97,46 @@ def main():
69
  with gr.Row():
70
  reset_button = gr.Button("Reset")
71
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
72
  generate_button.click(
73
  fn=generate_button_clicked,
74
  inputs=[
75
  input_image,
76
  instruction,
 
 
 
 
 
 
 
77
  ],
78
  outputs=edited_image,
79
  )
80
  reset_button.click(
81
  fn=reset_button_clicked,
82
- outputs=[input_image, instruction, edited_image],
83
  )
84
 
85
  # Launch Gradio interface
 
1
  import os
2
+ import math
 
3
  import gradio as gr
4
 
5
+ from Helper_functions import *
6
  from Kaggle_API import API_Connection
7
  from GoogleDrive_API import GoogleDrive_API
8
 
9
+ DEFAULT_VALUES = {
10
+ "input_image": None,
11
+ "edit_instruction": "",
12
+ "steps": 100,
13
+ "randomize_seed": "Fix Seed",
14
+ "seed": 1371,
15
+ "randomize_cfg": "Fix CFG",
16
+ "text_cfg_scale": 7.5,
17
+ "image_cfg_scale": 1.5,
18
+ "resolution": 512,
19
+ "edited_image": None
20
+ }
21
+
22
+ def generate_button_clicked(*args):
23
+ # set kaggle-api variables
24
+ kaggle_username = os.environ["kaggle_username"]
25
+ kaggle_key = os.environ["kaggle_key"]
26
+
27
+ input_keys = list(DEFAULT_VALUES.keys())
28
+ values = dict(zip(input_keys, list(args)))
29
+
30
+ for key in values:
31
+ if values[key] is None:
32
+ values[key] = DEFAULT_VALUES[key]
33
+
34
+ if values["randomize_seed"]:
35
+ values["randomize_seed"] = random.randint(1, 100000)
36
+
37
+ if values["randomize_cfg"]:
38
+ values["text_cfg_scale"] = round(random.uniform(6.0, 9.0), ndigits=2)
39
+ values["image_cfg_scale"] = round(random.uniform(1.2, 1.8), ndigits=2)
40
+
41
+ # parameters for the model
42
+ input_image = values["input_image"]
43
+ edit_instruction = values["edit_instruction"]
44
+ steps = values["steps"]
45
+ seed = values["seed"]
46
+ cfgtext = values["text_cfg_scale"]
47
+ cfgimage = values["image_cfg_scale"]
48
+ resolution = 2 ** int(math.log2(values["resolution"]))
49
 
 
50
  if input_image is None:
51
  raise gr.Error("Missing Input: input_image")
52
+ if len(edit_instruction) == 0:
53
  raise gr.Error("Missing Input: edit_instruction")
54
 
55
+ GoogleDrive_connection = GoogleDrive_API("service_account.json")
 
 
 
 
56
  api_connection = API_Connection(GoogleDrive_connection, kaggle_username, kaggle_key)
57
 
58
+ create_folder("local_dataset")
59
 
60
+ image_ID = get_random_str(4)
 
 
61
 
62
  input_image_name = rf"input_image_{image_ID}.png"
63
  output_image_name = rf"output_image_{image_ID}.png"
 
65
  input_image.save(rf"local_dataset\{input_image_name}")
66
 
67
  status, img = api_connection.generate_image(
68
+ input_image_name, edit_instruction, output_image_name,
69
+ steps, seed, cfgtext, cfgimage, resolution
70
  )
71
+ print(rf"End Time : {get_current_time()}")
72
+ if not status:
73
+ raise gr.Error(img)
74
 
75
  return img
76
 
 
77
  def reset_button_clicked():
78
+ return list(DEFAULT_VALUES.values())
 
 
 
 
 
79
 
80
 
81
  def main():
 
97
  with gr.Row():
98
  reset_button = gr.Button("Reset")
99
 
100
+ with gr.Row():
101
+ steps = gr.Number(value=DEFAULT_VALUES["steps"], precision=0, label="Steps", interactive=True)
102
+ randomize_seed = gr.Radio(
103
+ ["Fix Seed", "Randomize Seed"],
104
+ value=DEFAULT_VALUES["randomize_seed"],
105
+ type="index",
106
+ show_label=False,
107
+ interactive=True,
108
+ )
109
+ seed = gr.Number(value=DEFAULT_VALUES["seed"], precision=0, label="Seed", interactive=True)
110
+ randomize_cfg = gr.Radio(
111
+ ["Fix CFG", "Randomize CFG"],
112
+ value=DEFAULT_VALUES["randomize_cfg"],
113
+ type="index",
114
+ show_label=False,
115
+ interactive=True,
116
+ )
117
+ text_cfg_scale = gr.Number(value=DEFAULT_VALUES["text_cfg_scale"], label=f"Text CFG", interactive=True)
118
+ image_cfg_scale = gr.Number(value=DEFAULT_VALUES["image_cfg_scale"], label=f"Image CFG", interactive=True)
119
+ resolution = gr.Number(value=DEFAULT_VALUES["resolution"], label=f"Resolution", interactive=True)
120
+
121
+
122
  generate_button.click(
123
  fn=generate_button_clicked,
124
  inputs=[
125
  input_image,
126
  instruction,
127
+ steps,
128
+ randomize_seed,
129
+ seed,
130
+ randomize_cfg,
131
+ text_cfg_scale,
132
+ image_cfg_scale,
133
+ resolution
134
  ],
135
  outputs=edited_image,
136
  )
137
  reset_button.click(
138
  fn=reset_button_clicked,
139
+ outputs=[input_image, instruction, edited_image, steps, randomize_seed, seed, randomize_cfg, text_cfg_scale, image_cfg_scale, resolution],
140
  )
141
 
142
  # Launch Gradio interface
pix2pix-model.ipynb CHANGED
@@ -1 +1 @@
1
- {"metadata":{"kernelspec":{"language":"python","display_name":"Python 3","name":"python3"},"language_info":{"pygments_lexer":"ipython3","nbconvert_exporter":"python","version":"3.6.4","file_extension":".py","codemirror_mode":{"name":"ipython","version":3},"name":"python","mimetype":"text/x-python"},"kaggle":{"accelerator":"gpu","dataSources":[{"sourceId":8109869,"sourceType":"datasetVersion","datasetId":4790518},{"sourceId":8131022,"sourceType":"datasetVersion","datasetId":4805885}],"isInternetEnabled":true,"language":"python","sourceType":"notebook","isGpuEnabled":true}},"nbformat_minor":4,"nbformat":4,"cells":[{"cell_type":"code","source":"!git clone \"https://github.com/timothybrooks/instruct-pix2pix/\"","metadata":{"_uuid":"8f2839f25d086af736a60e9eeb907d3b93b6e0e5","_cell_guid":"b1076dfc-b9ad-4769-8c92-a6c4dae69d19","trusted":true},"execution_count":null,"outputs":[]},{"cell_type":"code","source":"cd /kaggle/working/instruct-pix2pix","metadata":{},"execution_count":null,"outputs":[]},{"cell_type":"code","source":"print(\"\\nInstalling dependencies ...\")","metadata":{},"execution_count":null,"outputs":[]},{"cell_type":"code","source":"!conda env create -f /kaggle/working/instruct-pix2pix/environment.yaml > /dev/null 2>&1","metadata":{},"execution_count":null,"outputs":[]},{"cell_type":"code","source":"mkdir /root/.kaggle/","metadata":{},"execution_count":null,"outputs":[]},{"cell_type":"code","source":"cp -r /kaggle/input/api-connections/kaggle.json /root/.kaggle/","metadata":{},"execution_count":null,"outputs":[]},{"cell_type":"code","source":"cp -r /kaggle/input/api-connections/service_account.json /kaggle/working/instruct-pix2pix","metadata":{},"execution_count":null,"outputs":[]},{"cell_type":"code","source":"cp -r /kaggle/input/pix2pix-checkpoints/checkpoints /kaggle/working/instruct-pix2pix","metadata":{},"execution_count":null,"outputs":[]},{"cell_type":"code","source":"import os\nimport subprocess\nimport shutil\nimport json\nfrom PIL import 
Image\nimport matplotlib.pyplot as plt\nimport matplotlib.image as mpimg\nfrom google.oauth2 import service_account\nfrom googleapiclient.discovery import build\nfrom googleapiclient.http import MediaFileUpload, MediaIoBaseDownload\n\n\nDATASET_PATH = \"/kaggle/working/dataset\"\n\nclass GoogleDrive_API:\n def __init__(self):\n self.SCOPES = [\"https://www.googleapis.com/auth/drive\"]\n self.SERVICE_ACCOUNT_FILE = \"service_account.json\"\n self.PARENT_FOLDER_ID = \"1r-MlnEpWHx3b1fxHDnHcZ2-Wh_Y89676\"\n self.service = self.authenticate()\n\n def authenticate(self):\n credentials = service_account.Credentials.from_service_account_file(\n self.SERVICE_ACCOUNT_FILE, scopes=self.SCOPES\n )\n service = build(\"drive\", \"v3\", credentials=credentials)\n\n return service\n\n def get_files(self):\n # List all files in the folder\n results = (\n self.service.files()\n .list(\n q=f\"'{self.PARENT_FOLDER_ID}' in parents and trashed=false\",\n fields=\"files(id, name)\",\n )\n .execute()\n )\n return results.get(\"files\", [])\n\n def clear_files(self):\n items = self.get_files()\n\n # Delete each file\n for item in items:\n file_id = item[\"id\"]\n self.service.files().delete(fileId=file_id).execute()\n\n def upload_file(self, file_path):\n file_metadata = {\n \"name\": os.path.basename(file_path),\n \"parents\": [self.PARENT_FOLDER_ID],\n }\n media = MediaFileUpload(file_path)\n file = (\n self.service.files()\n .create(body=file_metadata, media_body=media, fields=\"id\")\n .execute()\n )\n print(\"File uploaded. 
File ID:\", file.get(\"id\"))\n\n def download_file(self, file_name, file_path):\n results = (\n self.service.files()\n .list(\n q=f\"'{self.PARENT_FOLDER_ID}' in parents and name='{file_name}' and trashed=false\",\n fields=\"files(id)\",\n )\n .execute()\n )\n items = results.get(\"files\", [])\n\n if items:\n # Get the file ID\n file_id = items[0][\"id\"]\n\n request = self.service.files().get_media(fileId=file_id)\n\n with open(file_path, \"wb\") as file:\n downloader = MediaIoBaseDownload(file, request)\n done = False\n while not done:\n status, done = downloader.next_chunk()\n print(f\"Download {int(status.progress() * 100)}%.\")\n else:\n print(rf\"Fine {file_name} is Not Found\")\n\ndef execute_terminal_command(command: str):\n try:\n process = subprocess.Popen(\n command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE\n )\n output, error = process.communicate()\n output = str(output.decode(\"utf-8\"))\n error = str(error.decode(\"utf-8\"))\n print(rf\"Command executed successfully: {command}\")\n return output\n except Exception as e:\n return None, str(e)\n \ndef read_file(file_path: str):\n with open(file_path, \"r\") as file:\n data = json.load(file)\n return data\n\ndef write_file(data: list, file_path: str, file_name: str = \"\"):\n # Writing JSON data\n with open(rf\"{file_path}/{file_name}\", \"w\") as file:\n for idx in range(len(data)):\n json_string = json.dumps(data[idx]) + (\"\\n\" if idx < len(data) - 1 else \"\")\n file.write(json_string)\n \ndef read_image(image_path: str):\n try:\n image = Image.open(image_path)\n return image\n except IOError:\n print(\"Unable to load image\")\n return None\n \ndef generate_image(input_image: str, edit_instruction: str, output_path: str):\n command = fr'/opt/conda/envs/ip2p/bin/python3 /kaggle/working/instruct-pix2pix/edit_cli.py --input {input_image} --output {output_path} --edit \"{edit_instruction}\"'\n return execute_terminal_command(command)\n\n\ndef main():\n if not 
os.path.exists(DATASET_PATH):\n execute_terminal_command(rf\"mkdir {DATASET_PATH}\")\n \n # download dataset\n GoogleDrive_connection = GoogleDrive_API()\n GoogleDrive_connection.download_file(\"data.jsonl\", rf\"{DATASET_PATH}/data.jsonl\")\n \n # input\n data = read_file(rf\"{DATASET_PATH}/data.jsonl\")\n # get data\n edit_instruction = data[\"edit\"]\n input_image_path = data[\"input_image_path\"]\n output_image_path = data[\"output_image_path\"]\n \n GoogleDrive_connection.download_file(input_image_path, rf\"{DATASET_PATH}/{input_image_path}\")\n\n input_image = read_image(rf\"{DATASET_PATH}/{input_image_path}\")\n # run model\n ######################################################\n generate_image(rf\"{DATASET_PATH}/{input_image_path}\", edit_instruction, rf\"{DATASET_PATH}/{output_image_path}\")\n ######################################################\n\n # update dataset\n GoogleDrive_connection.upload_file(rf\"{DATASET_PATH}/{output_image_path}\")\n \n # Load the images\n img1 = mpimg.imread(rf\"{DATASET_PATH}/{input_image_path}\")\n img2 = mpimg.imread(rf\"{DATASET_PATH}/{output_image_path}\")\n\n # Display the images side by side\n plt.figure(figsize=(10, 5))\n plt.subplot(1, 2, 1)\n plt.imshow(img1)\n plt.axis('off')\n plt.title('Input Image')\n\n plt.subplot(1, 2, 2)\n plt.imshow(img2)\n plt.axis('off')\n plt.title(rf\"Edited Image: {edit_instruction}\")\n \n plt.show()\n \nif __name__ == \"__main__\":\n main()","metadata":{},"execution_count":null,"outputs":[]},{"cell_type":"code","source":"","metadata":{},"execution_count":null,"outputs":[]}]}
 
1
+ {"metadata":{"kernelspec":{"language":"python","display_name":"Python 3","name":"python3"},"language_info":{"name":"python","version":"3.10.13","mimetype":"text/x-python","codemirror_mode":{"name":"ipython","version":3},"pygments_lexer":"ipython3","nbconvert_exporter":"python","file_extension":".py"},"kaggle":{"accelerator":"gpu","dataSources":[{"sourceId":8109869,"sourceType":"datasetVersion","datasetId":4790518},{"sourceId":8149113,"sourceType":"datasetVersion","datasetId":4805885}],"dockerImageVersionId":30683,"isInternetEnabled":true,"language":"python","sourceType":"notebook","isGpuEnabled":true}},"nbformat_minor":4,"nbformat":4,"cells":[{"cell_type":"code","source":"!git clone \"https://github.com/timothybrooks/instruct-pix2pix/\"","metadata":{"_uuid":"8f2839f25d086af736a60e9eeb907d3b93b6e0e5","_cell_guid":"b1076dfc-b9ad-4769-8c92-a6c4dae69d19","trusted":true},"execution_count":null,"outputs":[]},{"cell_type":"code","source":"cd /kaggle/working/instruct-pix2pix","metadata":{"trusted":true},"execution_count":null,"outputs":[]},{"cell_type":"code","source":"print(\"\\nInstalling Dependencies ...\")","metadata":{},"execution_count":null,"outputs":[]},{"cell_type":"code","source":"!conda env create -f /kaggle/working/instruct-pix2pix/environment.yaml > /dev/null 2>&1","metadata":{"trusted":true},"execution_count":null,"outputs":[]},{"cell_type":"code","source":"print(\"\\nDependencies Installed\")","metadata":{},"execution_count":null,"outputs":[]},{"cell_type":"code","source":"mkdir /root/.kaggle/","metadata":{"execution":{"iopub.status.busy":"2024-04-17T21:42:47.289791Z","iopub.execute_input":"2024-04-17T21:42:47.290713Z","iopub.status.idle":"2024-04-17T21:42:48.315343Z","shell.execute_reply.started":"2024-04-17T21:42:47.290662Z","shell.execute_reply":"2024-04-17T21:42:48.313748Z"},"trusted":true},"execution_count":null,"outputs":[]},{"cell_type":"code","source":"cp -r /kaggle/input/api-connections/kaggle2.json 
/root/.kaggle/kaggle.json","metadata":{"execution":{"iopub.status.busy":"2024-04-17T21:42:49.638661Z","iopub.execute_input":"2024-04-17T21:42:49.639066Z","iopub.status.idle":"2024-04-17T21:42:50.73111Z","shell.execute_reply.started":"2024-04-17T21:42:49.639033Z","shell.execute_reply":"2024-04-17T21:42:50.729935Z"},"trusted":true},"execution_count":null,"outputs":[]},{"cell_type":"code","source":"cp -r /kaggle/input/api-connections/service_account.json /kaggle/working/instruct-pix2pix","metadata":{"execution":{"iopub.status.busy":"2024-04-17T21:42:51.962757Z","iopub.execute_input":"2024-04-17T21:42:51.963135Z","iopub.status.idle":"2024-04-17T21:42:52.986254Z","shell.execute_reply.started":"2024-04-17T21:42:51.963102Z","shell.execute_reply":"2024-04-17T21:42:52.984997Z"},"trusted":true},"execution_count":null,"outputs":[]},{"cell_type":"code","source":"cp -r /kaggle/input/pix2pix-checkpoints/checkpoints /kaggle/working/instruct-pix2pix","metadata":{"execution":{"iopub.status.busy":"2024-04-17T21:42:54.554271Z","iopub.execute_input":"2024-04-17T21:42:54.554668Z","iopub.status.idle":"2024-04-17T21:44:02.002039Z","shell.execute_reply.started":"2024-04-17T21:42:54.554637Z","shell.execute_reply":"2024-04-17T21:44:02.000754Z"},"trusted":true},"execution_count":null,"outputs":[]},{"cell_type":"code","source":"import os\nimport subprocess\nimport shutil\nimport json\nfrom PIL import Image\nimport matplotlib.pyplot as plt\nimport matplotlib.image as mpimg\nfrom google.oauth2 import service_account\nfrom googleapiclient.discovery import build\nfrom googleapiclient.http import MediaFileUpload, MediaIoBaseDownload\n\n\nDATASET_PATH = \"/kaggle/working/dataset\"\n\nclass GoogleDrive_API:\n def __init__(self):\n self.SCOPES = [\"https://www.googleapis.com/auth/drive\"]\n self.SERVICE_ACCOUNT_FILE = \"service_account.json\"\n self.PARENT_FOLDER_ID = \"1r-MlnEpWHx3b1fxHDnHcZ2-Wh_Y89676\"\n self.service = self.authenticate()\n\n def authenticate(self):\n credentials = 
service_account.Credentials.from_service_account_file(\n self.SERVICE_ACCOUNT_FILE, scopes=self.SCOPES\n )\n service = build(\"drive\", \"v3\", credentials=credentials)\n\n return service\n\n def get_files(self):\n # List all files in the folder\n results = (\n self.service.files()\n .list(\n q=f\"'{self.PARENT_FOLDER_ID}' in parents and trashed=false\",\n fields=\"files(id, name)\",\n )\n .execute()\n )\n return results.get(\"files\", [])\n\n def upload_file(self, file_path):\n file_metadata = {\n \"name\": os.path.basename(file_path),\n \"parents\": [self.PARENT_FOLDER_ID],\n }\n media = MediaFileUpload(file_path)\n file = (\n self.service.files()\n .create(body=file_metadata, media_body=media, fields=\"id\")\n .execute()\n )\n print(\"File uploaded. File ID:\", file.get(\"id\"))\n\n def download_file(self, file_name, file_path):\n results = (\n self.service.files()\n .list(\n q=f\"'{self.PARENT_FOLDER_ID}' in parents and name='{file_name}' and trashed=false\",\n fields=\"files(id)\",\n )\n .execute()\n )\n items = results.get(\"files\", [])\n\n if items:\n # Get the file ID\n file_id = items[0][\"id\"]\n\n request = self.service.files().get_media(fileId=file_id)\n\n with open(file_path, \"wb\") as file:\n downloader = MediaIoBaseDownload(file, request)\n done = False\n while not done:\n status, done = downloader.next_chunk()\n print(f\"Download {int(status.progress() * 100)}%.\")\n else:\n print(rf\"{file_name} is Not Found\")\n\ndef execute_terminal_command(command: str):\n try:\n process = subprocess.Popen(\n command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE\n )\n output, error = process.communicate()\n output = str(output.decode(\"utf-8\"))\n error = str(error.decode(\"utf-8\"))\n print(rf\"Command executed successfully: {command}\")\n return output\n except Exception as e:\n print(\"Command Failed !\")\n return None, str(e)\n \ndef read_file(file_path: str):\n with open(file_path, 'r') as file:\n data = json.load(file)\n return 
data[0]\n\ndef write_file(data: list, file_path: str, file_name: str = \"\"):\n # Writing JSON data\n with open(rf\"{file_path}/{file_name}\", \"w\") as file:\n for idx in range(len(data)):\n json_string = json.dumps(data[idx]) + (\"\\n\" if idx < len(data) - 1 else \"\")\n file.write(json_string)\n \ndef read_image(image_path: str):\n try:\n image = Image.open(image_path)\n return image\n except IOError:\n print(\"Unable to load image\")\n return None\n \ndef generate_image(input_image, edit_instruction, output_path, steps, seed, cfgtext, cfgimage, resolution):\n inputs = fr'--input {input_image} --output {output_path} --edit \"{edit_instruction}\" --steps {steps} --seed {seed} --cfg-text {cfgtext} --cfg-image {cfgimage} --resolution {resolution}'\n command = fr\"/opt/conda/envs/ip2p/bin/python3 edit_cli.py {inputs}\"\n return execute_terminal_command(command)\n\n\ndef main():\n if not os.path.exists(DATASET_PATH):\n execute_terminal_command(rf\"mkdir {DATASET_PATH}\")\n \n # download dataset\n GoogleDrive_connection = GoogleDrive_API()\n GoogleDrive_connection.download_file(\"data.json\", rf\"{DATASET_PATH}/data.json\")\n \n # input\n data = read_file(rf\"{DATASET_PATH}/data.json\")\n # get data\n time = data[\"time\"]\n print(fr\"Start Time: {time}\")\n edit_instruction = data[\"edit_instruction\"]\n input_image_path = data[\"input_image_path\"]\n output_image_path = data[\"output_image_path\"]\n steps = data[\"steps\"]\n seed = data[\"seed\"]\n cfgtext = data[\"cfg-text\"]\n cfgimage = data[\"cfg-image\"]\n resolution = data[\"resolution\"]\n\n GoogleDrive_connection.download_file(input_image_path, rf\"{DATASET_PATH}/{input_image_path}\")\n\n input_image = read_image(rf\"{DATASET_PATH}/{input_image_path}\")\n # run model\n ######################################################\n generate_image(rf\"{DATASET_PATH}/{input_image_path}\", edit_instruction, rf\"{DATASET_PATH}/{output_image_path}\", steps, seed, cfgtext, cfgimage, resolution)\n 
######################################################\n \n # update dataset\n GoogleDrive_connection.upload_file(rf\"{DATASET_PATH}/{output_image_path}\")\n \n # Load the images\n img1 = mpimg.imread(rf\"{DATASET_PATH}/{input_image_path}\")\n img2 = mpimg.imread(rf\"{DATASET_PATH}/{output_image_path}\")\n\n # Display the images side by side\n plt.figure(figsize=(10, 5))\n plt.subplot(1, 2, 1)\n plt.imshow(img1)\n plt.axis('off')\n plt.title('Input Image')\n\n plt.subplot(1, 2, 2)\n plt.imshow(img2)\n plt.axis('off')\n plt.title(rf\"Edited Image: {edit_instruction}\")\n \n plt.show()\n \nif __name__ == \"__main__\":\n main()","metadata":{"execution":{"iopub.status.busy":"2024-04-17T22:04:07.306536Z","iopub.execute_input":"2024-04-17T22:04:07.306923Z","iopub.status.idle":"2024-04-17T22:06:11.475935Z","shell.execute_reply.started":"2024-04-17T22:04:07.306896Z","shell.execute_reply":"2024-04-17T22:06:11.474759Z"},"trusted":true},"execution_count":null,"outputs":[]},{"cell_type":"code","source":"","metadata":{},"execution_count":null,"outputs":[]}]}
service_account.json ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "type": "service_account",
3
+ "project_id": "graduation-project-420420",
4
+ "private_key_id": "514ceeaa4abc9a06525ab7f5b224cb9421da5c43",
5
+ "private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQDYskZd2uJSE/wN\n6xnh90qoi93WidUNN/xOloOzwlFeWk6+5/UP1OQ4WcInnIZae2wO2q4rAZL2vnsb\n7h3joQMicoqKBKQ58qPEO0gb7yXirAQW6XmTrPnJl0EakdGTQFurQynFEsjVa9fQ\nmFjTQtMXa3X48tlb5ip94TwgnNcw3+DtHmkU8Sg5RAV1+Tl8dMUEQ7F2ZwtcggOu\nDKzRcekr89f8fSBeq6S8a4UaAnVGiy03MGk6GOVQAF7LlOPGjBs97wkH7Qt9OLMp\nYcMQ3mFZOdgpJnYrP0pM1C/LASJ4ysv9GAVWKU+cpOC2zmj17HO6Qe6i6IEF82s8\nSj6bOawDAgMBAAECggEAYfMHcpInffFRnBr6ipUEtrXuMvT/jCeT0RQg+QarUApj\nEND5iUvyUvRjmhMa+dhTKk23r3HZM9Y7AI98DDHyIEaFD8j0UMs4MlGXkSTiy5h0\nisTDMzjbeb94aqGiCTzskrfbsByDElFabowGjovkre/hlgNzs6IQ7/B5BMbQUGL0\n1bXvFW9x1n5XK8Rbd3ls74F7Qob3tSMOv7zZ2MRHv/VtzOFVgobFipwG615+LJ1G\nuYtbQ2qSYdc82s/801OLVhuUDJzaaadrPORQH6ThwX+KqJeN91egVYfG1d/nwUuF\n0EqUbuUQrGJD90iobNzz/04Xoej/7Y9NGxbvE1/uKQKBgQD5x6bxsOMIwoXD9wdd\ntq2e7yiaQZ+YCwD2iwPbkkZvVBzHBe0fd2qTdl8e7F43VjJwyARcKHZ8AncjfQeG\nezvJFL2TLPi/8XEI41ZDqrH4Cb+Dsp6m5pRqIfxdoiVH10vcdyKQOKP4KOuy4+GV\n+PEdkh1qEXzZj+i2HNLGfpCJmwKBgQDeF7cbNKi2pwcTDuzjLzIZrYEkv24oO7l5\nexAqnn3xw1281kRZlilZePPJfZidMkRAidtySaxrluHXxk2Fo9ZHu7hDI6mtZ9Kq\nxJCHeakhaW0zotJ4SQN07zNnR+6+bTgh7ZisSOKnsQUkQLJC4/pShUzjdHGHE1Mf\nz6ERv0/huQKBgQDganMh/61b93m7obzqg/DmNTYjsgYz0t8go7HpmzjRWZ8pabes\nZ9xHdTjtTa0wxq83CV5xdqFNteFY70LNnvcOKSZSjztHKSRHugmNblXL5uY860UD\n63M5ECPv9HjOXS95e0iAj4MyuYFbDsdluW7ZBKiM3f1wc42/4ZG2aYxvNQKBgQDA\nY8LW6nMMZvyV4eQLmaLI46hv8PS4HaS6O4TxpoJroFXfc62Z/Z6sRz7IWOiPpC/D\nyKg1BOU7/mR9yub00cyaL1+bqZUfBkVUcRrSgO2mZ/GVPYMdeAJqlZvt6D5E79Ak\nc/JBrJHLumk90l2Z3GvIeHj2wLRn0rG7aN2axiOo4QKBgQD0f6HGlvVsc4OoqZMk\nj+ynqAmOcKtanD36GAFtRN6Ztb/7VsyX3LJe2KS71MXhnjN/hb6ug4jROjqMPI+q\nqT6zXJgRNu2n35NYDtzg8LD9J7dYM4vTeQsKF+leFT6XRQpkOBxvbgvGjaNrTCwN\nJCmUCo24od5hKOmqiQQCEMJS5g==\n-----END PRIVATE KEY-----\n",
6
+ "client_email": "graduation-project@graduation-project-420420.iam.gserviceaccount.com",
7
+ "client_id": "106719380856521616350",
8
+ "auth_uri": "https://accounts.google.com/o/oauth2/auth",
9
+ "token_uri": "https://oauth2.googleapis.com/token",
10
+ "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
11
+ "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/graduation-project%40graduation-project-420420.iam.gserviceaccount.com",
12
+ "universe_domain": "googleapis.com"
13
+ }