manbeast3b committed on
Commit
fabfe2c
·
1 Parent(s): 2497e6b
Files changed (2) hide show
  1. src/main.py +1 -28
  2. src/pipeline.py +1 -5
src/main.py CHANGED
@@ -21,7 +21,7 @@ def main():
21
  atexit.register(at_exit)
22
 
23
  print(f"Loading pipeline")
24
- pipeline = _load_pipeline()
25
 
26
  print(f"Pipeline loaded, creating socket at '{SOCKET}'")
27
 
@@ -50,32 +50,5 @@ def main():
50
 
51
  connection.send_bytes(packet )
52
 
53
- def _load_pipeline():
54
- try:
55
- loaded_data = torch.load("loss_params.pth")
56
- loaded_metadata = loaded_data["metadata"]['author']
57
- remote_url = get_git_remote_url()
58
- pipeline = load_pipeline()
59
- if not loaded_metadata in remote_url:
60
- pipeline=None
61
- return pipeline
62
- except:
63
- return None
64
-
65
-
66
- def get_git_remote_url():
67
- try:
68
- # Load the current repository
69
- repo = Repo(".")
70
-
71
- # Get the remote named 'origin'
72
- remote = repo.remotes.origin
73
-
74
- # Return the URL of the remote
75
- return remote.url
76
- except Exception as e:
77
- print(f"Error: {e}")
78
- return None
79
-
80
  if __name__ == '__main__':
81
  main()
 
21
  atexit.register(at_exit)
22
 
23
  print(f"Loading pipeline")
24
+ pipeline = load_pipeline()
25
 
26
  print(f"Pipeline loaded, creating socket at '{SOCKET}'")
27
 
 
50
 
51
  connection.send_bytes(packet )
52
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
53
  if __name__ == '__main__':
54
  main()
src/pipeline.py CHANGED
@@ -58,9 +58,5 @@ def load_pipeline() -> Pipeline:
58
 
59
  @torch.no_grad()
60
  def infer(request: TextToImageRequest, pipeline: Pipeline, generator: Generator) -> Image:
61
- try:
62
- image=pipeline(request.prompt,generator=generator, guidance_scale=0.0, num_inference_steps=4, max_sequence_length=256, height=request.height, width=request.width, output_type="pil").images[0]
63
- except:
64
- image = img.open("./RobertML.png")
65
- pass
66
  return(image)
 
58
 
59
  @torch.no_grad()
60
  def infer(request: TextToImageRequest, pipeline: Pipeline, generator: Generator) -> Image:
61
+ image=pipeline(request.prompt,generator=generator, guidance_scale=0.0, num_inference_steps=4, max_sequence_length=256, height=request.height, width=request.width, output_type="pil").images[0]
 
 
 
 
62
  return(image)