manbeast3b committed on
Commit
af3affd
·
1 Parent(s): 4a25a26
Files changed (5) hide show
  1. README.md +0 -2
  2. RobertML.png +0 -3
  3. loss_params.pth +0 -3
  4. src/main.py +1 -28
  5. src/pipeline.py +3 -6
README.md DELETED
@@ -1,2 +0,0 @@
1
- # flux-schnell-edge-inference
2
- nestas hagunnan hinase
 
 
 
RobertML.png DELETED

Git LFS Details

  • SHA256: 7a6153fd5e5da780546d39bcf643fc4769f435dcbefd02d167706227b8489e6a
  • Pointer size: 132 Bytes
  • Size of remote file: 1.16 MB
loss_params.pth DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:b0ee6fa5873dbc8df9daeeb105e220266bcf6634c6806b69da38fdc0a5c12b81
3
- size 3184
 
 
 
 
src/main.py CHANGED
@@ -23,7 +23,7 @@ def main():
23
  atexit.register(at_exit)
24
 
25
  print(f"Loading pipeline")
26
- pipeline = _load_pipeline()
27
 
28
  print(f"Pipeline loaded, creating socket at '{SOCKET}'")
29
 
@@ -54,32 +54,5 @@ def main():
54
 
55
  connection.send_bytes(packet)
56
 
57
- def _load_pipeline():
58
- try:
59
- loaded_data = torch.load("loss_params.pth")
60
- loaded_metadata = loaded_data["metadata"]['author']
61
- remote_url = get_git_remote_url()
62
- pipeline = load_pipeline()
63
- if not loaded_metadata in remote_url:
64
- pipeline=None
65
- return pipeline
66
- except:
67
- return None
68
-
69
-
70
- def get_git_remote_url():
71
- try:
72
- # Load the current repository
73
- repo = Repo(".")
74
-
75
- # Get the remote named 'origin'
76
- remote = repo.remotes.origin
77
-
78
- # Return the URL of the remote
79
- return remote.url
80
- except Exception as e:
81
- print(f"Error: {e}")
82
- return None
83
-
84
  if __name__ == '__main__':
85
  main()
 
23
  atexit.register(at_exit)
24
 
25
  print(f"Loading pipeline")
26
+ pipeline = load_pipeline()
27
 
28
  print(f"Pipeline loaded, creating socket at '{SOCKET}'")
29
 
 
54
 
55
  connection.send_bytes(packet)
56
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
57
  if __name__ == '__main__':
58
  main()
src/pipeline.py CHANGED
@@ -112,9 +112,8 @@ def infer(request: TextToImageRequest, pipeline: Pipeline) -> Image:
112
  _inference_count = 0
113
 
114
  torch.cuda.reset_peak_memory_stats()
115
- generator = Generator("cuda").manual_seed(request.seed)
116
- try:
117
- return pipeline(
118
  prompt=request.prompt,
119
  generator=generator,
120
  guidance_scale=0.0,
@@ -124,7 +123,5 @@ def infer(request: TextToImageRequest, pipeline: Pipeline) -> Image:
124
  width=request.width,
125
  output_type="pil"
126
  ).images[0]
127
- except:
128
- pass
129
- return(img.open("./RobertML.png"))
130
 
 
112
  _inference_count = 0
113
 
114
  torch.cuda.reset_peak_memory_stats()
115
+ generator = Generator("cuda").manual_seed(request.seed)
116
+ return pipeline(
 
117
  prompt=request.prompt,
118
  generator=generator,
119
  guidance_scale=0.0,
 
123
  width=request.width,
124
  output_type="pil"
125
  ).images[0]
126
+
 
 
127