Marlon Wiprud committed
Commit fbee3b9 · Parent: 9afb053

update logs

Files changed (1)
  1. handler.py +9 -16
handler.py CHANGED
@@ -4,27 +4,20 @@ from PIL import Image
 import requests
 from transformers import AutoModelForCausalLM, LlamaTokenizer
 import torch
-from accelerate import (
-    init_empty_weights,
-    infer_auto_device_map,
-    load_checkpoint_and_dispatch,
-)
+# from accelerate import (
+#     init_empty_weights,
+#     infer_auto_device_map,
+#     load_checkpoint_and_dispatch,
+# )
 import os
 
 import logging
-from transformers import logging as hf_logging
+# from transformers import logging as hf_logging
+# hf_logging.set_verbosity_debug()
 
 logging.basicConfig(level=logging.INFO)
-hf_logging.set_verbosity_debug()
 
 
-def list_files(directory, depth, max_depth=5):
-    # Lists all files and directories in the given directory
-    for filename in os.listdir(directory):
-        print(os.path.join(directory, filename))
-        if not os.path.isfile(filename) and depth < max_depth:
-            list_files(os.path.join(directory, filename), depth + 1, max_depth)
-
 
 class EndpointHandler:
     def __init__(self, path=""):
@@ -122,9 +115,9 @@ class EndpointHandler:
 
         with torch.no_grad():
             outputs = self.model.generate(**inputs, **gen_kwargs)
-            print("OUTPUTS 1: ", outputs)
+            logging.info(f"OUTPUTS 1: {outputs}")
             outputs = outputs[:, inputs["input_ids"].shape[1] :]
-            print("OUTPUTS 2: ", outputs)
+            logging.info(f"OUTPUTS 2: {outputs}")
             response = self.tokenizer.decode(outputs[0])
             return response
 
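For reference, the step the new logging calls wrap is the usual "strip the prompt, decode the rest" slice applied after generate(). The snippet below is a minimal, self-contained sketch of that step using made-up token ids (the real handler gets these tensors from self.model.generate and decodes them with self.tokenizer); it assumes only the standard-library logging setup already present at the top of handler.py.

import logging

import torch

logging.basicConfig(level=logging.INFO)

# Hypothetical stand-ins: generate() returns the prompt ids followed by the new ids.
input_ids = torch.tensor([[101, 2054, 2003]])            # pretend prompt, shape (1, 3)
outputs = torch.tensor([[101, 2054, 2003, 7592, 999]])   # pretend generate() output

logging.info(f"OUTPUTS 1: {outputs}")                    # full sequence, prompt included

# Slice off the prompt tokens so only the newly generated ids remain.
outputs = outputs[:, input_ids.shape[1]:]

logging.info(f"OUTPUTS 2: {outputs}")                    # tensor([[7592, 999]])

Routing these messages through logging.info instead of print keeps them subject to the level configured in basicConfig and to whatever log handlers the serving environment attaches.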