pets123 committed on
Commit
208b437
·
verified ·
1 Parent(s): 472102a

Update generator.py

Browse files
Files changed (1) hide show
  1. generator.py +1 -1
generator.py CHANGED
@@ -136,7 +136,7 @@ class Generator:
136
  curr_tokens_mask = prompt_tokens_mask.unsqueeze(0)
137
  curr_pos = torch.arange(0, prompt_tokens.size(0)).unsqueeze(0).long().to(self.device)
138
 
139
- max_seq_len = 999999999999999
140
  max_context_len = max_seq_len - max_generation_len
141
  if curr_tokens.size(1) >= max_context_len:
142
  raise ValueError(
 
136
  curr_tokens_mask = prompt_tokens_mask.unsqueeze(0)
137
  curr_pos = torch.arange(0, prompt_tokens.size(0)).unsqueeze(0).long().to(self.device)
138
 
139
+ max_seq_len = 10000
140
  max_context_len = max_seq_len - max_generation_len
141
  if curr_tokens.size(1) >= max_context_len:
142
  raise ValueError(