TiberiuCristianLeon committed
Commit 44788ec · verified · 1 Parent(s): cd52fb2

Update app.py

Files changed (1):
app.py +1 -1
app.py CHANGED
@@ -334,7 +334,7 @@ class Translators:
     prompt = pipe.tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=False)
     tokenized_input = pipe.tokenizer(self.input_text, return_tensors="pt")
     num_input_tokens = len(tokenized_input["input_ids"][0])
-    max_new_tokens = round(num_input_tokens + 0.25 * num_input_tokens)
+    max_new_tokens = round(num_input_tokens + 0.75 * num_input_tokens)
     outputs = pipe(prompt, max_new_tokens=max_new_tokens, do_sample=False)
     translated_text = outputs[0]["generated_text"]
     print(f"Input chars: {len(input_text)}", f"Input tokens: {num_input_tokens}", f"max_new_tokens: {max_new_tokens}",