Uglevod7 committed
Commit
9ebdda8
1 Parent(s): 8847f91

Update app.py

Files changed (1):
  1. app.py  +2 -2
app.py CHANGED
@@ -26,7 +26,7 @@ prompt_template=f'''Below is an instruction that describes a task. Write a respo
 print("\n\n*** Generate:")
 
 input_ids = tokenizer(prompt_template, return_tensors='pt').input_ids.cuda()
-output = model.generate(inputs=input_ids, temperature=0.7, do_sample=True, top_p=0.95, top_k=40, max_new_tokens=512,disable_exllama=True)
+output = model.generate(inputs=input_ids, temperature=0.7, do_sample=True, top_p=0.95, top_k=40, max_new_tokens=512)
 print(tokenizer.decode(output[0]))
 
 # Inference can also be done using transformers' pipeline
@@ -41,7 +41,7 @@ pipe = pipeline(
 temperature=0.7,
 top_p=0.95,
 top_k=40,
-repetition_penalty=1.1
+repetition_penalty=1.1 , disable_exllama=True
 )
 
 print(pipe(prompt_template)[0]['generated_text'])
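
The net effect of the commit is that disable_exllama=True is dropped from the model.generate(...) call and appended to the pipeline(...) arguments instead. For reference only, the more usual place to switch the ExLlama kernels off for a GPTQ model in transformers is the quantization config at load time, not the generation call. The sketch below is not part of this commit: it assumes a recent transformers with optimum and auto-gptq installed, the model id and prompt are placeholders, and depending on the transformers version the flag is disable_exllama=True or use_exllama=False.

# Sketch only (not part of this commit): toggle ExLlama kernels via GPTQConfig at load time.
from transformers import AutoModelForCausalLM, AutoTokenizer, GPTQConfig

model_id = "TheBloke/Llama-2-7B-GPTQ"  # placeholder model id for illustration
quant_config = GPTQConfig(bits=4, disable_exllama=True)  # newer versions: use_exllama=False

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    quantization_config=quant_config,
    device_map="auto",
)

# Generation itself then takes only generation arguments, as in the edited app.py.
input_ids = tokenizer("Write a short poem.", return_tensors="pt").input_ids.to(model.device)
output = model.generate(input_ids, do_sample=True, temperature=0.7, top_p=0.95, top_k=40, max_new_tokens=512)
print(tokenizer.decode(output[0]))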