lkm2835 committed
Commit 3f24f4c
1 Parent(s): 166ae06

Update app.py

Files changed (1): app.py +2 -2
app.py CHANGED
@@ -21,7 +21,7 @@ DESCRIPTION = """\
 """
 
 MAX_MAX_NEW_TOKENS = 4096
-DEFAULT_MAX_NEW_TOKENS = 128
+DEFAULT_MAX_NEW_TOKENS = 512
 MAX_INPUT_TOKEN_LENGTH = int(os.getenv("MAX_INPUT_TOKEN_LENGTH", "3840"))
 
 device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
@@ -42,7 +42,7 @@ def generate(
     message: str,
     chat_history: list[tuple[str, str]],
     system_prompt: str,
-    max_new_tokens: int = 128,
+    max_new_tokens: int = 512,
     temperature: float = 0.6,
     top_p: float = 0.9,
     top_k: int = 50,
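For context, a minimal sketch of how constants like DEFAULT_MAX_NEW_TOKENS and the generate() default typically feed a Gradio ChatInterface in apps of this shape. This is not the repository's actual app.py: the placeholder generate() body, the demo variable, and the slider labels are assumptions; only the constants and the generate() signature come from the diff above. Under that layout, this commit raises the "Max new tokens" slider's starting value from 128 to 512 while MAX_MAX_NEW_TOKENS still caps it at 4096.

# sketch.py -- illustrative only, assuming a standard Gradio ChatInterface wiring
import os

import gradio as gr

MAX_MAX_NEW_TOKENS = 4096
DEFAULT_MAX_NEW_TOKENS = 512
MAX_INPUT_TOKEN_LENGTH = int(os.getenv("MAX_INPUT_TOKEN_LENGTH", "3840"))


def generate(
    message: str,
    chat_history: list[tuple[str, str]],
    system_prompt: str,
    max_new_tokens: int = 512,
    temperature: float = 0.6,
    top_p: float = 0.9,
    top_k: int = 50,
) -> str:
    # Placeholder body: a real app would tokenize the prompt, truncate it to
    # MAX_INPUT_TOKEN_LENGTH, and call model.generate(max_new_tokens=max_new_tokens, ...).
    return f"(echo) {message} [max_new_tokens={max_new_tokens}]"


# ChatInterface passes (message, history, *additional_inputs) to generate(),
# so the slider defaults below mirror the function's keyword defaults.
demo = gr.ChatInterface(
    fn=generate,
    additional_inputs=[
        gr.Textbox(label="System prompt", lines=4),
        # The constant seeds this slider; changing DEFAULT_MAX_NEW_TOKENS moves
        # the slider's initial value from 128 to 512.
        gr.Slider(
            label="Max new tokens",
            minimum=1,
            maximum=MAX_MAX_NEW_TOKENS,
            step=1,
            value=DEFAULT_MAX_NEW_TOKENS,
        ),
        gr.Slider(label="Temperature", minimum=0.1, maximum=4.0, step=0.1, value=0.6),
        gr.Slider(label="Top-p", minimum=0.05, maximum=1.0, step=0.05, value=0.9),
        gr.Slider(label="Top-k", minimum=1, maximum=1000, step=1, value=50),
    ],
)

if __name__ == "__main__":
    demo.launch()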