No token size limit for LLM
Browse files
app.py
CHANGED
@@ -269,7 +269,7 @@ if user_message:
 
         llm_model = LLLResponseGenerator()
         temperature = 0.5
-        max_length = 128 * 4
+        max_length = None #128 * 4
 
         # Collect all messages exchanged so far into a single text string
         all_messages = "\n".join(