deprecate flow_run_cache; api_output

- .gitignore +1 -1
- OpenAIChatAtomicFlow.py +2 -6
- OpenAIChatAtomicFlow.yaml +1 -0
.gitignore CHANGED
@@ -158,4 +158,4 @@ cython_debug/
 # and can be added to the global gitignore or merged into this file. For a more nuclear
 # option (not recommended) you can uncomment the following to ignore the entire idea folder.
 .idea/
-.*cache*/
+.*cache*/
OpenAIChatAtomicFlow.py CHANGED
@@ -14,7 +14,6 @@ from flows.base_flows.abstract import AtomicFlow
 
 from flows.utils import logging
 from flows.messages.flow_message import UpdateMessage_ChatMessage
-from flows.utils.caching_utils import flow_run_cache
 
 log = logging.get_logger(__name__)
 
@@ -227,11 +226,8 @@ class OpenAIChatAtomicFlow(AtomicFlow):
         self._state_update_add_chat_message(role=self.flow_config["user_name"],
                                             content=user_message_content)
 
-    @flow_run_cache()
     def run(self,
-            input_data: Dict[str, Any],
-            private_keys: Optional[List[str]] = [],
-            keys_to_ignore_for_hash: Optional[List[str]] = []) -> Dict[str, Any]:
+            input_data: Dict[str, Any]) -> Dict[str, Any]:
         # ~~~ Process input ~~~
         self._process_input(input_data)
 
@@ -242,4 +238,4 @@ class OpenAIChatAtomicFlow(AtomicFlow):
             content=response
         )
 
-        return
+        return {"api_output": response}
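The net effect of the Python change: run() drops the @flow_run_cache() decorator and its cache-related parameters (private_keys, keys_to_ignore_for_hash), and instead of returning None it returns a dictionary keyed by "api_output". A minimal sketch of the new call contract, using a hypothetical stand-in class rather than the real flow (the actual class depends on the flows package and an OpenAI backend):

from typing import Any, Dict


class FakeChatAtomicFlow:
    """Hypothetical stand-in mirroring the updated run() contract."""

    def _call(self) -> str:
        # Placeholder for the real OpenAI chat-completion call.
        return "Hello from the model"

    def run(self, input_data: Dict[str, Any]) -> Dict[str, Any]:
        # Caching is no longer handled by a @flow_run_cache() decorator here;
        # per the YAML change below it is presumably driven by the
        # enable_cache config flag instead (assumption, not shown in this diff).
        response = self._call()
        return {"api_output": response}


if __name__ == "__main__":
    flow = FakeChatAtomicFlow()
    output = flow.run({"query": "Say hello"})
    assert output["api_output"] == "Hello from the model"
    print(output)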
OpenAIChatAtomicFlow.yaml CHANGED
@@ -1,4 +1,5 @@
 # This is an abstract flow, therefore some required fields are not defined (and must be defined by the concrete flow)
+enable_cache: True
 
 model_name: "gpt-4"
 generation_parameters:
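The YAML change adds an enable_cache flag to the default config, which is presumably what replaces the removed decorator. A minimal sketch of reading that flag with PyYAML; only the keys visible in the diff are reproduced (generation_parameters is truncated there), and how the flows library actually consumes enable_cache is an assumption:

import yaml

config_text = """
# This is an abstract flow, therefore some required fields are not defined (and must be defined by the concrete flow)
enable_cache: True

model_name: "gpt-4"
generation_parameters: {}  # contents truncated in the diff
"""

config = yaml.safe_load(config_text)

if config.get("enable_cache", False):
    print("Caching enabled via config, not via the removed @flow_run_cache() decorator.")
else:
    print("Caching disabled.")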