Modifying to try to make xformers work on Spaces
Files changed:
- app.py: +10 -1
- requirements.txt: +0 -0 (binary)
app.py CHANGED:

@@ -8,6 +8,8 @@ from haystack.components.embedders import SentenceTransformersTextEmbedder, Sent
 from haystack.components.preprocessors import DocumentSplitter
 from haystack.components.converters.txt import TextFileToDocument
 from haystack.components.preprocessors import DocumentCleaner
+from haystack_integrations.document_stores.chroma import ChromaDocumentStore
+from haystack_integrations.components.retrievers.chroma import ChromaEmbeddingRetriever

 from haystack.document_stores.in_memory import InMemoryDocumentStore
 from haystack.components.retrievers import InMemoryEmbeddingRetriever

@@ -51,6 +53,10 @@ indexing.connect("embedder", "writer")
 indexing.run({"sources": ["knowledge-plain.txt"]})


+# Chroma version (no support for overlaps in documents)
+# document_store = ChromaDocumentStore(persist_path="vstore_4012")
+
+
 ##################################
 ####### Answering pipeline #######
 ##################################
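For context on the commented-out Chroma lines above: if enabled, the indexing side would write into a persisted ChromaDocumentStore instead of the InMemoryDocumentStore. The sketch below shows roughly what that swap could look like. Only the store, the persist_path, and the indexing.connect("embedder", "writer") / indexing.run(...) calls come from this diff; the other component names, the splitter settings, and the placeholder embedding_model are assumptions about the rest of app.py.

# Rough sketch only: not the actual wiring in app.py.
from haystack import Pipeline
from haystack.components.converters.txt import TextFileToDocument
from haystack.components.embedders import SentenceTransformersDocumentEmbedder
from haystack.components.preprocessors import DocumentCleaner, DocumentSplitter
from haystack.components.writers import DocumentWriter
from haystack_integrations.document_stores.chroma import ChromaDocumentStore

embedding_model = "sentence-transformers/all-MiniLM-L6-v2"  # placeholder; app.py defines its own

# Persisted Chroma store in place of InMemoryDocumentStore
document_store = ChromaDocumentStore(persist_path="vstore_4012")

indexing = Pipeline()
indexing.add_component("converter", TextFileToDocument())
indexing.add_component("cleaner", DocumentCleaner())
# Per the comment in the diff, Chroma does not support overlapping chunks,
# so the splitter is configured without overlap here.
indexing.add_component("splitter", DocumentSplitter(split_by="word", split_length=200, split_overlap=0))
indexing.add_component("embedder", SentenceTransformersDocumentEmbedder(model=embedding_model))
indexing.add_component("writer", DocumentWriter(document_store=document_store))

indexing.connect("converter", "cleaner")
indexing.connect("cleaner", "splitter")
indexing.connect("splitter", "embedder")
indexing.connect("embedder", "writer")  # this connection appears verbatim in the diff context

indexing.run({"sources": ["knowledge-plain.txt"]})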
@@ -124,11 +130,14 @@ llm = OpenAIGenerator(

 router = ConditionalRouter(routes=routes)

-embedder = SentenceTransformersTextEmbedder(model=embedding_model)
+embedder = SentenceTransformersTextEmbedder(model=embedding_model, trust_remote_code=True)

 # Again: in memory for now
 retriever = InMemoryEmbeddingRetriever(document_store)

+# Chroma
+# retriever = ChromaEmbeddingRetriever(document_store=document_store)
+
 prompt_builder2 = PromptBuilder(template=query_prompt_template)

 llm2 = OpenAIGenerator(
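Two things change in the answering pipeline: the query embedder gains trust_remote_code=True (needed when the embedding model ships its own modeling code, which is likely why xformers, the subject of this commit, is involved), and a Chroma-backed retriever is sketched in but left commented out. The snippet below shows both pieces in isolation as a hedged sketch: the model name, the warm-up/run calls, and the top_k value are illustrative assumptions, not lines from app.py.

# Illustrative sketch, not code from app.py.
from haystack.components.embedders import SentenceTransformersTextEmbedder
from haystack_integrations.components.retrievers.chroma import ChromaEmbeddingRetriever
from haystack_integrations.document_stores.chroma import ChromaDocumentStore

# Placeholder: an embedding model that requires custom modeling code,
# hence trust_remote_code=True (app.py defines its own embedding_model).
embedding_model = "jinaai/jina-embeddings-v2-base-en"

embedder = SentenceTransformersTextEmbedder(
    model=embedding_model,
    trust_remote_code=True,  # allow the model's own code to run, as in the diff above
)
embedder.warm_up()  # download and load the model
query_embedding = embedder.run(text="What does the knowledge base say?")["embedding"]

# The Chroma path that app.py keeps commented out: retrieve against the
# persisted store written by the indexing sketch above.
document_store = ChromaDocumentStore(persist_path="vstore_4012")
retriever = ChromaEmbeddingRetriever(document_store=document_store)
docs = retriever.run(query_embedding=query_embedding, top_k=5)["documents"]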
requirements.txt CHANGED:

Binary files a/requirements.txt and b/requirements.txt differ