Upload 18 files
- LICENSE +201 -0
- README.md +76 -7
- Rock_fact_checker.py +125 -0
- app_utils/__pycache__/backend_utils.cpython-39.pyc +0 -0
- app_utils/backend_utils.py +67 -0
- app_utils/config.py +16 -0
- app_utils/entailment_checker.py +109 -0
- app_utils/frontend_utils.py +120 -0
- data/.DS_Store +0 -0
- data/index/.DS_Store +0 -0
- data/index/faiss_document_store.db +0 -0
- data/index/my_faiss_index.faiss +0 -0
- data/index/my_faiss_index.json +1 -0
- data/statements.txt +1 -0
- notebooks/get_wikipedia_data.ipynb +582 -0
- notebooks/indexing.ipynb +417 -0
- pages/Info.py +10 -0
- requirements.txt +6 -0
LICENSE
ADDED
@@ -0,0 +1,201 @@
                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "[]"
      replaced with your own identifying information. (Don't include
      the brackets!) The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright [yyyy] [name of copyright owner]

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
README.md
CHANGED
@@ -1,12 +1,81 @@
 ---
-title:
+title: Fact Checking rocks!
-emoji:
+emoji: 🎸
 colorFrom: purple
-colorTo:
+colorTo: blue
 sdk: streamlit
-sdk_version: 1.
+sdk_version: 1.10.0
-app_file:
+app_file: Rock_fact_checker.py
-pinned:
+pinned: true
+models: [sentence-transformers/msmarco-distilbert-base-tas-b, microsoft/deberta-v2-xlarge-mnli]
+license: apache-2.0
 ---

# Fact Checking 🎸 Rocks! [![Generic badge](https://img.shields.io/badge/🤗-Open%20in%20Spaces-blue.svg)](https://huggingface.co/spaces/anakin87/fact-checking-rocks) [![Generic badge](https://img.shields.io/github/stars/anakin87/fact-checking-rocks?label=Github&style=social)](https://github.com/anakin87/fact-checking-rocks)

## *Fact checking baseline combining dense retrieval and textual entailment*

- [Fact Checking 🎸 Rocks! ](#fact-checking--rocks---)
  - [*Fact checking baseline combining dense retrieval and textual entailment*](#fact-checking-baseline-combining-dense-retrieval-and-textual-entailment)
    - [Idea](#idea)
    - [System description](#system-description)
      - [Indexing pipeline](#indexing-pipeline)
      - [Search pipeline](#search-pipeline)
    - [Limits and possible improvements](#limits-and-possible-improvements)
    - [Repository structure](#repository-structure)
    - [Installation](#installation)

### Idea
💡 This project aims to show that a *naive and simple baseline* for fact checking can be built by combining dense retrieval and a textual entailment task.
In a nutshell, the flow is as follows:
* the user enters a factual statement
* the relevant passages are retrieved from the knowledge base using dense retrieval
* the system computes the textual entailment between each relevant passage and the statement, using a Natural Language Inference model
* the entailment scores are aggregated to produce a summary score.

### System description
🪄 This project is strongly based on [🔎 Haystack](https://github.com/deepset-ai/haystack), an open source NLP framework for building search systems. The main components of our system are an indexing pipeline and a search pipeline.

#### Indexing pipeline
* [Crawling](https://github.com/anakin87/fact-checking-rocks/blob/321ba7893bbe79582f8c052493acfda497c5b785/notebooks/get_wikipedia_data.ipynb): crawl data from Wikipedia, starting from the page [List of mainstream rock performers](https://en.wikipedia.org/wiki/List_of_mainstream_rock_performers) and using the [Python wrapper](https://github.com/goldsmith/Wikipedia)
* [Indexing](https://github.com/anakin87/fact-checking-rocks/blob/321ba7893bbe79582f8c052493acfda497c5b785/notebooks/indexing.ipynb) (a sketch of this flow follows the list)
  * preprocess the downloaded documents into chunks consisting of 2 sentences
  * chunks with fewer than 10 words are discarded, since they are not very informative
  * instantiate a [FAISS](https://github.com/facebookresearch/faiss) document store and store the passages in it
  * create embeddings for the passages, using a Sentence Transformer model, and save them in FAISS. The retrieval task will involve [*asymmetric semantic search*](https://www.sbert.net/examples/applications/semantic-search/README.html#symmetric-vs-asymmetric-semantic-search) (statements to be verified are usually shorter than the relevant passages), therefore I chose the model `msmarco-distilbert-base-tas-b`
  * save the FAISS index.
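
A minimal sketch of the indexing flow, using the same Haystack APIs the web app relies on (`raw_docs` and its contents are illustrative assumptions, not the notebook's exact code):

```python
from haystack.document_stores import FAISSDocumentStore
from haystack.nodes import EmbeddingRetriever, PreProcessor
from haystack.schema import Document

# hypothetical crawled page (in the real project, raw_docs comes from the Wikipedia crawl)
raw_docs = [
    Document(
        content="Elvis Presley was an American singer. He is often called the King of Rock and Roll. He was born in 1935.",
        meta={"name": "Elvis Presley", "url": "https://en.wikipedia.org/wiki/Elvis_Presley"},
    )
]

# split pages into chunks of 2 sentences
preprocessor = PreProcessor(
    split_by="sentence", split_length=2, split_respect_sentence_boundary=False
)
docs = preprocessor.process(raw_docs)
# discard chunks with fewer than 10 words
docs = [d for d in docs if len(d.content.split()) >= 10]

document_store = FAISSDocumentStore(similarity="dot_product", embedding_dim=768)
document_store.write_documents(docs)

retriever = EmbeddingRetriever(
    document_store=document_store,
    embedding_model="sentence-transformers/msmarco-distilbert-base-tas-b",
    model_format="sentence_transformers",
)
document_store.update_embeddings(retriever)
document_store.save("my_faiss_index.faiss")  # also writes my_faiss_index.json
```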

#### Search pipeline

* the user enters a factual statement
* compute the embedding of the user statement using the same Sentence Transformer used for indexing (`msmarco-distilbert-base-tas-b`)
* retrieve the K most relevant text passages stored in FAISS (along with their relevance scores)
* **textual entailment task**: compute the entailment between each text passage (premise) and the user statement (hypothesis), using a Natural Language Inference model (`microsoft/deberta-v2-xlarge-mnli`). For every text passage, we get 3 scores (summing to 1): entailment, contradiction and neutral. *(For this task, I developed a custom Haystack node: `EntailmentChecker`)*
* aggregate the textual entailment scores: compute their weighted average, where the weight is the relevance score (a worked example follows). **Now it is possible to tell whether the knowledge base confirms, is neutral about, or disproves the user statement.**
* *empirical consideration: if the first N passages (N<K) show strong evidence of entailment/contradiction (partial aggregate scores > 0.5), it is better not to consider the (K-N) less relevant documents.*
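
To make the aggregation step concrete, here is a toy computation with made-up relevance and entailment scores:

```python
# hypothetical relevance scores of K=3 retrieved passages
relevance = [0.9, 0.7, 0.4]
# entailment probability assigned to each passage by the NLI model
entailment = [0.8, 0.6, 0.1]

# weighted average: each passage counts in proportion to its relevance
agg_entailment = sum(r * e for r, e in zip(relevance, entailment)) / sum(relevance)
print(round(agg_entailment, 2))  # 0.59
```

The same weighting is applied to the contradiction and neutral scores, so the three aggregate values still sum to 1.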

### Limits and possible improvements
✨ As mentioned, the current approach to fact checking is simple and naive. Some **structural limits of this approach**:
* there is **no statement detection**. In fact, the statement to be verified is chosen by the user. In real-world applications, this step is often necessary.
* **Wikipedia is taken as a source of truth**. Unfortunately, Wikipedia does not contain universal knowledge and there is no real guarantee that it is a source of truth. There are certainly very interesting approaches that view a snapshot of the entire web as an uncurated source of knowledge (see [Facebook Research SPHERE](https://arxiv.org/abs/2112.09924)).
* several papers, and our experiments too, show that **dense retrieval** is generally effective at retrieving textual passages for evaluating the user statement. However, there may be cases in which the most useful passages for fact checking do not emerge from simple semantic similarity with the statement to be verified.
* **no organic evaluation** was performed, only manual experiments.

While keeping this simple approach, some **improvements** could be made:
* for reasons of simplicity and infrastructural limitations, the retrieval uses only a very small portion of the Wikipedia data (artist pages from the [List of mainstream rock performers](https://en.wikipedia.org/wiki/List_of_mainstream_rock_performers)). With so little data available, in many cases the knowledge base remains neutral even with respect to statements about rock albums/songs. Certainly, fact checking **quality could improve by expanding the knowledge base**, possibly extending it to the whole of Wikipedia.
* both the retriever model and the Natural Language Inference model are general-purpose models and have not been fine-tuned for our domain. They would undoubtedly **show better performance if fine-tuned on the rock music domain**. In particular, the retriever model could be adapted with little effort, using [Generative Pseudo Labelling](https://haystack.deepset.ai/guides/gpl).

### Repository structure
* [Rock_fact_checker.py](Rock_fact_checker.py) and [pages folder](./pages/): multi-page Streamlit web app
* [app_utils folder](./app_utils/): Python modules used in the web app
* [notebooks folder](./notebooks/): Jupyter/Colab notebooks to get the Wikipedia data and index the text passages (using Haystack)
* [data folder](./data/): all necessary data, including the original Wikipedia data, the FAISS index and prepared random statements

### Installation
💻 To install this project locally, follow these steps:
* `git clone https://github.com/anakin87/fact-checking-rocks`
* `cd fact-checking-rocks`
* `pip install -r requirements.txt`

To run the web app, simply type: `streamlit run Rock_fact_checker.py`
Rock_fact_checker.py
ADDED
@@ -0,0 +1,125 @@
import random
import time
import logging
from json import JSONDecodeError

import streamlit as st

from app_utils.backend_utils import load_statements, query
from app_utils.frontend_utils import (
    set_state_if_absent,
    reset_results,
    entailment_html_messages,
    create_df_for_relevant_snippets,
    create_ternary_plot,
    build_sidebar,
)
from app_utils.config import RETRIEVER_TOP_K


def main():
    statements = load_statements()
    build_sidebar()

    # Persistent state
    set_state_if_absent("statement", "Elvis Presley is alive")
    set_state_if_absent("answer", "")
    set_state_if_absent("results", None)
    set_state_if_absent("raw_json", None)
    set_state_if_absent("random_statement_requested", False)

    st.write("# Fact Checking 🎸 Rocks!")
    st.write()
    st.markdown(
        """
    ##### Enter a factual statement about [Rock music](https://en.wikipedia.org/wiki/List_of_mainstream_rock_performers) and let the AI check it out for you...
    """
    )
    # Search bar
    statement = st.text_input(
        "", value=st.session_state.statement, max_chars=100, on_change=reset_results
    )
    col1, col2 = st.columns(2)
    col1.markdown(
        "<style>.stButton button {width:100%;}</style>", unsafe_allow_html=True
    )
    col2.markdown(
        "<style>.stButton button {width:100%;}</style>", unsafe_allow_html=True
    )
    # Run button
    run_pressed = col1.button("Run")
    # Random statement button
    if col2.button("Random statement"):
        reset_results()
        statement = random.choice(statements)
        # Avoid picking the same statement twice (the change is not visible on the UI)
        while statement == st.session_state.statement:
            statement = random.choice(statements)
        st.session_state.statement = statement
        st.session_state.random_statement_requested = True
        # Re-runs the script setting the random statement as the textbox value
        # Unfortunately necessary as the Random statement button is _below_ the textbox
        # Adapted for Streamlit>=1.12.0
        if hasattr(st, "scriptrunner"):
            raise st.scriptrunner.script_runner.RerunException(
                st.scriptrunner.script_requests.RerunData(widget_states=None)
            )
        raise st.runtime.scriptrunner.script_runner.RerunException(
            st.runtime.scriptrunner.script_requests.RerunData(widget_states=None)
        )
    else:
        st.session_state.random_statement_requested = False
    run_query = (
        run_pressed or statement != st.session_state.statement
    ) and not st.session_state.random_statement_requested

    # Get results for query
    if run_query and statement:
        time_start = time.time()
        reset_results()
        st.session_state.statement = statement
        with st.spinner("🧠 Performing neural search on documents..."):
            try:
                st.session_state.results = query(statement, RETRIEVER_TOP_K)
                print(f"S: {statement}")
                time_end = time.time()
                print(time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime()))
                print(f"elapsed time: {time_end - time_start}")
            except JSONDecodeError as je:
                st.error(
                    "👓 An error occurred reading the results. Is the document store working?"
                )
                return
            except Exception as e:
                logging.exception(e)
                st.error("🐞 An error occurred during the request.")
                return

    # Display results
    if st.session_state.results:
        docs = st.session_state.results["documents"]
        agg_entailment_info = st.session_state.results["aggregate_entailment_info"]

        # show different messages depending on entailment results
        max_key = max(agg_entailment_info, key=agg_entailment_info.get)
        message = entailment_html_messages[max_key]
        st.markdown(f"<br/><h4>{message}</h4>", unsafe_allow_html=True)

        st.markdown(f"###### Aggregate entailment information:")
        col1, col2 = st.columns([2, 1])
        fig = create_ternary_plot(agg_entailment_info)
        with col1:
            st.plotly_chart(fig, use_container_width=True)
        with col2:
            st.write(agg_entailment_info)

        st.markdown(f"###### Most Relevant snippets:")
        df, urls = create_df_for_relevant_snippets(docs)
        st.dataframe(df)
        str_wiki_pages = "Wikipedia source pages: "
        for doc, url in urls.items():
            str_wiki_pages += f"[{doc}]({url}) "
        st.markdown(str_wiki_pages)


main()
app_utils/__pycache__/backend_utils.cpython-39.pyc
ADDED
Binary file (2.29 kB)
app_utils/backend_utils.py
ADDED
@@ -0,0 +1,67 @@
import shutil

from haystack.document_stores import FAISSDocumentStore
from haystack.nodes import EmbeddingRetriever
from haystack.pipelines import Pipeline
import streamlit as st

from app_utils.entailment_checker import EntailmentChecker
from app_utils.config import (
    STATEMENTS_PATH,
    INDEX_DIR,
    RETRIEVER_MODEL,
    RETRIEVER_MODEL_FORMAT,
    NLI_MODEL,
)


@st.cache()
def load_statements():
    """Load statements from file"""
    with open(STATEMENTS_PATH) as fin:
        statements = [
            line.strip() for line in fin.readlines() if not line.startswith("#")
        ]
    return statements


# cached to make the index and models load only at start
@st.cache(
    hash_funcs={"builtins.SwigPyObject": lambda _: None}, allow_output_mutation=True
)
def start_haystack():
    """
    Load the document store, retriever and entailment checker, and create the pipeline
    """
    shutil.copy(f"{INDEX_DIR}/faiss_document_store.db", ".")
    document_store = FAISSDocumentStore(
        faiss_index_path=f"{INDEX_DIR}/my_faiss_index.faiss",
        faiss_config_path=f"{INDEX_DIR}/my_faiss_index.json",
    )
    print(f"Index size: {document_store.get_document_count()}")
    retriever = EmbeddingRetriever(
        document_store=document_store,
        embedding_model=RETRIEVER_MODEL,
        model_format=RETRIEVER_MODEL_FORMAT,
    )
    entailment_checker = EntailmentChecker(
        model_name_or_path=NLI_MODEL,
        use_gpu=False,
        entailment_contradiction_threshold=0.5,
    )

    pipe = Pipeline()
    pipe.add_node(component=retriever, name="retriever", inputs=["Query"])
    pipe.add_node(component=entailment_checker, name="ec", inputs=["retriever"])
    return pipe


pipe = start_haystack()

# the pipeline is not included as a parameter of the following function,
# because it is difficult to cache
@st.cache(allow_output_mutation=True)
def query(statement: str, retriever_top_k: int = 5):
    """Run the query and verify the statement"""
    params = {"retriever": {"top_k": retriever_top_k}}
    return pipe.run(statement, params=params)
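
For reference, this module can also be exercised outside Streamlit (a hypothetical snippet, assuming the `data/index` files are in place; importing the module loads the index and models):

```python
from app_utils.backend_utils import query

result = query("The Beatles were formed in Liverpool", retriever_top_k=5)
print(result["aggregate_entailment_info"])
for doc in result["documents"]:
    print(doc.meta["name"], doc.meta["entailment_info"])
```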
app_utils/config.py
ADDED
@@ -0,0 +1,16 @@
import streamlit as st

INDEX_DIR = "data/index"
STATEMENTS_PATH = "data/statements.txt"

RETRIEVER_MODEL = "sentence-transformers/msmarco-distilbert-base-tas-b"
RETRIEVER_MODEL_FORMAT = "sentence_transformers"
RETRIEVER_TOP_K = 5

# In HF Space, we use microsoft/deberta-v2-xlarge-mnli
# for local testing, a smaller model is better
try:
    NLI_MODEL = st.secrets["NLI_MODEL"]
except:
    NLI_MODEL = "valhalla/distilbart-mnli-12-1"
print(f"Used NLI model: {NLI_MODEL}")
app_utils/entailment_checker.py
ADDED
@@ -0,0 +1,109 @@
from typing import List, Optional

from transformers import AutoModelForSequenceClassification, AutoTokenizer, AutoConfig
import torch
from haystack.nodes.base import BaseComponent
from haystack.modeling.utils import initialize_device_settings
from haystack.schema import Document


class EntailmentChecker(BaseComponent):
    """
    This node checks the entailment between every document content and the query.
    It enriches the documents' metadata with entailment information.
    It also returns aggregate entailment information.
    """

    outgoing_edges = 1

    def __init__(
        self,
        model_name_or_path: str = "roberta-large-mnli",
        model_version: Optional[str] = None,
        tokenizer: Optional[str] = None,
        use_gpu: bool = True,
        batch_size: int = 16,
        entailment_contradiction_threshold: float = 0.5,
    ):
        """
        Load a Natural Language Inference model from Transformers.

        :param model_name_or_path: Directory of a saved model or the name of a public model.
            See https://huggingface.co/models for a full list of available models.
        :param model_version: The version of the model to use from the Hugging Face model hub. Can be a tag name, branch name, or commit hash.
        :param tokenizer: Name of the tokenizer (usually the same as the model)
        :param use_gpu: Whether to use GPU (if available).
        :param batch_size: Number of Documents to be processed at a time.
        :param entailment_contradiction_threshold: if the first N documents show strong evidence of entailment/contradiction
            (aggregate entailment or contradiction greater than the threshold), the less relevant documents are not taken into account
        """
        super().__init__()

        self.devices, _ = initialize_device_settings(use_cuda=use_gpu, multi_gpu=False)

        tokenizer = tokenizer or model_name_or_path
        self.tokenizer = AutoTokenizer.from_pretrained(tokenizer)
        self.model = AutoModelForSequenceClassification.from_pretrained(
            pretrained_model_name_or_path=model_name_or_path, revision=model_version
        )
        self.batch_size = batch_size
        self.entailment_contradiction_threshold = entailment_contradiction_threshold
        self.model.to(str(self.devices[0]))

        id2label = AutoConfig.from_pretrained(model_name_or_path).id2label
        self.labels = [id2label[k].lower() for k in sorted(id2label)]
        if "entailment" not in self.labels:
            raise ValueError(
                "The model config must contain an entailment value in the id2label dict."
            )

    def run(self, query: str, documents: List[Document]):

        scores, agg_con, agg_neu, agg_ent = 0, 0, 0, 0
        for i, doc in enumerate(documents):
            entailment_info = self.get_entailment(premise=doc.content, hypotesis=query)
            doc.meta["entailment_info"] = entailment_info

            scores += doc.score
            con, neu, ent = (
                entailment_info["contradiction"],
                entailment_info["neutral"],
                entailment_info["entailment"],
            )
            agg_con += con * doc.score
            agg_neu += neu * doc.score
            agg_ent += ent * doc.score

            # if the first documents show strong evidence of entailment/contradiction,
            # there is no need to consider the less relevant documents
            if max(agg_con, agg_ent) / scores > self.entailment_contradiction_threshold:
                break

        aggregate_entailment_info = {
            "contradiction": round(agg_con / scores, 2),
            "neutral": round(agg_neu / scores, 2),
            "entailment": round(agg_ent / scores, 2),
        }

        entailment_checker_result = {
            "documents": documents[: i + 1],
            "aggregate_entailment_info": aggregate_entailment_info,
        }

        return entailment_checker_result, "output_1"

    def run_batch(self, queries: List[str], documents: List[Document]):
        pass

    def get_entailment(self, premise, hypotesis):
        with torch.inference_mode():
            inputs = self.tokenizer(
                f"{premise}{self.tokenizer.sep_token}{hypotesis}", return_tensors="pt"
            ).to(self.devices[0])
            out = self.model(**inputs)
            logits = out.logits
            probs = (
                torch.nn.functional.softmax(logits, dim=-1)[0, :].detach().cpu().numpy()
            )
        entailment_dict = {k.lower(): v for k, v in zip(self.labels, probs)}
        return entailment_dict
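
A minimal standalone check of this node (hypothetical snippet; the small MNLI model and the documents with made-up relevance scores are assumptions):

```python
from haystack.schema import Document
from app_utils.entailment_checker import EntailmentChecker

checker = EntailmentChecker(
    model_name_or_path="valhalla/distilbart-mnli-12-1",  # small model, fine for local tests
    use_gpu=False,
)
# score plays the role of the retriever's relevance score
docs = [
    Document(content="Elvis Presley died on August 16, 1977.", score=0.9),
    Document(content="Presley released his first single in 1954.", score=0.5),
]
result, _ = checker.run(query="Elvis Presley is alive", documents=docs)
print(result["aggregate_entailment_info"])  # contradiction should dominate
```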
app_utils/frontend_utils.py
ADDED
@@ -0,0 +1,120 @@
import streamlit as st
import pandas as pd
import plotly.graph_objects as go


entailment_html_messages = {
    "entailment": 'The knowledge base seems to <span style="color:green">confirm</span> your statement',
    "contradiction": 'The knowledge base seems to <span style="color:red">contradict</span> your statement',
    "neutral": 'The knowledge base is <span style="color:darkgray">neutral</span> about your statement',
}


def build_sidebar():
    sidebar = """
    <h1 style='text-align: center'>Fact Checking 🎸 Rocks!</h1>
    <div style='text-align: center'>
    <i>Fact checking baseline combining dense retrieval and textual entailment</i>
    <p><br/><a href='https://github.com/anakin87/fact-checking-rocks'>Github project</a> - Based on <a href='https://github.com/deepset-ai/haystack'>Haystack</a></p>
    <p><small><a href='https://en.wikipedia.org/wiki/List_of_mainstream_rock_performers'>Data crawled from Wikipedia</a></small></p>
    </div>
    """
    st.sidebar.markdown(sidebar, unsafe_allow_html=True)


def set_state_if_absent(key, value):
    if key not in st.session_state:
        st.session_state[key] = value


# Small callback to reset the interface in case the text of the statement changes
def reset_results(*args):
    st.session_state.answer = None
    st.session_state.results = None
    st.session_state.raw_json = None


def create_ternary_plot(entailment_data):
    """
    Create a Plotly ternary plot for the given entailment dict.
    """
    hover_text = ""
    for label, value in entailment_data.items():
        hover_text += f"{label}: {value}<br>"

    fig = go.Figure(
        go.Scatterternary(
            {
                "cliponaxis": False,
                "mode": "markers",
                "a": [i for i in map(lambda x: x["entailment"], [entailment_data])],
                "b": [i for i in map(lambda x: x["contradiction"], [entailment_data])],
                "c": [i for i in map(lambda x: x["neutral"], [entailment_data])],
                "hoverinfo": "text",
                "text": hover_text,
                "marker": {
                    "color": "#636efa",
                    "size": [0.01],
                    "sizemode": "area",
                    "sizeref": 2.5e-05,
                    "symbol": "circle",
                },
            }
        )
    )

    fig.update_layout(
        {
            "ternary": {
                "sum": 1,
                "aaxis": makeAxis("Entailment", 0),
                "baxis": makeAxis("<br>Contradiction", 45),
                "caxis": makeAxis("<br>Neutral", -45),
            }
        }
    )
    return fig


def makeAxis(title, tickangle):
    return {
        "title": title,
        "titlefont": {"size": 20},
        "tickangle": tickangle,
        "tickcolor": "rgba(0,0,0,0)",
        "ticklen": 5,
        "showline": False,
        "showgrid": True,
    }


def create_df_for_relevant_snippets(docs):
    """
    Create a dataframe that contains all relevant snippets.
    Also returns the URLs
    """
    rows = []
    urls = {}
    for doc in docs:
        row = {
            "Title": doc.meta["name"],
            "Relevance": f"{doc.score:.3f}",
            "con": f"{doc.meta['entailment_info']['contradiction']:.2f}",
            "neu": f"{doc.meta['entailment_info']['neutral']:.2f}",
            "ent": f"{doc.meta['entailment_info']['entailment']:.2f}",
            "Content": doc.content,
        }
        urls[doc.meta["name"]] = doc.meta["url"]
        rows.append(row)
    df = pd.DataFrame(rows)
    df["Content"] = df["Content"].str.wrap(75)
    df = df.style.apply(highlight_cols)

    return df, urls


def highlight_cols(s):
    coldict = {"con": "#FFA07A", "neu": "#E5E4E2", "ent": "#a9d39e"}
    if s.name in coldict.keys():
        return ["background-color: {}".format(coldict[s.name])] * len(s)
    return [""] * len(s)
data/.DS_Store
ADDED
Binary file (6.15 kB)
data/index/.DS_Store
ADDED
Binary file (6.15 kB)
data/index/faiss_document_store.db
ADDED
Binary file (73.7 kB)
data/index/my_faiss_index.faiss
ADDED
Binary file (6.19 kB)
data/index/my_faiss_index.json
ADDED
@@ -0,0 +1 @@
{"similarity": "dot_product", "embedding_dim": 768}
data/statements.txt
ADDED
@@ -0,0 +1 @@
Bring your friend and you will get referral bonus
notebooks/get_wikipedia_data.ipynb
ADDED
@@ -0,0 +1,582 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"metadata": {
|
3 |
+
"kernelspec": {
|
4 |
+
"language": "python",
|
5 |
+
"display_name": "Python 3",
|
6 |
+
"name": "python3"
|
7 |
+
},
|
8 |
+
"language_info": {
|
9 |
+
"name": "python",
|
10 |
+
"version": "3.7.12",
|
11 |
+
"mimetype": "text/x-python",
|
12 |
+
"codemirror_mode": {
|
13 |
+
"name": "ipython",
|
14 |
+
"version": 3
|
15 |
+
},
|
16 |
+
"pygments_lexer": "ipython3",
|
17 |
+
"nbconvert_exporter": "python",
|
18 |
+
"file_extension": ".py"
|
19 |
+
}
|
20 |
+
},
|
21 |
+
"nbformat_minor": 4,
|
22 |
+
"nbformat": 4,
|
23 |
+
"cells": [
|
24 |
+
{
|
25 |
+
"cell_type": "markdown",
|
26 |
+
"source": "# Download data from Wikipedia",
|
27 |
+
"metadata": {}
|
28 |
+
},
|
29 |
+
{
|
30 |
+
"cell_type": "code",
|
31 |
+
"source": "# install wikipedia API python wrapper\n! pip install wikipedia",
|
32 |
+
"metadata": {
|
33 |
+
"_uuid": "8f2839f25d086af736a60e9eeb907d3b93b6e0e5",
|
34 |
+
"_cell_guid": "b1076dfc-b9ad-4769-8c92-a6c4dae69d19",
|
35 |
+
"execution": {
|
36 |
+
"iopub.status.busy": "2022-08-20T21:43:59.293655Z",
|
37 |
+
"iopub.execute_input": "2022-08-20T21:43:59.294792Z",
|
38 |
+
"iopub.status.idle": "2022-08-20T21:44:15.263363Z",
|
39 |
+
"shell.execute_reply.started": "2022-08-20T21:43:59.294746Z",
|
40 |
+
"shell.execute_reply": "2022-08-20T21:44:15.262171Z"
|
41 |
+
},
|
42 |
+
"trusted": true
|
43 |
+
},
|
44 |
+
"execution_count": 3,
|
45 |
+
"outputs": []
|
46 |
+
},
|
47 |
+
{
|
48 |
+
"cell_type": "code",
|
49 |
+
"source": "import wikipedia\nimport json\nimport traceback",
|
50 |
+
"metadata": {
|
51 |
+
"execution": {
|
52 |
+
"iopub.status.busy": "2022-08-20T21:44:15.265341Z",
|
53 |
+
"iopub.execute_input": "2022-08-20T21:44:15.265753Z",
|
54 |
+
"iopub.status.idle": "2022-08-20T21:44:15.470330Z",
|
55 |
+
"shell.execute_reply.started": "2022-08-20T21:44:15.265709Z",
|
56 |
+
"shell.execute_reply": "2022-08-20T21:44:15.468665Z"
|
57 |
+
},
|
58 |
+
"trusted": true
|
59 |
+
},
|
60 |
+
"execution_count": 4,
|
61 |
+
"outputs": []
|
62 |
+
},
|
63 |
+
{
|
64 |
+
"cell_type": "code",
|
65 |
+
"source": [
|
66 |
+
"# titles to download, from https://en.wikipedia.org/wiki/List_of_mainstream_rock_performers\n",
|
67 |
+
"\n",
|
68 |
+
"pages_titles = \"\"\"10cc\n",
|
69 |
+
"10_Years_(band)\n",
|
70 |
+
"3_Doors_Down\n",
|
71 |
+
"311_(band)\n",
|
72 |
+
"38_Special_(band)\n",
|
73 |
+
"Accept_(band)\n",
|
74 |
+
"AC/DC\n",
|
75 |
+
"Bryan_Adams\n",
|
76 |
+
"Aerosmith\n",
|
77 |
+
"AFI_(band)\n",
|
78 |
+
"Air_Supply\n",
|
79 |
+
"The_Alan_Parsons_Project\n",
|
80 |
+
"Alice_in_Chains\n",
|
81 |
+
"The_All-American_Rejects\n",
|
82 |
+
"The_Allman_Brothers_Band\n",
|
83 |
+
"Alter_Bridge\n",
|
84 |
+
"Ambrosia_(band)\n",
|
85 |
+
"America_(band)\n",
|
86 |
+
"The_Animals\n",
|
87 |
+
"Adam_Ant\n",
|
88 |
+
"Anthrax_(American_band)\n",
|
89 |
+
"April_Wine\n",
|
90 |
+
"Arcade_Fire\n",
|
91 |
+
"Arctic_Monkeys\n",
|
92 |
+
"Asia_(band)\n",
|
93 |
+
"Audioslave\n",
|
94 |
+
"Avenged_Sevenfold\n",
|
95 |
+
"Awolnation\n",
|
96 |
+
"The_B-52's\n",
|
97 |
+
"Bachman–Turner_Overdrive\n",
|
98 |
+
"Bad_Company\n",
|
99 |
+
"Badfinger\n",
|
100 |
+
"The_Band\n",
|
101 |
+
"The_Bangles\n",
|
102 |
+
"Barenaked_Ladies\n",
|
103 |
+
"Bay_City_Rollers\n",
|
104 |
+
"The_Beach_Boys\n",
|
105 |
+
"The_Beatles\n",
|
106 |
+
"Beck\n",
|
107 |
+
"Ben_Folds_Five\n",
|
108 |
+
"Pat_Benatar\n",
|
109 |
+
"Chuck_Berry\n",
|
110 |
+
"The_Big_Bopper\n",
|
111 |
+
"Billy_Talent\n",
|
112 |
+
"The_Black_Crowes\n",
|
113 |
+
"The_Black_Keys\n",
|
114 |
+
"Black_Sabbath\n",
|
115 |
+
"Black_Stone_Cherry\n",
|
116 |
+
"Black_Veil_Brides\n",
|
117 |
+
"Blink-182\n",
|
118 |
+
"Bloodhound_Gang\n",
|
119 |
+
"Blue_October\n",
|
120 |
+
"Blue_Öyster_Cult\n",
|
121 |
+
"Blues_Traveler\n",
|
122 |
+
"James_Blunt\n",
|
123 |
+
"Blur_(band)\n",
|
124 |
+
"Bon_Jovi\n",
|
125 |
+
"Boston_(band)\n",
|
126 |
+
"David_Bowie\n",
|
127 |
+
"Bowling_for_Soup\n",
|
128 |
+
"Boys_Like_Girls\n",
|
129 |
+
"Bread_(band)\n",
|
130 |
+
"Breaking_Benjamin\n",
|
131 |
+
"Bring_Me_the_Horizon\n",
|
132 |
+
"Jackson_Browne\n",
|
133 |
+
"Buckcherry\n",
|
134 |
+
"Jeff_Buckley\n",
|
135 |
+
"Bullet_for_My_Valentine\n",
|
136 |
+
"Bush_(British_band)\n",
|
137 |
+
"The_Byrds\n",
|
138 |
+
"Cage_the_Elephant\n",
|
139 |
+
"Cake_(band)\n",
|
140 |
+
"Canned_Heat\n",
|
141 |
+
"The_Cab\n",
|
142 |
+
"The_Cardigans\n",
|
143 |
+
"The_Cars\n",
|
144 |
+
"Catfish_and_the_Bottlemen\n",
|
145 |
+
"Harry_Chapin\n",
|
146 |
+
"Tracy_Chapman\n",
|
147 |
+
"Cheap_Trick\n",
|
148 |
+
"Chevelle_(band)\n",
|
149 |
+
"Chicago_(band)\n",
|
150 |
+
"Chubby_Checker\n",
|
151 |
+
"Cinderella_(band)\n",
|
152 |
+
"Dallas_Green_(musician)\n",
|
153 |
+
"Eric_Clapton\n",
|
154 |
+
"The_Clash\n",
|
155 |
+
"Eddie_Cochran\n",
|
156 |
+
"Joe_Cocker\n",
|
157 |
+
"Coheed_and_Cambria\n",
|
158 |
+
"Cold_Chisel\n",
|
159 |
+
"Coldplay\n",
|
160 |
+
"Collective_Soul\n",
|
161 |
+
"Phil_Collins\n",
|
162 |
+
"Alice_Cooper\n",
|
163 |
+
"Chris_Cornell\n",
|
164 |
+
"Elvis_Costello\n",
|
165 |
+
"Counting_Crows\n",
|
166 |
+
"The_Cranberries\n",
|
167 |
+
"Crash_Test_Dummies\n",
|
168 |
+
"Cream_(band)\n",
|
169 |
+
"Creed_(band)\n",
|
170 |
+
"Creedence_Clearwater_Revival\n",
|
171 |
+
"Jim_Croce\n",
|
172 |
+
"Crosby,_Stills,_Nash_&_Young\n",
|
173 |
+
"Christopher_Cross\n",
|
174 |
+
"Sheryl_Crow\n",
|
175 |
+
"Crowded_House\n",
|
176 |
+
"The_Cult\n",
|
177 |
+
"The_Cure\n",
|
178 |
+
"Damn_Yankees_(band)\n",
|
179 |
+
"Dashboard_Confessional\n",
|
180 |
+
"Daughtry_(band)\n",
|
181 |
+
"The_Dave_Clark_Five\n",
|
182 |
+
"Dave_Matthews_Band\n",
|
183 |
+
"Days_of_the_New\n",
|
184 |
+
"Death_Cab_for_Cutie\n",
|
185 |
+
"Deep_Purple\n",
|
186 |
+
"Def_Leppard\n",
|
187 |
+
"Deftones\n",
|
188 |
+
"Depeche_Mode\n",
|
189 |
+
"Bo_Diddley\n",
|
190 |
+
"Dio_(band)\n",
|
191 |
+
"Dire_Straits\n",
|
192 |
+
"Disturbed_(band)\n",
|
193 |
+
"Fats_Domino\n",
|
194 |
+
"Donovan\n",
|
195 |
+
"The_Doobie_Brothers\n",
|
196 |
+
"The_Doors\n",
|
197 |
+
"Dr._Hook_&_the_Medicine_Show\n",
|
198 |
+
"Dropkick_Murphys\n",
|
199 |
+
"Drowning_Pool\n",
|
200 |
+
"Duran_Duran\n",
|
201 |
+
"Ian_Dury\n",
|
202 |
+
"Bob_Dylan\n",
|
203 |
+
"Eagles_(band)\n",
|
204 |
+
"Echo_&_the_Bunnymen\n",
|
205 |
+
"Duane_Eddy\n",
|
206 |
+
"Edgar_Winter\n",
|
207 |
+
"Electric_Light_Orchestra\n",
|
208 |
+
"Emerson,_Lake_&_Palmer\n",
|
209 |
+
"England_Dan_&_John_Ford_Coley\n",
|
210 |
+
"Melissa_Etheridge\n",
|
211 |
+
"Europe_(band)\n",
|
212 |
+
"Evanescence\n",
|
213 |
+
"Everclear_(band)\n",
|
214 |
+
"Everlast\n",
|
215 |
+
"The_Everly_Brothers\n",
|
216 |
+
"Extreme_(band)\n",
|
217 |
+
"Faces_(band)\n",
|
218 |
+
"Faith_No_More\n",
|
219 |
+
"Fall_Out_Boy\n",
|
220 |
+
"Bryan_Ferry\n",
|
221 |
+
"Filter_(band)\n",
|
222 |
+
"Finger_Eleven\n",
|
223 |
+
"FireHouse\n",
|
224 |
+
"Five_Finger_Death_Punch\n",
|
225 |
+
"Five_for_Fighting\n",
|
226 |
+
"The_Fixx\n",
|
227 |
+
"The_Flaming_Lips\n",
|
228 |
+
"Fleetwood_Mac\n",
|
229 |
+
"Flogging_Molly\n",
|
230 |
+
"Florence_and_the_Machine\n",
|
231 |
+
"Flyleaf_(band)\n",
|
232 |
+
"Foals_(band)\n",
|
233 |
+
"Dan_Fogelberg\n",
|
234 |
+
"John_Fogerty\n",
|
235 |
+
"Foo_Fighters\n",
|
236 |
+
"Foreigner_(band)\n",
|
237 |
+
"Foster_the_People\n",
|
238 |
+
"The_Four_Seasons_(band)\n",
|
239 |
+
"Peter_Frampton\n",
|
240 |
+
"Franz_Ferdinand_(band)\n",
|
241 |
+
"The_Fray\n",
|
242 |
+
"Glenn_Frey\n",
|
243 |
+
"Fuel_(band)\n",
|
244 |
+
"Fun_(band)\n",
|
245 |
+
"Peter_Gabriel\n",
|
246 |
+
"Garbage_(band)\n",
|
247 |
+
"Genesis_(band)\n",
|
248 |
+
"Ghost_(Swedish_band)\n",
|
249 |
+
"Gin_Blossoms\n",
|
250 |
+
"Gary_Glitter\n",
|
251 |
+
"The_Go-Go's\n",
|
252 |
+
"Godsmack\n",
|
253 |
+
"Golden_Earring\n",
|
254 |
+
"Goo_Goo_Dolls\n",
|
255 |
+
"Good_Charlotte\n",
|
256 |
+
"Grand_Funk_Railroad\n",
|
257 |
+
"Grateful_Dead\n",
|
258 |
+
"Great_White\n",
|
259 |
+
"Green_Day\n",
|
260 |
+
"Greta_Van_Fleet\n",
|
261 |
+
"The_Guess_Who\n",
|
262 |
+
"Guns_N'_Roses\n",
|
263 |
+
"Halestorm\n",
|
264 |
+
"Bill_Haley_&_His_Comets\n",
|
265 |
+
"Hall_&_Oates\n",
|
266 |
+
"George_Harrison\n",
|
267 |
+
"Heart_(band)\n",
|
268 |
+
"Jimi_Hendrix\n",
|
269 |
+
"Don_Henley\n",
|
270 |
+
"Herman's_Hermits\n",
|
271 |
+
"Highly_Suspect\n",
|
272 |
+
"Hinder\n",
|
273 |
+
"The_Hives\n",
|
274 |
+
"Hole_(band)\n",
|
275 |
+
"The_Hollies\n",
|
276 |
+
"Buddy_Holly\n",
|
277 |
+
"Hoobastank\n",
|
278 |
+
"Hootie_&_the_Blowfish\n",
|
279 |
+
"Icehouse_(band)\n",
|
280 |
+
"Billy_Idol\n",
|
281 |
+
"Imagine_Dragons\n",
|
282 |
+
"Incubus_(band)\n",
|
283 |
+
"Interpol_(band)\n",
|
284 |
+
"INXS\n",
|
285 |
+
"Iron_Maiden\n",
|
286 |
+
"The_J._Geils_Band\n",
|
287 |
+
"The_Jam\n",
|
288 |
+
"Tommy_James_and_the_Shondells\n",
|
289 |
+
"Jane's_Addiction\n",
|
290 |
+
"Jefferson_Airplane\n",
|
291 |
+
"Jefferson_Starship\n",
|
292 |
+
"The_Jesus_and_Mary_Chain\n",
|
293 |
+
"Jet_(Australian_band)\n",
|
294 |
+
"Jethro_Tull_(band)\n",
|
295 |
+
"Joan_Jett\n",
|
296 |
+
"Jimmy_Eat_World\n",
|
297 |
+
"Billy_Joel\n",
|
298 |
+
"Elton_John\n",
|
299 |
+
"Janis_Joplin\n",
|
300 |
+
"Journey_(band)\n",
|
301 |
+
"Joy_Division\n",
|
302 |
+
"Judas_Priest\n",
|
303 |
+
"Kaiser_Chiefs\n",
|
304 |
+
"Kaleo_(band)\n",
|
305 |
+
"Kansas_(band)\n",
|
306 |
+
"Keane_(band)\n",
|
307 |
+
"Kid_Rock\n",
|
308 |
+
"The_Killers\n",
|
309 |
+
"Killswitch_Engage\n",
|
310 |
+
"Kings_of_Leon\n",
|
311 |
+
"The_Kinks\n",
|
+"Kiss_(band)\n",
+"Korn\n",
+"Lenny_Kravitz\n",
+"Lacuna_Coil\n",
+"Lamb_of_God_(band)\n",
+"Avril_Lavigne\n",
+"Led_Zeppelin\n",
+"John_Lennon\n",
+"Huey_Lewis_and_the_News\n",
+"Jerry_Lee_Lewis\n",
+"Lifehouse_(band)\n",
+"Limp_Bizkit\n",
+"Linkin_Park\n",
+"Little_Richard\n",
+"Little_River_Band\n",
+"Live_(band)\n",
+"Living_Colour\n",
+"Kenny_Loggins\n",
+"Loverboy\n",
+"The_Lovin'_Spoonful\n",
+"The_Lumineers\n",
+"Lynyrd_Skynyrd\n",
+"The_Mamas_&_the_Papas\n",
+"Marilyn_Manson\n",
+"The_Marshall_Tucker_Band\n",
+"Matchbox_Twenty\n",
+"John_Mayer\n",
+"Paul_McCartney\n",
+"Meat_Loaf\n",
+"Megadeth\n",
+"John_Mellencamp\n",
+"Men_at_Work\n",
+"Metallica\n",
+"Midnight_Oil\n",
+"Mike_and_the_Mechanics\n",
+"Modest_Mouse\n",
+"Eddie_Money\n",
+"The_Monkees\n",
+"The_Moody_Blues\n",
+"Alanis_Morissette\n",
+"Van_Morrison\n",
+"Morrissey\n",
+"Mötley_Crüe\n",
+"Motörhead\n",
+"Mudvayne\n",
+"Mumford_&_Sons\n",
+"Muse_(band)\n",
+"My_Chemical_Romance\n",
+"Nickelback\n",
+"Stevie_Nicks\n",
+"Harry_Nilsson\n",
+"Nine_Inch_Nails\n",
+"Nirvana_(band)\n",
+"No_Doubt\n",
+"Ted_Nugent\n",
+"Oasis_(band)\n",
+"The_Offspring\n",
+"Roy_Orbison\n",
+"Ozzy_Osbourne\n",
+"Our_Lady_Peace\n",
+"The_Outfield\n",
+"P.O.D.\n",
+"Panic!_at_the_Disco\n",
+"Pantera\n",
+"Papa_Roach\n",
+"Paramore\n",
+"Pearl_Jam\n",
+"A_Perfect_Circle\n",
+"Tom_Petty_and_the_Heartbreakers\n",
+"Pink_Floyd\n",
+"Pixies_(band)\n",
+"Robert_Plant\n",
+"Poison_(American_band)\n",
+"The_Police\n",
+"Iggy_Pop\n",
+"Pop_Evil\n",
+"The_Presidents_of_the_United_States_of_America_(band)\n",
+"The_Pretenders\n",
+"Elvis_Presley\n",
+"The_Pretty_Reckless\n",
+"Primus_(band)\n",
+"Puddle_of_Mudd\n",
+"Queen_(band)\n",
+"Queens_of_the_Stone_Age\n",
+"Queensrÿche\n",
+"Quiet_Riot\n",
+"R.E.M.\n",
+"Radiohead\n",
+"Rage_Against_the_Machine\n",
+"Rainbow_(rock_band)\n",
+"Rammstein\n",
+"Ramones\n",
+"Red_Hot_Chili_Peppers\n",
+"Lou_Reed\n",
+"REO_Speedwagon\n",
+"Rise_Against\n",
+"The_Rolling_Stones\n",
+"Linda_Ronstadt\n",
+"Roxy_Music\n",
+"Royal_Blood_(band)\n",
+"Rush_(band)\n",
+"Saliva_(band)\n",
+"Sam_Fender\n",
+"Santana_(band)\n",
+"Joe_Satriani\n",
+"Saving_Abel\n",
+"Scorpions_(band)\n",
+"The_Script\n",
+"Seether\n",
+"Bob_Seger\n",
+"Sepultura\n",
+"Sex_Pistols\n",
+"Shakin'_Stevens\n",
+"Shinedown\n",
+"Silverchair\n",
+"Simon_&_Garfunkel\n",
+"Simple_Minds\n",
+"Simple_Plan\n",
+"Skid_Row_(American_band)\n",
+"Skillet_(band)\n",
+"Slade\n",
+"Slayer\n",
+"Slipknot_(band)\n",
+"Small_Faces\n",
+"Smash_Mouth\n",
+"The_Smashing_Pumpkins\n",
+"The_Smiths\n",
+"Smokie_(band)\n",
+"Snow_Patrol\n",
+"Social_Distortion\n",
+"Soundgarden\n",
+"Bruce_Springsteen\n",
+"Billy_Squier\n",
+"Staind\n",
+"Ringo_Starr\n",
+"Starset\n",
+"Starship_(band)\n",
+"Status_Quo_(band)\n",
+"Steely_Dan\n",
+"Steppenwolf_(band)\n",
+"Steve_Miller_Band\n",
+"Rod_Stewart\n",
+"Sting_(musician)\n",
+"The_Stone_Roses\n",
+"Stone_Sour\n",
+"Stone_Temple_Pilots\n",
+"The_Strokes\n",
+"Styx_(band)\n",
+"Sublime_(band)\n",
+"Sum_41\n",
+"Supertramp\n",
+"Survivor_(band)\n",
+"The_Sweet\n",
+"System_of_a_Down\n",
+"T._Rex_(band)\n",
+"Talking_Heads\n",
+"James_Taylor\n",
+"Tenacious_D\n",
+"Tesla_(band)\n",
+"Theory_of_a_Deadman\n",
+"Thin_Lizzy\n",
+"Third_Eye_Blind\n",
+"Thirty_Seconds_to_Mars\n",
+"George_Thorogood\n",
+"Thousand_Foot_Krutch\n",
+"Three_Days_Grace\n",
+"Three_Dog_Night\n",
+"Tool_(band)\n",
+"Toto_(band)\n",
+"Traffic_(band)\n",
+"The_Tragically_Hip\n",
+"Train_(band)\n",
+"Traveling_Wilburys\n",
+"Travis_(band)\n",
+"Trivium_(band)\n",
+"Twenty_One_Pilots\n",
+"Twisted_Sister\n",
+"U2\n",
+"Uriah_Heep_(band)\n",
+"The_Used\n",
+"Steve_Vai\n",
+"Ritchie_Valens\n",
+"Vampire_Weekend\n",
+"Van_Halen\n",
+"Stevie_Ray_Vaughan\n",
+"Velvet_Revolver\n",
+"The_Velvet_Underground\n",
+"The_Verve\n",
+"Volbeat\n",
+"Joe_Walsh\n",
+"Warrant_(American_band)\n",
+"Weezer\n",
+"Jack_White\n",
+"The_White_Stripes\n",
+"White_Zombie_(band)\n",
+"Whitesnake\n",
+"The_Who\n",
+"Paul_McCartney_and_Wings\n",
+"Steve_Winwood\n",
+"The_Yardbirds\n",
+"Yes_(band)\n",
+"Neil_Young\n",
+"Frank_Zappa\n",
+"Rob_Zombie\n",
+"The_Zombies\n",
+"ZZ_Top\"\"\".split(\n",
+" \"\\n\"\n",
+")"
+],
+"metadata": {
+"execution": {
+"iopub.status.busy": "2022-08-20T23:34:24.681697Z",
+"iopub.execute_input": "2022-08-20T23:34:24.682223Z",
+"iopub.status.idle": "2022-08-20T23:34:24.693942Z",
+"shell.execute_reply.started": "2022-08-20T23:34:24.682178Z",
+"shell.execute_reply": "2022-08-20T23:34:24.693004Z"
+},
+"trusted": true
+},
+"execution_count": 54,
+"outputs": []
+},
+{
+"cell_type": "code",
+"source": [
+"for i, raw_title in enumerate(pages_titles):\n",
+"    if i % 10 == 0:\n",
+"        print(i / len(pages_titles) * 100)\n",
+"    try:\n",
+"        page = wikipedia.page(title=raw_title.replace(\"_\", \" \"), auto_suggest=False)\n",
+"        id_ = page.pageid\n",
+"        url = page.url\n",
+"        dic = {\"content\": page.content, \"meta\": {\"name\": page.title, \"url\": url}}\n",
+"\n",
+"        with open(f\"/kaggle/working/rock_wiki/{id_}.json\", \"w\") as fo:\n",
+"            json.dump(dic, fo)\n",
+"    except Exception as e:\n",
+"        traceback.print_exc()\n",
+"        print(raw_title)"
+],
+"metadata": {
+"execution": {
+"iopub.status.busy": "2022-08-20T23:34:49.157641Z",
+"iopub.execute_input": "2022-08-20T23:34:49.158086Z",
+"iopub.status.idle": "2022-08-20T23:44:29.346317Z",
+"shell.execute_reply.started": "2022-08-20T23:34:49.158047Z",
+"shell.execute_reply": "2022-08-20T23:44:29.345032Z"
+},
+"trusted": true
+},
+"execution_count": 57,
+"outputs": []
+},
+{
+"cell_type": "code",
+"source": "! tar -czvf rock_wiki.tar.gz ./rock_wiki",
+"metadata": {
+"execution": {
+"iopub.status.busy": "2022-08-20T23:50:44.643851Z",
+"iopub.execute_input": "2022-08-20T23:50:44.644378Z",
+"iopub.status.idle": "2022-08-20T23:50:44.650366Z",
+"shell.execute_reply.started": "2022-08-20T23:50:44.644328Z",
+"shell.execute_reply": "2022-08-20T23:50:44.649169Z"
+},
+"trusted": true
+},
+"execution_count": 60,
+"outputs": []
+}
+]
+}
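Note: the loop above writes one JSON file per Wikipedia page, with the page text under `content` and the page title/URL under `meta`. A minimal sanity check (a sketch, assuming the Kaggle output path used above) that reads one file back:

```python
import glob
import json

# pick any of the JSON files written by the crawl loop above
sample_path = glob.glob("/kaggle/working/rock_wiki/*.json")[0]
with open(sample_path, "r") as fin:
    doc = json.load(fin)

print(doc["meta"]["name"], "-", doc["meta"]["url"])
print(doc["content"][:200])  # first 200 characters of the page text
```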
notebooks/indexing.ipynb
ADDED
@@ -0,0 +1,417 @@
+{
+"cells": [
+{
+"cell_type": "markdown",
+"metadata": {},
+"source": [
+"# Indexing\n",
+"Using [Haystack](https://github.com/deepset-ai/haystack), the following steps are performed:\n",
+"- load and preprocess documents downloaded from Wikipedia\n",
+"- create document store and write documents\n",
+"- initialize retriever and generate document embeddings"
+]
+},
+{
+"cell_type": "code",
+"execution_count": null,
+"metadata": {
+"_cell_guid": "b1076dfc-b9ad-4769-8c92-a6c4dae69d19",
+"_uuid": "8f2839f25d086af736a60e9eeb907d3b93b6e0e5",
+"trusted": true
+},
+"outputs": [],
+"source": [
+"! pip install farm-haystack[faiss-gpu]==1.7.0"
+]
+},
+{
+"cell_type": "markdown",
+"metadata": {},
+"source": [
+"## Load documents"
+]
+},
+{
+"cell_type": "code",
+"execution_count": 2,
+"metadata": {
+"execution": {
+"iopub.execute_input": "2022-08-21T08:23:23.692554Z",
+"iopub.status.busy": "2022-08-21T08:23:23.692208Z",
+"iopub.status.idle": "2022-08-21T08:23:23.700721Z",
+"shell.execute_reply": "2022-08-21T08:23:23.698130Z",
+"shell.execute_reply.started": "2022-08-21T08:23:23.692512Z"
+},
+"trusted": true
+},
+"outputs": [],
+"source": [
+"import glob, json"
+]
+},
+{
+"cell_type": "code",
+"execution_count": 3,
+"metadata": {
+"execution": {
+"iopub.execute_input": "2022-08-21T08:23:23.707774Z",
+"iopub.status.busy": "2022-08-21T08:23:23.704107Z",
+"iopub.status.idle": "2022-08-21T08:23:25.026910Z",
+"shell.execute_reply": "2022-08-21T08:23:25.025990Z",
+"shell.execute_reply.started": "2022-08-21T08:23:23.705010Z"
+},
+"trusted": true
+},
+"outputs": [],
+"source": [
+"docs = []\n",
+"\n",
+"for json_file in glob.glob(\"../input/crawl-rock/rock_wiki/*.json\"):\n",
+"    with open(json_file, \"r\") as fin:\n",
+"        doc = json.load(fin)\n",
+"\n",
+"    docs.append(doc)"
+]
+},
+{
+"cell_type": "code",
+"execution_count": 4,
+"metadata": {
+"execution": {
+"iopub.execute_input": "2022-08-21T08:23:25.030530Z",
+"iopub.status.busy": "2022-08-21T08:23:25.029931Z",
+"iopub.status.idle": "2022-08-21T08:23:25.039324Z",
+"shell.execute_reply": "2022-08-21T08:23:25.037960Z",
+"shell.execute_reply.started": "2022-08-21T08:23:25.030491Z"
+},
+"trusted": true
+},
+"outputs": [
+{
+"data": {
+"text/plain": [
+"453"
+]
+},
+"execution_count": 4,
+"metadata": {},
+"output_type": "execute_result"
+}
+],
+"source": [
+"len(docs)"
+]
+},
+{
+"cell_type": "markdown",
+"metadata": {},
+"source": [
+"## Preprocess documents"
+]
+},
+{
+"cell_type": "code",
+"execution_count": 6,
+"metadata": {
+"execution": {
+"iopub.execute_input": "2022-08-21T08:23:25.050479Z",
+"iopub.status.busy": "2022-08-21T08:23:25.050099Z",
+"iopub.status.idle": "2022-08-21T08:23:42.089083Z",
+"shell.execute_reply": "2022-08-21T08:23:42.087929Z",
+"shell.execute_reply.started": "2022-08-21T08:23:25.050446Z"
+},
+"trusted": true
+},
+"outputs": [
+{
+"data": {
+"application/vnd.jupyter.widget-view+json": {
+"model_id": "108e8c46426f44e7be98a8ae930d81ce",
+"version_major": 2,
+"version_minor": 0
+},
+"text/plain": [
+"Preprocessing: 0%| | 0/453 [00:00<?, ?docs/s]"
+]
+},
+"metadata": {},
+"output_type": "display_data"
+}
+],
+"source": [
+"# preprocess documents, splitting by chunks of 2 sentences\n",
+"\n",
+"from haystack.nodes import PreProcessor\n",
+"\n",
+"processor = PreProcessor(\n",
+"    clean_empty_lines=True,\n",
+"    clean_whitespace=True,\n",
+"    clean_header_footer=True,\n",
+"    split_by=\"sentence\",\n",
+"    split_length=2,\n",
+"    split_respect_sentence_boundary=False,\n",
+"    split_overlap=0,\n",
+"    language=\"en\",\n",
+")\n",
+"preprocessed_docs = processor.process(docs)"
+]
+},
+{
+"cell_type": "code",
+"execution_count": 7,
+"metadata": {
+"execution": {
+"iopub.execute_input": "2022-08-21T08:23:42.092031Z",
+"iopub.status.busy": "2022-08-21T08:23:42.090654Z",
+"iopub.status.idle": "2022-08-21T08:23:42.105757Z",
+"shell.execute_reply": "2022-08-21T08:23:42.104500Z",
+"shell.execute_reply.started": "2022-08-21T08:23:42.091989Z"
+},
+"trusted": true
+},
+"outputs": [
+{
+"data": {
+"text/plain": [
+"50024"
+]
+},
+"execution_count": 7,
+"metadata": {},
+"output_type": "execute_result"
+}
+],
+"source": [
+"len(preprocessed_docs)"
+]
+},
+{
+"cell_type": "code",
+"execution_count": 8,
+"metadata": {
+"execution": {
+"iopub.execute_input": "2022-08-21T08:23:42.108367Z",
+"iopub.status.busy": "2022-08-21T08:23:42.107604Z",
+"iopub.status.idle": "2022-08-21T08:23:42.117080Z",
+"shell.execute_reply": "2022-08-21T08:23:42.115996Z",
+"shell.execute_reply.started": "2022-08-21T08:23:42.108271Z"
+},
+"trusted": true
+},
+"outputs": [
+{
+"data": {
+"text/plain": [
+"[<Document: {'content': 'Disturbed is an American heavy metal band from Chicago, formed in 1994. The band includes vocalist David Draiman, guitarist/keyboardist Dan Donegan, bassist John Moyer, and drummer Mike Wengren.', 'content_type': 'text', 'score': None, 'meta': {'name': 'Disturbed (band)', 'url': 'https://en.wikipedia.org/wiki/Disturbed_(band)', '_split_id': 0}, 'embedding': None, 'id': '543d4f9f9023bfc277edf307a6aef870'}>,\n",
+" <Document: {'content': 'Donegan and Wengren have been involved in the band since its inception, with Moyer replacing former bassist Steve \"Fuzz\" Kmak and Draiman replacing original lead vocalist Erich Awalt. The band has released seven studio albums, five of which have consecutively debuted at number one on the Billboard 200.', 'content_type': 'text', 'score': None, 'meta': {'name': 'Disturbed (band)', 'url': 'https://en.wikipedia.org/wiki/Disturbed_(band)', '_split_id': 1}, 'embedding': None, 'id': 'dfb0ef877837c95b2e8b03cfe2ae2057'}>,\n",
+" <Document: {'content': \"Disturbed went into hiatus in October 2011, during which the band's members focused on various side projects, and returned in June 2015, releasing their first album in four years, Immortalized in August 2015. They also released two live albums, Music as a Weapon II in February 2004 and Disturbed: Live at Red Rocks in November 2016.\", 'content_type': 'text', 'score': None, 'meta': {'name': 'Disturbed (band)', 'url': 'https://en.wikipedia.org/wiki/Disturbed_(band)', '_split_id': 2}, 'embedding': None, 'id': 'e498da0cc7477f698f4a30c85dbfd95d'}>,\n",
+" <Document: {'content': 'With over 17 million records sold worldwide, Disturbed ranks alongside Slipknot and Godsmack as one of the most successful rock bands of the 21st century. == History ==\\n\\n=== Early years (1994–1996) ===\\nBefore David Draiman joined Disturbed, guitarist Dan Donegan, drummer Mike Wengren and bassist Steve \"Fuzz\" Kmak were in a band called Brawl with vocalist Erich Awalt.', 'content_type': 'text', 'score': None, 'meta': {'name': 'Disturbed (band)', 'url': 'https://en.wikipedia.org/wiki/Disturbed_(band)', '_split_id': 3}, 'embedding': None, 'id': '2b51b8f38befc2c53c65c66af6de1e05'}>,\n",
+" <Document: {'content': 'Before changing their name to \"Brawl\", however, Donegan mentioned in the band\\'s DVD, Decade of Disturbed, that the name was originally going to be \"Crawl\"; they switched it to \"Brawl\", due to the name already being used by another band. Awalt left the band shortly after the recording of a demo tape; the other three members advertised for a singer.', 'content_type': 'text', 'score': None, 'meta': {'name': 'Disturbed (band)', 'url': 'https://en.wikipedia.org/wiki/Disturbed_(band)', '_split_id': 4}, 'embedding': None, 'id': 'c9b3a4a74f8332c9d4c3d1e30cac49f7'}>,\n",
+" <Document: {'content': 'They posted an advertisement in the local music publication in Chicago, Illinois, called the \"Illinois Entertainer\". Draiman answered the advertisement after going to twenty other auditions that month.', 'content_type': 'text', 'score': None, 'meta': {'name': 'Disturbed (band)', 'url': 'https://en.wikipedia.org/wiki/Disturbed_(band)', '_split_id': 5}, 'embedding': None, 'id': '946a4a27f2f1838ec070a951dab2e1b0'}>,\n",
+" <Document: {'content': 'Guitarist Dan Donegan commented on Draiman: \"You know, out of all the singers that we had talked to or auditioned, he [Draiman] was the only singer who was ready to go with originals. And that impressed me, just to attempt that\".With regard to Draiman being the singer for the band, Donegan said, \"After a minute or two, he just starts banging out these melodies that were huge...I\\'m playing my guitar and I\\'m grinning from ear to ear, trying not to give it away that I like this guy, you know, because I don\\'t want to, you know...[say] \\'Yeah, we\\'ll give you a call back.', 'content_type': 'text', 'score': None, 'meta': {'name': 'Disturbed (band)', 'url': 'https://en.wikipedia.org/wiki/Disturbed_(band)', '_split_id': 6}, 'embedding': None, 'id': 'ac2bea954f4d19cc0e48868fb503e23e'}>,\n",
+" <Document: {'content': \"We'll, you know, discuss it.' But I was so psyched.\", 'content_type': 'text', 'score': None, 'meta': {'name': 'Disturbed (band)', 'url': 'https://en.wikipedia.org/wiki/Disturbed_(band)', '_split_id': 7}, 'embedding': None, 'id': '852a32a0d3ce1eefa48d6420fab35dba'}>,\n",
+" <Document: {'content': 'Chill up my spine. I\\'m like, \\'There is something here.\\'\"', 'content_type': 'text', 'score': None, 'meta': {'name': 'Disturbed (band)', 'url': 'https://en.wikipedia.org/wiki/Disturbed_(band)', '_split_id': 8}, 'embedding': None, 'id': 'ebb584a0fee18b51cd14ceadcb4a7bb8'}>,\n",
+" <Document: {'content': 'As drummer Mike Wengren commented, \"We clicked right off the bat.\" Draiman then joined the band in 1996 and the band was renamed Disturbed.', 'content_type': 'text', 'score': None, 'meta': {'name': 'Disturbed (band)', 'url': 'https://en.wikipedia.org/wiki/Disturbed_(band)', '_split_id': 9}, 'embedding': None, 'id': '51340b7bf229e5b8d4460341f7aaa9d0'}>]"
+]
+},
+"execution_count": 8,
+"metadata": {},
+"output_type": "execute_result"
+}
+],
+"source": [
+"preprocessed_docs[:10]"
+]
+},
+{
+"cell_type": "code",
+"execution_count": null,
+"metadata": {},
+"outputs": [],
+"source": [
+"# keep only documents with at least 10 words; shorter ones are not very informative\n",
+"preprocessed_docs = [doc for doc in preprocessed_docs if len(doc.content.split()) >= 10]"
+]
+},
237 |
+
"cell_type": "markdown",
|
238 |
+
"metadata": {},
|
239 |
+
"source": [
|
240 |
+
"## Create document store ([FAISS](https://github.com/facebookresearch/faiss)) and write documents"
|
241 |
+
]
|
242 |
+
},
|
243 |
+
{
|
244 |
+
"cell_type": "code",
|
245 |
+
"execution_count": 9,
|
246 |
+
"metadata": {
|
247 |
+
"execution": {
|
248 |
+
"iopub.execute_input": "2022-08-21T08:23:42.119585Z",
|
249 |
+
"iopub.status.busy": "2022-08-21T08:23:42.118544Z",
|
250 |
+
"iopub.status.idle": "2022-08-21T08:23:42.124669Z",
|
251 |
+
"shell.execute_reply": "2022-08-21T08:23:42.123597Z",
|
252 |
+
"shell.execute_reply.started": "2022-08-21T08:23:42.119551Z"
|
253 |
+
},
|
254 |
+
"trusted": true
|
255 |
+
},
|
256 |
+
"outputs": [],
|
257 |
+
"source": [
|
258 |
+
"from haystack.document_stores import FAISSDocumentStore\n",
|
259 |
+
"from haystack.nodes import EmbeddingRetriever"
|
260 |
+
]
|
261 |
+
},
|
262 |
+
{
|
263 |
+
"cell_type": "code",
|
264 |
+
"execution_count": 10,
|
265 |
+
"metadata": {
|
266 |
+
"execution": {
|
267 |
+
"iopub.execute_input": "2022-08-21T08:23:42.129562Z",
|
268 |
+
"iopub.status.busy": "2022-08-21T08:23:42.128772Z",
|
269 |
+
"iopub.status.idle": "2022-08-21T08:23:42.259879Z",
|
270 |
+
"shell.execute_reply": "2022-08-21T08:23:42.258950Z",
|
271 |
+
"shell.execute_reply.started": "2022-08-21T08:23:42.129518Z"
|
272 |
+
},
|
273 |
+
"trusted": true
|
274 |
+
},
|
275 |
+
"outputs": [],
|
276 |
+
"source": [
|
277 |
+
"# the document store settings are those compatible with Embedding Retriever\n",
|
278 |
+
"document_store = FAISSDocumentStore(similarity=\"dot_product\", embedding_dim=768)"
|
279 |
+
]
|
280 |
+
},
|
281 |
+
{
|
282 |
+
"cell_type": "code",
|
283 |
+
"execution_count": 46,
|
284 |
+
"metadata": {
|
285 |
+
"execution": {
|
286 |
+
"iopub.execute_input": "2022-08-21T08:43:25.952230Z",
|
287 |
+
"iopub.status.busy": "2022-08-21T08:43:25.951856Z",
|
288 |
+
"iopub.status.idle": "2022-08-21T08:46:12.506842Z",
|
289 |
+
"shell.execute_reply": "2022-08-21T08:46:12.505845Z",
|
290 |
+
"shell.execute_reply.started": "2022-08-21T08:43:25.952198Z"
|
291 |
+
},
|
292 |
+
"trusted": true
|
293 |
+
},
|
294 |
+
"outputs": [
|
295 |
+
{
|
296 |
+
"data": {
|
297 |
+
"application/vnd.jupyter.widget-view+json": {
|
298 |
+
"model_id": "dbd72ecf0d36401ba26826f7d9a42540",
|
299 |
+
"version_major": 2,
|
300 |
+
"version_minor": 0
|
301 |
+
},
|
302 |
+
"text/plain": [
|
303 |
+
"Writing Documents: 0%| | 0/50024 [00:00<?, ?it/s]"
|
304 |
+
]
|
305 |
+
},
|
306 |
+
"metadata": {},
|
307 |
+
"output_type": "display_data"
|
308 |
+
}
|
309 |
+
],
|
310 |
+
"source": [
|
311 |
+
"# write documents\n",
|
312 |
+
"document_store.write_documents(preprocessed_docs)"
|
313 |
+
]
|
314 |
+
},
|
+{
+"cell_type": "markdown",
+"metadata": {},
+"source": [
+"## Initialize retriever (Embedding Retriever) and generate document embeddings\n",
+"We choose a Sentence Transformer model that is suitable for asymmetric semantic search (a short query against longer passages), according to the [documentation](https://www.sbert.net/examples/applications/semantic-search/README.html#symmetric-vs-asymmetric-semantic-search)."
+]
+},
+{
+"cell_type": "code",
+"execution_count": null,
+"metadata": {
+"execution": {
+"iopub.execute_input": "2022-08-21T08:56:25.360959Z",
+"iopub.status.busy": "2022-08-21T08:56:25.360546Z",
+"iopub.status.idle": "2022-08-21T08:58:07.214654Z",
+"shell.execute_reply": "2022-08-21T08:58:07.213653Z",
+"shell.execute_reply.started": "2022-08-21T08:56:25.360926Z"
+},
+"trusted": true
+},
+"outputs": [],
+"source": [
+"from haystack.nodes import EmbeddingRetriever\n",
+"\n",
+"retriever = EmbeddingRetriever(\n",
+"    document_store=document_store,\n",
+"    embedding_model=\"sentence-transformers/msmarco-distilbert-base-tas-b\",\n",
+"    model_format=\"sentence_transformers\",\n",
+"    embed_meta_fields=[\"name\"],\n",
+")\n",
+"\n",
+"# generate embeddings\n",
+"document_store.update_embeddings(retriever)"
+]
+},
+{
+"cell_type": "markdown",
+"metadata": {},
+"source": [
+"## Save and export index"
+]
+},
+{
+"cell_type": "code",
+"execution_count": null,
+"metadata": {},
+"outputs": [],
+"source": [
+"import shutil\n",
+"import glob"
+]
+},
+{
+"cell_type": "code",
+"execution_count": 73,
+"metadata": {
+"execution": {
+"iopub.execute_input": "2022-08-21T08:58:33.494417Z",
+"iopub.status.busy": "2022-08-21T08:58:33.493822Z",
+"iopub.status.idle": "2022-08-21T08:58:33.635915Z",
+"shell.execute_reply": "2022-08-21T08:58:33.634599Z",
+"shell.execute_reply.started": "2022-08-21T08:58:33.494382Z"
+},
+"trusted": true
+},
+"outputs": [],
+"source": [
+"OUT_DIR = \"YOUR-OUT-DIR\"\n",
+"\n",
+"document_store.save(\"my_faiss_index.faiss\")\n",
+"for f in glob.glob(\"*faiss*.*\") + glob.glob(\"faiss*.*\"):\n",
+"    shutil.copy(f, OUT_DIR)"
+]
+}
+],
+"metadata": {
+"kernelspec": {
+"display_name": "Python 3.7.13 ('venv': venv)",
+"language": "python",
+"name": "python3"
+},
+"language_info": {
+"codemirror_mode": {
+"name": "ipython",
+"version": 3
+},
+"file_extension": ".py",
+"mimetype": "text/x-python",
+"name": "python",
+"nbconvert_exporter": "python",
+"pygments_lexer": "ipython3",
+"version": "3.7.13"
+},
+"vscode": {
+"interpreter": {
+"hash": "c114177cb475e38b99e396ae1ef7cfcaaa7967120589f47745b82f90d7e35d1b"
+}
+}
+},
+"nbformat": 4,
+"nbformat_minor": 4
+}
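Note: `document_store.save("my_faiss_index.faiss")` writes the FAISS vectors plus a sidecar config (`my_faiss_index.json`), while the document texts live in the SQLite file (`faiss_document_store.db` by default). A minimal sketch of loading the exported index and querying it elsewhere (e.g. in the Streamlit app), assuming all three files sit in the working directory and reusing the same retriever model as above:

```python
from haystack.document_stores import FAISSDocumentStore
from haystack.nodes import EmbeddingRetriever

# restores the index together with the settings saved in my_faiss_index.json
document_store = FAISSDocumentStore.load(index_path="my_faiss_index.faiss")

retriever = EmbeddingRetriever(
    document_store=document_store,
    embedding_model="sentence-transformers/msmarco-distilbert-base-tas-b",
    model_format="sentence_transformers",
    embed_meta_fields=["name"],
)

# asymmetric semantic search: a short query against the indexed passages
for doc in retriever.retrieve(query="When was Metallica formed?", top_k=3):
    print(doc.meta["name"], "-", doc.content[:100])
```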
pages/Info.py
ADDED
@@ -0,0 +1,10 @@
+import streamlit as st
+
+from app_utils.frontend_utils import build_sidebar
+
+build_sidebar()
+
+with open("README.md", "r") as fin:
+    readme = fin.read().rpartition("---")[-1]
+
+st.markdown(readme, unsafe_allow_html=True)
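Note: `rpartition("---")[-1]` keeps only the text after the *last* `---`, which drops the YAML front matter from `README.md` (assuming the README starts with a `---`-delimited header, as Hugging Face Spaces READMEs typically do). For example:

```python
text = "---\ntitle: demo\n---\n# Actual page content"
print(text.rpartition("---")[-1])  # -> "\n# Actual page content"
```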
requirements.txt
ADDED
@@ -0,0 +1,6 @@
+farm-haystack==1.7.1
+faiss-cpu<=1.7.2
+plotly==5.10.0
+# commented out so as not to interfere with the Streamlit SDK in HF Spaces
+# uncomment for local installation
+#streamlit==1.10.0