mastodon migration
Files changed:
- README.md +2 -4
- app.py +6 -7
- requirements.txt +4 -2
- utils/haystack.py +24 -28
- utils/ui.py +2 -2
README.md
CHANGED
@@ -14,10 +14,10 @@ pinned: false
 
 ### Try it out on [🤗 Spaces](https://huggingface.co/spaces/deepset/should-i-follow)
 
-##### A simple app to get an overview of what the …
+##### A simple app to get an overview of what the Mastodon user has been posting about and their tone
 
 This is a demo just for fun 🥳
-This repo contains a streamlit application that given a …
+This repo contains a streamlit application that, given a Mastodon username, tells you what type of things they've been posting about lately, their tone, and the languages they use. It uses OpenAI's `text-davinci-003` LLM.
 
 It's been built with [Haystack](https://haystack.deepset.ai) using the [`PromptNode`](https://docs.haystack.deepset.ai/docs/prompt_node) and by creating a custom [`PromptTemplate`](https://docs.haystack.deepset.ai/docs/prompt_node#templates)
 
@@ -40,8 +40,6 @@ To run the bare application which does _nothing_:
 `pip install -r requirements.txt`
 2. Run the streamlit app:
 `streamlit run app.py`
-3. Create a `.env` and add your Twitter Bearer token:
-`TWITTER_BEARER_TOKEN`
 
 This will start up the app on `localhost:8501` where you will find a simple search bar
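The README's note that the app is built with Haystack's `PromptNode` and a custom `PromptTemplate` maps onto the Haystack v1 API used throughout this commit. Below is a minimal sketch, assuming the `farm-haystack` 1.x API pinned in requirements.txt; the shortened prompt text, variable names, and placeholder API key are illustrative only (the real template lives in utils/haystack.py further down):

```python
from haystack.nodes import PromptNode, PromptTemplate

# Shortened stand-in for the full template defined in utils/haystack.py.
summary_template = PromptTemplate(
    prompt="""You will be given a post stream belonging to a specific Mastodon profile.
Summarize what they've lately been posting about, their tone, and the languages they use.

Post stream: {join(documents)}

Summary:"""
)

# The PromptNode fills in the template with the fetched posts and calls the OpenAI model.
prompt_node = PromptNode(
    model_name_or_path="text-davinci-003",
    default_prompt_template=summary_template,
    api_key="YOUR_OPENAI_KEY",  # placeholder
)
```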
app.py
CHANGED
@@ -9,26 +9,25 @@ import streamlit as st
 
 from utils.haystack import query, start_haystack
 from utils.ui import reset_results, set_initial_state, sidebar
-from utils.config import TWITTER_BEARER
 
 set_initial_state()
 
 sidebar()
 
-st.write("# 🐤 What have they been …
+st.write("# 🐤 What have they been posting about lately on Mastodon?")
 
 if st.session_state.get("OPENAI_API_KEY"):
-
+    pipeline = start_haystack(st.session_state.get("OPENAI_API_KEY"))
     st.session_state["api_key_configured"] = True
     search_bar, button = st.columns(2)
     # Search bar
     with search_bar:
-        username = st.text_input("Please provide a …
+        username = st.text_input("Please provide a Mastodon username", on_change=reset_results)
 
     with button:
         st.write("")
         st.write("")
-        run_pressed = st.button("Search …
+        run_pressed = st.button("Search posts (toots)")
 else:
     st.write("Please provide your OpenAI Key to start using the application")
     st.write("If you are using a smaller screen, open the sidebar from the top left to provide your OpenAI Key 🙌")
@@ -44,7 +43,7 @@ if st.session_state.get("api_key_configured"):
     st.session_state.username = username
     with st.spinner("🔎"):
         try:
-            st.session_state.result = query(username, …
+            st.session_state.result = query(username, pipeline)
         except JSONDecodeError as je:
             st.error(
                 "👓 An error occurred reading the results. Is the document store working?"
@@ -55,5 +54,5 @@ if st.session_state.get("api_key_configured"):
 
 if st.session_state.result:
     voice = st.session_state.result
-    st.write(voice[0])
+    st.write(voice['results'][0])
 
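The last hunk switches the display from `voice[0]` to `voice['results'][0]`: with the new pipeline, `query()` returns the dictionary produced by Haystack's `Pipeline.run`, and a pipeline ending in a `PromptNode` places its generations under the `results` key. A rough sketch of that return value, with placeholder text (only `results` is relied on by app.py):

```python
# Approximate shape of st.session_state.result after a successful run;
# the summary text is a placeholder.
example_result = {
    "results": ["This user has lately been posting about ..."],  # PromptNode generations
    # ...plus the other bookkeeping keys Pipeline.run includes (query, params, ...)
}
print(example_result["results"][0])
```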
requirements.txt
CHANGED
@@ -1,5 +1,7 @@
-
+safetensors==0.3.3.post1
+farm-haystack==1.20.0
 streamlit==1.21.0
 markdown
 st-annotated-text
-python-dotenv
+python-dotenv
+mastodon-fetcher-haystack
utils/haystack.py
CHANGED
@@ -1,55 +1,51 @@
 import streamlit as st
-import …
-from …
-
+from mastodon_fetcher_haystack.mastodon_fetcher import MastodonFetcher
+from haystack import Pipeline
 from haystack.nodes import PromptNode, PromptTemplate
 
 def start_haystack(openai_key):
     #Use this function to construct a pipeline
-
-
+    fetcher = MastodonFetcher()
 
-
-    You may go into some detail about what topics they tend to like …
+    mastodon_template = PromptTemplate(prompt="""You will be given a post stream belonging to a specific Mastodon profile. Answer with a summary of what they've lately been posting about and in what languages.
+    You may go into some detail about what topics they tend to like posting about. Please also mention their overall tone, for example: positive,
     negative, political, sarcastic or something else.
 
     Examples:
 
-
-
-
-
+    Post stream: [@deepset_ai](https://mastodon.social/@deepset_ai): Come join our Haystack server for our first Discord event tomorrow, a deepset AMA session with @rusic_milos @malte_pietsch…
+    [@deepset_ai](https://mastodon.social/@deepset_ai): Join us for a chat! On Thursday 25th we are hosting a 'deepset - Ask Me Anything' session on our brand new Discord. Come…
+    [@deepset_ai](https://mastodon.social/@deepset_ai): Curious about how you can use @OpenAI GPT3 in a Haystack pipeline? This week we released Haystack 1.7 with which we introdu…
+    [@deepset_ai](https://mastodon.social/@deepset_ai): So many updates from @deepset_ai today!
 
-    Summary: This user has lately been …
+    Summary: This user has lately been reposting posts from @deepset_ai. The topics of the posts have been around the Haystack community, NLP and GPT. They've
     been posting in English, and have had a positive, informative tone.
 
-
+    Post stream: I've directed my team to set sharper rules on how we deal with unidentified objects.\n\nWe will inventory, improve ca…
     the incursion by China’s high-altitude balloon, we enhanced radar to pick up slower objects.\n \nBy doing so, w…
     I gave an update on the United States’ response to recent aerial objects.
 
-    Summary: This user has lately been …
-    baloon. Their …
+    Summary: This user has lately been posting about having sharper rules to deal with unidentified objects and an incursion by China's high-altitude
+    balloon. Their posts have mostly been neutral but determined in tone. They mostly post in English.
 
-
+    Post stream: {join(documents)}
 
     Summary:
     """)
+    prompt_node = PromptNode(model_name_or_path="gpt-4", default_prompt_template=mastodon_template, api_key=openai_key)
+
+    st.session_state["haystack_started"] = True
 
-
-
+    mastodon_pipeline = Pipeline()
+    mastodon_pipeline.add_node(component=fetcher, name="MastodonFetcher", inputs=["Query"])
+    mastodon_pipeline.add_node(component=prompt_node, name="PromptNode", inputs=["MastodonFetcher"])
+    return mastodon_pipeline
 
 
 @st.cache_data(show_spinner=True)
-def query(username, …
-    headers = {"Authorization": "Bearer {}".format(TWITTER_BEARER)}
-    print(username)
-    url = f"https://api.twitter.com/1.1/statuses/user_timeline.json?screen_name={username}&count={60}"
+def query(username, _pipeline):
     try:
-
-        twitter_stream = ""
-        for tweet in response.json():
-            twitter_stream += tweet["text"]
-        result = _prompter.prompt(prompt_template=_template, tweets=twitter_stream)
+        result = _pipeline.run(query=username, params={"MastodonFetcher": {"last_k_posts": 20}})
     except Exception as e:
-        result = ["Please make sure you are providing a correct, public …
+        result = ["Please make sure you are providing a correct, public Mastodon account"]
     return result
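Two details in the new `query()` are worth noting: the leading underscore in `_pipeline` tells Streamlit's `st.cache_data` not to hash that argument (a Haystack `Pipeline` object is not a useful cache key), so only `username` determines cache hits; and the `params` dict scopes `last_k_posts` to the `MastodonFetcher` node. A minimal sketch of the call the helper makes, assuming `pipeline` is the object returned by `start_haystack` and using a placeholder handle:

```python
# Equivalent of what query() runs for one username; the handle is a placeholder.
output = pipeline.run(
    query="@[email protected]",                        # Mastodon account to summarize
    params={"MastodonFetcher": {"last_k_posts": 20}},  # fetch only the 20 most recent toots
)
print(output["results"][0])                            # the PromptNode's generated summary
```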
utils/ui.py
CHANGED
@@ -6,7 +6,7 @@ def set_state_if_absent(key, value):
         st.session_state[key] = value
 
 def set_initial_state():
-    set_state_if_absent("username", "Provide a …
+    set_state_if_absent("username", "Provide a Mastodon username (e.g. [email protected])")
     set_state_if_absent("result", None)
     set_state_if_absent("haystack_started", False)
 
@@ -28,7 +28,7 @@ def sidebar():
     st.markdown(
         "## How to use\n"
         "1. Enter your [OpenAI API key](https://platform.openai.com/account/api-keys) below\n"
-        "2. Enter a …
+        "2. Enter a Mastodon username in the searchbar\n"
         "3. Enjoy 🤗\n"
     )
 
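For context on the first hunk: only the assignment on line 6 of `set_state_if_absent` is visible above; the guard around it is inferred from the helper's name. A sketch of what the helper presumably looks like in full:

```python
import streamlit as st

# Inferred shape of the helper the first hunk sits inside: seed a session-state
# default only when the key is missing, so user input survives Streamlit reruns.
def set_state_if_absent(key, value):
    if key not in st.session_state:
        st.session_state[key] = value
```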