import streamlit as st
from urllib.request import urlopen, Request
from bs4 import BeautifulSoup
import pandas as pd
import plotly.express as px
from dateutil import parser
import datetime
import requests
from transformers import BertTokenizer, BertForSequenceClassification, pipeline

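# Streamlit app: scrape FinViz news headlines for a ticker, score them with FinBERT
# (yiyanghkust/finbert-tone), and visualize the model's prediction confidence over time.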
st.set_page_config(page_title="Stock News Confidence Analyzer", layout="wide")

# Initialize FinBERT (yiyanghkust/finbert-tone) pipeline only once and cache
@st.cache_resource(show_spinner=False)
def load_model():
    model = BertForSequenceClassification.from_pretrained('yiyanghkust/finbert-tone', num_labels=3)
    tokenizer = BertTokenizer.from_pretrained('yiyanghkust/finbert-tone')
    return pipeline("sentiment-analysis", model=model, tokenizer=tokenizer)

finbert = load_model()

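# Check that a URL is reachable: send a HEAD request (following redirects) and
# treat any 2xx response as valid, retrying a few times before giving up.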
def verify_link(url, timeout=10, retries=3):
    for _ in range(retries):
        try:
            response = requests.head(url, timeout=timeout, allow_redirects=True)
            if 200 <= response.status_code < 300:
                return True
        except requests.RequestException:
            continue
    return False

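# Fetch the Finviz quote page for the given ticker and return its news table
# element (id="news-table"), or None if the request or parsing fails.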
def get_news(ticker):
    try:
        finviz_url = 'https://finviz.com/quote.ashx?t='
        url = finviz_url + ticker
        req = Request(url=url, headers={'User-Agent': 'Mozilla/5.0'})
        response = urlopen(req)
        html = BeautifulSoup(response, 'html.parser')
        news_table = html.find(id='news-table')
        return news_table
    except Exception as e:
        st.write("Error fetching news:", str(e))
        return None

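# Extract the headline, link, and timestamp from each row of the news table.
# Rows that carry only a time inherit today's date; each link is also verified.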
def parse_news(news_table):
    parsed_news = []
    
    for x in news_table.find_all('tr'):
        try:
            text = x.a.get_text()
            link = x.a['href']
            date_scrape = x.td.text.strip().split()
            
            if len(date_scrape) == 1:
                date = datetime.datetime.today().strftime('%Y-%m-%d')
                time = date_scrape[0]
            else:
                date = date_scrape[0]
                time = date_scrape[1]

            datetime_str = f"{date} {time}"
            datetime_parsed = parser.parse(datetime_str)

            is_valid = verify_link(link)
            
            parsed_news.append([datetime_parsed, text, link, is_valid])
            
        except Exception as e:
            print("Error parsing news:", e)
            continue
    
    columns = ['datetime', 'headline', 'link', 'is_valid']
    parsed_news_df = pd.DataFrame(parsed_news, columns=columns)
    
    return parsed_news_df

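# Run the FinBERT pipeline over the headlines in batches and attach the model's
# prediction confidence as a new 'confidence' column, indexed by datetime.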
def score_news(parsed_news_df):
    # Send headlines in smaller batches to speed up processing
    batch_size = 10
    parsed_news_df = parsed_news_df.reset_index(drop=True)
    confidence_scores = []

    for i in range(0, len(parsed_news_df), batch_size):
        batch_headlines = parsed_news_df['headline'][i:i+batch_size].tolist()
        predictions = finbert(batch_headlines)
        
        for pred in predictions:
            confidence = pred['score']  # Only confidence score is considered
            confidence_scores.append(confidence)

    scores_df = pd.DataFrame({'confidence': confidence_scores})
    parsed_and_scored_news = pd.concat([parsed_news_df, scores_df], axis=1)
    parsed_and_scored_news = parsed_and_scored_news.set_index('datetime')
    
    return parsed_and_scored_news

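# Plot the mean confidence score per hour as a line chart with a fixed 0-1 y-axis.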
def plot_hourly_confidence(parsed_and_scored_news, ticker):
    mean_confidence = parsed_and_scored_news['confidence'].resample('h').mean()

    fig = px.line(mean_confidence, x=mean_confidence.index, y='confidence',
                  title=f'{ticker} Hourly Confidence Scores',
                  color_discrete_sequence=['blue'])

    fig.update_layout(
        yaxis=dict(
            title="Confidence Score",
            range=[0, 1]
        )
    )

    return fig

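# Same as the hourly chart, but with confidence scores averaged per calendar day.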
def plot_daily_confidence(parsed_and_scored_news, ticker):
    mean_confidence = parsed_and_scored_news['confidence'].resample('D').mean()

    fig = px.line(mean_confidence, x=mean_confidence.index, y='confidence',
                  title=f'{ticker} Daily Confidence Scores',
                  color_discrete_sequence=['blue'])

    fig.update_layout(
        yaxis=dict(
            title="Confidence Score",
            range=[0, 1]
        )
    )

    return fig

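# Translate the mean confidence score into a plain-language recommendation
# using fixed thresholds (>= 0.7 high, >= 0.4 moderate, otherwise low).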
def get_recommendation(confidence_scores):
    avg_confidence = confidence_scores['confidence'].mean()
    
    if avg_confidence >= 0.7:
        return f"High confidence in predictions (Confidence Score: {avg_confidence:.2f}). The news analysis suggests a strong trend. Consider acting based on the news."
    elif avg_confidence >= 0.4:
        return f"Moderate confidence in predictions (Confidence Score: {avg_confidence:.2f}). The trend is somewhat clear. Be cautious and consider consulting other sources."
    else:
        return f"Low confidence in predictions (Confidence Score: {avg_confidence:.2f}). The news does not strongly indicate a particular trend. Consider waiting for more clear signals."

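# Streamlit UI: prompt for a ticker, fetch and score its news, then show the
# recommendation, the hourly/daily charts, and the headline table.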
st.header("Stock News Confidence Analyzer (HKUST FinBERT)")

ticker = st.text_input('Enter Stock Ticker', '').upper()

try:
    st.subheader(f"Confidence Analysis and Recommendation for {ticker} Stock")
    news_table = get_news(ticker)
    if news_table is not None:
        parsed_news_df = parse_news(news_table)
        parsed_and_scored_news = score_news(parsed_news_df)

        # Generate and display recommendation based on confidence scores
        recommendation = get_recommendation(parsed_and_scored_news)
        st.write(recommendation)
        
        # Display a disclaimer
        st.warning("Disclaimer: This recommendation is based solely on the confidence of the sentiment analysis. Always do your own research and consult with a qualified financial advisor before making investment decisions.")
        
        fig_hourly = plot_hourly_confidence(parsed_and_scored_news, ticker)
        fig_daily = plot_daily_confidence(parsed_and_scored_news, ticker)
        
        st.plotly_chart(fig_hourly)
        st.plotly_chart(fig_daily)

        description = f"""
            The above charts display the average confidence scores of {ticker} stock hourly and daily.
            The table below shows recent headlines with their confidence scores.
            The news headlines are obtained from the FinViz website.
            Confidence scores indicate how certain the model is about its sentiment predictions.
            Links have been verified for validity.
            """
            
        st.write(description)     
        
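        # Render each link as a clickable HTML anchor labeled with its verification status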
        parsed_and_scored_news['link'] = parsed_and_scored_news.apply(
            lambda row: f'<a href="{row["link"]}" target="_blank">{"Valid✅" if row["is_valid"] else "Invalid❌"} Link</a>', 
            axis=1
        )
        
        display_df = parsed_and_scored_news.drop(columns=['is_valid'])
        st.write(display_df[['headline', 'confidence', 'link']].to_html(escape=False), unsafe_allow_html=True)

    else:
        st.write("No news available or invalid ticker symbol.")
    
except Exception as e:
    print(str(e))
    st.write("Enter a correct stock ticker, e.g. 'AAPL' above and hit Enter.")

hide_streamlit_style = """
<style>
#MainMenu {visibility: hidden;}
footer {visibility: hidden;}
</style>
"""
st.markdown(hide_streamlit_style, unsafe_allow_html=True)