Akshayram1 committed
Commit f3054d4
1 Parent(s): 92390c9
Update app.py
app.py CHANGED
@@ -9,218 +9,284 @@ import requests
 from transformers import AutoModelForSequenceClassification, AutoTokenizer, pipeline
 import torch

-

 # Initialize FinBERT-tone model and tokenizer
 @st.cache_resource
-def
-
-
-

-

 def verify_link(url, timeout=10, retries=3):
     for _ in range(retries):
         try:
             response = requests.head(url, timeout=timeout, allow_redirects=True)
-
-            return True
         except requests.RequestException:
             continue
     return False

 def get_news(ticker):
-
-
-
-
-
-
-

 def parse_news(news_table):
     parsed_news = []

-
-
-
-        link =
-

-        if len(
             date = datetime.datetime.today().strftime('%Y-%m-%d')
         else:
-            time = date_scrape[0]
-            date =
-            time =
-
-        datetime_str = f"{date} {time}"
-        datetime_parsed = parser.parse(datetime_str)
-
-        is_valid = verify_link(link)

-

-
-
-
-
-
-    parsed_news_df = pd.DataFrame(parsed_news, columns=columns)
-
-    return parsed_news_df

-def
-
-
-
-
-
-    for pred in predictions:
-        label = pred['label']
-        score = pred['score']

-
-
-
-
-
-
-
-
-        sentiment_data.append({
             'sentiment_score': sentiment_score,
             'tone': label,
             'confidence': score
-        }
-
-
-
-
-    # Join with original news DataFrame
-    parsed_and_scored_news = parsed_news_df.join(scores_df)
-    parsed_and_scored_news = parsed_and_scored_news.set_index('datetime')
-
-    return parsed_and_scored_news
-
-def plot_hourly_sentiment(parsed_and_scored_news, ticker):
-    mean_scores = parsed_and_scored_news['sentiment_score'].resample('h').mean()
-
-    fig = px.bar(mean_scores, x=mean_scores.index, y='sentiment_score',
-                 title=f'{ticker} Hourly Sentiment Scores',
-                 color='sentiment_score',
-                 color_continuous_scale=['red', 'yellow', 'green'],
-                 range_color=[-1, 1])
-
-    fig.update_layout(coloraxis_colorbar=dict(
-        title="Sentiment",
-        tickvals=[-1, 0, 1],
-        ticktext=["Negative", "Neutral", "Positive"],
-    ))
-
-    return fig

-def
-
-
-
-
-                 title=f'{ticker} News Sentiment Distribution',
-                 color=tone_counts.index,
-                 color_discrete_map={'Positive': 'green',
-                                     'Neutral': 'yellow',
-                                     'Negative': 'red'})
-
-    return fig
-
-def get_recommendation(sentiment_scores):
-    avg_sentiment = sentiment_scores['sentiment_score'].mean()
-    tone_counts = sentiment_scores['tone'].value_counts()
-    total_articles = len(sentiment_scores)
-
-    # Calculate percentages
-    positive_pct = tone_counts.get('Positive', 0) / total_articles * 100
-    negative_pct = tone_counts.get('Negative', 0) / total_articles * 100
-
-    if avg_sentiment >= 0.3 and positive_pct >= 50:
-        return f"Strong Positive Sentiment (Score: {avg_sentiment:.2f}, {positive_pct:.1f}% positive news). The recent news suggests a very favorable outlook for this stock. Consider buying or holding if you already own it."
-    elif avg_sentiment >= 0.1:
-        return f"Moderately Positive Sentiment (Score: {avg_sentiment:.2f}, {positive_pct:.1f}% positive news). The recent news leans positive. Consider holding if you own the stock."
-    elif avg_sentiment <= -0.3 and negative_pct >= 50:
-        return f"Strong Negative Sentiment (Score: {avg_sentiment:.2f}, {negative_pct:.1f}% negative news). The recent news suggests significant caution. Consider selling or avoiding this stock for now."
-    elif avg_sentiment <= -0.1:
-        return f"Moderately Negative Sentiment (Score: {avg_sentiment:.2f}, {negative_pct:.1f}% negative news). The recent news leans negative. Consider reducing exposure or waiting for better entry points."
-    else:
-        return f"Neutral Sentiment (Score: {avg_sentiment:.2f}). The recent news shows mixed or neutral signals. Consider holding current positions and monitoring for clearer trends."
-
-st.header("Stock News Sentiment Analyzer (FinBERT-tone)")
-
-ticker = st.text_input('Enter Stock Ticker', '').upper()
-
-try:
-    if ticker:
-        st.subheader(f"Sentiment Analysis and Recommendation for {ticker} Stock")
-        news_table = get_news(ticker)
-        parsed_news_df = parse_news(news_table)
-        parsed_and_scored_news = score_news(parsed_news_df)

-        #
-
-        st.write(recommendation)

-        #
-

-
-

-
-
-
-
-
-
-

-
-
-
-
-        - The table below provides detailed analysis of each headline
-        - News headlines are sourced from FinViz
-        - Sentiment analysis uses the FinBERT-tone model, specifically trained for financial text tone analysis
-        """
-
-        st.markdown(description)

-
-
-
-
-
         )

-
-
-
-
-
-
-
-
-

-

-
-        st.
-

-
-
-
-    footer {visibility: hidden;}
-    </style>
-    """
-st.markdown(hide_streamlit_style, unsafe_allow_html=True)
 from transformers import AutoModelForSequenceClassification, AutoTokenizer, pipeline
 import torch

+# Page config
+st.set_page_config(
+    page_title="Stock News Sentiment Analyzer",
+    page_icon="📈",
+    layout="wide",
+    initial_sidebar_state="expanded"
+)
+
+# Custom CSS for styling
+st.markdown("""
+    <style>
+    .stAlert {
+        padding: 10px;
+        margin-bottom: 20px;
+    }
+    .reportview-container {
+        background: #f0f2f6
+    }
+    .main {
+        padding: 2rem;
+    }
+    h1, h2, h3 {
+        color: #1f77b4;
+    }
+    </style>
+""", unsafe_allow_html=True)

 # Initialize FinBERT-tone model and tokenizer
 @st.cache_resource
+def load_finbert_model():
+    try:
+        model = AutoModelForSequenceClassification.from_pretrained("yiyanghkust/finbert-tone")
+        tokenizer = AutoTokenizer.from_pretrained("yiyanghkust/finbert-tone")
+        return pipeline("text-classification", model=model, tokenizer=tokenizer)
+    except Exception as e:
+        st.error(f"Error loading model: {str(e)}")
+        return None

+# Load the model
+finbert = load_finbert_model()

+# Web scraping functions
 def verify_link(url, timeout=10, retries=3):
+    """Verify if a URL is accessible."""
     for _ in range(retries):
         try:
             response = requests.head(url, timeout=timeout, allow_redirects=True)
+            return 200 <= response.status_code < 300
         except requests.RequestException:
             continue
     return False

 def get_news(ticker):
+    """Scrape news from FinViz for a given stock ticker."""
+    try:
+        finviz_url = f'https://finviz.com/quote.ashx?t={ticker}'
+        req = Request(url=finviz_url, headers={
+            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36'
+        })
+        response = urlopen(req)
+        html = BeautifulSoup(response, 'html.parser')
+        news_table = html.find(id='news-table')
+        if news_table is None:
+            raise ValueError("No news table found - invalid ticker or website structure changed")
+        return news_table
+    except Exception as e:
+        raise Exception(f"Error fetching news for {ticker}: {str(e)}")

 def parse_news(news_table):
+    """Parse the news table and return a DataFrame."""
     parsed_news = []

+    try:
+        for row in news_table.findAll('tr'):
+            title = row.a.get_text()
+            link = row.a['href']
+            date_data = row.td.text.strip().split()

+            if len(date_data) == 1:
+                time = date_data[0]
                 date = datetime.datetime.today().strftime('%Y-%m-%d')
             else:
+                date = date_data[0]
+                time = date_data[1]

+            parsed_date = parser.parse(f"{date} {time}")
+            is_valid = verify_link(link)

+            parsed_news.append([parsed_date, title, link, is_valid])
+
+        return pd.DataFrame(parsed_news, columns=['datetime', 'headline', 'link', 'is_valid'])
+    except Exception as e:
+        raise Exception(f"Error parsing news: {str(e)}")

+def analyze_sentiment(text):
+    """Analyze sentiment of a single piece of text using FinBERT-tone."""
+    try:
+        result = finbert(text)[0]
+        label = result['label']
+        score = result['score']

+        sentiment_score = {
+            'Positive': score,
+            'Negative': -score,
+            'Neutral': 0
+        }.get(label, 0)
+
+        return {
             'sentiment_score': sentiment_score,
             'tone': label,
             'confidence': score
+        }
+    except Exception as e:
+        st.error(f"Error analyzing sentiment: {str(e)}")
+        return {'sentiment_score': 0, 'tone': 'Error', 'confidence': 0}

+def process_news_sentiment(parsed_news_df):
+    """Process sentiment for all news headlines."""
+    try:
+        # Analyze sentiment for each headline
+        sentiment_data = [analyze_sentiment(headline) for headline in parsed_news_df['headline']]

+        # Convert to DataFrame
+        sentiment_df = pd.DataFrame(sentiment_data)

+        # Join with original news DataFrame
+        result_df = parsed_news_df.join(sentiment_df)
+        return result_df.set_index('datetime')
+    except Exception as e:
+        raise Exception(f"Error processing sentiments: {str(e)}")
+
+# Visualization functions
+def plot_sentiment_timeline(df, ticker):
+    """Create an hourly sentiment timeline plot."""
+    try:
+        hourly_sentiment = df['sentiment_score'].resample('H').mean()

+        fig = px.bar(
+            hourly_sentiment,
+            title=f"{ticker} Hourly Sentiment Trend",
+            color=hourly_sentiment.values,
+            color_continuous_scale=['red', 'yellow', 'green'],
+            range_color=[-1, 1]
+        )

+        fig.update_layout(
+            xaxis_title="Time",
+            yaxis_title="Sentiment Score",
+            coloraxis_colorbar_title="Sentiment",
+            showlegend=False,
+            height=400
+        )
+
+        return fig
+    except Exception as e:
+        st.error(f"Error creating timeline plot: {str(e)}")
+        return None

+def plot_sentiment_distribution(df, ticker):
+    """Create a pie chart of sentiment distribution."""
+    try:
+        tone_counts = df['tone'].value_counts()

+        fig = px.pie(
+            values=tone_counts.values,
+            names=tone_counts.index,
+            title=f"{ticker} Sentiment Distribution",
+            color=tone_counts.index,
+            color_discrete_map={
+                'Positive': 'green',
+                'Neutral': 'yellow',
+                'Negative': 'red'
+            }
         )

+        fig.update_layout(height=400)
+        return fig
+    except Exception as e:
+        st.error(f"Error creating distribution plot: {str(e)}")
+        return None
+
+def generate_recommendation(df):
+    """Generate trading recommendation based on sentiment analysis."""
+    try:
+        avg_sentiment = df['sentiment_score'].mean()
+        tone_counts = df['tone'].value_counts()
+        total_articles = len(df)
+
+        positive_pct = tone_counts.get('Positive', 0) / total_articles * 100
+        negative_pct = tone_counts.get('Negative', 0) / total_articles * 100

+        if avg_sentiment >= 0.3 and positive_pct >= 50:
+            return "🟢 STRONG BUY", f"Strong positive sentiment (Score: {avg_sentiment:.2f}, {positive_pct:.1f}% positive news). The recent news suggests a very favorable outlook."
+        elif avg_sentiment >= 0.1:
+            return "🟡 MODERATE BUY", f"Moderately positive sentiment (Score: {avg_sentiment:.2f}, {positive_pct:.1f}% positive news). The recent news leans positive."
+        elif avg_sentiment <= -0.3 and negative_pct >= 50:
+            return "🔴 STRONG SELL", f"Strong negative sentiment (Score: {avg_sentiment:.2f}, {negative_pct:.1f}% negative news). The recent news suggests significant caution."
+        elif avg_sentiment <= -0.1:
+            return "🟡 MODERATE SELL", f"Moderately negative sentiment (Score: {avg_sentiment:.2f}, {negative_pct:.1f}% negative news). The recent news leans negative."
+        else:
+            return "⚪ NEUTRAL", f"Neutral sentiment (Score: {avg_sentiment:.2f}). The recent news shows mixed or neutral signals."
+    except Exception as e:
+        st.error(f"Error generating recommendation: {str(e)}")
+        return "⚠️ ERROR", "Unable to generate recommendation due to an error."
+
+# Main application
+def main():
+    st.title("📈 Stock News Sentiment Analyzer")
+    st.markdown("""
+    This application analyzes the sentiment of recent news articles for any given stock ticker using the FinBERT-tone model,
+    which is specifically trained for financial text analysis.
+    """)

+    # User input
+    ticker = st.text_input('Enter Stock Ticker (e.g., AAPL, GOOGL)', '').upper()
+
+    if ticker:
+        try:
+            with st.spinner('Fetching and analyzing news...'):
+                # Get and process news
+                news_table = get_news(ticker)
+                parsed_news_df = parse_news(news_table)
+                analyzed_news = process_news_sentiment(parsed_news_df)
+
+                # Generate recommendation
+                signal, explanation = generate_recommendation(analyzed_news)
+
+                # Display recommendation
+                st.header(f"Analysis Results for {ticker}")
+                st.subheader(f"Signal: {signal}")
+                st.write(explanation)
+
+                # Display charts
+                col1, col2 = st.columns(2)
+
+                with col1:
+                    timeline_fig = plot_sentiment_timeline(analyzed_news, ticker)
+                    if timeline_fig:
+                        st.plotly_chart(timeline_fig, use_container_width=True)
+
+                with col2:
+                    distribution_fig = plot_sentiment_distribution(analyzed_news, ticker)
+                    if distribution_fig:
+                        st.plotly_chart(distribution_fig, use_container_width=True)
+
+                # Display news table
+                st.subheader("Recent News Analysis")
+
+                # Prepare display DataFrame
+                display_df = analyzed_news.copy()
+                display_df['link'] = display_df.apply(
+                    lambda row: f'<a href="{row["link"]}" target="_blank">{"🔗" if row["is_valid"] else "❌"}</a>',
+                    axis=1
+                )
+
+                # Format and display table
+                display_df = display_df[['headline', 'tone', 'confidence', 'sentiment_score', 'link']]
+                display_df = display_df.rename(columns={
+                    'headline': 'Headline',
+                    'tone': 'Sentiment',
+                    'confidence': 'Confidence',
+                    'sentiment_score': 'Score',
+                    'link': 'Link'
+                })
+
+                st.write(display_df.to_html(escape=False), unsafe_allow_html=True)
+
+                # Disclaimer
+                st.markdown("""
+                ---
+                **Disclaimer:** This analysis is based on news sentiment only and should not be considered as financial advice.
+                Always conduct thorough research and consult with financial professionals before making investment decisions.
+                """)
+
+        except Exception as e:
+            st.error(f"Error processing {ticker}: {str(e)}")
+            st.write("Please check the ticker symbol and try again.")

+# Run the application
+if __name__ == "__main__":
+    main()
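A quick way to sanity-check the new scoring logic outside Streamlit is a minimal sketch like the one below. It assumes transformers and torch are installed and that the yiyanghkust/finbert-tone checkpoint (the same one load_finbert_model() pulls in app.py) can be downloaded; the example headline is made up, not taken from FinViz.

from transformers import AutoModelForSequenceClassification, AutoTokenizer, pipeline

# Load FinBERT-tone the same way load_finbert_model() does in app.py
model = AutoModelForSequenceClassification.from_pretrained("yiyanghkust/finbert-tone")
tokenizer = AutoTokenizer.from_pretrained("yiyanghkust/finbert-tone")
finbert = pipeline("text-classification", model=model, tokenizer=tokenizer)

# Apply the same label-to-signed-score mapping used by analyze_sentiment()
headline = "Company reports record quarterly earnings"  # hypothetical headline for illustration
result = finbert(headline)[0]
signed_score = {'Positive': result['score'], 'Negative': -result['score'], 'Neutral': 0}.get(result['label'], 0)
print(result['label'], round(signed_score, 3))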