Spaces:
Runtime error
Runtime error
Jumper-Clown
committed on
Commit
•
46ebf5d
1
Parent(s):
22435da
implemented symbol getter, price candlestick, trend deviations from linear regressed prices, VWAP, bollinger bands, candle indicators and sector trends
Browse files
- .idea/.gitignore +3 -0
- .idea/finance.iml +8 -0
- .idea/inspectionProfiles/profiles_settings.xml +6 -0
- .idea/misc.xml +7 -0
- .idea/modules.xml +8 -0
- .idea/vcs.xml +6 -0
- __pycache__/calculator.cpython-312.pyc +0 -0
- __pycache__/data_retriever.cpython-312.pyc +0 -0
- __pycache__/page_symbol_details.cpython-312.pyc +0 -0
- __pycache__/static_data.cpython-312.pyc +0 -0
- __pycache__/trends.cpython-312.pyc +0 -0
- __pycache__/ui.cpython-312.pyc +0 -0
- app.py +8 -0
- calculator.py +55 -0
- data_retriever.py +102 -0
- indicators.py +102 -0
- page_symbol_details.py +86 -0
- requirements.txt +11 -0
- static_data.py +163 -0
- trends.py +140 -0
- ui.py +97 -0
.idea/.gitignore
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
# Default ignored files
|
2 |
+
/shelf/
|
3 |
+
/workspace.xml
|
.idea/finance.iml
ADDED
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
<?xml version="1.0" encoding="UTF-8"?>
|
2 |
+
<module type="PYTHON_MODULE" version="4">
|
3 |
+
<component name="NewModuleRootManager">
|
4 |
+
<content url="file://$MODULE_DIR$" />
|
5 |
+
<orderEntry type="jdk" jdkName="Python 3.12" jdkType="Python SDK" />
|
6 |
+
<orderEntry type="sourceFolder" forTests="false" />
|
7 |
+
</component>
|
8 |
+
</module>
|
.idea/inspectionProfiles/profiles_settings.xml
ADDED
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
<component name="InspectionProjectProfileManager">
|
2 |
+
<settings>
|
3 |
+
<option name="USE_PROJECT_PROFILE" value="false" />
|
4 |
+
<version value="1.0" />
|
5 |
+
</settings>
|
6 |
+
</component>
|
.idea/misc.xml
ADDED
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
<?xml version="1.0" encoding="UTF-8"?>
|
2 |
+
<project version="4">
|
3 |
+
<component name="Black">
|
4 |
+
<option name="sdkName" value="Python 3.12" />
|
5 |
+
</component>
|
6 |
+
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.12" project-jdk-type="Python SDK" />
|
7 |
+
</project>
|
.idea/modules.xml
ADDED
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
<?xml version="1.0" encoding="UTF-8"?>
|
2 |
+
<project version="4">
|
3 |
+
<component name="ProjectModuleManager">
|
4 |
+
<modules>
|
5 |
+
<module fileurl="file://$PROJECT_DIR$/.idea/finance.iml" filepath="$PROJECT_DIR$/.idea/finance.iml" />
|
6 |
+
</modules>
|
7 |
+
</component>
|
8 |
+
</project>
|
.idea/vcs.xml
ADDED
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
<?xml version="1.0" encoding="UTF-8"?>
|
2 |
+
<project version="4">
|
3 |
+
<component name="VcsDirectoryMappings">
|
4 |
+
<mapping directory="" vcs="Git" />
|
5 |
+
</component>
|
6 |
+
</project>
|
__pycache__/calculator.cpython-312.pyc
ADDED
Binary file (2.65 kB). View file
|
|
__pycache__/data_retriever.cpython-312.pyc
ADDED
Binary file (5.1 kB). View file
|
|
__pycache__/page_symbol_details.cpython-312.pyc
ADDED
Binary file (4.81 kB). View file
|
|
__pycache__/static_data.cpython-312.pyc
ADDED
Binary file (2.57 kB). View file
|
|
__pycache__/trends.cpython-312.pyc
ADDED
Binary file (6.02 kB). View file
|
|
__pycache__/ui.cpython-312.pyc
ADDED
Binary file (3.7 kB). View file
|
|
app.py
ADDED
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import streamlit as st
|
2 |
+
|
3 |
+
import page_symbol_details
|
4 |
+
|
5 |
+
if __name__ == '__main__':
    # Page config must be the first Streamlit call in the script run.
    st.set_page_config(layout="wide")

    # Single-page app: render the symbol-details page.
    page_symbol_details.run()
|
calculator.py
ADDED
@@ -0,0 +1,55 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import numpy as np
|
2 |
+
from datetime import datetime
|
3 |
+
|
4 |
+
|
5 |
+
def linear_regression_line(dates, y_list):
    """Fit a least-squares line y = m*x + c through (dates, y_list).

    dates may be datetime objects (converted to POSIX timestamps), a plain
    sequence of numbers, or a numpy array.  Returns the pair (m, c), or
    (0, 0) for empty input so callers can draw a flat default.
    """
    # Bug fix: the original used `not any(dates)` as an emptiness test,
    # which also triggers for non-empty all-falsy inputs (e.g. numeric
    # x-values that are all 0).  Test length explicitly instead.
    if len(dates) == 0:
        return 0, 0
    if isinstance(dates[0], datetime):
        dates = [ts.timestamp() for ts in dates]
    if not isinstance(dates, np.ndarray):
        dates = np.array(dates)
    if not isinstance(y_list, np.ndarray):
        y_list = np.array(y_list)

    mean_x = np.mean(dates)
    mean_y = np.mean(y_list)

    # Ordinary least squares: slope (m) and y-intercept (c).
    m = np.sum((dates - mean_x) * (y_list - mean_y)) / np.sum((dates - mean_x) ** 2)
    c = mean_y - m * mean_x

    return m, c
23 |
+
|
24 |
+
|
25 |
+
def linear_regression_points(dates, y_list):
    """Return the fitted regression value m*x + c for every x in dates.

    Bug fix: the empty-input case previously returned the tuple (0, 0),
    which is not array-like and inconsistent with the non-empty return; an
    empty array is returned instead.  Also drops the redundant y_list
    conversion — linear_regression_line normalizes its own inputs.
    """
    if len(dates) == 0:
        return np.array([])
    if isinstance(dates[0], datetime):
        dates = [ts.timestamp() for ts in dates]
    if not isinstance(dates, np.ndarray):
        dates = np.array(dates)

    m, c = linear_regression_line(dates, y_list)

    # Vectorized evaluation of the fitted line at every input x.
    return m * dates + c
38 |
+
|
39 |
+
|
40 |
+
def linear_regression(dates, y_list):
    """Return the regression line's values at the first and last date.

    Yields the pair (y_at_first_date, y_at_last_date); (0, 0) when the
    input is empty/falsy.
    """
    if not any(dates):
        return 0, 0

    xs = dates
    if isinstance(xs[0], datetime):
        xs = [ts.timestamp() for ts in xs]
    if not isinstance(xs, np.ndarray):
        xs = np.array(xs)
    ys = y_list if isinstance(y_list, np.ndarray) else np.array(y_list)

    slope, intercept = linear_regression_line(xs, ys)

    # Evaluate the fitted line only at the two endpoints.
    return slope * xs[0] + intercept, slope * xs[-1] + intercept
|
data_retriever.py
ADDED
@@ -0,0 +1,102 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import yfinance as yf
|
2 |
+
import finnhub
|
3 |
+
|
4 |
+
import os
|
5 |
+
from datetime import date, datetime, timedelta
|
6 |
+
from collections import defaultdict
|
7 |
+
|
8 |
+
import static_data
|
9 |
+
|
10 |
+
#test
|
11 |
+
import streamlit as st
|
12 |
+
|
13 |
+
|
14 |
+
@st.cache_data
def get_exchange_code_names():
    """Return the static list of human-readable exchange names (cached)."""
    return static_data.exchange_code_names
|
17 |
+
|
18 |
+
|
19 |
+
@st.cache_data
def get_exchange_codes():
    """Return the Finnhub exchange codes, parallel to get_exchange_code_names()."""
    return static_data.exchange_codes
|
22 |
+
|
23 |
+
|
24 |
+
@st.cache_data
def get_symbols(exchange_code):
    """Return the sorted display symbols listed on the given exchange."""
    listing = finnhub_client().stock_symbols(exchange_code)
    # sorted() on a generator replaces the original append-then-sort loop.
    return sorted(entry['displaySymbol'] for entry in listing)
|
32 |
+
|
33 |
+
|
34 |
+
def today():
    """Return today's date formatted as YYYY-MM-DD.

    Bug fix: this was decorated with @st.cache_data, which memoizes the
    first result for the lifetime of the process — a long-running app
    would keep serving a stale date after midnight.  The call is trivially
    cheap, so the cache is simply removed.
    """
    return date.today().strftime("%Y-%m-%d")
|
37 |
+
|
38 |
+
|
39 |
+
def n_weeks_before(date_string, n):
    """Return the YYYY-MM-DD date lying n whole weeks before date_string."""
    start = datetime.strptime(date_string, "%Y-%m-%d")
    # timedelta(weeks=n) is exactly 7*n days.
    return (start - timedelta(weeks=n)).strftime("%Y-%m-%d")
|
42 |
+
|
43 |
+
|
44 |
+
def n_days_before(date_string, n):
    """Return the YYYY-MM-DD date n days before date_string."""
    parsed = datetime.strptime(date_string, "%Y-%m-%d")
    shifted = parsed - timedelta(days=n)
    return shifted.strftime("%Y-%m-%d")
|
47 |
+
|
48 |
+
@st.cache_data
def get_current_stock_data(symbol, n_weeks):
    """Download daily OHLCV data for symbol covering the last n_weeks weeks.

    Returns whatever yf.download yields for the window — presumably a
    pandas DataFrame indexed by date, possibly empty (verify at call sites).
    """
    current_date = today()
    n_weeks_before_date = n_weeks_before(current_date, n_weeks)
    stock_data = yf.download(symbol, n_weeks_before_date, current_date)
    return stock_data
|
54 |
+
|
55 |
+
|
56 |
+
@st.cache_resource
def finnhub_client():
    """Return a process-wide Finnhub API client.

    Fix: use st.cache_resource instead of st.cache_data — the client is a
    live connection object, not serializable data, and cache_data attempts
    to pickle/copy its cached values.  Requires FINNHUB_API_KEY in the
    environment; raises KeyError if unset.
    """
    return finnhub.Client(api_key=os.environ["FINNHUB_API_KEY"])
|
59 |
+
|
60 |
+
|
61 |
+
@st.cache_data
def get_current_basics(symbol, day):
    """Return the most recent quarterly basic-financials snapshot on or before `day`.

    Pivots Finnhub's per-metric series (metric -> [{period, v}, ...]) into
    one dict per reporting period, then scans newest-to-oldest for the
    first period whose date string is <= day.

    NOTE(review): returns [] (a list) when no series exists, but a dict
    otherwise — callers must handle both.  If every period is newer than
    `day`, the newest period is returned as a fallback; confirm that is
    intended rather than returning nothing.
    """
    basic_financials = finnhub_client().company_basic_financials(symbol, 'all')
    if not basic_financials['series']:
        return []

    basic_list, basic_dict = [], defaultdict(dict)

    # Pivot: group metric values by their reporting period.
    for metric, value_list in basic_financials['series']['quarterly'].items():
        for value in value_list:
            basic_dict[value['period']].update({metric: value['v']})

    # Embed the period key into each record and flatten to a list.
    for k, v in basic_dict.items():
        v.update({'period': k})
        basic_list.append(v)

    # ISO date strings sort chronologically as plain strings.
    basic_list.sort(key=lambda x: x['period'])

    # Newest first: the first period at or before the requested day wins.
    for basic in basic_list[::-1]:
        if basic['period'] <= day:
            return basic

    return basic_list[-1]
|
84 |
+
|
85 |
+
|
86 |
+
@st.cache_data
def get_peers(symbol):
    """Return the ticker symbols Finnhub considers peers of `symbol` (cached)."""
    return finnhub_client().company_peers(symbol)
|
89 |
+
|
90 |
+
|
91 |
+
@st.cache_data
def get_financials(symbol, freq):
    """Return filed financial reports for `symbol` at frequency `freq` (cached)."""
    return finnhub_client().financials_reported(symbol=symbol, freq=freq)
|
94 |
+
|
95 |
+
|
96 |
+
@st.cache_data
def get_income_statement(symbol, freq='quarterly'):
    """Return (end_dates, income_statements) for symbol's reported financials.

    Each element of the second list is the 'ic' (income statement) section
    of one filed report, aligned index-for-index with its end date.

    Cleanup: the original comprehensions shadowed the list `financials_data`
    with a loop variable of the same name, which worked only by accident;
    the loop variable is now distinct.
    """
    financials = get_financials(symbol, freq)
    reports = financials['data']
    dates = [report['endDate'] for report in reports]
    ic = [report['report']['ic'] for report in reports]
    return dates, ic
|
indicators.py
ADDED
@@ -0,0 +1,102 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Defines candlestick indicators, and creates data-points for a dataset when an indicator is detected.
|
2 |
+
|
3 |
+
import streamlit as st
|
4 |
+
from collections import defaultdict
|
5 |
+
|
6 |
+
|
7 |
+
def price_at_index(index, dates, dataset):
    """Build a single dated OHLC record from column-wise price data.

    Returned as a defaultdict(float), so reading an absent key yields 0.0
    (matches the original contract).
    """
    record = defaultdict(float)
    record['Date'] = dates[index]
    # Pull the four OHLC columns for this row.
    for field in ('Open', 'Close', 'High', 'Low'):
        record[field] = dataset[field][index]
    return record
|
21 |
+
|
22 |
+
|
23 |
+
def candle_is_green(current_price):
    """A candle is green (bullish) when it closed strictly above its open."""
    return current_price['Close'] > current_price['Open']
|
25 |
+
|
26 |
+
|
27 |
+
def engulfing_candle(prev_price, current_price):
    """Classify the candle pair as (is_bullish_engulfing, is_bearish_engulfing).

    Bullish: a non-green candle followed by a green candle whose body fully
    wraps the previous body; bearish is the mirror image.  Green-ness is
    inlined here rather than calling the helper.
    """
    prev_green = prev_price['Open'] < prev_price['Close']
    curr_green = current_price['Open'] < current_price['Close']

    bullish = (
        curr_green and not prev_green
        and current_price['Close'] > prev_price['Open']
        and current_price['Open'] < prev_price['Close']
    )
    bearish = (
        prev_green and not curr_green
        and current_price['Close'] < prev_price['Open']
        and current_price['Open'] > prev_price['Close']
    )
    return bullish, bearish
|
35 |
+
|
36 |
+
|
37 |
+
def engulfing_candle_bullish(prev_price, current_price):
    """True when a non-green candle is followed by a green candle whose body
    spans the previous candle's body (bullish engulfing pattern)."""
    # "not green" includes dojis (Close == Open), matching candle_is_green's negation.
    prev_not_green = prev_price['Close'] <= prev_price['Open']
    curr_green = current_price['Open'] < current_price['Close']
    return (prev_not_green and curr_green
            and current_price['Close'] > prev_price['Open']
            and current_price['Open'] < prev_price['Close'])
|
43 |
+
|
44 |
+
|
45 |
+
def engulfing_candle_bearish(prev_price, current_price):
    """True when a green candle is followed by a non-green candle whose body
    spans the previous candle's body (bearish engulfing pattern)."""
    prev_green = prev_price['Open'] < prev_price['Close']
    # "not green" includes dojis (Close == Open).
    curr_not_green = current_price['Close'] <= current_price['Open']
    return (prev_green and curr_not_green
            and current_price['Close'] < prev_price['Open']
            and current_price['Open'] > prev_price['Close'])
|
51 |
+
|
52 |
+
|
53 |
+
def create_engulfing_candle_bullish_indicators(dates, dataset):
    """Scan the series and collect a marker for every bullish engulfing candle.

    Returns {'Date': [...], 'Values': [...], 'IsBullish': True}; each value
    is the close pushed upward by 5x the combined body growth so the marker
    renders above the candle.
    """
    indicator = defaultdict(list)
    indicator_timestamps = indicator['Date']
    indicator_values = indicator['Values']

    prev_price = price_at_index(0, dates, dataset)
    for index in range(1, len(dates)):
        price = price_at_index(index, dates, dataset)
        is_engulfing = engulfing_candle_bullish(prev_price, price)
        if is_engulfing:
            indicator_timestamps.append(dates[index])
            # Offset scales with how decisively the new body engulfs the old
            # one; the 5x factor is purely cosmetic marker placement.
            offset = ((price['Close'] - price['Open']) - (prev_price['Open'] - prev_price['Close'])) * 5
            value = price['Close'] + offset
            indicator_values.append(value)
        prev_price = price

    indicator_dict = dict(indicator)
    indicator_dict['IsBullish'] = True

    return indicator_dict
|
73 |
+
|
74 |
+
|
75 |
+
def create_engulfing_candle_bearish_indicators(dates, dataset):
    """Scan the series and collect a marker for every bearish engulfing candle.

    Returns {'Date': [...], 'Values': [...], 'IsBullish': False}; each value
    is the close pushed downward so the marker renders below the candle
    (mirror of the bullish variant).
    """
    indicator = defaultdict(list)
    indicator_timestamps = indicator['Date']
    indicator_values = indicator['Values']

    prev_price = price_at_index(0, dates, dataset)
    for index in range(1, len(dates)):
        price = price_at_index(index, dates, dataset)
        is_engulfing = engulfing_candle_bearish(prev_price, price)
        if is_engulfing:
            indicator_timestamps.append(dates[index])
            # Mirror of the bullish offset; 5x factor is cosmetic.
            offset = ((price['Open'] - price['Close']) - (prev_price['Close'] - prev_price['Open'])) * 5
            value = price['Close'] - offset
            indicator_values.append(value)
        prev_price = price

    indicator_dict = dict(indicator)
    indicator_dict['IsBullish'] = False

    return indicator_dict
|
95 |
+
|
96 |
+
|
97 |
+
def create_indicators(dates, dataset):
    """Compute every supported candle indicator for the price series.

    Returns a mapping of indicator name -> indicator dict as produced by the
    create_engulfing_candle_* helpers.
    """
    indicators = defaultdict(dict)
    indicators['Engulfing Bullish'] = create_engulfing_candle_bullish_indicators(dates, dataset)
    indicators['Engulfing Bearish'] = create_engulfing_candle_bearish_indicators(dates, dataset)
    return indicators
|
102 |
+
|
page_symbol_details.py
ADDED
@@ -0,0 +1,86 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import numpy as np
|
2 |
+
import streamlit as st
|
3 |
+
from plotly.subplots import make_subplots
|
4 |
+
|
5 |
+
import data_retriever
|
6 |
+
import trends
|
7 |
+
import ui
|
8 |
+
|
9 |
+
|
10 |
+
def run():
    """Render the symbol-details page: exchange/stock pickers, candlestick
    chart with trend/VWAP/Bollinger overlays, and optional sector panels."""
    exchange_names = data_retriever.get_exchange_code_names()
    exchanges_selectbox = st.selectbox(
        'Exchange:',
        exchange_names,
        index=exchange_names.index('US exchanges (NYSE, Nasdaq)')
    )
    exchange_name = exchanges_selectbox
    # Display names and API codes are parallel lists; map via index.
    exchange_index = exchange_names.index(exchange_name)
    exchange = data_retriever.get_exchange_codes()[exchange_index]

    symbols = data_retriever.get_symbols(exchange)
    symbols_selectbox = st.selectbox(
        'Stock:',
        symbols,
        index=symbols.index('AAPL')
    )
    symbol = symbols_selectbox

    # max time period
    st.text_input("No. of years look-back", value=1, key="years_back")
    years_back = int(st.session_state.years_back)
    # NOTE(review): 12*4 = 48 weeks per "year", ~4 weeks short of a
    # calendar year — confirm whether 52 was intended.
    weeks_back = years_back*12*4

    symbol_prices = data_retriever.get_current_stock_data(symbol, weeks_back)
    if not any(symbol_prices):
        return
    # NOTE(review): DatetimeIndex.format() is deprecated in pandas 2.x.
    dates = symbol_prices.index.format()

    # back test: cut the series off n days before today.
    st.text_input('No. of days back-test', value=0, key="backtest_period")
    n_days_back = int(st.session_state.backtest_period)
    end_date = data_retriever.n_days_before(data_retriever.today(), n_days_back)
    symbol_prices_backtest = symbol_prices[symbol_prices.index <= end_date]
    backtest_dates = symbol_prices_backtest.index.format()

    # symbol candlestick graph
    candleFigure = make_subplots(rows=1, cols=1)
    ui.create_candlestick(candleFigure, dates, symbol_prices, symbol, 'Price')
    # # indicators
    # indicator_datasets = indicators.create_indicators(dates, symbol_prices)
    # ui.create_indicators(fig1, indicator_datasets)
    # trend lines: mark points deviating from the regression line.
    sigma_multiplier = st.slider(label='min trend deviation', min_value=1.0, max_value=10.0, value=3.0, step=0.1)
    sigma_dates, sigma_values = trends.trend_line(symbol_prices_backtest.index, np.array(symbol_prices_backtest['Close']), min_trend_size=21, sigma_multiplier=sigma_multiplier)
    ui.create_markers(candleFigure, sigma_dates, sigma_values, f"{sigma_multiplier} Deviations from regression line", 'Deviations')

    # vwap
    vwap = trends.vwap(symbol_prices_backtest)
    ui.create_line(candleFigure, backtest_dates, vwap, "Volume Weighted Average Price (VWAP)", "VWAP")

    # bollinger bands
    bollinger_dates, bollinger_low, bollinger_high = trends.bollinger_bands(backtest_dates, symbol_prices_backtest)
    ui.create_fill_area(candleFigure, bollinger_dates, bollinger_low, bollinger_high, "Bollinger Bands")

    # plot all
    candleFigure.update_layout(title="Symbol Ticker",
                               xaxis_title='Date',
                               yaxis_title="Price per Share",
                               template='plotly_dark')
    candle_chart_id = st.plotly_chart(candleFigure, use_container_width=True, key='candle')

    if st.checkbox('Sector Trends'):
        # plot the trend of the market as a candlestick graph.
        fig2 = make_subplots(rows=1, cols=1)
        dates, close_data, relative_close_data, sector_normalized_avg = trends.sector_trends(symbol, weeks_back)
        ui.create_candlestick(fig2, dates, sector_normalized_avg, 'Sector Trend', 'Normalized Price')
        st.plotly_chart(fig2, use_container_width=True)

        # plot the difference between each peer and the sector trend
        fig3 = make_subplots(rows=1, cols=1)
        ui.create_lines(fig3, dates, relative_close_data, "Peer 'Close' Relative to Sector", 'Relative Close')
        st.plotly_chart(fig3, use_container_width=True)

    if st.checkbox('Symbol Basics'):
        basics_data = data_retriever.get_current_basics(symbol, data_retriever.today())
        st.write(basics_data)
requirements.txt
ADDED
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
streamlit
|
2 |
+
|
3 |
+
yfinance
|
4 |
+
finnhub-python
|
5 |
+
|
6 |
+
pandas
|
7 |
+
datetime
|
8 |
+
|
9 |
+
plotly
|
10 |
+
|
11 |
+
numpy
|
static_data.py
ADDED
@@ -0,0 +1,163 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
exchange_code_names = ['ABU DHABI SECURITIES EXCHANGE',
|
2 |
+
'Aquis Exchange',
|
3 |
+
'NYSE EURONEXT - EURONEXT AMSTERDAM',
|
4 |
+
'ATHENS EXCHANGE S.A. CASH MARKET',
|
5 |
+
'ASX - ALL MARKETS',
|
6 |
+
'BOLSA DE COMERCIO DE BUENOS AIRES',
|
7 |
+
'BOLSA DE VALORES DE COLOMBIA',
|
8 |
+
'BUDAPEST STOCK EXCHANGE',
|
9 |
+
'BOERSE BERLIN',
|
10 |
+
'BAHRAIN BOURSE',
|
11 |
+
'STOCK EXCHANGE OF THAILAND',
|
12 |
+
'BSE LTD',
|
13 |
+
'NYSE EURONEXT - EURONEXT BRUSSELS',
|
14 |
+
'Egyptian Stock Exchange',
|
15 |
+
'CANADIAN NATIONAL STOCK EXCHANGE',
|
16 |
+
'OMX NORDIC EXCHANGE COPENHAGEN A/S',
|
17 |
+
'CARACAS STOCK EXCHANGE',
|
18 |
+
'CASABLANCA STOCK EXCHANGE',
|
19 |
+
'DUBAI FINANCIAL MARKET',
|
20 |
+
'XETRA',
|
21 |
+
'Dhaka Stock Exchange',
|
22 |
+
'BOERSE DUESSELDORF',
|
23 |
+
'DEUTSCHE BOERSE AG',
|
24 |
+
'NASDAQ OMX HELSINKI LTD',
|
25 |
+
'HONG KONG EXCHANGES AND CLEARING LTD',
|
26 |
+
'HANSEATISCHE WERTPAPIERBOERSE HAMBURG',
|
27 |
+
'NASDAQ OMX ICELAND',
|
28 |
+
'IRISH STOCK EXCHANGE - ALL MARKET',
|
29 |
+
'BORSA ISTANBUL',
|
30 |
+
'INDONESIA STOCK EXCHANGE',
|
31 |
+
'JOHANNESBURG STOCK EXCHANGE',
|
32 |
+
'BURSA MALAYSIA',
|
33 |
+
'KOREA EXCHANGE (KOSDAQ)',
|
34 |
+
'KOREA EXCHANGE (STOCK MARKET)',
|
35 |
+
'Kuwait Stock Exchange',
|
36 |
+
'LONDON STOCK EXCHANGE',
|
37 |
+
'Euronext London',
|
38 |
+
'NYSE EURONEXT - EURONEXT LISBON',
|
39 |
+
'BOLSA DE MADRID',
|
40 |
+
'MOSCOW EXCHANGE',
|
41 |
+
'Italian Stock Exchange',
|
42 |
+
'BOERSE MUENCHEN',
|
43 |
+
'BOLSA MEXICANA DE VALORES (MEXICAN STOCK EXCHANGE)',
|
44 |
+
'AEQUITAS NEO EXCHANGE',
|
45 |
+
'Nigerian Stock Exchange',
|
46 |
+
'NATIONAL STOCK EXCHANGE OF INDIA',
|
47 |
+
'NEW ZEALAND EXCHANGE LTD',
|
48 |
+
'OSLO BORS ASA',
|
49 |
+
'NYSE EURONEXT - MARCHE LIBRE PARIS',
|
50 |
+
'Philippine Stock Exchange',
|
51 |
+
'PRAGUE STOCK EXCHANGE',
|
52 |
+
'QATAR EXCHANGE',
|
53 |
+
'NASDAQ OMX RIGA',
|
54 |
+
'Brazil Bolsa - Sao Paolo',
|
55 |
+
'BOERSE STUTTGART',
|
56 |
+
'SINGAPORE EXCHANGE',
|
57 |
+
'SANTIAGO STOCK EXCHANGE',
|
58 |
+
'SAUDI STOCK EXCHANGE',
|
59 |
+
'SHANGHAI STOCK EXCHANGE',
|
60 |
+
'NASDAQ OMX NORDIC STOCKHOLM',
|
61 |
+
'SWISS EXCHANGE',
|
62 |
+
'SHENZHEN STOCK EXCHANGE',
|
63 |
+
'TOKYO STOCK EXCHANGE-TOKYO PRO MARKET',
|
64 |
+
'TEL AVIV STOCK EXCHANGE',
|
65 |
+
'NASDAQ OMX TALLINN',
|
66 |
+
'TORONTO STOCK EXCHANGE',
|
67 |
+
'TAIWAN STOCK EXCHANGE',
|
68 |
+
'TPEx',
|
69 |
+
'Turquoise',
|
70 |
+
'US exchanges (NYSE, Nasdaq)',
|
71 |
+
'TSX VENTURE EXCHANGE - NEX',
|
72 |
+
'Vienna Stock Exchange',
|
73 |
+
'Vietnam exchanges including HOSE, HNX and UPCOM',
|
74 |
+
'NASDAQ OMX VILNIUS',
|
75 |
+
'WARSAW STOCK EXCHANGE/EQUITIES/MAIN MARKET',
|
76 |
+
'CBOE Australia',
|
77 |
+
'Hanover Stock Exchange',
|
78 |
+
'DEUTSCHE BOERSE Stoxx',
|
79 |
+
'DEUTSCHE BOERSE TradeGate',
|
80 |
+
'BOERSE_FRANKFURT_ZERTIFIKATE',
|
81 |
+
'Spotlight Stock Market']
|
82 |
+
|
83 |
+
exchange_codes = ['AD'
|
84 |
+
,'AQ'
|
85 |
+
,'AS'
|
86 |
+
,'AT'
|
87 |
+
,'AX'
|
88 |
+
,'BA'
|
89 |
+
,'BC'
|
90 |
+
,'BD'
|
91 |
+
,'BE'
|
92 |
+
,'BH'
|
93 |
+
,'BK'
|
94 |
+
,'BO'
|
95 |
+
,'BR'
|
96 |
+
,'CA'
|
97 |
+
,'CN'
|
98 |
+
,'CO'
|
99 |
+
,'CR'
|
100 |
+
,'CS'
|
101 |
+
,'DB'
|
102 |
+
,'DE'
|
103 |
+
,'DS'
|
104 |
+
,'DU'
|
105 |
+
,'F'
|
106 |
+
,'HE'
|
107 |
+
,'HK'
|
108 |
+
,'HM'
|
109 |
+
,'IC'
|
110 |
+
,'IR'
|
111 |
+
,'IS'
|
112 |
+
,'JK'
|
113 |
+
,'JO'
|
114 |
+
,'KL'
|
115 |
+
,'KQ'
|
116 |
+
,'KS'
|
117 |
+
,'KW'
|
118 |
+
,'L'
|
119 |
+
,'LN'
|
120 |
+
,'LS'
|
121 |
+
,'MC'
|
122 |
+
,'ME'
|
123 |
+
,'MI'
|
124 |
+
,'MU'
|
125 |
+
,'MX'
|
126 |
+
,'NE'
|
127 |
+
,'NL'
|
128 |
+
,'NS'
|
129 |
+
,'NZ'
|
130 |
+
,'OL'
|
131 |
+
,'PA'
|
132 |
+
,'PM'
|
133 |
+
,'PR'
|
134 |
+
,'QA'
|
135 |
+
,'RG'
|
136 |
+
,'SA'
|
137 |
+
,'SG'
|
138 |
+
,'SI'
|
139 |
+
,'SN'
|
140 |
+
,'SR'
|
141 |
+
,'SS'
|
142 |
+
,'ST'
|
143 |
+
,'SW'
|
144 |
+
,'SZ'
|
145 |
+
,'T'
|
146 |
+
,'TA'
|
147 |
+
,'TL'
|
148 |
+
,'TO'
|
149 |
+
,'TW'
|
150 |
+
,'TWO'
|
151 |
+
,'TU'
|
152 |
+
,'US'
|
153 |
+
,'V'
|
154 |
+
,'VI'
|
155 |
+
,'VN'
|
156 |
+
,'VS'
|
157 |
+
,'WA'
|
158 |
+
,'XA'
|
159 |
+
,'HA'
|
160 |
+
,'SX'
|
161 |
+
,'TG'
|
162 |
+
,'SC'
|
163 |
+
,'SL']
|
trends.py
ADDED
@@ -0,0 +1,140 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from collections import defaultdict
|
2 |
+
|
3 |
+
import numpy as np
|
4 |
+
|
5 |
+
import calculator
|
6 |
+
import data_retriever
|
7 |
+
|
8 |
+
import streamlit as st
|
9 |
+
|
10 |
+
|
11 |
+
# plot the trend of the market as a candlestick graph.
|
12 |
+
def sector_trends(symbol, weeks_back):
    """Aggregate min-max-normalized price trends across the symbol's peers.

    Returns (dates, close_data, relative_close_data, normalized_avg):
      dates                -- date strings from the first peer that had data
      close_data           -- peer -> normalized 'Close' series
      relative_close_data  -- peer -> (peer close - sector average close)
      normalized_avg       -- indicator -> averaged normalized series
                              (suitable as candlestick input)
    """
    # 1. get all peers
    peers = data_retriever.get_peers(symbol)
    # 2. get data for each peer
    peers_stock_data = defaultdict(list)
    dates = []
    for peer in peers:
        peer_data = data_retriever.get_current_stock_data(peer, weeks_back)
        if len(peer_data) == 0:
            continue
        if not any(dates):
            dates = peer_data.index.format()
        peers_stock_data[peer] = peer_data

    # 3. normalize all data (get min->max value, then set each value to (X-min)/(max-min)
    peers_stock_data_normalized = defaultdict(dict)
    indicator_stock_data_normalized_peer_sum = defaultdict(list)
    for peer, peer_stock_data in peers_stock_data.items():
        peer_stock_data_normalized = defaultdict(list)
        for indicator in peer_stock_data:
            indicator_normalized = peer_stock_data_normalized[indicator]
            indicator_stock_data_normalized_sum = indicator_stock_data_normalized_peer_sum[indicator]

            indicator_values = peer_stock_data[indicator]
            # Manual min/max scan over the series.
            min_val = indicator_values[0]
            max_val = indicator_values[0]
            for value in indicator_values:
                if value < min_val:
                    min_val = value
                if value > max_val:
                    max_val = value
            delta = max_val - min_val
            # NOTE(review): delta == 0 for a perfectly flat series would
            # divide by zero below — confirm whether that can occur here.

            value_idx = 0
            for value in indicator_values:
                normalized = (value - min_val)/delta
                indicator_normalized.append(normalized)

                # Accumulate a per-index running sum across all peers;
                # grow the sum list lazily as longer series appear.
                while len(indicator_stock_data_normalized_sum) <= value_idx:
                    indicator_stock_data_normalized_sum.append(0)
                indicator_stock_data_normalized_sum[value_idx] += normalized
                value_idx += 1

        peers_stock_data_normalized[peer] = peer_stock_data_normalized
    # 4. get the average value for each indicator [open, close, high, low] for each time-step.
    # NOTE(review): peers with no data were skipped in step 2 but are still
    # counted in this divisor, biasing the average low — consider
    # len(peers_stock_data) instead of len(peers).
    peer_count = len(peers)
    indicator_stock_data_normalized_peer_avg = defaultdict(list)
    for indicator, indicator_sum_values in indicator_stock_data_normalized_peer_sum.items():
        indicator_avg_values = indicator_stock_data_normalized_peer_avg[indicator]
        for sum_value in indicator_sum_values:
            indicator_avg_values.append(sum_value/peer_count)
    # 5. plot the resulting normalized-averaged-indicators in a candlestick chart.
    close_data = defaultdict(list)
    for peer, peer_data_normalized in peers_stock_data_normalized.items():
        close_data[peer] = peer_data_normalized['Close']

    relative_close_data = defaultdict(list)
    for peer in peers_stock_data_normalized:
        relative_close_data[peer] = [a - b for a, b in zip(close_data[peer], indicator_stock_data_normalized_peer_avg['Close'])]

    return dates, close_data, relative_close_data, indicator_stock_data_normalized_peer_avg
|
73 |
+
|
74 |
+
|
75 |
+
def trend_line(date_indices, data_list, min_trend_size=7, step_size=1, sigma_multiplier=1):
    """Find points where the price breaks away from its recent regression trend.

    Walks the series with a growing window; whenever the next point deviates
    from the window's fitted line by more than sigma_multiplier times the
    window's mean absolute deviation, that point is recorded as a trend
    change and the window restarts min_trend_size points behind it.

    Returns (trend_dates, trend_values): display date strings and raw values.
    """
    trend_dates, trend_values = [], []
    if not any(date_indices) or not any(data_list):
        return trend_dates, trend_values

    # Assumes date_indices is a pandas DatetimeIndex (entries expose
    # .timestamp() and the index has .format()) — TODO confirm;
    # DatetimeIndex.format() is deprecated in pandas 2.x.
    np_dates = np.array([ts.timestamp() for ts in date_indices])
    dates = date_indices.format()

    start_index = 0
    index = min_trend_size

    while index < len(data_list):
        np_dates_subset = np_dates[start_index:index]
        np_values_subset = data_list[start_index:index]
        # for the value range, calculate linear_regression and standard deviation
        m, c = calculator.linear_regression_line(np_dates_subset, np_values_subset)
        predicted_points = m * np_dates_subset + c
        # Mean absolute deviation from the fitted line (not a true sigma).
        relative_mean = np.mean(np.abs(predicted_points - np_values_subset))  # TODO: use Welford's algorithm

        # for the next datapoint(s), calculate the next value in that trend,
        # and check if it is n*trend_sigma away from the current trend line
        next_index = index + step_size
        if next_index >= len(data_list):
            break

        x = np_dates[next_index:next_index+1]
        y = data_list[next_index:next_index+1]
        expected_y = m*x + c
        dev = np.mean(np.abs(y - expected_y))

        if dev > relative_mean * sigma_multiplier:
            # store the current date and value as a trend changer
            trend_dates.append(dates[index])
            trend_values.append(data_list[index])
            # reset the calculation for the next data
            start_index = next_index - min_trend_size

        index = next_index

    return trend_dates, trend_values
|
115 |
+
|
116 |
+
|
117 |
+
def vwap(symbol_price_data):
    """Cumulative volume-weighted average price.

    Uses the classic (High + Low + Close) / 3 "typical price", weighted by
    volume and accumulated over the whole series.
    """
    high = symbol_price_data['High']
    low = symbol_price_data['Low']
    close = symbol_price_data['Close']
    volume = symbol_price_data['Volume']

    typical = (high + low + close) / 3.0
    return (typical * volume).cumsum() / volume.cumsum()
|
122 |
+
|
123 |
+
|
124 |
+
def bollinger_bands(dates, symbol_price_data, n_days_deviation=20, deviation_multiplier=2):
    """Rolling SMA +/- deviation_multiplier population std-dev of closes.

    Uses a trailing window of n_days_deviation closes; the band point is
    aligned to the first date after that window.  Returns
    (band_dates, lower_band, upper_band).
    """
    closes = symbol_price_data['Close']
    band_dates, band_low, band_high = [], [], []

    for start in range(len(closes) - n_days_deviation):
        window = closes[start:start + n_days_deviation]
        sma = np.sum(window) / len(window)
        # Population standard deviation of the window, pre-scaled.
        sigma = deviation_multiplier * np.sqrt(np.sum(np.power(window - sma, 2)) / len(window))

        band_dates.append(dates[start + n_days_deviation])
        band_high.append(sma + sigma)
        band_low.append(sma - sigma)

    return band_dates, band_low, band_high
|
137 |
+
|
138 |
+
|
139 |
+
def support_lines():
    """Placeholder for future support-line detection; currently a no-op."""
    pass
|
ui.py
ADDED
@@ -0,0 +1,97 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import streamlit as st
|
2 |
+
|
3 |
+
import plotly.graph_objects as go
|
4 |
+
from plotly.validators.scatter.marker import SymbolValidator
|
5 |
+
from plotly.subplots import make_subplots
|
6 |
+
|
7 |
+
# chart datapoint icons
|
8 |
+
raw_symbols = SymbolValidator().values
|
9 |
+
up_arrow = raw_symbols[5]
|
10 |
+
down_arrow = raw_symbols[6]
|
11 |
+
|
12 |
+
|
13 |
+
def create_candlestick(fig, dates, dataset, title, y_label):
    """Add an OHLC candlestick trace plus a range slider/selector to fig.

    `dataset` must expose 'Open'/'High'/'Low'/'Close' columns.
    NOTE(review): `title` is accepted but unused — confirm whether it should
    set the figure title.
    """
    candlestick = go.Candlestick(name=y_label,
                                 x=dates,
                                 open=dataset['Open'],
                                 high=dataset['High'],
                                 low=dataset['Low'],
                                 close=dataset['Close'])
    fig.add_trace(candlestick)
    # Quick-zoom presets: 1 month, 6 months, year-to-date, 1 year, all.
    fig.update_xaxes(
        rangeslider_visible=True,
        rangeselector=dict(
            buttons=list([
                dict(count=1, label="1m", step="month", stepmode="backward"),
                dict(count=6, label="6m", step="month", stepmode="backward"),
                dict(count=1, label="YTD", step="year", stepmode="todate"),
                dict(count=1, label="1y", step="year", stepmode="backward"),
                dict(step="all")
            ])
        )
    )
|
33 |
+
|
34 |
+
|
35 |
+
def create_indicators(fig, datasets):
    """Add one scatter-marker trace per indicator dataset in `datasets`.

    Bullish indicators render as green up-triangles, bearish as red
    down-triangles; anything without an 'IsBullish' flag falls back to a
    blue square.
    """
    for indicator in datasets:
        indicator_data = datasets[indicator]

        marker_color="lightskyblue"
        marker_symbol = 0
        if 'IsBullish' in indicator_data:
            if indicator_data['IsBullish']:
                marker_color = 'green'
                marker_symbol = 5  # triangle-up
            else:
                marker_color = 'red'
                marker_symbol = 6  # triangle-down

        indicator_plot = go.Scatter(name=indicator,
                                    mode="markers",
                                    x=indicator_data['Date'],
                                    y=indicator_data['Values'],
                                    marker_symbol=marker_symbol,
                                    marker_line_color="midnightblue",
                                    marker_color=marker_color,
                                    marker_line_width=2,
                                    marker_size=15,
                                    # NOTE(review): %{indicator} is not a valid
                                    # plotly hovertemplate placeholder — confirm
                                    # the intended hover text.
                                    hovertemplate="%{indicator}: %{y}%{x}<br>number: %{marker.symbol}<extra></extra>")
        fig.add_trace(indicator_plot)
|
60 |
+
|
61 |
+
|
62 |
+
def create_lines(fig, dates, datasets, title, y_label):
    """Add one line trace per entry of `datasets` (name -> y-series), all
    sharing the same x-axis dates.

    NOTE(review): `title` and `y_label` are accepted but unused — confirm.
    """
    for key in datasets:
        line = go.Scatter(name=key, x=dates, y=datasets[key])
        fig.add_trace(line)
|
66 |
+
|
67 |
+
|
68 |
+
def create_markers(fig, dates, dataset, title, y_label, marker_symbol=3, marker_color="blue", marker_size=15):
    """Add a markers-only scatter trace (e.g. trend-deviation points).

    NOTE(review): `y_label` is accepted but unused — confirm.
    """
    line = go.Scatter(name=title, x=dates, y=dataset,
                      mode="markers",
                      marker_symbol=marker_symbol,
                      marker_line_color="midnightblue",
                      marker_color=marker_color,
                      marker_line_width=2,
                      marker_size=marker_size)
    fig.add_trace(line)
|
77 |
+
|
78 |
+
|
79 |
+
def create_line(fig, dates, dataset, title, y_label, marker_symbol=4, marker_size=15):
    """Add a simple single-series line trace (e.g. VWAP).

    NOTE(review): y_label, marker_symbol and marker_size are unused here,
    and marker_line_color has no visible effect on a default line-mode
    trace — confirm the intended styling.
    """
    line = go.Scatter(name=title, x=dates, y=dataset, marker_line_color="yellow")
    fig.add_trace(line)
|
82 |
+
|
83 |
+
|
84 |
+
def create_fill_area(fig, dates, y_low, y_high, title, color='rgba(0,100,80,0.2)'):
    """Shade the band between y_low and y_high (e.g. Bollinger bands).

    Draws one closed polygon: forward along y_high, then back along the
    reversed y_low.  Assumes dates/y_low/y_high are plain Python lists —
    `+` here is list concatenation, so numpy arrays or pandas Series would
    change the meaning (element-wise addition).
    """
    fill_area = go.Scatter(
        name=title,
        x=dates + dates[::-1],
        y=y_high + y_low[::-1],
        fill='toself',
        fillcolor=color,
        line=dict(color=color)
    )
    fig.add_trace(fill_area)
|