gabcares committed on
Commit
15e3c34
1 Parent(s): 92270c9

Add src folder


## Add src folder

Files changed (2)
  1. src/api/api.py +161 -0
  2. src/api/config.py +15 -0
src/api/api.py ADDED
@@ -0,0 +1,161 @@
+ import os
+ from dotenv import load_dotenv
+
+ from collections.abc import AsyncIterator
+ from contextlib import asynccontextmanager
+
+ from fastapi import FastAPI
+ from fastapi_cache import FastAPICache
+ from fastapi_cache.backends.redis import RedisBackend
+ from fastapi_cache.coder import PickleCoder
+ from fastapi_cache.decorator import cache
+
+ from redis import asyncio as aioredis
+
+ from pydantic import BaseModel
+ from typing import Tuple, Dict, Union
+
+ from imblearn.pipeline import Pipeline as imbPipeline
+ from sklearn.preprocessing import LabelEncoder
+ import joblib
+ import pandas as pd
+ from urllib.request import urlopen
+
+
+ from src.api.config import ONE_DAY_SEC, ONE_WEEK_SEC, XGBOOST_URL, RANDOM_FOREST_URL, ENCODER_URL, ENV_PATH
+
+ load_dotenv(ENV_PATH)
+
+
+ @asynccontextmanager
+ async def lifespan(_: FastAPI) -> AsyncIterator[None]:
+     # Initialise the Redis-backed FastAPI cache on startup
+     url = os.getenv("REDIS_URL")
+     username = os.getenv("REDIS_USERNAME")
+     password = os.getenv("REDIS_PASSWORD")
+     redis = aioredis.from_url(url=url, username=username,
+                               password=password, encoding="utf8", decode_responses=True)
+     FastAPICache.init(RedisBackend(redis), prefix="fastapi-cache")
+     yield
+
+
+ # FastAPI object
+ app = FastAPI(
+     title='Sepsis classification',
+     version='0.0.1',
+     description='Identify ICU patients at risk of developing sepsis',
+     lifespan=lifespan,
+ )
+
+
+ # API input features
+ class SepsisFeatures(BaseModel):
+     PRG: float
+     PL: float
+     PR: float
+     SK: float
+     TS: float
+     M11: float
+     BD2: float
+     Age: float
+     Insurance: float
+
+
+ class Url(BaseModel):
+     pipeline_url: str
+     encoder_url: str
+
+
+ class ResultData(BaseModel):
+     prediction: str
+     probability: float
+
+
+ class PredictionResponse(BaseModel):
+     execution_msg: str
+     execution_code: int
+     result: ResultData
+
+
+ class ErrorResponse(BaseModel):
+     execution_msg: Union[str, None]
+     execution_code: Union[int, None]
+     result: Union[Dict[str, Union[str, int]], Union[Dict[str, None], None]]
+
+
+ # Load the model pipeline and encoder from their URLs
+ # Cache for 1 day
+ @cache(expire=ONE_DAY_SEC, namespace='pipeline_resource', coder=PickleCoder)
+ async def load_pipeline(pipeline_url: str, encoder_url: str) -> Tuple[imbPipeline, LabelEncoder]:
+     pipeline, encoder = None, None
+     try:
+         pipeline: imbPipeline = joblib.load(urlopen(pipeline_url))
+         encoder: LabelEncoder = joblib.load(urlopen(encoder_url))
+     except Exception:
+         # Log exception
+         pass
+     finally:
+         return pipeline, encoder
+
+
+ # Endpoints
+
+ # Status endpoint: check if the API is online
+ @app.get('/')
+ @cache(expire=ONE_WEEK_SEC, namespace='status_check')  # Cache for 1 week
+ async def status_check():
+     return {"Status": "API is online..."}
+
+
+ @cache(expire=ONE_DAY_SEC, namespace='pipeline_classifier')  # Cache for 1 day
+ async def pipeline_classifier(pipeline: imbPipeline, encoder: LabelEncoder, data: SepsisFeatures) -> ErrorResponse | PredictionResponse:
+     output = ErrorResponse(**{'execution_msg': None,
+                               'execution_code': None, 'result': None})
+     try:
+         # Create dataframe
+         df = pd.DataFrame([data.model_dump()])
+
+         # Make prediction
+         prediction = pipeline.predict(df)
+
+         pred_int = int(prediction[0])
+
+         prediction = encoder.inverse_transform([pred_int])[0]
+
+         # Get the probability of the predicted class
+         probability = round(
+             float(pipeline.predict_proba(df)[0][pred_int] * 100), 2)
+
+         msg = 'Execution was successful'
+         code = 1
+         result = {"prediction": prediction, "probability": probability}
+
+         output = PredictionResponse(
+             **{'execution_msg': msg,
+                'execution_code': code, 'result': result}
+         )
+
+     except Exception as e:
+         msg = 'Execution failed'
+         code = 0
+         result = {'error': f"Pipeline classifier failure: {e}"}
+         output = ErrorResponse(**{'execution_msg': msg,
+                                   'execution_code': code, 'result': result})
+
+     finally:
+         return output
+
+
+ # XGBoost endpoint: classify sepsis with XGBoost
+ @app.post('/xgboost_prediction')
+ async def xgboost_classifier(data: SepsisFeatures) -> ErrorResponse | PredictionResponse:
+     xgboost_pipeline, encoder = await load_pipeline(XGBOOST_URL, ENCODER_URL)
+     output = await pipeline_classifier(xgboost_pipeline, encoder, data)
+     return output
+
+
+ # Random forest endpoint: classify sepsis with random forest
+ @app.post('/random_forest_prediction')
+ async def random_forest_classifier(data: SepsisFeatures) -> ErrorResponse | PredictionResponse:
+     random_forest_pipeline, encoder = await load_pipeline(RANDOM_FOREST_URL, ENCODER_URL)
+     output = await pipeline_classifier(random_forest_pipeline, encoder, data)
+     return output
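
Once the app is served (for example with `uvicorn src.api.api:app` from the repository root), the two prediction endpoints accept a JSON body whose keys match the `SepsisFeatures` model. A minimal client sketch, assuming the API is reachable at `http://localhost:8000` and using the `requests` library; the feature values below are placeholders, not real patient data:

```python
# Hypothetical client for the /xgboost_prediction endpoint (not part of this commit).
# Assumes the server is running locally on port 8000; feature values are placeholders.
import requests

payload = {
    "PRG": 6.0, "PL": 148.0, "PR": 72.0, "SK": 35.0, "TS": 0.0,
    "M11": 33.6, "BD2": 0.627, "Age": 50.0, "Insurance": 1.0,
}

resp = requests.post("http://localhost:8000/xgboost_prediction", json=payload)
resp.raise_for_status()

body = resp.json()
# Successful responses follow PredictionResponse:
# {"execution_msg": "Execution was successful", "execution_code": 1,
#  "result": {"prediction": "...", "probability": ...}}
print(body["execution_msg"], body["result"])
```

The `/random_forest_prediction` endpoint takes the same payload and differs only in which cached pipeline it loads.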
src/api/config.py ADDED
@@ -0,0 +1,15 @@
+ import os
+
+ ONE_DAY_SEC = 24 * 60 * 60
+
+ ONE_WEEK_SEC = ONE_DAY_SEC * 7
+
+ XGBOOST_URL = "https://raw.githubusercontent.com/valiantezabuku/Building-Machine-Learning-API-s-with-FastAPI-Collabo/develop/dev/models/xgboost.joblib"
+
+ RANDOM_FOREST_URL = "https://raw.githubusercontent.com/valiantezabuku/Building-Machine-Learning-API-s-with-FastAPI-Collabo/develop/dev/models/random_forest.joblib"
+
+ ENCODER_URL = "https://raw.githubusercontent.com/valiantezabuku/Building-Machine-Learning-API-s-with-FastAPI-Collabo/develop/dev/models/encoder.joblib"
+
+ BASE_DIR = './'  # Where the Uvicorn server runs from
+
+ ENV_PATH = os.path.join(BASE_DIR, 'src/api/.env')
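
The `lifespan` hook in `api.py` reads its Redis credentials from the dotenv file at `ENV_PATH` (`src/api/.env`), and because `BASE_DIR` is `'./'` the path resolves relative to wherever the server is started, i.e. the repository root. A sketch of the expected file, with placeholder values for an assumed Redis instance:

```
REDIS_URL=redis://<host>:<port>
REDIS_USERNAME=<username>
REDIS_PASSWORD=<password>
```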