Spaces:
Sleeping
Sleeping
init
Browse files- .DS_Store +0 -0
- Dockerfile +24 -0
- README.md +3 -3
- requirements.txt +1 -0
- src/app.py +30 -0
- src/instructions.txt +1 -0
- src/utils.py +63 -0
.DS_Store
ADDED
Binary file (6.15 kB). View file
|
|
Dockerfile
ADDED
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Use an official Python runtime as a parent image
FROM python:3.9-slim

# Run as an unprivileged user (uid 1000 is what HF Spaces' Docker runtime expects)
RUN useradd -m -u 1000 user
USER user

ENV HOME=/home/user \
    PATH=/home/user/.local/bin:$PATH

# Set the working directory in the container
WORKDIR $HOME/src/app

# Install dependencies first so this layer is cached between code-only changes.
# --no-cache-dir keeps pip's download cache out of the image layer.
COPY --chown=user requirements.txt ./
RUN pip install --no-cache-dir -r requirements.txt

# Copy the rest of the application's code
COPY --chown=user ./src .

# Make port 7860 available to the world outside this container
EXPOSE 7860

# Run app.py when the container launches
CMD ["python", "./app.py"]
README.md
CHANGED
@@ -1,8 +1,8 @@
|
|
1 |
---
|
2 |
title: Cascadia Pacific
|
3 |
-
emoji:
|
4 |
-
colorFrom:
|
5 |
-
colorTo:
|
6 |
sdk: docker
|
7 |
pinned: false
|
8 |
---
|
|
|
1 |
---
|
2 |
title: Cascadia Pacific
|
3 |
+
emoji: ✈️
|
4 |
+
colorFrom: yellow
|
5 |
+
colorTo: purple
|
6 |
sdk: docker
|
7 |
pinned: false
|
8 |
---
|
requirements.txt
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
gradio==4.8.0
|
src/app.py
ADDED
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""Gradio chat UI for the aviation assistant.

Wires a Chatbot widget to the helpers in utils.py and serves the app on
0.0.0.0:7860 — the port exposed by the Dockerfile.
"""
import gradio as gr
from utils import *

with gr.Blocks(gr.themes.Soft(primary_hue=gr.themes.colors.slate, secondary_hue=gr.themes.colors.purple)) as demo:
    with gr.Row():
        with gr.Column(scale=1, variant='panel'):

            # Chat transcript; seeded with a greeting from the assistant side.
            chatbot = gr.Chatbot([[None, "Hello, ask me anything about aviation"]], elem_id="chatbot", label='Chatbox', height=600)

            txt = gr.Textbox(label="Question", placeholder="Enter your question and press enter ")

            with gr.Row():
                with gr.Column(scale=1):
                    submit_btn = gr.Button('Submit', variant='primary', size='sm')

                with gr.Column(scale=1):
                    clear_btn = gr.Button('Clear', variant='stop', size='sm')

    # Enter in the textbox: append the question, then fetch the answer.
    txt.submit(add_text, [chatbot, txt], [chatbot, txt]).then(
        bot, [chatbot], chatbot)

    # Submit button: same pipeline, plus a gc pass afterwards.
    submit_btn.click(add_text, [chatbot, txt], [chatbot, txt]).then(
        bot, [chatbot], chatbot).then(
        clear_cuda_cache, None, None
    )

    # Clear button wipes the transcript without queueing.
    clear_btn.click(lambda: None, None, chatbot, queue=False)

if __name__ == '__main__':
    # demo.queue(concurrency_count=3)
    demo.launch(server_name="0.0.0.0", server_port=7860, show_api=False)
src/instructions.txt
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
Operate as a chatbot specializing in technical and analytical guidance for airline flight operations. When presented with technical questions, offer detailed, step-by-step solutions or explanations. Refuse to answer questions unrelated to aviation.
|
src/utils.py
ADDED
@@ -0,0 +1,63 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import gc, requests, os
|
2 |
+
|
3 |
+
MODEL_URL = os.environ.get('MODEL_URL')
|
4 |
+
BEARER_TOKEN = os.environ.get('BEARER_TOKEN')
|
5 |
+
|
6 |
+
|
7 |
+
def get_chat_history(inputs):
    """Flatten [(human, ai), ...] turn pairs into one prompt string.

    Each turn renders as "Human:<msg>\nAssistant:<msg>"; turns are joined
    with newlines. An empty history yields "".
    """
    return "\n".join(
        f"Human:{human}\nAssistant:{ai}" for human, ai in inputs
    )
13 |
+
|
14 |
+
def add_text(history, text):
    """Append the user's message as a new, unanswered turn.

    Returns the extended history plus an empty string (used by the UI to
    clear the input textbox). The incoming history list is not mutated —
    a new list is returned.
    """
    return history + [[text, None]], ""
18 |
+
|
19 |
+
|
20 |
+
def bot(history,
        instruction="Use the following pieces of context to answer the question at the end. Generate the answer based on the given context only if you find the answer in the context. If you do not find any information related to the question in the given context, just say that you don't know, don't try to make up an answer. Keep your answer expressive.",
        temperature=0.1,
        max_new_tokens=512,
        ):
    """Answer the latest user turn by querying the remote model endpoint.

    history: list of [human, ai] pairs; the last pair's ai slot is filled
    in with the model's answer (or an error message) and the updated
    history is returned.
    instruction: system prompt used only as a fallback — when
    instructions.txt exists its contents take precedence, matching the
    original always-read-the-file behavior.
    """
    # Prefer the deployed instructions file; fall back to the parameter so a
    # missing file no longer crashes the whole request handler.
    try:
        with open('instructions.txt', 'r') as file:
            instruction = file.read()
    except OSError:
        pass

    # All turns except the one being answered form the conversational context.
    chat_history_formatted = get_chat_history(history[:-1])

    # Setting up headers for the HTTP request
    headers = {
        'Content-Type': 'application/json',
        'Authorization': f'Bearer {BEARER_TOKEN}'
    }

    # Preparing the data payload for the request
    data = {
        "input": {
            "question": history[-1][0],
            "chat_history": chat_history_formatted,
            "instruction": instruction,
            "temperature": temperature,
            "max_new_tokens": max_new_tokens
        }
    }

    # Sending a POST request to the AI model URL. A bounded timeout is
    # essential: requests has no default timeout, so a stalled endpoint
    # would otherwise hang the UI forever. Network failures surface as the
    # same error message the HTTP-error path already used.
    try:
        response = requests.post(MODEL_URL, json=data, headers=headers, timeout=120)
    except requests.exceptions.RequestException:
        history[-1][1] = "Error: Unable to get response from RunPod"
        return history

    # Processing the response
    if response.status_code == 200:
        res = response.json()
        history[-1][1] = res['output']['result']
    else:
        history[-1][1] = "Error: Unable to get response from RunPod"

    return history
59 |
+
|
60 |
+
def clear_cuda_cache():
    """Run a garbage-collection pass after a response completes.

    NOTE(review): despite the name, no CUDA work happens here — inference
    runs on a remote endpoint, so only the local Python gc is triggered.
    """
    gc.collect()
    return None