add claude model
- README.md +2 -0
- app.py +29 -19
- requirements.txt +2 -1
README.md
CHANGED
@@ -10,3 +10,5 @@ pinned: false
 ---
 
 Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
+
+ANTHROPIC_API_KEY
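The new README lines document `ANTHROPIC_API_KEY`, the key the app now needs. A minimal sketch, assuming the key is stored as a Space secret (Spaces expose secrets to the running app as environment variables of the same name):

```python
import os

# Hypothetical startup check: fail fast if the Space secret is missing.
if not os.getenv("ANTHROPIC_API_KEY"):
    raise RuntimeError("ANTHROPIC_API_KEY is not set; add it as a Space secret.")
```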
app.py
CHANGED
@@ -10,15 +10,14 @@ import shutil
 from pathlib import Path
 from tempfile import NamedTemporaryFile
 from sklearn.neighbors import NearestNeighbors
-from openai import OpenAI
+import anthropic
 
-# …
-# …
+# client = OpenAI(
+#     base_url='https://api-inference.huggingface.co/models/mistralai/Mixtral-8x7B-Instruct-v0.1/v1/',
+#     api_key=os.getenv('openai_key')
+# )
 
-client = OpenAI(
-    base_url='https://api-inference.huggingface.co/models/mistralai/Mixtral-8x7B-Instruct-v0.1/v1/',
-    api_key=os.getenv('key')
-)
+client = anthropic.Anthropic()
 
 from util import pdf_to_text, text_to_chunks, SemanticSearch
 
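The bare `anthropic.Anthropic()` constructor works here because the client reads `ANTHROPIC_API_KEY` from the environment by default; a sketch of the equivalent explicit form:

```python
import os
import anthropic

# Same effect as anthropic.Anthropic(): the key is just passed explicitly.
client = anthropic.Anthropic(api_key=os.environ["ANTHROPIC_API_KEY"])
```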
@@ -31,18 +30,29 @@ def load_recommender(path, start_page=1):
     return 'Corpus Loaded.'
 
 
-def openai_generate_text(prompt, model = "gpt-3.5-turbo-16k-0613"):
-    model="mistralai/Mixtral-8x7B-Instruct-v0.1"
-    max_tokens=1024
-
-    message = clinet.chat.completions.create(
+# def openai_generate_text(prompt, model = "gpt-3.5-turbo-16k-0613"):
+#     model="mistralai/Mixtral-8x7B-Instruct-v0.1"
+#     max_tokens=1024
+#     message = clinet.chat.completions.create(
+#         model=model,
+#         messages=[
+#             {"role": "user", "content": prompt}
+#         ],
+#         max_tokens=max_tokens,
+#     ).choices[0].message.content
+#     return message
+
+def claude_generate_text(prompt, model = "claude-3-haiku-20240307"):
+    message = client.messages.create(
         model=model,
+        max_tokens=1000,
+        temperature=0.0,
+        # system="Respond only in mandarin",
         messages=[
             {"role": "user", "content": prompt}
-        ],
-        max_tokens=max_tokens,
-    ).choices[0].message.content
-    return message
+        ]
+    )
+    return message.content[0].text
 
 def generate_answer(question):
     topn_chunks = recommender(question)
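For reference, a minimal standalone call doing what the new `claude_generate_text` helper does (assumes `ANTHROPIC_API_KEY` is set); the Messages API returns a list of content blocks, which is why the helper unwraps `message.content[0].text`:

```python
import anthropic

client = anthropic.Anthropic()  # picks up ANTHROPIC_API_KEY from the environment

message = client.messages.create(
    model="claude-3-haiku-20240307",
    max_tokens=1000,
    temperature=0.0,
    messages=[{"role": "user", "content": "Say hello in one word."}],
)
print(message.content[0].text)  # first content block holds the generated text
```

The commented-out `system=` argument would set a system prompt if enabled; as committed, only the user message is sent.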
@@ -57,10 +67,10 @@ def generate_answer(question):
         "in English \n\nQuery: "
 
     prompt += f"{question}\nAnswer:"
-    answer = openai_generate_text(prompt)
+    answer = claude_generate_text(prompt)
     return answer
 
-def …
+def question_answer(chat_history, file, question):
     suffix = Path(file.name).suffix
     with NamedTemporaryFile(delete=False, suffix=suffix) as tmp:
         shutil.copyfile(file.name, tmp.name)
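Only the tail of `generate_answer` appears in the hunk; a hedged sketch of how the whole function presumably fits together after this change (the chunk formatting and instruction wording above the `Query:` line are assumptions, as they sit outside the diff context):

```python
def generate_answer(question):
    topn_chunks = recommender(question)  # top-n semantically similar PDF chunks

    # Assumed prompt assembly: concatenate the retrieved chunks, then the instructions
    # ending in "... in English \n\nQuery: " that the diff context shows.
    prompt = "search results:\n\n"
    for chunk in topn_chunks:
        prompt += chunk + "\n\n"
    prompt += "Instructions: answer the query using the search results above, in English \n\nQuery: "

    prompt += f"{question}\nAnswer:"
    answer = claude_generate_text(prompt)  # previously generated via the OpenAI-style client
    return answer
```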
@@ -91,7 +101,7 @@ with gr.Blocks(css="""#chatbot { font-size: 14px; min-height: 1200; }""") as dem
         chatbot = gr.Chatbot(label="Chat History", elem_id="chatbot")
 
     btn.click(
-        …
+        question_answer,
         inputs=[chatbot, file, question],
         outputs=[chatbot],
         api_name="predict",
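The only change in this hunk is passing `question_answer` as the click handler. A self-contained sketch of the wiring pattern (the surrounding layout and component definitions are assumptions, since the full `gr.Blocks` body is outside the hunk):

```python
import gradio as gr

def question_answer(chat_history, file, question):
    # Stub for illustration; the real handler extracts the PDF text and asks Claude.
    chat_history = chat_history or []
    chat_history.append((question, "…"))
    return chat_history

with gr.Blocks(css="#chatbot { font-size: 14px; min-height: 1200; }") as demo:
    file = gr.File(label="PDF", file_types=[".pdf"])   # assumed component
    question = gr.Textbox(label="Question")            # assumed component
    btn = gr.Button("Submit")                          # assumed component
    chatbot = gr.Chatbot(label="Chat History", elem_id="chatbot")

    btn.click(
        question_answer,
        inputs=[chatbot, file, question],
        outputs=[chatbot],
        api_name="predict",
    )

demo.launch()
```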
requirements.txt
CHANGED
@@ -4,4 +4,5 @@ numpy
 scikit-learn
 tensorflow-cpu
 tensorflow-hub
-gradio
+gradio
+anthropic