ffreemt committed
Commit 3892750
1 Parent(s): e561e77

Change model to sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2 (embedding dimension 384)

Files changed (5)
  1. Dockerfile +46 -0
  2. LICENSE +21 -0
  3. download.sh +9 -0
  4. main.py +35 -0
  5. start_server.sh +3 -0
Dockerfile ADDED
@@ -0,0 +1,46 @@
+ # Cloned from https://huggingface.co/spaces/limcheekin/bge-small-en-v1.5/tree/main
+ # Define global args
+ # ARG MODEL="BAAI/bge-small-en-v1.5"
+ ARG MODEL="sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2"
+
+ FROM debian:bullseye-slim AS build-image
+
+ # Include global args in this stage of the build
+ ARG MODEL
+ ENV MODEL=${MODEL}
+
+ COPY ./download.sh ./
+
+ # Install build dependencies
+ RUN apt-get update && \
+     apt-get install -y git-lfs
+
+ RUN chmod +x *.sh && \
+     ./download.sh && \
+     rm *.sh
+
+ # Stage 2 - final runtime image
+ # Grab a fresh copy of the Python image
+ FROM python:3.11-slim
+
+ # Include global args in this stage of the build
+ ARG MODEL
+ ENV MODEL=${MODEL}
+ ENV NORMALIZE_EMBEDDINGS=1
+ ENV HF_HOME="/tmp/hf_home"
+ # Set environment variable for the host
+ ENV HOST=0.0.0.0
+ ENV PORT=7860
+
+ COPY --from=build-image ${MODEL} ${MODEL}
+ COPY ./main.py ./
+ COPY ./start_server.sh ./
+ # COPY ./index.html ./
+ RUN pip install --no-cache-dir open-text-embeddings[server] && \
+     chmod +x ./start_server.sh
+
+ # Expose a port for the server
+ EXPOSE ${PORT}
+
+ # Run the server start script
+ CMD ["/bin/sh", "./start_server.sh"]
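The image can be built and smoke-tested locally before pushing to the Space. A minimal sketch (the image tag emb384-oai is illustrative; MODEL defaults to the ARG value above and can be overridden at build time):

docker build -t emb384-oai .
# override the embedding model if desired
# docker build --build-arg MODEL="BAAI/bge-small-en-v1.5" -t emb384-oai .
docker run --rm -p 7860:7860 emb384-oai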
LICENSE ADDED
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2023 Lim Chee Kin
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
download.sh ADDED
@@ -0,0 +1,9 @@
+ mkdir -p $MODEL
+ git lfs install --skip-smudge
+ git clone https://huggingface.co/$MODEL $MODEL
+ cd $MODEL
+ git lfs pull
+ git lfs install --force
+ rm -rf .git
+ pwd
+ ls -l
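download.sh expects MODEL in the environment and git-lfs on PATH; both are provided by the build stage in the Dockerfile. A hedged example of running it standalone for debugging:

MODEL="sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2" sh ./download.sh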
main.py ADDED
@@ -0,0 +1,35 @@
+ from open.text.embeddings.server.app import create_app
+ from fastapi.responses import HTMLResponse
+ import os
+
+ app = create_app()
+
+ # Read the content of index.html once and store it in memory
+ # with open("index.html", "r") as f: content = f.read()
+ content = """
+ <ul>
+   <li>
+     The API endpoint:
+     <a href="https://mikeee-emb384-oai.hf.space/v1"
+       >https://mikeee-emb384-oai.hf.space/v1</a
+     >
+   </li>
+   <li>
+     The API doc:
+     <a href="https://mikeee-emb384-oai.hf.space/docs"
+       >https://mikeee-emb384-oai.hf.space/docs</a
+     >
+   </li>
+ </ul>
+ """
+
+ @app.get("/", response_class=HTMLResponse)
+ async def read_items():
+     return content
+
+ if __name__ == "__main__":
+     import uvicorn
+     uvicorn.run(app,
+                 host=os.environ["HOST"],
+                 port=int(os.environ["PORT"])
+                 )
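With the container running, the service should answer OpenAI-style embedding requests under the /v1 prefix advertised above. A minimal sketch, assuming open-text-embeddings exposes the OpenAI-compatible /v1/embeddings route (the input text is a placeholder):

curl http://localhost:7860/v1/embeddings \
  -H "Content-Type: application/json" \
  -d '{"input": "hello world", "model": "sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2"}'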
start_server.sh ADDED
@@ -0,0 +1,3 @@
+ #!/bin/sh
+
+ python -B main.py
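Note that main.py reads HOST and PORT from the environment without defaults, so running the script outside the container requires setting both first, for example:

HOST=0.0.0.0 PORT=7860 sh ./start_server.sh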