sachin committed on
Commit
d83c996
1 Parent(s): d152ed5

add sarvam translate

Browse files
accounts/__init__.py ADDED
File without changes
accounts/admin.py ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ from django.contrib import admin
2
+
3
+ # Register your models here.
accounts/apps.py ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
from django.apps import AppConfig


class AccountsConfig(AppConfig):
    """App configuration for the ``accounts`` app (registered in INSTALLED_APPS)."""

    # Use 64-bit auto-incrementing primary keys for models in this app.
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'accounts'
accounts/migrations/__init__.py ADDED
File without changes
accounts/models.py ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ from django.db import models
2
+
3
+ # Create your models here.
accounts/tests.py ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ from django.test import TestCase
2
+
3
+ # Create your tests here.
accounts/urls.py ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
from django.urls import path, include

# Mount djoser's user-management endpoints and its token-auth endpoints
# under the same 'api/' prefix (djoser is listed in INSTALLED_APPS).
urlpatterns = [
    path('api/', include('djoser.urls')),
    path('api/', include('djoser.urls.authtoken')),
]
accounts/views.py ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ from django.shortcuts import render
2
+
3
+ # Create your views here.
inference/rag_process.py ADDED
@@ -0,0 +1,99 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from mistralai import Mistral
2
+ import requests
3
+ import numpy as np
4
+ import faiss
5
+ import os
6
+
# API key read from the environment; defaults to "" if unset, in which case
# Mistral API calls below will fail at request time.
api_key = os.getenv("MISTRAL_API_KEY", "")
# Module-level client shared by all helper functions in this file.
client = Mistral(api_key=api_key)
9
+
def get_data():
    """Download Paul Graham's essay, cache it to ``essay.txt``, and return it.

    Returns:
        str: The full essay text.

    Raises:
        requests.HTTPError: If the download returns an error status.
    """
    response = requests.get(
        'https://raw.githubusercontent.com/run-llama/llama_index/main/docs/docs/examples/data/paul_graham/paul_graham_essay.txt'
    )
    # Fail loudly rather than silently caching an HTML error page.
    response.raise_for_status()
    text = response.text

    # Context manager guarantees the handle is closed even if the write fails.
    with open('essay.txt', 'w') as f:
        f.write(text)

    return text
22
+
def create_chunks(text, chunk_size=2048):
    """Split *text* into consecutive chunks of at most *chunk_size* characters.

    Args:
        text: The string to split.
        chunk_size: Maximum chunk length. Defaults to 2048, the size the
            original script hard-coded, so existing callers are unaffected.

    Returns:
        list[str]: The chunks in document order; empty list for empty input.
    """
    return [text[i:i + chunk_size] for i in range(0, len(text), chunk_size)]
30
+
def get_text_embedding(input):
    """Return the Mistral embedding vector for *input* text.

    Uses the module-level ``client`` and the "mistral-embed" model.
    """
    response = client.embeddings.create(model="mistral-embed", inputs=input)
    return response.data[0].embedding
37
+
38
+
def load_vectors(chunks):
    """Embed each chunk and build an in-memory FAISS L2 index over them.

    Args:
        chunks: Iterable of text chunks to embed (one vector per chunk).

    Returns:
        faiss.IndexFlatL2: Index whose vector order matches *chunks*.
    """
    # FAISS expects float32; np.array over Python floats would give float64.
    # NOTE(review): assumes every embedding has the same dimensionality.
    text_embeddings = np.array(
        [get_text_embedding(chunk) for chunk in chunks], dtype=np.float32
    )

    d = text_embeddings.shape[1]  # embedding dimensionality
    index = faiss.IndexFlatL2(d)
    index.add(text_embeddings)
    return index
48
+
def create_embed_for_question(question):
    """Embed *question* and return it as a 2-D array of shape (1, d).

    FAISS ``search`` expects a batch of query vectors, hence the extra axis.
    """
    return np.array([get_text_embedding(question)])
54
+
def get_similar_chunks(index, question_embeddings, chunks, k=2):
    """Return the *k* chunks nearest to the query embedding.

    Args:
        index: FAISS index built over the chunk embeddings.
        question_embeddings: Array of shape (1, d) with the query embedding.
        chunks: Original text chunks, in the same order as the index vectors.
        k: Number of neighbours to retrieve. Defaults to 2, the value the
            original code hard-coded.

    Returns:
        list: Retrieved chunks, nearest first.
    """
    # index.search returns (distances, indices); distances are not needed.
    _, indices = index.search(question_embeddings, k=k)
    return [chunks[i] for i in indices.tolist()[0]]
61
+
def create_prompt(retrieved_chunk, question):
    # Build the grounded-QA prompt: retrieved context block followed by the
    # user's query, instructing the model to answer from context only.
    # NOTE(review): the template's leading whitespace is preserved as-is;
    # confirm against the original file before reformatting.
    prompt = f"""
Context information is below.
---------------------
{retrieved_chunk}
---------------------
Given the context information and not prior knowledge, answer the query.
Query: {question}
Answer:
"""
    return prompt
73
+
74
+
def run_mistral(user_message, model="mistral-large-latest"):
    """Send *user_message* as a single user turn to the Mistral chat API.

    Returns the assistant's reply text.
    """
    chat_response = client.chat.complete(
        model=model,
        messages=[{"role": "user", "content": user_message}],
    )
    return chat_response.choices[0].message.content
86
+
def main():
    """End-to-end RAG demo: fetch the essay, index it, answer one question."""
    text = get_data()
    chunks = create_chunks(text=text)
    question = "What were the two main things the author worked on before college?"

    index = load_vectors(chunks=chunks)
    question_embeddings = create_embed_for_question(question=question)
    retrieved_chunk = get_similar_chunks(index, question_embeddings, chunks)
    prompt = create_prompt(retrieved_chunk, question)
    answer = run_mistral(prompt)
    print(answer)


# Guard the entry point so importing this module (e.g. from Django code)
# does not trigger network downloads and API calls.
if __name__ == "__main__":
    main()
inference/sarvam_inference.py ADDED
@@ -0,0 +1,32 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import requests
import os


def translate_text(text_string, source_lang, target_lang):
    """Translate *text_string* via the Sarvam AI /translate endpoint.

    Args:
        text_string: Text to translate.
        source_lang: Source language code, e.g. "en-IN".
        target_lang: Target language code, e.g. "kn-IN".

    Returns:
        str: Raw response body from the API (JSON text), even on API errors —
        callers inspect/forward it as-is.
    """
    api_key = os.getenv("SARVAM_API_KEY", "")
    url = "https://api.sarvam.ai/translate"
    payload = {
        "input": text_string,
        "source_language_code": source_lang,
        "target_language_code": target_lang,
        "speaker_gender": "Male",
        "mode": "formal",
        "model": "mayura:v1",
        "enable_preprocessing": True
    }
    headers = {
        "Content-Type": "application/json",
        "API-Subscription-Key": api_key,
    }

    # requests.post is the idiomatic form of requests.request("POST", ...);
    # the timeout prevents hanging forever on an unresponsive endpoint.
    response = requests.post(url, json=payload, headers=headers, timeout=30)
    return response.text
26
+
27
+
28
+ #text_string = "Who is the founder of Bengaluru?"
29
+ #source_lang = "en-IN"
30
+ #target_lang = "kn-IN"
31
+ #response_text = translate_text(text_string, source_lang, target_lang)
32
+ #print(response_text)
recipes/urls.py CHANGED
@@ -1,10 +1,11 @@
1
  from django.urls import path
2
  from .views import recipe_generate_route
3
- from .views import VisionLLMView, NIMVisionLLMView, TextLLMView
4
 
5
  urlpatterns = [
6
  path('recipe_generate/', recipe_generate_route, name='recipe_generate'),
7
  path('vision_llm_url/', VisionLLMView.as_view()),
8
  path('nim_vision_llm_url/', NIMVisionLLMView.as_view()),
9
  path('text_llm_url/', TextLLMView.as_view()),
 
10
  ]
 
from django.urls import path
from .views import recipe_generate_route
from .views import VisionLLMView, NIMVisionLLMView, TextLLMView, TranslateLLMView

# Route table for the recipes app: one function-based endpoint plus the
# class-based LLM proxy views defined in .views.
urlpatterns = [
    path('recipe_generate/', recipe_generate_route, name='recipe_generate'),
    path('vision_llm_url/', VisionLLMView.as_view()),
    path('nim_vision_llm_url/', NIMVisionLLMView.as_view()),
    path('text_llm_url/', TextLLMView.as_view()),
    path('translate_llm_url/', TranslateLLMView.as_view()),
]
recipes/views.py CHANGED
@@ -10,43 +10,80 @@ import base64
10
  import json
11
  import requests
12
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
13
 
14
  class TextLLMView(APIView):
15
  def post(self, request, format=None):
16
- data = request.data
17
- api_key = os.environ["MISTRAL_API_KEY"]
18
-
19
- # Initialize the Mistral client
20
- client = Mistral(api_key=api_key)
21
-
22
- prompt = data['messages'][0]['prompt']
23
- # Specify model
24
- #model = "pixtral-12b-2409"
25
- model = data['model']
26
- # Define the messages for the chat
27
- messages = [
28
- {
29
- "role": "user",
30
- "content": [
31
- {
32
- "type": "text",
33
- "text": prompt
34
- }
35
- ]
36
- }
37
- ]
38
 
39
- # Get the chat response
40
- chat_response = client.chat.complete(
41
- model=model,
42
- messages=messages
43
- )
 
 
 
 
 
 
 
 
 
 
 
44
 
45
- content = chat_response.choices[0].message.content
46
- #print(chat_response.choices[0].message.content)
47
- # Return the content of the response
48
- return Response({"response": content})
 
49
 
 
 
 
 
 
 
 
50
 
51
 
52
  @api_view(['GET'])
@@ -69,44 +106,48 @@ def recipe_generate_route(request):
69
 
70
  class VisionLLMView(APIView):
71
  def post(self, request, format=None):
72
- data = request.data
73
- api_key = os.environ["MISTRAL_API_KEY"]
74
-
75
- # Initialize the Mistral client
76
- client = Mistral(api_key=api_key)
77
-
78
- image_data = (data['messages'][0]['image'][0])
79
- prompt = data['messages'][0]['prompt']
80
- # Specify model
81
- #model = "pixtral-12b-2409"
82
- model = data['model']
83
- # Define the messages for the chat
84
- messages = [
85
- {
86
- "role": "user",
87
- "content": [
88
- {
89
- "type": "text",
90
- "text": prompt
91
- },
92
- {
93
- "type": "image_url",
94
- "image_url": f"data:image/jpeg;base64,{image_data}"
95
- }
96
- ]
97
- }
98
- ]
 
99
 
100
- # Get the chat response
101
- chat_response = client.chat.complete(
102
- model=model,
103
- messages=messages
104
- )
105
 
106
- content = chat_response.choices[0].message.content
107
- #print(chat_response.choices[0].message.content)
108
- # Return the content of the response
109
- return Response({"response": content})
 
 
 
110
 
111
 
112
  class NIMVisionLLMView(APIView):
@@ -155,5 +196,5 @@ class NIMVisionLLMView(APIView):
155
 
156
 
157
  except Exception as e: # Added general exception handling
158
- print(f"Error: {e}")
159
- return None
 
10
  import json
11
  import requests
12
 
class TranslateLLMView(APIView):
    """POST endpoint that translates a prompt via the Sarvam AI translate API.

    Expected request body::

        {"messages": [{"prompt": "..."}],
         "sourceLanguage": "en-IN",
         "targetLanguage": "kn-IN"}
    """

    def post(self, request, format=None):
        try:
            data = request.data
            prompt = data['messages'][0]['prompt']
            source_language = data['sourceLanguage']
            target_language = data['targetLanguage']

            api_key = os.getenv("SARVAM_API_KEY", "")
            url = "https://api.sarvam.ai/translate"
            payload = {
                "input": prompt,
                "source_language_code": source_language,
                "target_language_code": target_language,
                "speaker_gender": "Male",
                "mode": "formal",
                "model": "mayura:v1",
                "enable_preprocessing": True
            }
            headers = {
                "Content-Type": "application/json",
                "API-Subscription-Key": api_key,
            }

            # requests.post instead of requests.request("POST", ...); the
            # timeout keeps a dead upstream from hanging the request worker.
            response = requests.post(url, json=payload, headers=headers, timeout=30)

            # Forward Sarvam's raw JSON body to the client unchanged.
            return Response({"response": response.text})
        except Exception as e:
            # Boundary handler: hide internals, return a generic 500.
            print(f"An error occurred: {e}")
            return Response({'error': 'Something went wrong'}, status=500)
47
 
class TextLLMView(APIView):
    """POST endpoint that forwards a text prompt to the Mistral chat API.

    Expected request body::

        {"model": "<mistral model name>",
         "messages": [{"prompt": "..."}]}
    """

    def post(self, request, format=None):
        try:
            data = request.data
            # KeyError (missing env var or request field) falls through to
            # the broad handler below and becomes a 500.
            api_key = os.environ["MISTRAL_API_KEY"]

            # Client is created per-request from the environment key.
            client = Mistral(api_key=api_key)

            prompt = data['messages'][0]['prompt']
            # Model name is chosen by the caller, e.g. "pixtral-12b-2409".
            model = data['model']
            # Single user turn with one text part.
            messages = [
                {
                    "role": "user",
                    "content": [
                        {
                            "type": "text",
                            "text": prompt
                        }
                    ]
                }
            ]

            # Synchronous chat completion call.
            chat_response = client.chat.complete(
                model=model,
                messages=messages
            )

            content = chat_response.choices[0].message.content
            # Return only the assistant's text to the client.
            return Response({"response": content})
        except Exception as e:
            # Boundary handler: hide internals, return a generic 500.
            print(f"An error occurred: {e}")
            return Response({'error': 'Something went wrong'}, status=500)
87
 
88
 
89
  @api_view(['GET'])
 
106
 
class VisionLLMView(APIView):
    """POST endpoint that sends a prompt plus one base64 image to Mistral.

    Expected request body::

        {"model": "<mistral model name>",
         "messages": [{"prompt": "...", "image": ["<base64 jpeg>"]}]}
    """

    def post(self, request, format=None):
        try:
            data = request.data
            # KeyError (missing env var or request field) falls through to
            # the broad handler below and becomes a 500.
            api_key = os.environ["MISTRAL_API_KEY"]

            # Client is created per-request from the environment key.
            client = Mistral(api_key=api_key)

            # Only the first image of the list is used.
            image_data = (data['messages'][0]['image'][0])
            prompt = data['messages'][0]['prompt']
            # Model name is chosen by the caller, e.g. "pixtral-12b-2409".
            model = data['model']
            # One user turn containing a text part and an inline data-URL image.
            messages = [
                {
                    "role": "user",
                    "content": [
                        {
                            "type": "text",
                            "text": prompt
                        },
                        {
                            "type": "image_url",
                            "image_url": f"data:image/jpeg;base64,{image_data}"
                        }
                    ]
                }
            ]

            # Synchronous chat completion call.
            chat_response = client.chat.complete(
                model=model,
                messages=messages
            )

            content = chat_response.choices[0].message.content
            # Return only the assistant's text to the client.
            return Response({"response": content})
        except Exception as e:
            # Boundary handler: hide internals, return a generic 500.
            print(f"An error occurred: {e}")
            return Response({'error': 'Something went wrong'}, status=500)
151
 
152
 
153
  class NIMVisionLLMView(APIView):
 
196
 
197
 
198
  except Exception as e: # Added general exception handling
199
+ print(f"An error occurred: {e}")
200
+ return Response({'error': 'Something went wrong'}, status=500)
requirements.txt CHANGED
@@ -2,35 +2,50 @@ annotated-types==0.7.0
2
  anyio==4.5.0
3
  asgiref==3.8.1
4
  certifi==2024.8.30
 
5
  charset-normalizer==3.3.2
6
  click==8.1.7
 
 
7
  Django==5.1.1
8
  django-cors-headers==4.4.0
9
  django-filter==24.3
 
10
  django_csp==3.8
11
  djangorestframework==3.15.2
 
 
12
  drf-yasg==1.21.7
13
  eval_type_backport==0.2.0
14
  exceptiongroup==1.2.2
 
15
  h11==0.14.0
16
  httpcore==1.0.5
17
  httpx==0.27.2
18
  idna==3.10
19
  inflection==0.5.1
20
  jsonpath-python==1.0.6
 
21
  mistralai==1.1.0
22
  mypy-extensions==1.0.0
23
- numpy==2.1.1
 
24
  packaging==24.1
25
  pandas==2.2.3
 
26
  pydantic==2.9.2
27
  pydantic_core==2.23.4
 
28
  python-dateutil==2.8.2
 
29
  pytz==2024.2
30
  PyYAML==6.0.2
31
  requests==2.32.3
 
32
  six==1.16.0
33
  sniffio==1.3.1
 
 
34
  sqlparse==0.5.1
35
  typing-inspect==0.9.0
36
  typing_extensions==4.12.2
 
2
  anyio==4.5.0
3
  asgiref==3.8.1
4
  certifi==2024.8.30
5
+ cffi==1.17.1
6
  charset-normalizer==3.3.2
7
  click==8.1.7
8
+ cryptography==43.0.1
9
+ defusedxml==0.8.0rc2
10
  Django==5.1.1
11
  django-cors-headers==4.4.0
12
  django-filter==24.3
13
+ django-templated-mail==1.1.1
14
  django_csp==3.8
15
  djangorestframework==3.15.2
16
+ djangorestframework-simplejwt==5.3.1
17
+ djoser==2.2.3
18
  drf-yasg==1.21.7
19
  eval_type_backport==0.2.0
20
  exceptiongroup==1.2.2
21
+ faiss-cpu==1.8.0.post1
22
  h11==0.14.0
23
  httpcore==1.0.5
24
  httpx==0.27.2
25
  idna==3.10
26
  inflection==0.5.1
27
  jsonpath-python==1.0.6
28
+ Markdown==3.7
29
  mistralai==1.1.0
30
  mypy-extensions==1.0.0
31
+ numpy==1.26.4
32
+ oauthlib==3.2.2
33
  packaging==24.1
34
  pandas==2.2.3
35
+ pycparser==2.22
36
  pydantic==2.9.2
37
  pydantic_core==2.23.4
38
+ PyJWT==2.9.0
39
  python-dateutil==2.8.2
40
+ python3-openid==3.2.0
41
  pytz==2024.2
42
  PyYAML==6.0.2
43
  requests==2.32.3
44
+ requests-oauthlib==2.0.0
45
  six==1.16.0
46
  sniffio==1.3.1
47
+ social-auth-app-django==5.4.2
48
+ social-auth-core==4.5.4
49
  sqlparse==0.5.1
50
  typing-inspect==0.9.0
51
  typing_extensions==4.12.2
spaces/settings.py CHANGED
@@ -43,6 +43,10 @@ INSTALLED_APPS = [
43
  'space_walks',
44
  'recipes',
45
  'csp',
 
 
 
 
46
  ]
47
 
48
  MIDDLEWARE = [
@@ -71,6 +75,10 @@ CSP_FONT_SRC = ("'self'", "https:", "data:")
71
  CORS_ORIGIN_WHITELIST = [
72
  'https://gaganyatri.in',
73
  ]
 
 
 
 
74
 
75
  CORS_ALLOW_CREDENTIALS = True
76
 
@@ -152,4 +160,16 @@ REST_FRAMEWORK = {
152
  'DEFAULT_RENDERER_CLASSES': [
153
  'rest_framework.renderers.JSONRenderer',
154
  ],
 
 
 
 
 
 
 
 
 
 
 
 
155
  }
 
43
  'space_walks',
44
  'recipes',
45
  'csp',
46
+ 'rest_framework',
47
+ 'rest_framework.authtoken',
48
+ 'djoser',
49
+ 'accounts'
50
  ]
51
 
52
  MIDDLEWARE = [
 
75
  CORS_ORIGIN_WHITELIST = [
76
  'https://gaganyatri.in',
77
  ]
78
+ CORS_ALLOWED_ORIGINS = [
79
+ "http://localhost:3000",
80
+ "http://127.0.0.1:3000"
81
+ ]
82
 
83
  CORS_ALLOW_CREDENTIALS = True
84
 
 
160
  'DEFAULT_RENDERER_CLASSES': [
161
  'rest_framework.renderers.JSONRenderer',
162
  ],
163
+ 'DEFAULT_AUTHENTICATION_CLASSES': (
164
+ 'rest_framework.authentication.TokenAuthentication',
165
+ ),
166
+ 'DEFAULT_PERMISSION_CLASSES': [
167
+ # TODO - remove comment after authentication in frontend is enforced
168
+ # 'rest_framework.permissions.IsAuthenticated',
169
+ ]
170
+ }
171
+
172
+ # configure Djoser
173
+ DJOSER = {
174
+ "USER_ID_FIELD": "username"
175
  }
spaces/urls.py CHANGED
@@ -40,4 +40,5 @@ urlpatterns = [
40
  path('redoc/', schema_view.with_ui('redoc', cache_timeout=0), name='schema-redoc'),
41
  path('api/v1/space_walks/', include('space_walks.urls')),
42
  path('api/v1/recipes/', include('recipes.urls')),
 
43
  ]
 
40
  path('redoc/', schema_view.with_ui('redoc', cache_timeout=0), name='schema-redoc'),
41
  path('api/v1/space_walks/', include('space_walks.urls')),
42
  path('api/v1/recipes/', include('recipes.urls')),
43
+ path('api/v1/accounts/', include('accounts.urls')),
44
  ]