rishi1985 committed on
Commit
f43aa76
1 Parent(s): 32ff373

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +100 -11
app.py CHANGED
@@ -79,11 +79,29 @@ from pymongo.mongo_client import MongoClient
79
 
80
  @app.on_event("startup")
81
  async def startup_event():
 
 
82
  print("on startup")
83
 
84
 
85
  # t = threading.Thread(target=makeqimg)
86
  # t.start()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
87
 
88
 
89
 
@@ -97,25 +115,96 @@ async def get_answer_llama(request: Request ):
97
  data = await request.json()
98
  text = data['text']
99
  print("recived ",text)
 
100
 
101
- # try:
102
- # model = data['model']
103
- # if model == '13b':
104
- # print('using 13b')
105
- # res= do_ML_LLAMA13b(text,0)
106
- # dict={"LLAMA":res}
107
- # return JSONResponse(dict)
108
- # except:
109
- # print("Using 7b")
110
-
111
 
112
- res= do_ML_LLAMA7b(text,0)
113
 
114
  dict={"LLAMA":res}
115
 
116
  return JSONResponse(dict)
117
 
118
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
119
 
120
 
121
  def do_ML_LLAMA7b(text:str, trycount:int):
 
79
 
80
  @app.on_event("startup")
81
  async def startup_event():
82
+ x=requests.get('https://open-ai-ping-eight.vercel.app/proxy')
83
+ print("response ", x.text)
84
  print("on startup")
85
 
86
 
87
  # t = threading.Thread(target=makeqimg)
88
  # t.start()
89
+ proxy =None
90
+
91
+
92
+ @app.post("/proxy")
93
+ async def get_proxy(request: Request ):
94
+ data = await request.json()
95
+ print('data ', data)
96
+ try:
97
+ text = data['url']
98
+
99
+ global proxy
100
+ proxy = text
101
+ ##done
102
+ print('proxy recived ',proxy)
103
+ except Exception as e:
104
+ print(e)
105
 
106
 
107
 
 
115
  data = await request.json()
116
  text = data['text']
117
  print("recived ",text)
118
+ proxy=None
119
 
120
+ try:
121
+ proxy = data['proxy']
122
+ except:
123
+ pass
124
+ print('proxy ip ', proxy )
 
 
 
 
 
125
 
126
+ res= do_ML_LLAMA70b(text,0, proxy)
127
 
128
  dict={"LLAMA":res}
129
 
130
  return JSONResponse(dict)
131
 
132
 
133
+ def do_ML_LLAMA70b(text:str, trycount:int, proxy:str):
134
+ starttime=time.time()
135
+ options = ChromeOptions()
136
+ options.add_argument('--no-sandbox')
137
+ options.add_argument('-headless')
138
+ options.add_argument("start-maximized")
139
+ options.add_argument("--ignore-certificate-errors")
140
+ proxy_port = "8080"
141
+ proxy_ip= proxy
142
+ print("pxoxy ip ",proxy_ip)
143
+ proxy_ip = f"{proxy_ip}:{proxy_port}"
144
+ print("proxy final ", proxy_ip)
145
+ options.add_argument(f'--proxy-server=http://{proxy_ip}')
146
+
147
+ service = Service(executable_path=chrome_driver_path)
148
+ driver = webdriver.Chrome(options= options,service=service)
149
+ driver.get("https://huggingface.co/chat/")
150
+ try:
151
+
152
+ while True:
153
+ currtime= time.time()
154
+ if(currtime>starttime+20):
155
+ return "Requested Could not be proceed"
156
+ try:
157
+ # xpath_expression = '//textarea[@data-testid="tex
158
+ textarea_element = driver.find_element(By.XPATH, "//textarea[@placeholder='Ask anything']")
159
+
160
+
161
+ for line in text.split('\n'):
162
+ textarea_element.send_keys(line)
163
+ textarea_element.send_keys(Keys.SHIFT + Keys.ENTER)
164
+
165
+ textarea_element.send_keys('\n')
166
+
167
+ break
168
+ except:
169
+ continue
170
+
171
+
172
+ prev =""
173
+
174
+ time.sleep(0.6)
175
+ while True:
176
+ time.sleep(0.5)
177
+ currtime= time.time()
178
+
179
+ if(currtime>starttime+170):
180
+ driver.delete_all_cookies()
181
+ driver.quit()
182
+
183
+ return "Requested Could not be proceed"
184
+
185
+ try:
186
+ class_name = "-ml-1"
187
+ svg_elements = driver.find_element(By.CLASS_NAME, class_name)
188
+ continue
189
+
190
+ except:
191
+ print("not found")
192
+
193
+ element = driver.find_element(By.CLASS_NAME, "prose") # Find the element by its class name
194
+ element_text = element.text # Extract the text from the element
195
+ return element_text
196
+
197
+
198
+
199
+ return " --Error Occurred-- "
200
+
201
+
202
+
203
+ except:
204
+ return "Error Occurred "
205
+
206
+
207
+
208
 
209
 
210
  def do_ML_LLAMA7b(text:str, trycount:int):