Update app.py
Browse files
app.py
CHANGED
@@ -88,23 +88,7 @@ async def startup_event():
|
|
88 |
# t.start()
|
89 |
proxy =None
|
90 |
|
91 |
-
|
92 |
-
@app.post("/proxy")
|
93 |
-
async def get_proxy(request: Request ):
|
94 |
-
data = await request.json()
|
95 |
-
print('data ', data)
|
96 |
-
try:
|
97 |
-
text = data['url']
|
98 |
-
|
99 |
-
global proxy
|
100 |
-
proxy = text
|
101 |
-
##done
|
102 |
-
print('proxy recived ',proxy)
|
103 |
-
except Exception as e:
|
104 |
-
print(e)
|
105 |
-
|
106 |
-
|
107 |
-
|
108 |
|
109 |
# LLAMA
|
110 |
|
@@ -114,23 +98,113 @@ chrome_driver_path = '/usr/local/bin/chromedriver-linux64/chromedriver'
|
|
114 |
async def get_answer_llama(request: Request ):
|
115 |
data = await request.json()
|
116 |
text = data['text']
|
117 |
-
text = text.replace('\n', ':')
|
118 |
-
print("recived ",text)
|
119 |
proxy=None
|
120 |
|
121 |
-
try:
|
122 |
-
|
123 |
-
except:
|
124 |
-
|
125 |
-
print('proxy ip ', proxy )
|
126 |
-
|
127 |
-
res=
|
|
|
|
|
|
|
|
|
128 |
|
129 |
dict={"LLAMA":res}
|
130 |
|
131 |
return JSONResponse(dict)
|
132 |
|
133 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
134 |
def do_ML_LLAMA70b(text:str, trycount:int, proxy:str):
|
135 |
starttime=time.time()
|
136 |
options = ChromeOptions()
|
|
|
88 |
# t.start()

# Outbound proxy address used by the scraping helpers; None = no proxy.
# NOTE(review): this commit removed the /proxy endpoint that used to set it,
# so it now appears to stay None — confirm nothing else assigns it.
proxy = None

# Epoch timestamp (time.time()) of the last 70b scrape failure; while a
# request arrives within 3600 s of this, get_answer_llama serves it with the
# 7b fallback instead of retrying the 70b scraper.
llama_last_error = 0


# LLAMA
|
|
|
98 |
async def get_answer_llama(request: Request):
    """Answer a prompt by scraping a hosted LLAMA demo.

    Reads ``{"text": ...}`` from the JSON body.  If the 70b scraper failed
    within the last hour (tracked in the module global ``llama_last_error``),
    the request is served by the 7b fallback instead of retrying the 70b
    scraper.  Returns ``{"LLAMA": <answer>}``.
    """
    payload = await request.json()
    # Newlines confuse the chat textarea; flatten them to ' : '.
    prompt = payload['text'].replace('\n', ' : ')
    proxy = None  # per-request proxy selection is currently disabled

    recently_failed = time.time() - llama_last_error <= 3600
    if recently_failed:
        answer = do_ML_LLAMA7b(prompt, 0)
    else:
        answer = do_ML_LLAMA70b_chori(prompt, 0, proxy)

    return JSONResponse({"LLAMA": answer})
|
120 |
|
121 |
|
122 |
+
def do_ML_LLAMA70b_chori(text: str, trycount: int, proxy: str):
    """Scrape an answer for *text* from the hosted LLAMA-2 70b Gradio demo.

    Drives a headless Chrome session against
    https://chansung-llama2-with-gradio-chat.hf.space/: opens the Control
    Panel, raises the max-token input to 4000, submits *text*, then polls
    until the "generating" spinner disappears and returns the chatbot text.

    Args:
        text: prompt to submit (newlines should already be stripped).
        trycount: unused here; kept for signature compatibility with callers.
        proxy: unused here; kept for signature compatibility with callers.

    Returns:
        The scraped answer, or an error string ("Requested Could not be
        proceed" on timeout, "Error Occurred " on an unexpected exception —
        messages kept verbatim, typos included, for caller compatibility).
        Every failure path records time.time() in the module global
        ``llama_last_error`` so get_answer_llama falls back to the 7b model
        for the next hour.
    """
    global llama_last_error
    starttime = time.time()

    options = ChromeOptions()
    options.add_argument('--no-sandbox')
    options.add_argument('-headless')
    options.add_argument("start-maximized")
    options.add_argument("--ignore-certificate-errors")
    service = Service(executable_path=chrome_driver_path)
    driver = webdriver.Chrome(options=options, service=service)
    driver.get("https://chansung-llama2-with-gradio-chat.hf.space/?")

    def _give_up(message):
        # Record the failure so callers fall back to 7b, then tear down Chrome.
        global llama_last_error
        llama_last_error = time.time()
        driver.delete_all_cookies()
        driver.quit()
        return message

    try:
        # Phase 1: submit the prompt.  The page loads asynchronously, so keep
        # retrying until the widgets exist; give up after 20 s total.
        while True:
            if time.time() > starttime + 20:
                return _give_up("Requested Could not be proceed")
            try:
                panel = driver.find_element(
                    By.XPATH,
                    '//span[@class="svelte-s1r2yt" and contains(text(), "Control Panel")]')
                panel.click()
                time.sleep(0.5)
                max_tokens_box = driver.find_element(
                    By.CSS_SELECTOR, '[data-testid="number-input"]')
                max_tokens_box.clear()
                max_tokens_box.send_keys("4000")  # raise the max-new-tokens limit
                textarea = driver.find_element(
                    By.XPATH,
                    "//textarea[@data-testid='textbox' and @placeholder='Ask anything']")
                textarea.clear()
                textarea.send_keys(text)
                textarea.send_keys("\n")  # Enter submits the prompt
                break
            except Exception as e:  # was a bare retry; widgets not ready yet
                print(e)

        # Phase 2: poll until generation finishes; give up 170 s after start.
        time.sleep(0.6)
        while True:
            time.sleep(0.5)
            if time.time() > starttime + 170:
                return _give_up("Requested Could not be proceed")
            try:
                # Spinner still present -> model is still generating.
                driver.find_element(
                    By.CSS_SELECTOR, 'div.wrap.default.full.svelte-zlszon.generating')
                print("generating")
            except Exception:  # spinner gone -> answer ready (was a bare except:)
                print('done gen ')
                chat_panes = driver.find_elements(
                    By.CLASS_NAME, 'md.svelte-9tftx4.chatbot')
                # Index 1 is the bot's pane (0 is the user's side of the chat).
                answer = chat_panes[1].text
                print(answer)
                driver.delete_all_cookies()
                driver.quit()
                return answer
    except Exception:  # was a bare except:; treat anything unexpected as failure
        return _give_up("Error Occurred ")
|
205 |
+
|
206 |
+
|
207 |
+
|
208 |
def do_ML_LLAMA70b(text:str, trycount:int, proxy:str):
|
209 |
starttime=time.time()
|
210 |
options = ChromeOptions()
|