ravithejads committed · Commit 80aebc5 · 1 parent: 2dfef4c
Upload 2 files

- app.py +69 -0
- requirements.txt +2 -0
app.py (ADDED)
@@ -0,0 +1,69 @@
import gradio as gr
import openai


def titledescriptiongenerator(text, openaikey, style_="viral", media="Twitter"):
    # Both outputs are Textboxes, so every return path yields a (title, description) pair.
    if not text:
        return "Please enter some text to generate a title and description.", ""
    if not openaikey:
        return "Please enter your OpenAI API key.", ""

    openai.api_key = openaikey

    # Ask the legacy Completion endpoint (openai<1.0) for a title.
    prompt = f"Generate a {style_} {media} post title for the text.\n{text}.\n\n"
    response = openai.Completion.create(
        model="text-davinci-003",
        prompt=prompt,
        temperature=0.7,
        max_tokens=256,
        top_p=1,
        frequency_penalty=0,
        presence_penalty=0,
    )
    title = response["choices"][0]["text"].strip()

    # Second call with the same settings, this time for a short description.
    prompt = f"Generate a {style_} {media} post short description for the text.\n{text}.\n\n"
    response = openai.Completion.create(
        model="text-davinci-003",
        prompt=prompt,
        temperature=0.7,
        max_tokens=256,
        top_p=1,
        frequency_penalty=0,
        presence_penalty=0,
    )
    description = response["choices"][0]["text"].strip()

    return title, description


callback = gr.CSVLogger()

with gr.Blocks() as demo:
    gr.Markdown(
        """
        # TitleDescGen!
        Generate a title and a short description for your social media post.
        """)
    with gr.Row():
        with gr.Column():
            text = gr.Textbox(lines=5, placeholder="PM Modi to visit Tripura tomorrow; govt expects 72,000 to attend his public meeting", label="Enter text")
            style_ = gr.Dropdown(["formal", "viral"], label="Select a style for the title/description", value="viral")
            media = gr.Dropdown(["Twitter", "Linkedin", "Blog"], label="Select a medium", value="Twitter")
            openaikey = gr.Textbox(lines=1, label="Enter OpenAI Key")
            text_button = gr.Button("Submit")
        with gr.Column():
            title_output = gr.Textbox(label="Title")
            description_output = gr.Textbox(label="Short Description")
            flag_btn = gr.Button("Flag")

    # This needs to be called at some point prior to the first call to callback.flag()
    callback.setup([text, openaikey, style_, media, title_output, description_output], "flagged_data_points")

    text_button.click(titledescriptiongenerator, inputs=[text, openaikey, style_, media], outputs=[title_output, description_output])
    # We can choose which components to flag -- in this case, we'll flag all of them
    flag_btn.click(lambda *args: callback.flag(args), [text, openaikey, style_, media, title_output, description_output], None, preprocess=False)
    gr.Examples([["Language model (LM) pretraining can learn various knowledge from text corpora, helping downstream tasks. However, existing methods such as BERT model a single document, and do not capture dependencies or knowledge that span across documents. In this work, we propose LinkBERT, an LM pretraining method that leverages links between documents, e.g., hyperlinks. Given a text corpus, we view it as a graph of documents and create LM inputs by placing linked documents in the same context. We then pretrain the LM with two joint self-supervised objectives: masked language modeling and our new proposal, document relation prediction. We show that LinkBERT outperforms BERT on various downstream tasks across two domains: the general domain (pretrained on Wikipedia with hyperlinks) and biomedical domain (pretrained on PubMed with citation links). LinkBERT is especially effective for multi-hop reasoning and few-shot QA (+5% absolute improvement on HotpotQA and TriviaQA), and our biomedical LinkBERT sets new states of the art on various BioNLP tasks (+7% on BioASQ and USMLE)."]],
                inputs=[text])

demo.launch()
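For a quick smoke test without the UI, the handler can be called directly. A minimal sketch, run in a Python session where titledescriptiongenerator is already in scope (pasting the function definition works; importing app.py as-is would also start the Gradio server, since demo.launch() runs at module level). The OPENAI_API_KEY environment variable is an illustrative choice here, not something app.py reads itself:

import os

# Hypothetical smoke test: call the handler with the same arguments the UI passes.
# Assumes an openai<1.0 client and access to the text-davinci-003 model.
title, description = titledescriptiongenerator(
    "PM Modi to visit Tripura tomorrow; govt expects 72,000 to attend his public meeting",
    os.environ["OPENAI_API_KEY"],  # illustrative; the app normally takes the key from a textbox
    style_="formal",
    media="Twitter",
)
print(title)
print(description)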
requirements.txt (ADDED)
@@ -0,0 +1,2 @@
openai<1.0  # app.py uses the legacy openai.Completion API, removed in openai 1.0
gradio
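The pin above matters because openai.Completion no longer exists in openai 1.x. If the Space were ever migrated to the 1.x client, each completion call would look roughly like the sketch below. The wrapper name complete is illustrative, and gpt-3.5-turbo-instruct stands in for text-davinci-003, which has since been retired:

from openai import OpenAI


def complete(prompt: str, openaikey: str) -> str:
    # openai>=1.0 style: an explicit client object instead of the module-level api_key.
    client = OpenAI(api_key=openaikey)
    response = client.completions.create(
        model="gpt-3.5-turbo-instruct",  # stand-in model; text-davinci-003 is no longer served
        prompt=prompt,
        temperature=0.7,
        max_tokens=256,
        top_p=1,
        frequency_penalty=0,
        presence_penalty=0,
    )
    return response.choices[0].text.strip()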