jeremyLE-Ekimetrics committed on
Commit
073b060
1 Parent(s): 335964b
Files changed (2) hide show
  1. main.py +25 -6
  2. requirements.txt +3 -1
main.py CHANGED
@@ -1,14 +1,16 @@
1
- from diffusers import AutoPipelineForText2Image
2
  import torch
3
  import numpy as np
4
  from openai import OpenAI
5
  import os
 
6
  client = OpenAI()
7
 
8
  import streamlit as st
9
  from PIL import Image
 
10
 
11
- @st.cache_data(ttl=3600)
12
  def get_prompt_to_guess():
13
  response = client.chat.completions.create(
14
  model="gpt-3.5-turbo",
@@ -28,9 +30,18 @@ def get_model():
28
def generate_image(_pipe, prompt):
    """Produce a single image for *prompt* with the supplied diffusers pipeline.

    The leading underscore on ``_pipe`` tells Streamlit's caching layer not to
    attempt hashing the (unhashable) pipeline object.  One inference step and
    zero guidance match the sdxl-turbo fast-generation recipe.
    """
    result = _pipe(
        prompt=prompt,
        num_inference_steps=1,
        guidance_scale=0.0,
        seed=1,
    )
    return result.images[0]
30
 
 
 
 
 
 
 
31
  if "submit_guess" not in st.session_state:
32
  st.session_state["submit_guess"] = False
33
 
 
 
 
34
def check_prompt(prompt, prompt_to_guess):
    """Return True when the player's guess matches the hidden prompt.

    Surrounding whitespace is ignored; the comparison is otherwise exact
    (case- and accent-sensitive).
    """
    guess = prompt.strip()
    target = prompt_to_guess.strip()
    return guess == target
36
 
@@ -40,13 +51,12 @@ im_to_guess = generate_image(pipe, prompt)
40
  h, w = im_to_guess.size
41
 
42
  st.title("Guess the prompt by Ekimetrics")
43
- st.text("Rules : guess the prompt (in French) to generate the left image with the sdxl turbo model")
44
  st.text("Hint : use right side to help you guess the prompt by testing some")
45
  st.text("Disclosure : this runs on CPU so generation are quite slow (even with sdxl turbo)")
46
  col_1, col_2 = st.columns([0.5, 0.5])
47
  with col_1:
48
  st.header("GUESS THE PROMPT")
49
- st.image(im_to_guess)
50
  guessed_prompt = st.text_area("Input your guess prompt")
51
  st.session_state["submit_guess"] = st.button("guess the prompt")
52
  if st.session_state["submit_guess"]:
@@ -54,14 +64,23 @@ with col_1:
54
  st.text("Good prompt ! test again in 24h !")
55
  else:
56
  st.text("wrong prompt !")
 
 
 
 
 
 
 
 
57
 
58
  with col_2:
59
  st.header("TEST THE PROMPT")
60
- testing_prompt = st.text_area("Input your testing prompt")
61
- st.session_state["testing"] = st.button("test the prompt")
62
  if st.session_state["testing"]:
63
  im = generate_image(pipe, testing_prompt)
64
  st.session_state["testing"] = False
65
  else:
66
  im = np.zeros([h,w,3])
 
 
67
  st.image(im)
 
 
1
+
2
  import torch
3
  import numpy as np
4
  from openai import OpenAI
5
  import os
6
+
7
  client = OpenAI()
8
 
9
  import streamlit as st
10
  from PIL import Image
11
+ from diffusers import AutoPipelineForText2Image
12
 
13
+ @st.cache_data(ttl=600)
14
  def get_prompt_to_guess():
15
  response = client.chat.completions.create(
16
  model="gpt-3.5-turbo",
 
30
def generate_image(_pipe, prompt):
    """Generate one image for *prompt* with the (cached) diffusers pipeline.

    Fix: diffusers pipeline ``__call__`` does not take a ``seed=`` keyword;
    deterministic output must be requested through a seeded
    ``torch.Generator`` passed as ``generator=``.  The single inference step
    and zero guidance scale are the documented sdxl-turbo settings.

    The underscore prefix on ``_pipe`` keeps Streamlit's cache hashing from
    touching the pipeline object.
    """
    generator = torch.Generator().manual_seed(1)
    return _pipe(
        prompt=prompt,
        num_inference_steps=1,
        guidance_scale=0.0,
        generator=generator,
    ).images[0]
32
 
33
+ if "ask_answer" not in st.session_state:
34
+ st.session_state["ask_answer"] = False
35
+
36
+ if "testing" not in st.session_state:
37
+ st.session_state["testing"] = False
38
+
39
  if "submit_guess" not in st.session_state:
40
  st.session_state["submit_guess"] = False
41
 
42
+ if "real_ask_answer" not in st.session_state:
43
+ st.session_state["real_ask_answer"] = False
44
+
45
def check_prompt(prompt, prompt_to_guess):
    """Compare a guessed prompt to the reference prompt.

    Both strings are trimmed of leading/trailing whitespace before an exact
    equality check — no case folding, no fuzzy matching.
    """
    trimmed = [text.strip() for text in (prompt, prompt_to_guess)]
    return trimmed[0] == trimmed[1]
47
 
 
51
  h, w = im_to_guess.size
52
 
53
  st.title("Guess the prompt by Ekimetrics")
54
+ st.text("Rules : guess the prompt (in French, with no fault) to generate the left image with the sdxl turbo model")
55
  st.text("Hint : use right side to help you guess the prompt by testing some")
56
  st.text("Disclosure : this runs on CPU so generation are quite slow (even with sdxl turbo)")
57
  col_1, col_2 = st.columns([0.5, 0.5])
58
  with col_1:
59
  st.header("GUESS THE PROMPT")
 
60
  guessed_prompt = st.text_area("Input your guess prompt")
61
  st.session_state["submit_guess"] = st.button("guess the prompt")
62
  if st.session_state["submit_guess"]:
 
64
  st.text("Good prompt ! test again in 24h !")
65
  else:
66
  st.text("wrong prompt !")
67
+ st.session_state["ask_answer"] = st.button("get the answer")
68
+ if st.session_state["ask_answer"]:
69
+ st.text(f"Cheater ! but here is the prompt : \n {prompt}")
70
+ st.image(im_to_guess)
71
+
72
+
73
+ if "testing" not in st.session_state:
74
+ st.session_state["testing"] = False
75
 
76
with col_2:
    st.header("TEST THE PROMPT")
    # Fix: the widgets must be created BEFORE the branch that reads them —
    # in the committed ordering, generate_image(pipe, testing_prompt) ran
    # before `testing_prompt` was assigned, raising NameError on the first
    # click of the "test the prompt" button.
    testing_prompt = st.text_area("Input your testing prompt")
    st.session_state["testing"] = st.button("test the prompt")
    if st.session_state["testing"]:
        im = generate_image(pipe, testing_prompt)
        # Reset the flag so a rerun without a click does not regenerate.
        st.session_state["testing"] = False
    else:
        # Placeholder: a black image the same size as the one to guess.
        im = np.zeros([h, w, 3])
    st.image(im)
86
+
requirements.txt CHANGED
@@ -3,4 +3,6 @@ transformers
3
  accelerate
4
  streamlit
5
  torch
6
- Pillow
 
 
 
3
  accelerate
4
  streamlit
5
  torch
6
+ Pillow
7
+ openai
8
+ diffusers