Spaces:
Running
on
Zero
Running
on
Zero
Upload 2 files
Browse files
- genimage.py +3 -3
- llmdolphin.py +3 -0
genimage.py
CHANGED
@@ -6,7 +6,7 @@ def load_pipeline():
|
|
6 |
import torch
|
7 |
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
|
8 |
pipe = StableDiffusionXLPipeline.from_pretrained(
|
9 |
-
"John6666/
|
10 |
torch_dtype=torch.float16,
|
11 |
)
|
12 |
pipe.to(device)
|
@@ -43,8 +43,8 @@ def generate_image(prompt, neg_prompt):
|
|
43 |
}
|
44 |
try:
|
45 |
images = pipe(
|
46 |
-
prompt=prompt + ", masterpiece, best quality, very aesthetic, absurdres",
|
47 |
-
negative_prompt=neg_prompt + ", lowres, (bad), text, error, fewer, extra, missing, worst quality, jpeg artifacts, low quality, watermark, unfinished, displeasing, oldest, early, chromatic aberration, signature, extra digits, artistic error, username, scan, [abstract], photo, deformed, disfigured, low contrast, photo, deformed, disfigured, low contrast",
|
48 |
width=1024,
|
49 |
height=1024,
|
50 |
guidance_scale=7.5,
|
|
|
6 |
import torch
|
7 |
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
|
8 |
pipe = StableDiffusionXLPipeline.from_pretrained(
|
9 |
+
"John6666/t-ponynai3-v6-sdxl-spo-pcm",
|
10 |
torch_dtype=torch.float16,
|
11 |
)
|
12 |
pipe.to(device)
|
|
|
43 |
}
|
44 |
try:
|
45 |
images = pipe(
|
46 |
+
prompt=prompt + ", score_9, score_8_up, score_7_up, score_6_up, score_5_up, score_4_up, masterpiece, best quality, very aesthetic, absurdres",
|
47 |
+
negative_prompt=neg_prompt + ", score_4, score_3, score_2, score_1, bad hands, bad feet, lowres, (bad), text, error, fewer, extra, missing, worst quality, jpeg artifacts, low quality, watermark, unfinished, displeasing, oldest, early, chromatic aberration, signature, extra digits, artistic error, username, scan, [abstract], photo, deformed, disfigured, low contrast, photo, deformed, disfigured, low contrast",
|
48 |
width=1024,
|
49 |
height=1024,
|
50 |
guidance_scale=7.5,
|
llmdolphin.py
CHANGED
@@ -16,6 +16,9 @@ llm_models = {
|
|
16 |
"suzume-llama-3-8B-japanese.Q4_K_M.gguf": ["PrunaAI/lightblue-suzume-llama-3-8B-japanese-GGUF-smashed", MessagesFormatterType.LLAMA_3],
|
17 |
"suzume-llama-3-8B-multilingual-orpo-borda-top25.Q4_K_M.gguf": ["RichardErkhov/lightblue_-_suzume-llama-3-8B-multilingual-orpo-borda-top25-gguf", MessagesFormatterType.LLAMA_3],
|
18 |
"Bungo-L3-8B.Q5_K_M.gguf": ["backyardai/Bungo-L3-8B-GGUF", MessagesFormatterType.LLAMA_3],
|
|
|
|
|
|
|
19 |
"Lumimaid-v0.2-8B.i1-Q5_K_M.gguf": ["mradermacher/Lumimaid-v0.2-8B-i1-GGUF", MessagesFormatterType.LLAMA_3],
|
20 |
"Llama-3.1-8B-Instruct-abliterated_via_adapter.Q5_K_M.gguf": ["grimjim/Llama-3.1-8B-Instruct-abliterated_via_adapter-GGUF", MessagesFormatterType.LLAMA_3],
|
21 |
"Llama-Nephilim-Metamorphosis-v1-8B.Q5_K_M.gguf": ["grimjim/Llama-Nephilim-Metamorphosis-v1-8B-GGUF", MessagesFormatterType.LLAMA_3],
|
|
|
16 |
"suzume-llama-3-8B-japanese.Q4_K_M.gguf": ["PrunaAI/lightblue-suzume-llama-3-8B-japanese-GGUF-smashed", MessagesFormatterType.LLAMA_3],
|
17 |
"suzume-llama-3-8B-multilingual-orpo-borda-top25.Q4_K_M.gguf": ["RichardErkhov/lightblue_-_suzume-llama-3-8B-multilingual-orpo-borda-top25-gguf", MessagesFormatterType.LLAMA_3],
|
18 |
"Bungo-L3-8B.Q5_K_M.gguf": ["backyardai/Bungo-L3-8B-GGUF", MessagesFormatterType.LLAMA_3],
|
19 |
+
"IceCoffeeRP-7b.i1-Q5_K_M.gguf": ["mradermacher/IceCoffeeRP-7b-i1-GGUF", MessagesFormatterType.ALPACA],
|
20 |
+
"lumi-nemo-e2.0.Q4_K_M.gguf": ["mradermacher/lumi-nemo-e2.0-GGUF", MessagesFormatterType.MISTRAL],
|
21 |
+
"Lumimaid-v0.2-12B.i1-Q4_K_M.gguf": ["mradermacher/Lumimaid-v0.2-12B-i1-GGUF", MessagesFormatterType.LLAMA_3],
|
22 |
"Lumimaid-v0.2-8B.i1-Q5_K_M.gguf": ["mradermacher/Lumimaid-v0.2-8B-i1-GGUF", MessagesFormatterType.LLAMA_3],
|
23 |
"Llama-3.1-8B-Instruct-abliterated_via_adapter.Q5_K_M.gguf": ["grimjim/Llama-3.1-8B-Instruct-abliterated_via_adapter-GGUF", MessagesFormatterType.LLAMA_3],
|
24 |
"Llama-Nephilim-Metamorphosis-v1-8B.Q5_K_M.gguf": ["grimjim/Llama-Nephilim-Metamorphosis-v1-8B-GGUF", MessagesFormatterType.LLAMA_3],
|