Update app.py
app.py CHANGED
@@ -141,7 +141,7 @@ mixtral_model = "mistralai/Mixtral-8x7B-Instruct-v0.1"
 
 pipe = pipeline("text-generation", model=zephyr_model, torch_dtype=torch.bfloat16, device_map="auto")
 
-agent_maker_sys = f"""
+standard_sys = f"""
 You are an AI whose job is to help users create their own music which its genre will reflect the character or scene from an image described by users.
 In particular, you need to respond succintly with few musical words, in a friendly tone, write a musical prompt for a music generation model.
 
@@ -150,14 +150,17 @@ Immediately STOP after that. It should be EXACTLY in this format:
 "A grand orchestral arrangement with thunderous percussion, epic brass fanfares, and soaring strings, creating a cinematic atmosphere fit for a heroic battle"
 """
 
-instruction = f"""
+@spaces.GPU(enable_queue=True)
+def get_musical_prompt(user_prompt):
+
+    agent_maker_sys = standard_sys
+
+    instruction = f"""
 <|system|>
 {agent_maker_sys}</s>
 <|user|>
 """
-
-@spaces.GPU(enable_queue=True)
-def get_musical_prompt(user_prompt):
+
     prompt = f"{instruction.strip()}\n{user_prompt}</s>"
    outputs = pipe(prompt, max_new_tokens=256, do_sample=True, temperature=0.7, top_k=50, top_p=0.95)
    pattern = r'\<\|system\|\>(.*?)\<\|assistant\|\>'
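The diff is cut off right after the `pattern` assignment, so the remainder of `get_musical_prompt` is not shown. For orientation only, here is a minimal sketch of how output from a Zephyr-style prompt like this is commonly post-processed; the `extract_assistant_reply` helper and the `re.sub` call are assumptions for illustration, not the Space's actual code:

import re

def extract_assistant_reply(generated_text):
    # Hypothetical cleanup: drop the echoed prompt (everything from
    # <|system|> through <|assistant|>) so only the generated musical
    # prompt text remains.
    pattern = r'\<\|system\|\>(.*?)\<\|assistant\|\>'
    return re.sub(pattern, '', generated_text, flags=re.DOTALL).strip()

# Example use with the pipeline's output, e.g.:
# musical_prompt = extract_assistant_reply(outputs[0]["generated_text"])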