import { pipeline, env } from 'https://cdn.jsdelivr.net/npm/@xenova/[email protected]';

// Disable local models to fetch models from the Hugging Face Hub
env.allowLocalModels = false;

// Reference the DOM elements
const status = document.getElementById('status');
const userInput = document.getElementById('user-input');
const outputContainer = document.getElementById('output');
const submitButton = document.getElementById('submit-button');

// Initialize variables
let generator;

// Function to load the model
async function loadModel() {
  try {
    status.textContent = 'Loading model...';
    // Load a LLaMA- or GPT-style model for text generation.
    // Note: the checkpoint must provide ONNX weights compatible with Transformers.js
    // (e.g. a Xenova-converted model); the gated meta-llama repository cannot be
    // fetched directly by the browser.
    generator = await pipeline('text-generation', 'meta-llama/Llama-2-7b-chat-hf'); // Replace with your model
    status.textContent = 'Model loaded. Ready to chat!';
  } catch (error) {
    console.error('Error loading model:', error);
    status.textContent = 'Failed to load model. Check the console for details.';
  }
}

// Load the model at startup
await loadModel();
// Add event listener for button clicks
submitButton.addEventListener('click', async () => {
  const inputText = userInput.value.trim();
  if (!inputText) {
    outputContainer.innerText = 'Please enter a prompt.';
    return;
  }

  // Guard against the model not having loaded successfully
  if (!generator) {
    outputContainer.innerText = 'The model is not loaded yet. Please wait or reload the page.';
    return;
  }

  // Show the user that the model is processing
  status.textContent = 'Generating response...';

  try {
    // Generate a response using the pipeline
    const response = await generator(inputText, {
      max_new_tokens: 100, // Adjust as needed for response length
      temperature: 0.7,    // Controls randomness
      top_p: 0.95,         // Nucleus sampling
    });

    // Display the generated text
    outputContainer.innerText = response[0].generated_text;
  } catch (error) {
    console.error('Error generating response:', error);
    outputContainer.innerText = 'Error generating response. Please try again.';
  }

  // Reset status to indicate the model is ready again
  status.textContent = 'Model loaded. Ready to chat!';
});
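
The script above assumes a page that exposes the four element IDs it queries (status, user-input, output, submit-button) and that loads the script as an ES module, which is required for the import statement and the top-level await. A minimal sketch of such a page, with a hypothetical app.js holding the code above, might look like this:

<!DOCTYPE html>
<html>
  <head>
    <meta charset="utf-8" />
    <title>Browser chat demo</title>
  </head>
  <body>
    <!-- Element IDs must match the ones queried in the script -->
    <p id="status">Loading...</p>
    <textarea id="user-input" placeholder="Type your prompt here"></textarea>
    <button id="submit-button">Send</button>
    <pre id="output"></pre>
    <!-- type="module" is required for the import and top-level await above -->
    <script type="module" src="app.js"></script>
  </body>
</html>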