<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>DeepView - WAN 2.2 I2V Video Generation</title>
<link rel="icon" type="image/x-icon" href="/static/favicon.ico">
<script src="https://cdn.tailwindcss.com"></script>
<script src="https://cdn.jsdelivr.net/npm/feather-icons/dist/feather.min.js"></script>
<script src="https://unpkg.com/feather-icons"></script>
<script>
document.addEventListener('DOMContentLoaded', async function() {
// Render the feather icons immediately, then initialize the Google AI client
feather.replace();
await initGoogleAI();
const generateBtn = document.getElementById('generate-btn');
const generationStatus = document.getElementById('generation-status');
const videoResult = document.getElementById('video-result');
const promptInput = document.getElementById('prompt');
// Handle image upload preview
const imageUpload = document.getElementById('image-upload');
const imagePreview = document.getElementById('image-preview');
const previewImg = document.getElementById('preview-img');
imageUpload.addEventListener('change', function(e) {
const file = e.target.files[0];
if (file) {
const reader = new FileReader();
reader.onload = function(event) {
previewImg.src = event.target.result;
imagePreview.classList.remove('hidden');
}
reader.readAsDataURL(file);
}
});
// Initialize Google AI Studio
function initGoogleAI() {
return new Promise((resolve) => {
gapi.load('client', () => {
gapi.client.init({
// NOTE: an API key embedded in client-side code is publicly visible; restrict it
// in Google AI Studio / Cloud Console or proxy requests through the backend.
'apiKey': 'AIzaSyChS51_WXMCmW2tbDUpby_mMVc_vrczRSg',
'discoveryDocs': ['https://generativelanguage.googleapis.com/$discovery/rest?version=v1beta']
}).then(resolve, (err) => {
// Resolve even on failure so the rest of the page can still initialize
console.error('Google AI client init failed:', err);
resolve();
});
});
});
}
async function generateVideoFromImage(image, prompt) {
try {
const formData = new FormData();
formData.append('image', image);
// Assumption: the backend accepts an optional "prompt" text field alongside the image
if (prompt) {
formData.append('prompt', prompt);
}
// Call our backend endpoint that interfaces with WAN 2.2 I2V
const response = await fetch('/generate-video', {
method: 'POST',
body: formData
});
if (!response.ok) {
throw new Error(`Video generation failed (HTTP ${response.status})`);
}
const result = await response.blob();
return URL.createObjectURL(result);
} catch (error) {
console.error('Generation error:', error);
throw error;
}
}
generateBtn.addEventListener('click', async function() {
// Optional: Use Google AI for prompt enhancement
if (promptInput.value.trim()) {
try {
// Assumption: the legacy text-bison generateText (PaLM) endpoint is retired, so this
// uses the generic gapi.client.request helper against the v1beta Gemini generateContent
// endpoint; the model name is illustrative and may need adjusting for this key.
const response = await gapi.client.request({
path: 'https://generativelanguage.googleapis.com/v1beta/models/gemini-1.5-flash:generateContent',
method: 'POST',
body: {
contents: [{
parts: [{ text: `Enhance this video generation prompt: "${promptInput.value}"` }]
}]
}
});
const enhanced = response.result?.candidates?.[0]?.content?.parts?.[0]?.text;
if (enhanced) {
promptInput.value = enhanced.trim();
}
} catch (error) {
console.log('Google AI prompt enhancement failed, using original prompt');
}
}
const prompt = promptInput.value.trim();
if (!prompt) {
alert('Please enter a video description');
return;
}
// GSAP animation for button click
gsap.fromTo(generateBtn,
{ scale: 1, boxShadow: "0 0 0 0 rgba(79, 70, 229, 0.7)" },
{
scale: 0.95,
boxShadow: "0 0 20px 10px rgba(79, 70, 229, 0)",
duration: 0.3,
ease: "power2.out",
onComplete: function() {
gsap.to(generateBtn, {
scale: 1,
boxShadow: "0 0 0 0 rgba(79, 70, 229, 0)",
duration: 0.2
});
}
}
);
// Text area pulse animation
gsap.fromTo(promptInput,
{ boxShadow: "0 0 0 0 rgba(79, 70, 229, 0.7)" },
{
boxShadow: "0 0 10px 5px rgba(79, 70, 229, 0)",
duration: 0.5,
repeat: 1,
yoyo: true,
ease: "power2.out"
}
);
// Require an uploaded image before entering the loading state
const imageFile = imageUpload.files[0];
if (!imageFile) {
alert('Please upload an image first');
return;
}
// Show loading state
generateBtn.disabled = true;
generationStatus.classList.remove('hidden');
videoResult.classList.add('hidden');
try {
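// Cosmetic progress animation (assumption: the backend reports no real progress),
// ramping the bar toward 90% while waiting; it is reset in the finally block below.
gsap.fromTo('#progress-bar', { width: '0%' }, { width: '90%', duration: 8, ease: 'power1.out' });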
// Generate the video from the uploaded image using WAN 2.2 I2V,
// passing along the (optionally AI-enhanced) prompt
const videoUrl = await generateVideoFromImage(imageFile, prompt);
// "Log" the generation by sending the prompt to Google AI.
// NOTE: this only issues a model request; nothing is persisted server-side. Replace
// with a real analytics/logging call if generation history needs to be recorded.
try {
await gapi.client.request({
path: 'https://generativelanguage.googleapis.com/v1beta/models/gemini-1.5-flash:generateContent',
method: 'POST',
body: {
contents: [{ parts: [{ text: `Log video generation with prompt: "${promptInput.value}"` }] }]
}
});
} catch (error) {
console.log('Google AI logging failed');
}
// Display the generated video with animation
videoResult.innerHTML = `
<video class="w-full rounded-lg" controls autoplay muted>
<source src="${videoUrl || 'https://example.com/wan-demo-video.mp4'}" type="video/mp4">
Your browser does not support the video tag.
</video>
<div class="mt-2 text-center">
<button class="bg-indigo-600 hover:bg-indigo-700 px-4 py-2 rounded-md text-sm font-medium" onclick="downloadVideo('${videoUrl}')">
Download Video
</button>
</div>
`;
// Un-hide the result container first so the reveal animations are actually visible
videoResult.classList.remove('hidden');
// Video reveal animation
gsap.from(videoResult, {
opacity: 0,
y: 20,
duration: 0.5,
ease: "back.out(1.7)"
});
// Download button animation
const downloadBtn = videoResult.querySelector('button');
gsap.from(downloadBtn, {
opacity: 0,
scale: 0.8,
delay: 0.3,
duration: 0.3,
ease: "power2.out"
});
} catch (error) {
console.error('Generation error:', error);
alert('Video generation failed. Please try again.');
} finally {
generateBtn.disabled = false;
generationStatus.classList.add('hidden');
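// Reset the cosmetic progress bar started above so the next run begins at 0%
gsap.set('#progress-bar', { width: '0%' });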
}
});
});
</script>
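<!-- three.js is required by Vanta but was not loaded; 0.134.0 is an assumed compatible version -->
<script src="https://cdn.jsdelivr.net/npm/three@0.134.0/build/three.min.js"></script>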
<script src="https://cdn.jsdelivr.net/npm/vanta@latest/dist/vanta.waves.min.js"></script>
<script>
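// Helper for the Download button rendered into #video-result: creates a temporary
// link so the browser saves the generated blob URL as an .mp4 file.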
function downloadVideo(url) {
const a = document.createElement('a');
a.href = url;
a.download = 'generated-video.mp4';
document.body.appendChild(a);
a.click();
document.body.removeChild(a);
}
</script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/gsap/3.11.4/gsap.min.js"></script>
<script src="https://apis.google.com/js/api.js"></script>
<style>
.gradient-text {
background: linear-gradient(90deg, #4f46e5, #ec4899);
-webkit-background-clip: text;
background-clip: text;
color: transparent;
}
.video-preview {
transition: all 0.3s ease;
box-shadow: 0 10px 25px -5px rgba(79, 70, 229, 0.3);
}
.video-preview:hover {
transform: translateY(-5px);
box-shadow: 0 20px 25px -5px rgba(79, 70, 229, 0.4);
}
.prompt-input:focus {
box-shadow: 0 0 0 3px rgba(79, 70, 229, 0.2);
}
</style>
</head>
<body class="bg-gray-900 text-white">
<div id="vanta-bg" class="fixed inset-0 -z-10"></div>
<nav class="bg-gray-900 bg-opacity-80 backdrop-blur-md border-b border-gray-800">
<div class="max-w-7xl mx-auto px-4 sm:px-6 lg:px-8">
<div class="flex items-center justify-between h-16">
<div class="flex items-center">
<div class="flex-shrink-0">
<h1 class="text-2xl font-bold gradient-text">DeepView</h1>
</div>
</div>
<div class="flex items-center space-x-4">
<a href="#" class="px-3 py-2 rounded-md text-sm font-medium hover:bg-gray-800">Home</a>
<a href="#" class="px-3 py-2 rounded-md text-sm font-medium hover:bg-gray-800">Gallery</a>
<a href="pricing.html" class="px-3 py-2 rounded-md text-sm font-medium hover:bg-gray-800">Pricing</a>
<a href="demo.html" class="px-3 py-2 rounded-md text-sm font-medium hover:bg-gray-800">Live Demo</a>
<button class="ml-4 bg-indigo-600 hover:bg-indigo-700 px-4 py-2 rounded-md text-sm font-medium transition duration-150 ease-in-out">
Sign In
</button>
</div>
</div>
</div>
</nav>
<main class="max-w-7xl mx-auto px-4 sm:px-6 lg:px-8 py-12">
<section class="text-center mb-16">
<h1 class="text-5xl font-bold mb-6 leading-tight">Transform Images Into <span class="gradient-text">Dynamic Videos</span></h1>
<p class="text-xl text-gray-300 max-w-3xl mx-auto">Powered by WAN 2.2 I2V technology, DeepView converts your static images into high-quality, realistic videos.</p>
</section>
<section class="bg-gray-800 bg-opacity-60 rounded-xl p-8 backdrop-blur-md border border-gray-700 mb-16">
<div class="max-w-3xl mx-auto">
<div class="mb-6">
<label for="prompt" class="block text-sm font-medium text-gray-300 mb-2">Describe your video</label>
<div class="mt-1 relative rounded-md shadow-sm">
<textarea id="prompt" name="prompt" rows="3" class="prompt-input block w-full bg-gray-700 border border-gray-600 rounded-md p-4 text-white placeholder-gray-400 focus:outline-none focus:ring-indigo-500 focus:border-indigo-500" placeholder="(Optional) Add motion prompts to guide the video generation..."></textarea>
</div>
</div>
<div class="grid grid-cols-1 md:grid-cols-3 gap-6 mb-6">
<div>
<label class="block text-sm font-medium text-gray-300 mb-2">Upload Image</label>
<div class="relative">
<input type="file" id="image-upload" accept="image/*" class="hidden">
<label for="image-upload" class="block w-full bg-gray-700 border border-gray-600 rounded-md p-4 text-white cursor-pointer hover:bg-gray-600 transition duration-150">
<div class="flex flex-col items-center justify-center">
<i data-feather="upload" class="w-6 h-6 mb-2"></i>
<span>Click to upload</span>
</div>
</label>
</div>
<div id="image-preview" class="mt-2 hidden">
<img id="preview-img" class="max-h-32 rounded-md">
</div>
</div>
<div>
<label class="block text-sm font-medium text-gray-300 mb-2">Style</label>
<select class="block w-full bg-gray-700 border border-gray-600 rounded-md p-2 text-white">
<option>Cinematic</option>
<option>Anime</option>
<option>Fantasy</option>
<option>Cyberpunk</option>
<option>Realistic</option>
</select>
</div>
<div>
<label class="block text-sm font-medium text-gray-300 mb-2">Duration</label>
<select class="block w-full bg-gray-700 border border-gray-600 rounded-md p-2 text-white">
<option>3 seconds</option>
<option>5 seconds</option>
<option>8 seconds</option>
</select>
</div>
</div>
<button id="generate-btn" class="w-full bg-indigo-600 hover:bg-indigo-700 py-3 px-4 rounded-md font-medium flex items-center justify-center space-x-2 transition duration-150 ease-in-out">
<i data-feather="zap"></i>
<span>Generate Video (3 credits)</span>
</button>
<div id="generation-status" class="hidden mt-4 p-4 bg-gray-700 rounded-lg">
<div class="flex items-center space-x-3">
<div class="relative w-6 h-6">
<div class="absolute inset-0 flex items-center justify-center">
<div class="w-4 h-4 bg-indigo-500 rounded-full animate-ping"></div>
</div>
<div class="absolute inset-0 flex items-center justify-center">
<div class="w-4 h-4 bg-indigo-600 rounded-full"></div>
</div>
</div>
<p class="text-gray-300">Generating your video...</p>
</div>
<div class="mt-3 h-1.5 w-full bg-gray-600 rounded-full overflow-hidden">
<div id="progress-bar" class="h-full bg-indigo-500 rounded-full" style="width: 0%"></div>
</div>
</div>
<div id="video-result" class="hidden mt-4"></div>
</div>
</section>
<section class="mb-16">
<h2 class="text-3xl font-bold mb-4 text-center">Generate Videos with WAN 2.2 I2V</h2>
<p class="text-xl text-gray-300 mb-8 text-center">Upload an image below and generate a video using WAN 2.2 I2V directly on this page.</p>
<iframe
src="https://huggingface.co/spaces/Wan-AI/Wan2.2-I2V-A14B"
width="100%"
height="900px"
style="border:none; display:block; margin:auto;"
class="rounded-xl shadow-xl mb-12"
></iframe>
<h2 class="text-2xl font-bold mb-8">Recent Creations</h2>
<div class="grid grid-cols-1 md:grid-cols-3 gap-6">
<div class="video-preview bg-gray-800 rounded-lg overflow-hidden">
<video class="w-full h-48 object-cover" controls>
<source src="https://example.com/video1.mp4" type="video/mp4">
</video>
<div class="p-4">
<h3 class="font-medium">Futuristic City</h3>
<p class="text-gray-400 text-sm">4 seconds β€’ Cinematic</p>
</div>
</div>
<div class="video-preview bg-gray-800 rounded-lg overflow-hidden">
<video class="w-full h-48 object-cover" controls>
<source src="https://example.com/video2.mp4" type="video/mp4">
</video>
<div class="p-4">
<h3 class="font-medium">Magical Forest</h3>
<p class="text-gray-400 text-sm">8 seconds β€’ Anime</p>
</div>
</div>
<div class="video-preview bg-gray-800 rounded-lg overflow-hidden">
<video class="w-full h-48 object-cover" controls>
<source src="https://example.com/video3.mp4" type="video/mp4">
</video>
<div class="p-4">
<h3 class="font-medium">Cyberpunk Alley</h3>
<p class="text-gray-400 text-sm">12 seconds β€’ Cyberpunk</p>
</div>
</div>
</div>
</section>
</main>
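<!-- Initialize the Vanta waves background on #vanta-bg. A minimal sketch: the color
     and wave settings below are illustrative defaults, not values from the original page. -->
<script>
if (window.VANTA && window.THREE) {
VANTA.WAVES({
el: '#vanta-bg',
color: 0x1f2347,
shininess: 25,
waveHeight: 15,
waveSpeed: 0.8,
zoom: 0.9
});
}
</script>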
</body>
</html>