Implement real-time streaming with Server-Sent Events for live updates, notifications, and AI response streaming.
# Server-Sent Events for Google Antigravity
Server-Sent Events provide efficient one-way real-time communication. This guide covers SSE implementation optimized for Google Antigravity and Next.js applications.
## SSE Endpoint Implementation
Create robust SSE endpoints:
```typescript
// app/api/events/route.ts
import { NextRequest } from "next/server";
export const runtime = "edge";
export async function GET(request: NextRequest) {
  const encoder = new TextEncoder();

  // Track teardown state: enqueueing into (or closing) a controller that is
  // already closed/errored throws, which previously crashed the heartbeat
  // interval and the subscription callback after a client disconnect.
  let closed = false;
  let heartbeat: ReturnType<typeof setInterval> | undefined;
  let subscription: { unsubscribe: () => void } | undefined;

  const stream = new ReadableStream({
    async start(controller) {
      // Idempotent teardown: stop the heartbeat, drop the subscription,
      // and close the controller exactly once.
      const cleanup = () => {
        if (closed) return;
        closed = true;
        if (heartbeat !== undefined) clearInterval(heartbeat);
        subscription?.unsubscribe();
        try {
          controller.close();
        } catch {
          // Controller already closed or errored — nothing left to do.
        }
      };

      // Safely emit one SSE frame; a failed enqueue means the client is
      // gone, so tear down instead of throwing out of a timer callback.
      const send = (frame: string) => {
        if (closed) return;
        try {
          controller.enqueue(encoder.encode(frame));
        } catch {
          cleanup();
        }
      };

      // The client may have disconnected before the stream even started.
      if (request.signal.aborted) {
        cleanup();
        return;
      }

      // Initial handshake event so clients can confirm the connection.
      send(`event: connected\ndata: {"status": "connected"}\n\n`);

      // Comment-only heartbeat (lines starting with ':') keeps proxies and
      // load balancers from timing out an idle connection.
      heartbeat = setInterval(() => {
        send(`: heartbeat\n\n`);
      }, 30000);

      // Subscribe to events (e.g., from Redis pub/sub).
      subscription = await subscribeToEvents((event) => {
        const data = JSON.stringify(event);
        const type = (event as { type: string }).type;
        send(`event: ${type}\ndata: ${data}\n\n`);
      });

      // Tear everything down when the client disconnects.
      request.signal.addEventListener("abort", cleanup);
    },
    // Also clean up if the consumer cancels the stream directly.
    cancel() {
      closed = true;
      if (heartbeat !== undefined) clearInterval(heartbeat);
      subscription?.unsubscribe();
    },
  });

  return new Response(stream, {
    headers: {
      "Content-Type": "text/event-stream",
      "Cache-Control": "no-cache, no-transform",
      "Connection": "keep-alive",
      // Disable proxy buffering (nginx) so events flush immediately.
      "X-Accel-Buffering": "no",
    },
  });
}
// Simulated subscription function
async function subscribeToEvents(callback: (event: unknown) => void) {
  // In production, use Redis pub/sub or similar. Here we simply emit a
  // synthetic "update" event every five seconds.
  const timer = setInterval(
    () =>
      callback({
        type: "update",
        data: { timestamp: Date.now() },
      }),
    5000
  );

  // Hand back a handle the caller uses to stop the emitter.
  return { unsubscribe: () => clearInterval(timer) };
}
```
## AI Streaming Response
Implement streaming for AI responses:
```typescript
// app/api/ai/stream/route.ts
import { OpenAI } from "openai";
import { NextRequest } from "next/server";
const openai = new OpenAI();

export async function POST(request: NextRequest) {
  // Parse and validate the body up front: a malformed JSON payload or a
  // missing prompt previously escaped as an unhandled 500.
  let prompt: unknown;
  let model = "gpt-4";
  try {
    const body = await request.json();
    prompt = body?.prompt;
    if (typeof body?.model === "string") model = body.model;
  } catch {
    return new Response(JSON.stringify({ error: "Invalid JSON body" }), {
      status: 400,
      headers: { "Content-Type": "application/json" },
    });
  }
  if (typeof prompt !== "string" || prompt.length === 0) {
    return new Response(JSON.stringify({ error: "Missing prompt" }), {
      status: 400,
      headers: { "Content-Type": "application/json" },
    });
  }

  const encoder = new TextEncoder();
  const stream = new ReadableStream({
    async start(controller) {
      try {
        const completion = await openai.chat.completions.create({
          model,
          messages: [{ role: "user", content: prompt }],
          stream: true,
        });
        // Relay each token delta to the client as its own SSE frame.
        for await (const chunk of completion) {
          const content = chunk.choices[0]?.delta?.content;
          if (content) {
            controller.enqueue(
              encoder.encode(`data: ${JSON.stringify({ content })}\n\n`)
            );
          }
        }
        // Sentinel so clients know the stream finished normally.
        controller.enqueue(encoder.encode(`data: [DONE]\n\n`));
        controller.close();
      } catch (error) {
        // Log the real failure server-side; the client only gets a
        // generic error event (don't leak provider details).
        console.error("AI stream failed:", error);
        try {
          controller.enqueue(
            encoder.encode(`event: error\ndata: ${JSON.stringify({ error: "Stream failed" })}\n\n`)
          );
          controller.close();
        } catch {
          // Client already disconnected — nothing more to send.
        }
      }
    },
  });

  return new Response(stream, {
    headers: {
      "Content-Type": "text/event-stream",
      "Cache-Control": "no-cache",
      "Connection": "keep-alive",
      // Disable proxy buffering so tokens stream immediately
      // (consistent with the /api/events SSE route).
      "X-Accel-Buffering": "no",
    },
  });
}
```
## Client-Side SSE Hook
Create reusable SSE hooks:
```tsx
// hooks/useSSE.ts
"use client";
import { useEffect, useState, useCallback, useRef } from "react";
/** Configuration for the `useSSE` hook: event callbacks plus reconnect tuning. */
interface SSEOptions {
/** Called for every unnamed `message` event received from the server. */
onMessage?: (event: MessageEvent) => void;
/** Called when the underlying EventSource reports an error. */
onError?: (error: Event) => void;
/** Called when the connection opens (including after a reconnect). */
onOpen?: () => void;
/** Delay in milliseconds before a reconnect attempt (default: 3000). */
reconnectInterval?: number;
/** Maximum number of consecutive reconnect attempts (default: 5). */
maxRetries?: number;
}
/**
 * React hook that manages an EventSource connection to an SSE endpoint,
 * with automatic reconnection (bounded by `maxRetries`) and manual
 * disconnect/reconnect controls.
 */
export function useSSE(url: string, options: SSEOptions = {}) {
  const [isConnected, setIsConnected] = useState(false);
  const [lastEvent, setLastEvent] = useState<MessageEvent | null>(null);
  const [error, setError] = useState<Event | null>(null);
  const eventSourceRef = useRef<EventSource | null>(null);
  const retriesRef = useRef(0);
  // Pending reconnect timer. Previously this was never tracked, so a queued
  // reconnect could fire after unmount or an explicit disconnect() —
  // re-opening the stream and calling setState on an unmounted component.
  const reconnectTimerRef = useRef<ReturnType<typeof setTimeout> | null>(null);

  const {
    onMessage,
    onError,
    onOpen,
    reconnectInterval = 3000,
    maxRetries = 5,
  } = options;

  // Cancel any scheduled reconnect attempt.
  const clearReconnectTimer = () => {
    if (reconnectTimerRef.current !== null) {
      clearTimeout(reconnectTimerRef.current);
      reconnectTimerRef.current = null;
    }
  };

  const connect = useCallback(() => {
    clearReconnectTimer();
    if (eventSourceRef.current) {
      eventSourceRef.current.close();
    }
    const eventSource = new EventSource(url);
    eventSourceRef.current = eventSource;
    eventSource.onopen = () => {
      setIsConnected(true);
      setError(null);
      // Successful open resets the retry budget.
      retriesRef.current = 0;
      onOpen?.();
    };
    eventSource.onmessage = (event) => {
      setLastEvent(event);
      onMessage?.(event);
    };
    eventSource.onerror = (err) => {
      setIsConnected(false);
      setError(err);
      onError?.(err);
      // EventSource's built-in retry is disabled by closing; we schedule
      // our own bounded reconnect instead.
      eventSource.close();
      if (retriesRef.current < maxRetries) {
        retriesRef.current++;
        reconnectTimerRef.current = setTimeout(connect, reconnectInterval);
      }
    };
  }, [url, onMessage, onError, onOpen, reconnectInterval, maxRetries]);

  useEffect(() => {
    connect();
    return () => {
      // Cancel any pending reconnect before closing, otherwise the timer
      // would re-open the stream after unmount.
      clearReconnectTimer();
      eventSourceRef.current?.close();
    };
  }, [connect]);

  const disconnect = useCallback(() => {
    clearReconnectTimer();
    eventSourceRef.current?.close();
    setIsConnected(false);
  }, []);

  return {
    isConnected,
    lastEvent,
    error,
    disconnect,
    reconnect: connect,
  };
}
// Usage example
function NotificationStream() {
  // Parse and log each incoming event payload.
  const handleMessage = (event: MessageEvent) => {
    const payload = JSON.parse(event.data);
    console.log("Received:", payload);
  };

  const { isConnected, lastEvent } = useSSE("/api/events", {
    onMessage: handleMessage,
  });

  const statusLabel = isConnected ? "Connected" : "Disconnected";

  return (
    <div>
      <p>Status: {statusLabel}</p>
      {lastEvent && <p>Last event: {lastEvent.data}</p>}
    </div>
  );
}
```
## Best Practices
When implementing SSE in Antigravity projects, use edge runtime for better performance, implement heartbeats to detect stale connections, handle reconnection gracefully on the client, use event types to categorize messages, consider EventSource polyfills for older browsers, implement proper cleanup on component unmount, and monitor connection health with logging. This SSE prompt is ideal for developers working on real-time dashboards, live notifications, and streaming AI responses.
By using this prompt, you can save hours of manual coding and ensure best practices are followed from the start. It's particularly valuable for teams looking to maintain consistency across their SSE implementations.
All prompts on the Antigravity AI Directory are free to use for both personal and commercial projects — no attribution is required, though it's always appreciated.
This prompt works excellently with Claude, ChatGPT, Cursor, GitHub Copilot, and other modern AI coding assistants. For best results, use models with large context windows.
You can modify the prompt by adding specific requirements, constraints, or preferences. For SSE projects, consider mentioning your framework version, coding style, and any specific libraries you're using.