AINative Video API — Frontend Integration Guide

Video Integration

Table of Contents

Overview

This guide walks you through integrating video calling functionality into your frontend application using the AINative Video API and WebRTC.

What You'll Build:

  • Video session creation and management
  • Real-time audio/video communication
  • Screen sharing capabilities
  • Recording controls

Supported Platforms:

  • Web browsers (Chrome, Firefox, Safari, Edge)
  • React, Vue, Angular, or vanilla JavaScript
  • Mobile (future: React Native, Flutter)

Prerequisites

Knowledge Requirements

  • Basic understanding of JavaScript/TypeScript
  • Familiarity with Promises and async/await
  • REST API concepts
  • (Optional) Framework experience (React, Vue, etc.)

Technical Requirements

{
"browser": "Chrome 80+, Firefox 75+, Safari 14+, Edge 80+",
"permissions": ["camera", "microphone"],
"network": "UDP/TCP ports for WebRTC",
"libraries": {
"optional": [
"adapter.js (WebRTC polyfill)",
"socket.io-client (future WebSocket support)"
]
}
}

Get API Credentials

  1. Sign up at https://ainative.studio
  2. Navigate to Settings > API Keys
  3. Create a new API key
  4. Store securely (never commit to version control)

Quick Start

1. HTML Setup

<!DOCTYPE html>
<!-- Minimal quick-start page: two <video> elements (local preview + remote peer)
     and four control buttons. All behavior is wired up by video-client.js. -->
<html>
<head>
<title>AINative Video Call</title>
</head>
<body>
<div id="video-container">
<!-- Local preview is muted to avoid audio feedback; playsinline prevents
     iOS Safari from forcing fullscreen playback -->
<video id="localVideo" autoplay muted playsinline></video>
<video id="remoteVideo" autoplay playsinline></video>
</div>

<div id="controls">
<!-- NOTE(review): endCall starts disabled, but the sample script below never
     enables it — wire that up when a call becomes active -->
<button id="startCall">Start Call</button>
<button id="endCall" disabled>End Call</button>
<button id="toggleMute">Mute</button>
<button id="toggleVideo">Stop Video</button>
</div>

<script src="video-client.js"></script>
</body>
</html>

2. JavaScript Client

class VideoCall {
  /**
   * Minimal one-to-one call client for the AINative Video API.
   * @param {string} apiUrl - Base API URL (e.g. 'https://api.ainative.studio/api/v1').
   * @param {string} accessToken - Bearer token sent on every request.
   */
  constructor(apiUrl, accessToken) {
    this.apiUrl = apiUrl;
    this.accessToken = accessToken;
    this.pc = null;          // active RTCPeerConnection
    this.localStream = null; // camera/mic MediaStream
    this.sessionId = null;   // server-issued session id
  }

  /**
   * Capture media, create and join a session, then negotiate WebRTC.
   * @param {string} sessionTitle - Title for the new session.
   * @throws {Error} when media access or any API call fails.
   */
  async startCall(sessionTitle) {
    // 1. Get user media
    this.localStream = await navigator.mediaDevices.getUserMedia({
      video: true,
      audio: true
    });
    document.getElementById('localVideo').srcObject = this.localStream;

    // 2. Create session
    const session = await this.createSession(sessionTitle);
    this.sessionId = session.session_id;

    // 3. Join session and get ICE servers
    const { ice_servers } = await this.joinSession(this.sessionId);

    // 4. Setup WebRTC
    await this.setupPeerConnection(ice_servers);

    console.log('Call started!');
  }

  /**
   * POST /video/sessions.
   * FIX: non-2xx responses now raise instead of the error payload being
   * parsed and returned as if it were a session object.
   */
  async createSession(title) {
    const response = await fetch(`${this.apiUrl}/video/sessions`, {
      method: 'POST',
      headers: {
        'Authorization': `Bearer ${this.accessToken}`,
        'Content-Type': 'application/json'
      },
      body: JSON.stringify({ title, max_participants: 10 })
    });
    if (!response.ok) {
      throw new Error(`Failed to create session (HTTP ${response.status})`);
    }
    return await response.json();
  }

  /** POST /video/sessions/:id/join — returns ICE servers for WebRTC setup. */
  async joinSession(sessionId) {
    const response = await fetch(
      `${this.apiUrl}/video/sessions/${sessionId}/join`,
      {
        method: 'POST',
        headers: { 'Authorization': `Bearer ${this.accessToken}` }
      }
    );
    if (!response.ok) {
      throw new Error(`Failed to join session (HTTP ${response.status})`);
    }
    return await response.json();
  }

  /**
   * Wire up the RTCPeerConnection: send local tracks, render the first
   * remote stream, then exchange SDP with the tracks endpoint.
   */
  async setupPeerConnection(iceServers) {
    this.pc = new RTCPeerConnection({ iceServers });

    // Add local tracks
    this.localStream.getTracks().forEach(track => {
      this.pc.addTrack(track, this.localStream);
    });

    // Handle remote tracks
    this.pc.ontrack = (event) => {
      document.getElementById('remoteVideo').srcObject = event.streams[0];
    };

    // Create and send offer
    const offer = await this.pc.createOffer();
    await this.pc.setLocalDescription(offer);

    const trackResponse = await fetch(
      `${this.apiUrl}/video/sessions/${this.sessionId}/tracks`,
      {
        method: 'POST',
        headers: {
          'Authorization': `Bearer ${this.accessToken}`,
          'Content-Type': 'application/json'
        },
        body: JSON.stringify({
          sdp: this.pc.localDescription.sdp,
          type: 'video'
        })
      }
    );
    // FIX: surface negotiation failures instead of destructuring an error body
    if (!trackResponse.ok) {
      throw new Error(`Track negotiation failed (HTTP ${trackResponse.status})`);
    }

    const { sdp_answer } = await trackResponse.json();
    await this.pc.setRemoteDescription({
      type: 'answer',
      sdp: sdp_answer
    });
  }

  /**
   * Stop capture and close the connection.
   * FIX: state is now reset so the instance can start a fresh call later.
   */
  async endCall() {
    if (this.localStream) {
      this.localStream.getTracks().forEach(track => track.stop());
    }
    if (this.pc) {
      this.pc.close();
    }
    this.localStream = null;
    this.pc = null;
    this.sessionId = null;
    console.log('Call ended');
  }
}

// Usage
const videoCall = new VideoCall(
'https://api.ainative.studio/api/v1',
'YOUR_ACCESS_TOKEN'
);

document.getElementById('startCall').onclick = () => {
videoCall.startCall('Quick Call');
};

document.getElementById('endCall').onclick = () => {
videoCall.endCall();
};

Step-by-Step Integration

Step 1: Authentication

First, authenticate the user to get an access token:

/**
 * Authenticate and cache the access token in sessionStorage.
 * FIX: the original destructured the token out of ANY response body —
 * a 401/422 error payload would be silently stored as `undefined`.
 * @param {string} email
 * @param {string} password
 * @returns {Promise<string>} the access token.
 * @throws {Error} when the server rejects the credentials.
 */
async function login(email, password) {
  const response = await fetch('https://api.ainative.studio/api/v1/auth/login', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ email, password })
  });

  if (!response.ok) {
    throw new Error(`Login failed (HTTP ${response.status})`);
  }

  const { access_token, expires_in } = await response.json();

  // Store token securely (sessionStorage clears when the tab closes)
  sessionStorage.setItem('access_token', access_token);
  sessionStorage.setItem('token_expires', String(Date.now() + expires_in * 1000));

  return access_token;
}

/**
 * Return a non-expired access token, refreshing if needed.
 * FIX: sessionStorage values are strings — the expiry is now parsed
 * explicitly instead of relying on implicit `>` coercion, and a missing
 * or malformed expiry also triggers a refresh.
 * @returns {Promise<string>}
 */
async function getValidToken() {
  const token = sessionStorage.getItem('access_token');
  const expires = Number(sessionStorage.getItem('token_expires'));

  if (!token || !Number.isFinite(expires) || Date.now() > expires) {
    // Token missing or expired - refresh or re-login
    return await refreshToken();
  }

  return token;
}

Step 2: Request Permissions

Request camera and microphone access before creating a session:

/**
 * Prompt the user for camera + microphone access with sensible defaults
 * (720p front camera, echo/noise processing on the mic).
 * @returns {Promise<MediaStream>} the granted stream.
 * @throws the original getUserMedia error after notifying the user.
 */
async function requestMediaPermissions() {
  const constraints = {
    video: {
      width: { ideal: 1280 },
      height: { ideal: 720 },
      facingMode: 'user'
    },
    audio: {
      echoCancellation: true,
      noiseSuppression: true,
      autoGainControl: true
    }
  };

  try {
    const stream = await navigator.mediaDevices.getUserMedia(constraints);
    console.log('Permissions granted');
    return stream;
  } catch (error) {
    switch (error.name) {
      case 'NotAllowedError':
        alert('Please grant camera and microphone permissions');
        break;
      case 'NotFoundError':
        alert('No camera or microphone found');
        break;
      default:
        console.error('Media error:', error);
    }
    throw error;
  }
}

Step 3: Create Video Session

/**
 * Create a new video session via POST /video/sessions.
 * @param {string} title - Session title.
 * @param {string} description - Session description.
 * @param {number} [maxParticipants=10] - Participant cap.
 * @returns {Promise<object>} the created session.
 * @throws {Error} with the server's `detail` message on failure.
 */
async function createVideoSession(title, description, maxParticipants = 10) {
  const token = await getValidToken();

  const payload = {
    title,
    description,
    max_participants: maxParticipants
  };

  const response = await fetch('https://api.ainative.studio/api/v1/video/sessions', {
    method: 'POST',
    headers: {
      'Authorization': `Bearer ${token}`,
      'Content-Type': 'application/json'
    },
    body: JSON.stringify(payload)
  });

  if (!response.ok) {
    const { detail } = await response.json();
    throw new Error(detail);
  }

  const session = await response.json();
  console.log('Session created:', session.session_id);
  return session;
}

Step 4: Join Existing Session

/**
 * Join an existing session via POST /video/sessions/:id/join.
 * @param {string} sessionId
 * @returns {Promise<object>} join payload: { session_id, participant_id, ice_servers }.
 * @throws {Error} 'Session is full' (429), 'Session not found' (404), or a generic failure.
 */
async function joinVideoSession(sessionId) {
  const token = await getValidToken();
  const endpoint = `https://api.ainative.studio/api/v1/video/sessions/${sessionId}/join`;

  const response = await fetch(endpoint, {
    method: 'POST',
    headers: { 'Authorization': `Bearer ${token}` }
  });

  if (response.ok) {
    return await response.json();
  }

  // Map known status codes to user-meaningful errors
  if (response.status === 429) {
    throw new Error('Session is full');
  }
  if (response.status === 404) {
    throw new Error('Session not found');
  }
  throw new Error('Failed to join session');
}

Step 5: Setup WebRTC Connection

class WebRTCManager {
  /**
   * Thin wrapper around RTCPeerConnection: registers event handlers,
   * attaches the local stream, and performs offer/answer negotiation.
   * @param {RTCIceServer[]} iceServers - ICE servers from the join response.
   */
  constructor(iceServers) {
    this.pc = new RTCPeerConnection({
      iceServers,
      iceCandidatePoolSize: 10
    });

    this.localStream = null;
    this.remoteStream = new MediaStream();

    this.setupEventHandlers();
  }

  // All peer-connection callbacks are registered in one place.
  setupEventHandlers() {
    // Local ICE candidates as they are gathered
    this.pc.onicecandidate = (event) => {
      if (event.candidate) {
        console.log('New ICE candidate:', event.candidate);
        // In production, send to peer via signaling server
      }
    };

    // ICE connection state; attempt an ICE restart on failure
    this.pc.oniceconnectionstatechange = () => {
      console.log('ICE state:', this.pc.iceConnectionState);

      if (this.pc.iceConnectionState === 'failed') {
        this.pc.restartIce();
      }
    };

    // Fold every incoming track into the single combined remote stream
    this.pc.ontrack = (event) => {
      console.log('Remote track received:', event.track.kind);
      for (const track of event.streams[0].getTracks()) {
        this.remoteStream.addTrack(track);
      }
    };

    // Overall connection state
    this.pc.onconnectionstatechange = () => {
      console.log('Connection state:', this.pc.connectionState);

      if (this.pc.connectionState === 'connected') {
        console.log('Peers connected!');
      }
    };
  }

  /**
   * Attach every track of the given stream to the connection.
   * @param {MediaStream} stream
   */
  async addLocalStream(stream) {
    this.localStream = stream;
    for (const track of stream.getTracks()) {
      this.pc.addTrack(track, stream);
    }
  }

  /**
   * Create an SDP offer, apply it locally, and return the SDP text.
   * @returns {Promise<string>}
   */
  async createOffer() {
    const offerOptions = {
      offerToReceiveAudio: true,
      offerToReceiveVideo: true
    };
    const offer = await this.pc.createOffer(offerOptions);
    await this.pc.setLocalDescription(offer);
    return this.pc.localDescription.sdp;
  }

  /**
   * Apply the remote peer's SDP answer.
   * @param {string} sdp
   */
  async setRemoteAnswer(sdp) {
    await this.pc.setRemoteDescription({
      type: 'answer',
      sdp
    });
  }

  // Combined MediaStream holding all tracks received so far.
  getRemoteStream() {
    return this.remoteStream;
  }

  // Tear down: close the connection and stop local capture.
  close() {
    this.pc?.close();
    if (this.localStream) {
      for (const track of this.localStream.getTracks()) {
        track.stop();
      }
    }
  }
}

Step 6: Send SDP Offer and Get Answer

/**
 * POST the local SDP offer to the tracks endpoint and return the
 * server-assigned track id plus the SDP answer.
 * @param {string} sessionId
 * @param {string} sdpOffer - SDP produced by createOffer()/setLocalDescription().
 * @param {string} [trackType='video'] - 'video', 'audio', or 'screen'.
 * @returns {Promise<{track_id: string, sdp_answer: string}>}
 * @throws {Error} with the server's `detail` message on failure.
 */
async function negotiateTracks(sessionId, sdpOffer, trackType = 'video') {
  const token = await getValidToken();
  const endpoint = `https://api.ainative.studio/api/v1/video/sessions/${sessionId}/tracks`;

  const response = await fetch(endpoint, {
    method: 'POST',
    headers: {
      'Authorization': `Bearer ${token}`,
      'Content-Type': 'application/json'
    },
    body: JSON.stringify({ sdp: sdpOffer, type: trackType })
  });

  if (!response.ok) {
    const error = await response.json();
    throw new Error(error.detail);
  }

  const { track_id, sdp_answer } = await response.json();
  return { track_id, sdp_answer };
}

WebRTC Setup

Complete WebRTC Flow

// End-to-end call bring-up: capture media, build the peer connection,
// negotiate SDP with the server, and attach both streams to the page.
// Returns the WebRTCManager so the caller can close() it later.
async function setupVideoCall(sessionId, iceServers) {
// 1. Get user media (prompts the user for camera/microphone access)
const stream = await navigator.mediaDevices.getUserMedia({
video: true,
audio: true
});

// Display local video
const localVideo = document.getElementById('localVideo');
localVideo.srcObject = stream;

// 2. Create peer connection
const webrtc = new WebRTCManager(iceServers);

// 3. Add local stream
await webrtc.addLocalStream(stream);

// 4. Create offer
const sdpOffer = await webrtc.createOffer();

// 5. Send to server and get answer
const { sdp_answer } = await negotiateTracks(sessionId, sdpOffer, 'video');

// 6. Set remote description
await webrtc.setRemoteAnswer(sdp_answer);

// 7. Display remote video (remote tracks arrive asynchronously via ontrack)
const remoteVideo = document.getElementById('remoteVideo');
remoteVideo.srcObject = webrtc.getRemoteStream();

return webrtc;
}

Handling Different Track Types

// Video track
await negotiateTracks(sessionId, sdpOffer, 'video');

// Audio track
await negotiateTracks(sessionId, sdpOffer, 'audio');

// Screen sharing
const screenStream = await navigator.mediaDevices.getDisplayMedia({
video: { cursor: 'always' },
audio: false
});
// NOTE(review): screenSdpOffer is not defined in this snippet — create a
// separate offer from a connection carrying screenStream's track first.
await negotiateTracks(sessionId, screenSdpOffer, 'screen');

Session Lifecycle Management

Complete Session Manager

class SessionManager {
  /**
   * High-level session lifecycle: create/join a session and manage the
   * associated WebRTC connection (via the setupVideoCall helper).
   * @param {string} apiUrl - Base API URL.
   * @param {string} accessToken - Bearer token.
   */
  constructor(apiUrl, accessToken) {
    this.apiUrl = apiUrl;
    this.accessToken = accessToken;
    this.currentSession = null; // session object while a call is active
    this.webrtc = null;         // WebRTCManager for the active call
  }

  /**
   * Create a new session, join it, and bring up WebRTC.
   * @param {string} title
   * @returns {Promise<object>} the created session.
   */
  async createAndJoin(title) {
    // Create session
    const session = await this.createSession(title);
    this.currentSession = session;

    // Join session
    const joinData = await this.joinSession(session.session_id);

    // Setup WebRTC
    this.webrtc = await setupVideoCall(
      session.session_id,
      joinData.ice_servers
    );

    return session;
  }

  /** Join an existing session by id and bring up WebRTC. */
  async joinExisting(sessionId) {
    const joinData = await this.joinSession(sessionId);
    this.currentSession = { session_id: sessionId };

    this.webrtc = await setupVideoCall(
      sessionId,
      joinData.ice_servers
    );

    return joinData;
  }

  /** Tear down WebRTC and clear session state. Safe to call when idle. */
  async end() {
    if (this.webrtc) {
      this.webrtc.close();
    }
    this.currentSession = null;
    this.webrtc = null;
  }

  /**
   * POST /video/sessions.
   * FIX: HTTP errors are now surfaced (consistent with the Step 3 helper)
   * instead of the error payload being returned as a session.
   */
  async createSession(title) {
    const response = await fetch(`${this.apiUrl}/video/sessions`, {
      method: 'POST',
      headers: {
        'Authorization': `Bearer ${this.accessToken}`,
        'Content-Type': 'application/json'
      },
      body: JSON.stringify({ title, max_participants: 10 })
    });
    if (!response.ok) {
      throw new Error(`Failed to create session (HTTP ${response.status})`);
    }
    return await response.json();
  }

  /** POST /video/sessions/:id/join — FIX: also validates the HTTP status. */
  async joinSession(sessionId) {
    const response = await fetch(
      `${this.apiUrl}/video/sessions/${sessionId}/join`,
      {
        method: 'POST',
        headers: { 'Authorization': `Bearer ${this.accessToken}` }
      }
    );
    if (!response.ok) {
      throw new Error(`Failed to join session (HTTP ${response.status})`);
    }
    return await response.json();
  }
}

UI Controls

class VideoControls {
  /**
   * Mute/camera/screen-share controls for an active call.
   * @param {WebRTCManager} webrtcManager - Owns the peer connection and the
   *   local MediaStream these controls operate on.
   */
  constructor(webrtcManager) {
    this.webrtc = webrtcManager;
    this.isAudioEnabled = true;
    this.isVideoEnabled = true;
  }

  /** Flip the microphone on/off; returns the new enabled state. */
  toggleAudio() {
    this.isAudioEnabled = !this.isAudioEnabled;
    const [micTrack] = this.webrtc.localStream.getAudioTracks();
    if (micTrack) {
      micTrack.enabled = this.isAudioEnabled;
    }
    return this.isAudioEnabled;
  }

  /** Flip the camera on/off; returns the new enabled state. */
  toggleVideo() {
    this.isVideoEnabled = !this.isVideoEnabled;
    const [camTrack] = this.webrtc.localStream.getVideoTracks();
    if (camTrack) {
      camTrack.enabled = this.isVideoEnabled;
    }
    return this.isVideoEnabled;
  }

  /**
   * Replace the outgoing camera track with a display-capture track.
   * @returns {Promise<boolean>} true on success, false if capture failed
   *   or the user cancelled the picker.
   */
  async shareScreen() {
    try {
      const screenStream = await navigator.mediaDevices.getDisplayMedia({
        video: { cursor: 'always' },
        audio: false
      });

      const [screenTrack] = screenStream.getVideoTracks();
      const videoSender = this.webrtc.pc
        .getSenders()
        .find((s) => s.track?.kind === 'video');

      if (videoSender) {
        await videoSender.replaceTrack(screenTrack);
      }

      // Restore the camera when the user stops sharing via the browser UI
      screenTrack.onended = () => {
        this.stopScreenShare();
      };

      return true;
    } catch (error) {
      console.error('Screen share error:', error);
      return false;
    }
  }

  /** Put the original camera track back on the video sender. */
  async stopScreenShare() {
    const [camTrack] = this.webrtc.localStream.getVideoTracks();
    const videoSender = this.webrtc.pc
      .getSenders()
      .find((s) => s.track?.kind === 'video');

    if (videoSender && camTrack) {
      await videoSender.replaceTrack(camTrack);
    }
  }
}

Recording Functionality

Start Recording

class RecordingManager {
  /**
   * Client-side recording of a MediaStream via MediaRecorder, with
   * download and upload helpers.
   * @param {string} sessionId - Used to name the recording file.
   * @param {string} apiUrl - Base API URL for uploads.
   * @param {string} accessToken - Bearer token for uploads.
   */
  constructor(sessionId, apiUrl, accessToken) {
    this.sessionId = sessionId;
    this.apiUrl = apiUrl;
    this.accessToken = accessToken;
    this.mediaRecorder = null;
    this.recordedChunks = [];
  }

  /**
   * Begin recording the given stream.
   * FIX: recordedChunks is reset here — previously a second recording
   * silently included all chunks from the first one.
   * NOTE(review): 'video/webm;codecs=vp9' is assumed supported — consider
   * checking MediaRecorder.isTypeSupported for Safari.
   * @param {MediaStream} stream
   */
  async startRecording(stream) {
    this.recordedChunks = [];

    // Client-side recording
    this.mediaRecorder = new MediaRecorder(stream, {
      mimeType: 'video/webm;codecs=vp9'
    });

    this.mediaRecorder.ondataavailable = (event) => {
      if (event.data.size > 0) {
        this.recordedChunks.push(event.data);
      }
    };

    this.mediaRecorder.start(1000); // Collect data every second
    console.log('Recording started');
  }

  /**
   * Stop recording and resolve with the assembled WebM blob.
   * FIX: fails with a clear Error instead of a TypeError when recording
   * was never started.
   * @returns {Promise<Blob>}
   */
  async stopRecording() {
    if (!this.mediaRecorder) {
      throw new Error('Recording has not been started');
    }
    return new Promise((resolve) => {
      this.mediaRecorder.onstop = () => {
        const blob = new Blob(this.recordedChunks, {
          type: 'video/webm'
        });
        resolve(blob);
      };
      this.mediaRecorder.stop();
    });
  }

  /** Trigger a browser download of the recording blob. */
  async downloadRecording(blob) {
    const url = URL.createObjectURL(blob);
    const a = document.createElement('a');
    a.href = url;
    a.download = `recording-${this.sessionId}.webm`;
    a.click();
    URL.revokeObjectURL(url); // release the object URL immediately
  }

  /** Upload the recording blob as multipart form data. */
  async uploadRecording(blob) {
    const formData = new FormData();
    formData.append('file', blob, `recording-${this.sessionId}.webm`);

    const response = await fetch(`${this.apiUrl}/video/recordings`, {
      method: 'POST',
      headers: {
        // No Content-Type here: the browser sets the multipart boundary
        'Authorization': `Bearer ${this.accessToken}`
      },
      body: formData
    });

    return await response.json();
  }
}

Best Practices

1. Error Handling

class RobustVideoClient {
  /**
   * Defensive call setup: every step runs inside one try/catch and
   * failures are routed to a user-facing error handler.
   */
  async startCall() {
    try {
      await this.checkBrowserSupport();
      await this.requestPermissions();
      await this.createSession();
      await this.setupWebRTC();
    } catch (error) {
      this.handleError(error);
    }
  }

  /**
   * Verify the WebRTC APIs exist before using them.
   * FIX: the original read the bare `RTCPeerConnection` identifier, which
   * throws a ReferenceError (not the intended Error) in environments that
   * lack it. `typeof` detects missing globals safely.
   * @throws {Error} when getUserMedia or RTCPeerConnection is unavailable.
   */
  checkBrowserSupport() {
    const hasMediaDevices =
      typeof navigator !== 'undefined' && Boolean(navigator.mediaDevices);
    const hasPeerConnection = typeof RTCPeerConnection !== 'undefined';
    if (!hasMediaDevices || !hasPeerConnection) {
      throw new Error('WebRTC not supported in this browser');
    }
  }

  /**
   * Request camera + microphone, translating getUserMedia failures into
   * user-meaningful errors.
   * @returns {Promise<MediaStream>}
   */
  async requestPermissions() {
    try {
      const stream = await navigator.mediaDevices.getUserMedia({
        video: true,
        audio: true
      });
      return stream;
    } catch (error) {
      if (error.name === 'NotAllowedError') {
        throw new Error('Camera/microphone permission denied');
      } else if (error.name === 'NotFoundError') {
        throw new Error('No camera or microphone found');
      } else {
        throw error;
      }
    }
  }

  /** Map known error names to friendly messages and show them. */
  handleError(error) {
    console.error('Video call error:', error);

    const errorMessages = {
      'NotAllowedError': 'Please grant camera and microphone permissions',
      'NotFoundError': 'No camera or microphone detected',
      'NetworkError': 'Network error - please check your connection',
      'OverconstrainedError': 'Could not satisfy media constraints'
    };

    const message = errorMessages[error.name] || error.message;
    this.showUserError(message);
  }

  showUserError(message) {
    // Show user-friendly error message
    alert(message);
  }
}

2. Connection Monitoring

class ConnectionMonitor {
  /**
   * Watches a peer connection's ICE state and logs inbound video stats.
   * FIX: the stats setInterval was never cleared, leaking a timer that
   * kept polling a closed connection; stop() now cancels it.
   * @param {RTCPeerConnection} pc
   */
  constructor(pc) {
    this.pc = pc;
    this.statsTimer = null; // interval handle for the stats poll
    this.setupMonitoring();
  }

  setupMonitoring() {
    this.pc.oniceconnectionstatechange = () => {
      switch (this.pc.iceConnectionState) {
        case 'checking':
          console.log('Connecting...');
          break;
        case 'connected':
        case 'completed':
          console.log('Connected!');
          this.onConnected();
          break;
        case 'failed':
          console.log('Connection failed');
          this.onFailed();
          break;
        case 'disconnected':
          console.log('Disconnected');
          this.onDisconnected();
          break;
        case 'closed':
          console.log('Connection closed');
          break;
      }
    };

    // Monitor stats once per second until stop() is called
    this.statsTimer = setInterval(() => this.checkStats(), 1000);
  }

  /** Stop the stats polling loop. Safe to call more than once. */
  stop() {
    if (this.statsTimer !== null) {
      clearInterval(this.statsTimer);
      this.statsTimer = null;
    }
  }

  /** Log packet-loss/throughput numbers for inbound video. */
  async checkStats() {
    const stats = await this.pc.getStats();
    stats.forEach(report => {
      if (report.type === 'inbound-rtp' && report.kind === 'video') {
        console.log('Video stats:', {
          packetsLost: report.packetsLost,
          bytesReceived: report.bytesReceived,
          framesPerSecond: report.framesPerSecond
        });
      }
    });
  }

  onConnected() {
    // UI update: show "Connected"
  }

  onFailed() {
    // Attempt reconnection
    this.pc.restartIce();
  }

  onDisconnected() {
    // UI update: show "Reconnecting..."
  }
}

3. Bandwidth Optimization

/**
 * Cap the outbound video encoder's bitrate and framerate.
 * FIX: an unknown quality name previously crashed on `settings.maxBitrate`
 * (undefined lookup) — it now falls back to 'medium'.
 * @param {RTCPeerConnection} pc
 * @param {'low'|'medium'|'high'} [quality='medium']
 */
async function optimizeBandwidth(pc, quality = 'medium') {
  const sender = pc.getSenders().find(s => s.track?.kind === 'video');
  if (!sender) return; // nothing to tune without an outgoing video track

  const parameters = sender.getParameters();

  const qualities = {
    low: { maxBitrate: 200000, maxFramerate: 15 },
    medium: { maxBitrate: 500000, maxFramerate: 24 },
    high: { maxBitrate: 1500000, maxFramerate: 30 }
  };

  // Unknown quality names fall back to 'medium' instead of throwing
  const settings = qualities[quality] ?? qualities.medium;

  // getParameters() may return no encodings before negotiation completes
  if (!parameters.encodings) {
    parameters.encodings = [{}];
  }

  parameters.encodings[0].maxBitrate = settings.maxBitrate;
  parameters.encodings[0].maxFramerate = settings.maxFramerate;

  await sender.setParameters(parameters);
}

4. Cleanup on Page Unload

// Release camera/microphone and close the peer connection when the page
// unloads, so the device indicator light turns off promptly.
// NOTE(review): 'beforeunload' is unreliable on mobile browsers — consider
// also listening for 'pagehide'.
window.addEventListener('beforeunload', () => {
if (videoCall) {
videoCall.endCall();
}
});

Troubleshooting

Common Issues

1. No Video/Audio

/**
 * Log permission state, available devices, and a test capture to help
 * diagnose "no video/audio" reports.
 * FIX: `permissions.query({ name: 'camera' })` throws in browsers that do
 * not recognize the 'camera' permission name (e.g. Firefox) — the query is
 * now wrapped so the remaining diagnostics still run.
 */
async function diagnoseMediaIssues() {
  // Check permissions (the Permissions API may reject unknown names)
  try {
    const permissions = await navigator.permissions.query({ name: 'camera' });
    console.log('Camera permission:', permissions.state);
  } catch (error) {
    console.log('Camera permission: query unsupported in this browser');
  }

  // Check devices
  const devices = await navigator.mediaDevices.enumerateDevices();
  const cameras = devices.filter(d => d.kind === 'videoinput');
  const mics = devices.filter(d => d.kind === 'audioinput');

  console.log(`Found ${cameras.length} cameras, ${mics.length} microphones`);

  // Test stream — stop tracks right away so the camera light turns off
  try {
    const stream = await navigator.mediaDevices.getUserMedia({
      video: true,
      audio: true
    });
    console.log('Media stream OK:', stream.getTracks());
    stream.getTracks().forEach(track => track.stop());
  } catch (error) {
    console.error('Media error:', error);
  }
}

2. Connection Failure

// Dump the connection's three state machines and, once stats resolve, the
// active ICE candidate pair — a useful first step when a call won't connect.
function diagnoseConnection(pc) {
  console.log('ICE Connection State:', pc.iceConnectionState);
  console.log('ICE Gathering State:', pc.iceGatheringState);
  console.log('Signaling State:', pc.signalingState);

  pc.getStats().then((stats) => {
    stats.forEach((report) => {
      const isActivePair =
        report.type === 'candidate-pair' && report.state === 'succeeded';
      if (isActivePair) {
        console.log('Connected via:', report.localCandidateId, '->', report.remoteCandidateId);
      }
    });
  });
}

3. TURN Server Not Working

/**
 * Probe a TURN server by forcing relay-only ICE gathering.
 * Resolves true as soon as a relay candidate appears, false after a
 * 5-second timeout.
 * FIX: the peer connection is now closed and the timeout cleared on either
 * outcome — the original leaked both.
 * @param {string} turnUrl - e.g. 'turn:turn.example.com:3478'.
 * @param {string} username
 * @param {string} credential
 * @returns {Promise<boolean>}
 */
async function testTurnServer(turnUrl, username, credential) {
  const pc = new RTCPeerConnection({
    iceServers: [{
      urls: [turnUrl],
      username,
      credential
    }],
    iceTransportPolicy: 'relay' // Force TURN
  });

  // A data channel is enough to kick off ICE gathering
  pc.createDataChannel('test');

  const offer = await pc.createOffer();
  await pc.setLocalDescription(offer);

  return new Promise((resolve) => {
    let settled = false;
    const finish = (result) => {
      if (settled) return;
      settled = true;
      clearTimeout(timer);
      pc.close(); // release sockets/ICE agents
      resolve(result);
    };

    const timer = setTimeout(() => finish(false), 5000);

    pc.onicecandidate = (event) => {
      if (!event.candidate) return;
      console.log('ICE candidate:', event.candidate);
      if (event.candidate.candidate.includes('relay')) {
        console.log('TURN server working!');
        finish(true);
      }
    };
  });
}

Advanced Features

Multi-Party Calls

class MultiPartyCall {
  /**
   * Mesh-style multi-party call: one RTCPeerConnection per remote
   * participant.
   * FIX: the original never initialized `this.localStream`, so
   * addParticipant() always crashed on `this.localStream.getTracks()`.
   * The stream is now injected via the constructor (optional, defaults to
   * null for backward compatibility) and validated before use.
   * @param {MediaStream|null} [localStream=null] - Local capture to share.
   */
  constructor(localStream = null) {
    this.peers = new Map(); // participant_id -> RTCPeerConnection
    this.localStream = localStream;
  }

  /**
   * Create a peer connection for a new participant and return the SDP offer.
   * @param {string} participantId
   * @param {RTCIceServer[]} iceServers
   * @returns {Promise<RTCSessionDescriptionInit>}
   * @throws {Error} when no local stream has been provided.
   */
  async addParticipant(participantId, iceServers) {
    if (!this.localStream) {
      throw new Error('Local stream not set; pass it to the constructor first');
    }

    const pc = new RTCPeerConnection({ iceServers });

    // Send our media to this participant
    this.localStream.getTracks().forEach(track => {
      pc.addTrack(track, this.localStream);
    });

    // Render their media when it arrives
    pc.ontrack = (event) => {
      this.displayRemoteStream(participantId, event.streams[0]);
    };

    this.peers.set(participantId, pc);

    // Create offer
    const offer = await pc.createOffer();
    await pc.setLocalDescription(offer);

    return offer;
  }

  /** Create a <video> element for a remote participant's stream. */
  displayRemoteStream(participantId, stream) {
    const video = document.createElement('video');
    video.id = `remote-${participantId}`;
    video.srcObject = stream;
    video.autoplay = true;
    document.getElementById('videos').appendChild(video);
  }

  /** Close the participant's connection and remove their video element. */
  removeParticipant(participantId) {
    const pc = this.peers.get(participantId);
    if (pc) {
      pc.close();
      this.peers.delete(participantId);
    }

    const video = document.getElementById(`remote-${participantId}`);
    if (video) {
      video.remove();
    }
  }
}

Virtual Backgrounds

// Pipe the camera track through a <canvas> so each frame can be processed
// (e.g. background blur/replacement) and return the processed video track.
// NOTE(review): the processFrame loop runs forever — there is no teardown
// for the rAF loop or the hidden <video>/<canvas>; add one before using
// this in production.
async function applyVirtualBackground(stream) {
const videoTrack = stream.getVideoTracks()[0];
const canvas = document.createElement('canvas');
const ctx = canvas.getContext('2d');

// Play the raw track in an off-DOM <video> element for frame capture
const video = document.createElement('video');
video.srcObject = new MediaStream([videoTrack]);
await video.play();

// Size the canvas to the source resolution (valid once play() resolves)
canvas.width = video.videoWidth;
canvas.height = video.videoHeight;

function processFrame() {
ctx.drawImage(video, 0, 0);

// Apply background blur or replacement
// (Requires ML library like TensorFlow.js)

requestAnimationFrame(processFrame);
}

processFrame();

// captureStream() mirrors whatever is drawn onto the canvas
return canvas.captureStream().getVideoTracks()[0];
}

Network Quality Indicator

class NetworkQualityMonitor {
  /**
   * Polls inbound video stats and maps packet loss to a coarse quality
   * label ('good' / 'fair' / 'poor') shown in the UI.
   * FIXES vs. original: the interval can now be stopped; a zero-packet
   * sample no longer produces a NaN loss rate; updateUI no longer crashes
   * when the indicator element is absent.
   * @param {RTCPeerConnection} pc
   */
  constructor(pc) {
    this.pc = pc;
    this.quality = 'good';
    this.timer = null; // interval handle, see stopMonitoring()
  }

  /**
   * Map a packet-loss ratio to a quality label.
   * @param {number} lossRate - Fraction in [0, 1].
   * @returns {'good'|'fair'|'poor'}
   */
  classifyLossRate(lossRate) {
    if (lossRate > 0.1) return 'poor';
    if (lossRate > 0.05) return 'fair';
    return 'good';
  }

  /** Begin polling stats every 2 seconds. No-op if already running. */
  async startMonitoring() {
    if (this.timer !== null) return;

    this.timer = setInterval(async () => {
      const stats = await this.pc.getStats();

      stats.forEach(report => {
        if (report.type === 'inbound-rtp' && report.kind === 'video') {
          const lost = report.packetsLost || 0;
          const received = report.packetsReceived || 0;
          const total = lost + received;
          // Guard the division: no packets yet means "no evidence of loss"
          const lossRate = total > 0 ? lost / total : 0;

          this.quality = this.classifyLossRate(lossRate);
          this.updateUI(this.quality);
        }
      });
    }, 2000);
  }

  /** Stop polling. Safe to call when monitoring was never started. */
  stopMonitoring() {
    if (this.timer !== null) {
      clearInterval(this.timer);
      this.timer = null;
    }
  }

  /** Reflect the quality label in the indicator element, if present. */
  updateUI(quality) {
    const indicator =
      typeof document !== 'undefined'
        ? document.getElementById('quality-indicator')
        : null;
    if (!indicator) return;
    indicator.className = `quality-${quality}`;
    indicator.textContent = quality.toUpperCase();
  }
}

React Integration Example

// React wrapper around the quick-start flow: capture media on mount, join
// the session, negotiate SDP, and render local + remote streams.
import { useState, useEffect, useRef } from 'react';

function VideoCall({ sessionId, apiUrl, accessToken }) {
const [isConnected, setIsConnected] = useState(false);
const [isMuted, setIsMuted] = useState(false);
// Refs keep the DOM nodes and peer connection stable across re-renders
const localVideoRef = useRef(null);
const remoteVideoRef = useRef(null);
const pcRef = useRef(null);

// Run once on mount; cleanup releases media and closes the connection.
// NOTE(review): sessionId/apiUrl/accessToken are read inside setupCall but
// omitted from the dependency array — remount the component to switch sessions.
useEffect(() => {
setupCall();
return () => cleanup();
}, []);

// Full call bring-up: media -> join -> RTCPeerConnection -> offer/answer.
async function setupCall() {
try {
// Get media
const stream = await navigator.mediaDevices.getUserMedia({
video: true,
audio: true
});
localVideoRef.current.srcObject = stream;

// Join session
// NOTE(review): response.ok is not checked — an API error here surfaces
// later as a confusing ICE failure.
const response = await fetch(
`${apiUrl}/video/sessions/${sessionId}/join`,
{
method: 'POST',
headers: { 'Authorization': `Bearer ${accessToken}` }
}
);
const { ice_servers } = await response.json();

// Setup WebRTC
const pc = new RTCPeerConnection({ iceServers: ice_servers });
pcRef.current = pc;

stream.getTracks().forEach(track => {
pc.addTrack(track, stream);
});

// First remote stream goes straight to the remote <video>
pc.ontrack = (event) => {
remoteVideoRef.current.srcObject = event.streams[0];
};

pc.onconnectionstatechange = () => {
setIsConnected(pc.connectionState === 'connected');
};

const offer = await pc.createOffer();
await pc.setLocalDescription(offer);

// Send offer, get answer
const trackResponse = await fetch(
`${apiUrl}/video/sessions/${sessionId}/tracks`,
{
method: 'POST',
headers: {
'Authorization': `Bearer ${accessToken}`,
'Content-Type': 'application/json'
},
body: JSON.stringify({
sdp: pc.localDescription.sdp,
type: 'video'
})
}
);

const { sdp_answer } = await trackResponse.json();
await pc.setRemoteDescription({ type: 'answer', sdp: sdp_answer });

} catch (error) {
console.error('Setup error:', error);
}
}

// Flip the first audio track's enabled flag and mirror it in state.
function toggleMute() {
const audioTrack = localVideoRef.current.srcObject
.getAudioTracks()[0];
audioTrack.enabled = !audioTrack.enabled;
setIsMuted(!audioTrack.enabled);
}

// Close the connection and stop all captured tracks.
function cleanup() {
if (pcRef.current) {
pcRef.current.close();
}
if (localVideoRef.current?.srcObject) {
localVideoRef.current.srcObject.getTracks()
.forEach(track => track.stop());
}
}

return (
<div>
<video ref={localVideoRef} autoPlay muted playsInline />
<video ref={remoteVideoRef} autoPlay playsInline />
<div>
<button onClick={toggleMute}>
{isMuted ? 'Unmute' : 'Mute'}
</button>
<span>{isConnected ? 'Connected' : 'Connecting...'}</span>
</div>
</div>
);
}

Next Steps

  1. Review the API Documentation
  2. Check the Deployment Guide
  3. Explore the Architecture Documentation
  4. Join our Discord for community support

Resources