Ready-to-use code examples for integrating with NeutralAiz services. Each example is fully functional and can be adapted to your needs.
Speech-to-Text (STT)
Basic Transcription
// Transcribe an audio file via the STT agent.
// @param {Blob|File} audioFile - audio payload to upload as multipart form data
// @returns {Promise<string>} the transcribed text
// @throws {Error} when the STT service responds with a non-2xx status
async function transcribeAudio(audioFile) {
  const formData = new FormData();
  formData.append('audio', audioFile);
  formData.append('provider', 'whisper');
  const response = await fetch('http://stt-agent:8001/transcribe', {
    method: 'POST',
    body: formData
  });
  // Fail loudly instead of returning undefined text on an error response.
  if (!response.ok) {
    throw new Error(`Transcription failed: HTTP ${response.status} ${response.statusText}`);
  }
  const result = await response.json();
  return result.text;
}
Streaming Transcription
// Real-time streaming transcription over WebSocket.
const ws = new WebSocket('ws://stt-agent:8001/transcribe/stream');
// Send audio only once the connection is open: calling send() while the
// socket is still CONNECTING throws an InvalidStateError.
ws.onopen = () => {
  ws.send(audioChunk);
};
// Receive partial transcriptions as they are produced.
ws.onmessage = (event) => {
  const transcription = JSON.parse(event.data);
  console.log('Partial:', transcription.text);
};
Multi-Provider Comparison
// Transcribe the same audio with several STT providers and collect the results.
// Requests are independent, so they are issued in parallel with Promise.all
// instead of awaiting each one sequentially.
// @param {Blob|File} audioFile - audio payload sent to every provider
// @returns {Promise<Object>} map of provider name -> parsed JSON result
// @throws {Error} if any provider responds with a non-2xx status
async function compareProviders(audioFile) {
  const providers = ['whisper', 'deepgram', 'parakeet'];
  const entries = await Promise.all(
    providers.map(async (provider) => {
      const formData = new FormData();
      formData.append('audio', audioFile);
      formData.append('provider', provider);
      const response = await fetch('http://stt-agent:8001/transcribe', {
        method: 'POST',
        body: formData
      });
      if (!response.ok) {
        throw new Error(`${provider} failed: HTTP ${response.status} ${response.statusText}`);
      }
      return [provider, await response.json()];
    })
  );
  return Object.fromEntries(entries);
}
Text-to-Speech (TTS)
Generate Speech
// Convert text to speech and return a playable object URL.
// @param {string} text - text to synthesize
// @param {string} [voice='nova'] - voice preset name
// @returns {Promise<string>} object URL for the synthesized audio blob
// @throws {Error} when the TTS service responds with a non-2xx status
async function textToSpeech(text, voice = 'nova') {
  const response = await fetch('http://tts-agent:8002/synthesize', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json'
    },
    body: JSON.stringify({
      text: text,
      voice: voice,
      speed: 1.0
    })
  });
  // Without this check an error page would be wrapped in a blob URL and
  // handed to the audio player, failing far from the real cause.
  if (!response.ok) {
    throw new Error(`TTS failed: HTTP ${response.status} ${response.statusText}`);
  }
  const audioBlob = await response.blob();
  return URL.createObjectURL(audioBlob);
}
Stream Audio Output
// Stream TTS audio and decode chunks as they arrive.
// @param {string} text - text to synthesize
// @throws {Error} when the TTS service responds with a non-2xx status
async function streamTTS(text) {
  const response = await fetch('http://tts-agent:8002/stream', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json'
    },
    body: JSON.stringify({ text })
  });
  if (!response.ok) {
    throw new Error(`TTS stream failed: HTTP ${response.status} ${response.statusText}`);
  }
  const reader = response.body.getReader();
  const audioContext = new AudioContext();
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    // `value` is a Uint8Array view; its backing `buffer` can be larger than
    // the chunk, so copy exactly the chunk's bytes before decoding.
    const chunk = value.buffer.slice(value.byteOffset, value.byteOffset + value.byteLength);
    // NOTE(review): decodeAudioData expects a complete, self-contained audio
    // file — this assumes the server frames each chunk as one decodable
    // unit. Confirm against the /stream endpoint's framing.
    await audioContext.decodeAudioData(chunk);
  }
}
LiveKit WebRTC
Connect to Room
// Connect to a LiveKit room and publish local audio and video tracks.
// @param {string} roomName - room to join
// @param {string} userName - participant identity
// @returns {Promise<LiveKit.Room>} the connected room
// @throws {Error} when the token endpoint responds with a non-2xx status
async function connectToRoom(roomName, userName) {
  // Build the token URL with URLSearchParams so room/user names containing
  // spaces or special characters are percent-encoded correctly.
  const params = new URLSearchParams({ roomName, user: userName });
  const tokenResponse = await fetch(`/api/get-token?${params}`);
  if (!tokenResponse.ok) {
    throw new Error(`Token request failed: HTTP ${tokenResponse.status} ${tokenResponse.statusText}`);
  }
  const { token } = await tokenResponse.json();
  // Create room instance
  const room = new LiveKit.Room({
    adaptiveStream: true,
    dynacast: true
  });
  // Connect
  await room.connect('wss://livekit.test.neutralaiz.com', token);
  // Track creation calls are independent — run them in parallel.
  const [audioTrack, videoTrack] = await Promise.all([
    LiveKit.createLocalAudioTrack(),
    LiveKit.createLocalVideoTrack()
  ]);
  await room.localParticipant.publishTrack(audioTrack);
  await room.localParticipant.publishTrack(videoTrack);
  return room;
}
Handle Participants
// Participant lifecycle: log joins and leaves, and attach any video track a
// remote participant publishes to the #videos container.
room.on('participantConnected', (participant) => {
  console.log(`${participant.identity} joined`);
  // Render every video track this participant publishes.
  participant.on('trackSubscribed', (track, publication) => {
    if (track.kind !== 'video') return;
    const element = track.attach();
    document.getElementById('videos').appendChild(element);
  });
});
room.on('participantDisconnected', (participant) => {
  console.log(`${participant.identity} left`);
});
Entity Extraction
Extract Entities
// Fetch extracted entities for a room and group them by entity type.
// @param {string} roomName - room identifier (URL-encoded before use)
// @returns {Promise<{people: Array, locations: Array, organizations: Array}>}
// @throws {Error} when the entity service responds with a non-2xx status
async function extractEntities(roomName) {
  // Encode the room name so names with slashes/spaces don't break the path.
  const response = await fetch(`https://entities.test.neutralaiz.com/entities/${encodeURIComponent(roomName)}`);
  if (!response.ok) {
    throw new Error(`Entity fetch failed: HTTP ${response.status} ${response.statusText}`);
  }
  const data = await response.json();
  return {
    people: data.entities.filter(e => e.type === 'PERSON'),
    locations: data.entities.filter(e => e.type === 'LOCATION'),
    organizations: data.entities.filter(e => e.type === 'ORG')
  };
}
Real-time Updates
// Live entity feed: dispatch on the update type pushed by the entity service.
const ws = new WebSocket('wss://entities.test.neutralaiz.com/ws/room-name');
ws.onmessage = (event) => {
  const update = JSON.parse(event.data);
  switch (update.type) {
    case 'entities_updated':
      console.log('New entities:', update.entities);
      break;
    case 'questions_tasks_updated':
      console.log('Questions:', update.questions);
      console.log('Tasks:', update.tasks);
      break;
  }
};
WebSocket Integration
Transcription Stream
// WebSocket client that streams per-room transcriptions from the admin service.
class TranscriptionClient {
  constructor(roomName) {
    this.ws = new WebSocket(`wss://admin.test.neutralaiz.com/ws/transcriptions/${roomName}`);
    this.setupHandlers();
  }

  // Wire the open/message/error callbacks onto the socket.
  setupHandlers() {
    this.ws.onopen = () => console.log('Connected to transcription service');
    this.ws.onmessage = (event) => this.handleTranscription(JSON.parse(event.data));
    this.ws.onerror = (error) => console.error('WebSocket error:', error);
  }

  // Default handler prints "[speaker]: text"; override for custom rendering.
  handleTranscription(data) {
    console.log(`[${data.participant_name}]: ${data.text}`);
  }
}
Error Handling
Robust API Calls
// Call a JSON API with retry and exponential backoff (1s, 2s, 4s, ...).
// Transient failures (network errors, 5xx responses) are retried; 4xx client
// errors fail immediately, since retrying cannot fix a bad request.
// @param {string} url - request URL
// @param {Object} options - fetch options
// @param {number} [maxRetries=3] - total number of attempts
// @returns {Promise<any>} parsed JSON body of the first successful response
// @throws {Error} the last error after all attempts, or the first 4xx error
async function apiCallWithRetry(url, options, maxRetries = 3) {
  let lastError;
  for (let attempt = 0; attempt < maxRetries; attempt++) {
    try {
      const response = await fetch(url, options);
      if (!response.ok) {
        const error = new Error(`HTTP ${response.status}: ${response.statusText}`);
        error.status = response.status;
        throw error;
      }
      return await response.json();
    } catch (error) {
      lastError = error;
      // Client errors are not transient — fail fast instead of retrying.
      if (error.status >= 400 && error.status < 500) {
        throw error;
      }
      console.warn(`Attempt ${attempt + 1} failed:`, error.message);
      // Exponential backoff before the next attempt (skip after the last).
      if (attempt < maxRetries - 1) {
        await new Promise((resolve) => setTimeout(resolve, 2 ** attempt * 1000));
      }
    }
  }
  throw lastError;
}