Example: Answering an incoming call

View as Markdown · Open in Claude

This example answers an inbound Voximplant call and bridges audio to ElevenLabs Agents for real-time speech-to-speech conversations.

Jump to the Full VoxEngine scenario.

Prerequisites

Session setup

ElevenLabs Agents are configured in the ElevenLabs console. In VoxEngine, you only need the API key and agent ID.

In the full example, the client is created with:

Create Agents client
1agentsClient = await ElevenLabs.createAgentsClient({
2 xiApiKey: (await ApplicationStorage.get("ELEVENLABS_API_KEY")).value, // from ApplicationStorage
3 agentId: (await ApplicationStorage.get("ELEVENLABS_AGENT_ID")).value, // from ApplicationStorage
4});
Configure prompts and tools in ElevenLabs

Prompts, voices, and tools live in your ElevenLabs Agent configuration. Update them in the ElevenLabs console and reuse the same agent ID in VoxEngine.

Connect call audio

Once you have an ElevenLabs.AgentsClient, bridge audio both ways between the call and the agent:

Connect call audio
1VoxEngine.sendMediaBetween(call, agentsClient);

Barge-in

To keep the conversation interruption-friendly, the example listens for ElevenLabs.AgentsEvents.Interruption and clears the media buffer so any in-progress TTS audio is canceled when the caller starts talking:

Barge-in
1agentsClient.addEventListener(ElevenLabs.AgentsEvents.Interruption, () => {
2 agentsClient.clearMediaBuffer();
3});

Events

The scenario logs transcripts and key lifecycle events. For example:

Events (example from the scenario)
1agentsClient.addEventListener(ElevenLabs.AgentsEvents.UserTranscript, (event) => {
2 const payload = event?.data?.payload || event?.data || {};
3 const text = payload.text || payload.transcript || payload.user_transcript;
4 if (text) Logger.write(`USER: ${text}`);
5});

Notes

See the VoxEngine API Reference for more details.

Full VoxEngine scenario

voxengine-elevenlabs-inbound.js
/**
 * Voximplant + ElevenLabs Agents connector demo.
 *
 * Scenario: answer an incoming call and bridge its audio both ways to an
 * ElevenLabs Agent for a real-time speech-to-speech conversation.
 *
 * Requires two ApplicationStorage keys to be provisioned:
 *   ELEVENLABS_API_KEY  - ElevenLabs API key (xi-api-key)
 *   ELEVENLABS_AGENT_ID - the agent configured in the ElevenLabs console
 */

require(Modules.ElevenLabs);
require(Modules.ApplicationStorage);


VoxEngine.addEventListener(AppEvents.CallAlerting, async ({call}) => {
  let voiceAIClient;

  // Termination handlers: end the session whenever the call leg goes away.
  call.addEventListener(CallEvents.Disconnected, () => VoxEngine.terminate());
  call.addEventListener(CallEvents.Failed, () => VoxEngine.terminate());

  try {
    call.answer();
    // call.record({hd_audio: true, stereo: true}); // Optional: record the call

    // The two ApplicationStorage reads are independent — fetch them in parallel.
    const [apiKeyRecord, agentIdRecord] = await Promise.all([
      ApplicationStorage.get("ELEVENLABS_API_KEY"),
      ApplicationStorage.get("ELEVENLABS_AGENT_ID"),
    ]);

    // Create the client and connect to ElevenLabs Agents. A WebSocket close
    // from the ElevenLabs side ends the whole session.
    voiceAIClient = await ElevenLabs.createAgentsClient({
      xiApiKey: apiKeyRecord.value,
      agentId: agentIdRecord.value,
      onWebSocketClose: (event) => {
        Logger.write("===ElevenLabs.WebSocket.Close===");
        if (event) Logger.write(JSON.stringify(event));
        VoxEngine.terminate();
      },
    });

    // Bridge media both ways between the call and ElevenLabs Agents.
    VoxEngine.sendMediaBetween(call, voiceAIClient);

    // ---------------------- Event handlers -----------------------
    // Barge-in: drop any queued TTS audio as soon as the caller starts talking.
    voiceAIClient.addEventListener(ElevenLabs.AgentsEvents.Interruption, () => {
      Logger.write("===BARGE-IN: ElevenLabs.AgentsEvents.Interruption===");
      voiceAIClient.clearMediaBuffer();
    });

    // User transcript. The payload field name varies across event payload
    // versions, so probe the known candidates before dumping raw JSON.
    voiceAIClient.addEventListener(ElevenLabs.AgentsEvents.UserTranscript, (event) => {
      const payload = event?.data?.payload || event?.data || {};
      const text = payload.text || payload.transcript || payload.user_transcript;
      if (text) {
        Logger.write(`===USER=== ${text}`);
      } else {
        Logger.write("===USER_TRANSCRIPT===");
        Logger.write(JSON.stringify(payload));
      }
    });

    // Agent response — same defensive field probing as the user transcript.
    voiceAIClient.addEventListener(ElevenLabs.AgentsEvents.AgentResponse, (event) => {
      const payload = event?.data?.payload || event?.data || {};
      const text = payload.text || payload.response || payload.agent_response;
      if (text) {
        Logger.write(`===AGENT=== ${text}`);
      } else {
        Logger.write("===AGENT_RESPONSE===");
        Logger.write(JSON.stringify(payload));
      }
    });

    // Consolidated "log-only" handlers - key ElevenLabs/VoxEngine debugging events
    [
      ElevenLabs.AgentsEvents.ConversationInitiationMetadata,
      ElevenLabs.AgentsEvents.AgentResponseCorrection,
      ElevenLabs.AgentsEvents.ContextualUpdate,
      ElevenLabs.AgentsEvents.AgentToolResponse,
      ElevenLabs.AgentsEvents.VadScore,
      ElevenLabs.AgentsEvents.Ping,
      ElevenLabs.AgentsEvents.HTTPResponse,
      ElevenLabs.AgentsEvents.WebSocketError,
      ElevenLabs.AgentsEvents.ConnectorInformation,
      ElevenLabs.AgentsEvents.Unknown,
      ElevenLabs.Events.WebSocketMediaStarted,
      ElevenLabs.Events.WebSocketMediaEnded,
    ].forEach((eventName) => {
      voiceAIClient.addEventListener(eventName, (event) => {
        // Guard event access consistently with the data access below.
        Logger.write(`===${event?.name}===`);
        if (event?.data) Logger.write(JSON.stringify(event.data));
      });
    });
  } catch (error) {
    // Logger.write expects a string; passing the Error object directly would
    // lose the message/stack. Stringify explicitly, then tear everything down.
    Logger.write("===UNHANDLED_ERROR===");
    Logger.write(error?.stack || String(error));
    voiceAIClient?.close();
    VoxEngine.terminate();
  }
});