Example: Placing an outbound call

View as Markdown · Open in Claude

This example starts a VoxEngine session, places an outbound PSTN call, and bridges audio to OpenAI Realtime once the callee answers.

Jump to the Full VoxEngine scenario.

Prerequisites

  • Store your OpenAI API key in Voximplant ApplicationStorage under OPENAI_API_KEY.
  • Ensure outbound calling is enabled for your Voximplant application and that your caller ID is verified.

Outbound call parameters

The example expects destination and caller ID in customData (read via VoxEngine.customData()):

Custom data example
{"destination":"+15551234567","callerId":"+15557654321"}

Launch the routing rule

For quick testing, you can start this outbound scenario from the Voximplant Control Panel:

  1. Open your Voximplant application and go to the Routing tab.
  2. Select the routing rule that has this scenario attached.
  3. Click Run.
  4. Provide Custom data (max 200 bytes) with destination and callerId:
Custom data example
{"destination":"+15551234567","callerId":"+15557654321"}

For production, start the routing rule via Management API startScenarios (pass rule_id, and pass the same JSON string in script_custom_data): https://voximplant.com/docs/references/httpapi/scenarios#startscenarios

Connect call audio

After the callee answers, the example bridges audio both ways:

Connect call audio
VoxEngine.sendMediaBetween(call, voiceAIClient);

Barge-in

Barge-in
voiceAIClient.addEventListener(OpenAI.RealtimeAPIEvents.InputAudioBufferSpeechStarted, () => {
  voiceAIClient.clearMediaBuffer();
});

Notes

See the VoxEngine API Reference for more details.

Full VoxEngine scenario

voxengine-openai-place-outbound-call.js
1/**
2 * Voximplant + OpenAI Realtime API connector demo
3 * Scenario: place an outbound PSTN call and bridge it to OpenAI Realtime.
4 */
5
6require(Modules.OpenAI);
7require(Modules.ApplicationStorage);
8
// Persona instructions delivered to the model through session.update.
const SYSTEM_PROMPT = `
You are Voxi, a concise phone assistant for outbound calls.
Keep responses short and helpful.
`;

// Realtime session settings: audio-only output, with server-side voice
// activity detection so the model can be interrupted mid-response (barge-in).
const SESSION_CONFIG = {
  session: {
    type: "realtime",
    instructions: SYSTEM_PROMPT,
    voice: "alloy",
    output_modalities: ["audio"],
    turn_detection: {
      type: "server_vad",
      interrupt_response: true,
    },
  },
};

// Hard cap on call duration (two minutes, in milliseconds).
const MAX_CALL_MS = 2 * 60 * 1000;
/**
 * Session entry point. Reads {destination, callerId} from customData,
 * places an outbound PSTN call, and — once the callee answers — bridges
 * the call audio to an OpenAI Realtime client.
 *
 * Any failure, disconnect, WebSocket close, or the MAX_CALL_MS safety
 * timer ends the session via a single shared shutdown path.
 */
VoxEngine.addEventListener(AppEvents.Started, async () => {
  let call;
  let voiceAIClient;
  let hangupTimer;

  // Single cleanup path: stop the safety timer, close the AI socket
  // (no-op if it was never created), and end the VoxEngine session.
  const shutdown = () => {
    if (hangupTimer) clearTimeout(hangupTimer);
    voiceAIClient?.close();
    VoxEngine.terminate();
  };

  try {
    // Custom data example: {"destination":"+15551234567","callerId":"+15557654321"}
    const {destination, callerId} = JSON.parse(VoxEngine.customData());
    if (!destination || !callerId) {
      throw new Error("customData must contain both destination and callerId");
    }

    call = VoxEngine.callPSTN(destination, callerId);

    call.addEventListener(CallEvents.Failed, shutdown);
    call.addEventListener(CallEvents.Disconnected, shutdown);

    call.addEventListener(CallEvents.Connected, async () => {
      // BUG FIX: this listener is async, so the outer try/catch can NOT
      // observe its rejections. Without a local try/catch, a failure here
      // (e.g. missing OPENAI_API_KEY in ApplicationStorage) would leave
      // the answered call up with no audio and no cleanup.
      try {
        // Safety net: hard-cap the call duration.
        hangupTimer = setTimeout(() => {
          Logger.write("===HANGUP_TIMER===");
          call.hangup();
        }, MAX_CALL_MS);

        voiceAIClient = await OpenAI.createRealtimeAPIClient({
          apiKey: (await ApplicationStorage.get("OPENAI_API_KEY")).value,
          model: "gpt-realtime",
          onWebSocketClose: (event) => {
            Logger.write("===OpenAI.WebSocket.Close===");
            if (event) Logger.write(JSON.stringify(event));
            shutdown();
          },
        });

        // Push the session configuration as soon as OpenAI reports the
        // session exists.
        voiceAIClient.addEventListener(OpenAI.RealtimeAPIEvents.SessionCreated, () => {
          voiceAIClient.sessionUpdate(SESSION_CONFIG);
        });

        // Bridge audio only after the configuration is acknowledged,
        // then have the assistant speak first.
        voiceAIClient.addEventListener(OpenAI.RealtimeAPIEvents.SessionUpdated, () => {
          VoxEngine.sendMediaBetween(call, voiceAIClient);
          voiceAIClient.responseCreate({instructions: "Hello! This is Voxi. How can I help today?"});
        });

        // Barge-in: when the callee starts talking over the assistant,
        // drop any assistant audio already buffered for playback.
        voiceAIClient.addEventListener(
          OpenAI.RealtimeAPIEvents.InputAudioBufferSpeechStarted,
          () => {
            Logger.write("===BARGE-IN: OpenAI.InputAudioBufferSpeechStarted===");
            voiceAIClient.clearMediaBuffer();
          }
        );

        // Consolidated "log-only" handlers
        [
          OpenAI.RealtimeAPIEvents.ResponseCreated,
          OpenAI.RealtimeAPIEvents.ResponseDone,
          OpenAI.RealtimeAPIEvents.ResponseOutputAudioDone,
          OpenAI.RealtimeAPIEvents.ConversationItemInputAudioTranscriptionCompleted,
          OpenAI.RealtimeAPIEvents.ResponseOutputAudioTranscriptDone,
          OpenAI.RealtimeAPIEvents.ConnectorInformation,
          OpenAI.RealtimeAPIEvents.HTTPResponse,
          OpenAI.RealtimeAPIEvents.WebSocketError,
          OpenAI.RealtimeAPIEvents.Unknown,
          OpenAI.Events.WebSocketMediaStarted,
          OpenAI.Events.WebSocketMediaEnded,
        ].forEach((eventName) => {
          voiceAIClient.addEventListener(eventName, (event) => {
            Logger.write(`===${event.name}===`);
            if (event?.data) Logger.write(JSON.stringify(event.data));
          });
        });
      } catch (error) {
        Logger.write("===CONNECTED_HANDLER_ERROR===");
        Logger.write(String(error));
        shutdown();
      }
    });
  } catch (error) {
    Logger.write("===UNHANDLED_ERROR===");
    // BUG FIX: Logger.write expects a string; the original passed the
    // raw Error object.
    Logger.write(String(error));
    shutdown();
  }
});