diff --git a/realtime/index.html b/realtime/index.html
index df02b6e..fea3d8a 100644
--- a/realtime/index.html
+++ b/realtime/index.html
@@ -36,6 +36,10 @@
🤖 Voice Agent Demo |
Lets you chat with a simple voice agent. |
+
+ | ⏩ Speed Control Demo |
+ Adjust speech speed while talking to the agent. |
+
| 📓 Voice to Text Agent Demo |
Lets you chat with an agent that listens to voice and replies with text. |
diff --git a/realtime/speed/index.html b/realtime/speed/index.html
new file mode 100644
index 0000000..aca5897
--- /dev/null
+++ b/realtime/speed/index.html
@@ -0,0 +1,64 @@
+
+
+ Realtime Speed Demo
+
+
+
+
+
+
+
+
+ This demo shows how to use the OpenAI
+ Realtime API to create an interactive voice agent.
+ Use the slider to control the speaking speed.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ GitHub
+
+
+
+
+
diff --git a/realtime/speed/main.js b/realtime/speed/main.js
new file mode 100644
index 0000000..9a1c04b
--- /dev/null
+++ b/realtime/speed/main.js
@@ -0,0 +1,103 @@
+// Prefix that namespaces this demo's localStorage keys so its saved
+// preferences don't collide with the other realtime demos.
+const APP_PREFIX = "realtime/speed/";
+// Shorthand for document.querySelector.
+const $ = document.querySelector.bind(document);
+// Cached references to the page's controls.
+const apiKeyEl = $("#openai-api-key");
+const modelEl = $("#model");
+const voiceEl = $("#voice");
+const speedEl = $("#speed");
+const instructionsEl = $("#instructions");
+const startMicrophoneEl = $("#start-microphone");
+const stopEl = $("#stop");
+const statusEl = $("#status");
+// Inputs whose values are persisted to localStorage on change; the speed
+// slider is persisted separately (see initState / handleSpeedChange).
+const prefs = [apiKeyEl, modelEl, voiceEl, instructionsEl];
+
+// The active realtime Session, or null when not connected.
+// NOTE(review): Session is defined outside this file — confirm its API.
+let session = null;
+
+function initState() {
+ prefs.forEach(p => {
+ const fqid = p.id != "openai-api-key" ? APP_PREFIX + p.id : p.id;
+ const v = localStorage.getItem(fqid);
+ if (v) {
+ p.value = v;
+ }
+ p.addEventListener("change", () => {
+ localStorage.setItem(fqid, p.value);
+ });
+ });
+ const speedVal = localStorage.getItem(APP_PREFIX + "speed");
+ if (speedVal) {
+ speedEl.value = speedVal;
+ }
+ speedEl.addEventListener("change", handleSpeedChange);
+ updateState(false);
+}
+
+function updateState(started) {
+ statusEl.textContent = "";
+ prefs.forEach(p => p.disabled = started);
+ startMicrophoneEl.disabled = started;
+ stopEl.disabled = !started;
+}
+
+async function startMicrophone() {
+ if (!apiKeyEl.value) {
+ window.alert("Please enter your OpenAI API Key. You can obtain one from https://platform.openai.com/settings/organization/api-keys");
+ return;
+ }
+ const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
+ start(stream);
+}
+
+async function start(stream) {
+ updateState(true);
+ session = new Session(apiKeyEl.value);
+ session.onconnectionstatechange = state => statusEl.textContent = state;
+ session.ontrack = e => handleTrack(e);
+ session.onopen = e => handleOpen();
+ session.onmessage = e => handleMessage(e);
+ session.onerror = e => handleError(e);
+ const sessionConfig = {
+ model: modelEl.value,
+ voice: voiceEl.value,
+ speed: parseFloat(speedEl.value),
+ instructions: instructionsEl.value || undefined
+ }
+ await session.start(stream, sessionConfig);
+}
+
+function stop() {
+ updateState(false);
+ session.stop();
+ session = null;
+}
+
+function handleTrack(e) {
+ const audio = new Audio();
+ audio.srcObject = e.streams[0];
+ audio.play();
+}
+
+function handleSpeedChange() {
+ localStorage.setItem(APP_PREFIX + "speed", speedEl.value);
+ if (session) {
+ const updateMessage = { type: "session.update", session: { speed: parseFloat(speedEl.value) } };
+ session.sendMessage(updateMessage);
+ const createResponse = { type: "response.create" };
+ session.sendMessage(createResponse);
+ }
+}
+
+function handleOpen() {
+ const message = { type: "response.create" };
+ session.sendMessage(message);
+}
+
+// Log every incoming server event for debugging.
+function handleMessage(message) {
+  console.log("message", message);
+}
+
+function handleError(e) {
+ console.error(e);
+ stop();
+}
+
+initState();