The relay server for the OpenAI Realtime WebSocket API is ready

This commit is contained in:
RockYang
2024-10-17 16:46:41 +08:00
parent 2102e1afbb
commit fbd3478772
13 changed files with 184 additions and 263 deletions

View File

@@ -12,8 +12,6 @@ import {isChrome, isMobile} from "@/utils/libs";
import {showMessageInfo} from "@/utils/dialog";
import {useSharedStore} from "@/store/sharedata";
import {getUserToken} from "@/store/session";
import {router} from "@/router";
import {onBeforeRouteLeave, onBeforeRouteUpdate} from "vue-router";
const debounce = (fn, delay) => {
let timer
@@ -71,7 +69,7 @@ const connect = () => {
}
}
const clientId = getClientId()
const _socket = new WebSocket(host + `/api/ws?client_id=${clientId}&token=${getUserToken()}`);
const _socket = new WebSocket(host + `/api/ws?client_id=${clientId}`,["token",getUserToken()]);
_socket.addEventListener('open', () => {
console.log('WebSocket 已连接')
handler.value = setInterval(() => {

View File

@@ -9,7 +9,7 @@
margin: 0;
overflow: hidden;
font-family: Arial, sans-serif;
width 100vw
width 100%
.phone-container {
position: relative;
@@ -90,7 +90,7 @@
justify-content: space-between;
align-items: center;
padding: 0;
width 100vw
width 100%
.wave-container {
padding 3rem
@@ -148,13 +148,6 @@
flex-flow row
justify-content: space-between;
width 100%
.left {
margin-left 3rem
}
.right {
margin-right 3rem
}
}
.call-controls {

View File

@@ -54,6 +54,7 @@ import { WavRecorder, WavStreamPlayer } from '@/lib/wavtools/index.js';
import { instructions } from '@/utils/conversation_config.js';
import { WavRenderer } from '@/utils/wav_renderer';
import {showMessageError} from "@/utils/dialog";
import {getUserToken} from "@/store/session";
// eslint-disable-next-line no-unused-vars,no-undef
const props = defineProps({
@@ -73,7 +74,7 @@ const typeText = () => {
if (index < fullText.length) {
connectingText.value += fullText[index];
index++;
setTimeout(typeText, 300); // 300
setTimeout(typeText, 200); // 300
} else {
setTimeout(() => {
connectingText.value = '';
@@ -97,10 +98,18 @@ const animateVoice = () => {
const wavRecorder = ref(new WavRecorder({ sampleRate: 24000 }));
const wavStreamPlayer = ref(new WavStreamPlayer({ sampleRate: 24000 }));
let host = process.env.VUE_APP_WS_HOST
if (host === '') {
if (location.protocol === 'https:') {
host = 'wss://' + location.host;
} else {
host = 'ws://' + location.host;
}
}
const client = ref(
new RealtimeClient({
url: "ws://localhost:5678/api/realtime",
apiKey: "sk-Gc5cEzDzGQLIqxWA9d62089350F3454bB359C4A3Fa21B3E4",
url: `${host}/api/realtime`,
apiKey: getUserToken(),
dangerouslyAllowAPIKeyInBrowser: true,
})
);
@@ -115,41 +124,10 @@ client.value.updateSession({
// set voice wave canvas
const clientCanvasRef = ref(null);
const serverCanvasRef = ref(null);
// const eventsScrollRef = ref(null);
// const startTime = ref(new Date().toISOString());
// const items = ref([]);
// const realtimeEvents = ref([]);
// const expandedEvents = reactive({});
const isConnected = ref(false);
// const canPushToTalk = ref(true);
const isRecording = ref(false);
// const memoryKv = ref({});
// const coords = ref({ lat: 37.775593, lng: -122.418137 });
// const marker = ref(null);
// Methods
// const formatTime = (timestamp) => {
// const t0 = new Date(startTime.value).valueOf();
// const t1 = new Date(timestamp).valueOf();
// const delta = t1 - t0;
// const hs = Math.floor(delta / 10) % 100;
// const s = Math.floor(delta / 1000) % 60;
// const m = Math.floor(delta / 60_000) % 60;
// const pad = (n) => {
// let s = n + '';
// while (s.length < 2) {
// s = '0' + s;
// }
// return s;
// };
// return `${pad(m)}:${pad(s)}.${pad(hs)}`;
// };
const connect = async () => {
// startTime.value = new Date().toISOString();
// realtimeEvents.value = [];
// items.value = client.value.conversation.getItems();
if (isConnected.value) {
return
}
@@ -158,54 +136,54 @@ const connect = async () => {
await client.value.connect();
await wavRecorder.value.begin();
await wavStreamPlayer.value.connect();
isConnected.value = true;
console.log("对话连接成功!")
if (!client.value.isConnected()) {
return
}
isConnected.value = true;
client.value.sendUserMessageContent([
{
type: 'input_text',
text: '你好,我是老阳!',
},
]);
if (client.value.getTurnDetectionType() === 'server_vad') {
await wavRecorder.value.record((data) => client.value.appendInputAudio(data.mono));
}
} catch (e) {
showMessageError(e.message)
console.error(e)
}
};
// const disconnectConversation = async () => {
// isConnected.value = false;
// // realtimeEvents.value = [];
// // items.value = [];
// // memoryKv.value = {};
// // coords.value = { lat: 37.775593, lng: -122.418137 };
// // marker.value = null;
//
// client.value.disconnect();
// await wavRecorder.value.end();
// await wavStreamPlayer.value.interrupt();
// };
// const deleteConversationItem = async (id) => {
// client.value.deleteItem(id);
// };
//
const startRecording = async () => {
isRecording.value = true;
const trackSampleOffset = await wavStreamPlayer.value.interrupt();
if (trackSampleOffset?.trackId) {
const { trackId, offset } = trackSampleOffset;
client.value.cancelResponse(trackId, offset);
if (isRecording.value) {
return
}
isRecording.value = true;
try {
const trackSampleOffset = await wavStreamPlayer.value.interrupt();
if (trackSampleOffset?.trackId) {
const { trackId, offset } = trackSampleOffset;
client.value.cancelResponse(trackId, offset);
}
await wavRecorder.value.record((data) => client.value.appendInputAudio(data.mono));
} catch (e) {
console.error(e)
}
await wavRecorder.value.record((data) => client.value.appendInputAudio(data.mono));
};
//
const stopRecording = async () => {
isRecording.value = false;
await wavRecorder.value.pause();
client.value.createResponse();
try {
isRecording.value = false;
await wavRecorder.value.pause();
client.value.createResponse();
} catch (e) {
console.error(e)
}
};
// const changeTurnEndType = async (value) => {
@@ -220,16 +198,8 @@ const stopRecording = async () => {
// }
// canPushToTalk.value = value === 'none';
// };
//
// const toggleEventDetails = (eventId) => {
// if (expandedEvents[eventId]) {
// delete expandedEvents[eventId];
// } else {
// expandedEvents[eventId] = true;
// }
// };
// Lifecycle hooks and watchers
// WaveRecorder RealtimeClient
const initialize = async () => {
// Set up render loops for the visualization canvas
let isLoaded = true;
@@ -270,21 +240,15 @@ const initialize = async () => {
};
render();
// Set up client event listeners
client.value.on('realtime.event', (realtimeEvent) => {
// realtimeEvents.value = realtimeEvents.value.slice();
// const lastEvent = realtimeEvents.value[realtimeEvents.value.length - 1];
// if (lastEvent?.event.type === realtimeEvent.event.type) {
// lastEvent.count = (lastEvent.count || 0) + 1;
// realtimeEvents.value.splice(-1, 1, lastEvent);
// } else {
// realtimeEvents.value.push(realtimeEvent);
// }
// console.log(realtimeEvent)
client.value.on('error', (event) => {
showMessageError(event.error)
});
client.value.on('error', (event) => console.error(event));
client.value.on('realtime.event', (re) => {
if (re.event.type === 'error') {
showMessageError(re.event.error)
}
});
client.value.on('conversation.interrupted', async () => {
const trackSampleOffset = await wavStreamPlayer.value.interrupt();
@@ -295,41 +259,19 @@ const initialize = async () => {
});
client.value.on('conversation.updated', async ({ item, delta }) => {
console.log('item updated', item, delta)
// console.log('item updated', item, delta)
if (delta?.audio) {
wavStreamPlayer.value.add16BitPCM(delta.audio, item.id);
}
if (item.status === 'completed' && item.formatted.audio?.length) {
const wavFile = await WavRecorder.decode(
item.formatted.audio,
24000,
24000
);
item.formatted.file = wavFile;
}
});
}
// Watchers
// watch(realtimeEvents, () => {
// if (eventsScrollRef.value) {
// const eventsEl = eventsScrollRef.value;
// eventsEl.scrollTop = eventsEl.scrollHeight;
// }
// });
// watch(items, () => {
// const conversationEls = document.querySelectorAll('[data-conversation-content]');
// conversationEls.forEach((el) => {
// el.scrollTop = el.scrollHeight;
// });
// });
const voiceInterval = ref(null);
onMounted(() => {
initialize()
voiceInterval.value = setInterval(animateVoice, 500);
//
voiceInterval.value = setInterval(animateVoice, 200);
typeText()
});
@@ -338,16 +280,21 @@ onUnmounted(() => {
client.value.reset();
});
//
const hangUp = async () => {
emits('close')
isConnected.value = false;
client.value.disconnect();
await wavRecorder.value.end();
await wavStreamPlayer.value.interrupt();
try {
isConnected.value = false;
client.value.disconnect();
await wavRecorder.value.end();
await wavStreamPlayer.value.interrupt();
emits('close')
} catch (e) {
console.error(e)
}
};
// eslint-disable-next-line no-undef
defineExpose({ connect });
defineExpose({ connect,hangUp });
</script>
<style scoped lang="stylus">

View File

@@ -170,7 +170,7 @@ const wavRecorder = ref(new WavRecorder({ sampleRate: 24000 }));
const wavStreamPlayer = ref(new WavStreamPlayer({ sampleRate: 24000 }));
const client = ref(
new RealtimeClient({
url: "wss://api.geekai.pro/v1/realtime",
url: "ws://localhost:5678/api/realtime",
apiKey: "sk-Gc5cEzDzGQLIqxWA9d62089350F3454bB359C4A3Fa21B3E4",
dangerouslyAllowAPIKeyInBrowser: true,
})

View File

@@ -2,15 +2,15 @@
<div class="audio-chat-page">
<el-button style="margin: 20px" type="primary" size="large" @click="connect()">开始语音对话</el-button>
<el-dialog v-model="showDialog" title="语音通话" >
<realtime-conversation @close="showDialog = false" ref="conversationRef" :height="dialogHeight+'px'" />
<el-dialog v-model="showDialog" title="语音通话" :before-close="close">
<realtime-conversation @close="showDialog = false" ref="conversationRef" :height="dialogHeight+'px'" />
</el-dialog>
</div>
</template>
<script setup>
import {nextTick, ref} from 'vue';
import RealtimeConversation from "@/components/RealtimeConversation .vue";
import RealtimeConversation from "@/components/RealtimeConversation.vue";
const showDialog = ref(false);
const dialogHeight = ref(window.innerHeight - 75);
@@ -21,6 +21,10 @@ const connect = () => {
conversationRef.value.connect()
})
}
const close = () => {
showDialog.value = false;
conversationRef.value.hangUp()
}
</script>
<style scoped lang="stylus">

View File

@@ -15,7 +15,7 @@
import {ref} from 'vue';
import { RealtimeClient } from '@openai/realtime-api-beta';
import Calling from "@/components/Calling.vue";
import Conversation from "@/components/RealtimeConversation .vue";
import Conversation from "@/components/RealtimeConversation.vue";
import {playPCM16} from "@/utils/wav_player";
import {showMessageError} from "@/utils/dialog";

View File

@@ -79,7 +79,7 @@
<el-input v-model="item.name" autocomplete="off"/>
</el-form-item>
<el-form-item label="类型" prop="type">
<el-select v-model="item.type" placeholder="请选择类型">
<el-select v-model="item.type" placeholder="请选择类型" @change="changeType">
<el-option v-for="item in types" :value="item.value" :label="item.label" :key="item.value">{{
item.label
}}
@@ -91,13 +91,13 @@
</el-form-item>
<el-form-item label="API URL" prop="api_url">
<el-input v-model="item.api_url" autocomplete="off"
placeholder="只填 BASE URL 即可https://api.openai.com"/>
placeholder="只填 BASE URL 即可https://api.openai.com 或者 wss://api.openai.com"/>
</el-form-item>
<el-form-item label="代理地址:" prop="proxy_url">
<el-input v-model="item.proxy_url" autocomplete="off"/>
<div class="info">如果想要通过代理来访问 API请填写代理地址http://127.0.0.1:7890</div>
</el-form-item>
<!-- <el-form-item label="代理地址:" prop="proxy_url">-->
<!-- <el-input v-model="item.proxy_url" autocomplete="off"/>-->
<!-- <div class="info">如果想要通过代理来访问 API请填写代理地址http://127.0.0.1:7890</div>-->
<!-- </el-form-item>-->
<el-form-item label="启用状态:" prop="enable">
<el-switch v-model="item.enabled"/>
@@ -125,7 +125,9 @@ import ClipboardJS from "clipboard";
// 变量定义
const items = ref([])
const query = ref({type: ''})
const item = ref({})
const item = ref({
enabled: true, api_url: ""
})
const showDialog = ref(false)
const rules = reactive({
name: [{required: true, message: '请输入名称', trigger: 'change',}],
@@ -143,9 +145,9 @@ const types = ref([
{label: "DALL-E", value:"dalle"},
{label: "Suno文生歌", value:"suno"},
{label: "Luma视频", value:"luma"},
{label: "Realtime API", value:"realtime"},
])
const isEdit = ref(false)
const clipboard = ref(null)
onMounted(() => {
clipboard.value = new ClipboardJS('.copy-key');
@@ -164,6 +166,18 @@ onUnmounted(() => {
clipboard.value.destroy()
})
const changeType = (event) => {
if (isEdit.value) {
return
}
if (event === 'realtime') {
item.value.api_url = "wss://api.geekai.pro"
} else {
item.value.api_url = "https://api.geekai.pro"
}
}
const getTypeName = (type) => {
for (let v of types.value) {
if (v.value === type) {
@@ -194,13 +208,14 @@ const fetchData = () => {
const add = function () {
showDialog.value = true
title.value = "新增 API KEY"
item.value = {enabled: true,api_url: "https://api.geekai.pro"}
isEdit.value = false
}
const edit = function (row) {
showDialog.value = true
title.value = "修改 API KEY"
item.value = row
isEdit.value = true
}
const save = function () {