mirror of
				https://github.com/yangjian102621/geekai.git
				synced 2025-11-04 16:23:42 +08:00 
			
		
		
		
	add websocket relayer for openai realtime api
This commit is contained in:
		@@ -221,6 +221,7 @@ func needLogin(c *gin.Context) bool {
 | 
			
		||||
		c.Request.URL.Path == "/api/suno/detail" ||
 | 
			
		||||
		c.Request.URL.Path == "/api/suno/play" ||
 | 
			
		||||
		c.Request.URL.Path == "/api/download" ||
 | 
			
		||||
		c.Request.URL.Path == "/api/realtime" ||
 | 
			
		||||
		strings.HasPrefix(c.Request.URL.Path, "/api/test") ||
 | 
			
		||||
		strings.HasPrefix(c.Request.URL.Path, "/api/payment/notify/") ||
 | 
			
		||||
		strings.HasPrefix(c.Request.URL.Path, "/api/user/clogin") ||
 | 
			
		||||
 
 | 
			
		||||
							
								
								
									
										173
									
								
								api/handler/realtime_handler.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										173
									
								
								api/handler/realtime_handler.go
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,173 @@
 | 
			
		||||
package handler
 | 
			
		||||
 | 
			
		||||
import (
 | 
			
		||||
	"github.com/gin-gonic/gin"
 | 
			
		||||
	"github.com/gorilla/websocket"
 | 
			
		||||
	"log"
 | 
			
		||||
	"net/http"
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
// * +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 | 
			
		||||
// * Copyright 2023 The Geek-AI Authors. All rights reserved.
 | 
			
		||||
// * Use of this source code is governed by a Apache-2.0 license
 | 
			
		||||
// * that can be found in the LICENSE file.
 | 
			
		||||
// * @Author yangjian102621@163.com
 | 
			
		||||
// * +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 | 
			
		||||
 | 
			
		||||
// RealtimeHandler relays WebSocket traffic between a browser client and the
// OpenAI realtime API backend (see Connection).
type RealtimeHandler struct {
	BaseHandler
}
 | 
			
		||||
 | 
			
		||||
// NewRealtimeHandler returns a RealtimeHandler with a zero-value BaseHandler.
func NewRealtimeHandler() *RealtimeHandler {
	return &RealtimeHandler{}
}
 | 
			
		||||
 | 
			
		||||
func (h *RealtimeHandler) Connection(c *gin.Context) {
 | 
			
		||||
	// 获取客户端请求中指定的子协议
 | 
			
		||||
	clientProtocols := c.GetHeader("Sec-WebSocket-Protocol")
 | 
			
		||||
	logger.Info(clientProtocols)
 | 
			
		||||
 | 
			
		||||
	// 升级HTTP连接为WebSocket,并传入客户端请求的子协议
 | 
			
		||||
	upgrader := websocket.Upgrader{
 | 
			
		||||
		CheckOrigin:  func(r *http.Request) bool { return true },
 | 
			
		||||
		Subprotocols: []string{clientProtocols},
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	ws, err := upgrader.Upgrade(c.Writer, c.Request, nil)
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		logger.Error(err)
 | 
			
		||||
		c.Abort()
 | 
			
		||||
		return
 | 
			
		||||
	}
 | 
			
		||||
	defer ws.Close()
 | 
			
		||||
 | 
			
		||||
	// 连接到真实的后端服务器,传入相同的子协议
 | 
			
		||||
	headers := http.Header{}
 | 
			
		||||
	if clientProtocols != "" {
 | 
			
		||||
		headers.Set("Sec-WebSocket-Protocol", clientProtocols)
 | 
			
		||||
	}
 | 
			
		||||
	for key, values := range headers {
 | 
			
		||||
		for _, value := range values {
 | 
			
		||||
			logger.Infof("%s: %s", key, value)
 | 
			
		||||
		}
 | 
			
		||||
	}
 | 
			
		||||
	backendConn, _, err := websocket.DefaultDialer.Dial("wss://api.geekai.pro/v1/realtime?model=gpt-4o-realtime-preview-2024-10-01", headers)
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		log.Printf("Failed to connect to backend: %v", err)
 | 
			
		||||
		return
 | 
			
		||||
	}
 | 
			
		||||
	defer backendConn.Close()
 | 
			
		||||
 | 
			
		||||
	//logger.Info(ws.Subprotocol(), ",", backendConn.Subprotocol())
 | 
			
		||||
	//// 确保协议一致性,如果失败返回
 | 
			
		||||
	//if ws.Subprotocol() != backendConn.Subprotocol() {
 | 
			
		||||
	//	log.Println("Subprotocol mismatch")
 | 
			
		||||
	//	return
 | 
			
		||||
	//}
 | 
			
		||||
 | 
			
		||||
	// 开始双向转发
 | 
			
		||||
	errorChan := make(chan error, 2)
 | 
			
		||||
	go relay(ws, backendConn, errorChan)
 | 
			
		||||
	go relay(backendConn, ws, errorChan)
 | 
			
		||||
 | 
			
		||||
	// 等待其中一个连接关闭
 | 
			
		||||
	<-errorChan
 | 
			
		||||
	log.Println("Relay ended")
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func relay(src, dst *websocket.Conn, errorChan chan error) {
 | 
			
		||||
	for {
 | 
			
		||||
		messageType, message, err := src.ReadMessage()
 | 
			
		||||
		if err != nil {
 | 
			
		||||
			errorChan <- err
 | 
			
		||||
			return
 | 
			
		||||
		}
 | 
			
		||||
		err = dst.WriteMessage(messageType, message)
 | 
			
		||||
		if err != nil {
 | 
			
		||||
			errorChan <- err
 | 
			
		||||
			return
 | 
			
		||||
		}
 | 
			
		||||
	}
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
//func (h *RealtimeHandler) handleMessage(client *RealtimeClient, message []byte) {
 | 
			
		||||
//	var event Event
 | 
			
		||||
//	err := json.Unmarshal(message, &event)
 | 
			
		||||
//	if err != nil {
 | 
			
		||||
//		logger.Infof("Error parsing event from client: %s", message)
 | 
			
		||||
//		return
 | 
			
		||||
//	}
 | 
			
		||||
//	logger.Infof("Relaying %q to OpenAI", event.Type)
 | 
			
		||||
//	client.Send(event)
 | 
			
		||||
//}
 | 
			
		||||
//
 | 
			
		||||
//func relay(src, dst *websocket.Conn, errorChan chan error) {
 | 
			
		||||
//	for {
 | 
			
		||||
//		messageType, message, err := src.ReadMessage()
 | 
			
		||||
//		if err != nil {
 | 
			
		||||
//			errorChan <- err
 | 
			
		||||
//			return
 | 
			
		||||
//		}
 | 
			
		||||
//		err = dst.WriteMessage(messageType, message)
 | 
			
		||||
//		if err != nil {
 | 
			
		||||
//			errorChan <- err
 | 
			
		||||
//			return
 | 
			
		||||
//		}
 | 
			
		||||
//	}
 | 
			
		||||
//}
 | 
			
		||||
//
 | 
			
		||||
//func NewRealtimeClient(apiKey string) *RealtimeClient {
 | 
			
		||||
//	return &RealtimeClient{
 | 
			
		||||
//		APIKey: apiKey,
 | 
			
		||||
//		send:   make(chan Event, 100),
 | 
			
		||||
//	}
 | 
			
		||||
//}
 | 
			
		||||
//
 | 
			
		||||
//func (rc *RealtimeClient) Connect() error {
 | 
			
		||||
//	u := url.URL{Scheme: "wss", Host: "api.geekai.pro", Path: "v1/realtime", RawQuery: "model=gpt-4o-realtime-preview-2024-10-01"}
 | 
			
		||||
//	c, _, err := websocket.DefaultDialer.Dial(u.String(), nil)
 | 
			
		||||
//	if err != nil {
 | 
			
		||||
//		return err
 | 
			
		||||
//	}
 | 
			
		||||
//	rc.conn = c
 | 
			
		||||
//
 | 
			
		||||
//	go rc.readPump()
 | 
			
		||||
//	go rc.writePump()
 | 
			
		||||
//
 | 
			
		||||
//	return nil
 | 
			
		||||
//}
 | 
			
		||||
//
 | 
			
		||||
//func (rc *RealtimeClient) readPump() {
 | 
			
		||||
//	defer rc.conn.Close()
 | 
			
		||||
//	for {
 | 
			
		||||
//		_, message, err := rc.conn.ReadMessage()
 | 
			
		||||
//		if err != nil {
 | 
			
		||||
//			log.Println("read error:", err)
 | 
			
		||||
//			return
 | 
			
		||||
//		}
 | 
			
		||||
//		var event Event
 | 
			
		||||
//		err = json.Unmarshal(message, &event)
 | 
			
		||||
//		if err != nil {
 | 
			
		||||
//			log.Println("parse error:", err)
 | 
			
		||||
//			continue
 | 
			
		||||
//		}
 | 
			
		||||
//		rc.send <- event
 | 
			
		||||
//	}
 | 
			
		||||
//}
 | 
			
		||||
//
 | 
			
		||||
//func (rc *RealtimeClient) writePump() {
 | 
			
		||||
//	defer rc.conn.Close()
 | 
			
		||||
//	for event := range rc.send {
 | 
			
		||||
//		err := rc.conn.WriteJSON(event)
 | 
			
		||||
//		if err != nil {
 | 
			
		||||
//			log.Println("write error:", err)
 | 
			
		||||
//			return
 | 
			
		||||
//		}
 | 
			
		||||
//	}
 | 
			
		||||
//}
 | 
			
		||||
//
 | 
			
		||||
//func (rc *RealtimeClient) Send(event Event) {
 | 
			
		||||
//	rc.send <- event
 | 
			
		||||
//}
 | 
			
		||||
@@ -554,6 +554,10 @@ func main() {
 | 
			
		||||
			group.POST("/list/luma", h.LumaList)
 | 
			
		||||
			group.GET("/remove", h.Remove)
 | 
			
		||||
		}),
 | 
			
		||||
		fx.Provide(handler.NewRealtimeHandler),
 | 
			
		||||
		fx.Invoke(func(s *core.AppServer, h *handler.RealtimeHandler) {
 | 
			
		||||
			s.Engine.Any("/api/realtime", h.Connection)
 | 
			
		||||
		}),
 | 
			
		||||
	)
 | 
			
		||||
	// 启动应用程序
 | 
			
		||||
	go func() {
 | 
			
		||||
 
 | 
			
		||||
							
								
								
									
										1
									
								
								database/update-v4.1.6.sql
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										1
									
								
								database/update-v4.1.6.sql
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1 @@
 | 
			
		||||
ALTER TABLE `chatgpt_chat_models` CHANGE `value` `value` VARCHAR(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NOT NULL COMMENT '模型值';
 | 
			
		||||
							
								
								
									
										196
									
								
								web/src/assets/css/realtime.styl
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										196
									
								
								web/src/assets/css/realtime.styl
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,196 @@
 | 
			
		||||
.realtime-conversation {
 | 
			
		||||
  /********************** connection ****************************/
 | 
			
		||||
  .connection-container {
 | 
			
		||||
    background-color: #000;
 | 
			
		||||
    display: flex;
 | 
			
		||||
    flex-direction: column;
 | 
			
		||||
    justify-content: center;
 | 
			
		||||
    align-items: center;
 | 
			
		||||
    margin: 0;
 | 
			
		||||
    overflow: hidden;
 | 
			
		||||
    font-family: Arial, sans-serif;
 | 
			
		||||
    width 100vw
 | 
			
		||||
 | 
			
		||||
    .phone-container {
 | 
			
		||||
      position: relative;
 | 
			
		||||
      width: 200px;
 | 
			
		||||
      height: 200px;
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    .phone {
 | 
			
		||||
      position: absolute;
 | 
			
		||||
      top: 50%;
 | 
			
		||||
      left: 50%;
 | 
			
		||||
      transform: translate(-50%, -50%);
 | 
			
		||||
      width: 60px;
 | 
			
		||||
      height: 60px;
 | 
			
		||||
      background-color: #00ffcc;
 | 
			
		||||
      mask: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'%3E%3Cpath d='M20 15.5c-1.25 0-2.45-.2-3.57-.57a1.02 1.02 0 0 0-1.02.24l-2.2 2.2a15.074 15.074 0 0 1-6.59-6.59l2.2-2.2c.27-.27.35-.68.24-1.02a11.36 11.36 0 0 1-.57-3.57c0-.55-.45-1-1-1H4c-.55 0-1 .45-1 1 0 9.39 7.61 17 17 17 .55 0 1-.45 1-1v-3.5c0-.55-.45-1-1-1zM5.03 5h1.5c.07.89.22 1.76.46 2.59l-1.2 1.2c-.41-1.2-.67-2.47-.76-3.79zM19 18.97c-1.32-.09-2.59-.35-3.8-.75l1.2-1.2c.85.24 1.72.39 2.6.45v1.5z'/%3E%3C/svg%3E") no-repeat 50% 50%;
 | 
			
		||||
      mask-size: cover;
 | 
			
		||||
      -webkit-mask: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'%3E%3Cpath d='M20 15.5c-1.25 0-2.45-.2-3.57-.57a1.02 1.02 0 0 0-1.02.24l-2.2 2.2a15.074 15.074 0 0 1-6.59-6.59l2.2-2.2c.27-.27.35-.68.24-1.02a11.36 11.36 0 0 1-.57-3.57c0-.55-.45-1-1-1H4c-.55 0-1 .45-1 1 0 9.39 7.61 17 17 17 .55 0 1-.45 1-1v-3.5c0-.55-.45-1-1-1zM5.03 5h1.5c.07.89.22 1.76.46 2.59l-1.2 1.2c-.41-1.2-.67-2.47-.76-3.79zM19 18.97c-1.32-.09-2.59-.35-3.8-.75l1.2-1.2c.85.24 1.72.39 2.6.45v1.5z'/%3E%3C/svg%3E") no-repeat 50% 50%;
 | 
			
		||||
      -webkit-mask-size: cover;
 | 
			
		||||
      animation: shake 0.5s ease-in-out infinite;
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    .signal {
 | 
			
		||||
      position: absolute;
 | 
			
		||||
      top: 50%;
 | 
			
		||||
      left: 50%;
 | 
			
		||||
      transform: translate(-50%, -50%);
 | 
			
		||||
      width: 100px;
 | 
			
		||||
      height: 100px;
 | 
			
		||||
      border: 2px dashed #00ffcc;
 | 
			
		||||
      border-radius: 50%;
 | 
			
		||||
      opacity: 0;
 | 
			
		||||
      animation: signal 2s linear infinite;
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    .signal:nth-child(2) {
 | 
			
		||||
      animation-delay: 0.5s;
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    .signal:nth-child(3) {
 | 
			
		||||
      animation-delay: 1s;
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    .status-text {
 | 
			
		||||
      color: #00ffcc;
 | 
			
		||||
      font-size: 18px;
 | 
			
		||||
      margin-top: 20px;
 | 
			
		||||
      height: 1.2em;
 | 
			
		||||
      overflow: hidden;
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    @keyframes shake {
 | 
			
		||||
      0%, 100% { transform: translate(-50%, -50%) rotate(0deg); }
 | 
			
		||||
      25% { transform: translate(-52%, -48%) rotate(-5deg); }
 | 
			
		||||
      75% { transform: translate(-48%, -52%) rotate(5deg); }
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    @keyframes signal {
 | 
			
		||||
      0% {
 | 
			
		||||
        width: 60px;
 | 
			
		||||
        height: 60px;
 | 
			
		||||
        opacity: 1;
 | 
			
		||||
      }
 | 
			
		||||
      100% {
 | 
			
		||||
        width: 200px;
 | 
			
		||||
        height: 200px;
 | 
			
		||||
        opacity: 0;
 | 
			
		||||
      }
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
  /*********** end of connection ************/
 | 
			
		||||
 | 
			
		||||
  .conversation-container {
 | 
			
		||||
    background: linear-gradient(to right, #2c3e50, #4a5568, #6b46c1);
 | 
			
		||||
    display: flex;
 | 
			
		||||
    height 100%
 | 
			
		||||
    flex-direction: column;
 | 
			
		||||
    justify-content: space-between;
 | 
			
		||||
    align-items: center;
 | 
			
		||||
    padding: 0;
 | 
			
		||||
    width 100vw
 | 
			
		||||
 | 
			
		||||
    .wave-container {
 | 
			
		||||
      padding 3rem
 | 
			
		||||
      .wave-animation {
 | 
			
		||||
        display: flex;
 | 
			
		||||
        justify-content: center;
 | 
			
		||||
        align-items: center;
 | 
			
		||||
        gap: 10px;
 | 
			
		||||
 | 
			
		||||
        .wave-ellipse {
 | 
			
		||||
          width: 40px;
 | 
			
		||||
          height: 40px;
 | 
			
		||||
          background-color: white;
 | 
			
		||||
          border-radius: 20px;
 | 
			
		||||
          animation: wave 0.8s infinite ease-in-out;
 | 
			
		||||
        }
 | 
			
		||||
 | 
			
		||||
        .wave-ellipse:nth-child(odd) {
 | 
			
		||||
          height: 60px;
 | 
			
		||||
        }
 | 
			
		||||
 | 
			
		||||
        .wave-ellipse:nth-child(even) {
 | 
			
		||||
          height: 80px;
 | 
			
		||||
        }
 | 
			
		||||
      }
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    @keyframes wave {
 | 
			
		||||
      0%, 100% {
 | 
			
		||||
        transform: scaleY(0.8);
 | 
			
		||||
      }
 | 
			
		||||
      50% {
 | 
			
		||||
        transform: scaleY(1.2);
 | 
			
		||||
      }
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    .wave-ellipse:nth-child(2) {
 | 
			
		||||
      animation-delay: 0.1s;
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    .wave-ellipse:nth-child(3) {
 | 
			
		||||
      animation-delay: 0.2s;
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    .wave-ellipse:nth-child(4) {
 | 
			
		||||
      animation-delay: 0.3s;
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    .wave-ellipse:nth-child(5) {
 | 
			
		||||
      animation-delay: 0.4s;
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    .voice-indicators {
 | 
			
		||||
      display flex
 | 
			
		||||
      flex-flow row
 | 
			
		||||
      justify-content: space-between;
 | 
			
		||||
      width 100%
 | 
			
		||||
 | 
			
		||||
      .left {
 | 
			
		||||
        margin-left 3rem
 | 
			
		||||
      }
 | 
			
		||||
      .right {
 | 
			
		||||
        margin-right 3rem
 | 
			
		||||
      }
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    .call-controls {
 | 
			
		||||
      display: flex;
 | 
			
		||||
      justify-content: center;
 | 
			
		||||
      gap: 3rem;
 | 
			
		||||
      padding 3rem
 | 
			
		||||
 | 
			
		||||
      .call-button {
 | 
			
		||||
        width: 60px;
 | 
			
		||||
        height: 60px;
 | 
			
		||||
        border-radius: 50%;
 | 
			
		||||
        border: none;
 | 
			
		||||
        display: flex;
 | 
			
		||||
        justify-content: center;
 | 
			
		||||
        align-items: center;
 | 
			
		||||
        font-size: 24px;
 | 
			
		||||
        color: white;
 | 
			
		||||
        cursor: pointer;
 | 
			
		||||
 | 
			
		||||
        .iconfont {
 | 
			
		||||
          font-size 24px
 | 
			
		||||
        }
 | 
			
		||||
      }
 | 
			
		||||
      .hangup {
 | 
			
		||||
        background-color: #e74c3c;
 | 
			
		||||
      }
 | 
			
		||||
 | 
			
		||||
      .answer {
 | 
			
		||||
        background-color: #2ecc71;
 | 
			
		||||
      }
 | 
			
		||||
 | 
			
		||||
      .icon {
 | 
			
		||||
        font-size: 28px;
 | 
			
		||||
      }
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
@@ -1,161 +0,0 @@
 | 
			
		||||
<template>
 | 
			
		||||
  <!--语音通话组件-->
 | 
			
		||||
  <div class="video-call-container" :style="{height: height}">
 | 
			
		||||
    <div class="wave-container">
 | 
			
		||||
      <div class="wave-animation">
 | 
			
		||||
        <div v-for="i in 5" :key="i" class="wave-ellipse"></div>
 | 
			
		||||
      </div>
 | 
			
		||||
    </div>
 | 
			
		||||
    <!-- 其余部分保持不变 -->
 | 
			
		||||
    <div class="voice-indicators">
 | 
			
		||||
      <div class="voice-indicator left">
 | 
			
		||||
        <canvas ref="canvasClientRef" width="600" height="200"></canvas>
 | 
			
		||||
      </div>
 | 
			
		||||
      <div class="voice-indicator right">
 | 
			
		||||
        <canvas ref="canvasServerRef" width="600" height="200"></canvas>
 | 
			
		||||
      </div>
 | 
			
		||||
    </div>
 | 
			
		||||
    <div class="call-controls">
 | 
			
		||||
      <button class="call-button hangup" @click="hangUp">
 | 
			
		||||
        <i class="iconfont icon-hung-up"></i>
 | 
			
		||||
      </button>
 | 
			
		||||
    </div>
 | 
			
		||||
  </div>
 | 
			
		||||
</template>
 | 
			
		||||
 | 
			
		||||
<script setup>
 | 
			
		||||
import {onMounted, onUnmounted, ref} from "vue";
 | 
			
		||||
 | 
			
		||||
const leftVoiceActive = ref(false);
 | 
			
		||||
const rightVoiceActive = ref(false);
 | 
			
		||||
const props = defineProps({
 | 
			
		||||
  height: {
 | 
			
		||||
    type: String,
 | 
			
		||||
    default: '100vh'
 | 
			
		||||
  }
 | 
			
		||||
})
 | 
			
		||||
const emits = defineEmits(['hangUp']);
 | 
			
		||||
 | 
			
		||||
const animateVoice = () => {
 | 
			
		||||
  leftVoiceActive.value = Math.random() > 0.5;
 | 
			
		||||
  rightVoiceActive.value = Math.random() > 0.5;
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
let voiceInterval;
 | 
			
		||||
 | 
			
		||||
onMounted(() => {
 | 
			
		||||
  voiceInterval = setInterval(animateVoice, 500);
 | 
			
		||||
});
 | 
			
		||||
 | 
			
		||||
onUnmounted(() => {
 | 
			
		||||
  clearInterval(voiceInterval);
 | 
			
		||||
});
 | 
			
		||||
 | 
			
		||||
const hangUp = () => {
 | 
			
		||||
  console.log('Call hung up');
 | 
			
		||||
  emits('hangUp')
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
</script>
 | 
			
		||||
 | 
			
		||||
<style scoped lang="stylus">
 | 
			
		||||
 | 
			
		||||
.video-call-container {
 | 
			
		||||
  background: linear-gradient(to right, #2c3e50, #4a5568, #6b46c1);
 | 
			
		||||
  display: flex;
 | 
			
		||||
  flex-direction: column;
 | 
			
		||||
  justify-content: space-between;
 | 
			
		||||
  align-items: center;
 | 
			
		||||
  padding: 0;
 | 
			
		||||
  width 100vw
 | 
			
		||||
 | 
			
		||||
  .wave-container {
 | 
			
		||||
    padding 3rem
 | 
			
		||||
    .wave-animation {
 | 
			
		||||
      display: flex;
 | 
			
		||||
      justify-content: center;
 | 
			
		||||
      align-items: center;
 | 
			
		||||
      gap: 10px;
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
  .wave-ellipse {
 | 
			
		||||
    width: 40px;
 | 
			
		||||
    height: 40px;
 | 
			
		||||
    background-color: white;
 | 
			
		||||
    border-radius: 20px;
 | 
			
		||||
    animation: wave 0.8s infinite ease-in-out;
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  .wave-ellipse:nth-child(odd) {
 | 
			
		||||
    height: 60px;
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  .wave-ellipse:nth-child(even) {
 | 
			
		||||
    height: 80px;
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  @keyframes wave {
 | 
			
		||||
    0%, 100% {
 | 
			
		||||
      transform: scaleY(0.8);
 | 
			
		||||
    }
 | 
			
		||||
    50% {
 | 
			
		||||
      transform: scaleY(1.2);
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  .wave-ellipse:nth-child(2) {
 | 
			
		||||
    animation-delay: 0.1s;
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  .wave-ellipse:nth-child(3) {
 | 
			
		||||
    animation-delay: 0.2s;
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  .wave-ellipse:nth-child(4) {
 | 
			
		||||
    animation-delay: 0.3s;
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  .wave-ellipse:nth-child(5) {
 | 
			
		||||
    animation-delay: 0.4s;
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  .call-controls {
 | 
			
		||||
    display: flex;
 | 
			
		||||
    justify-content: center;
 | 
			
		||||
    gap: 3rem;
 | 
			
		||||
    padding 3rem
 | 
			
		||||
 | 
			
		||||
    .call-button {
 | 
			
		||||
      width: 60px;
 | 
			
		||||
      height: 60px;
 | 
			
		||||
      border-radius: 50%;
 | 
			
		||||
      border: none;
 | 
			
		||||
      display: flex;
 | 
			
		||||
      justify-content: center;
 | 
			
		||||
      align-items: center;
 | 
			
		||||
      font-size: 24px;
 | 
			
		||||
      color: white;
 | 
			
		||||
      cursor: pointer;
 | 
			
		||||
 | 
			
		||||
      .iconfont {
 | 
			
		||||
        font-size 24px
 | 
			
		||||
      }
 | 
			
		||||
    }
 | 
			
		||||
    .hangup {
 | 
			
		||||
      background-color: #e74c3c;
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    .answer {
 | 
			
		||||
      background-color: #2ecc71;
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    .icon {
 | 
			
		||||
      font-size: 28px;
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
</style>
 | 
			
		||||
							
								
								
									
										357
									
								
								web/src/components/RealtimeConversation .vue
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										357
									
								
								web/src/components/RealtimeConversation .vue
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,357 @@
 | 
			
		||||
<template>
 | 
			
		||||
  <el-container class="realtime-conversation" :style="{height: height}">
 | 
			
		||||
    <!-- connection animation -->
 | 
			
		||||
    <el-container class="connection-container" v-if="!isConnected">
 | 
			
		||||
      <div class="phone-container">
 | 
			
		||||
        <div class="signal"></div>
 | 
			
		||||
        <div class="signal"></div>
 | 
			
		||||
        <div class="signal"></div>
 | 
			
		||||
        <div class="phone"></div>
 | 
			
		||||
      </div>
 | 
			
		||||
      <div class="status-text">{{ connectingText }}</div>
 | 
			
		||||
    </el-container>
 | 
			
		||||
 | 
			
		||||
    <!-- conversation body -->
 | 
			
		||||
    <div class="conversation-container" v-else>
 | 
			
		||||
      <div class="wave-container">
 | 
			
		||||
        <div class="wave-animation">
 | 
			
		||||
          <div v-for="i in 5" :key="i" class="wave-ellipse"></div>
 | 
			
		||||
        </div>
 | 
			
		||||
      </div>
 | 
			
		||||
      <!-- 其余部分保持不变 -->
 | 
			
		||||
      <div class="voice-indicators">
 | 
			
		||||
        <div class="voice-indicator left">
 | 
			
		||||
          <canvas ref="clientCanvasRef"></canvas>
 | 
			
		||||
        </div>
 | 
			
		||||
        <div class="voice-indicator right">
 | 
			
		||||
          <canvas ref="serverCanvasRef"></canvas>
 | 
			
		||||
        </div>
 | 
			
		||||
      </div>
 | 
			
		||||
      <div class="call-controls">
 | 
			
		||||
        <el-tooltip content="长按发送语音" placement="top" effect="light">
 | 
			
		||||
          <ripple-button>
 | 
			
		||||
            <button class="call-button answer" @mousedown="startRecording" @mouseup="stopRecording">
 | 
			
		||||
              <i class="iconfont icon-mic-bold"></i>
 | 
			
		||||
            </button>
 | 
			
		||||
          </ripple-button>
 | 
			
		||||
        </el-tooltip>
 | 
			
		||||
        <el-tooltip content="结束通话" placement="top" effect="light">
 | 
			
		||||
          <button class="call-button hangup" @click="hangUp">
 | 
			
		||||
            <i class="iconfont icon-hung-up"></i>
 | 
			
		||||
          </button>
 | 
			
		||||
        </el-tooltip>
 | 
			
		||||
      </div>
 | 
			
		||||
    </div>
 | 
			
		||||
  </el-container>
 | 
			
		||||
 | 
			
		||||
</template>
 | 
			
		||||
 | 
			
		||||
<script setup>
 | 
			
		||||
import RippleButton from "@/components/ui/RippleButton.vue";
 | 
			
		||||
import { ref, onMounted, onUnmounted } from 'vue';
 | 
			
		||||
import { RealtimeClient } from '@openai/realtime-api-beta';
 | 
			
		||||
import { WavRecorder, WavStreamPlayer } from '@/lib/wavtools/index.js';
 | 
			
		||||
import { instructions } from '@/utils/conversation_config.js';
 | 
			
		||||
import { WavRenderer } from '@/utils/wav_renderer';
 | 
			
		||||
import {showMessageError} from "@/utils/dialog";
 | 
			
		||||
 | 
			
		||||
// eslint-disable-next-line no-unused-vars,no-undef
 | 
			
		||||
const props = defineProps({
 | 
			
		||||
  height: {
 | 
			
		||||
    type: String,
 | 
			
		||||
    default: '100vh'
 | 
			
		||||
  }
 | 
			
		||||
})
 | 
			
		||||
// eslint-disable-next-line no-undef
 | 
			
		||||
const emits = defineEmits(['close']);
 | 
			
		||||
 | 
			
		||||
/********************** connection animation code *************************/
 | 
			
		||||
const fullText = "正在接通中...";
 | 
			
		||||
const connectingText = ref("")
 | 
			
		||||
let index = 0;
 | 
			
		||||
const typeText = () => {
 | 
			
		||||
  if (index < fullText.length) {
 | 
			
		||||
    connectingText.value += fullText[index];
 | 
			
		||||
    index++;
 | 
			
		||||
    setTimeout(typeText, 300); // 每300毫秒显示一个字
 | 
			
		||||
  } else {
 | 
			
		||||
    setTimeout(() => {
 | 
			
		||||
      connectingText.value = '';
 | 
			
		||||
      index = 0;
 | 
			
		||||
      typeText();
 | 
			
		||||
    }, 1000); // 等待1秒后重新开始
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
/*************************** end of code ****************************************/
 | 
			
		||||
 | 
			
		||||
/********************** conversation process code ***************************/
 | 
			
		||||
const leftVoiceActive = ref(false);
 | 
			
		||||
const rightVoiceActive = ref(false);
 | 
			
		||||
 | 
			
		||||
const animateVoice = () => {
 | 
			
		||||
  leftVoiceActive.value = Math.random() > 0.5;
 | 
			
		||||
  rightVoiceActive.value = Math.random() > 0.5;
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
const wavRecorder = ref(new WavRecorder({ sampleRate: 24000 }));
 | 
			
		||||
const wavStreamPlayer = ref(new WavStreamPlayer({ sampleRate: 24000 }));
 | 
			
		||||
const client = ref(
 | 
			
		||||
    new RealtimeClient({
 | 
			
		||||
      url: "ws://localhost:5678/api/realtime",
 | 
			
		||||
      apiKey: "sk-Gc5cEzDzGQLIqxWA9d62089350F3454bB359C4A3Fa21B3E4",
 | 
			
		||||
      dangerouslyAllowAPIKeyInBrowser: true,
 | 
			
		||||
    })
 | 
			
		||||
);
 | 
			
		||||
// // Set up client instructions and transcription
 | 
			
		||||
client.value.updateSession({
 | 
			
		||||
  instructions: instructions,
 | 
			
		||||
  turn_detection: null,
 | 
			
		||||
  input_audio_transcription: { model: 'whisper-1' },
 | 
			
		||||
  voice: 'alloy',
 | 
			
		||||
});
 | 
			
		||||
 | 
			
		||||
// set voice wave canvas
 | 
			
		||||
const clientCanvasRef = ref(null);
 | 
			
		||||
const serverCanvasRef = ref(null);
 | 
			
		||||
// const eventsScrollRef = ref(null);
 | 
			
		||||
// const startTime = ref(new Date().toISOString());
 | 
			
		||||
 | 
			
		||||
// const items = ref([]);
 | 
			
		||||
// const realtimeEvents = ref([]);
 | 
			
		||||
// const expandedEvents = reactive({});
 | 
			
		||||
const isConnected = ref(false);
 | 
			
		||||
// const canPushToTalk = ref(true);
 | 
			
		||||
const isRecording = ref(false);
 | 
			
		||||
// const memoryKv = ref({});
 | 
			
		||||
// const coords = ref({ lat: 37.775593, lng: -122.418137 });
 | 
			
		||||
// const marker = ref(null);
 | 
			
		||||
 | 
			
		||||
// Methods
 | 
			
		||||
// const formatTime = (timestamp) => {
 | 
			
		||||
//   const t0 = new Date(startTime.value).valueOf();
 | 
			
		||||
//   const t1 = new Date(timestamp).valueOf();
 | 
			
		||||
//   const delta = t1 - t0;
 | 
			
		||||
//   const hs = Math.floor(delta / 10) % 100;
 | 
			
		||||
//   const s = Math.floor(delta / 1000) % 60;
 | 
			
		||||
//   const m = Math.floor(delta / 60_000) % 60;
 | 
			
		||||
//   const pad = (n) => {
 | 
			
		||||
//     let s = n + '';
 | 
			
		||||
//     while (s.length < 2) {
 | 
			
		||||
//       s = '0' + s;
 | 
			
		||||
//     }
 | 
			
		||||
//     return s;
 | 
			
		||||
//   };
 | 
			
		||||
//   return `${pad(m)}:${pad(s)}.${pad(hs)}`;
 | 
			
		||||
// };
 | 
			
		||||
 | 
			
		||||
const connect = async () => {
 | 
			
		||||
  // startTime.value = new Date().toISOString();
 | 
			
		||||
  // realtimeEvents.value = [];
 | 
			
		||||
  // items.value = client.value.conversation.getItems();
 | 
			
		||||
  if (isConnected.value) {
 | 
			
		||||
    return
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  try {
 | 
			
		||||
    await client.value.connect();
 | 
			
		||||
    await wavRecorder.value.begin();
 | 
			
		||||
    await wavStreamPlayer.value.connect();
 | 
			
		||||
    isConnected.value = true;
 | 
			
		||||
    console.log("对话连接成功!")
 | 
			
		||||
    client.value.sendUserMessageContent([
 | 
			
		||||
      {
 | 
			
		||||
        type: 'input_text',
 | 
			
		||||
        text: '你好,我是老阳!',
 | 
			
		||||
      },
 | 
			
		||||
    ]);
 | 
			
		||||
 | 
			
		||||
    if (client.value.getTurnDetectionType() === 'server_vad') {
 | 
			
		||||
      await wavRecorder.value.record((data) => client.value.appendInputAudio(data.mono));
 | 
			
		||||
    }
 | 
			
		||||
  } catch (e) {
 | 
			
		||||
    showMessageError(e.message)
 | 
			
		||||
  }
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
// const disconnectConversation = async () => {
 | 
			
		||||
//   isConnected.value = false;
 | 
			
		||||
//   // realtimeEvents.value = [];
 | 
			
		||||
//   // items.value = [];
 | 
			
		||||
//   // memoryKv.value = {};
 | 
			
		||||
//   // coords.value = { lat: 37.775593, lng: -122.418137 };
 | 
			
		||||
//   // marker.value = null;
 | 
			
		||||
//
 | 
			
		||||
//   client.value.disconnect();
 | 
			
		||||
//   await wavRecorder.value.end();
 | 
			
		||||
//   await wavStreamPlayer.value.interrupt();
 | 
			
		||||
// };
 | 
			
		||||
 | 
			
		||||
// const deleteConversationItem = async (id) => {
 | 
			
		||||
//   client.value.deleteItem(id);
 | 
			
		||||
// };
 | 
			
		||||
 | 
			
		||||
const startRecording = async () => {
 | 
			
		||||
  isRecording.value = true;
 | 
			
		||||
  const trackSampleOffset = await wavStreamPlayer.value.interrupt();
 | 
			
		||||
  if (trackSampleOffset?.trackId) {
 | 
			
		||||
    const { trackId, offset } = trackSampleOffset;
 | 
			
		||||
    client.value.cancelResponse(trackId, offset);
 | 
			
		||||
  }
 | 
			
		||||
  await wavRecorder.value.record((data) => client.value.appendInputAudio(data.mono));
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
const stopRecording = async () => {
 | 
			
		||||
  isRecording.value = false;
 | 
			
		||||
  await wavRecorder.value.pause();
 | 
			
		||||
  client.value.createResponse();
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
// const changeTurnEndType = async (value) => {
 | 
			
		||||
//   if (value === 'none' && wavRecorder.value.getStatus() === 'recording') {
 | 
			
		||||
//     await wavRecorder.value.pause();
 | 
			
		||||
//   }
 | 
			
		||||
//   client.value.updateSession({
 | 
			
		||||
//     turn_detection: value === 'none' ? null : { type: 'server_vad' },
 | 
			
		||||
//   });
 | 
			
		||||
//   if (value === 'server_vad' && client.value.isConnected()) {
 | 
			
		||||
//     await wavRecorder.value.record((data) => client.value.appendInputAudio(data.mono));
 | 
			
		||||
//   }
 | 
			
		||||
//   canPushToTalk.value = value === 'none';
 | 
			
		||||
// };
 | 
			
		||||
//
 | 
			
		||||
// const toggleEventDetails = (eventId) => {
 | 
			
		||||
//   if (expandedEvents[eventId]) {
 | 
			
		||||
//     delete expandedEvents[eventId];
 | 
			
		||||
//   } else {
 | 
			
		||||
//     expandedEvents[eventId] = true;
 | 
			
		||||
//   }
 | 
			
		||||
// };
 | 
			
		||||
 | 
			
		||||
// Lifecycle hooks and watchers
 | 
			
		||||
const initialize = async () => {
 | 
			
		||||
  // Set up render loops for the visualization canvas
 | 
			
		||||
  let isLoaded = true;
 | 
			
		||||
  const render = () => {
 | 
			
		||||
    if (isLoaded) {
 | 
			
		||||
      if (clientCanvasRef.value) {
 | 
			
		||||
        const canvas = clientCanvasRef.value;
 | 
			
		||||
        if (!canvas.width || !canvas.height) {
 | 
			
		||||
          canvas.width = canvas.offsetWidth;
 | 
			
		||||
          canvas.height = canvas.offsetHeight;
 | 
			
		||||
        }
 | 
			
		||||
        const ctx = canvas.getContext('2d');
 | 
			
		||||
        if (ctx) {
 | 
			
		||||
          ctx.clearRect(0, 0, canvas.width, canvas.height);
 | 
			
		||||
          const result = wavRecorder.value.recording
 | 
			
		||||
              ? wavRecorder.value.getFrequencies('voice')
 | 
			
		||||
              : { values: new Float32Array([0]) };
 | 
			
		||||
          WavRenderer.drawBars(canvas, ctx, result.values, '#0099ff', 10, 0, 8);
 | 
			
		||||
        }
 | 
			
		||||
      }
 | 
			
		||||
      if (serverCanvasRef.value) {
 | 
			
		||||
        const canvas = serverCanvasRef.value;
 | 
			
		||||
        if (!canvas.width || !canvas.height) {
 | 
			
		||||
          canvas.width = canvas.offsetWidth;
 | 
			
		||||
          canvas.height = canvas.offsetHeight;
 | 
			
		||||
        }
 | 
			
		||||
        const ctx = canvas.getContext('2d');
 | 
			
		||||
        if (ctx) {
 | 
			
		||||
          ctx.clearRect(0, 0, canvas.width, canvas.height);
 | 
			
		||||
          const result = wavStreamPlayer.value.analyser
 | 
			
		||||
              ?  wavStreamPlayer.value.getFrequencies('voice')
 | 
			
		||||
              : { values: new Float32Array([0]) };
 | 
			
		||||
          WavRenderer.drawBars(canvas, ctx, result.values, '#009900', 10, 0, 8);
 | 
			
		||||
        }
 | 
			
		||||
      }
 | 
			
		||||
      requestAnimationFrame(render);
 | 
			
		||||
    }
 | 
			
		||||
  };
 | 
			
		||||
  render();
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
  // Set up client event listeners
 | 
			
		||||
  client.value.on('realtime.event', (realtimeEvent) => {
 | 
			
		||||
    // realtimeEvents.value = realtimeEvents.value.slice();
 | 
			
		||||
    // const lastEvent = realtimeEvents.value[realtimeEvents.value.length - 1];
 | 
			
		||||
    // if (lastEvent?.event.type === realtimeEvent.event.type) {
 | 
			
		||||
    //   lastEvent.count = (lastEvent.count || 0) + 1;
 | 
			
		||||
    //   realtimeEvents.value.splice(-1, 1, lastEvent);
 | 
			
		||||
    // } else {
 | 
			
		||||
    //   realtimeEvents.value.push(realtimeEvent);
 | 
			
		||||
    // }
 | 
			
		||||
    // console.log(realtimeEvent)
 | 
			
		||||
  });
 | 
			
		||||
 | 
			
		||||
  client.value.on('error', (event) => console.error(event));
 | 
			
		||||
 | 
			
		||||
  client.value.on('conversation.interrupted', async () => {
 | 
			
		||||
    const trackSampleOffset = await wavStreamPlayer.value.interrupt();
 | 
			
		||||
    if (trackSampleOffset?.trackId) {
 | 
			
		||||
      const { trackId, offset } = trackSampleOffset;
 | 
			
		||||
      client.value.cancelResponse(trackId, offset);
 | 
			
		||||
    }
 | 
			
		||||
  });
 | 
			
		||||
 | 
			
		||||
  client.value.on('conversation.updated', async ({ item, delta }) => {
 | 
			
		||||
    console.log('item updated', item, delta)
 | 
			
		||||
    if (delta?.audio) {
 | 
			
		||||
      wavStreamPlayer.value.add16BitPCM(delta.audio, item.id);
 | 
			
		||||
    }
 | 
			
		||||
    if (item.status === 'completed' && item.formatted.audio?.length) {
 | 
			
		||||
      const wavFile = await WavRecorder.decode(
 | 
			
		||||
          item.formatted.audio,
 | 
			
		||||
          24000,
 | 
			
		||||
          24000
 | 
			
		||||
      );
 | 
			
		||||
      item.formatted.file = wavFile;
 | 
			
		||||
    }
 | 
			
		||||
  });
 | 
			
		||||
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// Watchers
 | 
			
		||||
// watch(realtimeEvents, () => {
 | 
			
		||||
//   if (eventsScrollRef.value) {
 | 
			
		||||
//     const eventsEl = eventsScrollRef.value;
 | 
			
		||||
//     eventsEl.scrollTop = eventsEl.scrollHeight;
 | 
			
		||||
//   }
 | 
			
		||||
// });
 | 
			
		||||
 | 
			
		||||
// watch(items, () => {
 | 
			
		||||
//   const conversationEls = document.querySelectorAll('[data-conversation-content]');
 | 
			
		||||
//   conversationEls.forEach((el) => {
 | 
			
		||||
//     el.scrollTop = el.scrollHeight;
 | 
			
		||||
//   });
 | 
			
		||||
// });
 | 
			
		||||
 | 
			
		||||
const voiceInterval = ref(null);
 | 
			
		||||
onMounted(() => {
 | 
			
		||||
  initialize()
 | 
			
		||||
  voiceInterval.value = setInterval(animateVoice, 500);
 | 
			
		||||
  typeText()
 | 
			
		||||
});
 | 
			
		||||
 | 
			
		||||
onUnmounted(() => {
 | 
			
		||||
  clearInterval(voiceInterval.value);
 | 
			
		||||
  client.value.reset();
 | 
			
		||||
});
 | 
			
		||||
 | 
			
		||||
const hangUp = async () => {
 | 
			
		||||
  emits('close')
 | 
			
		||||
  isConnected.value = false;
 | 
			
		||||
  client.value.disconnect();
 | 
			
		||||
  await wavRecorder.value.end();
 | 
			
		||||
  await wavStreamPlayer.value.interrupt();
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
// eslint-disable-next-line no-undef
 | 
			
		||||
defineExpose({ connect });
 | 
			
		||||
</script>
 | 
			
		||||
 | 
			
		||||
<style scoped lang="stylus">
 | 
			
		||||
 | 
			
		||||
@import "@/assets/css/realtime.styl"
 | 
			
		||||
 | 
			
		||||
</style>
 | 
			
		||||
							
								
								
									
										102
									
								
								web/src/components/ui/RippleButton.vue
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										102
									
								
								web/src/components/ui/RippleButton.vue
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,102 @@
 | 
			
		||||
<template>
 | 
			
		||||
  <button
 | 
			
		||||
      class="ripple-button"
 | 
			
		||||
      @mousedown="startRipples"
 | 
			
		||||
      @mouseup="stopRipples"
 | 
			
		||||
      @mouseleave="stopRipples"
 | 
			
		||||
  >
 | 
			
		||||
    <slot></slot>
 | 
			
		||||
    <span
 | 
			
		||||
        v-for="ripple in ripples"
 | 
			
		||||
        :key="ripple.id"
 | 
			
		||||
        class="ripple"
 | 
			
		||||
        :style="getRippleStyle(ripple)"
 | 
			
		||||
    ></span>
 | 
			
		||||
  </button>
 | 
			
		||||
</template>
 | 
			
		||||
 | 
			
		||||
<script setup>
 | 
			
		||||
import { ref } from 'vue';
 | 
			
		||||
 | 
			
		||||
const ripples = ref([]);
 | 
			
		||||
let rippleCount = 0;
 | 
			
		||||
let animationId;
 | 
			
		||||
 | 
			
		||||
const startRipples = (event) => {
 | 
			
		||||
  const button = event.currentTarget;
 | 
			
		||||
  const rect = button.getBoundingClientRect();
 | 
			
		||||
  const size = Math.max(rect.width, rect.height);
 | 
			
		||||
  // const x = event.clientX - rect.left;
 | 
			
		||||
  // const y = event.clientY - rect.top;
 | 
			
		||||
  const x = rect.right - rect.left - size/2;
 | 
			
		||||
  const y = rect.bottom - rect.top - size/2;
 | 
			
		||||
 | 
			
		||||
  const createRipple = () => {
 | 
			
		||||
    ripples.value.push({
 | 
			
		||||
      id: rippleCount++,
 | 
			
		||||
      x,
 | 
			
		||||
      y,
 | 
			
		||||
      size: 0,
 | 
			
		||||
      opacity: 0.5
 | 
			
		||||
    });
 | 
			
		||||
 | 
			
		||||
    if (ripples.value.length > 3) {
 | 
			
		||||
      ripples.value.shift();
 | 
			
		||||
    }
 | 
			
		||||
  };
 | 
			
		||||
 | 
			
		||||
  const animate = () => {
 | 
			
		||||
    ripples.value.forEach(ripple => {
 | 
			
		||||
      ripple.size += 2;
 | 
			
		||||
      ripple.opacity -= 0.01;
 | 
			
		||||
    });
 | 
			
		||||
 | 
			
		||||
    ripples.value = ripples.value.filter(ripple => ripple.opacity > 0);
 | 
			
		||||
 | 
			
		||||
    if (ripples.value.length < 3) {
 | 
			
		||||
      createRipple();
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    animationId = requestAnimationFrame(animate);
 | 
			
		||||
  };
 | 
			
		||||
 | 
			
		||||
  createRipple();
 | 
			
		||||
  animate();
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
const stopRipples = () => {
 | 
			
		||||
  cancelAnimationFrame(animationId);
 | 
			
		||||
  ripples.value = [];
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
const getRippleStyle = (ripple) => ({
 | 
			
		||||
  left: `${ripple.x}px`,
 | 
			
		||||
  top: `${ripple.y}px`,
 | 
			
		||||
  width: `${ripple.size}px`,
 | 
			
		||||
  height: `${ripple.size}px`,
 | 
			
		||||
  opacity: ripple.opacity
 | 
			
		||||
});
 | 
			
		||||
</script>
 | 
			
		||||
 | 
			
		||||
<style scoped lang="stylus">
 | 
			
		||||
.ripple-button {
 | 
			
		||||
  position: relative;
 | 
			
		||||
  overflow: hidden;
 | 
			
		||||
  border: none;
 | 
			
		||||
  background none;
 | 
			
		||||
  color: white;
 | 
			
		||||
  cursor: pointer;
 | 
			
		||||
  border-radius: 50%;
 | 
			
		||||
  outline: none;
 | 
			
		||||
  margin 0
 | 
			
		||||
  padding 0
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
.ripple {
 | 
			
		||||
  position: absolute;
 | 
			
		||||
  border-radius: 50%;
 | 
			
		||||
  background-color: rgba(255, 255, 255, 0.7);
 | 
			
		||||
  transform: translate(-50%, -50%);
 | 
			
		||||
  pointer-events: none;
 | 
			
		||||
}
 | 
			
		||||
</style>
 | 
			
		||||
@@ -328,7 +328,7 @@ const routes = [
 | 
			
		||||
        name: 'test2',
 | 
			
		||||
        path: '/test2',
 | 
			
		||||
        meta: {title: '测试页面'},
 | 
			
		||||
        component: () => import('@/views/Test2.vue'),
 | 
			
		||||
        component: () => import('@/views/RealtimeTest.vue'),
 | 
			
		||||
    },
 | 
			
		||||
    {
 | 
			
		||||
        name: 'NotFound',
 | 
			
		||||
 
 | 
			
		||||
							
								
								
									
										471
									
								
								web/src/views/RealtimeTest.vue
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										471
									
								
								web/src/views/RealtimeTest.vue
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,471 @@
 | 
			
		||||
<template>
 | 
			
		||||
  <div data-component="ConsolePage">
 | 
			
		||||
    <div class="content-top">
 | 
			
		||||
      <div class="content-title">
 | 
			
		||||
        <img src="/openai-logomark.svg" alt="OpenAI Logo" />
 | 
			
		||||
        <span>realtime console</span>
 | 
			
		||||
      </div>
 | 
			
		||||
 | 
			
		||||
    </div>
 | 
			
		||||
    <div class="content-main">
 | 
			
		||||
      <div class="content-logs">
 | 
			
		||||
        <div class="content-block events">
 | 
			
		||||
          <div class="visualization">
 | 
			
		||||
            <div class="visualization-entry client">
 | 
			
		||||
              <canvas ref="clientCanvasRef" />
 | 
			
		||||
            </div>
 | 
			
		||||
            <div class="visualization-entry server">
 | 
			
		||||
              <canvas ref="serverCanvasRef" />
 | 
			
		||||
            </div>
 | 
			
		||||
          </div>
 | 
			
		||||
          <div class="content-block-title">events</div>
 | 
			
		||||
          <div class="content-block-body" ref="eventsScrollRef">
 | 
			
		||||
            <template v-if="!realtimeEvents.length">
 | 
			
		||||
              awaiting connection...
 | 
			
		||||
            </template>
 | 
			
		||||
            <template v-else>
 | 
			
		||||
              <div v-for="(realtimeEvent, i) in realtimeEvents" :key="realtimeEvent.event.event_id" class="event">
 | 
			
		||||
                <div class="event-timestamp">
 | 
			
		||||
                  {{ formatTime(realtimeEvent.time) }}
 | 
			
		||||
                </div>
 | 
			
		||||
                <div class="event-details">
 | 
			
		||||
                  <div
 | 
			
		||||
                      class="event-summary"
 | 
			
		||||
                      @click="toggleEventDetails(realtimeEvent.event.event_id)"
 | 
			
		||||
                  >
 | 
			
		||||
                    <div
 | 
			
		||||
                        :class="[
 | 
			
		||||
                        'event-source',
 | 
			
		||||
                        realtimeEvent.event.type === 'error'
 | 
			
		||||
                          ? 'error'
 | 
			
		||||
                          : realtimeEvent.source,
 | 
			
		||||
                      ]"
 | 
			
		||||
                    >
 | 
			
		||||
                      <component :is="realtimeEvent.source === 'client' ? ArrowUp : ArrowDown" />
 | 
			
		||||
                      <span>
 | 
			
		||||
                        {{ realtimeEvent.event.type === 'error'
 | 
			
		||||
                          ? 'error!'
 | 
			
		||||
                          : realtimeEvent.source }}
 | 
			
		||||
                      </span>
 | 
			
		||||
                    </div>
 | 
			
		||||
                    <div class="event-type">
 | 
			
		||||
                      {{ realtimeEvent.event.type }}
 | 
			
		||||
                      {{ realtimeEvent.count ? `(${realtimeEvent.count})` : '' }}
 | 
			
		||||
                    </div>
 | 
			
		||||
                  </div>
 | 
			
		||||
                  <div
 | 
			
		||||
                      v-if="expandedEvents[realtimeEvent.event.event_id]"
 | 
			
		||||
                      class="event-payload"
 | 
			
		||||
                  >
 | 
			
		||||
                    {{ JSON.stringify(realtimeEvent.event, null, 2) }}
 | 
			
		||||
                  </div>
 | 
			
		||||
                </div>
 | 
			
		||||
              </div>
 | 
			
		||||
            </template>
 | 
			
		||||
          </div>
 | 
			
		||||
        </div>
 | 
			
		||||
        <div class="content-block conversation">
 | 
			
		||||
          <div class="content-block-title">conversation</div>
 | 
			
		||||
          <div class="content-block-body" data-conversation-content>
 | 
			
		||||
            <template v-if="!items.length">
 | 
			
		||||
              awaiting connection...
 | 
			
		||||
            </template>
 | 
			
		||||
            <template v-else>
 | 
			
		||||
              <div
 | 
			
		||||
                  v-for="(conversationItem, i) in items"
 | 
			
		||||
                  :key="conversationItem.id"
 | 
			
		||||
                  class="conversation-item"
 | 
			
		||||
              >
 | 
			
		||||
                <div :class="['speaker', conversationItem.role || '']">
 | 
			
		||||
                  <div>
 | 
			
		||||
                    {{
 | 
			
		||||
                      (conversationItem.role || conversationItem.type).replaceAll(
 | 
			
		||||
                          '_',
 | 
			
		||||
                          ' '
 | 
			
		||||
                      )
 | 
			
		||||
                    }}
 | 
			
		||||
                  </div>
 | 
			
		||||
                  <div class="close" @click="deleteConversationItem(conversationItem.id)">
 | 
			
		||||
                    <X />
 | 
			
		||||
                  </div>
 | 
			
		||||
                </div>
 | 
			
		||||
                <div class="speaker-content">
 | 
			
		||||
                  <!-- tool response -->
 | 
			
		||||
                  <div v-if="conversationItem.type === 'function_call_output'">
 | 
			
		||||
                    {{ conversationItem.formatted.output }}
 | 
			
		||||
                  </div>
 | 
			
		||||
                  <!-- tool call -->
 | 
			
		||||
                  <div v-if="conversationItem.formatted.tool">
 | 
			
		||||
                    {{ conversationItem.formatted.tool.name }}(
 | 
			
		||||
                    {{ conversationItem.formatted.tool.arguments }})
 | 
			
		||||
                  </div>
 | 
			
		||||
                  <div
 | 
			
		||||
                      v-if="
 | 
			
		||||
                      !conversationItem.formatted.tool &&
 | 
			
		||||
                      conversationItem.role === 'user'
 | 
			
		||||
                    "
 | 
			
		||||
                  >
 | 
			
		||||
                    {{
 | 
			
		||||
                      conversationItem.formatted.transcript ||
 | 
			
		||||
                      (conversationItem.formatted.audio?.length
 | 
			
		||||
                          ? '(awaiting transcript)'
 | 
			
		||||
                          : conversationItem.formatted.text || '(item sent)')
 | 
			
		||||
                    }}
 | 
			
		||||
                  </div>
 | 
			
		||||
                  <div
 | 
			
		||||
                      v-if="
 | 
			
		||||
                      !conversationItem.formatted.tool &&
 | 
			
		||||
                      conversationItem.role === 'assistant'
 | 
			
		||||
                    "
 | 
			
		||||
                  >
 | 
			
		||||
                    {{
 | 
			
		||||
                      conversationItem.formatted.transcript ||
 | 
			
		||||
                      conversationItem.formatted.text ||
 | 
			
		||||
                      '(truncated)'
 | 
			
		||||
                    }}
 | 
			
		||||
                  </div>
 | 
			
		||||
                  <audio
 | 
			
		||||
                      v-if="conversationItem.formatted.file"
 | 
			
		||||
                      :src="conversationItem.formatted.file.url"
 | 
			
		||||
                      controls
 | 
			
		||||
                  />
 | 
			
		||||
                </div>
 | 
			
		||||
              </div>
 | 
			
		||||
            </template>
 | 
			
		||||
          </div>
 | 
			
		||||
        </div>
 | 
			
		||||
        <div class="content-actions" style="position:absolute; top: 0; left: 0">
 | 
			
		||||
          <el-button
 | 
			
		||||
              :type="isConnected ? '' : 'primary'"
 | 
			
		||||
              @click="connectConversation"
 | 
			
		||||
          >
 | 
			
		||||
            {{isConnected ? '断开连接' : '连接对话'}}
 | 
			
		||||
          </el-button>
 | 
			
		||||
 | 
			
		||||
          <el-button @mousedown="startRecording" @mouseup="stopRecording">开始讲话</el-button>
 | 
			
		||||
        </div>
 | 
			
		||||
      </div>
 | 
			
		||||
 | 
			
		||||
    </div>
 | 
			
		||||
  </div>
 | 
			
		||||
</template>
 | 
			
		||||
 | 
			
		||||
<script setup>
 | 
			
		||||
import { ref, reactive, onMounted, onUnmounted, watch } from 'vue';
 | 
			
		||||
import { RealtimeClient } from '@openai/realtime-api-beta';
 | 
			
		||||
import { WavRecorder, WavStreamPlayer } from '@/lib/wavtools/index.js';
 | 
			
		||||
import { instructions } from '@/utils/conversation_config.js';
 | 
			
		||||
import { WavRenderer } from '@/utils/wav_renderer';
 | 
			
		||||
 | 
			
		||||
// Constants
 | 
			
		||||
const LOCAL_RELAY_SERVER_URL = process.env.REACT_APP_LOCAL_RELAY_SERVER_URL || '';
 | 
			
		||||
 | 
			
		||||
// Reactive state
 | 
			
		||||
const apiKey = ref(
 | 
			
		||||
    LOCAL_RELAY_SERVER_URL
 | 
			
		||||
        ? ''
 | 
			
		||||
        : localStorage.getItem('tmp::voice_api_key') || prompt('OpenAI API Key') || ''
 | 
			
		||||
);
 | 
			
		||||
const wavRecorder = ref(new WavRecorder({ sampleRate: 24000 }));
 | 
			
		||||
const wavStreamPlayer = ref(new WavStreamPlayer({ sampleRate: 24000 }));
 | 
			
		||||
const client = ref(
 | 
			
		||||
    new RealtimeClient({
 | 
			
		||||
      url: "wss://api.geekai.pro/v1/realtime",
 | 
			
		||||
      apiKey: "sk-Gc5cEzDzGQLIqxWA9d62089350F3454bB359C4A3Fa21B3E4",
 | 
			
		||||
      dangerouslyAllowAPIKeyInBrowser: true,
 | 
			
		||||
    })
 | 
			
		||||
);
 | 
			
		||||
 | 
			
		||||
const clientCanvasRef = ref(null);
 | 
			
		||||
const serverCanvasRef = ref(null);
 | 
			
		||||
const eventsScrollRef = ref(null);
 | 
			
		||||
const startTime = ref(new Date().toISOString());
 | 
			
		||||
 | 
			
		||||
const items = ref([]);
 | 
			
		||||
const realtimeEvents = ref([]);
 | 
			
		||||
const expandedEvents = reactive({});
 | 
			
		||||
const isConnected = ref(false);
 | 
			
		||||
const canPushToTalk = ref(true);
 | 
			
		||||
const isRecording = ref(false);
 | 
			
		||||
const memoryKv = ref({});
 | 
			
		||||
const coords = ref({ lat: 37.775593, lng: -122.418137 });
 | 
			
		||||
const marker = ref(null);
 | 
			
		||||
 | 
			
		||||
// Methods
 | 
			
		||||
const formatTime = (timestamp) => {
 | 
			
		||||
  const t0 = new Date(startTime.value).valueOf();
 | 
			
		||||
  const t1 = new Date(timestamp).valueOf();
 | 
			
		||||
  const delta = t1 - t0;
 | 
			
		||||
  const hs = Math.floor(delta / 10) % 100;
 | 
			
		||||
  const s = Math.floor(delta / 1000) % 60;
 | 
			
		||||
  const m = Math.floor(delta / 60_000) % 60;
 | 
			
		||||
  const pad = (n) => {
 | 
			
		||||
    let s = n + '';
 | 
			
		||||
    while (s.length < 2) {
 | 
			
		||||
      s = '0' + s;
 | 
			
		||||
    }
 | 
			
		||||
    return s;
 | 
			
		||||
  };
 | 
			
		||||
  return `${pad(m)}:${pad(s)}.${pad(hs)}`;
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
const connectConversation = async () => {
 | 
			
		||||
  alert(123)
 | 
			
		||||
  startTime.value = new Date().toISOString();
 | 
			
		||||
  isConnected.value = true;
 | 
			
		||||
  realtimeEvents.value = [];
 | 
			
		||||
  items.value = client.value.conversation.getItems();
 | 
			
		||||
 | 
			
		||||
  await wavRecorder.value.begin();
 | 
			
		||||
  await wavStreamPlayer.value.connect();
 | 
			
		||||
  await client.value.connect();
 | 
			
		||||
  client.value.sendUserMessageContent([
 | 
			
		||||
    {
 | 
			
		||||
      type: 'input_text',
 | 
			
		||||
      text: '你好,我是老阳!',
 | 
			
		||||
    },
 | 
			
		||||
  ]);
 | 
			
		||||
 | 
			
		||||
  if (client.value.getTurnDetectionType() === 'server_vad') {
 | 
			
		||||
    await wavRecorder.value.record((data) => client.value.appendInputAudio(data.mono));
 | 
			
		||||
  }
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
const disconnectConversation = async () => {
 | 
			
		||||
  isConnected.value = false;
 | 
			
		||||
  realtimeEvents.value = [];
 | 
			
		||||
  items.value = [];
 | 
			
		||||
  memoryKv.value = {};
 | 
			
		||||
  coords.value = { lat: 37.775593, lng: -122.418137 };
 | 
			
		||||
  marker.value = null;
 | 
			
		||||
 | 
			
		||||
  client.value.disconnect();
 | 
			
		||||
  await wavRecorder.value.end();
 | 
			
		||||
  await wavStreamPlayer.value.interrupt();
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
const deleteConversationItem = async (id) => {
 | 
			
		||||
  client.value.deleteItem(id);
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
const startRecording = async () => {
 | 
			
		||||
  isRecording.value = true;
 | 
			
		||||
  const trackSampleOffset = await wavStreamPlayer.value.interrupt();
 | 
			
		||||
  if (trackSampleOffset?.trackId) {
 | 
			
		||||
    const { trackId, offset } = trackSampleOffset;
 | 
			
		||||
    await client.value.cancelResponse(trackId, offset);
 | 
			
		||||
  }
 | 
			
		||||
  await wavRecorder.value.record((data) => client.value.appendInputAudio(data.mono));
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
const stopRecording = async () => {
 | 
			
		||||
  isRecording.value = false;
 | 
			
		||||
  await wavRecorder.value.pause();
 | 
			
		||||
  client.value.createResponse();
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
const changeTurnEndType = async (value) => {
 | 
			
		||||
  if (value === 'none' && wavRecorder.value.getStatus() === 'recording') {
 | 
			
		||||
    await wavRecorder.value.pause();
 | 
			
		||||
  }
 | 
			
		||||
  client.value.updateSession({
 | 
			
		||||
    turn_detection: value === 'none' ? null : { type: 'server_vad' },
 | 
			
		||||
  });
 | 
			
		||||
  if (value === 'server_vad' && client.value.isConnected()) {
 | 
			
		||||
    await wavRecorder.value.record((data) => client.value.appendInputAudio(data.mono));
 | 
			
		||||
  }
 | 
			
		||||
  canPushToTalk.value = value === 'none';
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
const toggleEventDetails = (eventId) => {
 | 
			
		||||
  if (expandedEvents[eventId]) {
 | 
			
		||||
    delete expandedEvents[eventId];
 | 
			
		||||
  } else {
 | 
			
		||||
    expandedEvents[eventId] = true;
 | 
			
		||||
  }
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
// Lifecycle hooks and watchers
 | 
			
		||||
onMounted(() => {
 | 
			
		||||
  if (apiKey.value !== '') {
 | 
			
		||||
    localStorage.setItem('tmp::voice_api_key', apiKey.value);
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  // Set up render loops for the visualization canvas
 | 
			
		||||
  let isLoaded = true;
 | 
			
		||||
  const render = () => {
 | 
			
		||||
    if (isLoaded) {
 | 
			
		||||
      if (clientCanvasRef.value) {
 | 
			
		||||
        const canvas = clientCanvasRef.value;
 | 
			
		||||
        if (!canvas.width || !canvas.height) {
 | 
			
		||||
          canvas.width = canvas.offsetWidth;
 | 
			
		||||
          canvas.height = canvas.offsetHeight;
 | 
			
		||||
        }
 | 
			
		||||
        const ctx = canvas.getContext('2d');
 | 
			
		||||
        if (ctx) {
 | 
			
		||||
          ctx.clearRect(0, 0, canvas.width, canvas.height);
 | 
			
		||||
          const result = wavRecorder.value.recording
 | 
			
		||||
              ? wavRecorder.value.getFrequencies('voice')
 | 
			
		||||
              : { values: new Float32Array([0]) };
 | 
			
		||||
          WavRenderer.drawBars(canvas, ctx, result.values, '#0099ff', 10, 0, 8);
 | 
			
		||||
        }
 | 
			
		||||
      }
 | 
			
		||||
      if (serverCanvasRef.value) {
 | 
			
		||||
        const canvas = serverCanvasRef.value;
 | 
			
		||||
        if (!canvas.width || !canvas.height) {
 | 
			
		||||
          canvas.width = canvas.offsetWidth;
 | 
			
		||||
          canvas.height = canvas.offsetHeight;
 | 
			
		||||
        }
 | 
			
		||||
        const ctx = canvas.getContext('2d');
 | 
			
		||||
        if (ctx) {
 | 
			
		||||
          ctx.clearRect(0, 0, canvas.width, canvas.height);
 | 
			
		||||
          const result = wavStreamPlayer.value.analyser
 | 
			
		||||
              ?  wavStreamPlayer.value.getFrequencies('voice')
 | 
			
		||||
                  : { values: new Float32Array([0]) };
 | 
			
		||||
          WavRenderer.drawBars(canvas, ctx, result.values, '#009900', 10, 0, 8);
 | 
			
		||||
        }
 | 
			
		||||
      }
 | 
			
		||||
      requestAnimationFrame(render);
 | 
			
		||||
    }
 | 
			
		||||
  };
 | 
			
		||||
  render();
 | 
			
		||||
 | 
			
		||||
  // Set up client event listeners
 | 
			
		||||
  client.value.on('realtime.event', (realtimeEvent) => {
 | 
			
		||||
    realtimeEvents.value = realtimeEvents.value.slice();
 | 
			
		||||
    const lastEvent = realtimeEvents.value[realtimeEvents.value.length - 1];
 | 
			
		||||
    if (lastEvent?.event.type === realtimeEvent.event.type) {
 | 
			
		||||
      lastEvent.count = (lastEvent.count || 0) + 1;
 | 
			
		||||
      realtimeEvents.value.splice(-1, 1, lastEvent);
 | 
			
		||||
    } else {
 | 
			
		||||
      realtimeEvents.value.push(realtimeEvent);
 | 
			
		||||
    }
 | 
			
		||||
  });
 | 
			
		||||
 | 
			
		||||
  client.value.on('error', (event) => console.error(event));
 | 
			
		||||
 | 
			
		||||
  client.value.on('conversation.interrupted', async () => {
 | 
			
		||||
    const trackSampleOffset = await wavStreamPlayer.value.interrupt();
 | 
			
		||||
    if (trackSampleOffset?.trackId) {
 | 
			
		||||
      const { trackId, offset } = trackSampleOffset;
 | 
			
		||||
      await client.value.cancelResponse(trackId, offset);
 | 
			
		||||
    }
 | 
			
		||||
  });
 | 
			
		||||
 | 
			
		||||
  client.value.on('conversation.updated', async ({ item, delta }) => {
 | 
			
		||||
    items.value = client.value.conversation.getItems();
 | 
			
		||||
    if (delta?.audio) {
 | 
			
		||||
      wavStreamPlayer.value.add16BitPCM(delta.audio, item.id);
 | 
			
		||||
    }
 | 
			
		||||
    if (item.status === 'completed' && item.formatted.audio?.length) {
 | 
			
		||||
      const wavFile = await WavRecorder.decode(
 | 
			
		||||
          item.formatted.audio,
 | 
			
		||||
          24000,
 | 
			
		||||
          24000
 | 
			
		||||
      );
 | 
			
		||||
      item.formatted.file = wavFile;
 | 
			
		||||
    }
 | 
			
		||||
  });
 | 
			
		||||
 | 
			
		||||
  // Set up client instructions and tools
 | 
			
		||||
  client.value.updateSession({ instructions: instructions });
 | 
			
		||||
  client.value.updateSession({ input_audio_transcription: { model: 'whisper-1' } });
 | 
			
		||||
 | 
			
		||||
  client.value.addTool(
 | 
			
		||||
      {
 | 
			
		||||
        name: 'set_memory',
 | 
			
		||||
        description: 'Saves important data about the user into memory.',
 | 
			
		||||
        parameters: {
 | 
			
		||||
          type: 'object',
 | 
			
		||||
          properties: {
 | 
			
		||||
            key: {
 | 
			
		||||
              type: 'string',
 | 
			
		||||
              description:
 | 
			
		||||
                  'The key of the memory value. Always use lowercase and underscores, no other characters.',
 | 
			
		||||
            },
 | 
			
		||||
            value: {
 | 
			
		||||
              type: 'string',
 | 
			
		||||
              description: 'Value can be anything represented as a string',
 | 
			
		||||
            },
 | 
			
		||||
          },
 | 
			
		||||
          required: ['key', 'value'],
 | 
			
		||||
        },
 | 
			
		||||
      },
 | 
			
		||||
      async ({ key, value }) => {
 | 
			
		||||
        memoryKv.value = { ...memoryKv.value, [key]: value };
 | 
			
		||||
        return { ok: true };
 | 
			
		||||
      }
 | 
			
		||||
  );
 | 
			
		||||
 | 
			
		||||
  client.value.addTool(
 | 
			
		||||
      {
 | 
			
		||||
        name: 'get_weather',
 | 
			
		||||
        description:
 | 
			
		||||
            'Retrieves the weather for a given lat, lng coordinate pair. Specify a label for the location.',
 | 
			
		||||
        parameters: {
 | 
			
		||||
          type: 'object',
 | 
			
		||||
          properties: {
 | 
			
		||||
            lat: {
 | 
			
		||||
              type: 'number',
 | 
			
		||||
              description: 'Latitude',
 | 
			
		||||
            },
 | 
			
		||||
            lng: {
 | 
			
		||||
              type: 'number',
 | 
			
		||||
              description: 'Longitude',
 | 
			
		||||
            },
 | 
			
		||||
            location: {
 | 
			
		||||
              type: 'string',
 | 
			
		||||
              description: 'Name of the location',
 | 
			
		||||
            },
 | 
			
		||||
          },
 | 
			
		||||
          required: ['lat', 'lng', 'location'],
 | 
			
		||||
        },
 | 
			
		||||
      },
 | 
			
		||||
      async ({ lat, lng, location }) => {
 | 
			
		||||
        marker.value = { lat, lng, location };
 | 
			
		||||
        coords.value = { lat, lng, location };
 | 
			
		||||
        const result = await fetch(
 | 
			
		||||
            `https://api.open-meteo.com/v1/forecast?latitude=${lat}&longitude=${lng}¤t=temperature_2m,wind_speed_10m`
 | 
			
		||||
        );
 | 
			
		||||
        const json = await result.json();
 | 
			
		||||
        const temperature = {
 | 
			
		||||
          value: json.current.temperature_2m,
 | 
			
		||||
          units: json.current_units.temperature_2m,
 | 
			
		||||
        };
 | 
			
		||||
        const wind_speed = {
 | 
			
		||||
          value: json.current.wind_speed_10m,
 | 
			
		||||
          units: json.current_units.wind_speed_10m,
 | 
			
		||||
        };
 | 
			
		||||
        marker.value = { lat, lng, location, temperature, wind_speed };
 | 
			
		||||
        return json;
 | 
			
		||||
      }
 | 
			
		||||
  );
 | 
			
		||||
 | 
			
		||||
  items.value = client.value.conversation.getItems();
 | 
			
		||||
});
 | 
			
		||||
 | 
			
		||||
onUnmounted(() => {
 | 
			
		||||
  client.value.reset();
 | 
			
		||||
});
 | 
			
		||||
 | 
			
		||||
// Watchers
 | 
			
		||||
watch(realtimeEvents, () => {
 | 
			
		||||
  if (eventsScrollRef.value) {
 | 
			
		||||
    const eventsEl = eventsScrollRef.value;
 | 
			
		||||
    eventsEl.scrollTop = eventsEl.scrollHeight;
 | 
			
		||||
  }
 | 
			
		||||
});
 | 
			
		||||
 | 
			
		||||
watch(items, () => {
 | 
			
		||||
  const conversationEls = document.querySelectorAll('[data-conversation-content]');
 | 
			
		||||
  conversationEls.forEach((el) => {
 | 
			
		||||
    el.scrollTop = el.scrollHeight;
 | 
			
		||||
  });
 | 
			
		||||
});
 | 
			
		||||
</script>
 | 
			
		||||
 | 
			
		||||
<style scoped>
 | 
			
		||||
/* You can add your component-specific styles here */
 | 
			
		||||
/* If you're using SCSS, you might want to import your existing SCSS file */
 | 
			
		||||
/* @import './ConsolePage.scss'; */
 | 
			
		||||
</style>
 | 
			
		||||
@@ -1,471 +1,40 @@
 | 
			
		||||
<template>
 | 
			
		||||
  <div data-component="ConsolePage">
 | 
			
		||||
    <div class="content-top">
 | 
			
		||||
      <div class="content-title">
 | 
			
		||||
        <img src="/openai-logomark.svg" alt="OpenAI Logo" />
 | 
			
		||||
        <span>realtime console</span>
 | 
			
		||||
      </div>
 | 
			
		||||
  <div class="audio-chat-page">
 | 
			
		||||
    <el-button style="margin: 20px" type="primary" size="large" @click="connect()">开始语音对话</el-button>
 | 
			
		||||
 | 
			
		||||
    </div>
 | 
			
		||||
    <div class="content-main">
 | 
			
		||||
      <div class="content-logs">
 | 
			
		||||
        <div class="content-block events">
 | 
			
		||||
          <div class="visualization">
 | 
			
		||||
            <div class="visualization-entry client">
 | 
			
		||||
              <canvas ref="clientCanvasRef" />
 | 
			
		||||
            </div>
 | 
			
		||||
            <div class="visualization-entry server">
 | 
			
		||||
              <canvas ref="serverCanvasRef" />
 | 
			
		||||
            </div>
 | 
			
		||||
          </div>
 | 
			
		||||
          <div class="content-block-title">events</div>
 | 
			
		||||
          <div class="content-block-body" ref="eventsScrollRef">
 | 
			
		||||
            <template v-if="!realtimeEvents.length">
 | 
			
		||||
              awaiting connection...
 | 
			
		||||
            </template>
 | 
			
		||||
            <template v-else>
 | 
			
		||||
              <div v-for="(realtimeEvent, i) in realtimeEvents" :key="realtimeEvent.event.event_id" class="event">
 | 
			
		||||
                <div class="event-timestamp">
 | 
			
		||||
                  {{ formatTime(realtimeEvent.time) }}
 | 
			
		||||
                </div>
 | 
			
		||||
                <div class="event-details">
 | 
			
		||||
                  <div
 | 
			
		||||
                      class="event-summary"
 | 
			
		||||
                      @click="toggleEventDetails(realtimeEvent.event.event_id)"
 | 
			
		||||
                  >
 | 
			
		||||
                    <div
 | 
			
		||||
                        :class="[
 | 
			
		||||
                        'event-source',
 | 
			
		||||
                        realtimeEvent.event.type === 'error'
 | 
			
		||||
                          ? 'error'
 | 
			
		||||
                          : realtimeEvent.source,
 | 
			
		||||
                      ]"
 | 
			
		||||
                    >
 | 
			
		||||
                      <component :is="realtimeEvent.source === 'client' ? ArrowUp : ArrowDown" />
 | 
			
		||||
                      <span>
 | 
			
		||||
                        {{ realtimeEvent.event.type === 'error'
 | 
			
		||||
                          ? 'error!'
 | 
			
		||||
                          : realtimeEvent.source }}
 | 
			
		||||
                      </span>
 | 
			
		||||
                    </div>
 | 
			
		||||
                    <div class="event-type">
 | 
			
		||||
                      {{ realtimeEvent.event.type }}
 | 
			
		||||
                      {{ realtimeEvent.count ? `(${realtimeEvent.count})` : '' }}
 | 
			
		||||
                    </div>
 | 
			
		||||
                  </div>
 | 
			
		||||
                  <div
 | 
			
		||||
                      v-if="expandedEvents[realtimeEvent.event.event_id]"
 | 
			
		||||
                      class="event-payload"
 | 
			
		||||
                  >
 | 
			
		||||
                    {{ JSON.stringify(realtimeEvent.event, null, 2) }}
 | 
			
		||||
                  </div>
 | 
			
		||||
                </div>
 | 
			
		||||
              </div>
 | 
			
		||||
            </template>
 | 
			
		||||
          </div>
 | 
			
		||||
        </div>
 | 
			
		||||
        <div class="content-block conversation">
 | 
			
		||||
          <div class="content-block-title">conversation</div>
 | 
			
		||||
          <div class="content-block-body" data-conversation-content>
 | 
			
		||||
            <template v-if="!items.length">
 | 
			
		||||
              awaiting connection...
 | 
			
		||||
            </template>
 | 
			
		||||
            <template v-else>
 | 
			
		||||
              <div
 | 
			
		||||
                  v-for="(conversationItem, i) in items"
 | 
			
		||||
                  :key="conversationItem.id"
 | 
			
		||||
                  class="conversation-item"
 | 
			
		||||
              >
 | 
			
		||||
                <div :class="['speaker', conversationItem.role || '']">
 | 
			
		||||
                  <div>
 | 
			
		||||
                    {{
 | 
			
		||||
                      (conversationItem.role || conversationItem.type).replaceAll(
 | 
			
		||||
                          '_',
 | 
			
		||||
                          ' '
 | 
			
		||||
                      )
 | 
			
		||||
                    }}
 | 
			
		||||
                  </div>
 | 
			
		||||
                  <div class="close" @click="deleteConversationItem(conversationItem.id)">
 | 
			
		||||
                    <X />
 | 
			
		||||
                  </div>
 | 
			
		||||
                </div>
 | 
			
		||||
                <div class="speaker-content">
 | 
			
		||||
                  <!-- tool response -->
 | 
			
		||||
                  <div v-if="conversationItem.type === 'function_call_output'">
 | 
			
		||||
                    {{ conversationItem.formatted.output }}
 | 
			
		||||
                  </div>
 | 
			
		||||
                  <!-- tool call -->
 | 
			
		||||
                  <div v-if="conversationItem.formatted.tool">
 | 
			
		||||
                    {{ conversationItem.formatted.tool.name }}(
 | 
			
		||||
                    {{ conversationItem.formatted.tool.arguments }})
 | 
			
		||||
                  </div>
 | 
			
		||||
                  <div
 | 
			
		||||
                      v-if="
 | 
			
		||||
                      !conversationItem.formatted.tool &&
 | 
			
		||||
                      conversationItem.role === 'user'
 | 
			
		||||
                    "
 | 
			
		||||
                  >
 | 
			
		||||
                    {{
 | 
			
		||||
                      conversationItem.formatted.transcript ||
 | 
			
		||||
                      (conversationItem.formatted.audio?.length
 | 
			
		||||
                          ? '(awaiting transcript)'
 | 
			
		||||
                          : conversationItem.formatted.text || '(item sent)')
 | 
			
		||||
                    }}
 | 
			
		||||
                  </div>
 | 
			
		||||
                  <div
 | 
			
		||||
                      v-if="
 | 
			
		||||
                      !conversationItem.formatted.tool &&
 | 
			
		||||
                      conversationItem.role === 'assistant'
 | 
			
		||||
                    "
 | 
			
		||||
                  >
 | 
			
		||||
                    {{
 | 
			
		||||
                      conversationItem.formatted.transcript ||
 | 
			
		||||
                      conversationItem.formatted.text ||
 | 
			
		||||
                      '(truncated)'
 | 
			
		||||
                    }}
 | 
			
		||||
                  </div>
 | 
			
		||||
                  <audio
 | 
			
		||||
                      v-if="conversationItem.formatted.file"
 | 
			
		||||
                      :src="conversationItem.formatted.file.url"
 | 
			
		||||
                      controls
 | 
			
		||||
                  />
 | 
			
		||||
                </div>
 | 
			
		||||
              </div>
 | 
			
		||||
            </template>
 | 
			
		||||
          </div>
 | 
			
		||||
        </div>
 | 
			
		||||
        <div class="content-actions" style="position:absolute; top: 0; left: 0">
 | 
			
		||||
          <el-button
 | 
			
		||||
              :type="isConnected ? '' : 'primary'"
 | 
			
		||||
              @click="connectConversation"
 | 
			
		||||
          >
 | 
			
		||||
            {{isConnected ? '断开连接' : '连接对话'}}
 | 
			
		||||
          </el-button>
 | 
			
		||||
 | 
			
		||||
          <el-button @mousedown="startRecording" @mouseup="stopRecording">开始讲话</el-button>
 | 
			
		||||
        </div>
 | 
			
		||||
      </div>
 | 
			
		||||
 | 
			
		||||
    </div>
 | 
			
		||||
    <el-dialog v-model="showDialog" title="语音通话" >
 | 
			
		||||
      <realtime-conversation @close="showDialog = false" ref="conversationRef" :height="dialogHeight+'px'" />
 | 
			
		||||
    </el-dialog>
 | 
			
		||||
  </div>
 | 
			
		||||
</template>
 | 
			
		||||
 | 
			
		||||
<script setup>
 | 
			
		||||
import { ref, reactive, onMounted, onUnmounted, watch } from 'vue';
 | 
			
		||||
import { RealtimeClient } from '@openai/realtime-api-beta';
 | 
			
		||||
import { WavRecorder, WavStreamPlayer } from '@/lib/wavtools/index.js';
 | 
			
		||||
import { instructions } from '@/utils/conversation_config.js';
 | 
			
		||||
import { WavRenderer } from '@/utils/wav_renderer';
 | 
			
		||||
import {nextTick, ref} from 'vue';
 | 
			
		||||
import RealtimeConversation from "@/components/RealtimeConversation .vue";
 | 
			
		||||
 | 
			
		||||
// Constants
 | 
			
		||||
const LOCAL_RELAY_SERVER_URL = process.env.REACT_APP_LOCAL_RELAY_SERVER_URL || '';
 | 
			
		||||
 | 
			
		||||
// Reactive state
 | 
			
		||||
const apiKey = ref(
 | 
			
		||||
    LOCAL_RELAY_SERVER_URL
 | 
			
		||||
        ? ''
 | 
			
		||||
        : localStorage.getItem('tmp::voice_api_key') || prompt('OpenAI API Key') || ''
 | 
			
		||||
);
 | 
			
		||||
const wavRecorder = ref(new WavRecorder({ sampleRate: 24000 }));
 | 
			
		||||
const wavStreamPlayer = ref(new WavStreamPlayer({ sampleRate: 24000 }));
 | 
			
		||||
const client = ref(
 | 
			
		||||
    new RealtimeClient({
 | 
			
		||||
      url: "wss://api.geekai.pro/v1/realtime",
 | 
			
		||||
      apiKey: "sk-Gc5cEzDzGQLIqxWA9d62089350F3454bB359C4A3Fa21B3E4",
 | 
			
		||||
      dangerouslyAllowAPIKeyInBrowser: true,
 | 
			
		||||
    })
 | 
			
		||||
);
 | 
			
		||||
 | 
			
		||||
const clientCanvasRef = ref(null);
 | 
			
		||||
const serverCanvasRef = ref(null);
 | 
			
		||||
const eventsScrollRef = ref(null);
 | 
			
		||||
const startTime = ref(new Date().toISOString());
 | 
			
		||||
 | 
			
		||||
const items = ref([]);
 | 
			
		||||
const realtimeEvents = ref([]);
 | 
			
		||||
const expandedEvents = reactive({});
 | 
			
		||||
const isConnected = ref(false);
 | 
			
		||||
const canPushToTalk = ref(true);
 | 
			
		||||
const isRecording = ref(false);
 | 
			
		||||
const memoryKv = ref({});
 | 
			
		||||
const coords = ref({ lat: 37.775593, lng: -122.418137 });
 | 
			
		||||
const marker = ref(null);
 | 
			
		||||
 | 
			
		||||
// Methods
 | 
			
		||||
const formatTime = (timestamp) => {
 | 
			
		||||
  const t0 = new Date(startTime.value).valueOf();
 | 
			
		||||
  const t1 = new Date(timestamp).valueOf();
 | 
			
		||||
  const delta = t1 - t0;
 | 
			
		||||
  const hs = Math.floor(delta / 10) % 100;
 | 
			
		||||
  const s = Math.floor(delta / 1000) % 60;
 | 
			
		||||
  const m = Math.floor(delta / 60_000) % 60;
 | 
			
		||||
  const pad = (n) => {
 | 
			
		||||
    let s = n + '';
 | 
			
		||||
    while (s.length < 2) {
 | 
			
		||||
      s = '0' + s;
 | 
			
		||||
    }
 | 
			
		||||
    return s;
 | 
			
		||||
  };
 | 
			
		||||
  return `${pad(m)}:${pad(s)}.${pad(hs)}`;
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
const connectConversation = async () => {
 | 
			
		||||
  alert(123)
 | 
			
		||||
  startTime.value = new Date().toISOString();
 | 
			
		||||
  isConnected.value = true;
 | 
			
		||||
  realtimeEvents.value = [];
 | 
			
		||||
  items.value = client.value.conversation.getItems();
 | 
			
		||||
 | 
			
		||||
  await wavRecorder.value.begin();
 | 
			
		||||
  await wavStreamPlayer.value.connect();
 | 
			
		||||
  await client.value.connect();
 | 
			
		||||
  client.value.sendUserMessageContent([
 | 
			
		||||
    {
 | 
			
		||||
      type: 'input_text',
 | 
			
		||||
      text: '你好,我是老阳!',
 | 
			
		||||
    },
 | 
			
		||||
  ]);
 | 
			
		||||
 | 
			
		||||
  if (client.value.getTurnDetectionType() === 'server_vad') {
 | 
			
		||||
    await wavRecorder.value.record((data) => client.value.appendInputAudio(data.mono));
 | 
			
		||||
  }
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
const disconnectConversation = async () => {
 | 
			
		||||
  isConnected.value = false;
 | 
			
		||||
  realtimeEvents.value = [];
 | 
			
		||||
  items.value = [];
 | 
			
		||||
  memoryKv.value = {};
 | 
			
		||||
  coords.value = { lat: 37.775593, lng: -122.418137 };
 | 
			
		||||
  marker.value = null;
 | 
			
		||||
 | 
			
		||||
  client.value.disconnect();
 | 
			
		||||
  await wavRecorder.value.end();
 | 
			
		||||
  await wavStreamPlayer.value.interrupt();
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
const deleteConversationItem = async (id) => {
 | 
			
		||||
  client.value.deleteItem(id);
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
const startRecording = async () => {
 | 
			
		||||
  isRecording.value = true;
 | 
			
		||||
  const trackSampleOffset = await wavStreamPlayer.value.interrupt();
 | 
			
		||||
  if (trackSampleOffset?.trackId) {
 | 
			
		||||
    const { trackId, offset } = trackSampleOffset;
 | 
			
		||||
    await client.value.cancelResponse(trackId, offset);
 | 
			
		||||
  }
 | 
			
		||||
  await wavRecorder.value.record((data) => client.value.appendInputAudio(data.mono));
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
const stopRecording = async () => {
 | 
			
		||||
  isRecording.value = false;
 | 
			
		||||
  await wavRecorder.value.pause();
 | 
			
		||||
  client.value.createResponse();
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
const changeTurnEndType = async (value) => {
 | 
			
		||||
  if (value === 'none' && wavRecorder.value.getStatus() === 'recording') {
 | 
			
		||||
    await wavRecorder.value.pause();
 | 
			
		||||
  }
 | 
			
		||||
  client.value.updateSession({
 | 
			
		||||
    turn_detection: value === 'none' ? null : { type: 'server_vad' },
 | 
			
		||||
  });
 | 
			
		||||
  if (value === 'server_vad' && client.value.isConnected()) {
 | 
			
		||||
    await wavRecorder.value.record((data) => client.value.appendInputAudio(data.mono));
 | 
			
		||||
  }
 | 
			
		||||
  canPushToTalk.value = value === 'none';
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
const toggleEventDetails = (eventId) => {
 | 
			
		||||
  if (expandedEvents[eventId]) {
 | 
			
		||||
    delete expandedEvents[eventId];
 | 
			
		||||
  } else {
 | 
			
		||||
    expandedEvents[eventId] = true;
 | 
			
		||||
  }
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
// Lifecycle hooks and watchers
 | 
			
		||||
onMounted(() => {
 | 
			
		||||
  if (apiKey.value !== '') {
 | 
			
		||||
    localStorage.setItem('tmp::voice_api_key', apiKey.value);
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  // Set up render loops for the visualization canvas
 | 
			
		||||
  let isLoaded = true;
 | 
			
		||||
  const render = () => {
 | 
			
		||||
    if (isLoaded) {
 | 
			
		||||
      if (clientCanvasRef.value) {
 | 
			
		||||
        const canvas = clientCanvasRef.value;
 | 
			
		||||
        if (!canvas.width || !canvas.height) {
 | 
			
		||||
          canvas.width = canvas.offsetWidth;
 | 
			
		||||
          canvas.height = canvas.offsetHeight;
 | 
			
		||||
        }
 | 
			
		||||
        const ctx = canvas.getContext('2d');
 | 
			
		||||
        if (ctx) {
 | 
			
		||||
          ctx.clearRect(0, 0, canvas.width, canvas.height);
 | 
			
		||||
          const result = wavRecorder.value.recording
 | 
			
		||||
              ? wavRecorder.value.getFrequencies('voice')
 | 
			
		||||
              : { values: new Float32Array([0]) };
 | 
			
		||||
          WavRenderer.drawBars(canvas, ctx, result.values, '#0099ff', 10, 0, 8);
 | 
			
		||||
        }
 | 
			
		||||
      }
 | 
			
		||||
      if (serverCanvasRef.value) {
 | 
			
		||||
        const canvas = serverCanvasRef.value;
 | 
			
		||||
        if (!canvas.width || !canvas.height) {
 | 
			
		||||
          canvas.width = canvas.offsetWidth;
 | 
			
		||||
          canvas.height = canvas.offsetHeight;
 | 
			
		||||
        }
 | 
			
		||||
        const ctx = canvas.getContext('2d');
 | 
			
		||||
        if (ctx) {
 | 
			
		||||
          ctx.clearRect(0, 0, canvas.width, canvas.height);
 | 
			
		||||
          const result = wavStreamPlayer.value.analyser
 | 
			
		||||
              ?  wavStreamPlayer.value.getFrequencies('voice')
 | 
			
		||||
                  : { values: new Float32Array([0]) };
 | 
			
		||||
          WavRenderer.drawBars(canvas, ctx, result.values, '#009900', 10, 0, 8);
 | 
			
		||||
        }
 | 
			
		||||
      }
 | 
			
		||||
      requestAnimationFrame(render);
 | 
			
		||||
    }
 | 
			
		||||
  };
 | 
			
		||||
  render();
 | 
			
		||||
 | 
			
		||||
  // Set up client event listeners
 | 
			
		||||
  client.value.on('realtime.event', (realtimeEvent) => {
 | 
			
		||||
    realtimeEvents.value = realtimeEvents.value.slice();
 | 
			
		||||
    const lastEvent = realtimeEvents.value[realtimeEvents.value.length - 1];
 | 
			
		||||
    if (lastEvent?.event.type === realtimeEvent.event.type) {
 | 
			
		||||
      lastEvent.count = (lastEvent.count || 0) + 1;
 | 
			
		||||
      realtimeEvents.value.splice(-1, 1, lastEvent);
 | 
			
		||||
    } else {
 | 
			
		||||
      realtimeEvents.value.push(realtimeEvent);
 | 
			
		||||
    }
 | 
			
		||||
  });
 | 
			
		||||
 | 
			
		||||
  client.value.on('error', (event) => console.error(event));
 | 
			
		||||
 | 
			
		||||
  client.value.on('conversation.interrupted', async () => {
 | 
			
		||||
    const trackSampleOffset = await wavStreamPlayer.value.interrupt();
 | 
			
		||||
    if (trackSampleOffset?.trackId) {
 | 
			
		||||
      const { trackId, offset } = trackSampleOffset;
 | 
			
		||||
      await client.value.cancelResponse(trackId, offset);
 | 
			
		||||
    }
 | 
			
		||||
  });
 | 
			
		||||
 | 
			
		||||
  client.value.on('conversation.updated', async ({ item, delta }) => {
 | 
			
		||||
    items.value = client.value.conversation.getItems();
 | 
			
		||||
    if (delta?.audio) {
 | 
			
		||||
      wavStreamPlayer.value.add16BitPCM(delta.audio, item.id);
 | 
			
		||||
    }
 | 
			
		||||
    if (item.status === 'completed' && item.formatted.audio?.length) {
 | 
			
		||||
      const wavFile = await WavRecorder.decode(
 | 
			
		||||
          item.formatted.audio,
 | 
			
		||||
          24000,
 | 
			
		||||
          24000
 | 
			
		||||
      );
 | 
			
		||||
      item.formatted.file = wavFile;
 | 
			
		||||
    }
 | 
			
		||||
  });
 | 
			
		||||
 | 
			
		||||
  // Set up client instructions and tools
 | 
			
		||||
  client.value.updateSession({ instructions: instructions });
 | 
			
		||||
  client.value.updateSession({ input_audio_transcription: { model: 'whisper-1' } });
 | 
			
		||||
 | 
			
		||||
  client.value.addTool(
 | 
			
		||||
      {
 | 
			
		||||
        name: 'set_memory',
 | 
			
		||||
        description: 'Saves important data about the user into memory.',
 | 
			
		||||
        parameters: {
 | 
			
		||||
          type: 'object',
 | 
			
		||||
          properties: {
 | 
			
		||||
            key: {
 | 
			
		||||
              type: 'string',
 | 
			
		||||
              description:
 | 
			
		||||
                  'The key of the memory value. Always use lowercase and underscores, no other characters.',
 | 
			
		||||
            },
 | 
			
		||||
            value: {
 | 
			
		||||
              type: 'string',
 | 
			
		||||
              description: 'Value can be anything represented as a string',
 | 
			
		||||
            },
 | 
			
		||||
          },
 | 
			
		||||
          required: ['key', 'value'],
 | 
			
		||||
        },
 | 
			
		||||
      },
 | 
			
		||||
      async ({ key, value }) => {
 | 
			
		||||
        memoryKv.value = { ...memoryKv.value, [key]: value };
 | 
			
		||||
        return { ok: true };
 | 
			
		||||
      }
 | 
			
		||||
  );
 | 
			
		||||
 | 
			
		||||
  client.value.addTool(
 | 
			
		||||
      {
 | 
			
		||||
        name: 'get_weather',
 | 
			
		||||
        description:
 | 
			
		||||
            'Retrieves the weather for a given lat, lng coordinate pair. Specify a label for the location.',
 | 
			
		||||
        parameters: {
 | 
			
		||||
          type: 'object',
 | 
			
		||||
          properties: {
 | 
			
		||||
            lat: {
 | 
			
		||||
              type: 'number',
 | 
			
		||||
              description: 'Latitude',
 | 
			
		||||
            },
 | 
			
		||||
            lng: {
 | 
			
		||||
              type: 'number',
 | 
			
		||||
              description: 'Longitude',
 | 
			
		||||
            },
 | 
			
		||||
            location: {
 | 
			
		||||
              type: 'string',
 | 
			
		||||
              description: 'Name of the location',
 | 
			
		||||
            },
 | 
			
		||||
          },
 | 
			
		||||
          required: ['lat', 'lng', 'location'],
 | 
			
		||||
        },
 | 
			
		||||
      },
 | 
			
		||||
      async ({ lat, lng, location }) => {
 | 
			
		||||
        marker.value = { lat, lng, location };
 | 
			
		||||
        coords.value = { lat, lng, location };
 | 
			
		||||
        const result = await fetch(
 | 
			
		||||
            `https://api.open-meteo.com/v1/forecast?latitude=${lat}&longitude=${lng}¤t=temperature_2m,wind_speed_10m`
 | 
			
		||||
        );
 | 
			
		||||
        const json = await result.json();
 | 
			
		||||
        const temperature = {
 | 
			
		||||
          value: json.current.temperature_2m,
 | 
			
		||||
          units: json.current_units.temperature_2m,
 | 
			
		||||
        };
 | 
			
		||||
        const wind_speed = {
 | 
			
		||||
          value: json.current.wind_speed_10m,
 | 
			
		||||
          units: json.current_units.wind_speed_10m,
 | 
			
		||||
        };
 | 
			
		||||
        marker.value = { lat, lng, location, temperature, wind_speed };
 | 
			
		||||
        return json;
 | 
			
		||||
      }
 | 
			
		||||
  );
 | 
			
		||||
 | 
			
		||||
  items.value = client.value.conversation.getItems();
 | 
			
		||||
});
 | 
			
		||||
 | 
			
		||||
onUnmounted(() => {
 | 
			
		||||
  client.value.reset();
 | 
			
		||||
});
 | 
			
		||||
 | 
			
		||||
// Watchers
 | 
			
		||||
watch(realtimeEvents, () => {
 | 
			
		||||
  if (eventsScrollRef.value) {
 | 
			
		||||
    const eventsEl = eventsScrollRef.value;
 | 
			
		||||
    eventsEl.scrollTop = eventsEl.scrollHeight;
 | 
			
		||||
  }
 | 
			
		||||
});
 | 
			
		||||
 | 
			
		||||
watch(items, () => {
 | 
			
		||||
  const conversationEls = document.querySelectorAll('[data-conversation-content]');
 | 
			
		||||
  conversationEls.forEach((el) => {
 | 
			
		||||
    el.scrollTop = el.scrollHeight;
 | 
			
		||||
  });
 | 
			
		||||
});
 | 
			
		||||
const showDialog = ref(false);
 | 
			
		||||
const dialogHeight = ref(window.innerHeight - 75);
 | 
			
		||||
const conversationRef = ref(null);
 | 
			
		||||
const connect = () => {
 | 
			
		||||
  showDialog.value = true;
 | 
			
		||||
  nextTick(() => {
 | 
			
		||||
    conversationRef.value.connect()
 | 
			
		||||
  })
 | 
			
		||||
}
 | 
			
		||||
</script>
 | 
			
		||||
 | 
			
		||||
<style scoped>
 | 
			
		||||
/* You can add your component-specific styles here */
 | 
			
		||||
/* If you're using SCSS, you might want to import your existing SCSS file */
 | 
			
		||||
/* @import './ConsolePage.scss'; */
 | 
			
		||||
<style scoped lang="stylus">
 | 
			
		||||
.audio-chat-page {
 | 
			
		||||
  display flex
 | 
			
		||||
  flex-flow column
 | 
			
		||||
  justify-content center
 | 
			
		||||
  align-items center
 | 
			
		||||
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
canvas {
 | 
			
		||||
  background-color: transparent;
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
</style>
 | 
			
		||||
@@ -15,7 +15,7 @@
 | 
			
		||||
import {ref} from 'vue';
 | 
			
		||||
import { RealtimeClient } from '@openai/realtime-api-beta';
 | 
			
		||||
import Calling from "@/components/Calling.vue";
 | 
			
		||||
import Conversation from "@/components/Conversation .vue";
 | 
			
		||||
import Conversation from "@/components/RealtimeConversation .vue";
 | 
			
		||||
import {playPCM16} from "@/utils/wav_player";
 | 
			
		||||
import {showMessageError} from "@/utils/dialog";
 | 
			
		||||
 | 
			
		||||
 
 | 
			
		||||
		Reference in New Issue
	
	Block a user