webRTC推流

2025-12-29 16:32:22 阅读:8 编辑

推流端


<meta charset="utf-8">
<meta name="viewport" content="width=device-width, user-scalable=no, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0">
<meta http-equiv="X-UA-Compatible" content="ie=edge">

    <title>WebRTC 推流到 Oryx</title>
    <script src="https://unpkg.com/vconsole@latest/dist/vconsole.min.js"></script>

<video id="localVideo" autoplay muted playsinline></video>
<br>
<button id="startBtn">开始推流</button>
<button id="stopBtn" disabled>停止推流</button>

<script>
    // Oryx server configuration (replace with your own deployment details).
    const ORYX_SERVER = 'https://rtc.n8y.cn';
    const APP_NAME = 'live';
    const STREAM_NAME = 'livestream';
    const STREAM_SECRET = 'afe686e193ab452ba0836e9a6483435d'; // From the Oryx console.

    // Build the WHIP publish endpoint with the URL API (same string as the
    // template-literal form: /rtc/v1/whip/?app=...&stream=...&secret=...).
    const whipEndpoint = new URL('/rtc/v1/whip/', ORYX_SERVER);
    whipEndpoint.searchParams.set('app', APP_NAME);
    whipEndpoint.searchParams.set('stream', STREAM_NAME);
    whipEndpoint.searchParams.set('secret', STREAM_SECRET);
    const WHIP_URL = whipEndpoint.href;

    // Shared state for the capture/publish lifecycle.
    let localStream = null;
    let peerConnection = null;

    // 获取本地音视频流
    /**
     * Acquire the local camera/microphone stream, attach it to the preview
     * <video> element, and flip the start/stop button states.
     *
     * @returns {Promise<MediaStream>} the captured stream (also stored in `localStream`)
     * @throws re-throws the getUserMedia error after alerting the user
     */
    async function startCapture() {
        // Basic audio constraints for broad device compatibility.
        const mediaConstraints = {
            video: true,
            audio: {
                echoCancellation: true,
                noiseSuppression: true,
                autoGainControl: true
            }
        };

        try {
            localStream = await navigator.mediaDevices.getUserMedia(mediaConstraints);

            // Confirm we actually received an audio track.
            const [audioTrack] = localStream.getAudioTracks();
            if (audioTrack) {
                console.log('成功获取音频轨道:', audioTrack.label);
            } else {
                console.warn('未获取到音频轨道');
            }

            document.getElementById('localVideo').srcObject = localStream;
            document.getElementById('startBtn').disabled = true;
            document.getElementById('stopBtn').disabled = false;
            console.log('本地媒体流获取成功');
            return localStream;
        } catch (err) {
            console.error('获取媒体设备失败:', err);
            alert('无法访问摄像头或麦克风,请检查权限。');
            throw err;
        }
    }

    // 主要的推流函数
    async function startPublishing() {
        try {
            // 1. 创建 RTCPeerConnection
            const config = {
                iceServers: [{ urls: 'stun:stun.l.google.com:19302' }], // STUN服务器帮助穿透NAT
                iceCandidatePoolSize: 10
            };

            // 使用简化的媒体约束
            const mediaConstraints = {
                offerToReceiveAudio: false,
                offerToReceiveVideo: false
            };

            peerConnection = new RTCPeerConnection(config);

            // 2. 将本地音视频轨道添加到连接中
            if (localStream) {
                localStream.getTracks().forEach(track => {
                    console.log('添加轨道到连接:', track.kind);
                    peerConnection.addTrack(track, localStream);
                });
            }

            // 3. 创建 Offer (SDP)
            const offer = await peerConnection.createOffer(mediaConstraints);
            await peerConnection.setLocalDescription(offer);
            console.log('本地Offer SDP创建完成');
            console.log('Offer包含音频:', offer.sdp.includes('audio'));

            // 4. 将 Offer 发送给 Oryx (WHIP 协议)
            const response = await fetch(WHIP_URL, {
                method: 'POST',
                headers: {
                    'Content-Type': 'application/sdp'
                },
                body: offer.sdp
            });

            if (!response.ok) {
                throw new Error(`Oryx 服务器响应错误: ${response.status}`);
            }

            // 5. 接收 Oryx 返回的 Answer (SDP)
            const answerSDP = await response.text();
            console.log('收到 Oryx Answer SDP');
            console.log('Answer包含音频:', answerSDP.includes('audio'));

            const answer = new RTCSessionDescription({
                type: 'answer',
                sdp: answerSDP
            });

            // 6. 设置远端描述,完成连接协商
            await peerConnection.setRemoteDescription(answer);
            console.log('WebRTC 连接协商完成,推流已开始。');

        } catch (error) {
            console.error('推流过程中发生错误:', error);
            stopPublishing(); // 出错时清理资源
        }
    }

    // 停止推流,清理资源
    /**
     * Tear down the publish session: close the peer connection, stop every
     * captured track, detach the preview video, and restore the button states.
     */
    function stopPublishing() {
        if (peerConnection !== null) {
            peerConnection.close();
            peerConnection = null;
        }

        if (localStream !== null) {
            for (const track of localStream.getTracks()) {
                track.stop();
            }
            document.getElementById('localVideo').srcObject = null;
            localStream = null;
        }

        document.getElementById('startBtn').disabled = false;
        document.getElementById('stopBtn').disabled = true;
        console.log('推流已停止,资源已释放。');
    }

    // 绑定按钮事件
    // Wire up the buttons: start = capture devices, then publish; stop = teardown.
    const handleStart = async () => {
        try {
            await startCapture();    // acquire camera/microphone first
            await startPublishing(); // then negotiate the publish session
        } catch (e) {
            console.error('启动失败', e);
        }
    };
    document.getElementById('startBtn').onclick = handleStart;
    document.getElementById('stopBtn').onclick = stopPublishing;

    // Enable vConsole so console output is visible on mobile browsers.
    var vConsole = new VConsole();
    console.log('你好,vConsole 已启用!');
</script>

播放端


    <title>SRS</title>
    <meta charset="utf-8">
    <style>
        body{
            padding-top: 30px;
        }
    </style>
    <script type="text/javascript" src="https://rtc.n8y.cn/players/js/jquery-1.12.2.min.js"></script>
    <script type="text/javascript" src="https://rtc.n8y.cn/players/js/srs.sdk.js"></script>

<style>
    body{
        padding-top: 30px;
    }
    #rtc_media_player {
        width: 640px;
        height: 360px;
        display: block;
       /* background-color: #000;*/
    }
</style>

<div class="container">

    <div class="form-inline">
        URL:
        <input type="text" id="txt_url" class="input-xxlarge" value="">
        <button class="btn btn-primary" id="btn_play">Play</button>
    </div>
    <video id="rtc_media_player" autoplay playsinline></video>
</div>
<script type="text/javascript">
    $(function () {
        // Global handler so the previous session is cleaned up when replaying.
        var sdk = null;

        /**
         * (Re)start WHEP playback: close any previous session, create a fresh
         * SDK instance, bind its stream to the <video>, and start playing.
         */
        var startPlay = function () {
            $('#rtc_media_player').show();

            // Close the previous PeerConnection when the user replays.
            if (sdk) {
                sdk.close();
            }
            sdk = new SrsRtcWhipWhepAsync();

            // The SDK exposes a ready-made MediaStream, mirroring
            // https://webrtc.org/getting-started/remote-streams
            $('#rtc_media_player').prop('srcObject', sdk.stream);

            // Use the URL typed into the box; fall back to the default stream.
            // (Previously the #txt_url input was rendered but never read.)
            var url = $('#txt_url').val() ||
                "https://rtc.n8y.cn/rtc/v1/whep/?app=live&stream=livestream";
            sdk.play(url).then(function (session) {
                console.log('WHEP play started, session:', session);
            }).catch(function (reason) {
                // Do not swallow failures: log them so playback problems are visible.
                console.error('WHEP play failed:', reason);
                sdk.close();
            });
        };

        // Autoplay policy: the video element must stay muted for autostart.
        console.warn('For autostart, we should mute it, see https://www.jianshu.com/p/c3c6944eed5a ' +
            'or https://developers.google.com/web/updates/2017/09/autoplay-policy-changes#audiovideo_elements');
        window.addEventListener("load", function () { startPlay(); });

        // Replay when the Play button is clicked.
        $('#btn_play').click(function () {
            startPlay();
        });
    });
</script>