
Flutter WebRTC / RTMP Stream Playback Support (Android, iOS, Web)


It has been a while since my last article. This post provides source code for reference; the download link is at the end of the page.

Without further ado, let's get started.

This article explains how to use the WebRTC protocol in Flutter to pull and play a live stream in real time.

If you are not familiar with WebRTC, it is worth reading up on the protocol first. The basic idea is peer-to-peer transmission: the two peers exchange SDP information, one side sends an offer, and the receiver replies with an answer.
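With the setup used in this article (an SRS server doing the answering), the handshake boils down to the steps below; this is just a summary of the code shown later:

// Simplified flow: SRS acts as the remote peer, and the "signaling channel"
// is a single HTTP POST to SRS's /rtc/v1/play/ API.
//   1. createPeerConnection()
//   2. createOffer()                      -> local SDP offer
//   3. setLocalDescription(offer)
//   4. POST {sdp: offer, streamurl: ...}  -> http://<srs-host>:1985/rtc/v1/play/
//   5. SRS replies {code: 0, sdp: answer}
//   6. setRemoteDescription(answer)       -> media starts flowing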

There are plenty of streaming servers for the web; popular open-source options include SRS, Kurento, and other media streaming servers, which I won't go into here.

This example uses the SRS streaming server (Docker | SRS).

You need to set up the streaming server first. I did this on Ubuntu, which is convenient because SRS can be installed directly from the command line; Windows users may need to configure a Docker environment first. There are quite a few pitfalls when setting up SRS. I recommend not enabling HTTPS unless you are very comfortable with ops work, otherwise the configuration will quickly get confusing.
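If you go the Docker route, the quick-start is roughly the following (a sketch based on the SRS docs; the image tag, the config file, and the CANDIDATE value, which must be your server's reachable IP, are assumptions to adapt to your own setup):

# Sketch of running SRS with WebRTC enabled; replace CANDIDATE with your server's IP.
CANDIDATE=192.168.13.129
docker run --rm -it --env CANDIDATE=$CANDIDATE \
  -p 1935:1935 -p 1985:1985 -p 8080:8080 -p 8000:8000/udp \
  ossrs/srs:5 objs/srs -c conf/rtc.conf

Port 1935 carries RTMP, 1985 is the HTTP API that the WebRTC signaling below talks to, 8080 serves HTTP-FLV/HLS, and UDP 8000 carries the WebRTC media.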

Once the server is up, you can push a stream to it with ffmpeg:

# Push an RTMP stream to SRS
ffmpeg -re -i  time.flv  -vcodec copy -acodec copy  -f flv -y rtmp://192.168.13.129/live/livestream

For playback, you can watch the stream directly on your own SRS server's demo page, or pull it on an Android or iOS device. The Flutter app in this article is mainly used for the pulling/playback side.
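As a preview, here is roughly how you would open the two player pages defined later in this article (hypothetical helper functions; the host and stream name are the demo values from the ffmpeg command above, and ?schema=http tells the WebRTC signaling code to use SRS's plain-HTTP API on port 1985):

// Hypothetical helpers that open the player pages built later in this article.
void openWebRtcPlayer(BuildContext context) {
  Navigator.push(
      context,
      MaterialPageRoute(
          builder: (_) => const WebRTCStreamingPlayer(
              'webrtc://192.168.13.129/live/livestream?schema=http')));
}

void openRtmpPlayer(BuildContext context) {
  Navigator.push(
      context,
      MaterialPageRoute(
          builder: (_) => const LiveStreamingPlayer(
              'rtmp://192.168.13.129/live/livestream')));
}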

Step one: add the dependencies.

fijkplayer: ^0.10.1    # ijkplayer wrapper, used here for RTMP playback
flutter_webrtc: 0.9.3  # WebRTC support
dio: ^4.0.6            # HTTP client for the WebRTC signaling request
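The snippets below refer to these packages through import prefixes. The imports look roughly like this (the flutter_live import is an assumption: it points at whatever file in your project holds the WebRTCPlayer, RealtimePlayer, and FlutterLive classes listed later in this article):

import 'dart:convert';

import 'package:dio/dio.dart';
import 'package:fijkplayer/fijkplayer.dart' as fijkplayer;
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:flutter_webrtc/flutter_webrtc.dart' as webrtc;

// Assumed file name: the player/utility classes shown later in this article.
import 'flutter_live.dart' as flutter_live;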

Next, here is the WebRTC player widget:

class WebRTCStreamingPlayer extends StatefulWidget {
  final String _url;

  const WebRTCStreamingPlayer(this._url, {Key? key}) : super(key: key);

  @override
  State<StatefulWidget> createState() => _WebRTCStreamingPlayerState();
}

class _WebRTCStreamingPlayerState extends State<WebRTCStreamingPlayer> {
  final webrtc.RTCVideoRenderer _video = webrtc.RTCVideoRenderer();
  final flutter_live.WebRTCPlayer _player = flutter_live.WebRTCPlayer();

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: AppBar(title: const Text('WebRTC Streaming')),
      body: GestureDetector(
          child: Container(
              decoration: BoxDecoration(color: Colors.grey[500]),
              child: webrtc.RTCVideoView(_video))),
    );
  }

  @override
  void initState() {
    super.initState();
    _player.initState();
    autoPlay();
  }

  void autoPlay() async {
    await _video.initialize();

    // Render the stream once the remote stream arrives.
    _player.onRemoteStream = (webrtc.MediaStream stream) {
      // Use setState to assign srcObject and trigger a rebuild.
      setState(() {
        _video.srcObject = stream;
      });
    };
    // Start playing the WebRTC stream automatically.
    await _player.play(widget._url);
  }

  @override
  void dispose() {
    super.dispose();
    _video.dispose();
    _player.dispose();
  }
}

Below is the core WebRTCPlayer playback class.

WebRTCUri is mainly used to split a play URL into the signaling API address and the stream address (a short usage example follows the class).

class WebRTCUri {
  /// The API server URL for the WebRTC stream.
  late String api;

  /// The stream URL to play or publish.
  late String streamUrl;

  /// Parse a url into a WebRTC uri.
  static WebRTCUri parse(String url) {
    Uri uri = Uri.parse(url);
    String? schema = 'https'; // For native, default to HTTPS
    if (uri.queryParameters.containsKey('schema')) {
      schema = uri.queryParameters['schema'];
    } else {
      schema = 'https';
    }
    // Default port: 443 for HTTPS signaling, 1985 for SRS's HTTP API.
    var port = uri.port;
    if (port <= 0) {
      port = (schema == 'http') ? 1985 : 443;
    }

    String? api = '/rtc/v1/play/';
    if (uri.queryParameters.containsKey('play')) {
      api = uri.queryParameters['play'];
    }

    var apiParams = [];
    for (var key in uri.queryParameters.keys) {
      if (key != 'api' && key != 'play' && key != 'schema') {
        apiParams.add('$key=${uri.queryParameters[key]}');
      }
    }

    var apiUrl = '$schema://${uri.host}:$port$api';
    if (apiParams.isNotEmpty) {
      apiUrl += '?${apiParams.join('&')}';
    }

    WebRTCUri r = WebRTCUri();
    r.api = apiUrl;
    r.streamUrl = url;
    print('Url $url parsed to api=${r.api}, stream=${r.streamUrl}');
    return r;
  }
}
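For example, with the local SRS instance used above, a play URL is split like this:

// How parse() splits a play url (host and stream name are the demo values from earlier).
final r = WebRTCUri.parse('webrtc://192.168.13.129/live/livestream?schema=http');
// r.api       == 'http://192.168.13.129:1985/rtc/v1/play/'   (signaling endpoint)
// r.streamUrl == 'webrtc://192.168.13.129/live/livestream?schema=http'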

class WebRTCPlayer {
  webrtc.AddStreamCallback? _onRemoteStream;
  late webrtc.RTCPeerConnection _pc;

  /// Set the callback that receives the remote stream.
  set onRemoteStream(webrtc.AddStreamCallback v) {
    _onRemoteStream = v;
  }

  /// Initialise the player.
  void initState() {}

  Future<void> play(String url) async {
    // Create the peer connection.
    _pc = await webrtc.createPeerConnection({
      // addTransceiver only works with the Unified Plan sdpSemantics.
      'sdpSemantics': "unified-plan"
    });

    print('WebRTC: createPeerConnection done');

    // Hand the remote stream to the caller when it arrives.
    _pc.onAddStream = (webrtc.MediaStream stream) {
      print('WebRTC: got stream ${stream.id}');
      final cb = _onRemoteStream;
      if (cb == null) {
        print('Warning: stream ${stream.id} is leaked, no onRemoteStream callback set');
        return;
      }
      cb(stream);
    };

    _pc.addTransceiver(
      kind: webrtc.RTCRtpMediaType.RTCRtpMediaTypeAudio,
      init: webrtc.RTCRtpTransceiverInit(
          direction: webrtc.TransceiverDirection.RecvOnly),
    );
    _pc.addTransceiver(
      kind: webrtc.RTCRtpMediaType.RTCRtpMediaTypeVideo,
      init: webrtc.RTCRtpTransceiverInit(
          direction: webrtc.TransceiverDirection.RecvOnly),
    );
    print('WebRTC: Setup PC done, A|V RecvOnly');

    // Start the SDP handshake.
    webrtc.RTCSessionDescription offer = await _pc.createOffer({
      'mandatory': {'OfferToReceiveAudio': true, 'OfferToReceiveVideo': true},
    });
    await _pc.setLocalDescription(offer);
    print(
        'WebRTC: createOffer, ${offer.type} is ${offer.sdp?.replaceAll('\n', '\\n').replaceAll('\r', '\\r')}');
    webrtc.RTCSessionDescription answer = await _handshake(url, offer.sdp!);
    print(
        'WebRTC: got ${answer.type} is ${answer.sdp?.replaceAll('\n', '\\n').replaceAll('\r', '\\r')}');

    await _pc.setRemoteDescription(answer);
  }

  /// Handshake to exchange SDP: send our offer and get back the answer.
  Future<webrtc.RTCSessionDescription> _handshake(
      String url, String offer) async {
    // Parse the WebRTC uri from the url to find the signaling API endpoint.
    WebRTCUri uri = WebRTCUri.parse(url);

    // Do signaling for WebRTC.
    // POST http://d.ossrs.net:1985/rtc/v1/play/
    //    {api: "xxx", sdp: "offer", streamurl: "webrtc://d.ossrs.net:11985/live/livestream"}
    // Response:
    //    {code: 0, sdp: "answer", sessionid: "007r51l7:X2Lv"}
    var dio = Dio();
    Response response = await dio.post(uri.api,
        data: {"api": uri.api, "streamurl": uri.streamUrl, "sdp": offer},
        options: Options(headers: {'Content-Type': 'application/json'}));
    print('WebRTC reply: ${response.data}');

    Map<String, dynamic> o = json.decode(response.toString());
    if (!o.containsKey('code') || !o.containsKey('sdp') || o['code'] != 0) {
      return Future.error(response.toString());
    }

    return Future.value(webrtc.RTCSessionDescription(o['sdp'], 'answer'));
  }

  /// Dispose of the RTC player.
  void dispose() {
    _pc.close();
  }
}

The section above covers pulling the stream over WebRTC.

The next part covers pulling an RTMP stream with fijkplayer.

/// Live-streaming utilities for Flutter.
class FlutterLive {
  /// The platform channel.
  static const MethodChannel _channel = MethodChannel('flutter_live');

  /// Get the platform version.
  static Future<String> get platformVersion async {
    final String version = await _channel.invokeMethod('getPlatformVersion');
    return version;
  }

  FlutterLive();
}

class RealtimePlayer {
  final fijkplayer.FijkPlayer _player;

  RealtimePlayer(this._player);

  fijkplayer.FijkPlayer get fijk => _player;

  /// Initialise the player.
  void initState() {
    _player.enterFullScreen();
  }

  /// Start playing a url.
  /// [url] must be a path accepted by [FijkPlayer.setDataSource]
  /// (https://pub.dev/documentation/fijkplayer/latest/fijkplayer/FijkPlayer/setDataSource.html).
  /// Live RTMP streams such as [FlutterLive.rtmp] are supported, as are HLS ([FlutterLive.hls]) and FLV ([FlutterLive.flv]).
  /// For secure live streaming over HTTPS there are [FlutterLive.flvs] for HTTPS-FLV and [FlutterLive.hlss] for HLS over HTTPS.
  /// Any url supported by FFmpeg works. (Example URLs are listed after this class.)
  Future<void> play(String url) async {
    print('Start play live streaming $url');
    await _player.setOption(
        fijkplayer.FijkOption.playerCategory, "mediacodec-all-videos", 1);
    await _player.setOption(
        fijkplayer.FijkOption.hostCategory, "request-screen-on", 1);
    await _player.setOption(
        fijkplayer.FijkOption.hostCategory, "request-audio-focus", 1);
    // For all options, read https://github.com/Bilibili/ijkplayer/blob/master/ijkmedia/ijkplayer/ff_ffplay_options.h
    await _player.setOption(fijkplayer.FijkOption.formatCategory, "probesize",
        16 * 1024); // in bytes
    await _player.setOption(fijkplayer.FijkOption.formatCategory,
        "analyzeduration", 100 * 1000); // in us
    await _player.setOption(fijkplayer.FijkOption.playerCategory,
        "packet-buffering", 0); // 0, no buffer.
    await _player.setOption(fijkplayer.FijkOption.playerCategory,
        "max_cached_duration", 800); // in ms
    await _player.setOption(fijkplayer.FijkOption.playerCategory,
        "max-buffer-size", 32 * 1024); // in bytes
    await _player.setOption(
        fijkplayer.FijkOption.playerCategory, "infbuf", 1); // 1 for realtime.
    await _player.setOption(
        fijkplayer.FijkOption.playerCategory, "min-frames", 1); // in frames

    await _player.setDataSource(url, autoPlay: true).catchError((e) {
      print("setDataSource error: $e");
    });
  }

  /// Dispose of the ijkplayer instance.
  void dispose() {
    _player.release();
  }
}
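With SRS's default configuration the same published stream can be played over several protocols; which URL you hand to play() (or to the WebRTC player above) is a trade-off between latency and compatibility. The ports and stream name below are the SRS defaults and demo values, so treat them as assumptions:

// Typical play URLs for a stream published as live/livestream to SRS (adjust host/ports to your server).
const rtmpUrl = 'rtmp://192.168.13.129/live/livestream';            // RTMP, via fijkplayer
const flvUrl  = 'http://192.168.13.129:8080/live/livestream.flv';   // HTTP-FLV, via fijkplayer
const hlsUrl  = 'http://192.168.13.129:8080/live/livestream.m3u8';  // HLS, higher latency
const rtcUrl  = 'webrtc://192.168.13.129/live/livestream';          // WebRTC, via WebRTCPlayer above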

// The fijkplayer playback widget

class LiveStreamingPlayer extends StatefulWidget {
  final String _url;

  const LiveStreamingPlayer(this._url, {Key? key}) : super(key: key);

  @override
  _LiveStreamingPlayerState createState() => _LiveStreamingPlayerState();
}

class _LiveStreamingPlayerState extends State<LiveStreamingPlayer> {
  final flutter_live.RealtimePlayer _player =
      flutter_live.RealtimePlayer(fijkplayer.FijkPlayer());

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: AppBar(title: const Text('Live Streaming')),
      body: fijkplayer.FijkView(
          player: _player.fijk,
          panelBuilder: fijkplayer.fijkPanel2Builder(),
          fsFit: fijkplayer.FijkFit.fill),
    );
  }

  @override
  void initState() {
    super.initState();
    _player.initState();
    autoPlay();
  }

  void autoPlay() async {
    await _player.play(widget._url);
  }

  @override
  void dispose() {
    super.dispose();
    _player.dispose();
  }
}

Download link

flutter_webrtc_rtsp (CSDN download)

I hope you'll give this post a like. If you have any questions, feel free to email me at binbinsongstudio@outlook.com.
