使用 Flutter web RTC 的视频通话显示黑屏

问题描述 投票:0回答:0

我正在使用 Flutter WebRTC 插件进行视频通话。此时此刻,一切看起来都很完美。我没有收到任何错误。但是我的远程流媒体没有来。它显示黑屏。远程流媒体。此外,本地流媒体也不会显示到远程主机屏幕。 我有用于处理所有流媒体作品的信号类 信号.dart:

import 'dart:convert';
import 'package:flutter_webrtc/flutter_webrtc.dart';
import 'package:nextgen_myhealth_patients/constants/api_endpoints.dart';
import 'package:nextgen_myhealth_patients/constants/strings.dart';
import 'package:web_socket_channel/io.dart';

typedef StreamStateCallback = void Function(MediaStream stream);

class Signaling {
  Map<String, dynamic> configuration = {
    "iceServers": [
      {"url": "stun:75.119.141.80:3478"},
      {
        "url": "turn:75.119.141.80:3478",
        "username": "nextg",
        "credential": "123456"
      },
      {"url": "stun:openrelay.metered.ca:80"},
      {
        "url": "turn:openrelay.metered.ca:443",
        "username": "openrelayproject",
        "credential": "openrelayproject"
      },
      {
        'urls': [
          'stun:stun1.l.google.com:19302',
          'stun:stun2.l.google.com:19302'
        ]
      }
    ]
  };

  RTCPeerConnection? peerConnection;
  MediaStream? localStream;
  MediaStream? remoteStream;
  String? roomId;
  String? currentRoomText;
  StreamStateCallback? onAddRemoteStream;
  final channel = IOWebSocketChannel.connect(ApiEndpoints.webSocketUrl);

  // Future<String> createRoom(RTCVideoRenderer remoteRenderer) async {
  //   FirebaseFirestore db = FirebaseFirestore.instance;
  //   DocumentReference roomRef = db.collection('rooms').doc();

  //   print('Create PeerConnection with configuration: $configuration');

  //   peerConnection = await createPeerConnection(configuration);

  //   registerPeerConnectionListeners();

  //   localStream?.getTracks().forEach((track) {
  //     peerConnection?.addTrack(track, localStream!);
  //   });

  //   // Code for collecting ICE candidates below
  //   var callerCandidatesCollection = roomRef.collection('callerCandidates');

  //   peerConnection?.onIceCandidate = (RTCIceCandidate candidate) {
  //     print('Got candidate: ${candidate.toMap()}');
  //     callerCandidatesCollection.add(candidate.toMap());
  //   };
  //   // Finish Code for collecting ICE candidate

  //   // Add code for creating a room
  //   RTCSessionDescription offer = await peerConnection!.createOffer();
  //   await peerConnection!.setLocalDescription(offer);
  //   print('Created offer: $offer');

  //   Map<String, dynamic> roomWithOffer = {'offer': offer.toMap()};

  //   await roomRef.set(roomWithOffer);
  //   var roomId = roomRef.id;
  //   print('New room created with SDK offer. Room ID: $roomId');
  //   currentRoomText = 'Current room is $roomId - You are the caller!';
  //   // Created a Room

  //   peerConnection?.onTrack = (RTCTrackEvent event) {
  //     print('Got remote track: ${event.streams[0]}');

  //     event.streams[0].getTracks().forEach((track) {
  //       print('Add a track to the remoteStream $track');
  //       remoteStream?.addTrack(track);
  //     });
  //   };

  //   // Listening for remote session description below
  //   roomRef.snapshots().listen((snapshot) async {
  //     print('Got updated room: ${snapshot.data()}');

  //     Map<String, dynamic> data = snapshot.data() as Map<String, dynamic>;
  //     if (peerConnection?.getRemoteDescription() != null &&
  //         data['answer'] != null) {
  //       var answer = RTCSessionDescription(
  //         data['answer']['sdp'],
  //         data['answer']['type'],
  //       );

  //       print("Someone tried to connect");
  //       await peerConnection?.setRemoteDescription(answer);
  //     }
  //   });
  //   // Listening for remote session description above

  //   // Listen for remote Ice candidates below
  //   roomRef.collection('calleeCandidates').snapshots().listen((snapshot) {
  //     snapshot.docChanges.forEach((change) {
  //       if (change.type == DocumentChangeType.added) {
  //         Map<String, dynamic> data = change.doc.data() as Map<String, dynamic>;
  //         print('Got new remote ICE candidate: ${jsonEncode(data)}');
  //         peerConnection!.addCandidate(
  //           RTCIceCandidate(
  //             data['candidate'],
  //             data['sdpMid'],
  //             data['sdpMLineIndex'],
  //           ),
  //         );
  //       }
  //     });
  //   });
  //   // Listen for remote ICE candidates above

  //   return roomId;
  // }

  Future<void> joinRoom(
      bool callReceived,
      String caller,
      RTCVideoRenderer remoteVideo,
      List<Map<String, dynamic>> iceCandidate,
      dynamic sdpOffer) async {
    // FirebaseFirestore db = FirebaseFirestore.instance;
    // DocumentReference roomRef = db.collection('rooms').doc('$roomId');
    // var roomSnapshot = await roomRef.get();

    if (callReceived) {
      print('Create PeerConnection with configuration: $configuration');
      peerConnection = await createPeerConnection(configuration);

      registerPeerConnectionListeners();

      localStream?.getTracks().forEach((track) {
        peerConnection?.addTrack(track, localStream!);
      });

      // Code for collecting ICE candidates below
      // var calleeCandidatesCollection = roomRef.collection('calleeCandidates');
      peerConnection!.onIceCandidate = (RTCIceCandidate candidate) {
        if (candidate == null) {
          print('onIceCandidate: complete!');
          return;
        }
        print('onIceCandidate: ${candidate.toMap()}');
        // Send ICECandidate to remote
        channel.sink.add(jsonEncode({
          'type': WebSocketMessageType.iceCandidate,
          'data': {
            'user': caller,
            'rtcMessage': {
              'label': candidate.toMap()['sdpMLineIndex'],
              'id': candidate.toMap()['sdpMid'],
              'candidate': candidate.toMap()['candidate']
            }
          }
        }));
      };
      print("Peer: ${peerConnection!.onIceCandidate}");
      // Code for collecting ICE candidate above

      peerConnection?.onTrack = (RTCTrackEvent event) {
        print('Got remote track: ${event.streams[0]}');
        event.streams[0].getTracks().forEach((track) {
          print('Add a track to the remoteStream: $track');
          remoteStream?.addTrack(track);
        });
      };

      // Listening for remote ICE candidates below
      // roomRef.collection('callerCandidates').snapshots().listen((snapshot) {
      //   snapshot.docChanges.forEach((document) {
      //     var data = document.doc.data() as Map<String, dynamic>;
      //     print(data);
      //     print('Got new remote ICE candidate: $data');
      //     peerConnection!.addCandidate(
      //       RTCIceCandidate(
      //         data['candidate'],
      //         data['sdpMid'],
      //         data['sdpMLineIndex'],
      //       ),
      //     );
      //   });
      // });

      for (var element in iceCandidate) {
        print('Got new remote ICE candidate: $element');
        await peerConnection!.addCandidate(RTCIceCandidate(
          element['candidate'],
          element['id'],
          element['label'],
        ));
      }

      // Code for creating SDP answer below
      //var data = roomSnapshot.data() as Map<String, dynamic>;
      print('Got SDP offer $sdpOffer');
      // var offer = sdpOffer['offer'];
      await peerConnection
          ?.setRemoteDescription(RTCSessionDescription(sdpOffer, 'offer'));
      var answer = await peerConnection!.createAnswer();
      print('Created Answer ${answer.sdp}');

      await peerConnection!.setLocalDescription(answer);

      // Map<String, dynamic> roomWithAnswer = {
      //   'answer': {'type': answer.type, 'sdp': answer.sdp}
      // };

      // await roomRef.update(roomWithAnswer);
      channel.sink.add(jsonEncode({
        'type': WebSocketMessageType.callAnswered,
        'data': {'caller': caller, 'rtcMessage': answer.toMap()}
      }));
      // Finished creating SDP answer
    }
  }

  Future<MediaStream> openUserMedia(
    RTCVideoRenderer localVideo,
    RTCVideoRenderer remoteVideo,
  ) async {
    var stream = await navigator.mediaDevices
        .getUserMedia({'video': true, 'audio': true});

    localVideo.srcObject = stream;
    localStream = stream;

    return stream;
    // remoteVideo.srcObject = await createLocalMediaStream('key');
  }

  Future<void> hangUp(RTCVideoRenderer localVideo) async {
    List<MediaStreamTrack> tracks = localVideo.srcObject!.getTracks();
    tracks.forEach((track) {
      track.stop();
    });

    if (remoteStream != null) {
      remoteStream!.getTracks().forEach((track) => track.stop());
    }
    if (peerConnection != null) peerConnection!.close();

    if (roomId != null) {
      // var db = FirebaseFirestore.instance;
      // var roomRef = db.collection('rooms').doc(roomId);
      // var calleeCandidates = await roomRef.collection('calleeCandidates').get();
      // calleeCandidates.docs.forEach((document) => document.reference.delete());

      // var callerCandidates = await roomRef.collection('callerCandidates').get();
      // callerCandidates.docs.forEach((document) => document.reference.delete());

      // await roomRef.delete();
    }

    localStream!.dispose();
    remoteStream?.dispose();
  }

  void registerPeerConnectionListeners() {
    peerConnection?.onIceGatheringState = (RTCIceGatheringState state) {
      print('ICE gathering state changed: $state');
    };

    peerConnection?.onConnectionState = (RTCPeerConnectionState state) {
      print('Connection state change: $state');
    };

    peerConnection?.onSignalingState = (RTCSignalingState state) {
      print('Signaling state change: $state');
    };

    peerConnection?.onIceGatheringState = (RTCIceGatheringState state) {
      print('ICE connection state change: $state');
    };

    peerConnection?.onAddStream = (MediaStream stream) {
      print("Add remote stream");
      onAddRemoteStream?.call(stream);
      remoteStream = stream;
    };

    // peerConnection?.
  }
}

用于显示流的调用小部件。 调用.dart:

import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:flutter_webrtc/flutter_webrtc.dart';
import 'package:nextgen_myhealth_patients/services/socket/signal_service.dart';
import 'package:nextgen_myhealth_patients/services/video/signaling.dart';

class Call extends StatefulWidget {
  final String callerId, calleeId;
  final dynamic offer;
  final dynamic iceCandidate;
  const Call({
    super.key,
    this.offer,
    this.iceCandidate,
    required this.callerId,
    required this.calleeId,
  });

  @override
  State<Call> createState() => _CallScreenState();
}

class _CallScreenState extends State<Call> {
  // socket instance
  final socket = SignallingService.instance.socket;

  // videoRenderer for localPeer
  final _localRTCVideoRenderer = RTCVideoRenderer();

  // videoRenderer for remotePeer
  final _remoteRTCVideoRenderer = RTCVideoRenderer();

  // mediaStream for localPeer
  MediaStream? _localStream;

  // RTC peer connection
  RTCPeerConnection? _rtcPeerConnection;

  // list of rtcCandidates to be sent over signalling
  List<RTCIceCandidate> rtcIceCadidates = [];

  // media status
  bool isAudioOn = true, isVideoOn = true, isFrontCameraSelected = true;

  Signaling signaling = Signaling();

  double xPositionRemoteRTCVideoView = 20;
  double yPositionRemoteRTCVideoView = 20;

  double deltaX = 20;
  double deltaY = 20;
  @override
  void initState() {
    // initializing renderers
    _localRTCVideoRenderer.initialize();
    _remoteRTCVideoRenderer.initialize();

    // setup Peer Connection
    // _setupPeerConnection();
    // signaling.on
    signaling.onAddRemoteStream = ((stream) {
      _remoteRTCVideoRenderer.srcObject = stream;
      setState(() {});
    });

    _init();

    signaling.joinRoom(true, widget.callerId, _remoteRTCVideoRenderer,
        widget.iceCandidate, widget.offer);
    super.initState();
  }

  @override
  void setState(fn) {
    if (mounted) {
      super.setState(fn);
    }
  }

  _init() async {
    _localStream = await signaling.openUserMedia(
        _localRTCVideoRenderer, _remoteRTCVideoRenderer);
    _localRTCVideoRenderer.srcObject = _localStream;
    setState(() {});
  }

  _leaveCall() {
    Navigator.pop(context);
    signaling.hangUp(_localRTCVideoRenderer);
  }

  _toggleMic() {
    // change status
    isAudioOn = !isAudioOn;
    // enable or disable audio track
    _localStream?.getAudioTracks().forEach((track) {
      track.enabled = isAudioOn;
    });
    setState(() {});
  }

  _toggleCamera() {
    // change status
    isVideoOn = !isVideoOn;

    // enable or disable video track
    _localStream?.getVideoTracks().forEach((track) {
      track.enabled = isVideoOn;
    });
    setState(() {});
  }

  _switchCamera() {
    // change status
    print('here');
    isFrontCameraSelected = !isFrontCameraSelected;

    // switch camera
    _localStream?.getVideoTracks().forEach((track) {
      // ignore: deprecated_member_use
      track.switchCamera();
    });
    setState(() {});
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      // appBar: AppBar(
      //   title: Text("Welcome to Flutter Explained - WebRTC"),
      // ),
      body: AnnotatedRegion<SystemUiOverlayStyle>(
        value: SystemUiOverlayStyle.light
            .copyWith(systemNavigationBarColor: Colors.white),
        child: Column(
          children: [
            Expanded(
              child: Stack(children: [
                RTCVideoView(
                  _remoteRTCVideoRenderer,
                  objectFit: RTCVideoViewObjectFit.RTCVideoViewObjectFitCover,
                ),
                Positioned(
                  right: xPositionRemoteRTCVideoView,
                  bottom: yPositionRemoteRTCVideoView,
                  child: GestureDetector(
                    onPanStart: (details) {
                      setState(() {
                        deltaX = 20;
                        deltaY = 20;
                      });
                    },
                    onPanUpdate: (details) {
                      setState(() {
                        deltaX += details.delta.dx;
                        deltaY += details.delta.dy;
                        xPositionRemoteRTCVideoView +=
                            details.delta.dx.sign * 5;
                        yPositionRemoteRTCVideoView +=
                            details.delta.dy.sign * 5;
                      });
                    },
                    child: SizedBox(
                      height: 180,
                      width: 150,
                      child: RTCVideoView(
                        _localRTCVideoRenderer,
                        mirror: isFrontCameraSelected,
                        objectFit:
                            RTCVideoViewObjectFit.RTCVideoViewObjectFitCover,
                      ),
                    ),
                  ),
                )
              ]),
            ),
            Padding(
              padding: const EdgeInsets.symmetric(vertical: 12),
              child: Row(
                mainAxisAlignment: MainAxisAlignment.spaceAround,
                children: [
                  IconButton(
                    icon: Icon(isAudioOn ? Icons.mic : Icons.mic_off),
                    onPressed: _toggleMic,
                  ),
                  IconButton(
                    icon: const Icon(Icons.call_end),
                    iconSize: 30,
                    onPressed: _leaveCall,
                  ),
                  IconButton(
                    icon: const Icon(Icons.cameraswitch),
                    onPressed: _switchCamera,
                  ),
                  IconButton(
                    icon: Icon(isVideoOn ? Icons.videocam : Icons.videocam_off),
                    onPressed: _toggleCamera,
                  ),
                ],
              ),
            ),
          ],
        ),
      ),
    );
  }

  @override
  void dispose() {
    _localRTCVideoRenderer.dispose();
    _remoteRTCVideoRenderer.dispose();
    _localStream?.dispose();
    _rtcPeerConnection?.dispose();
    super.dispose();
  }
}


此时,我该如何解决这个问题?

flutter flutter-webrtc
© www.soinside.com 2019 - 2024. All rights reserved.