Flutter crashes when using flutter-webrtc

Problem description (Votes: 0, Answers: 2)

I am using the following code with the flutter-webrtc package for video calling. It works on the web, but crashes on Android. Everything is up to date and I am using the latest versions of all tools. I have set compileSdkVersion to 32.

import 'dart:convert';

import 'package:flutter/material.dart';
import 'package:flutter_webrtc/flutter_webrtc.dart';
import 'package:sdp_transform/sdp_transform.dart';

void main() {
  runApp(const MyApp());
}

class MyApp extends StatelessWidget {
  const MyApp({Key? key}) : super(key: key);

  // This widget is the root of your application.
  @override
  Widget build(BuildContext context) {
    return MaterialApp(
      title: 'Flutter Demo',
      theme: ThemeData(
        primarySwatch: Colors.blue,
      ),
      home: const MyHomePage(title: 'Flutter Demo Home Page'),
    );
  }
}

class MyHomePage extends StatefulWidget {
  const MyHomePage({Key? key, required this.title}) : super(key: key);

  final String title;

  @override
  State<MyHomePage> createState() => _MyHomePageState();
}

class _MyHomePageState extends State<MyHomePage> {
  final _localVideoRenderer = RTCVideoRenderer();
  final _remoteVideoRenderer = RTCVideoRenderer();
  final sdpController = TextEditingController();

  bool _offer = false;

  RTCPeerConnection? _peerConnection;
  MediaStream? _localStream;

  initRenderer() async {
    await _localVideoRenderer.initialize();
    await _remoteVideoRenderer.initialize();
  }

  _getUserMedia() async {
    final Map<String, dynamic> mediaConstraints = {
      'audio': true,
      'video': true
    };

    MediaStream stream =
    await navigator.mediaDevices.getUserMedia(mediaConstraints);

    _localVideoRenderer.srcObject = stream;
    return stream;
  }

  _createPeerConnecion() async {
    Map<String, dynamic> configuration = {
      "iceServers": [
        {"url": "stun:stun.l.google.com:19302"},
      ]
    };

    final Map<String, dynamic> offerSdpConstraints = {
      "mandatory": {
        "OfferToReceiveAudio": true,
        "OfferToReceiveVideo": true,
      },
      "optional": [],
    };

    _localStream = await _getUserMedia();

    RTCPeerConnection pc =
    await createPeerConnection(configuration, offerSdpConstraints);

    pc.addStream(_localStream!);

    pc.onIceCandidate = (e) {
      if (e.candidate != null) {
        print(json.encode({
          'candidate': e.candidate.toString(),
          'sdpMid': e.sdpMid.toString(),
          'sdpMlineIndex': e.sdpMLineIndex,
        }));
      }
    };


    pc.onIceConnectionState = (e) {
      print(e);
    };

    pc.onAddStream = (stream) {
      print('addStream: ' + stream.id);
      _remoteVideoRenderer.srcObject = stream;
    };

    return pc;
  }

  void _createOffer() async {
    RTCSessionDescription description =
    await _peerConnection!.createOffer({'offerToReceiveVideo': 1});
    var session = parse(description.sdp.toString());
    print(json.encode(session));
    _offer = true;

    _peerConnection!.setLocalDescription(description);
  }

  void _createAnswer() async {
    RTCSessionDescription description =
    await _peerConnection!.createAnswer({'offerToReceiveVideo': 1});

    var session = parse(description.sdp.toString());
    print(json.encode(session));

    _peerConnection!.setLocalDescription(description);
  }

  void _setRemoteDescription() async {
    String jsonString = sdpController.text;
    dynamic session = await jsonDecode(jsonString);

    String sdp = write(session, null);

    RTCSessionDescription description =
    RTCSessionDescription(sdp, _offer ? 'answer' : 'offer');
    print(description.toMap());

    await _peerConnection!.setRemoteDescription(description);
  }

  void _addCandidate() async {
    String jsonString = sdpController.text;
    dynamic session = await jsonDecode(jsonString);
    print(session['candidate']);
    dynamic candidate = RTCIceCandidate(
        session['candidate'], session['sdpMid'], session['sdpMlineIndex']);
    await _peerConnection!.addCandidate(candidate);
  }

  @override
  void initState() {
    initRenderer();
    _createPeerConnecion().then((pc) {
      _peerConnection = pc;

    });
    // _getUserMedia();
    super.initState();
  }

  @override
  void dispose() async {
    await _localVideoRenderer.dispose();
    sdpController.dispose();
    super.dispose();
  }

  SizedBox videoRenderers() => SizedBox(
    height: 210,
    child: Column(children: [
      Flexible(
        child: Container(
          key: const Key('local'),
          margin: const EdgeInsets.fromLTRB(5.0, 5.0, 5.0, 5.0),
          decoration: const BoxDecoration(color: Colors.black),
          child: RTCVideoView(_localVideoRenderer),
        ),
      ),
      Flexible(
        child: Container(
          key: const Key('remote'),
          margin: const EdgeInsets.fromLTRB(5.0, 5.0, 5.0, 5.0),
          decoration: const BoxDecoration(color: Colors.black),
          child: RTCVideoView(_remoteVideoRenderer),
        ),
      ),
    ]),
  );

  @override
  Widget build(BuildContext context) {
    return Scaffold(
        appBar: AppBar(
          title: Text(widget.title),
        ),
        body: Column(
          children: [
            videoRenderers(),
            Row(
              children: [
                Padding(
                  padding: const EdgeInsets.all(16.0),
                  child: SizedBox(
                    width: MediaQuery.of(context).size.width * 0.5,
                    child: TextField(
                      controller: sdpController,
                      keyboardType: TextInputType.multiline,
                      maxLines: 4,
                      maxLength: TextField.noMaxLength,
                    ),
                  ),
                ),
                Column(
                  crossAxisAlignment: CrossAxisAlignment.center,
                  children: [
                    ElevatedButton(
                      onPressed: _createOffer,
                      child: const Text("Offer"),
                    ),
                    const SizedBox(
                      height: 10,
                    ),
                    ElevatedButton(
                      onPressed: _createAnswer,
                      child: const Text("Answer"),
                    ),
                    const SizedBox(
                      height: 10,
                    ),
                    ElevatedButton(
                      onPressed: _setRemoteDescription,
                      child: const Text("Set Remote Description"),
                    ),
                    const SizedBox(
                      height: 10,
                    ),
                    ElevatedButton(
                      onPressed: _addCandidate,
                      child: const Text("Set Candidate"),
                    ),
                  ],
                )
              ],
            ),
          ],
        ));
  }
}

Running the app with -v gives me this log:

[+2824 ms] I/org.webrtc.Logging(12049): EglRenderer: Duration: 4003 ms. Frames received: 0. Dropped: 0. Rendered: 0. Render fps: .0. Average render time: NA. Average swapBuffer time: NA.
[ +142 ms] I/FlutterWebRTCPlugin(12049): getUserMedia(audio): mandatory: [], optional: [googNoiseSuppression: true, googEchoCancellation: true, echoCancellation: true, googEchoCancellation2: true, googDAEchoCancellation: true]
[        ] I/FlutterWebRTCPlugin(12049): getUserMedia(video): null
[        ] I/CameraManagerGlobal(12049): Connecting to camera service
[  +16 ms] D/EGL_emulation(12049): app_time_stats: avg=7500.71ms min=43.54ms max=14957.89ms count=2
[   +5 ms] D/FlutterWebRTCPlugin(12049): Creating video capturer using Camera2 API.
[   +5 ms] D/FlutterWebRTCPlugin(12049): Create front camera 1 succeeded
[   +3 ms] D/HostConnection(12049): createUnique: call
[        ] D/HostConnection(12049): HostConnection::get() New Host Connection established 0x7c7e120f9150, tid 12168
[   +6 ms] D/HostConnection(12049): HostComposition ext ANDROID_EMU_CHECKSUM_HELPER_v1 ANDROID_EMU_native_sync_v2 ANDROID_EMU_native_sync_v3 ANDROID_EMU_native_sync_v4 ANDROID_EMU_dma_v1 ANDROID_EMU_direct_mem ANDROID_EMU_host_composition_v1 ANDROID_EMU_host_composition_v2 ANDROID_EMU_vulkan ANDROID_EMU_deferred_vulkan_commands ANDROID_EMU_vulkan_null_optional_strings ANDROID_EMU_vulkan_create_resources_with_requirements ANDROID_EMU_YUV_Cache ANDROID_EMU_vulkan_ignored_handles ANDROID_EMU_has_shared_slots_host_memory_allocator ANDROID_EMU_vulkan_free_memory_sync ANDROID_EMU_vulkan_shader_float16_int8 ANDROID_EMU_vulkan_async_queue_submit ANDROID_EMU_vulkan_queue_submit_with_commands ANDROID_EMU_sync_buffer_data ANDROID_EMU_read_color_buffer_dma GL_OES_EGL_image_external_essl3 GL_OES_vertex_array_object GL_KHR_texture_compression_astc_ldr ANDROID_EMU_host_side_tracing ANDROID_EMU_gles_max_version_3_1 
[        ] I/org.webrtc.Logging(12049): EglBase14Impl: Using OpenGL ES version 2
[        ] D/EGL_emulation(12049): eglCreateContext: 0x7c7e120f8910: maj 3 min 1 rcv 4
[   +1 ms] D/EGL_emulation(12049): eglMakeCurrent: 0x7c7e120f8910: ver 3 1 (tinfo 0x7c802a65b380) (first time)
[   +1 ms] E/EGL_emulation(12049): eglQueryContext 32c0  EGL_BAD_ATTRIBUTE
[        ] E/EGL_emulation(12049): tid 12168: eglQueryContext(2160): error 0x3004 (EGL_BAD_ATTRIBUTE)
[        ] I/org.webrtc.Logging(12049): CameraCapturer: startCapture: 1280x720@30
[        ] D/FlutterWebRTCPlugin(12049): changeCaptureFormat: 1280x720@30
[   +1 ms] I/org.webrtc.Logging(12049): Camera2Session: Create new camera2 session on camera 1
[        ] I/org.webrtc.Logging(12049): Camera2Session: start
[   +1 ms] D/FlutterWebRTCPlugin(12049): MediaStream id: 6d00bd17-0818-4d0f-92f6-4f860f3f28e7
[   +2 ms] I/flutter (12049): here2
[   +1 ms] I/flutter (12049): here3
[   +3 ms] I/org.webrtc.Logging(12049): Camera2Session: Available preview sizes: [1920x1440, 1920x1080, 1920x960, 1600x1200, 1440x1080, 1280x960, 1280x720, 1024x768, 800x600, 720x480, 640x480, 640x360, 352x288, 320x240, 176x144]
[        ] I/org.webrtc.Logging(12049): Camera2Session: Available fps ranges: [[15.0:15.0], [7.0:30.0], [15.0:30.0], [30.0:30.0]]
[        ] I/org.webrtc.Logging(12049): Camera2Session: Using capture format: 1280x720@[7.0:30.0]
[        ] I/org.webrtc.Logging(12049): Camera2Session: Opening camera 1
[        ] D/FlutterWebRTCPlugin(12049): CameraEventsHandler.onCameraOpening: cameraName=1
[   +7 ms] W/FlutterWebRTCPlugin(12049): FlutterRTCVideoRenderer.setVideoTrack, set video track to cd1aeae1-28eb-4a89-bbb7-54c3508e9832
[        ] I/org.webrtc.Logging(12049): EglRenderer: Releasing.
[   +1 ms] I/org.webrtc.Logging(12049): EglRenderer: eglBase detach and release.
[        ] I/org.webrtc.Logging(12049): EglRenderer: Quitting render thread.
[   +2 ms] I/org.webrtc.Logging(12049): EglRenderer: Releasing done.
[        ] I/org.webrtc.Logging(12049): EglRenderer: Initializing EglRenderer
[        ] I/org.webrtc.Logging(12049): EglRenderer: EglBase.create shared context
[        ] D/HostConnection(12049): createUnique: call
[        ] D/HostConnection(12049): HostConnection::get() New Host Connection established 0x7c7e120f5550, tid 12169
[   +5 ms] D/HostConnection(12049): HostComposition ext ANDROID_EMU_CHECKSUM_HELPER_v1 ANDROID_EMU_native_sync_v2 ANDROID_EMU_native_sync_v3 ANDROID_EMU_native_sync_v4 ANDROID_EMU_dma_v1 ANDROID_EMU_direct_mem ANDROID_EMU_host_composition_v1 ANDROID_EMU_host_composition_v2 ANDROID_EMU_vulkan ANDROID_EMU_deferred_vulkan_commands ANDROID_EMU_vulkan_null_optional_strings ANDROID_EMU_vulkan_create_resources_with_requirements ANDROID_EMU_YUV_Cache ANDROID_EMU_vulkan_ignored_handles ANDROID_EMU_has_shared_slots_host_memory_allocator ANDROID_EMU_vulkan_free_memory_sync ANDROID_EMU_vulkan_shader_float16_int8 ANDROID_EMU_vulkan_async_queue_submit ANDROID_EMU_vulkan_queue_submit_with_commands ANDROID_EMU_sync_buffer_data ANDROID_EMU_read_color_buffer_dma GL_OES_EGL_image_external_essl3 GL_OES_vertex_array_object GL_KHR_texture_compression_astc_ldr ANDROID_EMU_host_side_tracing ANDROID_EMU_gles_max_version_3_1 
[        ] I/org.webrtc.Logging(12049): EglBase14Impl: Using OpenGL ES version 2
[   +2 ms] D/EGL_emulation(12049): eglCreateContext: 0x7c7e120f5fd0: maj 3 min 1 rcv 4
[   +1 ms] D/EGL_emulation(12049): eglMakeCurrent: 0x7c7e120f5fd0: ver 3 1 (tinfo 0x7c802a65b280) (first time)
[  +72 ms] I/org.webrtc.Logging(12049): Camera2Session: Camera opened.
[   +9 ms] I/org.webrtc.Logging(12049): Camera2Session: Camera capture session configured.
[   +4 ms] I/org.webrtc.Logging(12049): Camera2Session: Stabilization not available.
[   +1 ms] I/org.webrtc.Logging(12049): Camera2Session: Using continuous video auto-focus.
[   +2 ms] W/FlutterWebRTCPlugin(12049): audioFocusChangeListener [Speakerphone(name=Speakerphone), Earpiece(name=Earpiece)] Speakerphone(name=Speakerphone)
[        ] I/flutter (12049): here4
[   +8 ms] I/flutter (12049): here5
[   +3 ms] I/flutter (12049): here6
[   +5 ms] I/flutter (12049): here7
[   +6 ms] I/flutter (12049): here8
[   +5 ms] I/org.webrtc.Logging(12049): Camera2Session: Camera device successfully started.
[        ] I/org.webrtc.Logging(12049): CameraCapturer: Create session done. Switch state: IDLE
[  +38 ms] I/org.webrtc.Logging(12049): SurfaceTextureHelper: Setting listener to org.webrtc.Camera2Session$CaptureSessionCallback$$ExternalSyntheticLambda0@b337dc6
[  +19 ms] E/rtc     (12049): #
[        ] E/rtc     (12049): # Fatal error in: ../../pc/peer_connection.cc, line 823
[        ] E/rtc     (12049): # last system error: 0
[        ] E/rtc     (12049): # Check failed: !IsUnifiedPlan()
[        ] E/rtc     (12049): # AddStream is not available with Unified Plan SdpSemantics. Please use AddTrack instead.
[  +50 ms] D/FlutterWebRTCPlugin(12049): CameraEventsHandler.onFirstFrameAvailable
[ +215 ms] F/libc    (12049): Fatal signal 6 (SIGABRT), code -6 (SI_TKILL) in tid 12158 (signaling_threa), pid 12049 (example.meetbin)
[ +345 ms] *** *** *** *** *** *** *** *** *** *** *** *** *** *** *** ***
[        ] Build fingerprint: 'google/sdk_gphone64_x86_64/emulator64_x86_64_arm64:12/SE1A.220630.001/8789670:userdebug/dev-keys'
[        ] Revision: '0'
[        ] ABI: 'x86_64'
[        ] Timestamp: 2022-12-10 16:58:42.309837842+0330
[        ] Process uptime: 0s
[        ] Cmdline: com.example.meetbin
[        ] pid: 12049, tid: 12158, name: signaling_threa  >>> com.example.meetbin <<<
[        ] uid: 10187
[        ] signal 6 (SIGABRT), code -6 (SI_TKILL), fault addr --------
[        ]     rax 0000000000000000  rbx 0000000000002f11  rcx 00007c80141355cf  rdx 0000000000000006
[        ]     r8  00007c7e32114eb0  r9  00007c7e32114eb0  r10 00007c7d002fb910  r11 0000000000000207
[        ]     r12 00007c7bf61b7e4f  r13 00007c7d002fbc40  r14 00007c7d002fb908  r15 0000000000002f7e
[        ]     rdi 0000000000002f11  rsi 0000000000002f7e
[        ]     rbp 00007c7d002fba18  rsp 00007c7d002fb900  rip 00007c80141355cf
[        ] backtrace:
[        ]       #00 pc 000000000005e5cf  /apex/com.android.runtime/lib64/bionic/libc.so (abort+191) (BuildId: 5db8d317d3741b337ef046540bbdd0f7)
[        ]       #01 pc 000000000056d2f1  /data/app/~~G9ljGq1djjR8w8Pu4e90qg==/com.example.meetbin-50CjO9EMjWwQX8M0TvtAXw==/base.apk!libjingle_peerconnection_so.so (BuildId: 6090771d6338227b)
[        ]       #02 pc 000000000056d269  /data/app/~~G9ljGq1djjR8w8Pu4e90qg==/com.example.meetbin-50CjO9EMjWwQX8M0TvtAXw==/base.apk!libjingle_peerconnection_so.so (BuildId: 6090771d6338227b)
[        ]       #03 pc 00000000004fbb9f  /data/app/~~G9ljGq1djjR8w8Pu4e90qg==/com.example.meetbin-50CjO9EMjWwQX8M0TvtAXw==/base.apk!libjingle_peerconnection_so.so (BuildId: 6090771d6338227b)
[        ]       #04 pc 00000000004f4f0e  /data/app/~~G9ljGq1djjR8w8Pu4e90qg==/com.example.meetbin-50CjO9EMjWwQX8M0TvtAXw==/base.apk!libjingle_peerconnection_so.so (BuildId: 6090771d6338227b)
[        ]       #05 pc 00000000004f4ec2  /data/app/~~G9ljGq1djjR8w8Pu4e90qg==/com.example.meetbin-50CjO9EMjWwQX8M0TvtAXw==/base.apk!libjingle_peerconnection_so.so (BuildId: 6090771d6338227b)
[        ]       #06 pc 0000000000593bed  /data/app/~~G9ljGq1djjR8w8Pu4e90qg==/com.example.meetbin-50CjO9EMjWwQX8M0TvtAXw==/base.apk!libjingle_peerconnection_so.so (BuildId: 6090771d6338227b)
[        ]       #07 pc 00000000005934ea  /data/app/~~G9ljGq1djjR8w8Pu4e90qg==/com.example.meetbin-50CjO9EMjWwQX8M0TvtAXw==/base.apk!libjingle_peerconnection_so.so (BuildId: 6090771d6338227b)
[        ]       #08 pc 000000000059282e  /data/app/~~G9ljGq1djjR8w8Pu4e90qg==/com.example.meetbin-50CjO9EMjWwQX8M0TvtAXw==/base.apk!libjingle_peerconnection_so.so (BuildId: 6090771d6338227b)
[        ]       #09 pc 000000000059384d  /data/app/~~G9ljGq1djjR8w8Pu4e90qg==/com.example.meetbin-50CjO9EMjWwQX8M0TvtAXw==/base.apk!libjingle_peerconnection_so.so (BuildId: 6090771d6338227b)
[        ]       #10 pc 00000000000c758a  /apex/com.android.runtime/lib64/bionic/libc.so (__pthread_start(void*)+58) (BuildId: 5db8d317d3741b337ef046540bbdd0f7)
[        ]       #11 pc 000000000005fd87  /apex/com.android.runtime/lib64/bionic/libc.so (__start_thread+55) (BuildId: 5db8d317d3741b337ef046540bbdd0f7)
[  +91 ms] Service protocol connection closed.
[        ] Lost connection to device.
[        ] DevFS: Deleting filesystem on the device (file:///data/user/0/com.example.meetbin/code_cache/meetbin_appNWCNXA/meetbin_app/)
[  +21 ms] executing: /home/dwalves/Documents/development/android/SDK/platform-tools/adb -s emulator-5554 shell am force-stop com.example.meetbin

flutter_webrtc: ^0.9.17
sdp_transform: ^0.3.2

I can't figure out what the problem is.

android flutter dart webrtc flutter-webrtc
2 Answers
3 votes

Update your peer connection configuration to include "sdpSemantics".

Check the main issue.

Map<String, dynamic> configuration = {
  "sdpSemantics": "plan-b", // Add this line
  'iceServers': [
    {
      'urls': [
        // ice server urls
      ]
    }
  ]
};
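
Note that "plan-b" is the legacy SDP semantics and may not be accepted by newer WebRTC builds. The crash log itself ("AddStream is not available with Unified Plan SdpSemantics. Please use AddTrack instead.") points to the other possible fix: keep the default unified-plan and replace pc.addStream with per-track addTrack calls. A minimal, untested sketch of that change against the question's _createPeerConnecion (assuming flutter_webrtc 0.9.x, where addTrack takes the track plus the stream it belongs to):

    _localStream = await _getUserMedia();

    RTCPeerConnection pc =
        await createPeerConnection(configuration, offerSdpConstraints);

    // Under unified-plan, add each track individually instead of
    // calling pc.addStream(_localStream!).
    for (final track in _localStream!.getTracks()) {
      await pc.addTrack(track, _localStream!);
    }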

0 votes

Not sure if this gives you a clue. I was using RTCPeerConnection.addTrack rather than RTCPeerConnection.addStream, but my problem was that I followed the documentation at https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/addTrack, which says "If no streams are specified, then the track is streamless. This is perfectly acceptable, although it will be up to the remote peer to decide what stream to insert the track into, if any." The flutter_webrtc implementation is different, though: you must specify a stream for the track:

_localStream = await navigator.mediaDevices.getUserMedia({
  'audio': true,
  'video': true,
});

for (var track in _localStream.getTracks()) {
  _pc.addTrack(track, _localStream);
  // _pc.addTrack(track); // This will crash the app.
}
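
One related point worth checking (an assumption on my part, not something the crash log shows): under unified-plan the recommended callback for incoming remote media is onTrack rather than the deprecated onAddStream, so the remote renderer hookup from the question would look roughly like this (sketch, not tested):

_pc.onTrack = (RTCTrackEvent event) {
  // Attach the first stream carrying a remote video track to the renderer.
  if (event.track.kind == 'video' && event.streams.isNotEmpty) {
    _remoteVideoRenderer.srcObject = event.streams[0];
  }
};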