Miracast sink using UWP APIs in a .NET Core project does not receive video frames


We managed to build a working Miracast sink with UWP, and we now want to use this functionality in a .NET Core application. We therefore followed this guide on using UWP APIs in a .NET Core project:

Use UWP APIs with WPF and .NET Core
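
For reference, the project setup that guide describes boils down to roughly the following (a minimal sketch assuming a .NET Core 3.1 WPF project and the Microsoft.Windows.SDK.Contracts package; the version number is a placeholder and should match the installed Windows SDK):

<Project Sdk="Microsoft.NET.Sdk.WindowsDesktop">
  <PropertyGroup>
    <OutputType>WinExe</OutputType>
    <TargetFramework>netcoreapp3.1</TargetFramework>
    <UseWPF>true</UseWPF>
  </PropertyGroup>
  <ItemGroup>
    <!-- Exposes the Windows Runtime (UWP) APIs to the .NET Core project -->
    <PackageReference Include="Microsoft.Windows.SDK.Contracts" Version="10.0.19041.1" />
  </ItemGroup>
</Project>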

Once the project is running we get a connection from a smartphone to the application, but we do not receive any video frames from the MediaPlayer object (unlike the original UWP project, where this works fine).

We observed that the MediaSource object contains an mcrecv URL (example: mcrecv:/192.168.137.247:7236h-0000000c192.168.137.1),

but the MediaPlayer consuming it never raises a VideoFrameAvailable event.

How can we solve this? Below is the basic implementation we are using.

using System;
using System.Diagnostics;
using System.Windows;
using Windows.Graphics.Imaging;
using Windows.Media.Miracast;
using Windows.Media.Playback;

namespace Miracast_GUI
{
    /// <summary>
    /// Interaction logic for MainWindow.xaml
    /// </summary>
    public partial class MainWindow : Window
    {
        public MiracastReceiver receiver;
        public MiracastReceiverSession session;
        public MediaPlayer mp;
        public SoftwareBitmap frameServerDest;
        public MiracastReceiverConnection connection;

        public MainWindow()
        {
            InitializeComponent();
            // Starts service
            StartMiracastService();
        }

        public void StartMiracastService()
        {
            receiver = new MiracastReceiver();
            receiver.StatusChanged += Receiver_StatusChanged;
            MiracastReceiverSettings settings = receiver.GetDefaultSettings();

            settings.FriendlyName = "Miracast-Service-Test";
            settings.AuthorizationMethod = MiracastReceiverAuthorizationMethod.None;
            settings.ModelName = receiver.GetDefaultSettings().ModelName;
            settings.ModelNumber = receiver.GetDefaultSettings().ModelNumber;
            settings.RequireAuthorizationFromKnownTransmitters = receiver.GetDefaultSettings().RequireAuthorizationFromKnownTransmitters;

            receiver.DisconnectAllAndApplySettings(settings);

            session = receiver.CreateSession(/*CoreApplication.MainView*/ null);
            session.AllowConnectionTakeover = true;

            session.ConnectionCreated += Session_ConnectionCreated;
            session.MediaSourceCreated += Session_MediaSourceCreated;
            session.Disconnected += Session_Disconnected;

            MiracastReceiverSessionStartResult result = session.Start();
            Debug.WriteLine("Status: " + result.Status);
        }

        private void Session_Disconnected(MiracastReceiverSession sender, MiracastReceiverDisconnectedEventArgs args)
        {
            session.Dispose();
        }

        private void Receiver_StatusChanged(MiracastReceiver sender, object args)
        {
            Debug.WriteLine(receiver.GetStatus().ListeningStatus);
        }

        private void Session_ConnectionCreated(MiracastReceiverSession sender, MiracastReceiverConnectionCreatedEventArgs args)
        {
            connection = args.Connection;
            connection.InputDevices.Keyboard.TransmitInput = true;
            connection.InputDevices.GameController.Mode =
                MiracastReceiverGameControllerDeviceUsageMode.AsMouseAndKeyboard;

            Debug.WriteLine("CONNECTION CREATED");
        }

        private void Session_MediaSourceCreated(MiracastReceiverSession sender, MiracastReceiverMediaSourceCreatedEventArgs args)
        {
            mp = new MediaPlayer
            {
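                // IsVideoFrameServerEnabled puts the player into frame server mode, so
                // decoded frames are handed to VideoFrameAvailable instead of being
                // rendered; RealTimePlayback marks the source as live to keep latency low.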
                IsVideoFrameServerEnabled = true,
                AutoPlay = true,
                Source = args.MediaSource,
                RealTimePlayback = true
            };

            mp.VideoFrameAvailable += Mp_VideoFrameAvailable;
            Debug.WriteLine(mp.PlaybackSession.PlaybackState);
            mp.Play();

            Debug.WriteLine("MEDIA SOURCE CREATED");
        }

        private void Mp_VideoFrameAvailable(MediaPlayer sender, object args)
        {
            Console.WriteLine("Received frame...");
        }
    }
}
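
For completeness: the handler above only logs, because the event never fires in the first place. In the UWP frame-server pattern the frame would normally be copied out with MediaPlayer.CopyFrameToVideoSurface, roughly as in the following sketch (it assumes Win2D's Microsoft.Graphics.Canvas for the CanvasBitmap and a hard-coded frame size; both are placeholders and not part of our actual code):

using Microsoft.Graphics.Canvas; // Win2D, assumed to be referenced separately

private void Mp_VideoFrameAvailable(MediaPlayer sender, object args)
{
    // Lazily allocate the CPU-side destination bitmap; 1920x1080 is a placeholder size.
    if (frameServerDest == null)
    {
        frameServerDest = new SoftwareBitmap(BitmapPixelFormat.Rgba8, 1920, 1080, BitmapAlphaMode.Ignore);
    }

    CanvasDevice canvasDevice = CanvasDevice.GetSharedDevice();
    using (CanvasBitmap canvasBitmap = CanvasBitmap.CreateFromSoftwareBitmap(canvasDevice, frameServerDest))
    {
        // Copy the current video frame into the Direct3D-backed bitmap for further processing.
        sender.CopyFrameToVideoSurface(canvasBitmap);
    }
}

In our WPF/.NET Core port this code would never run anyway, since VideoFrameAvailable is not raised at all, which is the part we are trying to fix.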
c# wpf uwp mirroring miracast
1 Answer