Unity Agora SDK - AR Camera Issue

Problem description · Votes: 0 · Answers: 1

Hi, I am trying to follow this guide: https://www.agora.io/en/blog/video-chat-with-unity3d-ar-foundation-pt3-remote-assistance-app/

Basically, I want to build a remote-assistance app that connects a technician to a viewer in augmented reality. The problem is that even after spending several days on the code, I cannot get the broadcaster's AR camera video to reach the other participants. On top of that, the Agora SDK has changed a lot since the guide was written, so I am not sure whether my implementation is correct. I would appreciate help with this.

Agora Video SDK version: v4.2.6. Unity version: 2019.4.40f1.

Here is my TechnicianManager.cs:

using System.Collections;
using System.Collections.Generic;
using Agora_RTC_Plugin.API_Example.Examples.Advanced.WriteBackVideoRawData;
using Agora.Rtc;
using UnityEngine;
using UnityEngine.Android;
using UnityEngine.SceneManagement;
using UnityEngine.UI;
using UnityEngine.XR.ARFoundation;

public class TechnicianManager : MonoBehaviour
{
    private IRtcEngine mRtcEngine;
    [SerializeField] private ARCameraManager cameraManager;

    [SerializeField] public string appId;
    [SerializeField] public string channelName;
    
    private Texture2D BufferTexture;
    private static TextureFormat ConvertFormat = TextureFormat.BGRA32;
    private static VIDEO_PIXEL_FORMAT PixelFormat = VIDEO_PIXEL_FORMAT.VIDEO_PIXEL_BGRA;
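    // BGRA32 raw texture data is laid out B,G,R,A, which matches VIDEO_PIXEL_BGRA,
    // so GetRawTextureData() can be pushed to the SDK without per-pixel conversion.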
    
    private int i = 0; // monotonic timestamp counter

    private void SetupAgoraEngine()
    {
        mRtcEngine = Agora.Rtc.RtcEngine.CreateAgoraRtcEngine();
        
        UserEventHandler handler = new UserEventHandler(this);
        
        RtcEngineContext context = new RtcEngineContext(appId, 0,
            CHANNEL_PROFILE_TYPE.CHANNEL_PROFILE_LIVE_BROADCASTING,
            AUDIO_SCENARIO_TYPE.AUDIO_SCENARIO_DEFAULT, AREA_CODE.AREA_CODE_GLOB);
        
        mRtcEngine.Initialize(context);
        mRtcEngine.InitEventHandler(handler);
        
        mRtcEngine.SetVideoEncoderConfiguration(new VideoEncoderConfiguration
        {
            dimensions = new VideoDimensions{width = 360, height = 640},
            frameRate = 24,
            bitrate = 800,
            orientationMode = ORIENTATION_MODE.ORIENTATION_MODE_FIXED_PORTRAIT
        });
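        // The encoder is fixed at 360x640 portrait; frames pushed at a different
        // size are scaled by the SDK to fit this configuration.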
        
        // allow camera output callback
        //mRtcEngine.EnableVideoObserver();
        //mRtcEngine.EnableLocalVideo(false);

        //mRtcEngine.RegisterVideoFrameObserver(new VideoFrameObserver(this), VIDEO_OBSERVER_FRAME_TYPE.FRAME_TYPE_NV21,
        //    VIDEO_OBSERVER_POSITION.POSITION_POST_CAPTURER, OBSERVER_MODE.RAW_DATA);
        
        //  mRtcEngine.SetVideoQualityParameters(true);

        SenderOptions senderOptions = new SenderOptions
        {
            targetBitrate = 800
        };
        
        mRtcEngine.SetExternalVideoSource(true, false, EXTERNAL_VIDEO_SOURCE_TYPE.VIDEO_FRAME, senderOptions);
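        // SetExternalVideoSource must be called after Initialize and before
        // JoinChannel; otherwise the SDK keeps capturing from the device camera.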
        
        // enable video
        mRtcEngine.EnableAudio();
        mRtcEngine.EnableVideo();
        
        //mRtcEngine.SetChannelProfile(CHANNEL_PROFILE_TYPE.CHANNEL_PROFILE_COMMUNICATION);
        mRtcEngine.SetClientRole(CLIENT_ROLE_TYPE.CLIENT_ROLE_BROADCASTER);
        
        mRtcEngine.JoinChannel("", channelName);
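        // Joining with an empty token only works when the Agora project has no
        // App Certificate enabled; otherwise JoinChannel fails with a token error.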

        // Optional: if a data stream is required, here is a good place to create it
        int streamID = 0;
        DataStreamConfig dataStreamConfig = new DataStreamConfig();
        dataStreamConfig.ordered = true;
        mRtcEngine.CreateDataStream(ref streamID, dataStreamConfig);
        Debug.Log("ARHelper: initializeEngine done, data stream id = " + streamID);
    }
    
    // Start is called before the first frame update
    void Start()
    {
        GameObject go = GameObject.Find("myImage");
        if (go == null)
        {
            return;
        }

        VideoSurface videoSurface = go.AddComponent<VideoSurface>();
        videoSurface.enabled = false;


        go = GameObject.Find("ButtonExit");
        if (go != null)
        {
            Button button = go.GetComponent<Button>();
            if (button != null)
            {
                button.onClick.AddListener(OnLeaveButtonClicked);
            }
        }
        SetupToggleMic();
        
        go = GameObject.Find("ButtonColor");
        if (go != null)
        {
            // the button is only available for AudienceVC
            go.SetActive(false);
        }

        go = GameObject.Find("AR Camera");
        if (go != null)
        {
            cameraManager = go.GetComponent<ARCameraManager>();
        }


        go = GameObject.Find("sphere");
        if (go != null)
        {
            var sphere = go;
            // hide this before AR Camera start capturing
            sphere.SetActive(false);
             
            this.StartCoroutine(DelayAction(.5f,
                () =>
                {
                    sphere.SetActive(true);
                }));
        }
        
        SetupAgoraEngine();
    }

    void EnableSharing()
    {
        cameraManager.frameReceived += OnCameraFrameReceived;
        if (Camera.main != null)
        {
            RenderTexture renderTexture = Camera.main.targetTexture;
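            // Camera.main.targetTexture is null unless a RenderTexture has been
            // assigned to the AR Camera (the guide wires one up). If it is null
            // here, BufferTexture is never created and every OnCameraFrameReceived
            // call on device bails out with "BufferTexture is NULL".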
            if (renderTexture != null)
            {
                BufferTexture = new Texture2D(renderTexture.width, renderTexture.height, ConvertFormat, false);

                // Editor only: the frameReceived event is not invoked in the Editor
                if (Application.platform == RuntimePlatform.OSXEditor || Application.platform == RuntimePlatform.WindowsEditor)
                {
                    Debug.LogWarning(">>> Testing in Editor, start coroutine to capture Render data");
                    StartCoroutine(CoShareRenderData());
                }
            }
            else
            {
                Debug.LogError("ARHelper: renderTexture is NULL");
            }
        }
        else
        {
            Debug.LogError("ARHelper: Camera.main is NULL");
        }
    }
    
    /// <summary>
    ///    For use in Editor testing only.
    /// </summary>
    /// <returns></returns>
    IEnumerator CoShareRenderData()
    {
        yield return new WaitForEndOfFrame();
        OnCameraFrameReceived(default);
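        // Note: this coroutine is started once and never re-invoked, so Editor
        // testing pushes only a single frame as written.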
        yield return null;
    }
    
    /// <summary>
    ///   Delegate callback handles every frame generated by the AR Camera.
    /// </summary>
    /// <param name="eventArgs"></param>
    private void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
    {
        Debug.Log("ARHelper: OnCameraFrameReceived");
        if (BufferTexture == null) // sharing not initialized, or already torn down
        {
            Debug.LogError("ARHelper: BufferTexture is NULL");
            return;
        }
        Camera targetCamera = Camera.main; // AR Camera
        RenderTexture.active = targetCamera.targetTexture; // the targetTexture holds render texture
        Rect rect = new Rect(0, 0, targetCamera.targetTexture.width, targetCamera.targetTexture.height);
        BufferTexture.ReadPixels(rect, 0, 0);
        BufferTexture.Apply();

        byte[] bytes = BufferTexture.GetRawTextureData();
        
        Debug.Log("Bytes obtained from buffer with byteLength: " + bytes.Length);
        
        // sends the Raw data contained in bytes
        StartCoroutine(PushFrame(bytes, (int)rect.width, (int)rect.height,
            () =>
            {
                bytes = null;
            }));
        RenderTexture.active = null;
    }
    
    /// <summary>
    /// Push frame to the remote client.  This is the same code that does ScreenSharing.
    /// </summary>
    /// <param name="bytes">raw video image data</param>
    /// <param name="width"></param>
    /// <param name="height"></param>
    /// <param name="onFinish">callback upon finish of the function</param>
    /// <returns></returns>
    IEnumerator PushFrame(byte[] bytes, int width, int height, System.Action onFinish)
    {
        if (bytes == null || bytes.Length == 0)
        {
            Debug.LogError("ARHelper: Zero bytes found!!!!");
            yield break;
        }
        
        //if the engine is present
        if (mRtcEngine != null)
        {
            //Create a new external video frame
            ExternalVideoFrame externalVideoFrame = new ExternalVideoFrame();
            //Set the buffer type of the video frame
            externalVideoFrame.type = VIDEO_BUFFER_TYPE.VIDEO_BUFFER_RAW_DATA;
            // Set the video pixel format
            externalVideoFrame.format = PixelFormat; // VIDEO_PIXEL_BGRA for now
            //apply raw data you are pulling from the rectangle you created earlier to the video frame
            externalVideoFrame.buffer = bytes;
            //Set the width of the video frame (in pixels)
            externalVideoFrame.stride = width;
            //Set the height of the video frame
            externalVideoFrame.height = height;
            //Remove pixels from the sides of the frame
            externalVideoFrame.cropLeft = 10;
            externalVideoFrame.cropTop = 10;
            externalVideoFrame.cropRight = 10;
            externalVideoFrame.cropBottom = 10;
            //Rotate the video frame (0, 90, 180, or 270)
            externalVideoFrame.rotation = 180;
            // use an incrementing counter as the frame timestamp
            externalVideoFrame.timestamp = i++;
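            // Note: the SDK expects a millisecond timestamp; a bare frame counter
            // may upset frame pacing. If available in this SDK version,
            // GetCurrentMonotonicTimeInMs() is the more typical source.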
            //Push the external video frame with the frame we just created
            // int a = 
            int successCode = mRtcEngine.PushVideoFrame(externalVideoFrame);
            Debug.Log("ARHelper: Pushing Code: " + successCode);
            // Debug.Log(" pushVideoFrame(" + i + ") size:" + bytes.Length + " => " + a);
        }
        else
            Debug.LogError("ARHelper: RTC ENGINE NULL!");
        
        yield return null;
        onFinish();
    }
    
    private void SetupToggleMic()
    {
        GameObject go = GameObject.Find("ToggleButton");
        if (go != null)
        {
            ToggleButton toggle = go.GetComponent<ToggleButton>();
            if (toggle != null)
            {
                toggle.button1.onClick.AddListener(() =>
                {
                    toggle.Tap();
                    mRtcEngine.EnableLocalAudio(false);
                    mRtcEngine.MuteLocalAudioStream(true);
                });
                toggle.button2.onClick.AddListener(() =>
                {
                    toggle.Tap();
                    mRtcEngine.EnableLocalAudio(true);
                    mRtcEngine.MuteLocalAudioStream(false);
                });
            }
        }
    }
    private void OnLeaveButtonClicked()
    {
        // leave channel
        mRtcEngine.LeaveChannel();
        // deregister video frame observers in native-c code
        //mRtcEngine.DisableVideo();
        //mRtcEngine.DisableAudio();
        
        // delete engine
        
        mRtcEngine.Dispose();  // Place this call in ApplicationQuit
        mRtcEngine = null;
        
        SceneManager.LoadScene("MainScene", LoadSceneMode.Single);
    }
    
    IEnumerator DelayAction(float delay, System.Action doAction)
    {
        yield return new WaitForSeconds(delay);
        doAction();
    }
    
    internal class UserEventHandler : IRtcEngineEventHandler
    {
        private readonly TechnicianManager _technicianManager;

        internal UserEventHandler(TechnicianManager technicianManager)
        {
            _technicianManager = technicianManager;
        }
        
        public override void OnError(int err, string msg)
        {
            Debug.LogError(string.Format("ARHelper: OnError err: {0}, msg: {1}", err, msg));
        }

        public override void OnFirstRemoteVideoDecoded(RtcConnection connection, uint remoteUid, int width, int height, int elapsed)
        {
            Debug.LogWarningFormat("ARHelper: OnFirstRemoteVideoDecoded: uid:{0} w:{1} h:{2} elapsed:{3}", remoteUid, width, height, elapsed);
        }

        public override void OnJoinChannelSuccess(RtcConnection connection, int elapsed)
        {
            int build = 0;
            Debug.Log("ARHelper: Agora: OnJoinChannelSuccess ");
            Debug.Log(string.Format("ARHelper: sdk version: ${0}", _technicianManager.mRtcEngine.GetVersion(ref build)));
            Debug.Log(string.Format("ARHelper: sdk build: ${0}", build));
            Debug.Log(string.Format("ARHelper: OnJoinChannelSuccess channelName: {0}, uid: {1}, elapsed: {2}",
                                connection.channelId, connection.localUid, elapsed));
            
            _technicianManager.EnableSharing();
        }

        /*
        public override void OnRejoinChannelSuccess(RtcConnection connection, int elapsed)
        {
            Debug.Log("OnRejoinChannelSuccess");
        }

        public override void OnLeaveChannel(RtcConnection connection, RtcStats stats)
        {
            Debug.Log("OnLeaveChannel");
        }

        public override void OnClientRoleChanged(RtcConnection connection, CLIENT_ROLE_TYPE oldRole, CLIENT_ROLE_TYPE newRole, ClientRoleOptions newRoleOptions)
        {
            Debug.Log("OnClientRoleChanged");
        }*/

        public override void OnUserJoined(RtcConnection connection, uint uid, int elapsed)
        {
            Debug.Log(string.Format("ARHelper: OnUserJoined uid: ${0} elapsed: ${1}", uid, elapsed));
            
            GameObject go = GameObject.Find("myImage");
            if (go == null)
            {
                return;
            }

            VideoSurface videoSurface = go.GetComponent<VideoSurface>();
            if (videoSurface != null)
            {
                videoSurface.enabled = true;
                // configure videoSurface
                videoSurface.SetForUser(uid, _technicianManager.channelName);
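                // Note: for a remote user this call likely needs a third argument,
                // VIDEO_SOURCE_TYPE.VIDEO_SOURCE_REMOTE; the default source type
                // renders the local camera instead of the remote stream.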
                videoSurface.SetEnable(true);
                //videoSurface.
                //videoSurface.SetGameFps(30);
            }
            
            //TechnicianManager.MakeVideoView(uid, _technicianManager.channelName);
        }

        public override void OnUserOffline(RtcConnection connection, uint uid, USER_OFFLINE_REASON_TYPE reason)
        {
            // remove video stream
            Debug.Log(string.Format("ARHelper: OnUserOffLine uid: ${0}, reason: ${1}", uid, (int)reason));
            // this is called in main thread
            GameObject go = GameObject.Find(uid.ToString());
            if (!ReferenceEquals(go, null))
            {
                UnityEngine.Object.Destroy(go);
            }
            //TechnicianManager.DestroyVideoView(uid);
        }
        /*
        public override void OnUplinkNetworkInfoUpdated(UplinkNetworkInfo info)
        {
            Debug.Log("OnUplinkNetworkInfoUpdated");
        }

        public override void OnDownlinkNetworkInfoUpdated(DownlinkNetworkInfo info)
        {
            Debug.Log("OnDownlinkNetworkInfoUpdated");
        }*/
    }
    
    internal class VideoFrameObserver : IVideoFrameObserver
    {
        private readonly TechnicianManager _technicianManager;

        internal VideoFrameObserver(TechnicianManager technicianManager)
        {
            _technicianManager = technicianManager;
        }

        public override bool OnCaptureVideoFrame(VIDEO_SOURCE_TYPE type, VideoFrame videoFrame)
        {
            Debug.Log("OnCaptureVideoFrame-----------" + " width:" + videoFrame.width + " height:" +
                      videoFrame.height);
            //_agoraVideoRawData.VideoFrameWidth = videoFrame.width;
            //_agoraVideoRawData.VideoFrameHeight = videoFrame.height;
            //lock (_agoraVideoRawData.VideoBuffer)
            //{
            //    _agoraVideoRawData.VideoBuffer = videoFrame.yBuffer;
            //}
            return true;
        }

        public override bool OnRenderVideoFrame(string channelId, uint uid, VideoFrame videoFrame)
        {
            Debug.Log("OnRenderVideoFrameHandler-----------" + " uid:" + uid + " width:" + videoFrame.width +
                      " height:" + videoFrame.height);
            return true;
        }
    }
}
c# android unity-game-engine augmented-reality agora.io
1 Answer

0 votes

Take a look at the link below; I ran into the same error while using the Agora Video SDK (note that this page documents the 3.x C# API, not 4.x): https://api-ref.agora.io/en/video-sdk/windows-csharp/3.x/API/class_ivideoframeobserver.html#class_ivideoframeobserver
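For reference, here is a minimal sketch (untested; it assumes the same 4.x enum and method names that already appear in the commented-out RegisterVideoFrameObserver call in SetupAgoraEngine above) of wiring up a raw-data frame observer:

using Agora.Rtc;
using UnityEngine;

// Minimal sketch: create the engine, register a raw-data observer, then enable
// video. The observer class is a trimmed stand-in for the question's
// VideoFrameObserver; "YOUR_APP_ID" is a placeholder.
public class FrameObserverExample : MonoBehaviour
{
    private IRtcEngine engine;

    void Start()
    {
        engine = RtcEngine.CreateAgoraRtcEngine();
        engine.Initialize(new RtcEngineContext("YOUR_APP_ID", 0,
            CHANNEL_PROFILE_TYPE.CHANNEL_PROFILE_LIVE_BROADCASTING,
            AUDIO_SCENARIO_TYPE.AUDIO_SCENARIO_DEFAULT, AREA_CODE.AREA_CODE_GLOB));

        // Register before EnableVideo so captured frames reach the observer.
        engine.RegisterVideoFrameObserver(new RawFrameObserver(),
            VIDEO_OBSERVER_FRAME_TYPE.FRAME_TYPE_NV21,
            VIDEO_OBSERVER_POSITION.POSITION_POST_CAPTURER,
            OBSERVER_MODE.RAW_DATA);

        engine.EnableVideo();
    }

    private class RawFrameObserver : IVideoFrameObserver
    {
        public override bool OnCaptureVideoFrame(VIDEO_SOURCE_TYPE type, VideoFrame frame)
        {
            // Log the captured frame size; return true to keep the frame.
            Debug.Log("captured frame: " + frame.width + "x" + frame.height);
            return true;
        }
    }
}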
