Recording the screen with input and output audio using C# SharpAvi and NAudio

Problem description

I am trying to build an application that records the screen and, at the same time, captures both the input (microphone) and output (speaker) audio. I have searched a lot online but could not find anything that does all of this together.

I tried something with SharpAvi and NAudio. The code at the bottom records the screen together with the microphone audio, and it also captures the sound coming from the speakers. The problem is that the screen video and the microphone audio end up in one video file, while the speaker sound is written to a separate audio file (a WAV). (I can't hand something like that to my boss :) )

So I want to end up with a single video file that contains the screen recording plus the input and output audio. I hope you can help me.

        private readonly int screenWidth;
        private readonly int screenHeight;

        private readonly AviWriter writer;
        private readonly IAviVideoStream videoStream;
        private readonly IAviAudioStream audioStream;

        private readonly WaveInEvent audioSource;
        private readonly Thread screenThread;

        private readonly ManualResetEvent stopThread = new ManualResetEvent(false);
        private readonly AutoResetEvent videoFrameWritten = new AutoResetEvent(false);
        private readonly AutoResetEvent audioBlockWritten = new AutoResetEvent(false);

        WasapiLoopbackCapture capture = new WasapiLoopbackCapture();
        WaveFileWriter writerx;

        public Recorder(string fileName,
            FourCC codec, int quality,
            int audioSourceIndex, SupportedWaveFormat audioWaveFormat, bool encodeAudio, int audioBitRate)
        {
            System.Windows.Media.Matrix toDevice;
            using (var source = new HwndSource(new HwndSourceParameters()))
            {
                toDevice = source.CompositionTarget.TransformToDevice;
            }

            screenWidth = (int)Math.Round(SystemParameters.PrimaryScreenWidth * toDevice.M11);
            screenHeight = (int)Math.Round(SystemParameters.PrimaryScreenHeight * toDevice.M22);

            // Create AVI writer and specify FPS
            writer = new AviWriter(fileName)
            {
                FramesPerSecond = 10,
                EmitIndex1 = true,
            };

            // Create video stream
            videoStream = CreateVideoStream(codec, quality);
            videoStream.Name = "Screencast";


            var outputFolder = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.Desktop), "NAudio");
            Directory.CreateDirectory(outputFolder);
            var outputFilePath = Path.Combine(outputFolder, "recordedx.wav");

            writerx = new WaveFileWriter(outputFilePath, capture.WaveFormat);

            if (audioSourceIndex >= 0)
            {
                while (capture.CaptureState != NAudio.CoreAudioApi.CaptureState.Stopped)
                {
                    Thread.Sleep(500);
                }


                var waveFormat = ToWaveFormat(audioWaveFormat);
                audioStream = CreateAudioStream(waveFormat, encodeAudio, audioBitRate);
                audioStream.Name = "Voice";

                audioSource = new WaveInEvent
                {
                    DeviceNumber = audioSourceIndex,
                    WaveFormat = waveFormat,
                    BufferMilliseconds = (int)Math.Ceiling(1000 / writer.FramesPerSecond),
                    NumberOfBuffers = 3,
                };

                audioSource.DataAvailable += audioSource_DataAvailable;
                capture.DataAvailable += Capture_DataAvailable;   // loopback (speaker) audio goes to the separate WAV file
            }

            screenThread = new Thread(RecordScreen)
            {
                Name = typeof(Recorder).Name + ".RecordScreen",
                IsBackground = true
            };

            if (audioSource != null)
            {
                videoFrameWritten.Set();
                audioBlockWritten.Reset();
                audioSource.StartRecording();
                capture.StartRecording();   // start capturing the speaker output (loopback)
            }

            screenThread.Start();
        }

        private void Capture_DataAvailable(object sender, WaveInEventArgs e)
        {
                writerx.Write(e.Buffer, 0, e.BytesRecorded);
        }

        private IAviVideoStream CreateVideoStream(FourCC codec, int quality)
        {

            if (codec == KnownFourCCs.Codecs.Uncompressed)
            {
                return writer.AddUncompressedVideoStream(screenWidth, screenHeight);
            }
            else if (codec == KnownFourCCs.Codecs.MotionJpeg)
            {
                return writer.AddMotionJpegVideoStream(screenWidth, screenHeight, quality);
            }
            else
            {
                return writer.AddMpeg4VideoStream(screenWidth, screenHeight, (double)writer.FramesPerSecond,
                    quality: quality,
                    codec: codec,
                    forceSingleThreadedAccess: true);
            }
        }

        private IAviAudioStream CreateAudioStream(WaveFormat waveFormat, bool encode, int bitRate)
        {

            if (encode)
            {

                return writer.AddMp3AudioStream(waveFormat.Channels, waveFormat.SampleRate, bitRate);
            }
            else
            {
                return writer.AddAudioStream(
                    channelCount: waveFormat.Channels,
                    samplesPerSecond: waveFormat.SampleRate,
                    bitsPerSample: waveFormat.BitsPerSample);
            }
        }

        private static WaveFormat ToWaveFormat(SupportedWaveFormat waveFormat)
        {
            switch (waveFormat)
            {
                case SupportedWaveFormat.WAVE_FORMAT_44M16:
                    return new WaveFormat(44100, 16, 1);
                case SupportedWaveFormat.WAVE_FORMAT_44S16:
                    return new WaveFormat(44100, 16, 2);
                default:
                    throw new NotSupportedException("Wave formats other than '16-bit 44.1kHz' are not currently supported.");
            }
        }

        public void Dispose()
        {
            stopThread.Set();
            screenThread.Join();

            writerx.Dispose();   // finalize the separate WAV file with the speaker audio
            writerx = null;
            capture.Dispose();   // release the loopback capture

            if (audioSource != null)
            {
                audioSource.StopRecording();
                audioSource.DataAvailable -= audioSource_DataAvailable;
            }


            writer.Close();
            stopThread.Close();
        }

        private void RecordScreen()
        {
            var stopwatch = new Stopwatch();
            var buffer = new byte[screenWidth * screenHeight * 4];

            Task videoWriteTask = null;

            var isFirstFrame = true;
            var shotsTaken = 0;
            var timeTillNextFrame = TimeSpan.Zero;
            stopwatch.Start();

            while (!stopThread.WaitOne(timeTillNextFrame))
            {
                GetScreenshot(buffer);
                shotsTaken++;

                // Wait until the previous frame has been written
                if (!isFirstFrame)
                {
                    videoWriteTask.Wait();
                    videoFrameWritten.Set();
                }

                if (audioStream != null)
                {
                    var signalled = WaitHandle.WaitAny(new WaitHandle[] { audioBlockWritten, stopThread });
                    if (signalled == 1)
                        break;
                }


                videoWriteTask = videoStream.WriteFrameAsync(true, buffer, 0, buffer.Length);

                timeTillNextFrame = TimeSpan.FromSeconds(shotsTaken / (double)writer.FramesPerSecond - stopwatch.Elapsed.TotalSeconds);
                if (timeTillNextFrame < TimeSpan.Zero)
                    timeTillNextFrame = TimeSpan.Zero;

                isFirstFrame = false;
            }

            stopwatch.Stop();

            // Wait until the last frame has been written
            if (!isFirstFrame)
            {
                videoWriteTask.Wait();
            }
        }

        private void GetScreenshot(byte[] buffer)
        {
            using (var bitmap = new Bitmap(screenWidth, screenHeight))
            using (var graphics = Graphics.FromImage(bitmap))
            {
                graphics.CopyFromScreen(0, 0, 0, 0, new System.Drawing.Size(screenWidth, screenHeight));
                var bits = bitmap.LockBits(new Rectangle(0, 0, screenWidth, screenHeight), ImageLockMode.ReadOnly, PixelFormat.Format32bppRgb);
                Marshal.Copy(bits.Scan0, buffer, 0, buffer.Length);
                bitmap.UnlockBits(bits);          
            }
        }

        private void audioSource_DataAvailable(object sender, WaveInEventArgs e)
        {
            var signalled = WaitHandle.WaitAny(new WaitHandle[] { videoFrameWritten, stopThread });
            if (signalled == 0)
            {
                audioStream.WriteBlock(e.Buffer, 0, e.BytesRecorded);
                audioBlockWritten.Set();
            }
        }
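
One direction I am thinking about (just a rough, untested sketch, not a working solution) is to stop writing the loopback audio to its own WAV file and instead mix the microphone and the speaker capture with NAudio before handing the result to the single SharpAvi audio stream. The buffer size and the idea that the AVI audio stream gets created with the mixer's format are my own guesses:

// Rough, untested sketch: buffer both captures, mix them, and write the mixed
// 16-bit PCM into the one SharpAvi audio stream instead of a separate WAV file.
// Uses NAudio.Wave and NAudio.Wave.SampleProviders.

// extra fields
private BufferedWaveProvider micBuffer;
private BufferedWaveProvider loopBuffer;
private SampleToWaveProvider16 mixedPcm;

// in the constructor, after audioSource and capture are created:
micBuffer = new BufferedWaveProvider(audioSource.WaveFormat) { DiscardOnBufferOverflow = true };
loopBuffer = new BufferedWaveProvider(capture.WaveFormat) { DiscardOnBufferOverflow = true };

// bring the microphone up to the loopback format (loopback is typically 48 kHz stereo)
ISampleProvider mic = micBuffer.ToSampleProvider();
if (mic.WaveFormat.Channels == 1)
    mic = new MonoToStereoSampleProvider(mic);
if (mic.WaveFormat.SampleRate != capture.WaveFormat.SampleRate)
    mic = new WdlResamplingSampleProvider(mic, capture.WaveFormat.SampleRate);

// MixingSampleProvider requires all inputs to share sample rate and channel count
var mixer = new MixingSampleProvider(new[] { loopBuffer.ToSampleProvider(), mic });
mixedPcm = new SampleToWaveProvider16(mixer);   // back to 16-bit PCM for the AVI stream

audioSource.DataAvailable += (s, e) => micBuffer.AddSamples(e.Buffer, 0, e.BytesRecorded);
capture.DataAvailable += (s, e) => loopBuffer.AddSamples(e.Buffer, 0, e.BytesRecorded);

// called once per video frame from RecordScreen instead of audioSource_DataAvailable;
// audioStream would then have to be created with the mixer's channel count / sample rate
private void WriteMixedAudioBlock()
{
    var block = new byte[mixedPcm.WaveFormat.AverageBytesPerSecond / 10]; // ~one frame at 10 FPS
    int read = mixedPcm.Read(block, 0, block.Length);
    if (read > 0)
        audioStream.WriteBlock(block, 0, read);
}

I have not tried this yet, so I do not know whether the timing and synchronization would hold up.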
Tags: c#, video-capture, audio-recording, naudio
1 Answer

Here is my solution for capturing video (AVI) and audio (speakers):

NuGet packages: NAudio.Wasapi, SharpAvi

recordTask = new Task((cancelationToken) =>
{
    using (var writer = new AviWriter(_capturedVideoPath)
    {
        FramesPerSecond = 30,
        EmitIndex1 = true
    })
    {

        int channels = 1;
        int sampleRate = 44100;
        int bytesPerSample = 2;
        WasapiLoopbackCapture audioSource = new WasapiLoopbackCapture();
        audioSource.WaveFormat = new WaveFormat(sampleRate, bytesPerSample * 8, channels);

        var audioStream = writer.AddAudioStream(audioSource.WaveFormat.Channels,
            samplesPerSecond: audioSource.WaveFormat.SampleRate,
            bitsPerSample: audioSource.WaveFormat.BitsPerSample);

        audioSource.DataAvailable += (sender, e) =>
        {
            //if (e.BytesRecorded > 0)
            //{
            //    Debug.WriteLine("A");
            //    if (audioSource.WaveFormat.BitsPerSample == 32)
            //    {
            //        byte[] newArray16Bit = new byte[e.BytesRecorded / 2];
            //        short two;
            //        float value;
            //        for (int i = 0, j = 0; i < e.BytesRecorded; i += 4, j += 2)
            //        {
            //            value = (BitConverter.ToSingle(e.Buffer, i));
            //            two = (short)(value * short.MaxValue);
            //            //var two = (byte)(value * byte.MaxValue); //maybe need?

            //            newArray16Bit[j] = (byte)(two & 0xFF);
            //            newArray16Bit[j + 1] = (byte)((two >> 8) & 0xFF);
            //        }

            //        audioStream.WriteBlock(newArray16Bit, 0, e.BytesRecorded / 2);
            //    }
            //    else
            //    {
            //        audioStream.WriteBlock(e.Buffer, 0, e.BytesRecorded);
            //    }
            //}
            //else
            {
                audioStream.WriteBlock(e.Buffer, 0, e.Buffer.Length / bytesPerSample);
            }
        };
        audioSource.StartRecording();

        var videoStream = writer.AddMJpegWpfVideoStream(width, height, quality: 70);
        var frameData = new byte[videoStream.Width * videoStream.Height * 4];
        while (!recordTaskCancel.IsCancellationRequested)
        {
            GetScreenshot(frameData, rect.Left, rect.Top, videoStream.Width, videoStream.Height);
            // write data to a frame
            videoStream.WriteFrame(true, // is a key frame? (many codecs use the concept of key frames, for others - all frames are keys)
                              frameData, // an array with frame data
                              0, // a starting index in the array
                              frameData.Length // a length of the data
            );
        }
        if (audioSource != null)
        {
            audioSource.StopRecording();
            audioSource.Dispose();
            audioSource = null;
        }

    }
}, recordTaskCancel.Token);
recordTask.Start();
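
Stopping is just a matter of cancelling the token: the while loop exits, the loopback capture is stopped, and the using block disposes the AviWriter, which finalizes the AVI index. Roughly (recordTaskCancel here is the CancellationTokenSource used above):

// stop sketch: signal cancellation, then wait for the task so the writer gets disposed
recordTaskCancel.Cancel();
recordTask.Wait();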



private void GetScreenshot(byte[] buffer, int left, int top, int screenWidth, int screenHeight)
{
    using (var bitmap = new Bitmap(screenWidth, screenHeight))
    using (var graphics = Graphics.FromImage(bitmap))
    {
        graphics.CopyFromScreen(left, top, 0, 0, new System.Drawing.Size(screenWidth, screenHeight));
        var mousePos = WinProcess.GetMousePosition();
        var mouseDpiAdditionalOffset = -(200.0 - (200.0 * dpiKoeff));
        graphics.FillEllipse(
            new SolidBrush(Color.Red), 
            (float)(mousePos.X - (left / dpiKoeff) + mouseDpiAdditionalOffset), 
            (float)(mousePos.Y - (top / dpiKoeff) + mouseDpiAdditionalOffset), 
            10, 
            10);
        var bits = bitmap.LockBits(new Rectangle(0, 0, screenWidth, screenHeight), ImageLockMode.ReadOnly, System.Drawing.Imaging.PixelFormat.Format32bppRgb);
        Marshal.Copy(bits.Scan0, buffer, 0, buffer.Length);
        bitmap.UnlockBits(bits);
    }
}


[DllImport("user32.dll")]
[return: MarshalAs(UnmanagedType.Bool)]
internal static extern bool GetCursorPos(ref Win32Point pt);
[StructLayout(LayoutKind.Sequential)]
internal struct Win32Point
{
    public Int32 X;
    public Int32 Y;
};
public static Point GetMousePosition()
{
    var w32Mouse = new Win32Point();
    GetCursorPos(ref w32Mouse);
    return new Point(w32Mouse.X, w32Mouse.Y);
}