Canvas only loads the overlay after a re-render


I'm trying to create a canvas that takes a video, applies selfie segmentation to it, and then adds an overlay on top. When the code below runs, I can see the video element, a canvas showing the video with the selfie segmentation applied, and the overlay video element, but the overlay is not drawn onto the canvas. Only after the page re-renders does the canvas show the video, the selfie segmentation, and the overlay together. How can I update the code so that the canvas loads the video, the selfie segmentation, and the overlay from the very start? Please help me. Please check the code:

import sample from "./assets/sample.mp4";
import overlay from "./assets/foreground.webm";
import React, { useState, useRef, useEffect } from "react";
import { SelfieSegmentation } from "@mediapipe/selfie_segmentation";

function Cam() {
  const streamVideoRef = useRef(null);
  const overlayRef = useRef(null);
  const canvasRef = useRef(null);
  let selfieSegmentation;

  const setupSelfieSegmentation = async () => {
    try {
      selfieSegmentation = new SelfieSegmentation({
        locateFile: (file) =>
          `https://cdn.jsdelivr.net/npm/@mediapipe/selfie_segmentation/${file}`,
      });
      selfieSegmentation.setOptions({
        modelSelection: 0,
      });

      const ctx = canvasRef.current.getContext("2d");

      await selfieSegmentation.initialize();
      console.log("SelfieSegmentation model initialized successfully");

      // Set canvas size based on video dimensions
      canvasRef.current.width = 400;
      canvasRef.current.height = 300;

      // Ensure overlay video is loaded before playing
      overlayRef.current.addEventListener("loadeddata", () => {
        overlayRef.current.play();
      });

      overlayRef.current.src = overlay;
      selfieSegmentation.onResults((results) => {
        // Create an off-screen canvas
        const offscreenCanvas = document.createElement("canvas");
        const offscreenCtx = offscreenCanvas.getContext("2d");
        offscreenCanvas.width = canvasRef.current.width;
        offscreenCanvas.height = canvasRef.current.height;

        // Draw the segmentation mask without any blending mode A
        offscreenCtx.drawImage(
          results.segmentationMask,
          0,
          0,
          canvasRef.current.width,
          canvasRef.current.height
        );

        offscreenCtx.save();
        //Camera Stream
        const imageDataMask = offscreenCtx.getImageData(
          0,
          0,
          canvasRef.current.width,
          canvasRef.current.height
        );
        const dataMask = imageDataMask.data;

        offscreenCtx.globalCompositeOperation = "source-over";

        // Draw the original image (camera stream)
        offscreenCtx.drawImage(
          results.image,
          0,
          0,
          canvasRef.current.width,
          canvasRef.current.height
        );

        const imageDataOrg = offscreenCtx.getImageData(
          0,
          0,
          canvasRef.current.width,
          canvasRef.current.height
        );
        const dataOrg = imageDataOrg.data;

        // Draw the Overlay(camera stream)

        offscreenCtx.drawImage(
          overlayRef.current,
          0,
          0,
          canvasRef.current.width,
          canvasRef.current.height
        );

        offscreenCtx.restore();

        // Get the image data of the entire canvas
        const imageData = offscreenCtx.getImageData(
          0,
          0,
          canvasRef.current.width,
          canvasRef.current.height
        );

        // Access the pixel data
        const data = imageData.data;

        // Define the RGB values for the red color you want to make transparent
        const red = 0;
        const green = 0;
        const blue = 0;

        // Loop through each pixel and set the alpha value to 0 for the red color
        for (let i = 0; i < dataMask.length; i += 4) {
          if (
            dataMask[i] > red &&
            dataMask[i + 1] === green &&
            dataMask[i + 2] === blue
          ) {
            data[i] = dataOrg[i];
            data[i + 1] = dataOrg[i + 1];
            data[i + 2] = dataOrg[i + 2];
          }
        }
        // Put the modified image data back onto the canvas
        offscreenCtx.putImageData(imageData, 0, 0);
        ctx.drawImage(offscreenCanvas, 0, 0);
      });
    } catch (error) {
      console.error("Error initializing SelfieSegmentation:", error);
    }
  };

  const segmentFrame = async () => {
    try {
      if (streamVideoRef.current) {
        const videoElement = streamVideoRef.current;
        if (canvasRef.current) {
          const context = canvasRef.current.getContext("2d");
          context.drawImage(
            videoElement,
            0,
            0,
            canvasRef.current.width,
            canvasRef.current.height
          );
        }
        const image = new Image();
        image.width = canvasRef.current.width;
        image.height = canvasRef.current.height;
        image.src = canvasRef.current.toDataURL("image/png");
        await selfieSegmentation.send({ image });
      } else {
        console.error("No video element found");
      }
    } catch (error) {
      console.error("Error segmenting frame:", error);
    }
  };
  useEffect(() => {
    const setup = async () => {
      if (streamVideoRef.current) {
        console.log("Setting up selfie segmentation...");        
        await setupSelfieSegmentation();
        streamVideoRef.current.play().catch((error) => {
          console.error("Error playing video:", error);
        });
        const intervalId = setInterval(segmentFrame, 1000 / 30);
        // Cleanup function
        return () => {
          clearInterval(intervalId);
          if (selfieSegmentation) {
            selfieSegmentation.close();
          }
        };
      }
    };
    setup();
    overlayRef.current.src = overlay;
    
  }, []);

  return (    
    <div>
      <video ref={streamVideoRef} width="320" height="240" controls>
        <source src={sample} type="video/mp4" />
      </video>
      <canvas ref={canvasRef} width="320" height="240" />
      <video width="320" height="240" ref={overlayRef} controls/>      
    </div>   
  );
};

export default Cam;

I'd appreciate any suggestions.

reactjs react-hooks html5-canvas mediapipe
1 Answer

You can't get what you want from the very first render, because the useEffect hook only runs after the component has rendered.

Instead, keep a loading flag in state and show a loading placeholder in your JSX until the setup work inside useEffect has finished:

  const [loading, setLoading] = useState(true);

  useEffect(() => {
    // ...your existing setup...
    setLoading(false); // place this after all your required useEffect operations have completed
    // ...
  }, []);

  if (loading) {
    return <h4>Loading...</h4>;
  }
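
For this specific component, note that returning the loading placeholder early also keeps the video and canvas elements from mounting, so the refs would still be null inside useEffect. A variant of the same idea is to keep the elements mounted but hidden, and only flip a flag once the overlay video has fired loadeddata. The sketch below is a rough illustration, not a drop-in fix: the ready flag, handleLoaded, and the hidden wrapper are names and choices I'm assuming, sample and overlay are the same asset imports as in the question, and the selfie-segmentation setup is elided into a comment.

import React, { useState, useRef, useEffect } from "react";
import sample from "./assets/sample.mp4";
import overlay from "./assets/foreground.webm";

function Cam() {
  const streamVideoRef = useRef(null);
  const overlayRef = useRef(null);
  const canvasRef = useRef(null);
  const [ready, setReady] = useState(false); // assumed flag, analogous to "loading"

  useEffect(() => {
    const overlayVideo = overlayRef.current;

    // Only start playback (and the per-frame segmentation loop) once the
    // overlay video has decoded its first frame.
    const handleLoaded = () => {
      overlayVideo.play();
      streamVideoRef.current.play().catch(console.error);
      // ...initialize SelfieSegmentation and start the segmentFrame interval here...
      setReady(true);
    };

    overlayVideo.addEventListener("loadeddata", handleLoaded);
    overlayVideo.src = overlay; // set src after attaching the listener

    return () => overlayVideo.removeEventListener("loadeddata", handleLoaded);
  }, []);

  return (
    <div>
      {!ready && <h4>Loading...</h4>}
      {/* Keep the elements mounted so the refs exist; just hide them until ready */}
      <div style={{ visibility: ready ? "visible" : "hidden" }}>
        <video ref={streamVideoRef} width="320" height="240" controls>
          <source src={sample} type="video/mp4" />
        </video>
        <canvas ref={canvasRef} width="320" height="240" />
        <video ref={overlayRef} width="320" height="240" controls />
      </div>
    </div>
  );
}

This keeps the canvas and both videos in the DOM from the first render, so the drawing code in onResults has valid elements to work with while the placeholder is shown.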