ThreeJS Video Pixelation Effect


Render video in a pixelated, LED-screen style inside a 3D scene for a retro look~

Demo

Try the live demo; the original write-up is on my WeChat official account: 《ThreeJS LED 电子屏幕-视频像素化》.

(Demo GIF: threejs_3d_led.gif)

The basic idea behind the video pixelation effect: read the video's pixel data through a Canvas, then use that data to create and color each LED block object of the virtual screen.

The implementation breaks down into the following steps:

1. Get the video's pixel data

Load the video source

First, a simple vanilla JS helper that creates a video element:

function createVideo({
  src = "",
  loop = true,
  muted = true,
  width = 640,
  height = 480,
  auto_play = false,
}) {
  const video = document.createElement("video");
  video.src = src;
  video.loop = loop;
  video.setAttribute("webkit-playsinline", true); // 禁止自动全屏播放
  video.setAttribute("playsinline", true);
  video.setAttribute("crossorigin", "anonymous"); //解决视频资源跨域问题,否则在设置VideoTexture时会报错。【THREE.WebGLState: DOMException: Failed to execute 'texImage2D' on 'WebGLRenderingContext': The video element contains cross-origin data, and may not be loaded.】
  video.muted = muted;
  video.width = width;
  video.height = height;

  if (auto_play) {
    // play() returns a promise; a try/catch will not catch its rejection,
    // so handle it with .catch() (autoplay can be blocked, especially on iOS)
    video.play().catch((err) => {
      console.error(err);
    });
  }

  return video;
}
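
A minimal usage sketch for reference (the src path below is just a placeholder, not an asset from this article):

// Hypothetical usage -- replace "/videos/demo.mp4" with a real, CORS-enabled video URL
const demoVideo = createVideo({
  src: "/videos/demo.mp4",
  muted: true, // a muted video is far more likely to be allowed to autoplay
  width: 200,
  height: 200,
  auto_play: false, // call demoVideo.play() later, e.g. from a user gesture
});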

Grab the current video frame as Canvas image data:

var canvas = null;
var ctx = null;

function syncToCanvas() {
  return new Promise((resolve, reject) => {
    try {
      const { width = 0, height = 0 } = video || {};
      if (!canvas) {
        canvas = document.createElement("canvas");
        canvas.id = "cvideo";
        canvas.width = width;
        canvas.height = height;
        canvas.title = "canvas synced from video";

        ctx = canvas.getContext("2d");
        canvas.style.cssText = `
              position: absolute;
              right: 30px;
              top: 20px;
              border: solid 2px red;
          `;
      }
      ctx.drawImage(video, 0, 0, width, height);
      const imgData = ctx.getImageData(0, 0, width, height).data;
      resolve(imgData);
    } catch (err) {
      reject({ message: "call get_Image_Data error.", error: err });
    }
  });
}
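
Note that the snippet styles this canvas (absolute position, red border) but never attaches it to the page; if you want to inspect the intermediate canvas while debugging, you could append it yourself, for example:

// Optional: show the sync canvas on screen for debugging
document.body.appendChild(canvas);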

Convert the Canvas image data into a downsampled pixel grid:

const gap = 5; // sampling step in pixels, roughly 1~10; smaller values give a sharper LED image but cost more performance (0 would never advance the loop)

function turnImageData2Pixel(imageData) {
  return new Promise((resolve, reject) => {
    try {
      let pixels = []; // downsampled pixel grid, one [r, g, b] entry per LED block
      for (let h = 0; h < video.height; h += gap) {
        let row = [];
        for (let w = 0; w < video.width; w += gap) {
          let pos = (h * video.width + w) * 4;
          let r = imageData[pos];
          let g = imageData[pos + 1];
          let b = imageData[pos + 2];
          row.push([r, g, b]);
        }
        pixels.push(row);
      }
      resolve(pixels);
    } catch (err) {
      reject({ message: "call turnImageData2Pixel error.", err: err });
    }
  });
}
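
getImageData().data is a flat Uint8ClampedArray in RGBA order (4 bytes per pixel), which is why the byte offset of pixel (w, h) is (h * width + w) * 4. A quick sanity check of the sampling math, assuming a 200×200 video and gap = 5 (demo values, not from the article's videos):

// Assumed demo values
const demoWidth = 200, demoHeight = 200, demoGap = 5;

const offset = (10 * demoWidth + 20) * 4; // pixel at w = 20, h = 10 -> byte 8080
const rows = Math.ceil(demoHeight / demoGap); // 40 sampled rows
const cols = Math.ceil(demoWidth / demoGap); // 40 sampled columns

console.log(offset, rows, cols); // 8080 40 40 -> a 40 x 40 = 1600-block LED grid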

2. Build the LED pixel screen

The LED screen's grid must have the same number of rows and columns as the pixel grid sampled from the video canvas.

let ledPixels = [];
let ledGroup = new THREE.Group();

function createLedScreen(
  row,
  col,
  pixelWidth,
  pixelHeight,
  pixelGap = 0.1,
  imgData = null
) {
  // let row = Math.floor(height / (pixelHeight + pixelGap));
  // let col = Math.floor(width / (pixelWidth + pixelGap));
  ledPixels = [];

  let width = col * (pixelWidth + pixelGap) - pixelGap;
  let height = row * (pixelHeight + pixelGap) - pixelGap;

  console.log(col, row);

  // reuse one geometry across all LED meshes to save memory
  let geo = new THREE.PlaneGeometry(pixelWidth, pixelHeight);

  for (let i = 0; i < row; i++) {
    let colArr = [];
    for (let j = 0; j < col; j++) {
      let mesh = new THREE.Mesh(geo, new THREE.MeshBasicMaterial());
      if (imgData) {
        // setRGB expects 0-1 components, so normalize the 0-255 channel values
        const [r, g, b] = imgData[i][col - 1 - j];
        mesh.material.color.setRGB(r / 255, g / 255, b / 255);
      }

      let x = j * pixelWidth - width / 2 + pixelGap * j + pixelWidth / 2;
      let y = i * pixelHeight - height / 2 + pixelGap * i + pixelHeight / 2;
      let z = 25.04;
      mesh.userData = { x, y, z };
      mesh.position.set(x, y, z);
      colArr.push(mesh);

      ledGroup.add(mesh);
    }

    ledPixels.push(colArr);
  }

  // adjust the LED screen's position and orientation
  ledGroup.position.set(0, 30, 0);
  ledGroup.rotateZ(-Math.PI);
  // ledGroup.rotateX(-Math.PI/4);

  // add to the scene
  scene.add(ledGroup);
}
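
Each LED block here is its own Mesh with its own material, which is simple but means row × col separate objects. If the grid gets large and performance suffers, one alternative worth considering (not used in this article) is a single THREE.InstancedMesh with per-instance colors. A rough sketch, assuming the same row/col/pixel sizing as createLedScreen:

// Sketch only: one InstancedMesh instead of row * col separate meshes
function createLedScreenInstanced(row, col, pixelWidth, pixelHeight, pixelGap = 0.1) {
  const geo = new THREE.PlaneGeometry(pixelWidth, pixelHeight);
  const mat = new THREE.MeshBasicMaterial();
  const led = new THREE.InstancedMesh(geo, mat, row * col);

  const width = col * (pixelWidth + pixelGap) - pixelGap;
  const height = row * (pixelHeight + pixelGap) - pixelGap;
  const dummy = new THREE.Object3D();

  for (let i = 0; i < row; i++) {
    for (let j = 0; j < col; j++) {
      const index = i * col + j;
      dummy.position.set(
        j * (pixelWidth + pixelGap) - width / 2 + pixelWidth / 2,
        i * (pixelHeight + pixelGap) - height / 2 + pixelHeight / 2,
        25.04
      );
      dummy.updateMatrix();
      led.setMatrixAt(index, dummy.matrix);
      led.setColorAt(index, new THREE.Color(0, 0, 0)); // start black
    }
  }
  led.instanceMatrix.needsUpdate = true;
  scene.add(led);
  return led;
}

// Per frame, update colors via setColorAt instead of swapping material colors:
// led.setColorAt(i * col + j, new THREE.Color(`rgb(${r},${g},${b})`));
// led.instanceColor.needsUpdate = true;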

3. Update the screen in real time

With the methods above, we can keep sampling the video's pixel data while it plays, create the screen's LED block objects from that data, and update each block's color in real time, producing the pixelated video playback effect.

let row = 0;
let col = 0;

function loopSync() {
  syncToCanvas()
    .then((imgData) => {
      return turnImageData2Pixel(imgData);
    })
    .then((pixels) => {
      let r = pixels.length;
      let c = pixels[0].length;

      // initialize the 3D screen once
      if (!row && !col) {
        row = r;
        col = c;

        createLedScreen(row, col, 0.4, 0.4, 0.1);
      }

      let cutRowStart = 0; //Math.floor(row / 3);
      let cutRowEnd = row; //Math.floor((2 * row) / 3);
      // rows
      for (let i = cutRowStart; i < cutRowEnd; i++) {
        // columns
        for (let j = 0; j < col; j++) {
          if (pixels[i] && pixels[i][j]) {
            let [r, g, b] = pixels[i][col - 1 - j]; // j => mirrored; col - 1 - j => un-mirrored
            let color = `rgb(${r},${g},${b})`;
            ledPixels[i][j].material.color = new THREE.Color(color);
          }
        }
      }
    });
}

/** Keep the three.js scene rendering */

function animate() {
  ledPixels.length && loopSync();

  renderer.render(scene, camera);
  window.requestAnimationFrame(animate);
}
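
drawImage plus getImageData on every animation frame is relatively expensive. If it becomes a bottleneck, one simple option (an assumption on my part, not part of the original demo) is to sample the video only every few frames:

// Sketch: sync the LED colors every 3rd frame instead of every frame
let frameCount = 0;

function animateThrottled() {
  frameCount++;
  if (ledPixels.length && frameCount % 3 === 0) {
    loopSync();
  }

  renderer.render(scene, camera);
  window.requestAnimationFrame(animateThrottled);
}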

Play the video and sync it to the 3D LED screen:

let video = null;

function loadVideo(
  width = 130,
  height = 350,
  src = "https://xxx.com/video.mp4"
) {
  video = createVideo({ src, muted: true, width, height });

  // play() returns a promise; catch the rejection in case autoplay is blocked
  video.play().catch((err) => console.error(err));

  // once the video is playing, keep sampling its pixels and rendering them onto the 3D LED screen
  animate();
}

loadVideo();


Full code

<template>
  <div>
    <div class="container" ref="container"></div>
    <div class="pick">
      <div>(^_-)</div>
      <select class="videolist" @change="changeVideo" v-model="theVideo">
        <option :value="{}">选择视频</option>
        <option v-for="(item, index) in videos" :key="index" :value="item">
          {{ item.name }}
        </option>
      </select>
    </div>
  </div>
</template>

<script setup>
import { onMounted, onUnmounted, ref } from "vue";
import * as THREE from "three";
import { OrbitControls } from "three/examples/jsm/controls/OrbitControls";

let scene = null;
let camera = null;
let renderer = null;
let container = ref(null);
let controls = null;

let videos = [
  {
    name: "我爱你",
    src: "https://moveharder.github.io/wc3d/videos/iloveu.mp4",
    width: 130,
    height: 350,
  },
  {
    name: "赛博人",
    src: "https://moveharder.github.io/wc3d/videos/cyber.mp4",
    width: 200,
    height: 200,
  },
  {
    name: "Bear River",
    src: "https://moveharder.github.io/wc3d/videos/bear_river.mp4",
    width: 250,
    height: 140,
  },
  {
    name: "Song MV",
    src: "https://moveharder.github.io/wc3d/videos/mv.mp4",
    width: 1280 / 5,
    height: 720 / 5,
  },
];
let theVideo = ref({});
let isOpen = ref(false);

let video = null;
var canvas = null;
var ctx = null;
const gap = 4; // sampling step in pixels, roughly 1~10; smaller values give a sharper LED image but cost more performance (0 would never advance the loop)
let ledPixels = [];
let ledGroup = new THREE.Group();
let row = 0;
let col = 0;
let rafId = 0; // requestAnimationFrame handle, used to cancel the render loop on unmount

function initTHREE() {
  // create the scene
  scene = new THREE.Scene();

  const FOG_COLOR = "#262837";
  camera = new THREE.PerspectiveCamera(
    75,
    window.innerWidth / window.innerHeight,
    0.1,
    500
  );
  camera.position.set(0, 10, 100);

  // create the renderer
  renderer = new THREE.WebGL1Renderer({
    // alpha: true, // transparency option 1
    antialias: true,
    // logarithmic depth buffer: reduces z-fighting/flicker where surfaces intersect
    logarithmicDepthBuffer: true,
  });
  // renderer.setClearAlpha(0); // transparency option 2
  renderer.setSize(window.innerWidth, window.innerHeight);
  renderer.setPixelRatio(Math.max(window.devicePixelRatio, 2));
  // renderer.setClearColor("#262837");
  renderer.shadowMap.enabled = true;
  renderer.shadowMap.type = THREE.PCFSoftShadowMap;
  renderer.physicallyCorrectLights = true;
  renderer.outputEncoding = THREE.sRGBEncoding; // slightly richer color output
  // renderer.toneMapping = THREE.ACESFilmicToneMapping; // filmic tone mapping
  // renderer.toneMappingExposure = 1.5;

  // fog
  renderer.setClearColor(FOG_COLOR);
  renderer.toneMapping = THREE.ACESFilmicToneMapping; // filmic tone mapping
  scene.fog = new THREE.Fog(FOG_COLOR, 1, 400);
}

function createLight() {
  // ambient light
  const ambientLight = new THREE.AmbientLight(0xffffff, 2);
  scene.add(ambientLight);

  // directional light
  const moonLight = new THREE.DirectionalLight("#ffffff", 1.5);
  moonLight.position.set(-5, 20, 10);
  moonLight.castShadow = true;
  scene.add(moonLight);
}

/** Add orbit controls */
function createOrbitControls(object, domElement) {
  controls = new OrbitControls(object, domElement);
  controls.enableDamping = true;
  controls.minDistance = 0.01; // minimum camera distance

  controls.target.set(0, 20, 0);
  controls.enablePan = false;

  controls.maxPolarAngle = 1.5;
  controls.minPolarAngle = 1.5;

  controls.minAzimuthAngle = -0.5; // radians
  controls.maxAzimuthAngle = 0.5; // radians
}

/** Render loop */
function animate() {
  ledPixels.length && loopSync();

  controls && controls.update();

  renderer.render(scene, camera);
  rafId = window.requestAnimationFrame(animate);
}

function createVideo({
  src = "",
  loop = true,
  muted = false,
  width = 640,
  height = 480,
  auto_play = false,
}) {
  const video = document.createElement("video");
  video.id = "source_video";
  video.src = src;
  video.loop = loop;
  video.setAttribute("webkit-playsinline", true); // 禁止自动全屏播放
  video.setAttribute("playsinline", true);
  video.setAttribute("crossorigin", "anonymous"); //解决视频资源跨域问题,否则在设置VideoTexture时会报错。【THREE.WebGLState: DOMException: Failed to execute 'texImage2D' on 'WebGLRenderingContext': The video element contains cross-origin data, and may not be loaded.】
  video.muted = muted;
  video.width = width;
  video.height = height;
  video.style.cssText = `
        position: absolute;
        left: 50%;
        top: 100px;
        z-index:1001;
        transform: translateX(-50%);
    `;

  if (auto_play) {
    // play() returns a promise; a try/catch will not catch its rejection,
    // so handle it with .catch() (autoplay can be blocked, especially on iOS)
    video.play().catch((err) => {
      console.error(err);
    });
  }

  return video;
}

function syncToCanvas() {
  return new Promise((resolve, reject) => {
    try {
      const { width = 0, height = 0 } = video || {};
      if (!canvas) {
        canvas = document.createElement("canvas");
        canvas.id = "cvideo";
        canvas.width = width;
        canvas.height = height;
        canvas.title = "canvas synced from video";

        ctx = canvas.getContext("2d");
        canvas.style.cssText = `
                    position: absolute;
                    right: 30px;
                    top: 20px;
                    border: solid 2px red;
                `;
      }
      ctx.drawImage(video, 0, 0, width, height);
      const imgData = ctx.getImageData(0, 0, width, height).data;
      resolve(imgData);
    } catch (err) {
      reject({ message: "call get_Image_Data error.", error: err });
    }
  });
}
function turnImageData2Pixel(imageData) {
  return new Promise((resolve, reject) => {
    try {
      let pixels = []; // downsampled pixel grid, one [r, g, b] entry per LED block
      for (let h = 0; h < video.height; h += gap) {
        let row = [];
        for (let w = 0; w < video.width; w += gap) {
          let pos = (h * video.width + w) * 4;
          let r = imageData[pos];
          let g = imageData[pos + 1];
          let b = imageData[pos + 2];
          row.push([r, g, b]);
        }
        pixels.push(row);
      }
      resolve(pixels);
    } catch (err) {
      reject({ message: "call turnImageData2Pixel error.", err: err });
    }
  });
}

function createLedScreen(
  row,
  col,
  pixelWidth,
  pixelHeight,
  pixelGap = 0.1,
  imgData = null
) {
  // let row = Math.floor(height / (pixelHeight + pixelGap));
  // let col = Math.floor(width / (pixelWidth + pixelGap));
  ledPixels = [];

  let width = col * (pixelWidth + pixelGap) - pixelGap;
  let height = row * (pixelHeight + pixelGap) - pixelGap;

  console.log("创建led屏幕:", col, row);

  // 复用Geo优化性能
  let geo = new THREE.PlaneGeometry(pixelWidth, pixelHeight);

  for (let i = 0; i < row; i++) {
    let colArr = [];
    for (let j = 0; j < col; j++) {
      let mesh = new THREE.Mesh(geo, new THREE.MeshBasicMaterial());
      if (imgData) {
        // setRGB expects 0-1 components, so normalize the 0-255 channel values
        const [r, g, b] = imgData[i][col - 1 - j];
        mesh.material.color.setRGB(r / 255, g / 255, b / 255);
      }

      let x = j * pixelWidth - width / 2 + pixelGap * j + pixelWidth / 2;
      let y = i * pixelHeight - height / 2 + pixelGap * i + pixelHeight / 2;
      let z = 25.04;
      mesh.userData = { x, y, z };
      mesh.position.set(x, y, z);
      colArr.push(mesh);

      ledGroup.add(mesh);
    }

    ledPixels.push(colArr);
  }

  // adjust the LED screen's position and orientation
  ledGroup.position.set(0, 30, 0);
  ledGroup.rotateZ(-Math.PI);
  // ledGroup.rotateX(-Math.PI/4);

  // add to the scene
  scene.add(ledGroup);
}

function loopSync() {
  syncToCanvas()
    .then((imgData) => {
      return turnImageData2Pixel(imgData);
    })
    .then((pixels) => {
      let r = pixels.length;
      let c = pixels[0].length;

      // initialize the 3D screen once
      if (!row && !col) {
        row = r;
        col = c;

        createLedScreen(row, col, 0.4, 0.4, 0.1);
      }

      let cutRowStart = 0; //Math.floor(row / 3);
      let cutRowEnd = row; //Math.floor((2 * row) / 3);
      // rows
      for (let i = cutRowStart; i < cutRowEnd; i++) {
        // columns
        for (let j = 0; j < col; j++) {
          if (pixels[i] && pixels[i][j]) {
            let [r, g, b] = pixels[i][col - 1 - j]; // j => mirrored; col - 1 - j => un-mirrored
            let color = `rgb(${r},${g},${b})`;
            ledPixels[i][j].material.color = new THREE.Color(color);
            // pixels[i][j].material.color = new THREE.Color(color);
          }
        }
      }
    });
}

function loadVideo(
  width = 130,
  height = 350,
  src = "https://xxx.com/video.mp4"
) {
  video = createVideo({ src, muted: false, width, height });

  let sourceVideoEl = document.querySelector("#source_video");
  if (sourceVideoEl) {
    document.body.removeChild(sourceVideoEl);
  }
  document.body.appendChild(video);

  // play() returns a promise; catch the rejection in case autoplay is blocked
  video.play().catch((err) => console.error(err));

  // once the video is playing, keep sampling its pixels and rendering them onto the 3D LED screen
  animate();

  pick();
}

const changeVideo = (e) => {
  const { width, height, src } = theVideo.value;
  row = 0;
  col = 0;
  try {
    ledGroup.removeFromParent();
  } catch (err) {
    console.error(err);
  }

  ledGroup = new THREE.Group();
  ledPixels = [];

  canvas = null;
  ctx = null;

  resetVideo();

  loadVideo(width, height, src);
};

const resetVideo = () => {
  if (video) {
    video.pause();
    video = null;
  }
};

const pick = () => {
  isOpen.value = true;
  if (video) {
    video.play();
  }

  loopSync();
};

onMounted(() => {
  initTHREE();

  createLight();

  if (container) {
    createOrbitControls(camera, container.value);
    container.value.appendChild(renderer.domElement);
  }

  // animate(); // started after a video has been selected
});

onUnmounted(() => {
  resetVideo();

  ledPixels = [];

  // stop rendering
  renderer.dispose();
  // cancelAnimationFrame takes the request handle, not the callback
  window.cancelAnimationFrame(rafId);
});
</script>
<style lang="less" scoped>
.container {
  width: 100vw;
  height: 100vh;
  background-color: #262837;
}

.pick {
  position: absolute;
  z-index: 100;
  font-size: 50px;
  text-align: center;
  top: 75%;
  left: 50%;
  transform: translateX(-50%);
  color: #fff;
  border-radius: 50%;
  height: 214px;
  width: 214px;
  display: flex;
  justify-content: center;
  align-items: center;
  padding: 20px;
  box-sizing: border-box;
  cursor: grab;
  box-shadow: 0 0 15px #ffffff, inset 0 0 10px #f03030;
  animation: breathing ease-in infinite 3s alternate;
  transform-origin: left top;
  flex-direction: column;
}

@keyframes breathing {
  0% {
    transform: rotateZ(-10deg) translateX(-50%) translateY(-50%);
  }

  100% {
    transform: rotateZ(10deg) translateX(-50%) translateY(-50%);
  }
}

.videolist {
  margin-top: 20px;
  border: none;
  outline: none;
  font-size: 18px;
  background-color: black;
  color: #fff;
  padding: 5px;
  cursor: pointer;
}
</style>