在浏览器中如何对视频做格式转化

390 阅读1分钟

原来在浏览器端可以通过 JS 直接对视频做格式转换,JS 居然这么强大。下面通过 ffmpeg.wasm 做了一个演示 demo。但这种方案有一个问题,就是比较吃内存。

效果演示

001.gif

方案概述

  • 先按照 这篇文章 的方法,在前端录制生成一个视频
  • 再通过 ffmpeg.wasm 将生成的视频转换成期望的格式

关键代码

<template>
  <!-- Left column: output-format picker. Right column: the 3D canvas whose
       stream gets recorded and transcoded by ffmpeg.wasm. -->
  <div class="box">
    <div>
      <h1>配置</h1>
      <div>
        视频格式:<a-select
          v-model:value="formatValue"
          :options="options"
        ></a-select>
      </div>
      <!-- Audio-track upload, currently disabled — see the commented-out
           muxing code in transcode(). -->
      <!-- <div>
        选择音频:<input
          type="file"
          id="uploader"
          accept="audio/mpeg"
          @change="uploadAudio"
        />
      </div> -->
    </div>
    <div>
      <h1>画布</h1>
      <!-- Start/stop are mutually exclusive via isRecording. -->
      <p>
        <a-button :disabled="isRecording" @click="play">开启录制</a-button>
        <a-button :disabled="!isRecording" @click="stopRecord"
          >停止录制</a-button
        >
      </p>
      <!-- Oasis engine renders here; play() captures this canvas's stream. -->
      <canvas style="width: 50vw; height: 50vh" id="canvas" />
    </div>
  </div>
</template>

<script lang="ts" setup>
/**
 * @title Canvas Recording + ffmpeg.wasm Format-Conversion Demo
 * @category Video
 */
import {
  WebGLEngine,
  SphereColliderShape,
  BoxColliderShape,
  Vector3,
  MeshRenderer,
  BlinnPhongMaterial,
  PointLight,
  PrimitiveMesh,
  Camera,
  StaticCollider,
  Script,
  DynamicCollider,
  AssetType,
  Engine,
  Entity,
  GLTFResource,
  BoundingBox,
  SkinnedMeshRenderer,
} from "oasis-engine";
import { OrbitControl } from "@oasis-engine/controls";

import { LitePhysics } from "@oasis-engine/physics-lite";
import { onMounted, ref } from "vue";
import { onUnmounted } from "vue";
import { message } from "ant-design-vue";
import * as TWEEN from "@tweenjs/tween.js";
import { createFFmpeg, fetchFile } from "@ffmpeg/ffmpeg";
// Single ffmpeg.wasm instance, lazily loaded on first transcode.
const ffmpeg = createFFmpeg({ log: true });

// Target container/MIME type for the transcode, driven by the <a-select>.
const formatValue = ref("video/mp4");
const options = [
  "video/mp4",
  "video/avi",
  "video/webm",
  "image/gif",
].map((mime) => ({ value: mime, label: mime }));

// Boot the 3D scene once the canvas element exists.
onMounted(createOasis);

let engine: WebGLEngine;
let rootEntity: Entity;
let camera: Camera;
let rotateControl: Rotate;
function createOasis() {
  engine = new WebGLEngine("canvas", LitePhysics, {
    alpha: false,
    premultipliedAlpha: false,
  });
  engine.canvas.resizeByClientSize();
  const scene = engine.sceneManager.activeScene;
  scene.background.solidColor.setValue(0, 0, 0, 0);
  rootEntity = scene.createRootEntity("root");

  scene.ambientLight.diffuseSolidColor.setValue(1, 1, 1, 1);
  scene.ambientLight.diffuseIntensity = 1.2;

  // init camera
  const cameraEntity = rootEntity.createChild("camera");
  camera = cameraEntity.addComponent(Camera);
  cameraEntity.transform.setPosition(10, 10, 10);
  let orbitControl = cameraEntity.addComponent(OrbitControl);

  // init point light
  const light = rootEntity.createChild("light");
  light.transform.setPosition(0, 3, 0);
  const pointLight = light.addComponent(PointLight);
  pointLight.intensity = 0.3;

  // create box test entity
  let modelEntity = rootEntity.createChild("ModelEntity");
  engine.resourceManager
    .load<GLTFResource>("./models/a0ff1c60-5e21-49e5-946a-675515d98736.glb")
    .then((asset) => {
      const { animations, defaultSceneRoot } = asset;
      modelEntity.addChild(defaultSceneRoot);

      setTargetCenter(modelEntity, orbitControl);

      rotateControl = modelEntity.addComponent(Rotate);
    });

  // Run engine
  engine.run();
  onUnmounted(() => scene.destroy());
}

/**
 * Center the orbit-control target on the model and reposition/refit the
 * camera (position, near/far planes) proportionally to the model's size.
 *
 * @param modelEntity  root entity holding the loaded glTF scene
 * @param orbitControl orbit control whose target and camera are adjusted
 */
function setTargetCenter(modelEntity: Entity, orbitControl: OrbitControl) {
  const meshRenderers = new Array<MeshRenderer>();
  modelEntity.getComponentsIncludeChildren(MeshRenderer, meshRenderers);

  const skinnedMeshRenderers: SkinnedMeshRenderer[] = [];
  modelEntity.getComponentsIncludeChildren(
    SkinnedMeshRenderer,
    skinnedMeshRenderers
  );

  const renderers = meshRenderers.concat(skinnedMeshRenderers);
  // Nothing renderable yet — keep the current camera framing.
  if (renderers.length === 0) return;

  const boundingBox = new BoundingBox();
  const center = new Vector3();
  const extent = new Vector3();

  // Fix: seed the box from the first renderer's bounds. The original seeded
  // it with min = max = (0,0,0), which forces the merged box to contain the
  // origin and mis-frames any model positioned away from it.
  const seed = renderers[0].bounds;
  boundingBox.min.setValue(seed.min.x, seed.min.y, seed.min.z);
  boundingBox.max.setValue(seed.max.x, seed.max.y, seed.max.z);

  // Merging a box with itself is a no-op, so folding in all renderers
  // (including the first) is safe.
  renderers.forEach((renderer) => {
    BoundingBox.merge(renderer.bounds, boundingBox, boundingBox);
  });
  boundingBox.getExtent(extent);
  const size = extent.length();

  boundingBox.getCenter(center);
  orbitControl.target.setValue(center.x, center.y, center.z);
  const cameraEntity = orbitControl.camera;
  const camera = cameraEntity.getComponent(Camera);
  // Pull the camera back proportionally to the model's size.
  cameraEntity.transform.setPosition(size * 2, size * 1.5, size * 4);

  camera.farClipPlane = size * 12;

  // Large models get a proportional near plane; otherwise use a sane default.
  if (camera.nearClipPlane > size) {
    camera.nearClipPlane = size / 10;
  } else {
    camera.nearClipPlane = 0.1;
  }
}

/** Script that spins its entity one full turn around Y (500 ms, sinusoidal). */
class Rotate extends Script {
  isRotating: boolean = false;
  originRot = this.entity.transform.rotation.clone();

  /** Tween Y from `fromY` to `toY`, keeping X/Z at their original values. */
  private spin(fromY: number, toY: number): void {
    if (this.isRotating) return;
    const { x, z } = this.originRot;
    this.isRotating = true;
    new TWEEN.Tween({ y: fromY })
      .to({ y: toY }, 500)
      .onUpdate(({ y }) => this.entity.transform.setRotation(x, y, z))
      .onComplete(() => {
        this.isRotating = false;
      })
      .easing(TWEEN.Easing.Sinusoidal.InOut)
      .start();
  }

  /** One full turn forward from the original rotation. */
  start(): void {
    this.spin(this.originRot.y, this.originRot.y + 360);
  }

  /** One full turn backward, returning to the original rotation. */
  startReverse() {
    this.spin(this.originRot.y + 360, this.originRot.y);
  }

  onUpdate(deltaTime: number): void {
    // Advance all active tweens once per engine frame.
    TWEEN.update();
  }
}

// Canvas capture stream and the recorder driving it; both created lazily on
// the first call to play() and reused afterwards.
let stream = ref<MediaStream>();
let recorder: MediaRecorder;

/**
 * Start recording the engine canvas. On first use, captures a 60 fps stream
 * from the canvas and wires up a MediaRecorder; chunks are buffered in
 * `blobs`, and stopping triggers `transcode()`.
 */
function play() {
  if (!stream.value) {
    // Capture the WebGL canvas as a media stream (60 fps).
    // NOTE(review): `_webCanvas` is a private engine field — confirm there is
    // no public accessor before upgrading oasis-engine.
    stream.value = engine.canvas._webCanvas.captureStream(60);

    // Record as VP8/WebM; the format conversion happens later in ffmpeg.
    stream.value &&
      (recorder = new MediaRecorder(stream.value, {
        mimeType: "video/webm;codecs=vp8",
      }));
    // A chunk of recorded media is available — buffer it.
    recorder.ondataavailable = (event) => {
      blobs.push(event.data);
    };
    // Recording stopped — hand the buffered chunks to ffmpeg.
    recorder.onstop = (event) => {
      isRecording.value = false;
      transcode();
    };
  }
  // Fix: drop chunks from any previous take. Without this, every new
  // recording was transcoded together with all earlier recordings.
  blobs.length = 0;
  isRecording.value = true;
  recorder.start();
}

// Recorded chunks collected by the MediaRecorder between start and stop.
const blobs: Blob[] = [];
// Result of the most recent transcode; opened/downloaded by the UI.
const webm = ref<Blob>();
// Drives the enabled/disabled state of the start/stop buttons.
const isRecording = ref(false);

/**
 * Stop the active recording. This fires `recorder.onstop`, which resets the
 * UI state and kicks off the transcode of the buffered chunks.
 */
function stopRecord() {
  recorder.stop();
}

/**
 * Download the transcoded video via a temporary hidden anchor.
 * The anchor is removed and the object URL revoked after the click so the
 * blob is not kept alive for the lifetime of the page.
 */
function download() {
  if (!webm.value) return;

  const a = document.createElement("a");
  document.body.appendChild(a);
  a.style.display = "none";

  const url = URL.createObjectURL(webm.value);
  a.href = url;
  a.download = "video";

  a.addEventListener("click", () => {
    if (a.parentElement) {
      a.parentElement.removeChild(a);
    }
    // Fix: release the blob URL — the original never revoked it, leaking the
    // whole video in memory on every download. Deferred a tick so the
    // browser has started the download before the URL disappears.
    setTimeout(() => URL.revokeObjectURL(url), 0);
  });

  a.click();
}

// Audio file picked by the user; currently unused — see the commented-out
// muxing code in transcode().
let audioFile: File | undefined;

/**
 * `change` handler for the (currently disabled) audio <input type="file">.
 * Remembers the first selected file, or `undefined` when none was chosen.
 *
 * @param e native change event from the file input
 */
function uploadAudio(e: Event) {
  const files = (e.target as HTMLInputElement).files;
  // Fix: typed Event parameter replaces the implicit-any destructuring, and
  // optional chaining avoids a crash when the selection is empty/cleared.
  audioFile = files?.[0];
}

// Monotonic counter so each recording gets a unique input name in ffmpeg's
// virtual file system.
let count = 0;

/**
 * Transcode the recorded chunks into the format selected in `formatValue`
 * via ffmpeg.wasm, store the result in `webm`, and open it in a new tab.
 * ffmpeg is loaded lazily on first use. Both virtual-FS files are unlinked
 * afterwards — ffmpeg.wasm is memory-hungry, and keeping every input and
 * output file resident grows the heap on each conversion (the exact problem
 * noted at the top of the article).
 */
const transcode = async () => {
  const inputName = `test${count}`;
  count++;
  const outputName = `output.${formatValue.value.split("/")[1]}`;

  if (!ffmpeg.isLoaded()) {
    await ffmpeg.load();
  }

  ffmpeg.FS(
    "writeFile",
    inputName,
    await fetchFile(new Blob(blobs, { type: `${formatValue.value}` }))
  );
  await ffmpeg.run("-i", inputName, outputName);
  const data = ffmpeg.FS("readFile", outputName);
  webm.value = new Blob([data.buffer], { type: formatValue.value });

  // Fix: free the virtual-FS copies. The original left the unlink commented
  // out, so every take's input AND output stayed in memory for the lifetime
  // of the page; unlinking the output also avoids name collisions between
  // runs that target the same format.
  ffmpeg.FS("unlink", inputName);
  ffmpeg.FS("unlink", outputName);

  open(URL.createObjectURL(webm.value));
};
</script>

<style scoped lang="scss">
/* Top spacing for the page content — presumably clears a fixed site header;
   TODO confirm against the surrounding layout. */
.box {
  padding-top: 60px;
}
/* Dark backdrop so the transparent WebGL canvas reads clearly. */
#canvas {
  background-color: #222;
}
</style>