/*
 * 基于 pako 的前端数据压缩与解压缩
 * (Frontend data compression and decompression based on pako)
 */
/*
 * @Author: luguodong
 * @Date: 2022-11-30 15:32:19
 * @LastEditors: luguodong
 * @LastEditTime: 2022-12-01 14:55:40
 */

import pako from "pako";

// Compress: serialize -> URI-encode -> gzip -> base64 string.
// Falsy inputs (null/undefined/""/0) are returned unchanged.
const zip = (data) => {
  if (!data) return data;

  // Strings and numbers are compressed as-is; everything else is
  // JSON-serialized first (unzip's JSON.parse restores the structure).
  const dataJson =
    typeof data !== "string" && typeof data !== "number"
      ? JSON.stringify(data)
      : data;

  // encodeURIComponent keeps the payload ASCII-only, so the
  // byte <-> charCode round trip with btoa/atob is lossless.
  const str = encodeURIComponent(dataJson);

  // pako.gzip returns a Uint8Array of compressed bytes.
  const bytes = pako.gzip(str);

  // Convert bytes to a binary string in fixed-size chunks. Spreading the
  // whole array into String.fromCharCode at once can throw
  // "Maximum call stack size exceeded" on large payloads; this mirrors
  // the chunked conversion already used in unzip().
  let binary = "";
  for (let i = 0; i < bytes.length; i += chunk) {
    binary += String.fromCharCode(...bytes.subarray(i, i + chunk));
  }

  return btoa(binary);
};

const chunk = 8 * 1024;

// Decompress: base64 -> binary string -> bytes -> gunzip -> URI-decode -> parse.
// Inverse of zip(); falsy inputs are returned unchanged, mirroring zip's guard
// (previously unzip(null)/unzip("") would throw inside atob).
const unzip = (compressedData) => {
  if (!compressedData) return compressedData;

  // base64 -> binary string -> byte values.
  const strData = atob(compressedData);
  const charData = strData.split("").map((x) => x.charCodeAt(0));
  const binData = new Uint8Array(charData);

  // Decompress back to the URI-encoded payload (ASCII-only bytes).
  const data = pako.ungzip(binData);

  // Rebuild the string chunk by chunk: spreading the whole array into a
  // single String.fromCharCode call can overflow the call stack on large
  // payloads. One uniform loop replaces the old loop-plus-trailing-slice.
  let str = "";
  for (let i = 0; i < data.length; i += chunk) {
    str += String.fromCharCode(...data.subarray(i, i + chunk));
  }

  const unzipStr = decodeURIComponent(str);

  // Objects/arrays/numbers parse back to their original value; plain
  // strings fail JSON.parse and are returned as the decoded string.
  try {
    return JSON.parse(unzipStr);
  } catch (_) {
    return unzipStr;
  }
};

export { zip, unzip };