FFmpeg.wasmの使い方の一例として、
・ブラウザで動画をアップロード
・動画から音声を抽出
・動画からすべてのフレーム画像を取得
・すべてのフレームをグレースケールに加工
・グレースケール動画を作成
・グレースケール動画と音声を合成して表示
するコードを以下に示します。
動画のプレイヤー上で右クリックするとファイルの保存選択ができます。
一通りのことができる例題となっています。
加工の一例として単純なグレースケール変換をしましたが、この部分をいじると色々な事ができると思います。
ご活用ください。
コード
grayVideo.php
<?php
// Send the cross-origin isolation headers required for SharedArrayBuffer
// (which multi-threaded ffmpeg.wasm relies on), plus UTF-8 handling.
ini_set('mbstring.internal_encoding' , 'UTF-8');
header('Cross-Origin-Opener-Policy: same-origin');
header('Cross-Origin-Embedder-Policy: require-corp');
?>
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<script>
// Verify the COOP/COEP headers took effect: without cross-origin isolation,
// SharedArrayBuffer is unavailable and ffmpeg.wasm cannot use threads.
if (crossOriginIsolated) {
console.log('crossOriginIsolated');
} else {
console.log('NOT crossOriginIsolated');
}
</script>
<script src="ffmpeg.min.js"></script>
</head>
<body>
<!-- input/img are void elements: no closing tag allowed. -->
<input type="file" id="fileInput"><br>
<video id="my-video" controls crossorigin="anonymous"></video>
<audio id="my-audio" controls crossorigin="anonymous"></audio>
<img id="my-image"><br>
<canvas id="my-canvas"></canvas>
<script src="app5.js"></script>
</body>
</html>
app5.js
(async () => {
console.log('app.js');
const { createFFmpeg, fetchFile } = FFmpeg
const ffmpeg = createFFmpeg({
corePath: 'ffmpeg-core.js',
log: true
});
await ffmpeg.load()
const sizeLimit = 1024 * 1024 * 10; // 制限サイズ
const fileInput = document.getElementById('fileInput'); // input要素
let mDuration = 0;
// Runs when the user picks a file: validates size, shows the upload in the
// page's <video>, then uses ffmpeg.wasm to (1) extract the audio track as
// mp3, (2) dump every frame as a jpg, and (3) convert each frame to
// grayscale on an off-screen canvas.  A 1-second poll waits until every
// frame has been processed before handing them to mMakeGrayVideo().
const handleFileSelect = async () => {
const files = fileInput.files;
// Reject anything over the 10 MB limit before doing any heavy work.
for (let i = 0; i < files.length; i++) {
if (files[i].size > sizeLimit) {
alert('ファイルサイズは10MB以下にしてください');
fileInput.value = ''; // reset the file input
return;
}//if
}//for
const t_start = performance.now();
console.log('size:'+files[0].size );
console.log('name:'+files[0].name );
console.log('URL:'+URL.createObjectURL(files[0]) );
//var mVideo = document.createElement("video");
// Show the original upload in the page's <video> element.
var mVideo = document.getElementById("my-video");
mVideo.src = URL.createObjectURL(files[0]);
mVideo.addEventListener('loadedmetadata', function() {
console.log('幅:', mVideo.videoWidth);
console.log('高さ:', mVideo.videoHeight);
console.log('長さ:', mVideo.duration);
// NOTE(review): mDuration is set asynchronously here but is read below
// when computing mFPS — if metadata has not loaded by then, mDuration is
// still 0 and mFPS becomes Infinity.  Confirm the intended ordering.
mDuration = mVideo.duration;
});
mVideo.addEventListener("play", function() {
}, false);
const { name } = files[0];
console.log('name:'+name);
// Copy the upload into ffmpeg.wasm's in-memory virtual file system.
await ffmpeg.write(name, files[0] );
// Extract the audio track as a 192 kbps mp3 (-vn drops the video stream).
var strCmd = '-i '+ name + ' -f mp3 -ab 192000 -vn output.mp3';
console.log('strCmd:'+strCmd);
await ffmpeg.run(strCmd);
const data = ffmpeg.read('output.mp3')
//var mAudio = new Audio( URL.createObjectURL(new Blob([data.buffer], { type: 'audio/mp3' })) );
var mAudio = document.getElementById("my-audio");
mAudio.src = URL.createObjectURL(new Blob([data.buffer], { type: 'audio/mp3' }));
//mAudio.play();
//strCmd = '-i '+ name + ' -vcodec png image_%03d.png';
// Dump every frame of the video as image_001.jpg, image_002.jpg, ...
strCmd = '-i '+ name + ' image_%03d.jpg';
console.log('strCmd:'+strCmd);
await ffmpeg.run(strCmd);
let mArray = [];
let imgData = [];
let mImage = [];
let c_ = 1;
let e_ = 1;
// Read frames back one by one; the loop ends when ffmpeg.read throws on
// the first missing index, leaving c_ as the total frame count.
while(e_==1){
// Zero-pad the frame number to 3 digits (e.g. 1 -> "001").
let str_num = '00' + c_;
str_num = str_num.substring(str_num.length-3,str_num.length);
//let str_image = 'image_' + str_num + '.png';
let str_image = 'image_' + str_num + '.jpg';
console.log('str_image:'+str_image);
try {
imgData[c_] = await ffmpeg.read(str_image);
console.log('imgData:'+imgData[c_].length);
c_ += 1;
}
catch (e) {
// Read failed -> no more frames; step back to the last valid index.
c_ -= 1;
e_ = 0;
break;
}
}//
console.log('c_:'+c_);
// Derive the frame rate from frame count / duration.
// NOTE(review): assumes the loadedmetadata handler above has already run.
let mFPS = c_ / mDuration;
console.log('mFPS:'+mFPS);
let m = 0;
var ks = 1;
var ke = c_;
// Convert every frame to grayscale.  Each conversion happens inside the
// image's onload callback, so completion is tracked by incrementing m.
for (let k = ks; k <= ke; k++)
{
//var mImage = document.getElementById("my-image");
mImage[k] = new Image();
//mImage[k].src = URL.createObjectURL(new Blob([imgData[k].buffer], { type: 'image/png' }));
mImage[k].src = URL.createObjectURL(new Blob([imgData[k].buffer], { type: 'image/jpg' }));
mImage[k].onload = function() {
console.log('mImage['+k+']:'+mImage[k].width+', '+mImage[k].height);
let w_ = mImage[k].width;
let h_ = mImage[k].height;
//let canvas = document.getElementById('my-canvas');
// Draw the frame on an off-screen canvas to access its pixel data.
const canvas = document.createElement('canvas')
canvas.width = w_;
canvas.height = h_;
let ctx = canvas.getContext("2d");
ctx.clearRect(0, 0, canvas.width, canvas.height)
ctx.drawImage(mImage[k], 0, 0, w_, h_)
let src = ctx.getImageData(0, 0, w_, h_)
let dst = ctx.createImageData(w_, h_)
console.log('src:'+src.data.length);
console.log('src:'+src.data[w_*h_/2+w_/2]);
// Simple average-of-RGB grayscale; the alpha channel is carried over.
for (let i = 0; i < src.data.length; i += 4) {
var g = (src.data[i+0] + src.data[i+1] + src.data[i+2]) / 3;
dst.data[i] = g
dst.data[i + 1] = g
dst.data[i + 2] = g
dst.data[i + 3] = src.data[i + 3]
}
ctx.putImageData(dst, 0, 0)
// Store the grayscale frame as a data URL for the encoding step (1-based).
const dataUrl = canvas.toDataURL()
mArray[k] = dataUrl
m += 1;
}// onload
}// for k
var mTimer;
// Poll once a second until all c_ frames finished their onload conversion,
// then build the grayscale video.
mTimer = setInterval(() => {
console.log('mArray:'+m);
if(m==c_){
clearInterval(mTimer);
console.log('Timer cleared.');
mMakeGrayVideo(mArray, mFPS)
}//
}, 1000);
const t_end = performance.now();
console.log('time:'+ (t_end - t_start) +'[ms]' );
}//
fileInput.addEventListener('change', handleFileSelect);
// Encodes the grayscale frames into an mp4 (with the extracted audio),
// wraps the resulting bytes in a Blob URL and appends a playable <video>.
// @param {string[]} images - data-URL frames (1-based; index 0 is a hole).
// @param {number} mFPS - frame rate for the encode.
async function mMakeGrayVideo(images, mFPS){
const video = await generateVideo(images, mFPS)
// BUG FIX: the original read `createObjectUrl(, { type: 'video/mp4' })` —
// a syntax error with the first argument missing.  Pass the mp4 bytes'
// underlying buffer as the Blob part list.
const objectUrl = createObjectUrl([video.buffer], { type: 'video/mp4' })
insertVideo(objectUrl)
}//
// Builds the grayscale video: writes each processed frame into ffmpeg's
// virtual FS, encodes them into output.mp4 at the measured frame rate,
// then muxes in the previously extracted output.mp3.
// @param {string[]} images - data-URL frames (1-based; index 0 is a hole).
// @param {number} mFPS - frames per second for the encode.
// @returns {Promise<Uint8Array>} the bytes of the muxed output2.mp4.
async function generateVideo(images, mFPS) {
console.log('generateVideo');
// BUG FIX: the original used images.forEach(async ...), which discards the
// awaited write promises — ffmpeg.run could start before any frame was
// written.  Promise.all over map (which, like forEach, skips the sparse
// hole at index 0) guarantees every write completes first.
// NOTE(review): each image is a data-URL string; confirm ffmpeg.write
// accepts that form, or convert via fetchFile() first.
await Promise.all(
images.map((image, i) => ffmpeg.write(`gimage${i}.png`, image))
)
let str_cmd = '-r ' + mFPS + ' -i gimage%d.png -pix_fmt yuv420p output.mp4'
await ffmpeg.run(str_cmd)
let str_cmd2 = '-i output.mp4 -i output.mp3 output2.mp4'
await ffmpeg.run(str_cmd2)
const data = ffmpeg.read('output2.mp4')
return data
}
// Wrap the given parts in a Blob and return an object URL pointing at it.
// @param {BlobPart[]} array - parts handed straight to the Blob constructor.
// @param {BlobPropertyBag} options - e.g. { type: 'video/mp4' }.
// @returns {string} a blob: URL referencing the new Blob.
function createObjectUrl(array, options) {
console.log('createObjectUrl');
return URL.createObjectURL(new Blob(array, options))
}
// Create a <video controls> element for the given source and append it to
// the document body once its metadata has loaded.
// @param {string} src - object URL (or any playable URL) for the video.
function insertVideo(src) {
console.log('insertVideo');
const player = document.createElement('video')
player.controls = true
player.onloadedmetadata = () => {
document.body.appendChild(player)
}
player.src = src
}
})()
こちらで動作確認できます。
https://g-llc.co.jp/grayVideo.php