关键词:Canvas、视频处理、视频滤镜、绿幕抠像、视频合成、实时处理

Canvas提供了强大的视频处理能力,可以实现实时视频滤镜、绿幕抠像、视频合成等高级功能。本章将深入探讨Canvas视频处理的各种技术。
// Wraps a <video> element plus a hidden mirror <canvas> used for frame
// grabs and per-frame pixel processing. Mutating methods return `this`
// so calls can be chained.
class VideoPlayer {
  constructor(container, options = {}) {
    this.container = typeof container === 'string'
      ? document.querySelector(container)
      : container
    this.options = {
      width: 640,
      height: 360,
      autoplay: false,
      loop: false,
      muted: false,
      ...options
    }
    this.video = null
    this.canvas = null
    this.ctx = null
    this.isPlaying = false
    this.animationId = null
    this.init()
  }
  init() {
    this.createVideo()
    this.createCanvas()
    this.setupEvents()
  }
  // Build the <video> element and attach it to the container.
  createVideo() {
    this.video = document.createElement('video')
    this.video.width = this.options.width
    this.video.height = this.options.height
    this.video.autoplay = this.options.autoplay
    this.video.loop = this.options.loop
    this.video.muted = this.options.muted
    this.video.playsInline = true
    // Needed so getImageData/toDataURL work with CORS-enabled remote sources.
    this.video.crossOrigin = 'anonymous'
    this.container.appendChild(this.video)
  }
  // The canvas mirrors the video for pixel access; it stays hidden.
  createCanvas() {
    this.canvas = document.createElement('canvas')
    this.canvas.width = this.options.width
    this.canvas.height = this.options.height
    this.canvas.style.display = 'none'
    this.ctx = this.canvas.getContext('2d')
    this.container.appendChild(this.canvas)
  }
  setupEvents() {
    this.video.addEventListener('play', () => {
      this.isPlaying = true
      this.render()
    })
    this.video.addEventListener('pause', () => {
      this.isPlaying = false
      this.cancelRender()
    })
    this.video.addEventListener('ended', () => {
      this.isPlaying = false
      // FIX: previously the rAF callback was only cancelled on 'pause',
      // leaving a stale callback scheduled when playback simply ended.
      this.cancelRender()
    })
    // Resize the mirror canvas once the real video dimensions are known.
    this.video.addEventListener('loadedmetadata', () => {
      this.canvas.width = this.video.videoWidth
      this.canvas.height = this.video.videoHeight
    })
  }
  // Cancel any pending render callback (safe to call when none is scheduled).
  cancelRender() {
    if (this.animationId) {
      cancelAnimationFrame(this.animationId)
      this.animationId = null
    }
  }
  // Accepts a URL string or a MediaStream (e.g. from getUserMedia).
  load(source) {
    if (typeof source === 'string') {
      this.video.src = source
    } else if (source instanceof MediaStream) {
      this.video.srcObject = source
    }
    return this
  }
  play() {
    this.video.play()
    return this
  }
  pause() {
    this.video.pause()
    return this
  }
  stop() {
    this.video.pause()
    this.video.currentTime = 0
    this.isPlaying = false
    return this
  }
  // Continuously mirror video frames onto the canvas while playing.
  render() {
    if (!this.isPlaying) return
    this.ctx.drawImage(this.video, 0, 0, this.canvas.width, this.canvas.height)
    this.animationId = requestAnimationFrame(() => this.render())
  }
  // Snapshot the current frame as raw RGBA pixel data.
  captureFrame() {
    this.ctx.drawImage(this.video, 0, 0, this.canvas.width, this.canvas.height)
    return this.ctx.getImageData(0, 0, this.canvas.width, this.canvas.height)
  }
  captureFrameAsDataURL(format = 'image/png', quality = 0.92) {
    this.ctx.drawImage(this.video, 0, 0, this.canvas.width, this.canvas.height)
    return this.canvas.toDataURL(format, quality)
  }
  captureFrameAsBlob(format = 'image/png', quality = 0.92) {
    return new Promise((resolve, reject) => {
      this.ctx.drawImage(this.video, 0, 0, this.canvas.width, this.canvas.height)
      // FIX: toBlob yields null for an empty or tainted canvas; reject
      // instead of silently resolving with null.
      this.canvas.toBlob((blob) => {
        if (blob) {
          resolve(blob)
        } else {
          reject(new Error('Canvas toBlob produced no data'))
        }
      }, format, quality)
    })
  }
  getCurrentTime() {
    return this.video.currentTime
  }
  setCurrentTime(time) {
    this.video.currentTime = time
    return this
  }
  getDuration() {
    return this.video.duration
  }
  setPlaybackRate(rate) {
    this.video.playbackRate = rate
    return this
  }
  // Volume is clamped to the valid [0, 1] range.
  setVolume(volume) {
    this.video.volume = Math.max(0, Math.min(1, volume))
    return this
  }
  // Tear down: stop playback, release sources, detach the DOM nodes.
  destroy() {
    this.stop()
    this.cancelRender() // FIX: ensure no callback fires after the nodes are gone
    this.video.src = ''
    this.video.srcObject = null
    this.container.removeChild(this.video)
    this.container.removeChild(this.canvas)
  }
}
// Camera-backed VideoPlayer: streams getUserMedia into the wrapped <video>.
class CameraCapture extends VideoPlayer {
  constructor(container, options = {}) {
    super(container, {
      width: 640,
      height: 480,
      facingMode: 'user', // 'user' = front camera, 'environment' = rear
      frameRate: 30,
      ...options
    })
    this.stream = null
    this.track = null
  }
  // Request camera access and begin streaming. Resolves true on success,
  // false when permission is denied or no matching device exists.
  async start() {
    try {
      const constraints = {
        video: {
          width: { ideal: this.options.width },
          height: { ideal: this.options.height },
          facingMode: this.options.facingMode,
          frameRate: { ideal: this.options.frameRate }
        },
        audio: false
      }
      this.stream = await navigator.mediaDevices.getUserMedia(constraints)
      this.track = this.stream.getVideoTracks()[0]
      this.video.srcObject = this.stream
      await this.video.play()
      return true
    } catch (error) {
      console.error('摄像头启动失败:', error)
      return false
    }
  }
  stop() {
    if (this.stream) {
      this.stream.getTracks().forEach(track => track.stop())
      this.stream = null
      // FIX: clear the stale reference so getSettings()/getCapabilities()
      // don't report a stopped track after the camera is shut down.
      this.track = null
    }
    super.stop()
  }
  // Toggle between front and rear cameras. On failure the previous facing
  // mode is restored so a later start() targets the camera that worked.
  async switchCamera() {
    const previousFacing = this.options.facingMode
    this.options.facingMode = previousFacing === 'user' ? 'environment' : 'user'
    this.stop()
    const ok = await this.start()
    if (!ok) {
      this.options.facingMode = previousFacing
    }
    return ok
  }
  getSettings() {
    return this.track ? this.track.getSettings() : null
  }
  getCapabilities() {
    return this.track ? this.track.getCapabilities() : null
  }
  async applyConstraints(constraints) {
    if (this.track) {
      await this.track.applyConstraints(constraints)
    }
  }
  // Snapshot helpers delegating to the VideoPlayer capture methods.
  takePhoto(format = 'image/png', quality = 0.92) {
    return this.captureFrameAsDataURL(format, quality)
  }
  async takePhotoAsBlob(format = 'image/png', quality = 0.92) {
    return this.captureFrameAsBlob(format, quality)
  }
}
// Runs an ordered chain of pixel filters over a VideoPlayer's frames.
// Filters are objects exposing apply(imageData) and are kept in a Map
// keyed by name, so they can be replaced or removed individually.
class VideoFilterProcessor {
  constructor(videoPlayer) {
    this.player = videoPlayer
    this.filters = new Map() // name -> filter; insertion order = apply order
    this.enabled = true
  }
  // Register a named filter (replaces any existing filter of that name).
  addFilter(name, filter) {
    this.filters.set(name, filter)
    return this
  }
  removeFilter(name) {
    this.filters.delete(name)
    return this
  }
  clearFilters() {
    this.filters.clear()
    return this
  }
  // Pipe imageData through every filter in insertion order; when the
  // processor is disabled the input passes through untouched.
  applyFilters(imageData) {
    if (!this.enabled) return imageData
    let result = imageData
    for (const filter of this.filters.values()) {
      result = filter.apply(result)
    }
    return result
  }
  // Per-frame loop: draw the current video frame, run the chain, repeat.
  render() {
    if (!this.player.isPlaying) return
    const { ctx, canvas, video } = this.player
    ctx.drawImage(video, 0, 0, canvas.width, canvas.height)
    if (this.filters.size > 0) {
      const frame = ctx.getImageData(0, 0, canvas.width, canvas.height)
      ctx.putImageData(this.applyFilters(frame), 0, 0)
    }
    requestAnimationFrame(() => this.render())
  }
}
// Desaturates pixels toward their Rec.601 luma; `intensity` in [0, 1]
// blends between the original colour (0) and full grayscale (1).
class GrayscaleFilter {
  constructor(intensity = 1) {
    this.intensity = intensity
  }
  // Mutates and returns imageData; alpha is untouched.
  apply(imageData) {
    const px = imageData.data
    const t = this.intensity
    for (let p = 0; p < px.length; p += 4) {
      const luma = px[p] * 0.299 + px[p + 1] * 0.587 + px[p + 2] * 0.114
      for (let c = 0; c < 3; c++) {
        px[p + c] = px[p + c] + (luma - px[p + c]) * t
      }
    }
    return imageData
  }
}
// Applies the classic sepia tone matrix; `intensity` in [0, 1] blends
// between the original pixel (0) and the full sepia tone (1).
class SepiaFilter {
  constructor(intensity = 1) {
    this.intensity = intensity
  }
  // Mutates and returns imageData; alpha is untouched.
  apply(imageData) {
    const px = imageData.data
    const amt = this.intensity
    for (let p = 0; p < px.length; p += 4) {
      const red = px[p]
      const green = px[p + 1]
      const blue = px[p + 2]
      // Standard sepia coefficients, each channel clamped to 255.
      const toneR = Math.min(255, red * 0.393 + green * 0.769 + blue * 0.189)
      const toneG = Math.min(255, red * 0.349 + green * 0.686 + blue * 0.168)
      const toneB = Math.min(255, red * 0.272 + green * 0.534 + blue * 0.131)
      px[p] = red + (toneR - red) * amt
      px[p + 1] = green + (toneG - green) * amt
      px[p + 2] = blue + (toneB - blue) * amt
    }
    return imageData
  }
}
// Blends each RGB channel toward its negative (255 - v); `intensity` 1
// gives a full negative, 0 leaves the image unchanged.
class InvertFilter {
  constructor(intensity = 1) {
    this.intensity = intensity
  }
  // Mutates and returns imageData; alpha is untouched.
  apply(imageData) {
    const px = imageData.data
    const t = this.intensity
    for (let p = 0; p < px.length; p += 4) {
      for (let c = 0; c < 3; c++) {
        const v = px[p + c]
        // v + (255 - 2v)*t interpolates linearly between v and 255 - v.
        px[p + c] = v + (255 - v * 2) * t
      }
    }
    return imageData
  }
}
// Adds a uniform offset to RGB. `value` is expected in [-1, 1] and is
// scaled to the 0-255 range; results are clamped.
class BrightnessFilter {
  constructor(value = 0) {
    this.value = value
  }
  // Mutates and returns imageData; alpha is untouched.
  apply(imageData) {
    const px = imageData.data
    const offset = this.value * 255
    const clamp = (v) => Math.max(0, Math.min(255, v))
    for (let p = 0; p < px.length; p += 4) {
      px[p] = clamp(px[p] + offset)
      px[p + 1] = clamp(px[p + 1] + offset)
      px[p + 2] = clamp(px[p + 2] + offset)
    }
    return imageData
  }
}
// Adjusts contrast. `value` is the UI scale used by the demo sliders:
// 1 = neutral (no change), 0 = flat gray, 2 = maximum contrast.
// Internally mapped to the standard contrast parameter C in [-255, 255]
// via C = (value - 1) * 255, then factor = 259(C + 255) / (255(259 - C)).
class ContrastFilter {
  constructor(value = 1) {
    this.value = value
  }
  // Mutates and returns imageData; alpha is untouched.
  apply(imageData) {
    const data = imageData.data
    // BUG FIX: the previous code fed `this.value * 255` straight into the
    // factor formula, so the neutral slider position (value = 1) applied
    // near-maximum contrast instead of leaving the image unchanged.
    const c = (this.value - 1) * 255
    const factor = (259 * (c + 255)) / (255 * (259 - c))
    for (let i = 0; i < data.length; i += 4) {
      data[i] = Math.max(0, Math.min(255, factor * (data[i] - 128) + 128))
      data[i + 1] = Math.max(0, Math.min(255, factor * (data[i + 1] - 128) + 128))
      data[i + 2] = Math.max(0, Math.min(255, factor * (data[i + 2] - 128) + 128))
    }
    return imageData
  }
}
// Scales chroma around the Rec.601 luma: `value` 1 = unchanged,
// 0 = grayscale, values above 1 boost saturation.
class SaturationFilter {
  constructor(value = 1) {
    this.value = value
  }
  // Mutates and returns imageData; alpha is untouched.
  apply(imageData) {
    const px = imageData.data
    const s = this.value
    const clamp = (v) => Math.max(0, Math.min(255, v))
    for (let p = 0; p < px.length; p += 4) {
      const luma = px[p] * 0.299 + px[p + 1] * 0.587 + px[p + 2] * 0.114
      px[p] = clamp(luma + (px[p] - luma) * s)
      px[p + 1] = clamp(luma + (px[p + 1] - luma) * s)
      px[p + 2] = clamp(luma + (px[p + 2] - luma) * s)
    }
    return imageData
  }
}
// Rotates hue by `degrees` using the hue-rotation colour matrix
// (the same coefficients the CSS `hue-rotate()` filter is defined with).
class HueRotateFilter {
  constructor(degrees = 0) {
    this.degrees = degrees
  }
  // Mutates and returns imageData; alpha is untouched.
  apply(imageData) {
    const px = imageData.data
    const rad = this.degrees * Math.PI / 180
    const cos = Math.cos(rad)
    const sin = Math.sin(rad)
    // Row-major 3x3 matrix applied to (r, g, b).
    const m = [
      0.213 + cos * 0.787 - sin * 0.213, 0.715 - cos * 0.715 - sin * 0.715, 0.072 - cos * 0.072 + sin * 0.928,
      0.213 - cos * 0.213 + sin * 0.143, 0.715 + cos * 0.285 + sin * 0.140, 0.072 - cos * 0.072 - sin * 0.283,
      0.213 - cos * 0.213 - sin * 0.787, 0.715 - cos * 0.715 + sin * 0.715, 0.072 + cos * 0.928 + sin * 0.072
    ]
    for (let p = 0; p < px.length; p += 4) {
      const r = px[p]
      const g = px[p + 1]
      const b = px[p + 2]
      for (let row = 0; row < 3; row++) {
        const v = r * m[row * 3] + g * m[row * 3 + 1] + b * m[row * 3 + 2]
        px[p + row] = Math.max(0, Math.min(255, v))
      }
    }
    return imageData
  }
}
// Box blur: each RGB channel becomes the mean of all in-bounds neighbours
// within `radius` pixels. Alpha is left untouched. O(width * height * r^2).
class BlurFilter {
  constructor(radius = 3) {
    this.radius = radius
  }
  // Mutates and returns imageData, reading from an unblurred snapshot.
  apply(imageData) {
    const { width, height, data } = imageData
    const r = this.radius
    const source = new Uint8ClampedArray(data) // snapshot of original pixels
    for (let row = 0; row < height; row++) {
      for (let col = 0; col < width; col++) {
        let sumR = 0
        let sumG = 0
        let sumB = 0
        let samples = 0
        for (let oy = -r; oy <= r; oy++) {
          const ny = row + oy
          if (ny < 0 || ny >= height) continue
          for (let ox = -r; ox <= r; ox++) {
            const nx = col + ox
            if (nx < 0 || nx >= width) continue
            const p = (ny * width + nx) * 4
            sumR += source[p]
            sumG += source[p + 1]
            sumB += source[p + 2]
            samples++
          }
        }
        const out = (row * width + col) * 4
        data[out] = sumR / samples
        data[out + 1] = sumG / samples
        data[out + 2] = sumB / samples
      }
    }
    return imageData
  }
}
// Sharpens via a 3x3 Laplacian-style kernel, blending the convolved value
// with the original by `amount`. Border pixels are left unmodified.
class SharpenFilter {
  constructor(amount = 1) {
    this.amount = amount
  }
  // Mutates and returns imageData, reading from an unsharpened snapshot.
  apply(imageData) {
    const { width, height, data } = imageData
    const source = new Uint8ClampedArray(data)
    const KERNEL = [0, -1, 0, -1, 5, -1, 0, -1, 0]
    for (let y = 1; y < height - 1; y++) {
      for (let x = 1; x < width - 1; x++) {
        for (let channel = 0; channel < 3; channel++) {
          let acc = 0
          for (let ky = -1; ky <= 1; ky++) {
            for (let kx = -1; kx <= 1; kx++) {
              const p = ((y + ky) * width + (x + kx)) * 4 + channel
              acc += source[p] * KERNEL[(ky + 1) * 3 + (kx + 1)]
            }
          }
          const p = (y * width + x) * 4 + channel
          data[p] = Math.max(0, Math.min(255, source[p] + (acc - source[p]) * this.amount))
        }
      }
    }
    return imageData
  }
}
// Darkens pixels toward the frame edges. The falloff is quadratic in the
// distance from the centre, scaled by `radius` (fraction of the diagonal)
// and `intensity` (darkening strength).
class VignetteFilter {
  constructor(intensity = 0.5, radius = 0.8) {
    this.intensity = intensity
    this.radius = radius
  }
  // Mutates and returns imageData; alpha is untouched.
  apply(imageData) {
    const { width, height, data } = imageData
    const centerX = width / 2
    const centerY = height / 2
    const maxDist = Math.sqrt(centerX * centerX + centerY * centerY)
    for (let y = 0; y < height; y++) {
      const dy = y - centerY
      for (let x = 0; x < width; x++) {
        const dx = x - centerX
        const dist = Math.sqrt(dx * dx + dy * dy)
        const shade = 1 - Math.pow(dist / (maxDist * this.radius), 2) * this.intensity
        const p = (y * width + x) * 4
        data[p] *= shade
        data[p + 1] *= shade
        data[p + 2] *= shade
      }
    }
    return imageData
  }
}
// Green-screen keyer: makes pixels near a key colour fully transparent,
// feathers alpha in a soft band just outside the threshold, and can
// suppress residual colour spill on those edge pixels.
class ChromaKey {
  constructor(options = {}) {
    this.options = {
      keyColor: { r: 0, g: 255, b: 0 },
      threshold: 40,      // RGB distance below which a pixel is fully keyed
      softness: 10,       // width of the feathered alpha band
      spillRemoval: true,
      spillThreshold: 50,
      ...options
    }
  }
  setKeyColor(r, g, b) {
    this.options.keyColor = { r, g, b }
    return this
  }
  setThreshold(threshold) {
    this.options.threshold = threshold
    return this
  }
  setSoftness(softness) {
    this.options.softness = softness
    return this
  }
  // Sample the key colour from a single pixel of `imageData`.
  pickKeyColor(imageData, x, y) {
    const base = (y * imageData.width + x) * 4
    this.options.keyColor = {
      r: imageData.data[base],
      g: imageData.data[base + 1],
      b: imageData.data[base + 2]
    }
    return this
  }
  // Key out matching pixels in place and return the mutated imageData.
  apply(imageData) {
    const px = imageData.data
    const { keyColor, threshold, softness, spillRemoval, spillThreshold } = this.options
    for (let p = 0; p < px.length; p += 4) {
      const dr = px[p] - keyColor.r
      const dg = px[p + 1] - keyColor.g
      const db = px[p + 2] - keyColor.b
      const distance = Math.sqrt(dr * dr + dg * dg + db * db)
      if (distance < threshold) {
        // Inside the hard threshold: fully transparent.
        px[p + 3] = 0
        continue
      }
      if (distance < threshold + softness) {
        // Soft band: feather alpha and optionally clean up spill.
        px[p + 3] = Math.round(((distance - threshold) / softness) * 255)
        if (spillRemoval) {
          this.removeSpill(px, p, keyColor, spillThreshold)
        }
      }
    }
    return imageData
  }
  // Reduce green spill at pixel offset `i`: the more green dominates the
  // other channels, the more it is desaturated and pushed toward white.
  removeSpill(data, i, keyColor, threshold) {
    const red = data[i]
    const green = data[i + 1]
    const blue = data[i + 2]
    const keyMatch = Math.min(1, Math.max(0, (green - Math.max(red, blue)) / threshold))
    if (keyMatch > 0) {
      const keep = 1 - keyMatch * 0.5
      data[i] = Math.min(255, red + (255 - red) * keyMatch * 0.3)
      data[i + 1] = green * keep
      data[i + 2] = blue * keep
    }
  }
  // Estimate the key colour as the average of a sample region
  // (defaults to a patch near the top-left corner of the frame).
  autoDetectKeyColor(imageData, sampleRegion = null) {
    const { data, width, height } = imageData
    const region = sampleRegion || {
      x: Math.floor(width * 0.1),
      y: Math.floor(height * 0.1),
      width: Math.floor(width * 0.2),
      height: Math.floor(height * 0.2)
    }
    let sumR = 0
    let sumG = 0
    let sumB = 0
    let n = 0
    for (let row = region.y; row < region.y + region.height; row++) {
      for (let col = region.x; col < region.x + region.width; col++) {
        const p = (row * width + col) * 4
        sumR += data[p]
        sumG += data[p + 1]
        sumB += data[p + 2]
        n++
      }
    }
    this.options.keyColor = {
      r: Math.round(sumR / n),
      g: Math.round(sumG / n),
      b: Math.round(sumB / n)
    }
    return this
  }
}
// Composites multiple layers (video / image / canvas / solid colour, with
// optional chroma-key) onto a destination canvas in z-index order.
class VideoCompositor {
  constructor(canvas, options = {}) {
    this.canvas = canvas
    this.ctx = canvas.getContext('2d')
    this.options = {
      width: canvas.width,
      height: canvas.height,
      backgroundColor: '#000000',
      ...options
    }
    this.layers = []
    this.isPlaying = false
    this.animationId = null
    // Scratch canvas reused by renderChromaKeyLayer (created lazily).
    this._keyCanvas = null
    this._keyCtx = null
  }
  // Layers without an explicit zIndex stack in insertion order.
  addLayer(layer) {
    layer.zIndex = layer.zIndex || this.layers.length
    this.layers.push(layer)
    this.layers.sort((a, b) => a.zIndex - b.zIndex)
    return this
  }
  removeLayer(id) {
    this.layers = this.layers.filter(l => l.id !== id)
    return this
  }
  clearLayers() {
    this.layers = []
    return this
  }
  start() {
    this.isPlaying = true
    this.render()
  }
  stop() {
    this.isPlaying = false
    if (this.animationId) {
      cancelAnimationFrame(this.animationId)
    }
  }
  // Clear to the background colour, then paint every layer bottom-up.
  render() {
    if (!this.isPlaying) return
    this.ctx.fillStyle = this.options.backgroundColor
    this.ctx.fillRect(0, 0, this.canvas.width, this.canvas.height)
    this.layers.forEach(layer => {
      this.renderLayer(layer)
    })
    this.animationId = requestAnimationFrame(() => this.render())
  }
  // Apply per-layer opacity/blend/transform, then dispatch on layer type.
  renderLayer(layer) {
    const ctx = this.ctx
    ctx.save()
    ctx.globalAlpha = layer.opacity !== undefined ? layer.opacity : 1
    ctx.globalCompositeOperation = layer.blendMode || 'source-over'
    if (layer.transform) {
      const { x = 0, y = 0, rotation = 0, scaleX = 1, scaleY = 1 } = layer.transform
      ctx.translate(x, y)
      ctx.rotate(rotation * Math.PI / 180) // degrees -> radians
      ctx.scale(scaleX, scaleY)
    }
    if (layer.chromaKey) {
      this.renderChromaKeyLayer(layer)
    } else if (layer.type === 'video') {
      this.renderVideoLayer(layer)
    } else if (layer.type === 'image') {
      this.renderImageLayer(layer)
    } else if (layer.type === 'canvas') {
      this.renderCanvasLayer(layer)
    } else if (layer.type === 'color') {
      this.renderColorLayer(layer)
    }
    ctx.restore()
  }
  renderVideoLayer(layer) {
    const { source, x = 0, y = 0, width, height } = layer
    // readyState >= 2 (HAVE_CURRENT_DATA): a frame is available to draw.
    if (source.readyState >= 2) {
      this.ctx.drawImage(
        source,
        x, y,
        width || source.videoWidth,
        height || source.videoHeight
      )
    }
  }
  renderImageLayer(layer) {
    const { source, x = 0, y = 0, width, height } = layer
    this.ctx.drawImage(
      source,
      x, y,
      width || source.width,
      height || source.height
    )
  }
  renderCanvasLayer(layer) {
    const { source, x = 0, y = 0, width, height } = layer
    this.ctx.drawImage(
      source,
      x, y,
      width || source.width,
      height || source.height
    )
  }
  renderColorLayer(layer) {
    const { color, x = 0, y = 0, width, height } = layer
    this.ctx.fillStyle = color
    this.ctx.fillRect(x, y, width, height)
  }
  // Draw a video layer through its ChromaKey filter.
  renderChromaKeyLayer(layer) {
    const { source, chromaKey, x = 0, y = 0, width, height } = layer
    if (source.readyState < 2) return
    const w = width || source.videoWidth
    const h = height || source.videoHeight
    // FIX: reuse a single scratch canvas across frames; the old code
    // allocated a fresh canvas + context on every rendered frame.
    if (!this._keyCanvas) {
      this._keyCanvas = document.createElement('canvas')
      this._keyCtx = this._keyCanvas.getContext('2d')
    }
    if (this._keyCanvas.width !== w) this._keyCanvas.width = w
    if (this._keyCanvas.height !== h) this._keyCanvas.height = h
    this._keyCtx.drawImage(source, 0, 0, w, h)
    const imageData = this._keyCtx.getImageData(0, 0, w, h)
    chromaKey.apply(imageData)
    this._keyCtx.putImageData(imageData, 0, 0)
    this.ctx.drawImage(this._keyCanvas, x, y)
  }
  exportFrame(format = 'image/png', quality = 0.92) {
    return this.canvas.toDataURL(format, quality)
  }
  async exportFrameAsBlob(format = 'image/png', quality = 0.92) {
    return new Promise((resolve) => {
      this.canvas.toBlob(resolve, format, quality)
    })
  }
}
// Records a canvas, <video> element or MediaStream through MediaRecorder.
// Usage: await init(), then start()/stop(); stop() resolves with the Blob.
class VideoRecorder {
  constructor(source, options = {}) {
    this.source = source
    this.options = {
      mimeType: 'video/webm;codecs=vp9', // requested; actual type negotiated in init()
      videoBitsPerSecond: 2500000,
      audioBitsPerSecond: 128000,
      ...options
    }
    this.mediaRecorder = null
    this.chunks = []
    this.isRecording = false
    this.startTime = 0
    // Mime type the recorder actually runs with (set by init()).
    this.activeMimeType = null
    this.onDataAvailable = null // optional per-chunk callback
    this.onStop = null          // optional finished-blob callback
  }
  // Build the MediaRecorder from whichever source type was provided.
  // Throws when no capturable stream can be obtained.
  async init() {
    let stream
    if (this.source instanceof HTMLCanvasElement) {
      stream = this.source.captureStream(30)
    } else if (this.source instanceof HTMLVideoElement) {
      if (this.source.captureStream) {
        stream = this.source.captureStream()
      } else if (this.source.mozCaptureStream) {
        // Firefox-prefixed fallback.
        stream = this.source.mozCaptureStream()
      }
    } else if (this.source instanceof MediaStream) {
      stream = this.source
    }
    if (!stream) {
      throw new Error('无法创建媒体流')
    }
    const mimeType = this.getSupportedMimeType()
    this.activeMimeType = mimeType
    this.mediaRecorder = new MediaRecorder(stream, {
      ...this.options,
      mimeType
    })
    this.mediaRecorder.ondataavailable = (e) => {
      if (e.data.size > 0) {
        this.chunks.push(e.data)
        if (this.onDataAvailable) {
          this.onDataAvailable(e.data)
        }
      }
    }
    this.mediaRecorder.onstop = () => {
      if (this.onStop) {
        this.onStop(this.getBlob())
      }
    }
  }
  // First browser-supported candidate, best codec first.
  getSupportedMimeType() {
    const types = [
      'video/webm;codecs=vp9,opus',
      'video/webm;codecs=vp9',
      'video/webm;codecs=vp8,opus',
      'video/webm;codecs=vp8',
      'video/webm',
      'video/mp4'
    ]
    for (const type of types) {
      if (MediaRecorder.isTypeSupported(type)) {
        return type
      }
    }
    return 'video/webm'
  }
  // Begin recording; `timeslice` (ms) controls how often chunks arrive.
  start(timeslice = 1000) {
    if (this.isRecording) return
    this.chunks = []
    this.startTime = Date.now()
    this.mediaRecorder.start(timeslice)
    this.isRecording = true
  }
  // Stop recording; resolves with the final Blob (or null if not recording).
  stop() {
    return new Promise((resolve) => {
      if (!this.isRecording) {
        resolve(null)
        return
      }
      this.onStop = (blob) => {
        this.isRecording = false
        resolve(blob)
      }
      this.mediaRecorder.stop()
    })
  }
  pause() {
    if (this.isRecording && this.mediaRecorder.state === 'recording') {
      this.mediaRecorder.pause()
    }
  }
  resume() {
    if (this.isRecording && this.mediaRecorder.state === 'paused') {
      this.mediaRecorder.resume()
    }
  }
  getBlob() {
    // FIX: label the Blob with the mime type the recorder actually used;
    // the requested options.mimeType may be unsupported and differ from
    // the real encoding, producing a mislabelled file.
    return new Blob(this.chunks, { type: this.activeMimeType || this.options.mimeType })
  }
  // Caller is responsible for revoking the returned object URL.
  getURL() {
    return URL.createObjectURL(this.getBlob())
  }
  // Wall-clock seconds since start(); paused time is included.
  getDuration() {
    return (Date.now() - this.startTime) / 1000
  }
  download(filename = 'recording.webm') {
    const url = this.getURL()
    const a = document.createElement('a')
    a.href = url
    a.download = filename
    a.click()
    URL.revokeObjectURL(url)
  }
}
// Captures canvas frames at a fixed rate as PNG data URLs, then encodes
// them to GIF (requires an external GIFEncoder global — not defined in
// this file) or WebM (by replaying the frames through MediaRecorder on
// the same canvas).
class FrameRecorder {
constructor(canvas, options = {}) {
this.canvas = canvas
this.options = {
format: 'webm',
frameRate: 30,
quality: 0.92,
...options
}
// NOTE(review): `quality` is passed to toDataURL('image/png', ...) in
// capture(), but PNG encoding ignores the quality argument — confirm
// whether 'image/jpeg'/'image/webp' was intended.
this.frames = [] // captured frames as data-URL strings
this.isRecording = false
this.encoder = null
}
// Begin a capture session; discards frames from any previous session.
start() {
this.frames = []
this.isRecording = true
this.lastCaptureTime = 0
this.frameInterval = 1000 / this.options.frameRate
}
// Call once per render tick; a frame is stored only when enough time has
// elapsed to honour the configured frame rate.
capture() {
if (!this.isRecording) return
const now = performance.now()
if (now - this.lastCaptureTime >= this.frameInterval) {
this.frames.push(this.canvas.toDataURL('image/png', this.options.quality))
this.lastCaptureTime = now
}
}
// Stop capturing and resolve with the encoded Blob (or null if empty).
async stop() {
this.isRecording = false
return this.encode()
}
// Dispatch to the configured encoder; unknown formats yield null.
async encode() {
if (this.frames.length === 0) return null
if (this.options.format === 'gif') {
return this.encodeGIF()
} else if (this.options.format === 'webm') {
return this.encodeWebM()
}
return null
}
// NOTE(review): both encoders below draw the stored frames back onto the
// source canvas, overwriting whatever is currently displayed on it.
async encodeGIF() {
const encoder = new GIFEncoder()
encoder.setRepeat(0) // 0 = loop forever
encoder.setDelay(1000 / this.options.frameRate)
encoder.start()
const ctx = this.canvas.getContext('2d')
for (const frame of this.frames) {
const img = await this.loadImage(frame)
ctx.drawImage(img, 0, 0)
encoder.addFrame(ctx)
}
encoder.finish()
return new Blob([encoder.output()], { type: 'image/gif' })
}
// Replays the captured frames on the canvas at the recording frame rate
// while MediaRecorder re-captures the canvas stream into a WebM Blob.
async encodeWebM() {
const stream = this.canvas.captureStream(this.options.frameRate)
const recorder = new MediaRecorder(stream, {
mimeType: 'video/webm',
videoBitsPerSecond: 5000000
})
const chunks = []
return new Promise((resolve) => {
recorder.ondataavailable = (e) => {
if (e.data.size > 0) {
chunks.push(e.data)
}
}
recorder.onstop = () => {
resolve(new Blob(chunks, { type: 'video/webm' }))
}
recorder.start()
let frameIndex = 0
// Draw the frames back one by one, paced by setTimeout; when the last
// frame has been shown the recorder is stopped and the promise settles.
const playFrames = () => {
if (frameIndex < this.frames.length) {
this.loadImage(this.frames[frameIndex]).then(img => {
const ctx = this.canvas.getContext('2d')
ctx.drawImage(img, 0, 0)
frameIndex++
setTimeout(playFrames, this.frameInterval)
})
} else {
recorder.stop()
}
}
playFrames()
})
}
// Decode a data URL back into an HTMLImageElement.
loadImage(src) {
return new Promise((resolve, reject) => {
const img = new Image()
img.onload = () => resolve(img)
img.onerror = reject
img.src = src
})
}
getFrameCount() {
return this.frames.length
}
// Nominal duration in seconds at the configured frame rate.
getDuration() {
return this.frames.length / this.options.frameRate
}
}
// Renders a VideoPlayer's frames as a grid of coloured particles, each
// sampling its colour from the video pixel at its original grid position,
// with optional per-particle motion effects (dissolve/explode/wave/gravity).
// Caller drives the loop: update() then render() once per frame; init()
// must be called after the player canvas has its final size.
class ParticleVideoEffect {
constructor(videoPlayer, options = {}) {
this.player = videoPlayer
this.options = {
particleSize: 2, // dot radius in pixels
particleGap: 4, // grid spacing between particles
effect: 'dissolve',
colorMode: 'original',
...options
}
this.particles = []
// Offscreen canvas used only to read video pixel data.
this.tempCanvas = document.createElement('canvas')
this.tempCtx = this.tempCanvas.getContext('2d')
}
init() {
this.tempCanvas.width = this.player.canvas.width
this.tempCanvas.height = this.player.canvas.height
this.createParticles()
}
// Lay out one particle per grid cell; originalX/Y stays fixed and is the
// video sampling position, x/y is the animated draw position.
createParticles() {
const { particleGap, particleSize } = this.options
const width = this.player.canvas.width
const height = this.player.canvas.height
this.particles = []
for (let y = 0; y < height; y += particleGap) {
for (let x = 0; x < width; x += particleGap) {
this.particles.push({
x: x,
y: y,
originalX: x,
originalY: y,
size: particleSize,
color: { r: 0, g: 0, b: 0 },
vx: 0,
vy: 0,
life: 1
})
}
}
}
// Sample the current video frame and refresh each particle's colour and
// position according to the active effect.
update() {
const ctx = this.tempCtx
const canvas = this.tempCanvas
ctx.drawImage(this.player.video, 0, 0, canvas.width, canvas.height)
const imageData = ctx.getImageData(0, 0, canvas.width, canvas.height)
const data = imageData.data
this.particles.forEach(particle => {
const idx = (Math.floor(particle.originalY) * canvas.width + Math.floor(particle.originalX)) * 4
particle.color.r = data[idx]
particle.color.g = data[idx + 1]
particle.color.b = data[idx + 2]
this.applyEffect(particle)
})
}
// Move one particle for the current frame; motion is cumulative for the
// velocity-based effects ('explode', 'gravity') and nondeterministic for
// 'dissolve' (random jitter).
applyEffect(particle) {
const { effect } = this.options
switch (effect) {
case 'dissolve':
particle.x += (Math.random() - 0.5) * 2
particle.y += (Math.random() - 0.5) * 2
break
case 'explode':
// Accelerate radially away from the canvas centre with mild damping.
const cx = this.player.canvas.width / 2
const cy = this.player.canvas.height / 2
const dx = particle.originalX - cx
const dy = particle.originalY - cy
const dist = Math.sqrt(dx * dx + dy * dy) || 1
particle.vx += dx / dist * 0.5
particle.vy += dy / dist * 0.5
particle.x += particle.vx
particle.y += particle.vy
particle.vx *= 0.98
particle.vy *= 0.98
break
case 'wave':
// Sinusoidal displacement around the home position, phased by time.
const time = Date.now() / 1000
particle.x = particle.originalX + Math.sin(particle.originalY / 20 + time) * 10
particle.y = particle.originalY + Math.cos(particle.originalX / 20 + time) * 10
break
case 'gravity':
// Fall with constant acceleration, wrapping back to the top edge.
particle.vy += 0.1
particle.y += particle.vy
if (particle.y > this.player.canvas.height) {
particle.y = 0
particle.vy = 0
}
break
default:
// Unknown effect: pin particles to their home positions.
particle.x = particle.originalX
particle.y = particle.originalY
}
}
// Draw all particles onto the player canvas over a black background.
render() {
const ctx = this.player.ctx
const { colorMode } = this.options
ctx.fillStyle = '#000000'
ctx.fillRect(0, 0, this.player.canvas.width, this.player.canvas.height)
this.particles.forEach(particle => {
ctx.beginPath()
ctx.arc(particle.x, particle.y, particle.size, 0, Math.PI * 2)
if (colorMode === 'original') {
ctx.fillStyle = `rgb(${particle.color.r}, ${particle.color.g}, ${particle.color.b})`
} else if (colorMode === 'grayscale') {
const gray = (particle.color.r + particle.color.g + particle.color.b) / 3
ctx.fillStyle = `rgb(${gray}, ${gray}, ${gray})`
} else if (colorMode === 'neon') {
// Map brightness to hue for a false-colour look.
const brightness = (particle.color.r + particle.color.g + particle.color.b) / 3
const hue = brightness / 255 * 360
ctx.fillStyle = `hsl(${hue}, 100%, 50%)`
}
ctx.fill()
})
}
setEffect(effect) {
this.options.effect = effect
this.reset()
}
// Return every particle to its home position with zero velocity.
reset() {
this.particles.forEach(particle => {
particle.x = particle.originalX
particle.y = particle.originalY
particle.vx = 0
particle.vy = 0
})
}
}
// Randomly-triggered "digital glitch" look: RGB fringing, scan lines,
// sparkle noise and horizontal row displacement. Bursts start with
// probability `frequency` per frame and last a random 5-15 frames.
class GlitchEffect {
constructor(videoPlayer, options = {}) {
this.player = videoPlayer
this.options = {
intensity: 0.5, // 0..1 overall strength
frequency: 0.1, // per-frame chance of starting a burst
rgbShift: true,
scanLines: true,
noise: true,
...options
}
this.glitchActive = false
this.glitchTimer = 0 // remaining frames in the current burst
}
// Mutates and returns imageData. Outside a burst the frame passes
// through untouched; output is nondeterministic (Math.random driven).
apply(imageData) {
if (Math.random() < this.options.frequency) {
this.glitchActive = true
this.glitchTimer = 5 + Math.random() * 10
}
if (!this.glitchActive) return imageData
this.glitchTimer--
if (this.glitchTimer <= 0) {
this.glitchActive = false
return imageData
}
const data = imageData.data
const width = imageData.width
const height = imageData.height
const intensity = this.options.intensity
if (this.options.rgbShift) {
this.applyRGBShift(data, width, height, intensity)
}
if (this.options.scanLines) {
this.applyScanLines(data, width, height)
}
if (this.options.noise) {
this.applyNoise(data, intensity)
}
this.applyHorizontalGlitch(data, width, height, intensity)
return imageData
}
// Shifts ONLY the red channel horizontally (by up to intensity*10 px),
// producing the chromatic-fringe look; green/blue stay in place.
applyRGBShift(data, width, height, intensity) {
const shift = Math.floor(intensity * 10)
const copy = new Uint8ClampedArray(data)
for (let y = 0; y < height; y++) {
for (let x = 0; x < width; x++) {
const idx = (y * width + x) * 4
const shiftedX = Math.min(width - 1, Math.max(0, x + shift))
const shiftedIdx = (y * width + shiftedX) * 4
data[idx] = copy[shiftedIdx]
}
}
}
// Darkens every other row by 20% to fake CRT scan lines.
applyScanLines(data, width, height) {
for (let y = 0; y < height; y += 2) {
for (let x = 0; x < width; x++) {
const idx = (y * width + x) * 4
data[idx] *= 0.8
data[idx + 1] *= 0.8
data[idx + 2] *= 0.8
}
}
}
// Adds random brightness noise to ~5% of pixels.
applyNoise(data, intensity) {
const noiseAmount = intensity * 50
for (let i = 0; i < data.length; i += 4) {
if (Math.random() < 0.05) {
const noise = (Math.random() - 0.5) * noiseAmount
data[i] += noise
data[i + 1] += noise
data[i + 2] += noise
}
}
}
// Displaces a few random horizontal bands sideways (tearing). Note this
// reads and writes `data` in place, so overlapping bands may compound.
applyHorizontalGlitch(data, width, height, intensity) {
const numGlitches = Math.floor(intensity * 10)
for (let i = 0; i < numGlitches; i++) {
const y = Math.floor(Math.random() * height)
const glitchHeight = Math.floor(Math.random() * 10) + 1
const offset = Math.floor((Math.random() - 0.5) * width * intensity * 0.5)
for (let dy = 0; dy < glitchHeight && y + dy < height; dy++) {
for (let x = 0; x < width; x++) {
const srcX = Math.min(width - 1, Math.max(0, x + offset))
const srcIdx = ((y + dy) * width + srcX) * 4
const dstIdx = ((y + dy) * width + x) * 4
data[dstIdx] = data[srcIdx]
data[dstIdx + 1] = data[srcIdx + 1]
data[dstIdx + 2] = data[srcIdx + 2]
}
}
}
}
// Both setters clamp their argument to [0, 1] and return `this`.
setIntensity(intensity) {
this.options.intensity = Math.max(0, Math.min(1, intensity))
return this
}
setFrequency(frequency) {
this.options.frequency = Math.max(0, Math.min(1, frequency))
return this
}
}
<!-- Demo page for the classes above: camera filters, chroma key, recording -->
<!DOCTYPE html>
<html lang="zh-CN">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Canvas 视频处理演示</title>
<style>
/* Dark theme page shell */
body {
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
margin: 0;
padding: 20px;
background: #1a1a2e;
color: #eee;
}
.container {
max-width: 1200px;
margin: 0 auto;
}
h1 {
text-align: center;
margin-bottom: 30px;
}
/* Two-column grid holding the demo cards */
.video-section {
display: grid;
grid-template-columns: 1fr 1fr;
gap: 20px;
margin-bottom: 20px;
}
.video-card {
background: #16213e;
border-radius: 8px;
padding: 20px;
}
.video-card h3 {
margin: 0 0 15px 0;
color: #e94560;
}
video, canvas {
width: 100%;
border-radius: 4px;
background: #000;
}
/* Button rows under each demo canvas */
.controls {
margin-top: 15px;
display: flex;
flex-wrap: wrap;
gap: 10px;
}
button {
padding: 8px 16px;
border: none;
border-radius: 4px;
background: #e94560;
color: white;
cursor: pointer;
font-size: 14px;
transition: background 0.3s;
}
button:hover {
background: #ff6b6b;
}
button.active {
background: #0f3460;
}
/* Label + range-input rows (brightness/contrast/etc.) */
.slider-group {
display: flex;
align-items: center;
gap: 10px;
margin-top: 10px;
}
.slider-group label {
min-width: 80px;
}
input[type="range"] {
flex: 1;
}
/* Grid of filter-selection buttons */
.filter-grid {
display: grid;
grid-template-columns: repeat(auto-fill, minmax(120px, 1fr));
gap: 10px;
margin-top: 15px;
}
.filter-btn {
padding: 10px;
text-align: center;
}
/* Chroma-key colour swatch */
.chroma-key-picker {
position: relative;
display: inline-block;
}
.color-preview {
width: 30px;
height: 30px;
border-radius: 4px;
border: 2px solid white;
cursor: pointer;
}
</style>
</head>
<body>
<div class="container">
<h1>Canvas 视频处理演示</h1>
<div class="video-section">
<!-- Card 1: camera preview with real-time filters and adjustment sliders -->
<div class="video-card">
<h3>摄像头 + 实时滤镜</h3>
<canvas id="filter-canvas"></canvas>
<div class="controls">
<button id="camera-btn" onclick="toggleCamera()">启动摄像头</button>
<button onclick="capturePhoto()">拍照</button>
</div>
<!-- Filter selection; names match the setFilter() switch in the script -->
<div class="filter-grid">
<button class="filter-btn" onclick="setFilter('none')">原图</button>
<button class="filter-btn" onclick="setFilter('grayscale')">灰度</button>
<button class="filter-btn" onclick="setFilter('sepia')">复古</button>
<button class="filter-btn" onclick="setFilter('invert')">反色</button>
<button class="filter-btn" onclick="setFilter('blur')">模糊</button>
<button class="filter-btn" onclick="setFilter('sharpen')">锐化</button>
<button class="filter-btn" onclick="setFilter('vignette')">暗角</button>
<button class="filter-btn" onclick="setFilter('glitch')">故障</button>
</div>
<div class="slider-group">
<label>亮度:</label>
<input type="range" id="brightness" min="-1" max="1" step="0.1" value="0" onchange="updateBrightness(this.value)">
</div>
<div class="slider-group">
<label>对比度:</label>
<input type="range" id="contrast" min="0" max="2" step="0.1" value="1" onchange="updateContrast(this.value)">
</div>
<div class="slider-group">
<label>饱和度:</label>
<input type="range" id="saturation" min="0" max="2" step="0.1" value="1" onchange="updateSaturation(this.value)">
</div>
</div>
<!-- Card 2: chroma-key (green screen) demo -->
<div class="video-card">
<h3>绿幕抠像</h3>
<canvas id="chroma-canvas"></canvas>
<div class="controls">
<button id="chroma-camera-btn" onclick="toggleChromaCamera()">启动摄像头</button>
<button onclick="toggleChromaKey()">启用抠像</button>
<div class="chroma-key-picker">
<div class="color-preview" id="key-color-preview" style="background: #00ff00"></div>
</div>
</div>
<div class="slider-group">
<label>阈值:</label>
<input type="range" id="threshold" min="10" max="100" value="40" onchange="updateThreshold(this.value)">
</div>
<div class="slider-group">
<label>柔和度:</label>
<input type="range" id="softness" min="0" max="50" value="10" onchange="updateSoftness(this.value)">
</div>
<div class="controls" style="margin-top: 15px;">
<button onclick="setBackground('none')">无背景</button>
<button onclick="setBackground('gradient')">渐变</button>
<button onclick="setBackground('image')">图片</button>
</div>
</div>
</div>
<!-- Card 3: recording the composited canvas to a downloadable video -->
<div class="video-card">
<h3>视频录制</h3>
<canvas id="record-canvas"></canvas>
<div class="controls">
<button id="record-btn" onclick="toggleRecording()">开始录制</button>
<button onclick="downloadRecording()">下载视频</button>
<span id="record-status"></span>
</div>
</div>
</div>
<script>
// --- Filter demo: shared state wired to the markup above -----------------
const filterCanvas = document.getElementById('filter-canvas')
const filterCtx = filterCanvas.getContext('2d')
filterCanvas.width = 640
filterCanvas.height = 480
let camera = null            // CameraCapture, created by toggleCamera()
let filterProcessor = null   // VideoFilterProcessor, created with the camera
let currentFilter = 'none'   // name of the most recently chosen filter
let glitchEffect = null      // GlitchEffect, created with the camera
// Start the camera + filter pipeline on first click, tear it down on the next.
async function toggleCamera() {
  const btn = document.getElementById('camera-btn')
  if (!camera) {
    camera = new CameraCapture(filterCanvas.parentElement, {
      width: 640,
      height: 480
    })
    await camera.start()
    filterProcessor = new VideoFilterProcessor(camera)
    glitchEffect = new GlitchEffect(camera, { intensity: 0.5, frequency: 0.1 })
    btn.textContent = '停止摄像头'
    renderFilterLoop()
  } else {
    // FIX: stop() only halted the tracks but left the <video>/<canvas>
    // nodes CameraCapture appended in the DOM, so every start/stop cycle
    // leaked a pair of elements; destroy() stops AND removes them.
    camera.destroy()
    camera = null
    btn.textContent = '启动摄像头'
  }
}
/**
 * Per-frame render loop for the filter demo: draw the webcam frame,
 * then run the selected pixel filter over it. Self-terminates when
 * the camera is stopped or paused.
 */
function renderFilterLoop() {
  if (!camera || !camera.isPlaying) return
  const { width, height } = filterCanvas
  filterCtx.drawImage(camera.video, 0, 0, width, height)
  const needsProcessing = currentFilter !== 'none' || filterProcessor.filters.size > 0
  if (needsProcessing) {
    let frame = filterCtx.getImageData(0, 0, width, height)
    // Glitch bypasses the regular filter pipeline and is applied directly.
    frame = currentFilter === 'glitch'
      ? glitchEffect.apply(frame)
      : filterProcessor.applyFilters(frame)
    filterCtx.putImageData(frame, 0, 0)
  }
  requestAnimationFrame(renderFilterLoop)
}
/**
 * Select the active filter by name ('none', 'glitch', or one of the
 * pipeline filters). Safe to call before the camera has started.
 * @param {string} filter - filter button identifier
 */
function setFilter(filter) {
  // Before toggleCamera() runs, filterProcessor is null and the original
  // clearFilters() call would throw a TypeError.
  if (!filterProcessor) return
  currentFilter = filter
  filterProcessor.clearFilters()
  // Lazy factories: only the selected filter gets constructed.
  const factories = {
    grayscale: () => new GrayscaleFilter(1),
    sepia: () => new SepiaFilter(1),
    invert: () => new InvertFilter(1),
    blur: () => new BlurFilter(3),
    sharpen: () => new SharpenFilter(1),
    vignette: () => new VignetteFilter(0.5, 0.8)
  }
  // 'none' and 'glitch' add nothing here; glitch is handled separately
  // in the render loop.
  const create = factories[filter]
  if (create) {
    filterProcessor.addFilter(filter, create())
  }
}
/**
 * Brightness slider handler. Slider values arrive as strings: parse once,
 * compare strictly, and skip the no-op filter at 0.
 * @param {string} value - raw slider value in [-1, 1]
 */
function updateBrightness(value) {
  if (!filterProcessor) return // camera not started yet
  const brightness = Number.parseFloat(value)
  filterProcessor.removeFilter('brightness')
  if (brightness !== 0) {
    filterProcessor.addFilter('brightness', new BrightnessFilter(brightness))
  }
}
/**
 * Contrast slider handler; replaces any existing contrast filter.
 * @param {string} value - raw slider value in [0, 2]
 */
function updateContrast(value) {
  if (!filterProcessor) return // camera not started yet
  filterProcessor.removeFilter('contrast')
  filterProcessor.addFilter('contrast', new ContrastFilter(Number.parseFloat(value)))
}
/**
 * Saturation slider handler; replaces any existing saturation filter.
 * @param {string} value - raw slider value in [0, 2]
 */
function updateSaturation(value) {
  if (!filterProcessor) return // camera not started yet
  filterProcessor.removeFilter('saturation')
  filterProcessor.addFilter('saturation', new SaturationFilter(Number.parseFloat(value)))
}
/**
 * Download the currently rendered (filtered) frame as a PNG.
 * Does nothing when the camera is not running.
 */
function capturePhoto() {
  if (!camera) return
  const link = document.createElement('a')
  link.href = filterCanvas.toDataURL('image/png')
  link.download = `photo_${Date.now()}.png`
  link.click()
}
// Green-screen chroma-key demo: canvas + shared mutable state.
const chromaCanvas = document.getElementById('chroma-canvas')
const chromaCtx = chromaCanvas.getContext('2d')
chromaCanvas.width = 640
chromaCanvas.height = 480
let chromaCamera = null      // CameraCapture instance while the webcam is running
let chromaKey = null         // ChromaKey processor created alongside the camera
let chromaEnabled = false    // whether keying is applied in renderChromaLoop
let backgroundImage = null   // replacement background Image, or null for the gradient
/**
 * Start or stop the chroma-key webcam and its render loop.
 * A failed start (permission denied, no device) resets the state so the
 * button remains usable for a retry.
 */
async function toggleChromaCamera() {
  const btn = document.getElementById('chroma-camera-btn')
  if (!chromaCamera) {
    chromaCamera = new CameraCapture(chromaCanvas.parentElement, {
      width: 640,
      height: 480
    })
    try {
      await chromaCamera.start()
    } catch (err) {
      // Without this, a rejected start leaves chromaCamera non-null and
      // the toggle permanently stuck in the "stop" branch.
      chromaCamera = null
      console.error('Failed to start camera', err)
      return
    }
    chromaKey = new ChromaKey({
      keyColor: { r: 0, g: 255, b: 0 }, // default key: pure green
      threshold: 40,
      softness: 10
    })
    btn.textContent = '停止摄像头'
    renderChromaLoop()
  } else {
    chromaCamera.stop()
    chromaCamera = null
    btn.textContent = '启动摄像头'
  }
}
/**
 * Per-frame chroma-key render loop: draw the camera frame, key out the
 * key color (chromaKey.apply presumably zeroes alpha on matched pixels —
 * confirm against ChromaKey), paint the background, then composite the
 * keyed frame on top.
 *
 * NOTE: putImageData REPLACES destination pixels outright — it performs
 * no alpha compositing — so putting the keyed frame directly over the
 * background (as the naive approach does) wipes the background out.
 * The keyed frame must go through an intermediate canvas and be drawn
 * with drawImage, which does composite.
 */
function renderChromaLoop() {
  if (!chromaCamera || !chromaCamera.isPlaying) return
  chromaCtx.drawImage(chromaCamera.video, 0, 0, chromaCanvas.width, chromaCanvas.height)
  if (chromaEnabled && chromaKey) {
    let imageData = chromaCtx.getImageData(0, 0, chromaCanvas.width, chromaCanvas.height)
    imageData = chromaKey.apply(imageData)
    // Lazily created scratch canvas, reused across frames.
    if (!renderChromaLoop.buffer) {
      renderChromaLoop.buffer = document.createElement('canvas')
    }
    const buffer = renderChromaLoop.buffer
    buffer.width = chromaCanvas.width
    buffer.height = chromaCanvas.height
    buffer.getContext('2d').putImageData(imageData, 0, 0)
    chromaCtx.clearRect(0, 0, chromaCanvas.width, chromaCanvas.height)
    if (backgroundImage) {
      chromaCtx.drawImage(backgroundImage, 0, 0, chromaCanvas.width, chromaCanvas.height)
    } else {
      const gradient = chromaCtx.createLinearGradient(0, 0, chromaCanvas.width, chromaCanvas.height)
      gradient.addColorStop(0, '#1a1a2e')
      gradient.addColorStop(1, '#16213e')
      chromaCtx.fillStyle = gradient
      chromaCtx.fillRect(0, 0, chromaCanvas.width, chromaCanvas.height)
    }
    // drawImage respects the alpha channel, so the background shows through.
    chromaCtx.drawImage(buffer, 0, 0)
  }
  requestAnimationFrame(renderChromaLoop)
}
// Flip the keying flag; renderChromaLoop reads it on every frame.
function toggleChromaKey() {
  chromaEnabled = !chromaEnabled
}
/**
 * Threshold slider handler. Slider values are strings; ChromaKey
 * expects a number. No-op until the camera (and chromaKey) exist.
 */
function updateThreshold(value) {
  if (!chromaKey) return
  chromaKey.setThreshold(Number.parseInt(value, 10))
}
/**
 * Softness slider handler. Slider values are strings; ChromaKey
 * expects a number. No-op until the camera (and chromaKey) exist.
 */
function updateSoftness(value) {
  if (!chromaKey) return
  chromaKey.setSoftness(Number.parseInt(value, 10))
}
/**
 * Select the background shown behind the keyed subject.
 * 'none' and 'gradient' both clear the image — renderChromaLoop paints
 * its built-in gradient whenever backgroundImage is null.
 * @param {'none'|'gradient'|'image'} type
 */
function setBackground(type) {
  if (type === 'none' || type === 'gradient') {
    backgroundImage = null
  } else if (type === 'image') {
    const img = new Image()
    // Without CORS opt-in, drawing this cross-origin image would taint
    // the canvas and make getImageData in renderChromaLoop throw a
    // SecurityError, breaking the keying loop entirely.
    img.crossOrigin = 'anonymous'
    img.onload = () => {
      backgroundImage = img
    }
    img.onerror = () => {
      console.error('Failed to load background image')
    }
    img.src = 'https://picsum.photos/640/480'
  }
}
// Recording demo: canvas + shared mutable state for the recorder card.
const recordCanvas = document.getElementById('record-canvas')
const recordCtx = recordCanvas.getContext('2d')
recordCanvas.width = 640
recordCanvas.height = 480
let recorder = null        // VideoRecorder instance once recording has started
let isRecording = false    // drives the REC overlay and the toggle button
let animationTime = 0      // monotonically increasing animation clock
/**
 * Self-scheduling demo animation that is captured by the recorder:
 * five orbiting, pulsing, hue-cycling circles, a title, and a REC
 * indicator while recording is active.
 */
function renderRecordCanvas() {
  animationTime += 0.02
  const { width, height } = recordCanvas
  // Dark backdrop.
  recordCtx.fillStyle = '#1a1a2e'
  recordCtx.fillRect(0, 0, width, height)
  // Orbiting circles, phase-offset per index.
  for (let i = 0; i < 5; i++) {
    const cx = width / 2 + Math.cos(animationTime + i * 0.5) * 150
    const cy = height / 2 + Math.sin(animationTime * 1.5 + i * 0.5) * 100
    const r = 20 + Math.sin(animationTime + i) * 10
    recordCtx.fillStyle = `hsl(${(i * 60 + animationTime * 30) % 360}, 70%, 50%)`
    recordCtx.beginPath()
    recordCtx.arc(cx, cy, r, 0, Math.PI * 2)
    recordCtx.fill()
  }
  // Title.
  recordCtx.fillStyle = '#e94560'
  recordCtx.font = '24px sans-serif'
  recordCtx.textAlign = 'center'
  recordCtx.fillText('Canvas 动画录制演示', width / 2, 50)
  // Red dot + label while capturing.
  if (isRecording) {
    recordCtx.fillStyle = '#ff0000'
    recordCtx.beginPath()
    recordCtx.arc(30, 30, 10, 0, Math.PI * 2)
    recordCtx.fill()
    recordCtx.fillStyle = '#ffffff'
    recordCtx.font = '14px sans-serif'
    recordCtx.textAlign = 'left'
    recordCtx.fillText('REC', 50, 35)
  }
  requestAnimationFrame(renderRecordCanvas)
}
renderRecordCanvas()
/**
 * Start or stop canvas recording. A failure while setting up the
 * recorder resets the state (instead of leaving the UI stuck mid-toggle
 * with an unhandled rejection) so the user can retry.
 */
async function toggleRecording() {
  const btn = document.getElementById('record-btn')
  const status = document.getElementById('record-status')
  if (!isRecording) {
    try {
      recorder = new VideoRecorder(recordCanvas)
      await recorder.init()
      recorder.start()
    } catch (err) {
      recorder = null
      status.textContent = '录制启动失败'
      console.error('Failed to start recording', err)
      return
    }
    isRecording = true
    btn.textContent = '停止录制'
    btn.classList.add('active')
    status.textContent = '录制中...'
  } else {
    const blob = await recorder.stop()
    isRecording = false
    btn.textContent = '开始录制'
    btn.classList.remove('active')
    status.textContent = `录制完成: ${(blob.size / 1024 / 1024).toFixed(2)} MB`
  }
}
/**
 * Download the last finished recording as a timestamped WebM file.
 * No-op when nothing has been recorded yet.
 */
function downloadRecording() {
  if (!recorder) return
  recorder.download(`recording_${Date.now()}.webm`)
}
</script>
</body>
</html>
class OptimizedVideoProcessor {
  /**
   * Canvas video-frame processor with optional frame skipping, input
   * downscaling and Web Worker offloading.
   *
   * @param {HTMLCanvasElement} canvas - destination canvas
   * @param {Object} [options]
   * @param {boolean} [options.useWebWorker=false] - offload pixel work to a worker
   * @param {number} [options.skipFrames=1] - process 1 of every (skipFrames + 1) frames
   * @param {number} [options.downscale=1] - linear divisor for getDownscaledData
   */
  constructor(canvas, options = {}) {
    this.canvas = canvas
    // willReadFrequently keeps the backing store readable without repeated
    // GPU readback; desynchronized lowers presentation latency.
    this.ctx = canvas.getContext('2d', {
      alpha: false,
      desynchronized: true,
      willReadFrequently: true
    })
    this.options = {
      useWebWorker: false,
      skipFrames: 1,
      downscale: 1,
      ...options
    }
    this.frameCount = 0
    this.worker = null
    this.offscreenCanvas = null
    // Scratch canvas/context for getDownscaledData, created lazily and
    // cached (the context was previously re-fetched on every frame).
    this.tempCanvas = null
    this.tempCtx = null
  }

  // Create an OffscreenCanvas mirror of the target canvas when supported.
  setupOffscreen() {
    if (typeof OffscreenCanvas !== 'undefined') {
      this.offscreenCanvas = new OffscreenCanvas(
        this.canvas.width,
        this.canvas.height
      )
      this.offscreenCtx = this.offscreenCanvas.getContext('2d')
    }
  }

  /**
   * Spawn the processing worker. The worker is expected to post back
   * { type: 'processed', imageData } messages, which are painted onto
   * the destination canvas.
   */
  setupWorker(workerScript) {
    if (this.options.useWebWorker && typeof Worker !== 'undefined') {
      this.worker = new Worker(workerScript)
      this.worker.onmessage = (e) => {
        if (e.data.type === 'processed') {
          this.ctx.putImageData(e.data.imageData, 0, 0)
        }
      }
    }
  }

  // True on every (skipFrames + 1)-th call; used to throttle heavy work.
  shouldProcessFrame() {
    this.frameCount++
    return this.frameCount % (this.options.skipFrames + 1) === 0
  }

  // Post a frame to the worker; returns false when no worker is set up.
  processWithWorker(imageData) {
    if (this.worker) {
      this.worker.postMessage({
        type: 'process',
        imageData: imageData
      })
      return true
    }
    return false
  }

  /**
   * Grab a downscaled ImageData snapshot of `video`.
   * Fixes vs. the naive version: the scratch 2d context is created once
   * (with willReadFrequently) instead of per call; canvas dimensions are
   * only touched when they change (resizing clears a canvas); and a
   * zero/negative downscale or missing video metadata no longer produces
   * Infinity/zero dimensions that make getImageData throw.
   */
  getDownscaledData(video) {
    const downscale = this.options.downscale > 0 ? this.options.downscale : 1
    const width = Math.max(1, Math.floor(video.videoWidth / downscale))
    const height = Math.max(1, Math.floor(video.videoHeight / downscale))
    if (!this.tempCanvas) {
      this.tempCanvas = document.createElement('canvas')
      this.tempCtx = this.tempCanvas.getContext('2d', { willReadFrequently: true })
    }
    if (this.tempCanvas.width !== width) this.tempCanvas.width = width
    if (this.tempCanvas.height !== height) this.tempCanvas.height = height
    this.tempCtx.drawImage(video, 0, 0, width, height)
    return this.tempCtx.getImageData(0, 0, width, height)
  }
}
class VideoMemoryManager {
  /**
   * Pools offscreen canvases and caches ImageData buffers so per-frame
   * video processing does not constantly reallocate large pixel buffers.
   */
  constructor() {
    this.canvases = []               // pooled canvases, tagged with an `inUse` flag
    this.imageDataCache = new Map()  // key -> ImageData, FIFO-evicted
    this.maxCacheSize = 10           // maximum number of cached ImageData entries
  }

  /**
   * Return a free pooled canvas matching the requested size, creating a
   * new one when none is available. The canvas is marked in-use until
   * releaseCanvas is called.
   */
  getCanvas(width, height) {
    let canvas = this.canvases.find(c =>
      c.width === width && c.height === height && !c.inUse
    )
    if (!canvas) {
      canvas = document.createElement('canvas')
      canvas.width = width
      canvas.height = height
      this.canvases.push(canvas)
    }
    canvas.inUse = true
    return canvas
  }

  // Return a canvas to the pool so getCanvas can hand it out again.
  releaseCanvas(canvas) {
    canvas.inUse = false
  }

  /**
   * Cache an ImageData under `key`, evicting the oldest entry when full.
   * Overwriting an existing key does NOT evict: evicting in that case
   * would discard an unrelated entry and shrink the cache below capacity,
   * since Map.set on an existing key does not grow the map.
   */
  cacheImageData(key, imageData) {
    if (!this.imageDataCache.has(key) &&
        this.imageDataCache.size >= this.maxCacheSize) {
      const oldestKey = this.imageDataCache.keys().next().value
      this.imageDataCache.delete(oldestKey)
    }
    this.imageDataCache.set(key, imageData)
  }

  // Look up a cached ImageData; undefined when absent.
  getCachedImageData(key) {
    return this.imageDataCache.get(key)
  }

  // Drop every cached ImageData (pooled canvases are kept).
  clearCache() {
    this.imageDataCache.clear()
  }

  /**
   * Approximate bytes held: 4 bytes per RGBA pixel for each pooled canvas
   * plus the raw byte length of every cached ImageData buffer.
   */
  getMemoryUsage() {
    let total = 0
    for (const canvas of this.canvases) {
      total += canvas.width * canvas.height * 4
    }
    for (const imageData of this.imageDataCache.values()) {
      total += imageData.data.length
    }
    return total
  }
}