Media
Overview
The VBsbMedia component provides a robust interface for recording and playing back video and audio. It can record video and audio streams, take snapshots from the video feed, and play back recorded media. The component is highly customizable, with options for auto-play, looping, compact mode, and control positioning, and it builds on Vuetify's UI components.
Source
Component
vue
<template>
<div class="container">
<video
v-if="props.video"
:src
:autoplay
:loop
ref="videoElement"
:controls="!!props.src"
></video>
<audio
v-if="props.audio"
:src
:autoplay
:loop
ref="audioElement"
:controls="!!props.src && !props.compact"
></audio>
<v-btn
v-if="props.video && props.snap"
:variant
:density
:class="'controls ma-2 controls-' + props.snapPosition"
@click="captureSnapshot"
:icon="props.snapIcon"
/>
<v-btn
v-if="!props.src && !props.compact"
:variant
:density
:class="props.video ? 'controls ma-2 controls-' + props.recorderPosition : ''"
@click="videoRecording"
:prepend-icon="isRecording ? '$mdiStop' : '$mdiRecord'"
color="red"
>{{ recordingTimeHMS }}</v-btn
>
<v-btn
v-if="!props.src && props.compact"
:variant
:density
:class="props.video ? 'controls ma-2 controls-' + props.recorderPosition : ''"
@click="videoRecording"
:icon="isRecording ? '$mdiStop' : '$mdiRecord'"
color="red"
/>
<v-btn
v-if="props.audio && props.src && props.compact"
:variant
:density
@click="audioPlayback"
:icon="isPlaying ? '$mdiStop' : '$mdiPlay'"
></v-btn>
<div class="slot">
<slot></slot>
</div>
<canvas ref="canvasElement" style="display: none"></canvas>
</div>
</template>
<script setup lang="ts">
import { ref, reactive, computed, onMounted, onBeforeUnmount } from 'vue'
const props = defineProps({
src: {
type: String,
default: null,
},
autoplay: {
type: Boolean,
default: true,
},
loop: {
type: Boolean,
default: false,
},
video: {
type: Boolean,
default: false,
},
audio: {
type: Boolean,
default: false,
},
recorderPosition: {
type: String as () => 'top-left' | 'top-right' | 'bottom-left' | 'bottom-right',
default: 'bottom-left',
},
snap: {
type: Boolean,
default: false,
},
snapPosition: {
type: String as () => 'top-left' | 'top-right' | 'bottom-left' | 'bottom-right',
default: 'top-right',
},
snapIcon: {
type: String,
default: '$mdiCamera',
},
format: {
type: String as () => 'base64' | 'blob',
default: 'base64',
},
videoConstraints: {
type: Object as () => MediaTrackConstraints,
default: () => ({}) as MediaTrackConstraints,
},
audioConstraints: {
type: Object as () => MediaTrackConstraints,
default: () => ({}) as MediaTrackConstraints,
},
compact: {
type: Boolean,
default: false,
},
variant: {
type: String as () => 'outlined' | 'flat' | 'text' | 'elevated' | 'tonal' | 'plain' | undefined,
default: 'flat',
},
density: {
type: String as () => 'default' | 'comfortable' | 'compact' | undefined,
default: 'default',
},
})
const emits = defineEmits([
'loading',
'device',
'started',
'paused',
'resumed',
'stopped',
'snapped',
'recorded',
'error',
])
const videoElement = ref<HTMLVideoElement | null>(null)
const audioElement = ref<HTMLAudioElement | null>(null)
const canvasElement = ref<HTMLCanvasElement | null>(null)
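// Toggle playback of the supplied audio source; stopping also rewinds to the start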
const audioPlayback = async () => {
const audio = audioElement.value
if (!audio) {
emits('error', new Error('Audio element is not available'))
return
}
audio.onpause = () => {
isPlaying.value = false
}
if (isPlaying.value) {
isPlaying.value = false
audio.pause()
audio.currentTime = 0
} else {
isPlaying.value = true
await audio.play()
}
}
const videoRecording = async () => {
if (props.video && !stream.value) {
emits('error', new Error('Camera is not started'))
return
}
if (isRecording.value) {
stopRecording()
} else {
startRecording()
}
}
const devices = ref<MediaDeviceInfo[]>([])
const videoDeviceId = ref('')
const audioDeviceId = ref('')
const stream = ref<MediaStream | null>(null)
const isPlaying = ref(false)
const mediaRecorder = ref<MediaRecorder | null>(null)
const recordedChunks = ref<Blob[]>([])
const isRecording = ref(false)
let recordingTimer: ReturnType<typeof setInterval> | undefined = undefined
const recordingTime = ref(0)
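// Elapsed recording time as a zero-padded H:MM:SS string (MM:SS under one hour)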
const recordingTimeHMS = computed(() =>
(recordingTime.value >= 3600
? [
Math.floor(recordingTime.value / 3600),
Math.floor((recordingTime.value / 60) % 60),
Math.floor(recordingTime.value % 60),
]
: [
Math.floor((recordingTime.value / 60) % 60),
Math.floor(recordingTime.value % 60),
]
)
.join(':')
.replace(/\b(\d)\b/g, '0$1'),
)
defineExpose({
listDevices,
setDevice,
audioPlayback,
videoRecording,
isPlaying,
})
onMounted(async () => {
if (props.src) return
if (!props.video && !props.audio) {
emits('error', new Error('At least one of video or audio props must be true'))
return
}
try {
emits('loading', true)
await startCamera()
} catch (error) {
emits('error', error)
} finally {
emits('loading', false)
}
})
onBeforeUnmount(() => {
stopRecording()
stopCamera()
if (recordingTimer) clearInterval(recordingTimer)
})
function listDevices() {
return devices.value.filter(
(device) =>
(device.kind === 'videoinput' && props.video) ||
(device.kind === 'audioinput' && props.audio),
)
}
function setDevice(newDeviceId: string) {
const device = devices.value.find((device) => device.deviceId === newDeviceId)
if (device?.kind == 'videoinput') {
videoDeviceId.value = newDeviceId
} else if (device?.kind == 'audioinput') {
audioDeviceId.value = newDeviceId
}
}
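// Enumerate input devices, pick defaults, open the camera preview stream, and attach it to the video element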
const startCamera = async () => {
try {
if (stream.value) {
stopCamera()
}
devices.value = await navigator.mediaDevices.enumerateDevices()
if (!videoDeviceId.value && props.video) {
videoDeviceId.value =
devices.value.find((device) => device.kind === 'videoinput')?.deviceId || ''
emits('device', { devices: devices.value, device: videoDeviceId.value })
}
if (!audioDeviceId.value && props.audio) {
audioDeviceId.value =
devices.value.find((device) => device.kind === 'audioinput')?.deviceId || ''
emits('device', { devices: devices.value, device: audioDeviceId.value })
}
const videoConstraints = reactive({ ...props.videoConstraints })
videoConstraints.deviceId = videoDeviceId.value ? { exact: videoDeviceId.value } : undefined
stream.value = await navigator.mediaDevices.getUserMedia({
video: videoConstraints,
audio: false,
})
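// Re-enumerate once permission is granted so device labels are populated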
devices.value = await navigator.mediaDevices.enumerateDevices()
if (videoElement.value) {
videoElement.value.srcObject = stream.value
}
emits('started', videoDeviceId.value)
} catch {
emits('error', new Error('Camera is not started'))
}
}
const stopCamera = () => {
if (stream.value) {
stream.value.getTracks().forEach((track) => track.stop())
stream.value = null
emits('stopped')
}
}
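// Draw the current video frame onto the hidden canvas and emit it as a base64 data URL or Blob, per the format prop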
const captureSnapshot = () => {
if (!stream.value && !props.src) {
emits('error', new Error('Camera is not started'))
return
}
const canvas = canvasElement.value
if (!canvas) {
emits('error', new Error('Canvas element is not available'))
return
}
const video = videoElement.value
if (video) {
canvas.width = video.videoWidth
canvas.height = video.videoHeight
video.crossOrigin = 'Anonymous'
}
const context = canvas.getContext('2d')
if (context && video) {
context.drawImage(video, 0, 0, canvas.width, canvas.height)
if (props.format == 'base64') {
const image = canvas.toDataURL('image/png')
emits('snapped', image)
} else {
canvas.toBlob((blob) => {
if (blob) {
emits('snapped', blob)
} else {
emits('error', new Error('Failed to capture snapshot as blob'))
}
})
}
} else {
emits('error', new Error('Failed to capture snapshot'))
}
}
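// Request a fresh capture stream, start a MediaRecorder, and tick the elapsed-time counter every second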
const startRecording = async () => {
if (!stream.value) {
emits('error', new Error('Camera is not started'))
return
}
recordedChunks.value = []
recordingTime.value = 0
const recordStream = await navigator.mediaDevices.getUserMedia({
audio: props.audio ? props.audioConstraints : false,
video: props.video ? props.videoConstraints : false,
})
mediaRecorder.value = new MediaRecorder(recordStream)
mediaRecorder.value.ondataavailable = (event) => {
if (event.data.size > 0) {
recordedChunks.value.push(event.data)
}
}
mediaRecorder.value.onstop = saveRecording
mediaRecorder.value.start()
isRecording.value = true
emits('started', { video: videoDeviceId.value, audio: audioDeviceId.value })
recordingTimer = setInterval(() => {
recordingTime.value += 1
}, 1000)
}
const stopRecording = () => {
if (mediaRecorder.value && isRecording.value) {
mediaRecorder.value.stop()
isRecording.value = false
clearInterval(recordingTimer)
emits('stopped')
}
}
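// Combine the recorded chunks into a WebM Blob and emit it, or its base64 payload, via 'recorded'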
const saveRecording = () => {
const blob = new Blob(recordedChunks.value, { type: 'video/webm' })
const reader = new FileReader()
reader.onloadend = () => {
if (props.format == 'blob') {
emits('recorded', blob)
} else {
const base64Video = typeof reader.result === 'string' ? reader.result.split(',')[1] : ''
emits('recorded', base64Video)
}
}
reader.readAsDataURL(blob)
}
</script>
<style scoped>
.container {
position: relative;
width: 100%;
height: 100%;
}
.controls,
.slot {
position: absolute;
z-index: 1;
}
.controls-top-left {
top: 0;
left: 0;
}
.controls-top-right {
top: 0;
right: 0;
}
.controls-bottom-right {
bottom: 0;
right: 0;
}
.controls-bottom-left {
bottom: 0;
left: 0;
}
video {
display: block;
width: 100%;
height: 100%;
object-fit: cover;
}
</style>
Test
ts
import { describe, it, expect, beforeEach } from 'vitest'
import { mount, VueWrapper } from '@vue/test-utils'
import vuetify from '../../plugins/vuetify'
import VBsbMedia from '../../components/VBsbMedia.vue'
describe('VBsbMedia.vue', () => {
let wrapper: VueWrapper
beforeEach(() => {
wrapper = mount(VBsbMedia, {
global: {
components: {
VBsbMedia,
},
plugins: [vuetify],
},
props: {
video: true,
audio: false,
snap: true,
},
})
})
it('renders video element when video prop is true', () => {
const video = wrapper.find('video')
expect(video.exists()).toBe(true)
})
it('does not render audio element when audio prop is false', () => {
const audio = wrapper.find('audio')
expect(audio.exists()).toBe(false)
})
it('emits "loading" event on component mount', () => {
expect(wrapper.emitted('loading')).toBeTruthy()
expect(wrapper.emitted('loading')?.[0]).toEqual([true])
})
it('emits error if no video or audio is enabled', async () => {
const wrapperWithoutMedia = mount(VBsbMedia, {
global: {
components: {
VBsbMedia,
},
plugins: [vuetify],
},
props: { video: false, audio: false },
})
await wrapperWithoutMedia.vm.$nextTick()
expect(wrapperWithoutMedia.emitted('error')).toBeTruthy()
const error = wrapperWithoutMedia.emitted('error')?.[0]?.[0]
expect((error as Error).message).toBe('At least one of video or audio props must be true')
})
})
Usage
Basic Example: Video Recorder
vue
<template>
<VBsbMedia
:video="true"
:autoplay="true"
:loop="false"
:snap="true"
snapIcon="$mdiCamera"
@snapped="handleSnapshot"
@recorded="handleRecording"
:videoConstraints="{ width: 1280, height: 720 }"
/>
</template>
<script setup>
import VBsbMedia from '@/components/VBsbMedia.vue'
function handleSnapshot(image) {
console.log('Snapshot captured:', image)
}
function handleRecording(recordedData) {
console.log('Recording completed:', recordedData)
}
</script>
Audio Player Example
vue
<template>
<VBsbMedia
:audio="true"
:src="'/path/to/audio-file.mp3'"
:compact="true"
@error="handleError"
/>
</template>
<script setup>
import VBsbMedia from '@/components/VBsbMedia.vue'
function handleError(err) {
console.error('Error:', err.message)
}
</script>
Snapshot and Record Example with Custom Constraints
vue
<template>
<VBsbMedia
:video="true"
:audio="true"
:snap="true"
:snapPosition="'top-left'"
:recorderPosition="'bottom-right'"
@snapped="handleSnapshot"
@recorded="handleRecording"
:videoConstraints="{ width: 1920, height: 1080 }"
:audioConstraints="{ sampleRate: 44100 }"
/>
</template>
<script setup>
import VBsbMedia from '@/components/VBsbMedia.vue'
function handleSnapshot(image) {
console.log('Snapshot captured:', image)
}
function handleRecording(recordedData) {
console.log('Recording saved:', recordedData)
}
</script>
API
Props
Prop | Type | Default | Description |
---|---|---|---|
src | String | null | The source URL of the media (video or audio). When provided, the media is played from this source and the recording controls are hidden. |
autoplay | Boolean | true | Automatically start the video or audio upon component load. |
loop | Boolean | false | Loop the media playback when it ends. |
video | Boolean | false | Enable video recording or playback. |
audio | Boolean | false | Enable audio recording or playback. |
recorderPosition | String (one of: 'top-left', 'top-right', 'bottom-left', 'bottom-right') | 'bottom-left' | Position of the recording control button. |
snap | Boolean | false | Show a snapshot button to capture an image from the video stream. |
snapPosition | String (one of: 'top-left', 'top-right', 'bottom-left', 'bottom-right') | 'top-right' | Position of the snapshot button. |
snapIcon | String | '$mdiCamera' | Icon for the snapshot button. |
format | String (one of: 'base64', 'blob') | 'base64' | Format of the snapshot or recording output: either a Base64-encoded string or a Blob object. |
videoConstraints | Object | {} | Custom video constraints for the camera (e.g., resolution, aspect ratio). |
audioConstraints | Object | {} | Custom audio constraints for the microphone (e.g., sample rate, channel count). |
compact | Boolean | false | Whether to show a compact version of the controls (e.g., for audio playback). |
variant | String (one of: 'outlined', 'flat', 'text', 'elevated', 'tonal', 'plain') | 'flat' | Visual style for the buttons. |
density | String (one of: 'default', 'comfortable', 'compact') | 'default' | Density of the buttons (spacing and size). |
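As a sketch of how the format prop affects consumers, a recorded handler might branch on the payload type; the download logic below is illustrative and not part of the component:
ts
// Hypothetical @recorded handler covering both format="blob" and format="base64"
function handleRecording(recordedData: Blob | string) {
  if (recordedData instanceof Blob) {
    // format="blob": wrap the Blob in an object URL for download or playback
    const url = URL.createObjectURL(recordedData)
    const link = document.createElement('a')
    link.href = url
    link.download = 'recording.webm'
    link.click()
    URL.revokeObjectURL(url)
  } else {
    // format="base64": the payload is the base64 body without the data-URL prefix
    const video = document.createElement('video')
    video.src = `data:video/webm;base64,${recordedData}`
    video.controls = true
    document.body.appendChild(video)
  }
}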
Events
Event | Payload | Description |
---|---|---|
loading | Boolean | Emitted when the component is starting or stopping media devices. |
device | { devices: MediaDeviceInfo[], device: string } | Emitted when media devices are selected. |
started | String (device ID) / { video: string, audio: string } | Emitted when the camera preview starts (payload: the video device ID) and when recording starts (payload: the selected video and audio device IDs). |
paused | None | Emitted when the media recording or playback is paused. |
resumed | None | Emitted when media playback or recording is resumed after being paused. |
stopped | None | Emitted when media capture (video/audio) stops. |
snapped | String (Base64) / Blob | Emitted when a snapshot is captured from the video. |
recorded | String (Base64) / Blob | Emitted when video recording finishes. |
error | Error | Emitted if an error occurs (e.g., camera or microphone access failure). |
Methods
listDevices()
Returns a list of available media devices (video and audio input devices).
setDevice(newDeviceId: string)
Manually set the video or audio device by device ID.
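A minimal sketch of driving these methods through a template ref; the ref name, the v-select device picker, and the onDevice handler are illustrative, not part of the component:
vue
<template>
  <VBsbMedia ref="media" :video="true" @device="onDevice" />
  <v-select
    :items="inputs"
    item-title="label"
    item-value="deviceId"
    label="Camera"
    @update:model-value="media?.setDevice($event)"
  />
</template>
<script setup lang="ts">
import { ref } from 'vue'
import VBsbMedia from '@/components/VBsbMedia.vue'
const media = ref<InstanceType<typeof VBsbMedia> | null>(null)
const inputs = ref<MediaDeviceInfo[]>([])
function onDevice() {
  // listDevices() returns only the input kinds enabled by the video/audio props
  inputs.value = media.value?.listDevices() ?? []
}
</script>
Note that setDevice only updates the stored device ID; the component does not restart an active stream on its own.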
Slots
default
Custom content displayed inside the component; useful for adding extra controls or overlays on top of the media.
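A sketch of the default slot used as an overlay, paired with the loading event; the spinner markup is illustrative:
vue
<template>
  <VBsbMedia :video="true" @loading="loading = $event">
    <!-- rendered inside the absolutely positioned slot container, on top of the video -->
    <v-progress-circular v-if="loading" indeterminate class="ma-2" />
  </VBsbMedia>
</template>
<script setup lang="ts">
import { ref } from 'vue'
import VBsbMedia from '@/components/VBsbMedia.vue'
const loading = ref(false)
</script>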