The ImageCapture interface of the MediaStream Image Capture API provides methods to enable the capture of images or photos from a camera or other photographic device. It provides an interface for capturing images from a photographic device referenced through a valid MediaStreamTrack.

The ImageCapture interface is based on EventTarget, so it includes the methods defined by that interface as well as the ones listed below.
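As a brief illustration, the following sketch (not part of the reference material above; the constraints and logging are placeholders) shows how an ImageCapture object is constructed from a video MediaStreamTrack obtained with getUserMedia().

// A minimal sketch: obtain a video track and wrap it in an ImageCapture.
navigator.mediaDevices
  .getUserMedia({ video: true })
  .then((mediaStream) => {
    const [track] = mediaStream.getVideoTracks();
    const imageCapture = new ImageCapture(track);
    // The track the capture object was constructed from is exposed as .track
    console.log(imageCapture.track === track); // true
  })
  .catch((error) => console.error("getUserMedia() failed:", error));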
- ImageCapture.takePhoto() (Experimental): Takes a single exposure using the video capture device sourcing a MediaStreamTrack and returns a Promise that resolves with a Blob containing the data.
- ImageCapture.getPhotoCapabilities() (Experimental): Returns a Promise that resolves with a PhotoCapabilities object containing the ranges of available configuration options (see the sketch following this list).
- ImageCapture.getPhotoSettings() (Experimental): Returns a Promise that resolves with a PhotoSettings object containing the current photo configuration settings (also shown in the sketch below).
- ImageCapture.grabFrame() (Experimental): Takes a snapshot of the live video in a MediaStreamTrack, returning an ImageBitmap if successful.
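Neither getPhotoCapabilities() nor getPhotoSettings() appears in the sample further below, so here is a minimal sketch of how they might be queried. It assumes a video MediaStreamTrack named track obtained elsewhere (for example via getUserMedia()); the surrounding structure is illustrative only.

// A minimal sketch: inspect what the device supports and what is currently set.
const imageCapture = new ImageCapture(track);

imageCapture
  .getPhotoCapabilities()
  .then((capabilities) => {
    // Ranges of supported values, e.g. capabilities.imageWidth.min / .max
    console.log("Capabilities:", capabilities);
  })
  .catch((error) => console.error(error));

imageCapture
  .getPhotoSettings()
  .then((settings) => {
    // The settings currently in effect, e.g. settings.imageWidth
    console.log("Current settings:", settings);
  })
  .catch((error) => console.error(error));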
The following code is taken from Chrome's Grab Frame - Take Photo Sample. Since ImageCapture requires somewhere to capture an image from, the example below starts by acquiring a device's media device (in other words, a camera) via getUserMedia().

This example shows, roughly, a MediaStreamTrack extracted from a device's MediaStream. The track is then used to create an ImageCapture object so that takePhoto() and grabFrame() can be called. Finally, it shows how to apply the results of these calls to a canvas object.
let imageCapture;

function onGetUserMediaButtonClick() {
  navigator.mediaDevices
    .getUserMedia({ video: true })
    .then((mediaStream) => {
      // Display the live stream and keep an ImageCapture wrapper around
      // its video track for later use by the button handlers below.
      document.querySelector("video").srcObject = mediaStream;
      const track = mediaStream.getVideoTracks()[0];
      imageCapture = new ImageCapture(track);
    })
    .catch((error) => console.error(error));
}

function onGrabFrameButtonClick() {
  imageCapture
    .grabFrame()
    .then((imageBitmap) => {
      const canvas = document.querySelector("#grabFrameCanvas");
      drawCanvas(canvas, imageBitmap);
    })
    .catch((error) => console.error(error));
}

function onTakePhotoButtonClick() {
  imageCapture
    .takePhoto()
    .then((blob) => createImageBitmap(blob))
    .then((imageBitmap) => {
      const canvas = document.querySelector("#takePhotoCanvas");
      drawCanvas(canvas, imageBitmap);
    })
    .catch((error) => console.error(error));
}

function drawCanvas(canvas, img) {
  // Match the canvas's drawing-buffer size to its CSS size, then draw the
  // image centered and scaled to fit while preserving its aspect ratio.
  canvas.width = getComputedStyle(canvas).width.split("px")[0];
  canvas.height = getComputedStyle(canvas).height.split("px")[0];
  const ratio = Math.min(canvas.width / img.width, canvas.height / img.height);
  const x = (canvas.width - img.width * ratio) / 2;
  const y = (canvas.height - img.height * ratio) / 2;
  canvas.getContext("2d").clearRect(0, 0, canvas.width, canvas.height);
  canvas
    .getContext("2d")
    .drawImage(
      img,
      0,
      0,
      img.width,
      img.height,
      x,
      y,
      img.width * ratio,
      img.height * ratio,
    );
}

// Enable the capture buttons only once the video is actually playing.
document.querySelector("video").addEventListener("play", () => {
  document.querySelector("#grabFrameButton").disabled = false;
  document.querySelector("#takePhotoButton").disabled = false;
});
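The sample leaves the camera running. As a possible follow-up (not part of the original sample; the handler name is hypothetical), the camera can be released by stopping the underlying track once no more captures are needed.

// A minimal sketch, assuming imageCapture was set up as above.
function onStopCameraButtonClick() {
  if (imageCapture) {
    // Stopping the track turns the camera off and ends the live stream.
    imageCapture.track.stop();
  }
}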