Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Make sure last camera rendering is completed when switching UI #219

Merged
merged 1 commit into from
Apr 10, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
44 changes: 37 additions & 7 deletions face_recognition/main.js
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,8 @@ let deviceType = '';
let lastdeviceType = '';
let backend = '';
let lastBackend = '';
let stopRender = true;
let isRendering = false;
const disabledSelectors = ['#tabs > li', '.btn'];

$(document).ready(async () => {
Expand All @@ -50,27 +52,34 @@ $(document).ready(async () => {
});

$('#backendBtns .btn').on('change', async (e) => {
if (inputType === 'camera') utils.stopCameraStream(rafReq, stream);
if (inputType === 'camera') {
await stopCamRender();
}
layout = utils.getDefaultLayout($(e.target).attr('id'));
await main();
});

$('#fdModelBtns .btn').on('change', async (e) => {
if (inputType === 'camera') {
await stopCamRender();
}
fdModelName = $(e.target).attr('id');
if (inputType === 'camera') utils.stopCameraStream(rafReq, stream);
await main();
});

// $('#layoutBtns .btn').on('change', async (e) => {
// if (inputType === 'camera') {
// await stopCamRender();
// }
// layout = $(e.target).attr('id');
// if (inputType === 'camera') utils.stopCameraStream(rafReq, stream);
// await main();
// });

// Click trigger to do inference with <img> element
$('#img').click(async () => {
if (inputType === 'camera') {
utils.stopCameraStream(rafReq, stream);
await ui.showProgressComponent('current', 'pending', 'pending');
await stopCamRender();
// Set timeout to leave more time to make sure searchEmbeddings
// is clear after switching from camera tab to image tab
await new Promise((resolve) => {
Expand All @@ -79,9 +88,10 @@ $('#img').click(async () => {
resolve();
}, 1000);
});
} else {
return;
}
inputType = 'image';
$('.shoulddisplay').hide();
searchEmbeddings = null;
await main();
});
Expand Down Expand Up @@ -118,22 +128,38 @@ $('#searchImage').on('load', async () => {

// Click trigger to do inference with <video> media element
// Click on the camera tab: switch the input source to the live camera
// and rerun the pipeline. No-op if the camera tab is already active.
$('#cam').click(async () => {
  // Use strict equality for consistency with the other handlers in this file.
  if (inputType === 'camera') return;
  inputType = 'camera';
  // Hide stale results from the previous tab until new output is ready.
  $('.shoulddisplay').hide();
  await main();
});

/**
 * Stop the live-camera render loop and wait until the in-flight frame
 * (if any) has finished rendering.
 * @returns {Promise<void>} resolves once `isRendering` is false.
 */
function stopCamRender() {
  stopRender = true;
  utils.stopCameraStream(rafReq, stream);
  return new Promise((resolve) => {
    // If the rendering is not stopped yet, check it every 100ms.
    const checkHandle = setInterval(() => {
      // Resolve when the rendering is stopped.
      if (!isRendering) {
        // Clear the timer so it doesn't keep polling (and keep the
        // event loop busy) forever after the promise has resolved.
        clearInterval(checkHandle);
        resolve();
      }
    }, 100);
  });
}

/**
* This method is used to render live camera tab.
*/
async function renderCamStream() {
if (!stream.active) return;
if (!stream.active || stopRender) return;
// If the video element's readyState is 0, the video's width and height are 0.
// So check the readyState here to make sure it is greater than 0.
if (camElem.readyState === 0) {
rafReq = requestAnimationFrame(renderCamStream);
return;
}
isRendering = true;
// Clear search embeddings for each frame
searchEmbeddings = null;
const inputCanvas = utils.getVideoFrame(camElem);
Expand All @@ -143,7 +169,10 @@ async function renderCamStream() {
showPerfResult();
await drawOutput(inputCanvas, searchCanvasCamShowElem);
$('#fps').text(`${(1000/computeTime).toFixed(0)} FPS`);
rafReq = requestAnimationFrame(renderCamStream);
isRendering = false;
if (!stopRender) {
rafReq = requestAnimationFrame(renderCamStream);
}
}

async function getEmbeddings(inputElem) {
Expand Down Expand Up @@ -373,6 +402,7 @@ async function main() {
} else if (inputType === 'camera') {
stream = await utils.getMediaStream();
camElem.srcObject = stream;
stopRender = false;
camElem.onloadeddata = await renderCamStream();
await ui.showProgressComponent('done', 'done', 'done');
$('#fps').show();
Expand Down
44 changes: 38 additions & 6 deletions facial_landmark_detection/main.js
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,8 @@ let deviceType = '';
let lastdeviceType = '';
let backend = '';
let lastBackend = '';
let stopRender = true;
let isRendering = false;
const disabledSelectors = ['#tabs > li', '.btn'];

$(document).ready(async () => {
Expand All @@ -46,26 +48,36 @@ $(document).ready(async () => {
});

$('#backendBtns .btn').on('change', async (e) => {
if (inputType === 'camera') utils.stopCameraStream(rafReq, stream);
if (inputType === 'camera') {
await stopCamRender();
}
layout = utils.getDefaultLayout($(e.target).attr('id'));
await main();
});

$('#fdModelBtns .btn').on('change', async (e) => {
if (inputType === 'camera') {
await stopCamRender();
}
fdModelName = $(e.target).attr('id');
if (inputType === 'camera') utils.stopCameraStream(rafReq, stream);
await main();
});

// $('#layoutBtns .btn').on('change', async (e) => {
// if (inputType === 'camera') {
// await stopCamRender();
// }
// layout = $(e.target).attr('id');
// if (inputType === 'camera') utils.stopCameraStream(rafReq, stream);
// await main();
// });

// Click trigger to do inference with <img> element
$('#img').click(async () => {
if (inputType === 'camera') utils.stopCameraStream(rafReq, stream);
if (inputType === 'camera') {
await stopCamRender();
} else {
return;
}
inputType = 'image';
$('.shoulddisplay').hide();
await main();
Expand All @@ -86,22 +98,38 @@ $('#feedElement').on('load', async () => {

// Click trigger to do inference with <video> media element
// Click on the camera tab: switch the input source to the live camera
// and rerun the pipeline. No-op if the camera tab is already active.
$('#cam').click(async () => {
  // Use strict equality for consistency with the other handlers in this file.
  if (inputType === 'camera') return;
  inputType = 'camera';
  // Hide stale results from the previous tab until new output is ready.
  $('.shoulddisplay').hide();
  await main();
});

/**
 * Stop the live-camera render loop and wait until the in-flight frame
 * (if any) has finished rendering.
 * @returns {Promise<void>} resolves once `isRendering` is false.
 */
function stopCamRender() {
  stopRender = true;
  utils.stopCameraStream(rafReq, stream);
  return new Promise((resolve) => {
    // If the rendering is not stopped yet, check it every 100ms.
    const checkHandle = setInterval(() => {
      // Resolve when the rendering is stopped.
      if (!isRendering) {
        // Clear the timer so it doesn't keep polling (and keep the
        // event loop busy) forever after the promise has resolved.
        clearInterval(checkHandle);
        resolve();
      }
    }, 100);
  });
}

/**
* This method is used to render live camera tab.
*/
async function renderCamStream() {
if (!stream.active) return;
if (!stream.active || stopRender) return;
// If the video element's readyState is 0, the video's width and height are 0.
// So check the readyState here to make sure it is greater than 0.
if (camElement.readyState === 0) {
rafReq = requestAnimationFrame(renderCamStream);
return;
}
isRendering = true;
const inputCanvas = utils.getVideoFrame(camElement);
console.log('- Computing... ');
const [totalComputeTime, strokedRects, keyPoints] =
Expand All @@ -111,7 +139,10 @@ async function renderCamStream() {
showPerfResult();
await drawOutput(inputCanvas, strokedRects, keyPoints);
$('#fps').text(`${(1000/totalComputeTime).toFixed(0)} FPS`);
rafReq = requestAnimationFrame(renderCamStream);
isRendering = false;
if (!stopRender) {
rafReq = requestAnimationFrame(renderCamStream);
}
}

async function predict(inputElement) {
Expand Down Expand Up @@ -306,6 +337,7 @@ async function main() {
} else if (inputType === 'camera') {
stream = await utils.getMediaStream();
camElement.srcObject = stream;
stopRender = false;
camElement.onloadeddata = await renderCamStream();
await ui.showProgressComponent('done', 'done', 'done');
$('#fps').show();
Expand Down
44 changes: 38 additions & 6 deletions image_classification/main.js
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,8 @@ let deviceType = '';
let lastdeviceType = '';
let backend = '';
let lastBackend = '';
let stopRender = true;
let isRendering = false;
const disabledSelectors = ['#tabs > li', '.btn'];

async function fetchLabels(url) {
Expand All @@ -50,26 +52,36 @@ $(document).ready(async () => {
});

$('#backendBtns .btn').on('change', async (e) => {
if (inputType === 'camera') utils.stopCameraStream(rafReq, stream);
if (inputType === 'camera') {
await stopCamRender();
}
layout = utils.getDefaultLayout($(e.target).attr('id'));
await main();
});

$('#modelBtns .btn').on('change', async (e) => {
if (inputType === 'camera') {
await stopCamRender();
}
modelName = $(e.target).attr('id');
if (inputType === 'camera') utils.stopCameraStream(rafReq, stream);
await main();
});

// $('#layoutBtns .btn').on('change', async (e) => {
// if (inputType === 'camera') {
// await stopCamRender();
// }
// layout = $(e.target).attr('id');
// if (inputType === 'camera') utils.stopCameraStream(rafReq, stream);
// await main();
// });

// Click trigger to do inference with <img> element
$('#img').click(async () => {
if (inputType === 'camera') utils.stopCameraStream(rafReq, stream);
if (inputType === 'camera') {
await stopCamRender();
} else {
return;
}
inputType = 'image';
$('.shoulddisplay').hide();
await main();
Expand All @@ -90,22 +102,38 @@ $('#feedElement').on('load', async () => {

// Click trigger to do inference with <video> media element
// Click on the camera tab: switch the input source to the live camera
// and rerun the pipeline. No-op if the camera tab is already active.
$('#cam').click(async () => {
  // Use strict equality for consistency with the other handlers in this file.
  if (inputType === 'camera') return;
  inputType = 'camera';
  // Hide stale results from the previous tab until new output is ready.
  $('.shoulddisplay').hide();
  await main();
});

/**
 * Stop the live-camera render loop and wait until the in-flight frame
 * (if any) has finished rendering.
 * @returns {Promise<void>} resolves once `isRendering` is false.
 */
function stopCamRender() {
  stopRender = true;
  utils.stopCameraStream(rafReq, stream);
  return new Promise((resolve) => {
    // If the rendering is not stopped yet, check it every 100ms.
    const checkHandle = setInterval(() => {
      // Resolve when the rendering is stopped.
      if (!isRendering) {
        // Clear the timer so it doesn't keep polling (and keep the
        // event loop busy) forever after the promise has resolved.
        clearInterval(checkHandle);
        resolve();
      }
    }, 100);
  });
}

/**
* This method is used to render live camera tab.
*/
async function renderCamStream() {
if (!stream.active) return;
if (!stream.active || stopRender) return;
// If the video element's readyState is 0, the video's width and height are 0.
// So check the readyState here to make sure it is greater than 0.
if (camElement.readyState === 0) {
rafReq = requestAnimationFrame(renderCamStream);
return;
}
isRendering = true;
const inputBuffer = utils.getInputTensor(camElement, inputOptions);
const inputCanvas = utils.getVideoFrame(camElement);
console.log('- Computing... ');
Expand All @@ -118,7 +146,10 @@ async function renderCamStream() {
showPerfResult();
await drawOutput(outputBuffer, labels);
$('#fps').text(`${(1000/computeTime).toFixed(0)} FPS`);
rafReq = requestAnimationFrame(renderCamStream);
isRendering = false;
if (!stopRender) {
rafReq = requestAnimationFrame(renderCamStream);
}
}

// Get top 3 classes of labels from output buffer
Expand Down Expand Up @@ -285,6 +316,7 @@ async function main() {
} else if (inputType === 'camera') {
stream = await utils.getMediaStream();
camElement.srcObject = stream;
stopRender = false;
camElement.onloadeddata = await renderCamStream();
await ui.showProgressComponent('done', 'done', 'done');
ui.readyShowResultComponents();
Expand Down
Loading