Added camera logic in OCR

commit ed98fd5cfa
parent 2bf73c3ac2
Author: dhanabalan
Date:   2025-10-25 09:11:02 +05:30


@@ -315,33 +315,9 @@ function cameraCapture() {
     photoTaken: false,
     photo1: '',
     textDetectionInterval: null,
-    worker: null,
-    async initCamera() {
-        try {
-            if (this.stream) this.stream.getTracks().forEach(track => track.stop());
-            const video = this.$refs.video;
-            this.stream = await navigator.mediaDevices.getUserMedia({
-                video: { facingMode: this.currentFacingMode }
-            });
-            this.$refs.video.srcObject = this.stream;
-            await video.play(); // ✅ ensure camera actually starts
-            const overlay = this.$refs.overlay;
-            overlay.width = video.videoWidth;
-            overlay.height = video.videoHeight;
-            setTimeout(() => this.startDetection(), 300);
-            //this.startDetection();
-        } catch (err) {
-            console.error("Camera error:", err);
-            alert("Cannot access camera. Enable permissions or use HTTPS.");
-        }
-    },
     // async initCamera() {
     //     try {
     //         if (this.stream) this.stream.getTracks().forEach(track => track.stop());
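
Note on the initCamera removed above: it sized the overlay right after video.play() and then delayed startDetection() by 300 ms, because videoWidth/videoHeight can still be 0 until the stream's metadata arrives; the replacement in the next hunk waits for loadedmetadata instead. A minimal sketch of that wait without a fixed delay is shown below; waitForMetadata is a hypothetical helper, not part of this commit.

    // Hypothetical helper (not in this commit): resolves once video metadata is
    // available, so videoWidth/videoHeight are non-zero before sizing the overlay.
    function waitForMetadata(video) {
        return video.readyState >= 1 // HAVE_METADATA
            ? Promise.resolve()
            : new Promise(resolve =>
                  video.addEventListener('loadedmetadata', resolve, { once: true }));
    }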
@@ -350,21 +326,50 @@ function cameraCapture() {
     //         video: { facingMode: this.currentFacingMode }
     //     });
-    //     const video = this.$refs.video;
-    //     video.srcObject = this.stream;
-    //     await video.play();
-    //     const overlay = this.$refs.overlay;
-    //     overlay.width = video.videoWidth;
-    //     overlay.height = video.videoHeight;
-    //     this.startTextDetection(); //auto detection after camera start
+    //     this.$refs.video.srcObject = this.stream;
+    //     //this.startDetection();
     //     } catch (err) {
     //         console.error("Camera error:", err);
-    //         alert("Camera access failed!");
+    //         alert("Cannot access camera. Enable permissions or use HTTPS.");
     //     }
     // },
+    async initCamera() {
+        try {
+            if (this.stream) this.stream.getTracks().forEach(track => track.stop());
+            const video = this.$refs.video;
+            this.stream = await navigator.mediaDevices.getUserMedia({
+                video: { facingMode: this.currentFacingMode }
+            });
+            video.srcObject = this.stream;
+            await new Promise(resolve => video.onloadedmetadata = resolve);
+            // Overlay size
+            const overlay = this.$refs.overlay;
+            overlay.width = video.videoWidth;
+            overlay.height = video.videoHeight;
+            // Initialize Tesseract worker
+            if (!this.worker) {
+                this.worker = Tesseract.createWorker({
+                    logger: m => console.log(m)
+                });
+                await this.worker.load();
+                await this.worker.loadLanguage('eng');
+                await this.worker.initialize('eng');
+            }
+            this.startDetection();
+        } catch (err) {
+            console.error("Camera error:", err);
+            alert("Cannot access camera. Enable permissions or use HTTPS.");
+        }
+    },
     async switchCamera() {
         this.currentFacingMode = this.currentFacingMode === 'user' ? 'environment' : 'user';
         await this.initCamera();
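
The worker setup added in initCamera (createWorker followed by load(), loadLanguage('eng') and initialize('eng')) follows the Tesseract.js v2-style API. If the page loads a newer Tesseract.js (v4/v5), createWorker returns a Promise and handles language loading itself; a sketch of the equivalent step under that assumption, reusing the 'eng' language and logger from this commit:

    // Sketch assuming Tesseract.js v5 (version not confirmed by this commit):
    // createWorker() returns a Promise and loads/initializes the language itself.
    if (!this.worker) {
        this.worker = await Tesseract.createWorker('eng', 1, {
            logger: m => console.log(m) // progress events: { status, progress, ... }
        });
    }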
@@ -511,39 +516,31 @@ function cameraCapture() {
     },
     async detectText() {
         const video = this.$refs.video;
         const overlay = this.$refs.overlay;
         const ctx = overlay.getContext("2d");
         if (!video.videoWidth) return;
-        // Clear overlay first
-        ctx.clearRect(0, 0, overlay.width, overlay.height);
-        // Draw current video frame to temp canvas
-        const tempCanvas = document.createElement("canvas");
+        // Draw video frame to temp canvas
+        const tempCanvas = document.createElement('canvas');
         tempCanvas.width = video.videoWidth;
         tempCanvas.height = video.videoHeight;
-        const tempCtx = tempCanvas.getContext("2d");
+        const tempCtx = tempCanvas.getContext('2d');
         tempCtx.drawImage(video, 0, 0);
-        // Use Tesseract worker for better performance
-        if (!this.worker) {
-            this.worker = Tesseract.createWorker({
-                logger: m => console.log(m)
-            });
-            await this.worker.load();
-            await this.worker.loadLanguage('eng');
-            await this.worker.initialize('eng');
-        }
+        // Run OCR in worker
        const { data: { words } } = await this.worker.recognize(tempCanvas);
-        ctx.strokeStyle = "lime";
+        // Clear overlay
+        ctx.clearRect(0, 0, overlay.width, overlay.height);
+        ctx.strokeStyle = 'lime';
         ctx.lineWidth = 2;
         words.forEach(w => {
             if (!w.bbox || w.confidence < 50) return;
             const { x0, y0, x1, y1 } = w.bbox;
             ctx.strokeRect(x0, y0, x1 - x0, y1 - y0);
         });
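
detectText() runs the worker's recognize() on a full video frame each tick of textDetectionInterval, and nothing in this hunk releases the camera or the worker. A sketch of a teardown step under that assumption; stopCamera() is a hypothetical addition that reuses the component's existing stream, textDetectionInterval and worker properties:

    // Hypothetical teardown (not part of this commit): stop the detection loop,
    // release the camera tracks, and free the Tesseract worker.
    async stopCamera() {
        if (this.textDetectionInterval) clearInterval(this.textDetectionInterval);
        this.textDetectionInterval = null;
        if (this.stream) this.stream.getTracks().forEach(track => track.stop());
        this.stream = null;
        if (this.worker) {
            await this.worker.terminate(); // Tesseract.js worker API
            this.worker = null;
        }
    },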