Rewrote the camera-capture OCR method logic (capture, live detection, and verification flow)

This commit is contained in:
dhanabalan
2025-11-14 10:20:58 +05:30
parent 6bd3ca61f7
commit 9491b37a89

View File

@@ -746,43 +746,374 @@ function cameraCapture() {
<script>
// function cameraCapture() {
// return {
// stream: null,
// currentFacingMode: 'user',
// textDetectionInterval: null,
// capturedPhoto: null, // store captured image
// serialNumbers: [],
// ocrWorker: null,
// isWorkerReady: false,
// async initCamera() {
// try {
// await this.initWorker();
// if (this.stream) this.stream.getTracks().forEach(track => track.stop());
// const video = this.$refs.video;
// this.stream = await navigator.mediaDevices.getUserMedia({
// video: { facingMode: this.currentFacingMode }
// });
// video.srcObject = this.stream;
// await new Promise(resolve => video.onloadedmetadata = resolve);
// video.play();
// // Overlay size matches video
// const overlay = this.$refs.overlay;
// overlay.width = video.videoWidth;
// overlay.height = video.videoHeight;
// this.startDetection();
// } catch (err) {
// console.error("Camera error:", err);
// alert("Camera error:\n" + (err.message || err));
// this.stopDetection();
// }
// },
// async initWorker() {
// if (this.ocrWorker) return;
// console.log("⏳ Loading OCR worker...");
// this.ocrWorker = await Tesseract.createWorker({
// logger: info => console.log(info.status, info.progress)
// });
// await this.ocrWorker.loadLanguage('eng');
// await this.ocrWorker.initialize('eng');
// this.isWorkerReady = true;
// console.log("✅ OCR Worker Ready");
// },
// async switchCamera() {
// this.currentFacingMode = this.currentFacingMode === 'user' ? 'environment' : 'user';
// await this.initCamera();
// },
// // async capturePhoto() {
// // const video = this.$refs.video;
// // const canvas = this.$refs.canvas;
// // const ctx = canvas.getContext('2d');
// // canvas.width = video.videoWidth;
// // canvas.height = video.videoHeight;
// // ctx.drawImage(video, 0, 0);
// // // const snapshotData = canvas.toDataURL('image/png');
// // // this.$refs.hiddenInput.value = snapshotData;
// // // this.capturedPhoto = snapshotData; // store for verification
// // const snapshotData = canvas.toDataURL('image/png');
// // this.$refs.hiddenInput.value = snapshotData;
// // this.capturedPhoto = snapshotData;
// // // Stop camera stream
// // if (this.stream) this.stream.getTracks().forEach(track => track.stop());
// // // snapshot.src = dataUrl;
// // // snapshot.classList.remove('hidden');
// // // video.classList.add('hidden');
// // // const snapshot = this.$refs.snapshot;
// // // snapshot.src = snapshotData;
// // // snapshot.classList.remove('hidden');
// // // video.classList.add('hidden');
// // // overlay.classList.add('hidden');
// // snapshot.src = dataUrl;
// // snapshot.classList.remove('hidden');
// // video.classList.add('hidden');
// // alert("Photo captured!");
// // this.stopDetection();
// // },
// async capturePhoto() {
// const video = this.$refs.video;
// const canvas = this.$refs.canvas;
// const overlay = this.$refs.overlay;
// const snapshot = this.$refs.snapshot; // ✅ Fix: define snapshot reference
// const ctx = canvas.getContext('2d');
// canvas.width = video.videoWidth;
// canvas.height = video.videoHeight;
// ctx.drawImage(video, 0, 0);
// const snapshotData = canvas.toDataURL('image/png'); // ✅ Correct data var
// this.$refs.hiddenInput.value = snapshotData;
// this.capturedPhoto = snapshotData;
// // ✅ Stop camera
// if (this.stream) this.stream.getTracks().forEach(track => track.stop());
// // ✅ Hide video + overlay
// video.classList.add('hidden');
// overlay.classList.add('hidden');
// // ✅ Show captured image
// snapshot.src = snapshotData; // ✅ Correct variable
// snapshot.classList.remove('hidden');
// alert("Photo captured!");
// this.stopDetection();
// },
// async verifyPhoto() {
// if (!this.capturedPhoto) {
// alert("Please capture a photo first!");
// return;
// }
// if (!this.isWorkerReady) {
// alert("OCR worker not ready yet!");
// return;
// }
// try {
// const img = new Image();
// img.src = this.capturedPhoto;
// img.onload = async () => {
// const canvas = document.createElement('canvas');
// canvas.width = img.width;
// canvas.height = img.height;
// const ctx = canvas.getContext('2d');
// ctx.drawImage(img, 0, 0);
// // const result = await Tesseract.recognize(canvas, 'eng', {
// // logger: m => console.log(m)
// // });
// // const result = await Tesseract.recognize(canvas, 'eng', {
// // logger: m => console.log(m.status, m.progress)
// // });
// const result = await this.ocrWorker.recognize(img);
// const detectedText = result.data.text.trim();
// // const matches = detectedText.match(/\d+/g) || [];
// // const serialRegex = /Serial\s*No[:\-]?\s*([A-Za-z0-9]+)/i;
// // const match = detectedText.match(serialRegex);
// // this.serialNumbers = matches.slice(0, 4); // take first 4 serials
// const serialWithLabelRegex = /Serial\s*No[:\-]?\s*([A-Za-z0-9]+)/i;
// const match = detectedText.match(serialWithLabelRegex);
// if (match && match[1]) {
// //Scenario Found "Serial No"
// this.serialNumbers = [match[1].trim()];
// console.log("Serial with Label:", this.serialNumbers[0]);
// }
// else
// {
// //Extract first 4 numbers
// const generalNums = detectedText.match(/[A-Za-z0-9]{4,}/g) || [];
// this.serialNumbers = generalNums.slice(0, 4);
// if (this.serialNumbers.length == 0) {
// alert("No serial numbers detected!");
// return;
// }
// console.log("Serial Numbers List:", this.serialNumbers);
// }
// this.$refs.hiddenInputSerials.value = JSON.stringify(this.serialNumbers);
// alert("Serial numbers:\n" + this.$refs.hiddenInputSerials.value);
// fetch('/save-serials-to-session', {
// method: 'POST',
// credentials: 'same-origin',
// headers: {
// 'Content-Type': 'application/json',
// 'X-CSRF-TOKEN': document.querySelector('meta[name="csrf-token"]').content,
// },
// body: JSON.stringify({
// serial_numbers: this.serialNumbers,
// }),
// })
// .then(response => response.json())
// .then(data => {
// console.log("✅ Session Updated:", data);
// alert("✅ Serial numbers saved to session!");
// })
// }
// } catch (err) {
// console.error("OCR verify error:", err);
// alert("OCR verify failed:\n" + (err.message || err));
// }
// },
// data() {
// return {
// tempCanvas: null,
// tempCtx: null,
// isDetecting: false,
// };
// },
// mounted() {
// this.tempCanvas = document.createElement('canvas');
// this.tempCtx = this.tempCanvas.getContext('2d');
// },
// async detectText() {
// if (this.isDetecting) return;
// this.isDetecting = true;
// const video = this.$refs.video;
// const overlay = this.$refs.overlay;
// const ctx = overlay.getContext('2d');
// if (!video.videoWidth) {
// this.isDetecting = false;
// return;
// }
// // 🔥 Reuse temp canvas (no memory leak)
// this.tempCanvas.width = video.videoWidth;
// this.tempCanvas.height = video.videoHeight;
// this.tempCtx.drawImage(video, 0, 0);
// try {
// const result = await Tesseract.recognize(this.tempCanvas, 'eng');
// const words = result.data.words;
// ctx.clearRect(0, 0, overlay.width, overlay.height);
// ctx.strokeStyle = 'lime';
// ctx.lineWidth = 2;
// words.forEach(w => {
// if (!w.bbox || w.confidence < 50) return;
// const { x0, y0, x1, y1 } = w.bbox;
// ctx.strokeRect(x0, y0, x1 - x0, y1 - y0);
// });
// } catch (err) {
// console.error("Live OCR error:", err);
// }
// this.isDetecting = false;
// }
// // async detectText() {
// // const video = this.$refs.video;
// // const overlay = this.$refs.overlay;
// // const ctx = overlay.getContext('2d');
// // if (!video.videoWidth) return;
// // const tempCanvas = document.createElement('canvas');
// // tempCanvas.width = video.videoWidth;
// // tempCanvas.height = video.videoHeight;
// // const tempCtx = tempCanvas.getContext('2d');
// // tempCtx.drawImage(video, 0, 0);
// // try {
// // const result = await Tesseract.recognize(tempCanvas, 'eng');
// // const words = result.data.words;
// // ctx.clearRect(0, 0, overlay.width, overlay.height);
// // ctx.strokeStyle = 'lime';
// // ctx.lineWidth = 2;
// // words.forEach(w => {
// // if (!w.bbox || w.confidence < 50) return;
// // const { x0, y0, x1, y1 } = w.bbox;
// // ctx.strokeRect(x0, y0, x1 - x0, y1 - y0);
// // });
// // } catch (err) {
// // console.error("Live OCR error:", err);
// // }
// // },
// async retakePhoto() {
// this.photoTaken = false;
// this.$refs.snapshot.classList.add('hidden');
// this.$refs.video.classList.remove('hidden');
// await this.initCamera();
// await new Promise(resolve => {
// this.$refs.video.onloadedmetadata = resolve;
// });
// const video = this.$refs.video;
// const overlay = this.$refs.overlay;
// overlay.width = video.videoWidth;
// overlay.height = video.videoHeight;
// // Clear old green boxes
// const ctx = overlay.getContext('2d');
// ctx.clearRect(0, 0, overlay.width, overlay.height);
// // Make overlay visible if hidden
// overlay.classList.remove('hidden');
// this.startDetection();
// },
// // startDetection() {
// // if (this.textDetectionInterval) clearInterval(this.textDetectionInterval);
// // this.textDetectionInterval = setInterval(() => this.detectText(), 1500);
// // },
// startDetection() {
// if (this.textDetectionInterval)
// clearInterval(this.textDetectionInterval);
// // Run IMMEDIATELY after retake
// this.detectText();
// // Then keep scanning every 1200ms
// this.textDetectionInterval = setInterval(() => {
// this.detectText();
// }, 1200);
// }
// stopDetection() {
// if (this.textDetectionInterval) {
// clearInterval(this.textDetectionInterval);
// this.textDetectionInterval = null;
// console.log("Text detection stopped");
// }
// }
// }
// }
/**
 * Alpine.js component factory for camera-based serial-number capture with a
 * live Tesseract OCR overlay.
 *
 * Expected $refs: video, overlay (canvas), canvas, snapshot (img),
 * hiddenInput, hiddenInputSerials.
 * External globals: Tesseract (tesseract.js), fetch, a csrf-token meta tag.
 *
 * NOTE(review): this block was reconstructed from a mangled diff (old and new
 * lines were interleaved without +/- markers); verify against the original
 * commit before shipping.
 */
function cameraCapture() {
    return {
        stream: null,                 // active MediaStream (null when camera is off)
        currentFacingMode: 'user',    // 'user' (front) or 'environment' (rear)
        textDetectionInterval: null,  // interval id for the live OCR loop
        capturedPhoto: null,          // data-URL of the captured frame
        serialNumbers: [],            // serials extracted by verifyPhoto()

        // OCR worker state
        ocrWorker: null,
        isWorkerReady: false,
        isDetecting: false,           // re-entrancy guard for detectText()

        // Reusable off-screen canvas (avoids a new canvas per frame)
        tempCanvas: null,
        tempCtx: null,

        /** Alpine init hook: prepare the temp canvas and OCR worker, then start the camera. */
        async init() {
            this.tempCanvas = document.createElement('canvas');
            this.tempCtx = this.tempCanvas.getContext('2d');
            await this.initWorker();
            await this.initCamera();
        },

        /** Create and initialize the Tesseract worker once (idempotent). */
        async initWorker() {
            if (this.ocrWorker) return; // already created
            console.log("⏳ Loading OCR worker...");
            this.ocrWorker = await Tesseract.createWorker({
                logger: m => console.log(m.status, m.progress)
            });
            await this.ocrWorker.loadLanguage("eng");
            await this.ocrWorker.initialize("eng");
            this.isWorkerReady = true;
            console.log("✅ OCR Worker Ready");
        },

        /** (Re)start the camera stream, size the overlay, and start detection. */
        async initCamera() {
            try {
                // Release any previous stream before requesting a new one.
                if (this.stream) {
                    this.stream.getTracks().forEach(t => t.stop());
                }
                this.stream = await navigator.mediaDevices.getUserMedia({
                    video: { facingMode: this.currentFacingMode }
                });
                const video = this.$refs.video;
                video.srcObject = this.stream;
                await new Promise(res => video.onloadedmetadata = res);
                video.play();
                // Resize overlay to match the video frame.
                const overlay = this.$refs.overlay;
                overlay.width = video.videoWidth;
                overlay.height = video.videoHeight;
                const ctx = overlay.getContext("2d");
                ctx.clearRect(0, 0, overlay.width, overlay.height);
                overlay.classList.remove("hidden");
                // Start OCR once per camera start.
                this.startDetection();
            } catch (err) {
                console.error("Camera error:", err);
                alert("Camera Error:\n" + err.message);
                this.stopDetection();
            }
        },

        /** Toggle between the front and rear cameras. */
        async switchCamera() {
            this.currentFacingMode =
                this.currentFacingMode === "user" ? "environment" : "user";
            await this.initCamera();
        },

        /** One live-OCR pass: recognize the current frame and draw word boxes. */
        async detectText() {
            if (!this.isWorkerReady) return;
            if (this.isDetecting) return; // previous pass still running
            this.isDetecting = true;
            const video = this.$refs.video;
            const overlay = this.$refs.overlay;
            const ctx = overlay.getContext("2d");
            if (!video.videoWidth) {
                this.isDetecting = false; // video metadata not ready yet
                return;
            }
            // Draw the video frame into the reusable canvas (no per-frame alloc).
            this.tempCanvas.width = video.videoWidth;
            this.tempCanvas.height = video.videoHeight;
            this.tempCtx.drawImage(video, 0, 0);
            try {
                const result = await this.ocrWorker.recognize(this.tempCanvas);
                const words = result.data.words;
                ctx.clearRect(0, 0, overlay.width, overlay.height);
                ctx.strokeStyle = "lime";
                ctx.lineWidth = 2;
                words.forEach(w => {
                    if (!w.bbox || w.confidence < 50) return; // skip low-confidence words
                    const { x0, y0, x1, y1 } = w.bbox;
                    ctx.strokeRect(x0, y0, x1 - x0, y1 - y0);
                });
            } catch (err) {
                console.error("Live OCR error:", err);
            }
            this.isDetecting = false;
        },

        /** Start the live-detection loop: one immediate pass, then every 1.2 s. */
        startDetection() {
            if (this.textDetectionInterval) {
                clearInterval(this.textDetectionInterval);
            }
            // run once instantly
            this.detectText();
            // then keep scanning every 1.2s
            this.textDetectionInterval = setInterval(() => {
                this.detectText();
            }, 1200);
        },

        /** Stop the live-detection loop if it is running (safe no-op otherwise). */
        stopDetection() {
            if (this.textDetectionInterval) {
                clearInterval(this.textDetectionInterval);
                this.textDetectionInterval = null;
                console.log("Text detection stopped");
            }
        },

        /** Freeze the current frame: store it, stop camera + detection, swap the UI. */
        async capturePhoto() {
            const video = this.$refs.video;
            const canvas = this.$refs.canvas;
            const ctx = canvas.getContext("2d");
            canvas.width = video.videoWidth;
            canvas.height = video.videoHeight;
            ctx.drawImage(video, 0, 0);
            const snapshotData = canvas.toDataURL("image/png");
            this.$refs.hiddenInput.value = snapshotData;
            this.capturedPhoto = snapshotData;
            // Stop detection + camera
            this.stopDetection();
            if (this.stream) this.stream.getTracks().forEach(t => t.stop());
            // Switch UI: hide live video/overlay, show the still image.
            video.classList.add("hidden");
            this.$refs.overlay.classList.add("hidden");
            const snap = this.$refs.snapshot;
            snap.src = snapshotData;
            snap.classList.remove("hidden");
            alert("Photo captured!");
        },

        /** Discard the captured photo and restart the live camera view. */
        async retakePhoto() {
            this.$refs.snapshot.classList.add("hidden");
            this.$refs.video.classList.remove("hidden");
            await this.initCamera();
            // Clear old green boxes and make the overlay visible again.
            const overlay = this.$refs.overlay;
            const ctx = overlay.getContext("2d");
            ctx.clearRect(0, 0, overlay.width, overlay.height);
            overlay.classList.remove("hidden");
            this.startDetection();
        },

        /** OCR the captured photo, extract serial numbers, and save them to the session. */
        async verifyPhoto() {
            if (!this.capturedPhoto) {
                alert("Please capture a photo first!");
                return;
            }
            if (!this.isWorkerReady) {
                alert("OCR worker not ready yet!");
                return;
            }
            try {
                const img = new Image();
                img.src = this.capturedPhoto;
                img.onload = async () => {
                    const result = await this.ocrWorker.recognize(img);
                    const detectedText = result.data.text.trim();
                    // Prefer an explicit "Serial No: XXXX" label…
                    const serialWithLabelRegex = /Serial\s*No[:\-]?\s*([A-Za-z0-9]+)/i;
                    const match = detectedText.match(serialWithLabelRegex);
                    if (match && match[1]) {
                        this.serialNumbers = [match[1].trim()];
                        console.log("Serial with Label:", this.serialNumbers[0]);
                    } else {
                        // …otherwise take the first 4 alphanumeric tokens of 4+ chars.
                        const generalNums = detectedText.match(/[A-Za-z0-9]{4,}/g) || [];
                        this.serialNumbers = generalNums.slice(0, 4);
                        if (this.serialNumbers.length === 0) {
                            alert("No serial numbers detected!");
                            return;
                        }
                        console.log("Serial Numbers List:", this.serialNumbers);
                    }
                    this.$refs.hiddenInputSerials.value = JSON.stringify(this.serialNumbers);
                    alert("Serial numbers:\n" + this.$refs.hiddenInputSerials.value);
                    fetch('/save-serials-to-session', {
                        method: 'POST',
                        credentials: 'same-origin',
                        headers: {
                            'Content-Type': 'application/json',
                            'X-CSRF-TOKEN': document.querySelector('meta[name="csrf-token"]').content,
                        },
                        body: JSON.stringify({
                            serial_numbers: this.serialNumbers,
                        }),
                    })
                        .then(response => response.json())
                        .then(data => {
                            console.log("✅ Session Updated:", data);
                            alert("✅ Serial numbers saved to session!");
                        })
                        // Fix: the original chain had no rejection handler.
                        .catch(err => console.error("Session save failed:", err));
                };
            } catch (err) {
                console.error("OCR verify error:", err);
                alert("OCR verify failed:\n" + (err.message || err));
            }
        }
    };
}
</script>