added detect text in ocr

dhanabalan
2025-10-25 08:31:28 +05:30
parent b8339b8b0c
commit e313b0be4b


@@ -298,6 +298,8 @@ document.addEventListener('DOMContentLoaded', () => {
 <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/cropperjs/1.5.13/cropper.min.css">
 <script src="https://cdnjs.cloudflare.com/ajax/libs/cropperjs/1.5.13/cropper.min.js"></script>
+<script src="https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision@latest"></script>
 <script>
@@ -309,6 +311,21 @@ function cameraCapture() {
     photo1: '',
+    // async initCamera() {
+    //   try {
+    //     if (this.stream) this.stream.getTracks().forEach(track => track.stop());
+    //     this.stream = await navigator.mediaDevices.getUserMedia({
+    //       video: { facingMode: this.currentFacingMode }
+    //     });
+    //     this.$refs.video.srcObject = this.stream;
+    //   } catch (err) {
+    //     console.error("Camera error:", err);
+    //     alert("Cannot access camera. Enable permissions or use HTTPS.");
+    //   }
+    // },
     async initCamera() {
       try {
         if (this.stream) this.stream.getTracks().forEach(track => track.stop());
@@ -317,10 +334,18 @@ function cameraCapture() {
           video: { facingMode: this.currentFacingMode }
         });
-        this.$refs.video.srcObject = this.stream;
+        const video = this.$refs.video;
+        video.srcObject = this.stream;
+        await video.play();
+        const overlay = this.$refs.overlay;
+        overlay.width = video.videoWidth;
+        overlay.height = video.videoHeight;
+        this.startTextDetection(); // auto detection after camera start
       } catch (err) {
         console.error("Camera error:", err);
-        alert("Cannot access camera. Enable permissions or use HTTPS.");
+        alert("Camera access failed!");
       }
     },
@@ -329,7 +354,6 @@ function cameraCapture() {
       await this.initCamera();
     },
     async capturePhoto() {
       const video = this.$refs.video;
       const canvas = this.$refs.canvas;
@@ -342,36 +366,38 @@ function cameraCapture() {
       const snapshot = this.$refs.snapshot;
       snapshot.src = canvas.toDataURL('image/png');
       // Wait until image is loaded
-      snapshot.onload = () => {
-        snapshot.classList.remove('hidden');
-        video.classList.add('hidden');
-        // Alpine reactive update inside nextTick
-        this.$nextTick(() => {
-          this.photoTaken = true;
-          // Destroy old cropper if exists
-          if (this.cropper) this.cropper.destroy();
-          // ✅ Use requestAnimationFrame to ensure browser painted the image
-          requestAnimationFrame(() => {
-            this.cropper = new Cropper(snapshot, {
-              aspectRatio: NaN,
-              dragMode: 'crop',
-              viewMode: 1,
-              autoCropArea: 0.8,
-              background: true,
-              movable: true,
-              zoomable: true,
-              responsive: true,
-            });
-            console.log("✅ Cropper initialized");
-          });
-          this.stopCamera(); // stop camera after Cropper starts
-        });
-      };
+      // snapshot.onload = () => {
+      //   snapshot.classList.remove('hidden');
+      //   video.classList.add('hidden');
+      //   // Alpine reactive update inside nextTick
+      //   this.$nextTick(() => {
+      //     this.photoTaken = true;
+      //     // Destroy old cropper if exists
+      //     if (this.cropper) this.cropper.destroy();
+      //     // ✅ Use requestAnimationFrame to ensure browser painted the image
+      //     requestAnimationFrame(() => {
+      //       this.cropper = new Cropper(snapshot, {
+      //         aspectRatio: NaN,
+      //         dragMode: 'crop',
+      //         viewMode: 1,
+      //         autoCropArea: 0.8,
+      //         background: true,
+      //         movable: true,
+      //         zoomable: true,
+      //         responsive: true,
+      //       });
+      //       console.log("✅ Cropper initialized");
+      //     });
+      //     this.stopCamera(); // stop camera after Cropper starts
+      //   });
+      // };
     },
@@ -467,6 +493,47 @@ function cameraCapture() {
       this.cropper?.destroy();
       await this.initCamera();
     },
+    async detectText() {
+      const video = this.$refs.video;
+      const overlay = this.$refs.overlay;
+      const ctx = overlay.getContext('2d');
+      // Draw the current video frame to a temporary canvas
+      const tempCanvas = document.createElement('canvas');
+      tempCanvas.width = video.videoWidth;
+      tempCanvas.height = video.videoHeight;
+      const tempCtx = tempCanvas.getContext('2d');
+      tempCtx.drawImage(video, 0, 0, tempCanvas.width, tempCanvas.height);
+      // Run Tesseract.js on the frame
+      const { data: { words } } = await Tesseract.recognize(tempCanvas.toDataURL(), 'eng');
+      // Clear the previous overlay
+      ctx.clearRect(0, 0, overlay.width, overlay.height);
+      // Draw a bounding box for each detected word
+      ctx.strokeStyle = 'red';
+      ctx.lineWidth = 2;
+      ctx.font = '18px Arial';
+      ctx.fillStyle = 'red';
+      words.forEach(word => {
+        if (word.bbox) {
+          const { x0, y0, x1, y1 } = word.bbox;
+          ctx.strokeRect(x0, y0, x1 - x0, y1 - y0);
+          // Optional: draw recognized text
+          // ctx.fillText(word.text, x0, y0 - 2);
+        }
+      });
+    },
+    // Periodically detect text; initCamera() calls this after the camera starts
+    startTextDetection() {
+      setInterval(() => this.detectText(), 500); // every 500ms
+    },
   }
 }
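
Note on dependencies and markup: detectText() calls Tesseract.recognize, which the @mediapipe/tasks-vision bundle added above does not provide, so Tesseract.js must be loaded separately. Below is a minimal sketch of the script include and the Alpine x-ref elements this component appears to assume (video, overlay, canvas, snapshot). The layout, the x-init hook, and the Tesseract.js version/CDN path are assumptions for illustration, not taken from this repository.

<!-- Assumed sketch: load Tesseract.js for detectText(); version and CDN are illustrative -->
<script src="https://cdn.jsdelivr.net/npm/tesseract.js@5/dist/tesseract.min.js"></script>

<!-- Assumed container: the component reads $refs.video, $refs.overlay, $refs.canvas, $refs.snapshot -->
<div x-data="cameraCapture()" x-init="initCamera()" style="position: relative;">
  <video x-ref="video" autoplay playsinline></video>
  <!-- Overlay canvas positioned over the video, where detectText() draws word boxes -->
  <canvas x-ref="overlay" style="position: absolute; top: 0; left: 0;"></canvas>
  <!-- Hidden capture canvas and snapshot image used by capturePhoto() / Cropper -->
  <canvas x-ref="canvas" class="hidden"></canvas>
  <img x-ref="snapshot" class="hidden" alt="Captured photo">
</div>

One practical caveat: Tesseract.recognize on a full video frame usually takes well over 500 ms, so the fixed 500 ms interval in startTextDetection() can stack overlapping recognitions; a busy flag or a longer interval keeps the overlay responsive.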