Added camera logic in OCR
This commit is contained in:
@@ -315,33 +315,9 @@ function cameraCapture() {
|
||||
photoTaken: false,
|
||||
photo1: '',
|
||||
textDetectionInterval: null,
|
||||
worker: null,
|
||||
|
||||
|
||||
async initCamera() {
|
||||
try {
|
||||
if (this.stream) this.stream.getTracks().forEach(track => track.stop());
|
||||
|
||||
const video = this.$refs.video;
|
||||
|
||||
this.stream = await navigator.mediaDevices.getUserMedia({
|
||||
video: { facingMode: this.currentFacingMode }
|
||||
});
|
||||
|
||||
this.$refs.video.srcObject = this.stream;
|
||||
await video.play(); // ✅ ensure camera actually starts
|
||||
|
||||
const overlay = this.$refs.overlay;
|
||||
overlay.width = video.videoWidth;
|
||||
overlay.height = video.videoHeight;
|
||||
|
||||
setTimeout(() => this.startDetection(), 300);
|
||||
//this.startDetection();
|
||||
} catch (err) {
|
||||
console.error("Camera error:", err);
|
||||
alert("Cannot access camera. Enable permissions or use HTTPS.");
|
||||
}
|
||||
},
|
||||
|
||||
// async initCamera() {
|
||||
// try {
|
||||
// if (this.stream) this.stream.getTracks().forEach(track => track.stop());
|
||||
@@ -350,21 +326,50 @@ function cameraCapture() {
|
||||
// video: { facingMode: this.currentFacingMode }
|
||||
// });
|
||||
|
||||
// const video = this.$refs.video;
|
||||
// video.srcObject = this.stream;
|
||||
// await video.play();
|
||||
|
||||
// const overlay = this.$refs.overlay;
|
||||
// overlay.width = video.videoWidth;
|
||||
// overlay.height = video.videoHeight;
|
||||
|
||||
// this.startTextDetection(); //auto detection after camera start
|
||||
// this.$refs.video.srcObject = this.stream;
|
||||
// //this.startDetection();
|
||||
// } catch (err) {
|
||||
// console.error("Camera error:", err);
|
||||
// alert("Camera access failed!");
|
||||
// alert("Cannot access camera. Enable permissions or use HTTPS.");
|
||||
// }
|
||||
// },
|
||||
|
||||
async initCamera() {
|
||||
try {
|
||||
if (this.stream) this.stream.getTracks().forEach(track => track.stop());
|
||||
|
||||
const video = this.$refs.video;
|
||||
this.stream = await navigator.mediaDevices.getUserMedia({
|
||||
video: { facingMode: this.currentFacingMode }
|
||||
});
|
||||
|
||||
video.srcObject = this.stream;
|
||||
|
||||
await new Promise(resolve => video.onloadedmetadata = resolve);
|
||||
|
||||
// Overlay size
|
||||
const overlay = this.$refs.overlay;
|
||||
overlay.width = video.videoWidth;
|
||||
overlay.height = video.videoHeight;
|
||||
|
||||
// Initialize Tesseract Worker
|
||||
if (!this.worker) {
|
||||
this.worker = Tesseract.createWorker({
|
||||
logger: m => console.log(m)
|
||||
});
|
||||
await this.worker.load();
|
||||
await this.worker.loadLanguage('eng');
|
||||
await this.worker.initialize('eng');
|
||||
}
|
||||
|
||||
this.startDetection();
|
||||
} catch (err) {
|
||||
console.error("Camera error:", err);
|
||||
alert("Cannot access camera. Enable permissions or use HTTPS.");
|
||||
}
|
||||
},
|
||||
|
||||
|
||||
async switchCamera() {
|
||||
this.currentFacingMode = this.currentFacingMode === 'user' ? 'environment' : 'user';
|
||||
await this.initCamera();
|
||||
@@ -517,33 +522,25 @@ function cameraCapture() {
|
||||
|
||||
if (!video.videoWidth) return;
|
||||
|
||||
// Clear overlay first
|
||||
ctx.clearRect(0, 0, overlay.width, overlay.height);
|
||||
|
||||
// Draw current video frame to temp canvas
|
||||
const tempCanvas = document.createElement("canvas");
|
||||
// Draw video frame to temp canvas
|
||||
const tempCanvas = document.createElement('canvas');
|
||||
tempCanvas.width = video.videoWidth;
|
||||
tempCanvas.height = video.videoHeight;
|
||||
const tempCtx = tempCanvas.getContext("2d");
|
||||
const tempCtx = tempCanvas.getContext('2d');
|
||||
tempCtx.drawImage(video, 0, 0);
|
||||
|
||||
// Use Tesseract worker for better performance
|
||||
if (!this.worker) {
|
||||
this.worker = Tesseract.createWorker({
|
||||
logger: m => console.log(m)
|
||||
});
|
||||
await this.worker.load();
|
||||
await this.worker.loadLanguage('eng');
|
||||
await this.worker.initialize('eng');
|
||||
}
|
||||
|
||||
// Run OCR in worker
|
||||
const { data: { words } } = await this.worker.recognize(tempCanvas);
|
||||
|
||||
ctx.strokeStyle = "lime";
|
||||
// Clear overlay
|
||||
ctx.clearRect(0, 0, overlay.width, overlay.height);
|
||||
|
||||
ctx.strokeStyle = 'lime';
|
||||
ctx.lineWidth = 2;
|
||||
|
||||
words.forEach(w => {
|
||||
if (!w.bbox || w.confidence < 50) return;
|
||||
|
||||
const { x0, y0, x1, y1 } = w.bbox;
|
||||
ctx.strokeRect(x0, y0, x1 - x0, y1 - y0);
|
||||
});
|
||||
|
||||
Reference in New Issue
Block a user