
Dec 04: [FIX] Bug Fixed 'face_recognized_attendance_login'

pull/301/merge
Cybrosys Technologies 7 months ago
parent
commit
bbbc8d0aa1
  1. 14
      face_recognized_attendance_login/__manifest__.py
  2. 6
      face_recognized_attendance_login/doc/RELEASE_NOTES.md
  3. 109
      face_recognized_attendance_login/models/hr_employee.py
  4. BIN
      face_recognized_attendance_login/static/description/assets/screenshots/img.png
  5. BIN
      face_recognized_attendance_login/static/description/assets/screenshots/img_1.png
  6. BIN
      face_recognized_attendance_login/static/description/assets/screenshots/img_2.png
  7. BIN
      face_recognized_attendance_login/static/description/assets/screenshots/img_3.png
  8. BIN
      face_recognized_attendance_login/static/description/assets/screenshots/img_4.png
  9. BIN
      face_recognized_attendance_login/static/description/assets/screenshots/img_5.png
  10. BIN
      face_recognized_attendance_login/static/description/assets/screenshots/img_6.png
  11. BIN
      face_recognized_attendance_login/static/description/assets/screenshots/img_7.png
  12. BIN
      face_recognized_attendance_login/static/description/assets/screenshots/img_8.png
  13. BIN
      face_recognized_attendance_login/static/description/assets/screenshots/img_9.png
  14. 73
      face_recognized_attendance_login/static/description/index.html
  15. 26
      face_recognized_attendance_login/static/src/css/my_attendance.css
  16. 1
      face_recognized_attendance_login/static/src/js/face-api.min.js
  17. 196
      face_recognized_attendance_login/static/src/js/kiosk_confirm.js
  18. 229
      face_recognized_attendance_login/static/src/js/my_attendance.js
  19. BIN
      face_recognized_attendance_login/static/src/js/weights/age_gender_model-shard1
  20. 618
      face_recognized_attendance_login/static/src/js/weights/age_gender_model-weights_manifest.json
  21. BIN
      face_recognized_attendance_login/static/src/js/weights/face_expression_model-shard1
  22. 606
      face_recognized_attendance_login/static/src/js/weights/face_expression_model-weights_manifest.json
  23. BIN
      face_recognized_attendance_login/static/src/js/weights/face_landmark_68_model-shard1
  24. 691
      face_recognized_attendance_login/static/src/js/weights/face_landmark_68_model-weights_manifest.json
  25. BIN
      face_recognized_attendance_login/static/src/js/weights/face_landmark_68_tiny_model-shard1
  26. 397
      face_recognized_attendance_login/static/src/js/weights/face_landmark_68_tiny_model-weights_manifest.json
  27. BIN
      face_recognized_attendance_login/static/src/js/weights/face_recognition_model-shard1
  28. 6
      face_recognized_attendance_login/static/src/js/weights/face_recognition_model-shard2
  29. 1462
      face_recognized_attendance_login/static/src/js/weights/face_recognition_model-weights_manifest.json
  30. BIN
      face_recognized_attendance_login/static/src/js/weights/mtcnn_model-shard1
  31. 402
      face_recognized_attendance_login/static/src/js/weights/mtcnn_model-weights_manifest.json
  32. BIN
      face_recognized_attendance_login/static/src/js/weights/ssd_mobilenetv1_model-shard1
  33. 137
      face_recognized_attendance_login/static/src/js/weights/ssd_mobilenetv1_model-shard2
  34. 1936
      face_recognized_attendance_login/static/src/js/weights/ssd_mobilenetv1_model-weights_manifest.json
  35. BIN
      face_recognized_attendance_login/static/src/js/weights/tiny_face_detector_model-shard1
  36. 273
      face_recognized_attendance_login/static/src/js/weights/tiny_face_detector_model-weights_manifest.json
  37. 50
      face_recognized_attendance_login/static/src/xml/attendance.xml
  38. 75
      face_recognized_attendance_login/static/src/xml/kiosk_confirm.xml
  39. 24
      face_recognized_attendance_login/views/login_templates.xml

14
face_recognized_attendance_login/__manifest__.py

@@ -21,7 +21,7 @@
#############################################################################
{
'name': 'Face Recognized Attendance Login',
'version': '16.0.1.0.0',
'version': '16.0.2.0.0',
'category': 'Human Resources',
'summary': """Mark the attendance of employee by recognizing their face""",
'description': """This module introduces a face recognition system in the
@@ -32,15 +32,19 @@
'maintainer': 'Cybrosys Techno Solutions',
'website': "https://www.cybrosys.com",
'depends': ['base', 'mail', 'hr', 'hr_attendance'],
'data': [
],
'assets': {
'web.assets_backend': [
'face_recognized_attendance_login/static/src/js/my_attendance.js',
'face_recognized_attendance_login/static/src/js/face-api.min.js',
'face_recognized_attendance_login/static/src/xml/attendance.xml',
'face_recognized_attendance_login/static/src/css/my_attendance.css',
'face_recognized_attendance_login/static/src/xml/kiosk_confirm.xml',
'face_recognized_attendance_login/static/src/js/kiosk_confirm.js',
]
},
'external_dependencies': {
'python': ['cv2', 'face_recognition', 'cmake', 'dlib', 'PIL',
'numpy'],
},
'images': ['static/description/banner.jpg'],
'license': 'LGPL-3',
'installable': True,

6
face_recognized_attendance_login/doc/RELEASE_NOTES.md

@@ -3,3 +3,9 @@
#### Version 16.0.1.0.0
##### ADD
- Initial Commit for Face Recognized Attendance Login
## Module <face_recognized_attendance_login>
#### 02.12.2024
#### Version 16.0.2.0.0
##### UPDT
- Moved face recognition from the server (OpenCV/dlib) to the client using face-api.js, and added a face recognition step to kiosk mode Check In/Check Out.

109
face_recognized_attendance_login/models/hr_employee.py

@@ -19,14 +19,14 @@
# If not, see <http://www.gnu.org/licenses/>.
#
#############################################################################
import base64
import cv2
import face_recognition
import numpy as np
import os
import time
from io import BytesIO
from PIL import Image
# import base64
# import cv2
# import face_recognition
# import numpy as np
# import os
# import time
# from io import BytesIO
# from PIL import Image
from odoo import api, models
@@ -44,84 +44,15 @@ class HrEmployee(models.Model):
to ensure that it's a human, not an image of employee"""
employee_pic = self.search(
[('user_id', '=', self.env.user.id)]).image_1920
sub_folder = os.path.abspath(os.path.dirname(__file__))
project_folder = os.path.abspath(os.path.join(sub_folder, os.pardir))
eye_cascade_path = os.path.join(project_folder, 'data',
'haarcascade_eye_tree_eyeglasses.xml')
face_cascade_path = os.path.join(project_folder, 'data',
'haarcascade_frontalface_default.xml')
face_cascade = cv2.CascadeClassifier(face_cascade_path)
eye_cascade = cv2.CascadeClassifier(eye_cascade_path)
binary_data = base64.b64decode(employee_pic)
image_bytes = BytesIO(binary_data)
pil_image = Image.open(image_bytes)
np_image = np.array(pil_image)
img = cv2.cvtColor(np_image, cv2.COLOR_BGR2RGB)
# Extract features from the referenced eye(s)
orb = cv2.ORB_create()
referenced_key_points, referenced_descriptors = orb.detectAndCompute(img, None)
encoded_face = face_recognition.face_encodings(img)
start_time = time.time()
camera_time = 0
face_recognized = 0
eyes_match_fail_index = 0
eyes_match_index = 0
cap = cv2.VideoCapture(0)
ret, frame = cap.read()
while ret:
ret, frame = cap.read()
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
gray = cv2.bilateralFilter(gray, 5, 1, 1)
faces = face_cascade.detectMultiScale(gray, 1.3, 5,
minSize=(200, 200))
if camera_time < 100:
camera_time = camera_time + 1
else:
break
cv2.putText(frame, "Please wait... your face is detecting",
(100, 100),
cv2.FONT_HERSHEY_PLAIN, 3, (0, 0, 255), 2)
if len(faces) == 1:
for (x, y, w, h) in faces:
frame = cv2.rectangle(frame, (x, y), (x + w, y + h),
(0, 255, 0), 2)
eyes = eye_cascade.detectMultiScale(gray, scaleFactor=1.3,
minNeighbors=5)
# Extract features from the eye(s) in the current frame
current_key_points, current_descriptors = orb.detectAndCompute(gray, None)
# Match the features of the current eye(s) to those in
# the reference eye(s)
bf = cv2.BFMatcher(cv2.NORM_HAMMING, crossCheck=True)
matches = bf.match(referenced_descriptors, current_descriptors)
good_matches = [m for m in matches if m.distance < 50]
if len(good_matches) >= 10:
eyes_match_index = eyes_match_index + 1
else:
eyes_match_fail_index = eyes_match_fail_index + 1
if len(eyes) == 0:
img_frame = cv2.resize(frame, (0, 0), None, 0.25, 0.25)
img_frame = cv2.cvtColor(img_frame, cv2.COLOR_BGR2RGB)
face_current_frame = face_recognition.face_locations(
img_frame)
encode_current_frame = face_recognition.face_encodings(
img_frame,
face_current_frame)
for encode_face, face_loc in zip(encode_current_frame,
face_current_frame):
face_matches = face_recognition.compare_faces(
encoded_face, encode_face)
face_distance = face_recognition.face_distance(
encoded_face, encode_face)
match_index = np.argmin(face_distance)
elapsed_time = time.time() - start_time
if face_matches[match_index] and eyes_match_index > eyes_match_fail_index:
face_recognized = 1
if elapsed_time > 6:
time.sleep(1)
if camera_time >= 100:
break
cv2.imshow('frame', frame)
cv2.waitKey(1)
cap.release()
cv2.destroyAllWindows()
return face_recognized
return employee_pic
@api.model
def get_kiosk_image(self, id):
"""This function is used for attendance Check In and Check Out in kiosk mode.
It works by compare the image of employee that already uploaded
to the image that get currently from the webcam. This function
also detect the blinking of eyes and calculate the eye match index,
to ensure that it's a human, not an image of employee"""
employee_pic = self.browse(id).image_1920
return employee_pic
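With this change the server no longer runs OpenCV/dlib at all: both get_login_screen and get_kiosk_image now simply return the stored reference photo, and the matching happens in the browser. A minimal sketch of the new call contract, assuming the legacy Odoo 16 widget context used in the JS files below (rpc is web.rpc; employee_id is a hypothetical variable here):

    // Sketch: fetch the stored employee photo and wrap it in an <img>
    // element that face-api.js can consume (mirrors getLabeledFaceDescriptions()).
    const employee_image_base64 = await rpc.query({
        model: 'hr.employee',
        method: 'get_kiosk_image',
        args: [employee_id], // id of the employee picked in the kiosk
    });
    const referenceImage = new Image();
    referenceImage.src = "data:image/jpeg;base64," + employee_image_base64;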

BIN
face_recognized_attendance_login/static/description/assets/screenshots/img.png

Binary file not shown.

After

Width:  |  Height:  |  Size: 628 KiB

BIN
face_recognized_attendance_login/static/description/assets/screenshots/img_1.png

Binary file not shown.

After

Width:  |  Height:  |  Size: 295 KiB

BIN
face_recognized_attendance_login/static/description/assets/screenshots/img_2.png

Binary file not shown.

After

Width:  |  Height:  |  Size: 664 KiB

BIN
face_recognized_attendance_login/static/description/assets/screenshots/img_3.png

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.0 MiB

BIN
face_recognized_attendance_login/static/description/assets/screenshots/img_4.png

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.0 MiB

BIN
face_recognized_attendance_login/static/description/assets/screenshots/img_5.png

Binary file not shown.

After

Width:  |  Height:  |  Size: 673 KiB

BIN
face_recognized_attendance_login/static/description/assets/screenshots/img_6.png

Binary file not shown.

After

Width:  |  Height:  |  Size: 877 KiB

BIN
face_recognized_attendance_login/static/description/assets/screenshots/img_7.png

Binary file not shown.

After

Width:  |  Height:  |  Size: 661 KiB

BIN
face_recognized_attendance_login/static/description/assets/screenshots/img_8.png

Binary file not shown.

After

Width:  |  Height:  |  Size: 654 KiB

BIN
face_recognized_attendance_login/static/description/assets/screenshots/img_9.png

Binary file not shown.

After

Width:  |  Height:  |  Size: 661 KiB

73
face_recognized_attendance_login/static/description/index.html

@@ -116,45 +116,6 @@
</div>
</div>
<!-- END OF OVERVIEW SECTION -->
<!-- CONFIGURATION -->
<div class="d-flex align-items-center" style="border-bottom: 2px solid #714B67; padding: 15px 0px;" id="configuration">
<div class="d-flex justify-content-center align-items-center mr-2"
style="background-color: #F5F5F5; border-radius: 0px; width: 40px; height: 40px;">
<img src="assets/misc/features.png" />
</div>
<h2 class="mt-2" style="font-family: 'Montserrat', sans-serif; font-size: 24px; font-weight: bold;">Configuration
</h2>
</div>
</div>
<div class="row" style="font-family: 'Montserrat', sans-serif; font-weight: 400; font-size: 14px; line-height: 200%;">
<div class="col-sm-12 col-md-6">
<div class="d-flex align-items-center" style="margin-top: 40px; margin-bottom: 40px">
<img src="assets/misc/check-box.png" class="mr-2" />
<div class="row" style="font-family: 'Montserrat', sans-serif; font-weight: 400; font-size: 14px; line-height: 200%;">
<div class="col py-4">
For the proper working of this module, please ensure that the following are installed before the installation of this module
<li> <b><i>Opencv</i></b> -For ubuntu 20.04 you can download the Opencv from following link</br>
<a href="https://linuxize.com/post/how-to-install-opencv-on-ubuntu-20-04/">https://linuxize.com/post/how-to-install-opencv-on-ubuntu-20-04/</a>
or<br/> you can use the command - sudo apt install libopencv-dev python3-opencv</li>
<li><b><i>cmake</i></b> - pip install cmake</li>
<li><b><i>dlib</i></b> - pip install dlib</li>
<li><b><i>PIL</i></b> - pip install Pillow</li>
<li><b><i>numpy</i></b> - pip install numpy</li>
<li><b><i>face_recognition</i></b> - pip install face-recognition</li>
<li>Also, ensure that you provide the camera access</li>
</div>
</div>
</div>
</div>
<!-- END OF CONFIGURATION SECTION -->
<!-- FEATURES SECTION -->
<div class="d-flex align-items-center" style="border-bottom: 2px solid #714B67; padding: 15px 0px;" id="features">
<div class="d-flex justify-content-center align-items-center mr-2"
@@ -198,23 +159,47 @@
<div style="display: block; margin: 30px auto;">
<h3 style="font-family: 'Montserrat', sans-serif; font-size: 18px; font-weight: bold;">From attendance page you can click CheckIn button</h3>
<img src="assets/screenshots/fr_01.png" class="img-thumbnail">
<img src="assets/screenshots/img_5.png" class="img-thumbnail">
</div>
<div style="display: block; margin: 30px auto;">
<h3 style="font-family: 'Montserrat', sans-serif; font-size: 18px; font-weight: bold;">A wizard will open, that access the webcam of user</h3>
<img src="assets/screenshots/fr_02.png" class="img-thumbnail">
<img src="assets/screenshots/img_6.png" class="img-thumbnail" style="width:100%;">
</div>
<div style="display: block; margin: 30px auto;">
<h3 style="font-family: 'Montserrat', sans-serif; font-size: 18px; font-weight: bold;">If the face is recognized by the
system, the employee can Check In and later Check Out by clicking the Check Out button</h3>
<img src="assets/screenshots/fr_03.png" class="img-thumbnail">
<img src="assets/screenshots/img_8.png" class="img-thumbnail">
<img src="assets/screenshots/img_9.png" class="img-thumbnail">
</div>
<div style="display: block; margin: 30px auto;">
<h3 style="font-family: 'Montserrat', sans-serif; font-size: 18px; font-weight: bold;">A wizard will open, that access the webcam of user
<h3 style="font-family: 'Montserrat', sans-serif; font-size: 18px; font-weight: bold;">In the kiosk mode section also the face recognition step is added,
GO to Kiosk Mode, Click "Identify Manually".
</h3>
<img src="assets/screenshots/img.png" class="img-thumbnail">
</div>
<div style="display: block; margin: 30px auto;">
<h3 style="font-family: 'Montserrat', sans-serif; font-size: 18px; font-weight: bold;">We can see the list of employees
</h3>
<img src="assets/screenshots/img_1.png" class="img-thumbnail">
</div>
<div style="display: block; margin: 30px auto;">
<h3 style="font-family: 'Montserrat', sans-serif; font-size: 18px; font-weight: bold;">By choosing employee , we can see the Check in and Checkout option
</h3>
<img src="assets/screenshots/img_2.png" class="img-thumbnail">
</div>
<div style="display: block; margin: 30px auto;">
<h3 style="font-family: 'Montserrat', sans-serif; font-size: 18px; font-weight: bold;">By clicking Checkin/Checkout , the face recognition starts. If it is not a match , it will notify.
</h3>
<img src="assets/screenshots/img_3.png" class="img-thumbnail" style="width:100%;">
</div>
<div style="display: block; margin: 30px auto;">
<h3 style="font-family: 'Montserrat', sans-serif; font-size: 18px; font-weight: bold;">If it matches we can check in .
</h3>
<img src="assets/screenshots/fr_02.png" class="img-thumbnail">
<img src="assets/screenshots/img_5.png" class="img-thumbnail">
<img src="assets/screenshots/img_6.png" class="img-thumbnail" style="width:100%">
<img src="assets/screenshots/img_7.png" class="img-thumbnail" style="width:100%">
</div>
<!-- END OF SCREENSHOTS SECTION -->

26
face_recognized_attendance_login/static/src/css/my_attendance.css

@@ -0,0 +1,26 @@
.qr_video {
display: block; /* Initially visible */
}
.qr_video.hidden {
display: none; /* Hides the webcam feed when the close button is clicked */
}
#close_qr_scanner {
cursor: pointer;
color: red;
font-weight: bold;
}
.qr_video_kiosk {
display: block; /* Initially visible */
}
.qr_video_kiosk.hidden {
display: none; /* Hides the kiosk webcam feed when the close button is clicked */
}
#close_qr_scanner_kiosk {
cursor: pointer;
color: red;
font-weight: bold;
}
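The hidden modifier above is expected to be toggled from JavaScript when the close button is clicked. A minimal sketch of that wiring, using only the selectors defined in this stylesheet (the standalone handler is hypothetical; in the module the toggle is done by the widgets below):

    // Sketch: hide the webcam feed by adding the .hidden modifier.
    document.querySelector('#close_qr_scanner').addEventListener('click', () => {
        document.querySelector('.qr_video').classList.add('hidden');
    });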

1
face_recognized_attendance_login/static/src/js/face-api.min.js

File diff suppressed because one or more lines are too long

196
face_recognized_attendance_login/static/src/js/kiosk_confirm.js

@@ -0,0 +1,196 @@
/** @odoo-module **/
import { patch } from "@web/core/utils/patch";
const KioskConfirm = require("hr_attendance.kiosk_confirm")
const session = require('web.session');
var rpc = require('web.rpc');
const MODEL_URL = '/face_recognized_attendance_login/static/src/js/weights';
faceapi.nets.ssdMobilenetv1.loadFromUri(MODEL_URL);
faceapi.nets.faceLandmark68Net.loadFromUri(MODEL_URL);
faceapi.nets.faceRecognitionNet.loadFromUri(MODEL_URL);
faceapi.nets.tinyFaceDetector.load(MODEL_URL);
faceapi.nets.faceLandmark68TinyNet.load(MODEL_URL);
faceapi.nets.faceExpressionNet.load(MODEL_URL);
faceapi.nets.ageGenderNet.load(MODEL_URL);
patch(KioskConfirm.prototype,'face_recognized_attendance_login.kiosk',{
events: {
"click .o_hr_attendance_back_button": function () { this.do_action(this.next_action, {clear_breadcrumbs: true}); },
"click .o_hr_attendance_sign_in_out_icon": _.debounce(async function () {
await this.startWebcam();
}, 200, true),
},
// -------To start the camera-------
async startWebcam() {
const video = this.el.querySelector('#video');
try {
if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia) {
throw new Error('getUserMedia is not supported in this browser.');
}
const stream = await navigator.mediaDevices.getUserMedia({ video: true, audio: false });
video.srcObject = stream;
video.onloadedmetadata = () => {
video.play();
};
this.faceRecognition(video);
} catch (error) {
console.error('An error occurred while accessing the camera:', error);
this.__parentedParent.notifications.add(
'Unable to access webcam. Please check your device permissions or use a supported browser.', {
title: 'Webcam Error',
type: 'danger',
sticky: true,
className: "p-4"
}
);
}
},
// -----To start the face recognition-----------
async faceRecognition(video) {
const labeledFaceDescriptors = await this.getLabeledFaceDescriptions(video);
if (!labeledFaceDescriptors) {
console.error('No labeled face descriptors available.');
this.stopWebcamAndDetection();
return;
}
if (!this.faceMatcher) {
if (labeledFaceDescriptors.descriptor) {
this.faceMatcher = new faceapi.FaceMatcher([labeledFaceDescriptors.descriptor]);
} else {
console.error("Could not get face descriptor from reference image");
this.__parentedParent.notifications.add("Failed to initialize face recognition. Please upload a new, properly formatted image.", {
type: "danger",
title: "Image detection failed!",
});
this.stopWebcamAndDetection();
return;
}
}
let attendanceMarked = false;
let notificationSent = false;
this.faceRecognitionInterval = setInterval(async () => {
try {
const detections = await faceapi
.detectAllFaces(video)
.withFaceLandmarks()
.withFaceDescriptors();
if (detections.length === 0) {
if (!notificationSent) {
this.__parentedParent.notifications.add(
'No face detected.', {
title: 'Detection Failed!',
type: 'danger',
sticky: false,
className: "p-4"
}
);
notificationSent = true;
}
this.stopWebcamAndDetection();
return;
}
detections.forEach((detection) => {
const match = this.faceMatcher.findBestMatch(detection.descriptor);
if (match._distance < 0.4 && !attendanceMarked) {
const modal = this.el.querySelector('#video');
if (modal) {
modal.style.display = 'none';
}
attendanceMarked = true;
notificationSent = false;
this.markAttendance();
clearInterval(this.faceRecognitionInterval);
this.stopWebcamAndDetection();
}
});
if (!attendanceMarked && !notificationSent) {
this.__parentedParent.notifications.add(
'Face is not recognized.', {
title: 'No Match!',
type: 'danger',
sticky: false,
className: "p-4"
}
);
notificationSent = true;
this.stopWebcamAndDetection();
}
} catch (error) {
console.error('Error during face recognition:', error);
this.stopWebcamAndDetection();
}
}, 100);
},
// ---------Fetch labeled face descriptions (employee's face data)------
async getLabeledFaceDescriptions(video) {
const employee_image_base64 = await rpc.query({
model: 'hr.employee',
method: 'get_kiosk_image',
args: [this.employee_id]
});
if (employee_image_base64) {
const employee_image = new Image();
employee_image.src = "data:image/jpeg;base64," + employee_image_base64;
try {
const detections = await faceapi
.detectSingleFace(employee_image)
.withFaceLandmarks()
.withFaceExpressions()
.withFaceDescriptor();
if (!detections) {
console.error('No face detected in the image.');
this.__parentedParent.notifications.add(
'No face detected in the image. Please upload a new, properly formatted image in the profile.', {
title: 'Image detection failed!',
type: 'danger',
sticky: false,
className: "p-4"
}
);
return;
}
return detections;
} catch (error) {
console.error('Error during face detection:', error);
}
} else {
console.error('No image data found for the employee.');
}
},
// ----------Function to stop webcam and face detection-----
stopWebcamAndDetection() {
const video = this.el.querySelector('#video');
if (video.srcObject) {
const stream = video.srcObject;
const tracks = stream.getTracks();
tracks.forEach(track => track.stop());
video.srcObject = null;
}
if (this.faceRecognitionInterval) {
clearInterval(this.faceRecognitionInterval);
this.faceRecognitionInterval = null;
}
this.faceMatcher = null;
},
// ------------Redirecting to welcome/checkout page ----------------------------------
markAttendance() {
const self = this;
this._rpc({
model: 'hr.employee',
method: 'attendance_manual',
args: [[this.employee_id], 'hr_attendance.hr_attendance_action_my_attendances']
}).then((result) => {
if (result.action) {
self.do_action(result.action);
} else if (result.warning) {
self.do_warn(result.warning);
}
}).catch((error) => {
console.error('Error marking attendance:', error);
});
},
})
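One caveat with the module-level setup in this file: loadFromUri() and load() return Promises that are never awaited, so a very fast first click could start detection before the weights finish downloading. A minimal sketch of a safer variant, under the same face-api.js globals (loadFaceApiModels is a hypothetical helper, not part of the module):

    // Sketch: await the model downloads before allowing detection to start.
    const MODEL_URL = '/face_recognized_attendance_login/static/src/js/weights';
    async function loadFaceApiModels() {
        await Promise.all([
            faceapi.nets.ssdMobilenetv1.loadFromUri(MODEL_URL),
            faceapi.nets.faceLandmark68Net.loadFromUri(MODEL_URL),
            faceapi.nets.faceRecognitionNet.loadFromUri(MODEL_URL),
        ]);
    }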

229
face_recognized_attendance_login/static/src/js/my_attendance.js

@@ -1,42 +1,205 @@
odoo.define('face_recognized_attendance_login.my_attendance', function(require){
"use strict";
/**
* This file inherit the class MyAttendances, and added the functionality, that
the login/logout is possible only after the face detection
*/
var core = require('web.core');
var Widget = require('web.Widget');
var rpc = require('web.rpc');
var MyAttendances = require('hr_attendance.my_attendances');
var login = 0
// Login made possible, if and only if the captured image and face of the
// employee matched
MyAttendances.include({
update_attendance: async function () {
await rpc.query({
model:'hr.employee',
method:'get_login_screen'
}).then(function (data) {
login = data
});
if (login==1){
var self = this;
this._rpc({
odoo.define('face_recognized_attendance_login.my_attendance', function(require) {
"use strict";
// Required Odoo dependencies
var core = require('web.core');
var Widget = require('web.Widget');
var rpc = require('web.rpc');
var MyAttendances = require('hr_attendance.my_attendances');
var _t = core._t;
var login = 0;
const MODEL_URL = '/face_recognized_attendance_login/static/src/js/weights';
// Load face-api.js models
faceapi.nets.ssdMobilenetv1.loadFromUri(MODEL_URL);
faceapi.nets.faceLandmark68Net.loadFromUri(MODEL_URL);
faceapi.nets.faceRecognitionNet.loadFromUri(MODEL_URL);
faceapi.nets.tinyFaceDetector.load(MODEL_URL);
faceapi.nets.faceLandmark68TinyNet.load(MODEL_URL);
faceapi.nets.faceExpressionNet.load(MODEL_URL);
faceapi.nets.ageGenderNet.load(MODEL_URL);
// Extend MyAttendances widget
MyAttendances.include({
events: _.extend({}, MyAttendances.prototype.events, {
'click #close_qr_scanner': 'stopWebcamAndDetection',
}),
update_attendance: async function() {
this.faceMatcher = null;
this.el.querySelector('.close_button').classList.remove('d-none'); // Show the close button
await this.startWebcam();
},
//--------------------------------------------------------------------
async startWebcam() {
const video = this.el.querySelector('#video');
console.log("navigator",navigator)
try {
if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia) {
throw new Error('Webcam access is not supported or allowed in this browser.');
}
const stream = await navigator.mediaDevices.getUserMedia({ video: true, audio: false });
video.srcObject = stream;
video.onloadedmetadata = () => {
video.play();
};
this.faceRecognition(video);
} catch (error) {
this.el.querySelector('.close_button').classList.add('d-none');
this.__parentedParent.notifications.add(
'Unable to access webcam. Please check your device permissions or use a supported browser.', {
title: 'Webcam Error',
type: 'danger',
sticky: true,
className: "p-4"
}
);
}
},
// --------------------Function to stop webcam and face detection------------
stopWebcamAndDetection() {
const video = this.el.querySelector('#video');
this.el.querySelector('.close_button').classList.add('d-none');
if (video.srcObject) {
const stream = video.srcObject;
const tracks = stream.getTracks();
tracks.forEach(track => track.stop());
video.srcObject = null;
}
if (this.faceRecognitionInterval) {
clearInterval(this.faceRecognitionInterval);
this.faceRecognitionInterval = null;
}
this.faceMatcher = null;
console.log('Camera and detection stopped.');
},
//------------------ Fetch labeled face descriptions (employee's face data)------
async getLabeledFaceDescriptions(video) {
const employee_image_base64 = await rpc.query({
model: 'hr.employee',
method: 'get_login_screen',
});
if (employee_image_base64) {
const employee_image = new Image();
employee_image.src = "data:image/jpeg;base64," + employee_image_base64;
try {
const detections = await faceapi
.detectSingleFace(employee_image)
.withFaceLandmarks()
.withFaceExpressions()
.withFaceDescriptor();
if (!detections) {
console.error('No face detected in the image.');
this.__parentedParent.notifications.add(
'No face detected in the image. Please upload a new, properly formatted image in the profile.', {
title: 'Image detection failed!',
type: 'danger',
sticky: false,
className: "p-4"
}
);
return;
}
return detections;
} catch (error) {
console.error('Error during face detection:', error);
}
} else {
console.error('No image data found for the employee.');
}
},
//----------------------------- Face recognition logic---------------
async faceRecognition(video) {
const labeledFaceDescriptors = await this.getLabeledFaceDescriptions(video);
if (!labeledFaceDescriptors) {
console.error('No labeled face descriptors available.');
this.stopWebcamAndDetection();
return;
}
if (!this.faceMatcher) {
if (labeledFaceDescriptors.descriptor) {
this.faceMatcher = new faceapi.FaceMatcher([labeledFaceDescriptors.descriptor]);
} else {
console.error("Could not get face descriptor from reference image");
this.__parentedParent.notifications.add("Failed to initialize face recognition. Please upload a new, properly formatted image.", {
type: "danger",
title: "Image detection failed!",
});
this.stopWebcamAndDetection();
return;
}
}
let attendanceMarked = false;
let notificationSent = false;
this.faceRecognitionInterval = setInterval(async () => {
try {
const detections = await faceapi
.detectAllFaces(video)
.withFaceLandmarks()
.withFaceDescriptors();
if (detections.length === 0) {
if (!notificationSent) {
this.__parentedParent.notifications.add(
'No face detected.', {
title: 'Detection Failed!',
type: 'danger',
sticky: false,
className: "p-4"
}
);
notificationSent = true; // Prevent duplicate notifications
}
this.stopWebcamAndDetection();
return;
}
detections.forEach((detection) => {
const match = this.faceMatcher.findBestMatch(detection.descriptor);
if (match._distance < 0.4 && !attendanceMarked) {
const modal = this.el.querySelector('#video');
if (modal) {
modal.style.display = 'none';
}
attendanceMarked = true; // Set flag to prevent re-matching
notificationSent = false; // Reset notification flag
this.markAttendance();
clearInterval(this.faceRecognitionInterval);
this.stopWebcamAndDetection(); // Stop webcam and detection
}
});
if (!attendanceMarked && !notificationSent) {
this.__parentedParent.notifications.add(
'Face is not recognized.', {
title: 'No Match!',
type: 'danger',
sticky: false,
className: "p-4"
}
);
notificationSent = true;
this.stopWebcamAndDetection();
}
} catch (error) {
console.error('Error during face recognition:', error);
this.stopWebcamAndDetection();
}
}, 100);
},
// ------------Redirecting to welcome/checkout page ----------------------------------
markAttendance() {
const self = this;
this._rpc({
model: 'hr.employee',
method: 'attendance_manual',
args: [[self.employee.id], 'hr_attendance.hr_attendance_action_my_attendances'],
})
.then(function(result) {
args: [[self.employee.id], 'hr_attendance.hr_attendance_action_my_attendances']
}).then((result) => {
if (result.action) {
self.do_action(result.action);
} else if (result.warning) {
self.do_warn(result.warning);
}
}).catch((error) => {
console.error('Error marking attendance:', error);
});
}
else{
window.alert("Failed to recognize the face. Please try again....")
}
}
});
},
});
});
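In both files the match._distance < 0.4 test compares 128-dimensional face descriptors by Euclidean distance, where lower means more similar; 0.4 is stricter than face-api.js's default FaceMatcher threshold of 0.6. A small sketch of the same check without reaching into the private _distance field (the two detection variables are hypothetical):

    // Sketch: explicit descriptor comparison, equivalent to the threshold test above.
    const distance = faceapi.euclideanDistance(
        referenceDetection.descriptor, // from the stored profile photo
        liveDetection.descriptor       // from the current webcam frame
    );
    const isMatch = distance < 0.4; // module's cutoff; face-api.js defaults to 0.6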

BIN
face_recognized_attendance_login/static/src/js/weights/age_gender_model-shard1

Binary file not shown.

618
face_recognized_attendance_login/static/src/js/weights/age_gender_model-weights_manifest.json

@@ -0,0 +1,618 @@
[
{
"weights": [
{
"name": "entry_flow/conv_in/filters",
"shape": [
3,
3,
3,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005431825039433498,
"min": -0.7441600304023892
}
},
{
"name": "entry_flow/conv_in/bias",
"shape": [
32
],
"dtype": "float32"
},
{
"name": "entry_flow/reduction_block_0/separable_conv0/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005691980614381678,
"min": -0.6090419257388395
}
},
{
"name": "entry_flow/reduction_block_0/separable_conv0/pointwise_filter",
"shape": [
1,
1,
32,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.009089225881239947,
"min": -1.1179747833925135
}
},
{
"name": "entry_flow/reduction_block_0/separable_conv0/bias",
"shape": [
64
],
"dtype": "float32"
},
{
"name": "entry_flow/reduction_block_0/separable_conv1/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.00683894624897078,
"min": -0.8138346036275228
}
},
{
"name": "entry_flow/reduction_block_0/separable_conv1/pointwise_filter",
"shape": [
1,
1,
64,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.011632566358528886,
"min": -1.3028474321552352
}
},
{
"name": "entry_flow/reduction_block_0/separable_conv1/bias",
"shape": [
64
],
"dtype": "float32"
},
{
"name": "entry_flow/reduction_block_0/expansion_conv/filters",
"shape": [
1,
1,
32,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010254812240600587,
"min": -0.9229331016540528
}
},
{
"name": "entry_flow/reduction_block_0/expansion_conv/bias",
"shape": [
64
],
"dtype": "float32"
},
{
"name": "entry_flow/reduction_block_1/separable_conv0/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0052509616403018725,
"min": -0.6406173201168285
}
},
{
"name": "entry_flow/reduction_block_1/separable_conv0/pointwise_filter",
"shape": [
1,
1,
64,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010788509424994973,
"min": -1.4564487723743214
}
},
{
"name": "entry_flow/reduction_block_1/separable_conv0/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "entry_flow/reduction_block_1/separable_conv1/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.00553213918910307,
"min": -0.7025816770160899
}
},
{
"name": "entry_flow/reduction_block_1/separable_conv1/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.013602388606351965,
"min": -1.6186842441558837
}
},
{
"name": "entry_flow/reduction_block_1/separable_conv1/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "entry_flow/reduction_block_1/expansion_conv/filters",
"shape": [
1,
1,
64,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.007571851038465313,
"min": -1.158493208885193
}
},
{
"name": "entry_flow/reduction_block_1/expansion_conv/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "middle_flow/main_block_0/separable_conv0/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005766328409606335,
"min": -0.6688940955143349
}
},
{
"name": "middle_flow/main_block_0/separable_conv0/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.012136116214826995,
"min": -1.5776951079275094
}
},
{
"name": "middle_flow/main_block_0/separable_conv0/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "middle_flow/main_block_0/separable_conv1/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.004314773222979377,
"min": -0.5652352922102984
}
},
{
"name": "middle_flow/main_block_0/separable_conv1/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.01107162026798024,
"min": -1.2400214700137868
}
},
{
"name": "middle_flow/main_block_0/separable_conv1/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "middle_flow/main_block_0/separable_conv2/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0036451735917259667,
"min": -0.4848080876995536
}
},
{
"name": "middle_flow/main_block_0/separable_conv2/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.008791744942758598,
"min": -1.134135097615859
}
},
{
"name": "middle_flow/main_block_0/separable_conv2/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "middle_flow/main_block_1/separable_conv0/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.004915751896652521,
"min": -0.6095532351849126
}
},
{
"name": "middle_flow/main_block_1/separable_conv0/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010868691463096469,
"min": -1.3368490499608656
}
},
{
"name": "middle_flow/main_block_1/separable_conv0/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "middle_flow/main_block_1/separable_conv1/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005010117269029804,
"min": -0.6012140722835765
}
},
{
"name": "middle_flow/main_block_1/separable_conv1/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010311148213405235,
"min": -1.3816938605963016
}
},
{
"name": "middle_flow/main_block_1/separable_conv1/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "middle_flow/main_block_1/separable_conv2/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.004911523706772748,
"min": -0.7367285560159123
}
},
{
"name": "middle_flow/main_block_1/separable_conv2/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.008976466047997568,
"min": -1.2207993825276693
}
},
{
"name": "middle_flow/main_block_1/separable_conv2/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "exit_flow/reduction_block/separable_conv0/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005074804436926748,
"min": -0.7104726211697447
}
},
{
"name": "exit_flow/reduction_block/separable_conv0/pointwise_filter",
"shape": [
1,
1,
128,
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.011453078307357489,
"min": -1.4545409450344011
}
},
{
"name": "exit_flow/reduction_block/separable_conv0/bias",
"shape": [
256
],
"dtype": "float32"
},
{
"name": "exit_flow/reduction_block/separable_conv1/depthwise_filter",
"shape": [
3,
3,
256,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.007741751390344957,
"min": -1.1380374543807086
}
},
{
"name": "exit_flow/reduction_block/separable_conv1/pointwise_filter",
"shape": [
1,
1,
256,
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.011347713189966538,
"min": -1.497898141075583
}
},
{
"name": "exit_flow/reduction_block/separable_conv1/bias",
"shape": [
256
],
"dtype": "float32"
},
{
"name": "exit_flow/reduction_block/expansion_conv/filters",
"shape": [
1,
1,
128,
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006717281014311547,
"min": -0.8329428457746318
}
},
{
"name": "exit_flow/reduction_block/expansion_conv/bias",
"shape": [
256
],
"dtype": "float32"
},
{
"name": "exit_flow/separable_conv/depthwise_filter",
"shape": [
3,
3,
256,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0027201742518181892,
"min": -0.3237007359663645
}
},
{
"name": "exit_flow/separable_conv/pointwise_filter",
"shape": [
1,
1,
256,
512
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010076364348916447,
"min": -1.330080094056971
}
},
{
"name": "exit_flow/separable_conv/bias",
"shape": [
512
],
"dtype": "float32"
},
{
"name": "fc/age/weights",
"shape": [
512,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.008674054987290326,
"min": -1.2664120281443876
}
},
{
"name": "fc/age/bias",
"shape": [
1
],
"dtype": "float32"
},
{
"name": "fc/gender/weights",
"shape": [
512,
2
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0029948226377075793,
"min": -0.34140978069866407
}
},
{
"name": "fc/gender/bias",
"shape": [
2
],
"dtype": "float32"
}
],
"paths": [
"age_gender_model-shard1"
]
}
]
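Each entry in this manifest describes a float32 tensor stored as quantized uint8 bytes in the accompanying shard, with an affine mapping given by scale and min. Assuming the standard TensorFlow.js quantization scheme that face-api.js relies on, the loader reconstructs each weight as byte * scale + min; a minimal sketch:

    // Sketch: decode one quantized tensor from the binary shard.
    function dequantize(bytes, scale, min) {
        const out = new Float32Array(bytes.length);
        for (let i = 0; i < bytes.length; i++) {
            out[i] = bytes[i] * scale + min; // affine uint8 -> float32
        }
        return out;
    }
    // e.g. for "entry_flow/conv_in/filters":
    // dequantize(shardSlice, 0.005431825039433498, -0.7441600304023892)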

BIN
face_recognized_attendance_login/static/src/js/weights/face_expression_model-shard1

Binary file not shown.

606
face_recognized_attendance_login/static/src/js/weights/face_expression_model-weights_manifest.json

@@ -0,0 +1,606 @@
[
{
"weights": [
{
"name": "dense0/conv0/filters",
"shape": [
3,
3,
3,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0057930146946626555,
"min": -0.7125408074435067
}
},
{
"name": "dense0/conv0/bias",
"shape": [
32
],
"dtype": "float32"
},
{
"name": "dense0/conv1/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006473719839956246,
"min": -0.6408982641556684
}
},
{
"name": "dense0/conv1/pointwise_filter",
"shape": [
1,
1,
32,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010509579321917366,
"min": -1.408283629136927
}
},
{
"name": "dense0/conv1/bias",
"shape": [
32
],
"dtype": "float32"
},
{
"name": "dense0/conv2/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005666389652326995,
"min": -0.7252978754978554
}
},
{
"name": "dense0/conv2/pointwise_filter",
"shape": [
1,
1,
32,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010316079270605948,
"min": -1.1760330368490781
}
},
{
"name": "dense0/conv2/bias",
"shape": [
32
],
"dtype": "float32"
},
{
"name": "dense0/conv3/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0063220320963392074,
"min": -0.853474333005793
}
},
{
"name": "dense0/conv3/pointwise_filter",
"shape": [
1,
1,
32,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010322785377502442,
"min": -1.4658355236053466
}
},
{
"name": "dense0/conv3/bias",
"shape": [
32
],
"dtype": "float32"
},
{
"name": "dense1/conv0/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0042531527724920535,
"min": -0.5741756242864272
}
},
{
"name": "dense1/conv0/pointwise_filter",
"shape": [
1,
1,
32,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010653339647779278,
"min": -1.1825207009035
}
},
{
"name": "dense1/conv0/bias",
"shape": [
64
],
"dtype": "float32"
},
{
"name": "dense1/conv1/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005166931012097527,
"min": -0.6355325144879957
}
},
{
"name": "dense1/conv1/pointwise_filter",
"shape": [
1,
1,
64,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.011478300188101974,
"min": -1.3888743227603388
}
},
{
"name": "dense1/conv1/bias",
"shape": [
64
],
"dtype": "float32"
},
{
"name": "dense1/conv2/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006144821410085641,
"min": -0.8479853545918185
}
},
{
"name": "dense1/conv2/pointwise_filter",
"shape": [
1,
1,
64,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010541967317169788,
"min": -1.3809977185492421
}
},
{
"name": "dense1/conv2/bias",
"shape": [
64
],
"dtype": "float32"
},
{
"name": "dense1/conv3/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005769844849904378,
"min": -0.686611537138621
}
},
{
"name": "dense1/conv3/pointwise_filter",
"shape": [
1,
1,
64,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010939095534530341,
"min": -1.2689350820055196
}
},
{
"name": "dense1/conv3/bias",
"shape": [
64
],
"dtype": "float32"
},
{
"name": "dense2/conv0/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0037769308277204924,
"min": -0.40790852939381317
}
},
{
"name": "dense2/conv0/pointwise_filter",
"shape": [
1,
1,
64,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.01188667194516051,
"min": -1.4382873053644218
}
},
{
"name": "dense2/conv0/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "dense2/conv1/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006497045825509464,
"min": -0.8381189114907208
}
},
{
"name": "dense2/conv1/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.011632198913424622,
"min": -1.3377028750438316
}
},
{
"name": "dense2/conv1/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "dense2/conv2/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005947182225246056,
"min": -0.7969224181829715
}
},
{
"name": "dense2/conv2/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.011436844339557722,
"min": -1.4524792311238306
}
},
{
"name": "dense2/conv2/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "dense2/conv3/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006665432686899222,
"min": -0.8998334127313949
}
},
{
"name": "dense2/conv3/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.01283421422920975,
"min": -1.642779421338848
}
},
{
"name": "dense2/conv3/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "dense3/conv0/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.004711699953266218,
"min": -0.6737730933170692
}
},
{
"name": "dense3/conv0/pointwise_filter",
"shape": [
1,
1,
128,
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010955964817720302,
"min": -1.3914075318504784
}
},
{
"name": "dense3/conv0/bias",
"shape": [
256
],
"dtype": "float32"
},
{
"name": "dense3/conv1/depthwise_filter",
"shape": [
3,
3,
256,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.00554193468654857,
"min": -0.7149095745647656
}
},
{
"name": "dense3/conv1/pointwise_filter",
"shape": [
1,
1,
256,
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.016790372250126858,
"min": -2.484975093018775
}
},
{
"name": "dense3/conv1/bias",
"shape": [
256
],
"dtype": "float32"
},
{
"name": "dense3/conv2/depthwise_filter",
"shape": [
3,
3,
256,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006361540626077091,
"min": -0.8142772001378676
}
},
{
"name": "dense3/conv2/pointwise_filter",
"shape": [
1,
1,
256,
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.01777329678628959,
"min": -1.7062364914838006
}
},
{
"name": "dense3/conv2/bias",
"shape": [
256
],
"dtype": "float32"
},
{
"name": "dense3/conv3/depthwise_filter",
"shape": [
3,
3,
256,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006900275922289082,
"min": -0.8625344902861353
}
},
{
"name": "dense3/conv3/pointwise_filter",
"shape": [
1,
1,
256,
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.015449936717164282,
"min": -1.9003422162112067
}
},
{
"name": "dense3/conv3/bias",
"shape": [
256
],
"dtype": "float32"
},
{
"name": "fc/weights",
"shape": [
256,
7
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.004834276554631252,
"min": -0.7203072066400565
}
},
{
"name": "fc/bias",
"shape": [
7
],
"dtype": "float32"
}
],
"paths": [
"face_expression_model-shard1"
]
}
]

BIN
face_recognized_attendance_login/static/src/js/weights/face_landmark_68_model-shard1

Binary file not shown.

691
face_recognized_attendance_login/static/src/js/weights/face_landmark_68_model-weights_manifest.json

@@ -0,0 +1,691 @@
[
{
"weights": [
{
"name": "dense0/conv0/filters",
"shape": [
3,
3,
3,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.004853619781194949,
"min": -0.5872879935245888
}
},
{
"name": "dense0/conv0/bias",
"shape": [
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.004396426443960153,
"min": -0.7298067896973853
}
},
{
"name": "dense0/conv1/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.00635151559231328,
"min": -0.5589333721235686
}
},
{
"name": "dense0/conv1/pointwise_filter",
"shape": [
1,
1,
32,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.009354315552057004,
"min": -1.2628325995276957
}
},
{
"name": "dense0/conv1/bias",
"shape": [
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0029380727048013726,
"min": -0.5846764682554731
}
},
{
"name": "dense0/conv2/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0049374802439820535,
"min": -0.6171850304977566
}
},
{
"name": "dense0/conv2/pointwise_filter",
"shape": [
1,
1,
32,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.009941946758943446,
"min": -1.3421628124573652
}
},
{
"name": "dense0/conv2/bias",
"shape": [
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0030300481062309416,
"min": -0.5272283704841838
}
},
{
"name": "dense0/conv3/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005672684837790097,
"min": -0.7431217137505026
}
},
{
"name": "dense0/conv3/pointwise_filter",
"shape": [
1,
1,
32,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010712201455060173,
"min": -1.5639814124387852
}
},
{
"name": "dense0/conv3/bias",
"shape": [
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0030966934035806097,
"min": -0.3839899820439956
}
},
{
"name": "dense1/conv0/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0039155554537679636,
"min": -0.48161332081345953
}
},
{
"name": "dense1/conv0/pointwise_filter",
"shape": [
1,
1,
32,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.01023082966898002,
"min": -1.094698774580862
}
},
{
"name": "dense1/conv0/bias",
"shape": [
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0027264176630506327,
"min": -0.3871513081531898
}
},
{
"name": "dense1/conv1/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.004583378632863362,
"min": -0.5454220573107401
}
},
{
"name": "dense1/conv1/pointwise_filter",
"shape": [
1,
1,
64,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.00915846403907327,
"min": -1.117332612766939
}
},
{
"name": "dense1/conv1/bias",
"shape": [
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.003091680419211294,
"min": -0.5966943209077797
}
},
{
"name": "dense1/conv2/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005407439727409214,
"min": -0.708374604290607
}
},
{
"name": "dense1/conv2/pointwise_filter",
"shape": [
1,
1,
64,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.00946493943532308,
"min": -1.2399070660273235
}
},
{
"name": "dense1/conv2/bias",
"shape": [
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.004409168514550901,
"min": -0.9788354102303
}
},
{
"name": "dense1/conv3/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.004478132958505668,
"min": -0.6493292789833219
}
},
{
"name": "dense1/conv3/pointwise_filter",
"shape": [
1,
1,
64,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.011063695888893277,
"min": -1.2501976354449402
}
},
{
"name": "dense1/conv3/bias",
"shape": [
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.003909627596537272,
"min": -0.6646366914113363
}
},
{
"name": "dense2/conv0/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.003213915404151468,
"min": -0.3374611174359041
}
},
{
"name": "dense2/conv0/pointwise_filter",
"shape": [
1,
1,
64,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010917326048308728,
"min": -1.4520043644250609
}
},
{
"name": "dense2/conv0/bias",
"shape": [
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.002800439152063108,
"min": -0.38085972468058266
}
},
{
"name": "dense2/conv1/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0050568851770139206,
"min": -0.6927932692509071
}
},
{
"name": "dense2/conv1/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.01074961213504567,
"min": -1.3222022926106174
}
},
{
"name": "dense2/conv1/bias",
"shape": [
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0030654204242369708,
"min": -0.5487102559384177
}
},
{
"name": "dense2/conv2/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.00591809165244009,
"min": -0.917304206128214
}
},
{
"name": "dense2/conv2/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.01092823346455892,
"min": -1.366029183069865
}
},
{
"name": "dense2/conv2/bias",
"shape": [
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.002681120470458386,
"min": -0.36463238398234055
}
},
{
"name": "dense2/conv3/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0048311497650894465,
"min": -0.5797379718107336
}
},
{
"name": "dense2/conv3/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.011227761062921263,
"min": -1.4483811771168429
}
},
{
"name": "dense2/conv3/bias",
"shape": [
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0034643323982463162,
"min": -0.3360402426298927
}
},
{
"name": "dense3/conv0/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.003394978887894574,
"min": -0.49227193874471326
}
},
{
"name": "dense3/conv0/pointwise_filter",
"shape": [
1,
1,
128,
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010051267287310432,
"min": -1.2765109454884247
}
},
{
"name": "dense3/conv0/bias",
"shape": [
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.003142924752889895,
"min": -0.4588670139219247
}
},
{
"name": "dense3/conv1/depthwise_filter",
"shape": [
3,
3,
256,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.00448304671867221,
"min": -0.5872791201460595
}
},
{
"name": "dense3/conv1/pointwise_filter",
"shape": [
1,
1,
256,
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.016063522357566685,
"min": -2.3613377865623026
}
},
{
"name": "dense3/conv1/bias",
"shape": [
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.00287135781026354,
"min": -0.47664539650374765
}
},
{
"name": "dense3/conv2/depthwise_filter",
"shape": [
3,
3,
256,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006002906724518421,
"min": -0.7923836876364315
}
},
{
"name": "dense3/conv2/pointwise_filter",
"shape": [
1,
1,
256,
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.017087187019048954,
"min": -1.6061955797906016
}
},
{
"name": "dense3/conv2/bias",
"shape": [
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.003124481205846749,
"min": -0.46242321846531886
}
},
{
"name": "dense3/conv3/depthwise_filter",
"shape": [
3,
3,
256,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006576311588287353,
"min": -1.0193282961845398
}
},
{
"name": "dense3/conv3/pointwise_filter",
"shape": [
1,
1,
256,
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.015590153955945782,
"min": -1.99553970636106
}
},
{
"name": "dense3/conv3/bias",
"shape": [
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.004453541601405424,
"min": -0.6546706154065973
}
},
{
"name": "fc/weights",
"shape": [
256,
136
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010417488509533453,
"min": -1.500118345372817
}
},
{
"name": "fc/bias",
"shape": [
136
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0025084222648658005,
"min": 0.07683877646923065
}
}
],
"paths": [
"face_landmark_68_model-shard1"
]
}
]

BIN
face_recognized_attendance_login/static/src/js/weights/face_landmark_68_tiny_model-shard1

Binary file not shown.

397
face_recognized_attendance_login/static/src/js/weights/face_landmark_68_tiny_model-weights_manifest.json

@@ -0,0 +1,397 @@
[
{
"weights": [
{
"name": "dense0/conv0/filters",
"shape": [
3,
3,
3,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.008194216092427571,
"min": -0.9423348506291708
}
},
{
"name": "dense0/conv0/bias",
"shape": [
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006839508168837603,
"min": -0.8412595047670252
}
},
{
"name": "dense0/conv1/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.009194007106855804,
"min": -1.2779669878529567
}
},
{
"name": "dense0/conv1/pointwise_filter",
"shape": [
1,
1,
32,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0036026100317637128,
"min": -0.3170296827952067
}
},
{
"name": "dense0/conv1/bias",
"shape": [
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.000740380117706224,
"min": -0.06367269012273527
}
},
{
"name": "dense0/conv2/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 1,
"min": 0
}
},
{
"name": "dense0/conv2/pointwise_filter",
"shape": [
1,
1,
32,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 1,
"min": 0
}
},
{
"name": "dense0/conv2/bias",
"shape": [
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0037702228508743585,
"min": -0.6220867703942692
}
},
{
"name": "dense1/conv0/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0033707996209462483,
"min": -0.421349952618281
}
},
{
"name": "dense1/conv0/pointwise_filter",
"shape": [
1,
1,
32,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.014611541991140328,
"min": -1.8556658328748217
}
},
{
"name": "dense1/conv0/bias",
"shape": [
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.002832523046755323,
"min": -0.30307996600281956
}
},
{
"name": "dense1/conv1/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006593170586754294,
"min": -0.6329443763284123
}
},
{
"name": "dense1/conv1/pointwise_filter",
"shape": [
1,
1,
64,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.012215249211180444,
"min": -1.6001976466646382
}
},
{
"name": "dense1/conv1/bias",
"shape": [
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.002384825547536214,
"min": -0.3028728445370992
}
},
{
"name": "dense1/conv2/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005859645441466687,
"min": -0.7617539073906693
}
},
{
"name": "dense1/conv2/pointwise_filter",
"shape": [
1,
1,
64,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.013121426806730382,
"min": -1.7845140457153321
}
},
{
"name": "dense1/conv2/bias",
"shape": [
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0032247188044529336,
"min": -0.46435950784122243
}
},
{
"name": "dense2/conv0/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.002659512618008782,
"min": -0.32977956463308894
}
},
{
"name": "dense2/conv0/pointwise_filter",
"shape": [
1,
1,
64,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.015499923743453681,
"min": -1.9839902391620712
}
},
{
"name": "dense2/conv0/bias",
"shape": [
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0032450980999890497,
"min": -0.522460794098237
}
},
{
"name": "dense2/conv1/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005911862382701799,
"min": -0.792189559282041
}
},
{
"name": "dense2/conv1/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.021025861478319356,
"min": -2.2077154552235325
}
},
{
"name": "dense2/conv1/bias",
"shape": [
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.00349616945958605,
"min": -0.46149436866535865
}
},
{
"name": "dense2/conv2/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.008104994250278847,
"min": -1.013124281284856
}
},
{
"name": "dense2/conv2/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.029337059282789044,
"min": -3.5791212325002633
}
},
{
"name": "dense2/conv2/bias",
"shape": [
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0038808938334969913,
"min": -0.4230174278511721
}
},
{
"name": "fc/weights",
"shape": [
128,
136
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.014016061670639936,
"min": -1.8921683255363912
}
},
{
"name": "fc/bias",
"shape": [
136
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0029505149698724935,
"min": 0.088760145008564
}
}
],
"paths": [
"face_landmark_68_tiny_model-shard1"
]
}
]

BIN
face_recognized_attendance_login/static/src/js/weights/face_recognition_model-shard1

Binary file not shown.

6
face_recognized_attendance_login/static/src/js/weights/face_recognition_model-shard2

File diff suppressed because one or more lines are too long

1462
face_recognized_attendance_login/static/src/js/weights/face_recognition_model-weights_manifest.json

File diff suppressed because it is too large

BIN
face_recognized_attendance_login/static/src/js/weights/mtcnn_model-shard1

Binary file not shown.

402
face_recognized_attendance_login/static/src/js/weights/mtcnn_model-weights_manifest.json

@@ -0,0 +1,402 @@
[
{
"paths": [
"mtcnn_model-shard1"
],
"weights": [
{
"dtype": "float32",
"name": "pnet/conv1/weights",
"shape": [
3,
3,
3,
10
]
},
{
"dtype": "float32",
"name": "pnet/conv1/bias",
"shape": [
10
]
},
{
"dtype": "float32",
"name": "pnet/prelu1_alpha",
"shape": [
10
]
},
{
"dtype": "float32",
"name": "pnet/conv2/weights",
"shape": [
3,
3,
10,
16
]
},
{
"dtype": "float32",
"name": "pnet/conv2/bias",
"shape": [
16
]
},
{
"dtype": "float32",
"name": "pnet/prelu2_alpha",
"shape": [
16
]
},
{
"dtype": "float32",
"name": "pnet/conv3/weights",
"shape": [
3,
3,
16,
32
]
},
{
"dtype": "float32",
"name": "pnet/conv3/bias",
"shape": [
32
]
},
{
"dtype": "float32",
"name": "pnet/prelu3_alpha",
"shape": [
32
]
},
{
"dtype": "float32",
"name": "pnet/conv4_1/weights",
"shape": [
1,
1,
32,
2
]
},
{
"dtype": "float32",
"name": "pnet/conv4_1/bias",
"shape": [
2
]
},
{
"dtype": "float32",
"name": "pnet/conv4_2/weights",
"shape": [
1,
1,
32,
4
]
},
{
"dtype": "float32",
"name": "pnet/conv4_2/bias",
"shape": [
4
]
},
{
"dtype": "float32",
"name": "rnet/conv1/weights",
"shape": [
3,
3,
3,
28
]
},
{
"dtype": "float32",
"name": "rnet/conv1/bias",
"shape": [
28
]
},
{
"dtype": "float32",
"name": "rnet/prelu1_alpha",
"shape": [
28
]
},
{
"dtype": "float32",
"name": "rnet/conv2/weights",
"shape": [
3,
3,
28,
48
]
},
{
"dtype": "float32",
"name": "rnet/conv2/bias",
"shape": [
48
]
},
{
"dtype": "float32",
"name": "rnet/prelu2_alpha",
"shape": [
48
]
},
{
"dtype": "float32",
"name": "rnet/conv3/weights",
"shape": [
2,
2,
48,
64
]
},
{
"dtype": "float32",
"name": "rnet/conv3/bias",
"shape": [
64
]
},
{
"dtype": "float32",
"name": "rnet/prelu3_alpha",
"shape": [
64
]
},
{
"dtype": "float32",
"name": "rnet/fc1/weights",
"shape": [
576,
128
]
},
{
"dtype": "float32",
"name": "rnet/fc1/bias",
"shape": [
128
]
},
{
"dtype": "float32",
"name": "rnet/prelu4_alpha",
"shape": [
128
]
},
{
"dtype": "float32",
"name": "rnet/fc2_1/weights",
"shape": [
128,
2
]
},
{
"dtype": "float32",
"name": "rnet/fc2_1/bias",
"shape": [
2
]
},
{
"dtype": "float32",
"name": "rnet/fc2_2/weights",
"shape": [
128,
4
]
},
{
"dtype": "float32",
"name": "rnet/fc2_2/bias",
"shape": [
4
]
},
{
"dtype": "float32",
"name": "onet/conv1/weights",
"shape": [
3,
3,
3,
32
]
},
{
"dtype": "float32",
"name": "onet/conv1/bias",
"shape": [
32
]
},
{
"dtype": "float32",
"name": "onet/prelu1_alpha",
"shape": [
32
]
},
{
"dtype": "float32",
"name": "onet/conv2/weights",
"shape": [
3,
3,
32,
64
]
},
{
"dtype": "float32",
"name": "onet/conv2/bias",
"shape": [
64
]
},
{
"dtype": "float32",
"name": "onet/prelu2_alpha",
"shape": [
64
]
},
{
"dtype": "float32",
"name": "onet/conv3/weights",
"shape": [
3,
3,
64,
64
]
},
{
"dtype": "float32",
"name": "onet/conv3/bias",
"shape": [
64
]
},
{
"dtype": "float32",
"name": "onet/prelu3_alpha",
"shape": [
64
]
},
{
"dtype": "float32",
"name": "onet/conv4/weights",
"shape": [
2,
2,
64,
128
]
},
{
"dtype": "float32",
"name": "onet/conv4/bias",
"shape": [
128
]
},
{
"dtype": "float32",
"name": "onet/prelu4_alpha",
"shape": [
128
]
},
{
"dtype": "float32",
"name": "onet/fc1/weights",
"shape": [
1152,
256
]
},
{
"dtype": "float32",
"name": "onet/fc1/bias",
"shape": [
256
]
},
{
"dtype": "float32",
"name": "onet/prelu5_alpha",
"shape": [
256
]
},
{
"dtype": "float32",
"name": "onet/fc2_1/weights",
"shape": [
256,
2
]
},
{
"dtype": "float32",
"name": "onet/fc2_1/bias",
"shape": [
2
]
},
{
"dtype": "float32",
"name": "onet/fc2_2/weights",
"shape": [
256,
4
]
},
{
"dtype": "float32",
"name": "onet/fc2_2/bias",
"shape": [
4
]
},
{
"dtype": "float32",
"name": "onet/fc2_3/weights",
"shape": [
256,
10
]
},
{
"dtype": "float32",
"name": "onet/fc2_3/bias",
"shape": [
10
]
}
]
}
]

BIN
face_recognized_attendance_login/static/src/js/weights/ssd_mobilenetv1_model-shard1

Binary file not shown.

137
face_recognized_attendance_login/static/src/js/weights/ssd_mobilenetv1_model-shard2

File diff suppressed because one or more lines are too long

1936
face_recognized_attendance_login/static/src/js/weights/ssd_mobilenetv1_model-weights_manifest.json

File diff suppressed because it is too large

BIN
face_recognized_attendance_login/static/src/js/weights/tiny_face_detector_model-shard1

Binary file not shown.

273
face_recognized_attendance_login/static/src/js/weights/tiny_face_detector_model-weights_manifest.json

@@ -0,0 +1,273 @@
[
{
"weights": [
{
"name": "conv0/filters",
"shape": [
3,
3,
3,
16
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.009007044399485869,
"min": -1.2069439495311063
}
},
{
"name": "conv0/bias",
"shape": [
16
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005263455241334205,
"min": -0.9211046672334858
}
},
{
"name": "conv1/depthwise_filter",
"shape": [
3,
3,
16,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.004001977630690033,
"min": -0.5042491814669441
}
},
{
"name": "conv1/pointwise_filter",
"shape": [
1,
1,
16,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.013836609615999109,
"min": -1.411334180831909
}
},
{
"name": "conv1/bias",
"shape": [
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0015159862590771096,
"min": -0.30926119685173037
}
},
{
"name": "conv2/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.002666276225856706,
"min": -0.317286870876948
}
},
{
"name": "conv2/pointwise_filter",
"shape": [
1,
1,
32,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.015265831292844286,
"min": -1.6792414422128714
}
},
{
"name": "conv2/bias",
"shape": [
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0020280554598453,
"min": -0.37113414915168985
}
},
{
"name": "conv3/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006100742489683862,
"min": -0.8907084034938438
}
},
{
"name": "conv3/pointwise_filter",
"shape": [
1,
1,
64,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.016276211832083907,
"min": -2.0508026908425725
}
},
{
"name": "conv3/bias",
"shape": [
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.003394414279975143,
"min": -0.7637432129944072
}
},
{
"name": "conv4/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006716050119961009,
"min": -0.8059260143953211
}
},
{
"name": "conv4/pointwise_filter",
"shape": [
1,
1,
128,
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.021875603993733724,
"min": -2.8875797271728514
}
},
{
"name": "conv4/bias",
"shape": [
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0041141652009066415,
"min": -0.8187188749804216
}
},
{
"name": "conv5/depthwise_filter",
"shape": [
3,
3,
256,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.008423839597141042,
"min": -0.9013508368940915
}
},
{
"name": "conv5/pointwise_filter",
"shape": [
1,
1,
256,
512
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.030007277283014035,
"min": -3.8709387695088107
}
},
{
"name": "conv5/bias",
"shape": [
512
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.008402082966823203,
"min": -1.4871686851277068
}
},
{
"name": "conv8/filters",
"shape": [
1,
1,
512,
25
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.028336129469030042,
"min": -4.675461362389957
}
},
{
"name": "conv8/bias",
"shape": [
25
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.002268134028303857,
"min": -0.41053225912299807
}
}
],
"paths": [
"tiny_face_detector_model-shard1"
]
}
]
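
The module ships every shard and manifest locally so no CDN request is needed at runtime. A hedged sketch of loading them with face-api.js (loadFromUri is face-api.js's documented entry point; the weights path is assumed from this module's layout):

// Load detector, landmark, and recognition models from the module's
// static weights directory before starting recognition.
const WEIGHTS_URL = '/face_recognized_attendance_login/static/src/js/weights';

async function loadModels() {
    await Promise.all([
        faceapi.nets.ssdMobilenetv1.loadFromUri(WEIGHTS_URL),
        faceapi.nets.faceLandmark68Net.loadFromUri(WEIGHTS_URL),
        faceapi.nets.faceRecognitionNet.loadFromUri(WEIGHTS_URL),
    ]);
}

Each loadFromUri call fetches the corresponding *-weights_manifest.json, then the shard files listed under its "paths" key.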

50
face_recognized_attendance_login/static/src/xml/attendance.xml

@@ -0,0 +1,50 @@
<?xml version="1.0" encoding="UTF-8"?>
<templates xml:space="preserve">
<t t-inherit="hr_attendance.HrAttendanceMyMainMenu"
t-inherit-mode="extension">
<xpath expr="//t[@t-set='bodyContent']" position="replace">
<t t-set="bodyContent">
<div class="justify-content-between mt-2 d-flex small oe_qr_login" style="position:absolute;">
<div class="qr_video_kiosk">
<div class="close_button d-none position-absolute"
t-ref="close_button">
<button id="close_qr_scanner_kiosk" t-on-click="stopWebcamAndDetection"
style="position: absolute; right: 5px; z-index: 111;top:-15px;">
X
</button>
<div class="video-container">
<video id="video" autoplay="true"
       style="margin-left:-150px;margin-top:-24px;"/>
</div>
</div>
</div>
</div>
<t t-if="widget.employee">
<t t-set="checked_in"
t-value="widget.employee.attendance_state == 'checked_in'"/>
<!-- Custom Badge Section -->
<t t-call="HrAttendanceUserBadge">
<t t-set="userId" t-value="widget.employee.id"/>
<t t-set="userName" t-value="widget.employee.name"/></t>
<div class="flex-grow-1">
<h1 class="mt-5" t-esc="widget.employee.name"/>
<h3>
<t t-if="!checked_in">Welcome!</t>
<t t-else="">Want to check out?</t>
</h3>
<h4 class="mt0 mb0 text-muted" t-if="checked_in">Today's work hours:
<span t-esc="widget.hours_today"/>
</h4>
</div>
<t t-call="HrAttendanceCheckInOutButtons"/></t>
<div class="alert alert-warning" t-else="">
<b>Warning</b>: Your user should be linked to an employee to
use attendance.
<br/>
Please contact your administrator.
</div>
</t>
</xpath>
</t>
</templates>
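
The template binds the close button to a stopWebcamAndDetection handler defined in the module's JS. A sketch of what such a handler typically does with the #video element above (the property names are assumptions, not taken from this commit):

// Stop the getUserMedia stream feeding #video and halt the detection loop.
// this.detectionInterval is illustrative; the module's own my_attendance.js
// may track its loop differently.
stopWebcamAndDetection() {
    const video = document.getElementById('video');
    if (video && video.srcObject) {
        video.srcObject.getTracks().forEach((track) => track.stop());
        video.srcObject = null;
    }
    if (this.detectionInterval) {
        clearInterval(this.detectionInterval);
        this.detectionInterval = null;
    }
}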

75
face_recognized_attendance_login/static/src/xml/kiosk_confirm.xml

@@ -0,0 +1,75 @@
<?xml version="1.0" encoding="UTF-8"?>
<templates xml:space="preserve">
<t t-inherit="hr_attendance.HrAttendanceKioskConfirm"
t-inherit-mode="extension">
<xpath expr="//t[@t-set='bodyContent']" position="replace">
<t t-set="bodyContent">
<div class="justify-content-between mt-2 d-flex small oe_qr_login" style="position:absolute; display:block !important;pointer-events: none">
<div class="qr_video">
<div class="video-container">
    <video id="video" autoplay="true"
           style="margin-left:-150px;margin-top:-24px;"/>
</div>
</div>
</div>
<t t-set="checked_in" t-value="widget.employee_state=='checked_in'"/>
<button class="o_hr_attendance_back_button btn btn-block btn-secondary btn-lg d-block d-md-none py-5">
<i class="fa fa-chevron-left me-2"/> Go back
</button>
<t t-if="widget.employee_id" t-call="HrAttendanceUserBadge">
<t t-set="userId" t-value="widget.employee_id"/>
<t t-set="userName" t-value="widget.employee_name"/>
</t>
<button class="o_hr_attendance_back_button o_hr_attendance_back_button_md btn btn-secondary d-none d-md-inline-flex align-items-center position-absolute top-0 start-0 rounded-circle">
<i class="fa fa-2x fa-fw fa-chevron-left me-1" role="img" aria-label="Go back" title="Go back"/>
</button>
<div t-if="widget.employee_id" class="flex-grow-1">
<h1 class="mt-5 mb8"><t t-esc="widget.employee_name"/></h1>
<h3 class="mt8 mb24"><t t-if="!checked_in">Welcome!</t><t t-else="">Want to check out?</t></h3>
<h4 class="mt0 mb0 text-muted" t-if="checked_in">Today's work hours: <span t-esc="widget.employee_hours_today"/></h4>
<t t-if="!widget.use_pin" t-call="HrAttendanceCheckInOutButtons"/>
<t t-else="">
<h3 class="mt-4 mb0 text-muted">Please enter your PIN to <b t-if="checked_in">check out</b><b t-else="">check in</b></h3>
<div class="row">
<div class="col-md-8 offset-md-2 o_hr_attendance_pin_pad">
<div class="row g-0" >
<div class="col-12 mb8 mt8">
<input class="o_hr_attendance_PINbox border-0 bg-white fs-1 text-center" type="password" disabled="true"/>
</div>
</div>
<div class="row g-0">
<t t-foreach="['1', '2', '3', '4', '5', '6', '7', '8', '9', ['C', 'btn-warning'], '0', ['ok', 'btn-primary']]" t-as="btn_name">
<div class="col-4 p-1">
<a href="#" t-attf-class="o_hr_attendance_PINbox_button btn {{btn_name[1]? btn_name[1] : 'btn-secondary border'}} btn-block btn-lg {{ 'o_hr_attendance_pin_pad_button_' + btn_name[0] }} d-flex align-items-center justify-content-center">
<t t-esc="btn_name[0]"/>
</a>
</div>
</t>
</div>
</div>
</div>
</t>
</div>
<div t-else="" class="alert alert-danger mx-3" role="alert">
<h4 class="alert-heading">Error: could not find corresponding employee.</h4>
<p>Please return to the main menu.</p>
</div>
<a role="button" class="oe_attendance_sign_in_out" aria-label="Sign out" title="Sign out"/>
</t>
</xpath>
</t>
</templates>

24
face_recognized_attendance_login/views/login_templates.xml

@@ -0,0 +1,24 @@
<?xml version="1.0" encoding="UTF-8" ?>
<odoo>
<!-- Calls the scanner controller from the login page -->
<template id="qr_login" inherit_id="web.login" name="QR scanner">
<xpath expr="//div[hasclass('o_login_auth')]" position="before">
<div class="justify-content-between mt-2 d-flex small oe_qr_login">
<a href="#" id="login_click" t-on-click='_onLoginClick'>Login With QR</a>
<div class="qr_video">
<div class="close_button d-none position-absolute" t-ref="close_button">
<button id="close_qr_scanner" style="position: absolute; right: 0px; z-index: 111">
X
</button>
<div class="video-container">
<video id="video" autoplay="true"/>
</div>
</div>
</div>
</div>
</xpath>
</template>
</odoo>
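
The login_click anchor delegates to an _onLoginClick handler that reveals the scanner and starts the webcam. A minimal sketch using the standard getUserMedia API (only the ids and classes come from the template; the handler body is assumed):

// Show the scanner widgets and stream the webcam into #video.
async function _onLoginClick(ev) {
    ev.preventDefault();
    document.querySelector('.close_button').classList.remove('d-none');
    const video = document.getElementById('video');
    video.srcObject = await navigator.mediaDevices.getUserMedia({ video: true });
}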