
Nov 21: [FIX] Bug fix in 'face_recognized_attendance_login'

pull/351/merge
Cybrosys Technologies, 6 months ago
commit cbb465db08
  1. face_recognized_attendance_login/README.rst (9 lines changed)
  2. face_recognized_attendance_login/__init__.py (1 line changed)
  3. face_recognized_attendance_login/__manifest__.py (11 lines changed)
  4. face_recognized_attendance_login/controllers/main.py (127 lines changed)
  5. face_recognized_attendance_login/data/haarcascade_eye_tree_eyeglasses.xml (22619 lines changed)
  6. face_recognized_attendance_login/data/haarcascade_frontalface_default.xml (33314 lines changed)
  7. face_recognized_attendance_login/doc/RELEASE_NOTES.md (5 lines changed)
  8. face_recognized_attendance_login/static/src/js/face-api.min.js (1 line changed)
  9. face_recognized_attendance_login/static/src/js/face_recognition.js (209 lines changed)
  10. face_recognized_attendance_login/static/src/js/weights/age_gender_model-shard1 (binary)
  11. face_recognized_attendance_login/static/src/js/weights/age_gender_model-weights_manifest.json (618 lines changed)
  12. face_recognized_attendance_login/static/src/js/weights/face_expression_model-shard1 (binary)
  13. face_recognized_attendance_login/static/src/js/weights/face_expression_model-weights_manifest.json (606 lines changed)
  14. face_recognized_attendance_login/static/src/js/weights/face_landmark_68_model-shard1 (binary)
  15. face_recognized_attendance_login/static/src/js/weights/face_landmark_68_model-weights_manifest.json (691 lines changed)
  16. face_recognized_attendance_login/static/src/js/weights/face_landmark_68_tiny_model-shard1 (binary)
  17. face_recognized_attendance_login/static/src/js/weights/face_landmark_68_tiny_model-weights_manifest.json (397 lines changed)
  18. face_recognized_attendance_login/static/src/js/weights/face_recognition_model-shard1 (binary)
  19. face_recognized_attendance_login/static/src/js/weights/face_recognition_model-shard2 (6 lines changed)
  20. face_recognized_attendance_login/static/src/js/weights/face_recognition_model-weights_manifest.json (1462 lines changed)
  21. face_recognized_attendance_login/static/src/js/weights/mtcnn_model-shard1 (binary)
  22. face_recognized_attendance_login/static/src/js/weights/mtcnn_model-weights_manifest.json (402 lines changed)
  23. face_recognized_attendance_login/static/src/js/weights/ssd_mobilenetv1_model-shard1 (binary)
  24. face_recognized_attendance_login/static/src/js/weights/ssd_mobilenetv1_model-shard2 (137 lines changed)
  25. face_recognized_attendance_login/static/src/js/weights/ssd_mobilenetv1_model-weights_manifest.json (1936 lines changed)
  26. face_recognized_attendance_login/static/src/js/weights/tiny_face_detector_model-shard1 (binary)
  27. face_recognized_attendance_login/static/src/js/weights/tiny_face_detector_model-weights_manifest.json (273 lines changed)
  28. face_recognized_attendance_login/static/src/xml/face_recognition_template.xml (18 lines changed)

face_recognized_attendance_login/README.rst (9 lines changed)

@@ -8,14 +8,7 @@ Face Recognized Attendance Login
Configuration
=============
-Opencv [version: 4.8.1.78]
-numpy [version: 1.26.2]
-Pillow [version: 9.0.1]
-cmake [version: 3.27.7]
-dlib [version: 19.24.2]
-face-recognition [version: 1.2.3]
-Please ensure the camera access also.
+Please ensure the camera access.
Installation
============

face_recognized_attendance_login/__init__.py (1 line changed)

@@ -20,4 +20,3 @@
#
#############################################################################
-from . import controllers

face_recognized_attendance_login/__manifest__.py (11 lines changed)

@@ -21,7 +21,7 @@
#############################################################################
{
'name': 'Face Recognized Attendance Login',
-'version': '17.0.1.0.0',
+'version': '17.0.2.0.0',
'category': 'Human Resources',
'summary': """Mark the attendance of employee by recognizing their face""",
'description': """This module introduces a face recognition system in the
@@ -32,9 +32,12 @@
'maintainer': 'Cybrosys Techno Solutions',
'website': "https://www.cybrosys.com",
'depends': ['base', 'mail', 'hr_attendance'],
-'external_dependencies': {
-'python': ['face-recognition', 'cmake', 'dlib', 'PIL',
-'numpy', 'opencv-python'],
+'assets': {
+'hr_attendance.assets_public_attendance': [
+'face_recognized_attendance_login/static/src/js/face-api.min.js',
+'face_recognized_attendance_login/static/src/xml/face_recognition_template.xml',
+'face_recognized_attendance_login/static/src/js/face_recognition.js',
+]
},
'images': ['static/description/banner.jpg'],
'license': 'LGPL-3',

face_recognized_attendance_login/controllers/main.py (127 lines changed)

@@ -19,21 +19,6 @@
# If not, see <http://www.gnu.org/licenses/>.
#
#############################################################################
-import base64
-import PIL
-import cmake
-import cv2
-import dlib
-import face_recognition
-from io import BytesIO
-import numpy
-import numpy as np
-import os
-from PIL import Image
-import time
-from odoo.exceptions import AccessError
from odoo.http import request
from odoo.addons.hr_attendance.controllers.main import HrAttendance
from odoo import _, http
@@ -42,112 +27,8 @@ from odoo import _, http
class HrAttendances(HrAttendance):
"""Controllers Overrides to add the Face detection feature"""
-@http.route('/hr_attendance/attendance_employee_data', type="json",
+@http.route('/get_image', type="json",
auth="public")
-def employee_attendance_data(self, token, employee_id):
-"""In this Code section the face detection is added to
-employee_attendance_data"""
-company = self._get_company(token)
-employee_pic = request.env['hr.employee'].sudo().browse(
-employee_id).image_1920
-sub_folder = os.path.abspath(os.path.dirname(__file__))
-project_folder = os.path.abspath(os.path.join(sub_folder, os.pardir))
-eye_cascade_path = os.path.join(project_folder, 'data',
-'haarcascade_eye_tree_eyeglasses.xml')
-face_cascade_path = os.path.join(project_folder, 'data',
-'haarcascade_frontalface_default.xml')
-face_cascade = cv2.CascadeClassifier(face_cascade_path)
-eye_cascade = cv2.CascadeClassifier(eye_cascade_path)
-binary_data = base64.b64decode(employee_pic)
-image_bytes = BytesIO(binary_data)
-pil_image = Image.open(image_bytes)
-np_image = np.array(pil_image)
-img = cv2.cvtColor(np_image, cv2.COLOR_BGR2RGB)
-# Extract features from the referenced eye(s)
-orb = cv2.ORB_create()
-referenced_key_points, referenced_descriptors = orb.detectAndCompute(
-img, None)
-encoded_face = face_recognition.face_encodings(img)
-start_time = time.time()
-camera_time = 0
-face_recognized = 0
-eyes_match_fail_index = 0
-eyes_match_index = 0
-cap = cv2.VideoCapture(0)
-ret, frame = cap.read()
-while ret and camera_time < 9:
-ret, frame = cap.read()
-gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
-gray = cv2.bilateralFilter(gray, 5, 1, 1)
-faces = face_cascade.detectMultiScale(gray, 1.3, 5,
-minSize=(200, 200))
-if len(faces) == 1:
-for (x, y, w, h) in faces:
-frame = cv2.rectangle(frame, (x, y), (x + w, y + h),
-(0, 255, 0), 2)
-eyes = eye_cascade.detectMultiScale(gray, scaleFactor=1.3,
-minNeighbors=5)
-current_key_points, current_descriptors = orb.detectAndCompute(
-gray, None)
-bf = cv2.BFMatcher(cv2.NORM_HAMMING, crossCheck=True)
-matches = bf.match(referenced_descriptors,
-current_descriptors)
-good_matches = [m for m in matches if m.distance < 70]
-if len(good_matches) >= 5:
-eyes_match_index += 1
-else:
-eyes_match_fail_index += 1
-if len(eyes) == 0:
-img_frame = cv2.resize(frame, (0, 0), None, 0.25, 0.25)
-img_frame = cv2.cvtColor(img_frame, cv2.COLOR_BGR2RGB)
-face_current_frame = face_recognition.face_locations(
-img_frame)
-encode_current_frame = face_recognition.face_encodings(
-img_frame, face_current_frame)
-for encode_face, face_loc in zip(encode_current_frame,
-face_current_frame):
-face_matches = face_recognition.compare_faces(
-encoded_face, encode_face)
-face_distance = face_recognition.face_distance(
-encoded_face, encode_face)
-match_index = np.argmin(face_distance)
-elapsed_time = time.time() - start_time
-if face_matches[
-match_index] and eyes_match_index > eyes_match_fail_index:
-face_recognized = 1
-if elapsed_time > 6:
-time.sleep(1)
-else:
-face_recognized = 0
-# cv2.imshow('frame', frame)
-# the imshow is removed from here because it is not needed anymore
-# Also it is making error while running the code second time.
-cv2.waitKey(0)
-else:
-# Reset the counters and related variables when no face is
-# detected
-camera_time += 1
-eyes_match_index = 0
-eyes_match_fail_index = 0
-cap.release()
-cv2.destroyAllWindows()
-camera_time = 0
-if company and face_recognized != 1:
-raise AccessError(
-_("Sorry, Can't recognize you. Please try again"))
-else:
-employee = request.env['hr.employee'].sudo().browse(employee_id)
-if employee.company_id == company:
-return self._get_employee_info_response(employee)
-cv2.waitKey(0)
-return {}
+def get_image(self, employee_id):
+    """Return the employee's image_1920 (base64) for client-side face matching."""
+    image = request.env['hr.employee'].sudo().browse(employee_id).image_1920
+    return image
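With this change the backend shrinks to a thin image endpoint: all of the OpenCV/dlib matching removed above now runs in the browser via face-api.js, and /get_image only returns the employee's image_1920. As a rough sketch, a client outside the kiosk app could call the route like this (hypothetical helper, assuming the route keeps type="json" and auth="public" as committed):

async function fetchEmployeeImage(employeeId) {
    const response = await fetch("/get_image", {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        // Odoo wraps type="json" routes in a JSON-RPC 2.0 envelope.
        body: JSON.stringify({
            jsonrpc: "2.0",
            method: "call",
            params: { employee_id: employeeId },
        }),
    });
    const data = await response.json();
    // data.result is the base64-encoded image_1920, or false when unset.
    return data.result ? "data:image/jpeg;base64," + data.result : null;
}

Inside the kiosk app the same call goes through this.rpc("/get_image", {...}), as face_recognition.js below shows.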

face_recognized_attendance_login/data/haarcascade_eye_tree_eyeglasses.xml (22619 lines changed)

File diff suppressed because it is too large

face_recognized_attendance_login/data/haarcascade_frontalface_default.xml (33314 lines changed)

File diff suppressed because it is too large

face_recognized_attendance_login/doc/RELEASE_NOTES.md (5 lines changed)

@@ -3,3 +3,8 @@
#### Version 17.0.1.0.0
##### ADD
- Initial Commit for Face Recognized Attendance Login
+#### 19.11.2024
+#### Version 17.0.2.0.0
+##### ADD
+- Update: face recognition now runs client-side with face-api.js; the Python OpenCV/dlib dependencies are dropped

face_recognized_attendance_login/static/src/js/face-api.min.js (1 line changed)

File diff suppressed because one or more lines are too long

face_recognized_attendance_login/static/src/js/face_recognition.js (209 lines changed)

@@ -0,0 +1,209 @@
/** @odoo-module **/
import kiosk from "@hr_attendance/public_kiosk/public_kiosk_app";
import { patch } from "@web/core/utils/patch";
import { useService } from "@web/core/utils/hooks";
import { useRef } from "@odoo/owl";
import { _t } from "@web/core/l10n/translation";
const MODEL_URL = '/face_recognized_attendance_login/static/src/js/weights';
// Start downloading the face-api.js model weights when the module loads.
// These calls return promises that are not awaited here, so the weights may
// still be downloading when recognition first runs.
faceapi.nets.ssdMobilenetv1.loadFromUri(MODEL_URL);
faceapi.nets.faceLandmark68Net.loadFromUri(MODEL_URL);
faceapi.nets.faceRecognitionNet.loadFromUri(MODEL_URL);
faceapi.nets.tinyFaceDetector.loadFromUri(MODEL_URL);
faceapi.nets.faceLandmark68TinyNet.loadFromUri(MODEL_URL);
faceapi.nets.faceExpressionNet.loadFromUri(MODEL_URL);
faceapi.nets.ageGenderNet.loadFromUri(MODEL_URL);
patch(kiosk.kioskAttendanceApp.prototype, {
setup() {
super.setup(...arguments);
this.orm = useService("orm");
this.employee_image = useRef("employee_image");
this.video = useRef("video");
this.notification = useService("notification");
this.state.employee = false;
this.state.verifiedEmployeeId = null;
this.isRecognitionActive = false;
this.currentStream = null;
this.faceMatcher = null;
this.noMatchCount = 0;
},
async loadImage(employeeId) {
const image = await this.rpc("/get_image", {
employee_id: employeeId
});
this.have_image = image;
const employee_image = this.employee_image.el;
employee_image.src = "data:image/jpeg;base64," + image;
this.currentVerificationId = employeeId;
},
async startWebcam() {
const video = this.video.el;
if (video) {
video.srcObject = null;
video.style.display = 'block';
}
this.isRecognitionActive = true;
this.state.employee = false;
this.noMatchCount = 0; // Reset no match counter
this.faceMatcher = null; // Reset faceMatcher
try {
const stream = await navigator.mediaDevices.getUserMedia({
video: true,
audio: false
});
this.currentStream = stream;
video.srcObject = stream;
await new Promise(resolve => {
video.onloadedmetadata = resolve;
});
await this.faceRecognition(video);
} catch (error) {
console.error("Error starting webcam:", error);
this.isRecognitionActive = false;
this.notification.add(_t("Your browser does not support camera access. Please try a different browser."), {
title : "Access Denied !",
type: "danger",
});
}
},
async getLabeledFaceDescriptions() {
const employee_image = this.employee_image.el;
const detections = await faceapi
.detectSingleFace(employee_image)
.withFaceLandmarks()
.withFaceExpressions()
.withFaceDescriptor();
return detections;
},
stopRecognition(video, canvas) {
this.isRecognitionActive = false;
if (this.currentStream) {
this.currentStream.getTracks().forEach(track => track.stop());
this.currentStream = null;
}
if (video) {
video.srcObject = null;
video.style.display = 'none';
}
if (canvas && canvas.parentNode) {
canvas.remove();
}
const modal = document.getElementById('WebCamModal');
if (modal) {
modal.style.display = 'none';
}
this.faceMatcher = null;
this.noMatchCount = 0;
},
async faceRecognition(video) {
if (!this.isRecognitionActive) return;
if (!this.faceMatcher) {
const labeledFaceDescriptors = await this.getLabeledFaceDescriptions();
if (labeledFaceDescriptors && labeledFaceDescriptors.descriptor) {
this.faceMatcher = new faceapi.FaceMatcher([labeledFaceDescriptors.descriptor]);
} else {
console.error("Could not get face descriptor from reference image");
this.notification.add(_t("Failed to initialize face recognition, Please upload a new, properly formatted image."), {
type: "danger",
title: "Image detection failed!",
});
this.stopRecognition(video);
return;
}
}
const canvas = faceapi.createCanvasFromMedia(video);
document.body.append(canvas);
canvas.style.display = 'none';
const displaySize = { width: video.videoWidth, height: video.videoHeight };
faceapi.matchDimensions(canvas, displaySize);
const processFrame = async () => {
if (!this.isRecognitionActive) return;
try {
const detections = await faceapi
.detectAllFaces(video)
.withFaceLandmarks()
.withFaceExpressions()
.withFaceDescriptors();
if (detections.length === 0) {
if (this.isRecognitionActive) {
requestAnimationFrame(processFrame);
}
return;
}
for (const detection of detections) {
const match = this.faceMatcher.findBestMatch(detection.descriptor);
if (match._distance < 0.4) {
this.state.employee = true;
this.state.verifiedEmployeeId = this.currentVerificationId;
this.stopRecognition(video, canvas);
return;
} else {
this.noMatchCount++;
if (this.noMatchCount >= 3) {
this.notification.add(_t("Sorry, cannot recognize you"), {
title:"Recognition Failed ! ",
type: "danger",
});
this.stopRecognition(video, canvas);
return;
}
}
}
if (this.isRecognitionActive) {
requestAnimationFrame(processFrame);
}
} catch (error) {
console.error("Face recognition error:", error);
this.stopRecognition(video, canvas);
}
};
processFrame();
},
async onManualSelection(employeeId, enteredPin) {
if (this.isRecognitionActive) {
this.stopRecognition(this.video.el);
}
await this.loadImage(employeeId);
if (this.have_image) {
const modal = document.getElementById('WebCamModal');
if (modal) {
modal.style.display = 'block';
}
await this.startWebcam();
const checkInterval = setInterval(() => {
if (this.state.employee && this.state.verifiedEmployeeId === employeeId) {
clearInterval(checkInterval);
this.rpc("manual_selection", {
token: this.props.token,
employee_id: employeeId,
pin_code: enteredPin,
}).then(result => {
if (result && result.attendance) {
this.employeeData = result;
this.switchDisplay("greet");
} else {
if (enteredPin) {
this.notification.add(_t("Wrong Pin"), {
type: "danger",
});
}
}
});
} else if (!this.isRecognitionActive) {
// Recognition stopped without a match; stop polling.
clearInterval(checkInterval);
}
}, 500);
} else {
this.notification.add(_t("Selected employee has no image."), {
title: _t("Authentication failed"),
type: "danger",
});
location.reload();
}
},
});
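One caveat in the listing above: the loadFromUri() calls at module level are never awaited, so faceRecognition() can start before the weights finish downloading. A minimal guard the component could await first (my sketch, not part of the commit; it reuses the same faceapi global and MODEL_URL, and lists only the nets this file actually exercises):

let modelsReady = null;
function ensureModelsLoaded() {
    // Memoize the downloads so every caller can `await ensureModelsLoaded()`
    // instead of racing the module-level loads.
    if (!modelsReady) {
        modelsReady = Promise.all([
            faceapi.nets.ssdMobilenetv1.loadFromUri(MODEL_URL), // default detector for detectAllFaces()
            faceapi.nets.faceLandmark68Net.loadFromUri(MODEL_URL),
            faceapi.nets.faceRecognitionNet.loadFromUri(MODEL_URL),
            faceapi.nets.faceExpressionNet.loadFromUri(MODEL_URL),
        ]);
    }
    return modelsReady;
}

startWebcam() could then begin with `await ensureModelsLoaded();` before it calls faceRecognition(video).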

face_recognized_attendance_login/static/src/js/weights/age_gender_model-shard1 (binary)

Binary file not shown.

face_recognized_attendance_login/static/src/js/weights/age_gender_model-weights_manifest.json (618 lines changed)

@@ -0,0 +1,618 @@
[
{
"weights": [
{
"name": "entry_flow/conv_in/filters",
"shape": [
3,
3,
3,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005431825039433498,
"min": -0.7441600304023892
}
},
{
"name": "entry_flow/conv_in/bias",
"shape": [
32
],
"dtype": "float32"
},
{
"name": "entry_flow/reduction_block_0/separable_conv0/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005691980614381678,
"min": -0.6090419257388395
}
},
{
"name": "entry_flow/reduction_block_0/separable_conv0/pointwise_filter",
"shape": [
1,
1,
32,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.009089225881239947,
"min": -1.1179747833925135
}
},
{
"name": "entry_flow/reduction_block_0/separable_conv0/bias",
"shape": [
64
],
"dtype": "float32"
},
{
"name": "entry_flow/reduction_block_0/separable_conv1/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.00683894624897078,
"min": -0.8138346036275228
}
},
{
"name": "entry_flow/reduction_block_0/separable_conv1/pointwise_filter",
"shape": [
1,
1,
64,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.011632566358528886,
"min": -1.3028474321552352
}
},
{
"name": "entry_flow/reduction_block_0/separable_conv1/bias",
"shape": [
64
],
"dtype": "float32"
},
{
"name": "entry_flow/reduction_block_0/expansion_conv/filters",
"shape": [
1,
1,
32,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010254812240600587,
"min": -0.9229331016540528
}
},
{
"name": "entry_flow/reduction_block_0/expansion_conv/bias",
"shape": [
64
],
"dtype": "float32"
},
{
"name": "entry_flow/reduction_block_1/separable_conv0/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0052509616403018725,
"min": -0.6406173201168285
}
},
{
"name": "entry_flow/reduction_block_1/separable_conv0/pointwise_filter",
"shape": [
1,
1,
64,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010788509424994973,
"min": -1.4564487723743214
}
},
{
"name": "entry_flow/reduction_block_1/separable_conv0/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "entry_flow/reduction_block_1/separable_conv1/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.00553213918910307,
"min": -0.7025816770160899
}
},
{
"name": "entry_flow/reduction_block_1/separable_conv1/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.013602388606351965,
"min": -1.6186842441558837
}
},
{
"name": "entry_flow/reduction_block_1/separable_conv1/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "entry_flow/reduction_block_1/expansion_conv/filters",
"shape": [
1,
1,
64,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.007571851038465313,
"min": -1.158493208885193
}
},
{
"name": "entry_flow/reduction_block_1/expansion_conv/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "middle_flow/main_block_0/separable_conv0/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005766328409606335,
"min": -0.6688940955143349
}
},
{
"name": "middle_flow/main_block_0/separable_conv0/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.012136116214826995,
"min": -1.5776951079275094
}
},
{
"name": "middle_flow/main_block_0/separable_conv0/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "middle_flow/main_block_0/separable_conv1/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.004314773222979377,
"min": -0.5652352922102984
}
},
{
"name": "middle_flow/main_block_0/separable_conv1/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.01107162026798024,
"min": -1.2400214700137868
}
},
{
"name": "middle_flow/main_block_0/separable_conv1/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "middle_flow/main_block_0/separable_conv2/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0036451735917259667,
"min": -0.4848080876995536
}
},
{
"name": "middle_flow/main_block_0/separable_conv2/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.008791744942758598,
"min": -1.134135097615859
}
},
{
"name": "middle_flow/main_block_0/separable_conv2/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "middle_flow/main_block_1/separable_conv0/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.004915751896652521,
"min": -0.6095532351849126
}
},
{
"name": "middle_flow/main_block_1/separable_conv0/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010868691463096469,
"min": -1.3368490499608656
}
},
{
"name": "middle_flow/main_block_1/separable_conv0/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "middle_flow/main_block_1/separable_conv1/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005010117269029804,
"min": -0.6012140722835765
}
},
{
"name": "middle_flow/main_block_1/separable_conv1/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010311148213405235,
"min": -1.3816938605963016
}
},
{
"name": "middle_flow/main_block_1/separable_conv1/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "middle_flow/main_block_1/separable_conv2/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.004911523706772748,
"min": -0.7367285560159123
}
},
{
"name": "middle_flow/main_block_1/separable_conv2/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.008976466047997568,
"min": -1.2207993825276693
}
},
{
"name": "middle_flow/main_block_1/separable_conv2/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "exit_flow/reduction_block/separable_conv0/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005074804436926748,
"min": -0.7104726211697447
}
},
{
"name": "exit_flow/reduction_block/separable_conv0/pointwise_filter",
"shape": [
1,
1,
128,
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.011453078307357489,
"min": -1.4545409450344011
}
},
{
"name": "exit_flow/reduction_block/separable_conv0/bias",
"shape": [
256
],
"dtype": "float32"
},
{
"name": "exit_flow/reduction_block/separable_conv1/depthwise_filter",
"shape": [
3,
3,
256,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.007741751390344957,
"min": -1.1380374543807086
}
},
{
"name": "exit_flow/reduction_block/separable_conv1/pointwise_filter",
"shape": [
1,
1,
256,
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.011347713189966538,
"min": -1.497898141075583
}
},
{
"name": "exit_flow/reduction_block/separable_conv1/bias",
"shape": [
256
],
"dtype": "float32"
},
{
"name": "exit_flow/reduction_block/expansion_conv/filters",
"shape": [
1,
1,
128,
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006717281014311547,
"min": -0.8329428457746318
}
},
{
"name": "exit_flow/reduction_block/expansion_conv/bias",
"shape": [
256
],
"dtype": "float32"
},
{
"name": "exit_flow/separable_conv/depthwise_filter",
"shape": [
3,
3,
256,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0027201742518181892,
"min": -0.3237007359663645
}
},
{
"name": "exit_flow/separable_conv/pointwise_filter",
"shape": [
1,
1,
256,
512
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010076364348916447,
"min": -1.330080094056971
}
},
{
"name": "exit_flow/separable_conv/bias",
"shape": [
512
],
"dtype": "float32"
},
{
"name": "fc/age/weights",
"shape": [
512,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.008674054987290326,
"min": -1.2664120281443876
}
},
{
"name": "fc/age/bias",
"shape": [
1
],
"dtype": "float32"
},
{
"name": "fc/gender/weights",
"shape": [
512,
2
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0029948226377075793,
"min": -0.34140978069866407
}
},
{
"name": "fc/gender/bias",
"shape": [
2
],
"dtype": "float32"
}
],
"paths": [
"age_gender_model-shard1"
]
}
]
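Each entry above describes one tensor stored in the binary shard: its name, shape, dtype, and an optional affine quantization. The TensorFlow.js loader underneath face-api.js decodes each stored uint8 byte back to float32 as value = byte * scale + min. An illustrative sketch of that decoding for one tensor (the library does this internally when the weights are fetched):

function dequantize(bytes, scale, min) {
    // Affine map from quantized uint8 back to float32.
    const out = new Float32Array(bytes.length);
    for (let i = 0; i < bytes.length; i++) {
        out[i] = bytes[i] * scale + min;
    }
    return out;
}
// e.g. "entry_flow/conv_in/filters" above: 3*3*3*32 = 864 bytes read from
// age_gender_model-shard1, decoded with scale ≈ 0.005432 and min ≈ -0.74416.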

face_recognized_attendance_login/static/src/js/weights/face_expression_model-shard1 (binary)

Binary file not shown.

face_recognized_attendance_login/static/src/js/weights/face_expression_model-weights_manifest.json (606 lines changed)

@@ -0,0 +1,606 @@
[
{
"weights": [
{
"name": "dense0/conv0/filters",
"shape": [
3,
3,
3,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0057930146946626555,
"min": -0.7125408074435067
}
},
{
"name": "dense0/conv0/bias",
"shape": [
32
],
"dtype": "float32"
},
{
"name": "dense0/conv1/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006473719839956246,
"min": -0.6408982641556684
}
},
{
"name": "dense0/conv1/pointwise_filter",
"shape": [
1,
1,
32,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010509579321917366,
"min": -1.408283629136927
}
},
{
"name": "dense0/conv1/bias",
"shape": [
32
],
"dtype": "float32"
},
{
"name": "dense0/conv2/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005666389652326995,
"min": -0.7252978754978554
}
},
{
"name": "dense0/conv2/pointwise_filter",
"shape": [
1,
1,
32,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010316079270605948,
"min": -1.1760330368490781
}
},
{
"name": "dense0/conv2/bias",
"shape": [
32
],
"dtype": "float32"
},
{
"name": "dense0/conv3/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0063220320963392074,
"min": -0.853474333005793
}
},
{
"name": "dense0/conv3/pointwise_filter",
"shape": [
1,
1,
32,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010322785377502442,
"min": -1.4658355236053466
}
},
{
"name": "dense0/conv3/bias",
"shape": [
32
],
"dtype": "float32"
},
{
"name": "dense1/conv0/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0042531527724920535,
"min": -0.5741756242864272
}
},
{
"name": "dense1/conv0/pointwise_filter",
"shape": [
1,
1,
32,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010653339647779278,
"min": -1.1825207009035
}
},
{
"name": "dense1/conv0/bias",
"shape": [
64
],
"dtype": "float32"
},
{
"name": "dense1/conv1/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005166931012097527,
"min": -0.6355325144879957
}
},
{
"name": "dense1/conv1/pointwise_filter",
"shape": [
1,
1,
64,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.011478300188101974,
"min": -1.3888743227603388
}
},
{
"name": "dense1/conv1/bias",
"shape": [
64
],
"dtype": "float32"
},
{
"name": "dense1/conv2/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006144821410085641,
"min": -0.8479853545918185
}
},
{
"name": "dense1/conv2/pointwise_filter",
"shape": [
1,
1,
64,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010541967317169788,
"min": -1.3809977185492421
}
},
{
"name": "dense1/conv2/bias",
"shape": [
64
],
"dtype": "float32"
},
{
"name": "dense1/conv3/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005769844849904378,
"min": -0.686611537138621
}
},
{
"name": "dense1/conv3/pointwise_filter",
"shape": [
1,
1,
64,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010939095534530341,
"min": -1.2689350820055196
}
},
{
"name": "dense1/conv3/bias",
"shape": [
64
],
"dtype": "float32"
},
{
"name": "dense2/conv0/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0037769308277204924,
"min": -0.40790852939381317
}
},
{
"name": "dense2/conv0/pointwise_filter",
"shape": [
1,
1,
64,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.01188667194516051,
"min": -1.4382873053644218
}
},
{
"name": "dense2/conv0/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "dense2/conv1/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006497045825509464,
"min": -0.8381189114907208
}
},
{
"name": "dense2/conv1/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.011632198913424622,
"min": -1.3377028750438316
}
},
{
"name": "dense2/conv1/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "dense2/conv2/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005947182225246056,
"min": -0.7969224181829715
}
},
{
"name": "dense2/conv2/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.011436844339557722,
"min": -1.4524792311238306
}
},
{
"name": "dense2/conv2/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "dense2/conv3/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006665432686899222,
"min": -0.8998334127313949
}
},
{
"name": "dense2/conv3/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.01283421422920975,
"min": -1.642779421338848
}
},
{
"name": "dense2/conv3/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "dense3/conv0/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.004711699953266218,
"min": -0.6737730933170692
}
},
{
"name": "dense3/conv0/pointwise_filter",
"shape": [
1,
1,
128,
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010955964817720302,
"min": -1.3914075318504784
}
},
{
"name": "dense3/conv0/bias",
"shape": [
256
],
"dtype": "float32"
},
{
"name": "dense3/conv1/depthwise_filter",
"shape": [
3,
3,
256,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.00554193468654857,
"min": -0.7149095745647656
}
},
{
"name": "dense3/conv1/pointwise_filter",
"shape": [
1,
1,
256,
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.016790372250126858,
"min": -2.484975093018775
}
},
{
"name": "dense3/conv1/bias",
"shape": [
256
],
"dtype": "float32"
},
{
"name": "dense3/conv2/depthwise_filter",
"shape": [
3,
3,
256,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006361540626077091,
"min": -0.8142772001378676
}
},
{
"name": "dense3/conv2/pointwise_filter",
"shape": [
1,
1,
256,
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.01777329678628959,
"min": -1.7062364914838006
}
},
{
"name": "dense3/conv2/bias",
"shape": [
256
],
"dtype": "float32"
},
{
"name": "dense3/conv3/depthwise_filter",
"shape": [
3,
3,
256,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006900275922289082,
"min": -0.8625344902861353
}
},
{
"name": "dense3/conv3/pointwise_filter",
"shape": [
1,
1,
256,
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.015449936717164282,
"min": -1.9003422162112067
}
},
{
"name": "dense3/conv3/bias",
"shape": [
256
],
"dtype": "float32"
},
{
"name": "fc/weights",
"shape": [
256,
7
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.004834276554631252,
"min": -0.7203072066400565
}
},
{
"name": "fc/bias",
"shape": [
7
],
"dtype": "float32"
}
],
"paths": [
"face_expression_model-shard1"
]
}
]

face_recognized_attendance_login/static/src/js/weights/face_landmark_68_model-shard1 (binary)

Binary file not shown.

face_recognized_attendance_login/static/src/js/weights/face_landmark_68_model-weights_manifest.json (691 lines changed)

@@ -0,0 +1,691 @@
[
{
"weights": [
{
"name": "dense0/conv0/filters",
"shape": [
3,
3,
3,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.004853619781194949,
"min": -0.5872879935245888
}
},
{
"name": "dense0/conv0/bias",
"shape": [
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.004396426443960153,
"min": -0.7298067896973853
}
},
{
"name": "dense0/conv1/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.00635151559231328,
"min": -0.5589333721235686
}
},
{
"name": "dense0/conv1/pointwise_filter",
"shape": [
1,
1,
32,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.009354315552057004,
"min": -1.2628325995276957
}
},
{
"name": "dense0/conv1/bias",
"shape": [
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0029380727048013726,
"min": -0.5846764682554731
}
},
{
"name": "dense0/conv2/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0049374802439820535,
"min": -0.6171850304977566
}
},
{
"name": "dense0/conv2/pointwise_filter",
"shape": [
1,
1,
32,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.009941946758943446,
"min": -1.3421628124573652
}
},
{
"name": "dense0/conv2/bias",
"shape": [
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0030300481062309416,
"min": -0.5272283704841838
}
},
{
"name": "dense0/conv3/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005672684837790097,
"min": -0.7431217137505026
}
},
{
"name": "dense0/conv3/pointwise_filter",
"shape": [
1,
1,
32,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010712201455060173,
"min": -1.5639814124387852
}
},
{
"name": "dense0/conv3/bias",
"shape": [
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0030966934035806097,
"min": -0.3839899820439956
}
},
{
"name": "dense1/conv0/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0039155554537679636,
"min": -0.48161332081345953
}
},
{
"name": "dense1/conv0/pointwise_filter",
"shape": [
1,
1,
32,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.01023082966898002,
"min": -1.094698774580862
}
},
{
"name": "dense1/conv0/bias",
"shape": [
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0027264176630506327,
"min": -0.3871513081531898
}
},
{
"name": "dense1/conv1/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.004583378632863362,
"min": -0.5454220573107401
}
},
{
"name": "dense1/conv1/pointwise_filter",
"shape": [
1,
1,
64,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.00915846403907327,
"min": -1.117332612766939
}
},
{
"name": "dense1/conv1/bias",
"shape": [
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.003091680419211294,
"min": -0.5966943209077797
}
},
{
"name": "dense1/conv2/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005407439727409214,
"min": -0.708374604290607
}
},
{
"name": "dense1/conv2/pointwise_filter",
"shape": [
1,
1,
64,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.00946493943532308,
"min": -1.2399070660273235
}
},
{
"name": "dense1/conv2/bias",
"shape": [
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.004409168514550901,
"min": -0.9788354102303
}
},
{
"name": "dense1/conv3/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.004478132958505668,
"min": -0.6493292789833219
}
},
{
"name": "dense1/conv3/pointwise_filter",
"shape": [
1,
1,
64,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.011063695888893277,
"min": -1.2501976354449402
}
},
{
"name": "dense1/conv3/bias",
"shape": [
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.003909627596537272,
"min": -0.6646366914113363
}
},
{
"name": "dense2/conv0/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.003213915404151468,
"min": -0.3374611174359041
}
},
{
"name": "dense2/conv0/pointwise_filter",
"shape": [
1,
1,
64,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010917326048308728,
"min": -1.4520043644250609
}
},
{
"name": "dense2/conv0/bias",
"shape": [
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.002800439152063108,
"min": -0.38085972468058266
}
},
{
"name": "dense2/conv1/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0050568851770139206,
"min": -0.6927932692509071
}
},
{
"name": "dense2/conv1/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.01074961213504567,
"min": -1.3222022926106174
}
},
{
"name": "dense2/conv1/bias",
"shape": [
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0030654204242369708,
"min": -0.5487102559384177
}
},
{
"name": "dense2/conv2/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.00591809165244009,
"min": -0.917304206128214
}
},
{
"name": "dense2/conv2/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.01092823346455892,
"min": -1.366029183069865
}
},
{
"name": "dense2/conv2/bias",
"shape": [
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.002681120470458386,
"min": -0.36463238398234055
}
},
{
"name": "dense2/conv3/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0048311497650894465,
"min": -0.5797379718107336
}
},
{
"name": "dense2/conv3/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.011227761062921263,
"min": -1.4483811771168429
}
},
{
"name": "dense2/conv3/bias",
"shape": [
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0034643323982463162,
"min": -0.3360402426298927
}
},
{
"name": "dense3/conv0/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.003394978887894574,
"min": -0.49227193874471326
}
},
{
"name": "dense3/conv0/pointwise_filter",
"shape": [
1,
1,
128,
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010051267287310432,
"min": -1.2765109454884247
}
},
{
"name": "dense3/conv0/bias",
"shape": [
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.003142924752889895,
"min": -0.4588670139219247
}
},
{
"name": "dense3/conv1/depthwise_filter",
"shape": [
3,
3,
256,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.00448304671867221,
"min": -0.5872791201460595
}
},
{
"name": "dense3/conv1/pointwise_filter",
"shape": [
1,
1,
256,
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.016063522357566685,
"min": -2.3613377865623026
}
},
{
"name": "dense3/conv1/bias",
"shape": [
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.00287135781026354,
"min": -0.47664539650374765
}
},
{
"name": "dense3/conv2/depthwise_filter",
"shape": [
3,
3,
256,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006002906724518421,
"min": -0.7923836876364315
}
},
{
"name": "dense3/conv2/pointwise_filter",
"shape": [
1,
1,
256,
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.017087187019048954,
"min": -1.6061955797906016
}
},
{
"name": "dense3/conv2/bias",
"shape": [
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.003124481205846749,
"min": -0.46242321846531886
}
},
{
"name": "dense3/conv3/depthwise_filter",
"shape": [
3,
3,
256,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006576311588287353,
"min": -1.0193282961845398
}
},
{
"name": "dense3/conv3/pointwise_filter",
"shape": [
1,
1,
256,
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.015590153955945782,
"min": -1.99553970636106
}
},
{
"name": "dense3/conv3/bias",
"shape": [
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.004453541601405424,
"min": -0.6546706154065973
}
},
{
"name": "fc/weights",
"shape": [
256,
136
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010417488509533453,
"min": -1.500118345372817
}
},
{
"name": "fc/bias",
"shape": [
136
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0025084222648658005,
"min": 0.07683877646923065
}
}
],
"paths": [
"face_landmark_68_model-shard1"
]
}
]

face_recognized_attendance_login/static/src/js/weights/face_landmark_68_tiny_model-shard1 (binary)

Binary file not shown.

face_recognized_attendance_login/static/src/js/weights/face_landmark_68_tiny_model-weights_manifest.json (397 lines changed)

@@ -0,0 +1,397 @@
[
{
"weights": [
{
"name": "dense0/conv0/filters",
"shape": [
3,
3,
3,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.008194216092427571,
"min": -0.9423348506291708
}
},
{
"name": "dense0/conv0/bias",
"shape": [
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006839508168837603,
"min": -0.8412595047670252
}
},
{
"name": "dense0/conv1/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.009194007106855804,
"min": -1.2779669878529567
}
},
{
"name": "dense0/conv1/pointwise_filter",
"shape": [
1,
1,
32,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0036026100317637128,
"min": -0.3170296827952067
}
},
{
"name": "dense0/conv1/bias",
"shape": [
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.000740380117706224,
"min": -0.06367269012273527
}
},
{
"name": "dense0/conv2/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 1,
"min": 0
}
},
{
"name": "dense0/conv2/pointwise_filter",
"shape": [
1,
1,
32,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 1,
"min": 0
}
},
{
"name": "dense0/conv2/bias",
"shape": [
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0037702228508743585,
"min": -0.6220867703942692
}
},
{
"name": "dense1/conv0/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0033707996209462483,
"min": -0.421349952618281
}
},
{
"name": "dense1/conv0/pointwise_filter",
"shape": [
1,
1,
32,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.014611541991140328,
"min": -1.8556658328748217
}
},
{
"name": "dense1/conv0/bias",
"shape": [
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.002832523046755323,
"min": -0.30307996600281956
}
},
{
"name": "dense1/conv1/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006593170586754294,
"min": -0.6329443763284123
}
},
{
"name": "dense1/conv1/pointwise_filter",
"shape": [
1,
1,
64,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.012215249211180444,
"min": -1.6001976466646382
}
},
{
"name": "dense1/conv1/bias",
"shape": [
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.002384825547536214,
"min": -0.3028728445370992
}
},
{
"name": "dense1/conv2/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005859645441466687,
"min": -0.7617539073906693
}
},
{
"name": "dense1/conv2/pointwise_filter",
"shape": [
1,
1,
64,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.013121426806730382,
"min": -1.7845140457153321
}
},
{
"name": "dense1/conv2/bias",
"shape": [
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0032247188044529336,
"min": -0.46435950784122243
}
},
{
"name": "dense2/conv0/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.002659512618008782,
"min": -0.32977956463308894
}
},
{
"name": "dense2/conv0/pointwise_filter",
"shape": [
1,
1,
64,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.015499923743453681,
"min": -1.9839902391620712
}
},
{
"name": "dense2/conv0/bias",
"shape": [
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0032450980999890497,
"min": -0.522460794098237
}
},
{
"name": "dense2/conv1/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005911862382701799,
"min": -0.792189559282041
}
},
{
"name": "dense2/conv1/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.021025861478319356,
"min": -2.2077154552235325
}
},
{
"name": "dense2/conv1/bias",
"shape": [
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.00349616945958605,
"min": -0.46149436866535865
}
},
{
"name": "dense2/conv2/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.008104994250278847,
"min": -1.013124281284856
}
},
{
"name": "dense2/conv2/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.029337059282789044,
"min": -3.5791212325002633
}
},
{
"name": "dense2/conv2/bias",
"shape": [
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0038808938334969913,
"min": -0.4230174278511721
}
},
{
"name": "fc/weights",
"shape": [
128,
136
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.014016061670639936,
"min": -1.8921683255363912
}
},
{
"name": "fc/bias",
"shape": [
136
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0029505149698724935,
"min": 0.088760145008564
}
}
],
"paths": [
"face_landmark_68_tiny_model-shard1"
]
}
]

face_recognized_attendance_login/static/src/js/weights/face_recognition_model-shard1 (binary)

Binary file not shown.

face_recognized_attendance_login/static/src/js/weights/face_recognition_model-shard2 (6 lines changed)

File diff suppressed because one or more lines are too long

face_recognized_attendance_login/static/src/js/weights/face_recognition_model-weights_manifest.json (1462 lines changed)

File diff suppressed because it is too large

face_recognized_attendance_login/static/src/js/weights/mtcnn_model-shard1 (binary)

Binary file not shown.

face_recognized_attendance_login/static/src/js/weights/mtcnn_model-weights_manifest.json (402 lines changed)

@@ -0,0 +1,402 @@
[
{
"paths": [
"mtcnn_model-shard1"
],
"weights": [
{
"dtype": "float32",
"name": "pnet/conv1/weights",
"shape": [
3,
3,
3,
10
]
},
{
"dtype": "float32",
"name": "pnet/conv1/bias",
"shape": [
10
]
},
{
"dtype": "float32",
"name": "pnet/prelu1_alpha",
"shape": [
10
]
},
{
"dtype": "float32",
"name": "pnet/conv2/weights",
"shape": [
3,
3,
10,
16
]
},
{
"dtype": "float32",
"name": "pnet/conv2/bias",
"shape": [
16
]
},
{
"dtype": "float32",
"name": "pnet/prelu2_alpha",
"shape": [
16
]
},
{
"dtype": "float32",
"name": "pnet/conv3/weights",
"shape": [
3,
3,
16,
32
]
},
{
"dtype": "float32",
"name": "pnet/conv3/bias",
"shape": [
32
]
},
{
"dtype": "float32",
"name": "pnet/prelu3_alpha",
"shape": [
32
]
},
{
"dtype": "float32",
"name": "pnet/conv4_1/weights",
"shape": [
1,
1,
32,
2
]
},
{
"dtype": "float32",
"name": "pnet/conv4_1/bias",
"shape": [
2
]
},
{
"dtype": "float32",
"name": "pnet/conv4_2/weights",
"shape": [
1,
1,
32,
4
]
},
{
"dtype": "float32",
"name": "pnet/conv4_2/bias",
"shape": [
4
]
},
{
"dtype": "float32",
"name": "rnet/conv1/weights",
"shape": [
3,
3,
3,
28
]
},
{
"dtype": "float32",
"name": "rnet/conv1/bias",
"shape": [
28
]
},
{
"dtype": "float32",
"name": "rnet/prelu1_alpha",
"shape": [
28
]
},
{
"dtype": "float32",
"name": "rnet/conv2/weights",
"shape": [
3,
3,
28,
48
]
},
{
"dtype": "float32",
"name": "rnet/conv2/bias",
"shape": [
48
]
},
{
"dtype": "float32",
"name": "rnet/prelu2_alpha",
"shape": [
48
]
},
{
"dtype": "float32",
"name": "rnet/conv3/weights",
"shape": [
2,
2,
48,
64
]
},
{
"dtype": "float32",
"name": "rnet/conv3/bias",
"shape": [
64
]
},
{
"dtype": "float32",
"name": "rnet/prelu3_alpha",
"shape": [
64
]
},
{
"dtype": "float32",
"name": "rnet/fc1/weights",
"shape": [
576,
128
]
},
{
"dtype": "float32",
"name": "rnet/fc1/bias",
"shape": [
128
]
},
{
"dtype": "float32",
"name": "rnet/prelu4_alpha",
"shape": [
128
]
},
{
"dtype": "float32",
"name": "rnet/fc2_1/weights",
"shape": [
128,
2
]
},
{
"dtype": "float32",
"name": "rnet/fc2_1/bias",
"shape": [
2
]
},
{
"dtype": "float32",
"name": "rnet/fc2_2/weights",
"shape": [
128,
4
]
},
{
"dtype": "float32",
"name": "rnet/fc2_2/bias",
"shape": [
4
]
},
{
"dtype": "float32",
"name": "onet/conv1/weights",
"shape": [
3,
3,
3,
32
]
},
{
"dtype": "float32",
"name": "onet/conv1/bias",
"shape": [
32
]
},
{
"dtype": "float32",
"name": "onet/prelu1_alpha",
"shape": [
32
]
},
{
"dtype": "float32",
"name": "onet/conv2/weights",
"shape": [
3,
3,
32,
64
]
},
{
"dtype": "float32",
"name": "onet/conv2/bias",
"shape": [
64
]
},
{
"dtype": "float32",
"name": "onet/prelu2_alpha",
"shape": [
64
]
},
{
"dtype": "float32",
"name": "onet/conv3/weights",
"shape": [
3,
3,
64,
64
]
},
{
"dtype": "float32",
"name": "onet/conv3/bias",
"shape": [
64
]
},
{
"dtype": "float32",
"name": "onet/prelu3_alpha",
"shape": [
64
]
},
{
"dtype": "float32",
"name": "onet/conv4/weights",
"shape": [
2,
2,
64,
128
]
},
{
"dtype": "float32",
"name": "onet/conv4/bias",
"shape": [
128
]
},
{
"dtype": "float32",
"name": "onet/prelu4_alpha",
"shape": [
128
]
},
{
"dtype": "float32",
"name": "onet/fc1/weights",
"shape": [
1152,
256
]
},
{
"dtype": "float32",
"name": "onet/fc1/bias",
"shape": [
256
]
},
{
"dtype": "float32",
"name": "onet/prelu5_alpha",
"shape": [
256
]
},
{
"dtype": "float32",
"name": "onet/fc2_1/weights",
"shape": [
256,
2
]
},
{
"dtype": "float32",
"name": "onet/fc2_1/bias",
"shape": [
2
]
},
{
"dtype": "float32",
"name": "onet/fc2_2/weights",
"shape": [
256,
4
]
},
{
"dtype": "float32",
"name": "onet/fc2_2/bias",
"shape": [
4
]
},
{
"dtype": "float32",
"name": "onet/fc2_3/weights",
"shape": [
256,
10
]
},
{
"dtype": "float32",
"name": "onet/fc2_3/bias",
"shape": [
10
]
}
]
}
]

face_recognized_attendance_login/static/src/js/weights/ssd_mobilenetv1_model-shard1 (binary)

Binary file not shown.

face_recognized_attendance_login/static/src/js/weights/ssd_mobilenetv1_model-shard2 (137 lines changed)

File diff suppressed because one or more lines are too long

face_recognized_attendance_login/static/src/js/weights/ssd_mobilenetv1_model-weights_manifest.json (1936 lines changed)

File diff suppressed because it is too large

face_recognized_attendance_login/static/src/js/weights/tiny_face_detector_model-shard1 (binary)

Binary file not shown.

face_recognized_attendance_login/static/src/js/weights/tiny_face_detector_model-weights_manifest.json (273 lines changed)

@@ -0,0 +1,273 @@
[
{
"weights": [
{
"name": "conv0/filters",
"shape": [
3,
3,
3,
16
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.009007044399485869,
"min": -1.2069439495311063
}
},
{
"name": "conv0/bias",
"shape": [
16
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005263455241334205,
"min": -0.9211046672334858
}
},
{
"name": "conv1/depthwise_filter",
"shape": [
3,
3,
16,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.004001977630690033,
"min": -0.5042491814669441
}
},
{
"name": "conv1/pointwise_filter",
"shape": [
1,
1,
16,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.013836609615999109,
"min": -1.411334180831909
}
},
{
"name": "conv1/bias",
"shape": [
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0015159862590771096,
"min": -0.30926119685173037
}
},
{
"name": "conv2/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.002666276225856706,
"min": -0.317286870876948
}
},
{
"name": "conv2/pointwise_filter",
"shape": [
1,
1,
32,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.015265831292844286,
"min": -1.6792414422128714
}
},
{
"name": "conv2/bias",
"shape": [
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0020280554598453,
"min": -0.37113414915168985
}
},
{
"name": "conv3/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006100742489683862,
"min": -0.8907084034938438
}
},
{
"name": "conv3/pointwise_filter",
"shape": [
1,
1,
64,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.016276211832083907,
"min": -2.0508026908425725
}
},
{
"name": "conv3/bias",
"shape": [
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.003394414279975143,
"min": -0.7637432129944072
}
},
{
"name": "conv4/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006716050119961009,
"min": -0.8059260143953211
}
},
{
"name": "conv4/pointwise_filter",
"shape": [
1,
1,
128,
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.021875603993733724,
"min": -2.8875797271728514
}
},
{
"name": "conv4/bias",
"shape": [
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0041141652009066415,
"min": -0.8187188749804216
}
},
{
"name": "conv5/depthwise_filter",
"shape": [
3,
3,
256,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.008423839597141042,
"min": -0.9013508368940915
}
},
{
"name": "conv5/pointwise_filter",
"shape": [
1,
1,
256,
512
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.030007277283014035,
"min": -3.8709387695088107
}
},
{
"name": "conv5/bias",
"shape": [
512
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.008402082966823203,
"min": -1.4871686851277068
}
},
{
"name": "conv8/filters",
"shape": [
1,
1,
512,
25
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.028336129469030042,
"min": -4.675461362389957
}
},
{
"name": "conv8/bias",
"shape": [
25
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.002268134028303857,
"min": -0.41053225912299807
}
}
],
"paths": [
"tiny_face_detector_model-shard1"
]
}
]

face_recognized_attendance_login/static/src/xml/face_recognition_template.xml (18 lines changed)

@@ -0,0 +1,18 @@
<?xml version="1.0" encoding="UTF-8"?>
<templates xml:space="preserve">
<t t-inherit="hr_attendance.public_kiosk_app" t-inherit-mode="extension">
<xpath expr="." position="inside">
<div id="WebCamModal"
style="display: block;pointer-events: none; position: fixed; z-index: 9999; top: 50%; left: 30%; transform: translate(-50%, -50%); width: 50px; background-color: transparent">
<div class="container">
<video id="video" width="500" height="500" autoplay=""
t-ref="video"
muted=""
style="display: block;margin-top: 213px; margin-left: 150px;"/>
<img id="employee_image" style="visibility:hidden;height:400px;width:400px"
t-ref="employee_image"/>
</div>
</div>
</xpath>
</t>
</templates>