
Oct 23: [FIX] Bug fixed 'pos_face_recognition'

pull/345/merge
Cybrosys Technologies 6 months ago
parent commit 575711c334
  1. 10
      pos_face_recognition/README.rst
  2. 23
      pos_face_recognition/__init__.py
  3. 31
      pos_face_recognition/__manifest__.py
  4. 22
      pos_face_recognition/controllers/__init__.py
  5. 38
      pos_face_recognition/controllers/pos_face_recognition.py
  6. 10
      pos_face_recognition/doc/RELEASE_NOTES.md
  7. 20
      pos_face_recognition/models/__init__.py
  8. 87
      pos_face_recognition/models/hr_employee.py
  9. 39
      pos_face_recognition/static/description/index.html
  10. 226
      pos_face_recognition/static/src/js/LoginScreen.js
  11. 1
      pos_face_recognition/static/src/js/face-api.min.js
  12. 64
      pos_face_recognition/static/src/js/pos_face_recognition.js
  13. BIN
      pos_face_recognition/static/src/js/weights/age_gender_model-shard1
  14. 618
      pos_face_recognition/static/src/js/weights/age_gender_model-weights_manifest.json
  15. BIN
      pos_face_recognition/static/src/js/weights/face_expression_model-shard1
  16. 606
      pos_face_recognition/static/src/js/weights/face_expression_model-weights_manifest.json
  17. BIN
      pos_face_recognition/static/src/js/weights/face_landmark_68_model-shard1
  18. 691
      pos_face_recognition/static/src/js/weights/face_landmark_68_model-weights_manifest.json
  19. BIN
      pos_face_recognition/static/src/js/weights/face_landmark_68_tiny_model-shard1
  20. 397
      pos_face_recognition/static/src/js/weights/face_landmark_68_tiny_model-weights_manifest.json
  21. BIN
      pos_face_recognition/static/src/js/weights/face_recognition_model-shard1
  22. 6
      pos_face_recognition/static/src/js/weights/face_recognition_model-shard2
  23. 1462
      pos_face_recognition/static/src/js/weights/face_recognition_model-weights_manifest.json
  24. BIN
      pos_face_recognition/static/src/js/weights/mtcnn_model-shard1
  25. 402
      pos_face_recognition/static/src/js/weights/mtcnn_model-weights_manifest.json
  26. BIN
      pos_face_recognition/static/src/js/weights/ssd_mobilenetv1_model-shard1
  27. 137
      pos_face_recognition/static/src/js/weights/ssd_mobilenetv1_model-shard2
  28. 1936
      pos_face_recognition/static/src/js/weights/ssd_mobilenetv1_model-weights_manifest.json
  29. BIN
      pos_face_recognition/static/src/js/weights/tiny_face_detector_model-shard1
  30. 273
      pos_face_recognition/static/src/js/weights/tiny_face_detector_model-weights_manifest.json
  31. 36
      pos_face_recognition/static/src/xml/LoginScreen.xml
  32. 14
      pos_face_recognition/views/hr_employee_view.xml

10
pos_face_recognition/README.rst

@@ -1,15 +1,14 @@
.. image:: https://img.shields.io/badge/license-LGPL--3-green.svg
    :target: http://www.gnu.org/licenses/lgpl-3.0-standalone.html
    :target: https://www.gnu.org/licenses/lgpl-3.0-standalone.html
    :alt: License: LGPL-3
Pos Face Recognition
=======================
====================
This module helps you to log in to a POS session using face recognition
Configuration
=============
Opencv [version: 4.2.0] and face_recognition need to be installed
[https://linuxize.com/post/how-to-install-opencv-on-ubuntu-20-04/]
* No additional configuration required
Company
-------
@@ -22,7 +21,8 @@ General Public License, Version 3 (LGPL v3).
Credits
-------
Developer: (V16) Fathima Mazlin AM, Contact: odoo@cybrosys.com
Developer: (V16) Fathima Mazlin AM
Contact: odoo@cybrosys.com
Contacts
--------

23
pos_face_recognition/__init__.py

@@ -1,22 +1,25 @@
# -*- coding: utf-8 -*-
###############################################################################
################################################################################
#
# Cybrosys Technologies Pvt. Ltd.
# Copyright (C) 2023-TODAY Cybrosys Technologies(<https://www.cybrosys.com>)
# Author: Fathima Mazlin AM (odoo@cybrosys.com)
#
# This program is free software: you can modify
# it under the terms of the GNU LESSER GENERAL PUBLIC LICENSE (LGPL) as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# Copyright (C) 2024-TODAY Cybrosys Technologies(<https://www.cybrosys.com>)
# Author: Fathima Mazlin AM (<https://www.cybrosys.com>)
#
# You can modify it under the terms of the GNU LESSER
# GENERAL PUBLIC LICENSE (LGPL v3), Version 3.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU LESSER GENERAL PUBLIC LICENSE for more details.
# GNU LESSER GENERAL PUBLIC LICENSE (LGPL v3) for more details.
#
# You should have received a copy of the GNU LESSER GENERAL PUBLIC LICENSE
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# (LGPL v3) along with this program.
# If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
################################################################################
from . import controllers
from . import models

31
pos_face_recognition/__manifest__.py

@@ -1,25 +1,24 @@
# -*- coding: utf-8 -*-
###############################################################################
################################################################################
#
# Cybrosys Technologies Pvt. Ltd.
# Copyright (C) 2023-TODAY Cybrosys Technologies(<https://www.cybrosys.com>)
# Author: Fathima Mazlin AM (odoo@cybrosys.com)
#
# Copyright (C) 2024-TODAY Cybrosys Technologies(<https://www.cybrosys.com>)
# Author: Fathima Mazlin AM (<https://www.cybrosys.com>)
#
# This program is free software: you can modify
# it under the terms of the GNU LESSER GENERAL PUBLIC LICENSE (LGPL) as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# You can modify it under the terms of the GNU LESSER
# GENERAL PUBLIC LICENSE (LGPL v3), Version 3.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU LESSER GENERAL PUBLIC LICENSE for more details.
# GNU LESSER GENERAL PUBLIC LICENSE (LGPL v3) for more details.
#
# You should have received a copy of the GNU LESSER GENERAL PUBLIC LICENSE
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# (LGPL v3) along with this program.
# If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
################################################################################
{
    'name': 'Pos Face Recognition',
    'version': '16.0.1.0.0',
@@ -32,16 +31,20 @@
    'company': 'Cybrosys Techno Solutions',
    'maintainer': 'Cybrosys Techno Solutions',
    'website': 'https://www.cybrosys.com',
    'depends': ['base', 'point_of_sale', 'hr'],
    'depends': ['point_of_sale', 'pos_hr'],
    'data': [
        'views/hr_employee_view.xml'
    ],
    'assets': {
        'point_of_sale.assets': [
            'pos_face_recognition/static/src/js/pos_face_recognition.js',
            'pos_face_recognition/static/src/js/face-api.min.js',
            'pos_face_recognition/static/src/js/LoginScreen.js',
            'pos_face_recognition/static/src/xml/LoginScreen.xml',
        ],
    },
    'external_dependencies': {'python': ['cv2', 'face_recognition']},
    'images': ['static/description/banner.png'],
    'license': 'LGPL-3',
    'installable': True,
    'auto_install': False,
    'application': True,
    'application': False,
}

22
pos_face_recognition/controllers/__init__.py

@@ -0,0 +1,22 @@
# -*- coding: utf-8 -*-
################################################################################
#
# Cybrosys Technologies Pvt. Ltd.
#
# Copyright (C) 2024-TODAY Cybrosys Technologies(<https://www.cybrosys.com>)
# Author: Fathima Mazlin AM (<https://www.cybrosys.com>)
#
# You can modify it under the terms of the GNU LESSER
# GENERAL PUBLIC LICENSE (LGPL v3), Version 3.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU LESSER GENERAL PUBLIC LICENSE (LGPL v3) for more details.
#
# You should have received a copy of the GNU LESSER GENERAL PUBLIC LICENSE
# (LGPL v3) along with this program.
# If not, see <http://www.gnu.org/licenses/>.
#
################################################################################
from . import pos_face_recognition

38
pos_face_recognition/controllers/pos_face_recognition.py

@@ -0,0 +1,38 @@
# -*- coding: utf-8 -*-
################################################################################
#
# Cybrosys Technologies Pvt. Ltd.
#
# Copyright (C) 2024-TODAY Cybrosys Technologies(<https://www.cybrosys.com>)
# Author: Fathima Mazlin AM (<https://www.cybrosys.com>)
#
# You can modify it under the terms of the GNU LESSER
# GENERAL PUBLIC LICENSE (LGPL v3), Version 3.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU LESSER GENERAL PUBLIC LICENSE (LGPL v3) for more details.
#
# You should have received a copy of the GNU LESSER GENERAL PUBLIC LICENSE
# (LGPL v3) along with this program.
# If not, see <http://www.gnu.org/licenses/>.
#
################################################################################
from odoo import http
from odoo.http import request


class CashierImage(http.Controller):
    """Controller exposing the cashier image for POS face recognition."""

    @http.route('/cashier/image', type='json', auth='public')
    def get_cashier_image(self, cashier_id):
        """Return the image of the selected cashier.

        :param cashier_id: id of the hr.employee record
        :return: base64-encoded image, or False if none is set
        """
        cashier = request.env['hr.employee'].browse(cashier_id)
        return cashier.image
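For reference, the new JSON route can be exercised directly from the browser console; a minimal sketch, assuming an hr.employee record with id 1 that has an image set (Odoo type='json' routes expect a JSON-RPC 2.0 envelope):

// Hypothetical manual test of the /cashier/image route added above;
// not part of the commit. Odoo wraps type='json' controllers in
// JSON-RPC 2.0, and "params" become keyword arguments of
// get_cashier_image().
fetch('/cashier/image', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
        jsonrpc: '2.0',
        method: 'call',
        params: { cashier_id: 1 },  // assumed employee id
    }),
}).then((response) => response.json())
    .then(({ result }) => {
        // result is the base64 image string, or false when no image is set
        console.log(result ? 'cashier image received' : 'no image on record');
    });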

10
pos_face_recognition/doc/RELEASE_NOTES.md

@@ -1,7 +1,13 @@
## Module <pos_face_recognition>
#### 31.10.2023
#### Version 16.0.1.0.0
#### 24.06.2024
#### Version 15.0.1.0.0
#### ADD
- Initial commit for Pos Face Recognition
#### 23.10.2024
#### Version 16.0.1.1.1
#### UPDT
- Bug fix in face recognition. Converted the functionality from Python to JS.

20
pos_face_recognition/models/__init__.py

@@ -1,22 +1,22 @@
# -*- coding: utf-8 -*-
###############################################################################
################################################################################
#
# Cybrosys Technologies Pvt. Ltd.
# Copyright (C) 2023-TODAY Cybrosys Technologies(<https://www.cybrosys.com>)
# Author: Fathima Mazlin AM (odoo@cybrosys.com)
#
# This program is free software: you can modify
# it under the terms of the GNU LESSER GENERAL PUBLIC LICENSE (LGPL) as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# Copyright (C) 2024-TODAY Cybrosys Technologies(<https://www.cybrosys.com>)
# Author: Fathima Mazlin AM (<https://www.cybrosys.com>)
#
# You can modify it under the terms of the GNU LESSER
# GENERAL PUBLIC LICENSE (LGPL v3), Version 3.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU LESSER GENERAL PUBLIC LICENSE for more details.
# GNU LESSER GENERAL PUBLIC LICENSE (LGPL v3) for more details.
#
# You should have received a copy of the GNU LESSER GENERAL PUBLIC LICENSE
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# (LGPL v3) along with this program.
# If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
################################################################################
from . import hr_employee

87
pos_face_recognition/models/hr_employee.py

@@ -1,88 +1,31 @@
# -*- coding: utf-8 -*-
###############################################################################
################################################################################
#
# Cybrosys Technologies Pvt. Ltd.
# Copyright (C) 2023-TODAY Cybrosys Technologies(<https://www.cybrosys.com>)
# Author: Fathima Mazlin AM (odoo@cybrosys.com)
#
# This program is free software: you can modify
# it under the terms of the GNU LESSER GENERAL PUBLIC LICENSE (LGPL) as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# Copyright (C) 2024-TODAY Cybrosys Technologies(<https://www.cybrosys.com>)
# Author: Fathima Mazlin AM (<https://www.cybrosys.com>)
#
# You can modify it under the terms of the GNU LESSER
# GENERAL PUBLIC LICENSE (LGPL v3), Version 3.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU LESSER GENERAL PUBLIC LICENSE for more details.
# GNU LESSER GENERAL PUBLIC LICENSE (LGPL v3) for more details.
#
# You should have received a copy of the GNU LESSER GENERAL PUBLIC LICENSE
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# (LGPL v3) along with this program.
# If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
import base64
import cv2
import face_recognition
from io import BytesIO
import numpy as np
from PIL import Image
import time
from odoo import api, models
################################################################################
from odoo import fields, models


class HrEmployee(models.Model):
    """
    Inheriting hr.employee model to add an image field
    """
    _inherit = 'hr.employee'
    """For comparing image """

    @api.model
    def camera_open(self, kwargs):
        """Capture the image from webcam and compare
        with already saved image"""
        cap = cv2.VideoCapture(0)  # 0 is the index of the default camera
        image = self.env['hr.employee'].browse(kwargs["id"]).image_1920
        binary_data = base64.b64decode(image)
        image_bytes = BytesIO(binary_data)
        pil_image = Image.open(image_bytes)
        np_image = np.array(pil_image)
        img_saved = cv2.cvtColor(np_image, cv2.COLOR_BGR2RGB)
        encode_image_saved = face_recognition.face_encodings(img_saved)
        face_recognized = 0
        start_time = time.time()
        camera_time = 0
        login_now = 0
        while True:
            ret, frame = cap.read()  # Read a frame from the camera
            cv2.imshow('frame', frame)  # Display the frame
            imgs = cv2.resize(frame, (0, 0), None, 0.25, 0.25)
            imgs = cv2.cvtColor(imgs, cv2.COLOR_BGR2RGB)
            face_current_frame = face_recognition.face_locations(imgs)
            encode_current_frame = face_recognition.face_encodings(
                imgs,
                face_current_frame)
            for encodeFace, faceLoc in zip(encode_current_frame,
                                           face_current_frame):
                matches1 = face_recognition.compare_faces(encode_image_saved,
                                                          encodeFace)
                face_distance = face_recognition.face_distance(
                    encode_image_saved,
                    encodeFace)
                match_index = np.argmin(face_distance)
                elapsed_time = time.time() - start_time
                if matches1[match_index]:
                    face_recognized = 1
                    if elapsed_time > 6:
                        login_now = 1
                        cap.release()
                        break
            if login_now == 1:
                break
            if camera_time < 40 and login_now == 0:
                camera_time = camera_time + 1
            else:
                cap.release()
                cv2.destroyAllWindows()
                break
            if cv2.waitKey(1) == ord('q'):
                break
        cap.release()  # Release the camera
        cv2.destroyAllWindows()  # Close all windows
        return face_recognized
    image = fields.Binary(string="Image", help="Add image for POS face login")

39
pos_face_recognition/static/description/index.html

@@ -74,21 +74,6 @@
</div>
</a>
</div>
<div class="col-sm-12 col-md-6 my-3">
<a href="#configuration">
<div class="d-flex justify-content-between align-items-center"
style="background-color: #f5f5f5; padding: 30px; width: 100%;">
<div>
<span style="color: #714B67; font-size: 24px; font-weight: 500; display: block;">Configuration</span>
<span
style="color: #714B67; font-size: 16px; font-weight: 400; color:#282F33; display: block;">View
configuration of this
module</span>
</div>
<img src="assets/misc/right-arrow.png" width="36" height="36"/>
</div>
</a>
</div>
<div class="col-sm-12 col-md-6 my-3">
<a href="#screenshots">
<div class="d-flex justify-content-between align-items-center"
@@ -154,30 +139,6 @@
</div>
</div>
<!-- END OF FEATURES SECTION -->
<!-- CONFIGURATION SECTION-->
<div class="d-flex align-items-center"
style="border-bottom: 2px solid #714B67; padding: 15px 0px;" id="configuration">
<div class="d-flex justify-content-center align-items-center mr-2"
style="background-color: #F5F5F5; border-radius: 0px; width: 40px; height: 40px;">
</div>
<h2 class="mt-2"
style="font-family: 'Montserrat', sans-serif; font-size: 24px; font-weight: bold;">
Configuration
</h2>
</div>
<div class="row"
style="font-family: 'Montserrat', sans-serif; font-weight: 400; font-size: 14px; line-height: 200%;">
<div class="col-sm-12 py-4">
<p>Opencv [version:4.2.0] and face recognition need to be installed.</p>
<p><b>Following steps required to install Opencv in Ubuntu 20.04:</b></p>
<p>1. $ sudo apt update</p>
<p>2. $ sudo apt install libopencv-dev python3-opencv</p>
<p>Verify the installation by importing the cv2 module and printing the OpenCV version:</p>
<p>python3 -c "import cv2; print(cv2.__version__)"</p>
<p><b>Following steps required to install face recognition in Ubuntu 20.04:</b></p>
<p>1. $ pip install face_recognition </p>
</div>
</div>
<!-- SCREENSHOTS SECTION -->
<div class="d-flex align-items-center"

226
pos_face_recognition/static/src/js/LoginScreen.js

@@ -0,0 +1,226 @@
/** @odoo-module **/
const LoginScreen = require('pos_hr.LoginScreen');
import { patch } from "@web/core/utils/patch";
const { _t } = require('web.core');
const { useService } = require("@web/core/utils/hooks");
import { onMounted, useRef, useState } from "@odoo/owl";
var ajax = require('web.ajax');
const { Gui } = require('point_of_sale.Gui');
const { posbus } = require('point_of_sale.utils');

const MODEL_URL = '/pos_face_recognition/static/src/js/weights';
faceapi.nets.ssdMobilenetv1.loadFromUri(MODEL_URL);
faceapi.nets.faceLandmark68Net.loadFromUri(MODEL_URL);
faceapi.nets.faceRecognitionNet.loadFromUri(MODEL_URL);
faceapi.nets.tinyFaceDetector.load(MODEL_URL);
faceapi.nets.faceLandmark68TinyNet.load(MODEL_URL);
faceapi.nets.faceExpressionNet.load(MODEL_URL);
faceapi.nets.ageGenderNet.load(MODEL_URL);

// patching Login Screen
patch(LoginScreen.prototype, 'pos_hr/static/src/js/LoginScreen.js', {
    // setup function
    setup() {
        this._super.apply(this, arguments);
        this.root = useRef("LoginRoot");
        this.ImageRoot = useRef("ImageRoot");
        this.faceMatcher = null;
    },
    // overriding back function
    async back() {
        await this.loadImage();
    },
    // Function to load the cashier image
    async loadImage() {
        var cashier_id = this.env.pos.get_cashier().id;
        await ajax.jsonRpc('/cashier/image/', 'call', {
            cashier_id: cashier_id,
        }).then(async data => {
            if (data) {
                this.have_image = data;
                const employee_image = this.ImageRoot.el;
                employee_image.src = "data:image/jpeg;base64," + data;
                await this.startWebcam();
            } else {
                await Gui.showPopup("ErrorPopup", {
                    'title': _t('Authentication failed'),
                    'body': _t('Selected cashier has no image.'),
                });
                location.reload();
            }
        });
    },
    // Function to start the webcam
    startWebcam() {
        const video = this.root.el.querySelector('#video');
        navigator.mediaDevices.getUserMedia(
            { video: true, audio: false }
        ).then((stream) => {
            video.srcObject = stream;
        }).catch((error) => {
            console.error(error);
        }).then(() => this.faceRecognition(video));
    },
    // Function to get the descriptor of the cashier image
    async getLabeledFaceDescriptions() {
        const employee_image = this.ImageRoot.el;
        const detections = await faceapi
            .detectSingleFace(employee_image)
            .withFaceLandmarks()
            .withFaceExpressions()
            .withFaceDescriptor();
        return detections;
    },
    // Function which compares the webcam image with the cashier image
    async faceRecognition(video) {
        const labeledFaceDescriptors = await this.getLabeledFaceDescriptions();
        if (!this.faceMatcher) {
            this.faceMatcher = new faceapi.FaceMatcher([labeledFaceDescriptors.descriptor]);
        }
        this.root.el.querySelector('.screen-login').style.zIndex = -1;
        video.addEventListener('play', () => {
            const canvas = faceapi.createCanvasFromMedia(video);
            document.body.append(canvas);
            const displaySize = { width: video.width, height: video.height };
            faceapi.matchDimensions(canvas, displaySize);
            setInterval(async () => {
                const detections = await faceapi
                    .detectAllFaces(video)
                    .withFaceLandmarks()
                    .withFaceExpressions()
                    .withFaceDescriptors();
                detections.forEach((detection) => {
                    const match = this.faceMatcher.findBestMatch(detection.descriptor);
                    if (match._distance < 0.4) { // Adjust threshold as needed
                        const modal = this.root.el.querySelector('#WebCamModal');
                        if (modal) {
                            modal.style.display = 'none';
                            this.props.resolve({
                                confirmed: false,
                                payload: false
                            });
                            this.trigger('close-temp-screen');
                            this.env.pos.hasLoggedIn = true;
                            this.env.posbus.trigger('start-cash-control');
                            video.srcObject.getTracks().forEach(track => track.stop());
                            canvas.remove();
                        }
                    } else {
                        Gui.showPopup("ErrorPopup", {
                            'title': _t('Unauthorized Access Detected'),
                            'body': _t('Face Recognition Failed.'),
                        });
                        location.reload();
                    }
                });
            }, 300);
        });
    },
});
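Two details of the patch above are worth flagging: the weight files are loaded without being awaited, and the login decision is the match._distance < 0.4 comparison (face-api.js descriptors are 128-dimensional vectors compared by Euclidean distance, where lower means more similar; the library's customary default threshold is 0.6). A minimal sketch of a guarded variant, reusing faceapi and MODEL_URL from the file above; modelsReady and isSameFace are illustrative names, not part of the module:

// Illustrative sketch, not part of the commit: await the model weights
// before any detection runs, instead of firing loadFromUri() and hoping
// they finish before the webcam starts.
const modelsReady = Promise.all([
    faceapi.nets.ssdMobilenetv1.loadFromUri(MODEL_URL),
    faceapi.nets.faceLandmark68Net.loadFromUri(MODEL_URL),
    faceapi.nets.faceRecognitionNet.loadFromUri(MODEL_URL),
]);

// The same similarity test FaceMatcher runs internally: Euclidean
// distance between two 128-dimensional descriptors.
async function isSameFace(referenceDescriptor, liveDescriptor) {
    await modelsReady;
    const distance = faceapi.euclideanDistance(referenceDescriptor, liveDescriptor);
    return distance < 0.4; // the threshold used in faceRecognition() above
}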

1
pos_face_recognition/static/src/js/face-api.min.js

File diff suppressed because one or more lines are too long

64
pos_face_recognition/static/src/js/pos_face_recognition.js

@@ -1,64 +0,0 @@
odoo.define('pos_face_recognition.pos_face_recognition', function (require) {
    'use strict';
    const LoginScreen = require('pos_hr.LoginScreen');
    const Registries = require('point_of_sale.Registries');
    const SelectCashierMixin = require('pos_hr.SelectCashierMixin');
    const rpc = require('web.rpc');
    const FaceRecognition = (LoginScreen) =>
        class extends LoginScreen {
            /**
             For camera open
             **/
            async cameraOpen(employee) {
                var self = this;
                await rpc.query({
                    model: 'hr.employee',
                    method: 'camera_open',
                    args: [employee]
                }).then(function (data) {
                    if (data == 1) {
                        self.env.pos.set_cashier(employee);
                        self.props.resolve({ confirmed: true, payload: true });
                        self.trigger('close-temp-screen');
                    } else {
                        self.showPopup('ErrorPopup', {
                            'title': "Unauthorized Access detected",
                            'body': "Face Recognition Failed",
                        });
                        return;
                    }
                });
            }
            /**
             For selecting the cashier
             **/
            async selectCashier() {
                if (this.env.pos.config.module_pos_hr) {
                    const employeesList = this.env.pos.employees
                        .filter((employee) => employee.id !==
                            this.env.pos.get_cashier().id).map((employee) => {
                                return {
                                    id: employee.id,
                                    item: employee,
                                    label: employee.name,
                                    isSelected: false,
                                };
                            });
                    let { confirmed, payload: employee } =
                        await this.showPopup('SelectionPopup', {
                            title: this.env._t('Change Cashier'),
                            list: employeesList,
                        });
                    if (!confirmed) {
                        return;
                    }
                    if (employee) {
                        employee = await this.cameraOpen(employee);
                    }
                    return employee;
                }
            }
        };
    Registries.Component.extend(LoginScreen, FaceRecognition);
    return FaceRecognition;
});

BIN
pos_face_recognition/static/src/js/weights/age_gender_model-shard1

Binary file not shown.

618
pos_face_recognition/static/src/js/weights/age_gender_model-weights_manifest.json

@@ -0,0 +1,618 @@
[
{
"weights": [
{
"name": "entry_flow/conv_in/filters",
"shape": [
3,
3,
3,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005431825039433498,
"min": -0.7441600304023892
}
},
{
"name": "entry_flow/conv_in/bias",
"shape": [
32
],
"dtype": "float32"
},
{
"name": "entry_flow/reduction_block_0/separable_conv0/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005691980614381678,
"min": -0.6090419257388395
}
},
{
"name": "entry_flow/reduction_block_0/separable_conv0/pointwise_filter",
"shape": [
1,
1,
32,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.009089225881239947,
"min": -1.1179747833925135
}
},
{
"name": "entry_flow/reduction_block_0/separable_conv0/bias",
"shape": [
64
],
"dtype": "float32"
},
{
"name": "entry_flow/reduction_block_0/separable_conv1/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.00683894624897078,
"min": -0.8138346036275228
}
},
{
"name": "entry_flow/reduction_block_0/separable_conv1/pointwise_filter",
"shape": [
1,
1,
64,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.011632566358528886,
"min": -1.3028474321552352
}
},
{
"name": "entry_flow/reduction_block_0/separable_conv1/bias",
"shape": [
64
],
"dtype": "float32"
},
{
"name": "entry_flow/reduction_block_0/expansion_conv/filters",
"shape": [
1,
1,
32,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010254812240600587,
"min": -0.9229331016540528
}
},
{
"name": "entry_flow/reduction_block_0/expansion_conv/bias",
"shape": [
64
],
"dtype": "float32"
},
{
"name": "entry_flow/reduction_block_1/separable_conv0/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0052509616403018725,
"min": -0.6406173201168285
}
},
{
"name": "entry_flow/reduction_block_1/separable_conv0/pointwise_filter",
"shape": [
1,
1,
64,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010788509424994973,
"min": -1.4564487723743214
}
},
{
"name": "entry_flow/reduction_block_1/separable_conv0/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "entry_flow/reduction_block_1/separable_conv1/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.00553213918910307,
"min": -0.7025816770160899
}
},
{
"name": "entry_flow/reduction_block_1/separable_conv1/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.013602388606351965,
"min": -1.6186842441558837
}
},
{
"name": "entry_flow/reduction_block_1/separable_conv1/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "entry_flow/reduction_block_1/expansion_conv/filters",
"shape": [
1,
1,
64,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.007571851038465313,
"min": -1.158493208885193
}
},
{
"name": "entry_flow/reduction_block_1/expansion_conv/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "middle_flow/main_block_0/separable_conv0/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005766328409606335,
"min": -0.6688940955143349
}
},
{
"name": "middle_flow/main_block_0/separable_conv0/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.012136116214826995,
"min": -1.5776951079275094
}
},
{
"name": "middle_flow/main_block_0/separable_conv0/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "middle_flow/main_block_0/separable_conv1/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.004314773222979377,
"min": -0.5652352922102984
}
},
{
"name": "middle_flow/main_block_0/separable_conv1/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.01107162026798024,
"min": -1.2400214700137868
}
},
{
"name": "middle_flow/main_block_0/separable_conv1/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "middle_flow/main_block_0/separable_conv2/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0036451735917259667,
"min": -0.4848080876995536
}
},
{
"name": "middle_flow/main_block_0/separable_conv2/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.008791744942758598,
"min": -1.134135097615859
}
},
{
"name": "middle_flow/main_block_0/separable_conv2/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "middle_flow/main_block_1/separable_conv0/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.004915751896652521,
"min": -0.6095532351849126
}
},
{
"name": "middle_flow/main_block_1/separable_conv0/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010868691463096469,
"min": -1.3368490499608656
}
},
{
"name": "middle_flow/main_block_1/separable_conv0/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "middle_flow/main_block_1/separable_conv1/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005010117269029804,
"min": -0.6012140722835765
}
},
{
"name": "middle_flow/main_block_1/separable_conv1/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010311148213405235,
"min": -1.3816938605963016
}
},
{
"name": "middle_flow/main_block_1/separable_conv1/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "middle_flow/main_block_1/separable_conv2/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.004911523706772748,
"min": -0.7367285560159123
}
},
{
"name": "middle_flow/main_block_1/separable_conv2/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.008976466047997568,
"min": -1.2207993825276693
}
},
{
"name": "middle_flow/main_block_1/separable_conv2/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "exit_flow/reduction_block/separable_conv0/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005074804436926748,
"min": -0.7104726211697447
}
},
{
"name": "exit_flow/reduction_block/separable_conv0/pointwise_filter",
"shape": [
1,
1,
128,
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.011453078307357489,
"min": -1.4545409450344011
}
},
{
"name": "exit_flow/reduction_block/separable_conv0/bias",
"shape": [
256
],
"dtype": "float32"
},
{
"name": "exit_flow/reduction_block/separable_conv1/depthwise_filter",
"shape": [
3,
3,
256,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.007741751390344957,
"min": -1.1380374543807086
}
},
{
"name": "exit_flow/reduction_block/separable_conv1/pointwise_filter",
"shape": [
1,
1,
256,
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.011347713189966538,
"min": -1.497898141075583
}
},
{
"name": "exit_flow/reduction_block/separable_conv1/bias",
"shape": [
256
],
"dtype": "float32"
},
{
"name": "exit_flow/reduction_block/expansion_conv/filters",
"shape": [
1,
1,
128,
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006717281014311547,
"min": -0.8329428457746318
}
},
{
"name": "exit_flow/reduction_block/expansion_conv/bias",
"shape": [
256
],
"dtype": "float32"
},
{
"name": "exit_flow/separable_conv/depthwise_filter",
"shape": [
3,
3,
256,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0027201742518181892,
"min": -0.3237007359663645
}
},
{
"name": "exit_flow/separable_conv/pointwise_filter",
"shape": [
1,
1,
256,
512
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010076364348916447,
"min": -1.330080094056971
}
},
{
"name": "exit_flow/separable_conv/bias",
"shape": [
512
],
"dtype": "float32"
},
{
"name": "fc/age/weights",
"shape": [
512,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.008674054987290326,
"min": -1.2664120281443876
}
},
{
"name": "fc/age/bias",
"shape": [
1
],
"dtype": "float32"
},
{
"name": "fc/gender/weights",
"shape": [
512,
2
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0029948226377075793,
"min": -0.34140978069866407
}
},
{
"name": "fc/gender/bias",
"shape": [
2
],
"dtype": "float32"
}
],
"paths": [
"age_gender_model-shard1"
]
}
]

BIN
pos_face_recognition/static/src/js/weights/face_expression_model-shard1

Binary file not shown.

606
pos_face_recognition/static/src/js/weights/face_expression_model-weights_manifest.json

@@ -0,0 +1,606 @@
[
{
"weights": [
{
"name": "dense0/conv0/filters",
"shape": [
3,
3,
3,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0057930146946626555,
"min": -0.7125408074435067
}
},
{
"name": "dense0/conv0/bias",
"shape": [
32
],
"dtype": "float32"
},
{
"name": "dense0/conv1/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006473719839956246,
"min": -0.6408982641556684
}
},
{
"name": "dense0/conv1/pointwise_filter",
"shape": [
1,
1,
32,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010509579321917366,
"min": -1.408283629136927
}
},
{
"name": "dense0/conv1/bias",
"shape": [
32
],
"dtype": "float32"
},
{
"name": "dense0/conv2/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005666389652326995,
"min": -0.7252978754978554
}
},
{
"name": "dense0/conv2/pointwise_filter",
"shape": [
1,
1,
32,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010316079270605948,
"min": -1.1760330368490781
}
},
{
"name": "dense0/conv2/bias",
"shape": [
32
],
"dtype": "float32"
},
{
"name": "dense0/conv3/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0063220320963392074,
"min": -0.853474333005793
}
},
{
"name": "dense0/conv3/pointwise_filter",
"shape": [
1,
1,
32,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010322785377502442,
"min": -1.4658355236053466
}
},
{
"name": "dense0/conv3/bias",
"shape": [
32
],
"dtype": "float32"
},
{
"name": "dense1/conv0/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0042531527724920535,
"min": -0.5741756242864272
}
},
{
"name": "dense1/conv0/pointwise_filter",
"shape": [
1,
1,
32,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010653339647779278,
"min": -1.1825207009035
}
},
{
"name": "dense1/conv0/bias",
"shape": [
64
],
"dtype": "float32"
},
{
"name": "dense1/conv1/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005166931012097527,
"min": -0.6355325144879957
}
},
{
"name": "dense1/conv1/pointwise_filter",
"shape": [
1,
1,
64,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.011478300188101974,
"min": -1.3888743227603388
}
},
{
"name": "dense1/conv1/bias",
"shape": [
64
],
"dtype": "float32"
},
{
"name": "dense1/conv2/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006144821410085641,
"min": -0.8479853545918185
}
},
{
"name": "dense1/conv2/pointwise_filter",
"shape": [
1,
1,
64,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010541967317169788,
"min": -1.3809977185492421
}
},
{
"name": "dense1/conv2/bias",
"shape": [
64
],
"dtype": "float32"
},
{
"name": "dense1/conv3/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005769844849904378,
"min": -0.686611537138621
}
},
{
"name": "dense1/conv3/pointwise_filter",
"shape": [
1,
1,
64,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010939095534530341,
"min": -1.2689350820055196
}
},
{
"name": "dense1/conv3/bias",
"shape": [
64
],
"dtype": "float32"
},
{
"name": "dense2/conv0/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0037769308277204924,
"min": -0.40790852939381317
}
},
{
"name": "dense2/conv0/pointwise_filter",
"shape": [
1,
1,
64,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.01188667194516051,
"min": -1.4382873053644218
}
},
{
"name": "dense2/conv0/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "dense2/conv1/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006497045825509464,
"min": -0.8381189114907208
}
},
{
"name": "dense2/conv1/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.011632198913424622,
"min": -1.3377028750438316
}
},
{
"name": "dense2/conv1/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "dense2/conv2/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005947182225246056,
"min": -0.7969224181829715
}
},
{
"name": "dense2/conv2/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.011436844339557722,
"min": -1.4524792311238306
}
},
{
"name": "dense2/conv2/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "dense2/conv3/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006665432686899222,
"min": -0.8998334127313949
}
},
{
"name": "dense2/conv3/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.01283421422920975,
"min": -1.642779421338848
}
},
{
"name": "dense2/conv3/bias",
"shape": [
128
],
"dtype": "float32"
},
{
"name": "dense3/conv0/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.004711699953266218,
"min": -0.6737730933170692
}
},
{
"name": "dense3/conv0/pointwise_filter",
"shape": [
1,
1,
128,
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010955964817720302,
"min": -1.3914075318504784
}
},
{
"name": "dense3/conv0/bias",
"shape": [
256
],
"dtype": "float32"
},
{
"name": "dense3/conv1/depthwise_filter",
"shape": [
3,
3,
256,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.00554193468654857,
"min": -0.7149095745647656
}
},
{
"name": "dense3/conv1/pointwise_filter",
"shape": [
1,
1,
256,
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.016790372250126858,
"min": -2.484975093018775
}
},
{
"name": "dense3/conv1/bias",
"shape": [
256
],
"dtype": "float32"
},
{
"name": "dense3/conv2/depthwise_filter",
"shape": [
3,
3,
256,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006361540626077091,
"min": -0.8142772001378676
}
},
{
"name": "dense3/conv2/pointwise_filter",
"shape": [
1,
1,
256,
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.01777329678628959,
"min": -1.7062364914838006
}
},
{
"name": "dense3/conv2/bias",
"shape": [
256
],
"dtype": "float32"
},
{
"name": "dense3/conv3/depthwise_filter",
"shape": [
3,
3,
256,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006900275922289082,
"min": -0.8625344902861353
}
},
{
"name": "dense3/conv3/pointwise_filter",
"shape": [
1,
1,
256,
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.015449936717164282,
"min": -1.9003422162112067
}
},
{
"name": "dense3/conv3/bias",
"shape": [
256
],
"dtype": "float32"
},
{
"name": "fc/weights",
"shape": [
256,
7
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.004834276554631252,
"min": -0.7203072066400565
}
},
{
"name": "fc/bias",
"shape": [
7
],
"dtype": "float32"
}
],
"paths": [
"face_expression_model-shard1"
]
}
]

BIN
pos_face_recognition/static/src/js/weights/face_landmark_68_model-shard1

Binary file not shown.

691
pos_face_recognition/static/src/js/weights/face_landmark_68_model-weights_manifest.json

@@ -0,0 +1,691 @@
[
{
"weights": [
{
"name": "dense0/conv0/filters",
"shape": [
3,
3,
3,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.004853619781194949,
"min": -0.5872879935245888
}
},
{
"name": "dense0/conv0/bias",
"shape": [
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.004396426443960153,
"min": -0.7298067896973853
}
},
{
"name": "dense0/conv1/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.00635151559231328,
"min": -0.5589333721235686
}
},
{
"name": "dense0/conv1/pointwise_filter",
"shape": [
1,
1,
32,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.009354315552057004,
"min": -1.2628325995276957
}
},
{
"name": "dense0/conv1/bias",
"shape": [
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0029380727048013726,
"min": -0.5846764682554731
}
},
{
"name": "dense0/conv2/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0049374802439820535,
"min": -0.6171850304977566
}
},
{
"name": "dense0/conv2/pointwise_filter",
"shape": [
1,
1,
32,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.009941946758943446,
"min": -1.3421628124573652
}
},
{
"name": "dense0/conv2/bias",
"shape": [
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0030300481062309416,
"min": -0.5272283704841838
}
},
{
"name": "dense0/conv3/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005672684837790097,
"min": -0.7431217137505026
}
},
{
"name": "dense0/conv3/pointwise_filter",
"shape": [
1,
1,
32,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010712201455060173,
"min": -1.5639814124387852
}
},
{
"name": "dense0/conv3/bias",
"shape": [
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0030966934035806097,
"min": -0.3839899820439956
}
},
{
"name": "dense1/conv0/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0039155554537679636,
"min": -0.48161332081345953
}
},
{
"name": "dense1/conv0/pointwise_filter",
"shape": [
1,
1,
32,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.01023082966898002,
"min": -1.094698774580862
}
},
{
"name": "dense1/conv0/bias",
"shape": [
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0027264176630506327,
"min": -0.3871513081531898
}
},
{
"name": "dense1/conv1/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.004583378632863362,
"min": -0.5454220573107401
}
},
{
"name": "dense1/conv1/pointwise_filter",
"shape": [
1,
1,
64,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.00915846403907327,
"min": -1.117332612766939
}
},
{
"name": "dense1/conv1/bias",
"shape": [
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.003091680419211294,
"min": -0.5966943209077797
}
},
{
"name": "dense1/conv2/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005407439727409214,
"min": -0.708374604290607
}
},
{
"name": "dense1/conv2/pointwise_filter",
"shape": [
1,
1,
64,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.00946493943532308,
"min": -1.2399070660273235
}
},
{
"name": "dense1/conv2/bias",
"shape": [
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.004409168514550901,
"min": -0.9788354102303
}
},
{
"name": "dense1/conv3/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.004478132958505668,
"min": -0.6493292789833219
}
},
{
"name": "dense1/conv3/pointwise_filter",
"shape": [
1,
1,
64,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.011063695888893277,
"min": -1.2501976354449402
}
},
{
"name": "dense1/conv3/bias",
"shape": [
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.003909627596537272,
"min": -0.6646366914113363
}
},
{
"name": "dense2/conv0/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.003213915404151468,
"min": -0.3374611174359041
}
},
{
"name": "dense2/conv0/pointwise_filter",
"shape": [
1,
1,
64,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010917326048308728,
"min": -1.4520043644250609
}
},
{
"name": "dense2/conv0/bias",
"shape": [
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.002800439152063108,
"min": -0.38085972468058266
}
},
{
"name": "dense2/conv1/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0050568851770139206,
"min": -0.6927932692509071
}
},
{
"name": "dense2/conv1/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.01074961213504567,
"min": -1.3222022926106174
}
},
{
"name": "dense2/conv1/bias",
"shape": [
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0030654204242369708,
"min": -0.5487102559384177
}
},
{
"name": "dense2/conv2/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.00591809165244009,
"min": -0.917304206128214
}
},
{
"name": "dense2/conv2/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.01092823346455892,
"min": -1.366029183069865
}
},
{
"name": "dense2/conv2/bias",
"shape": [
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.002681120470458386,
"min": -0.36463238398234055
}
},
{
"name": "dense2/conv3/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0048311497650894465,
"min": -0.5797379718107336
}
},
{
"name": "dense2/conv3/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.011227761062921263,
"min": -1.4483811771168429
}
},
{
"name": "dense2/conv3/bias",
"shape": [
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0034643323982463162,
"min": -0.3360402426298927
}
},
{
"name": "dense3/conv0/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.003394978887894574,
"min": -0.49227193874471326
}
},
{
"name": "dense3/conv0/pointwise_filter",
"shape": [
1,
1,
128,
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010051267287310432,
"min": -1.2765109454884247
}
},
{
"name": "dense3/conv0/bias",
"shape": [
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.003142924752889895,
"min": -0.4588670139219247
}
},
{
"name": "dense3/conv1/depthwise_filter",
"shape": [
3,
3,
256,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.00448304671867221,
"min": -0.5872791201460595
}
},
{
"name": "dense3/conv1/pointwise_filter",
"shape": [
1,
1,
256,
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.016063522357566685,
"min": -2.3613377865623026
}
},
{
"name": "dense3/conv1/bias",
"shape": [
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.00287135781026354,
"min": -0.47664539650374765
}
},
{
"name": "dense3/conv2/depthwise_filter",
"shape": [
3,
3,
256,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006002906724518421,
"min": -0.7923836876364315
}
},
{
"name": "dense3/conv2/pointwise_filter",
"shape": [
1,
1,
256,
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.017087187019048954,
"min": -1.6061955797906016
}
},
{
"name": "dense3/conv2/bias",
"shape": [
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.003124481205846749,
"min": -0.46242321846531886
}
},
{
"name": "dense3/conv3/depthwise_filter",
"shape": [
3,
3,
256,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006576311588287353,
"min": -1.0193282961845398
}
},
{
"name": "dense3/conv3/pointwise_filter",
"shape": [
1,
1,
256,
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.015590153955945782,
"min": -1.99553970636106
}
},
{
"name": "dense3/conv3/bias",
"shape": [
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.004453541601405424,
"min": -0.6546706154065973
}
},
{
"name": "fc/weights",
"shape": [
256,
136
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.010417488509533453,
"min": -1.500118345372817
}
},
{
"name": "fc/bias",
"shape": [
136
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0025084222648658005,
"min": 0.07683877646923065
}
}
],
"paths": [
"face_landmark_68_model-shard1"
]
}
]

BIN
pos_face_recognition/static/src/js/weights/face_landmark_68_tiny_model-shard1

Binary file not shown.

397
pos_face_recognition/static/src/js/weights/face_landmark_68_tiny_model-weights_manifest.json

@@ -0,0 +1,397 @@
[
{
"weights": [
{
"name": "dense0/conv0/filters",
"shape": [
3,
3,
3,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.008194216092427571,
"min": -0.9423348506291708
}
},
{
"name": "dense0/conv0/bias",
"shape": [
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006839508168837603,
"min": -0.8412595047670252
}
},
{
"name": "dense0/conv1/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.009194007106855804,
"min": -1.2779669878529567
}
},
{
"name": "dense0/conv1/pointwise_filter",
"shape": [
1,
1,
32,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0036026100317637128,
"min": -0.3170296827952067
}
},
{
"name": "dense0/conv1/bias",
"shape": [
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.000740380117706224,
"min": -0.06367269012273527
}
},
{
"name": "dense0/conv2/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 1,
"min": 0
}
},
{
"name": "dense0/conv2/pointwise_filter",
"shape": [
1,
1,
32,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 1,
"min": 0
}
},
{
"name": "dense0/conv2/bias",
"shape": [
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0037702228508743585,
"min": -0.6220867703942692
}
},
{
"name": "dense1/conv0/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0033707996209462483,
"min": -0.421349952618281
}
},
{
"name": "dense1/conv0/pointwise_filter",
"shape": [
1,
1,
32,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.014611541991140328,
"min": -1.8556658328748217
}
},
{
"name": "dense1/conv0/bias",
"shape": [
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.002832523046755323,
"min": -0.30307996600281956
}
},
{
"name": "dense1/conv1/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006593170586754294,
"min": -0.6329443763284123
}
},
{
"name": "dense1/conv1/pointwise_filter",
"shape": [
1,
1,
64,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.012215249211180444,
"min": -1.6001976466646382
}
},
{
"name": "dense1/conv1/bias",
"shape": [
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.002384825547536214,
"min": -0.3028728445370992
}
},
{
"name": "dense1/conv2/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005859645441466687,
"min": -0.7617539073906693
}
},
{
"name": "dense1/conv2/pointwise_filter",
"shape": [
1,
1,
64,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.013121426806730382,
"min": -1.7845140457153321
}
},
{
"name": "dense1/conv2/bias",
"shape": [
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0032247188044529336,
"min": -0.46435950784122243
}
},
{
"name": "dense2/conv0/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.002659512618008782,
"min": -0.32977956463308894
}
},
{
"name": "dense2/conv0/pointwise_filter",
"shape": [
1,
1,
64,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.015499923743453681,
"min": -1.9839902391620712
}
},
{
"name": "dense2/conv0/bias",
"shape": [
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0032450980999890497,
"min": -0.522460794098237
}
},
{
"name": "dense2/conv1/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005911862382701799,
"min": -0.792189559282041
}
},
{
"name": "dense2/conv1/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.021025861478319356,
"min": -2.2077154552235325
}
},
{
"name": "dense2/conv1/bias",
"shape": [
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.00349616945958605,
"min": -0.46149436866535865
}
},
{
"name": "dense2/conv2/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.008104994250278847,
"min": -1.013124281284856
}
},
{
"name": "dense2/conv2/pointwise_filter",
"shape": [
1,
1,
128,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.029337059282789044,
"min": -3.5791212325002633
}
},
{
"name": "dense2/conv2/bias",
"shape": [
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0038808938334969913,
"min": -0.4230174278511721
}
},
{
"name": "fc/weights",
"shape": [
128,
136
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.014016061670639936,
"min": -1.8921683255363912
}
},
{
"name": "fc/bias",
"shape": [
136
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0029505149698724935,
"min": 0.088760145008564
}
}
],
"paths": [
"face_landmark_68_tiny_model-shard1"
]
}
]

BIN
pos_face_recognition/static/src/js/weights/face_recognition_model-shard1

Binary file not shown.

6
pos_face_recognition/static/src/js/weights/face_recognition_model-shard2

File diff suppressed because one or more lines are too long

1462
pos_face_recognition/static/src/js/weights/face_recognition_model-weights_manifest.json

File diff suppressed because it is too large

BIN
pos_face_recognition/static/src/js/weights/mtcnn_model-shard1

Binary file not shown.

402
pos_face_recognition/static/src/js/weights/mtcnn_model-weights_manifest.json

@@ -0,0 +1,402 @@
[
{
"paths": [
"mtcnn_model-shard1"
],
"weights": [
{
"dtype": "float32",
"name": "pnet/conv1/weights",
"shape": [
3,
3,
3,
10
]
},
{
"dtype": "float32",
"name": "pnet/conv1/bias",
"shape": [
10
]
},
{
"dtype": "float32",
"name": "pnet/prelu1_alpha",
"shape": [
10
]
},
{
"dtype": "float32",
"name": "pnet/conv2/weights",
"shape": [
3,
3,
10,
16
]
},
{
"dtype": "float32",
"name": "pnet/conv2/bias",
"shape": [
16
]
},
{
"dtype": "float32",
"name": "pnet/prelu2_alpha",
"shape": [
16
]
},
{
"dtype": "float32",
"name": "pnet/conv3/weights",
"shape": [
3,
3,
16,
32
]
},
{
"dtype": "float32",
"name": "pnet/conv3/bias",
"shape": [
32
]
},
{
"dtype": "float32",
"name": "pnet/prelu3_alpha",
"shape": [
32
]
},
{
"dtype": "float32",
"name": "pnet/conv4_1/weights",
"shape": [
1,
1,
32,
2
]
},
{
"dtype": "float32",
"name": "pnet/conv4_1/bias",
"shape": [
2
]
},
{
"dtype": "float32",
"name": "pnet/conv4_2/weights",
"shape": [
1,
1,
32,
4
]
},
{
"dtype": "float32",
"name": "pnet/conv4_2/bias",
"shape": [
4
]
},
{
"dtype": "float32",
"name": "rnet/conv1/weights",
"shape": [
3,
3,
3,
28
]
},
{
"dtype": "float32",
"name": "rnet/conv1/bias",
"shape": [
28
]
},
{
"dtype": "float32",
"name": "rnet/prelu1_alpha",
"shape": [
28
]
},
{
"dtype": "float32",
"name": "rnet/conv2/weights",
"shape": [
3,
3,
28,
48
]
},
{
"dtype": "float32",
"name": "rnet/conv2/bias",
"shape": [
48
]
},
{
"dtype": "float32",
"name": "rnet/prelu2_alpha",
"shape": [
48
]
},
{
"dtype": "float32",
"name": "rnet/conv3/weights",
"shape": [
2,
2,
48,
64
]
},
{
"dtype": "float32",
"name": "rnet/conv3/bias",
"shape": [
64
]
},
{
"dtype": "float32",
"name": "rnet/prelu3_alpha",
"shape": [
64
]
},
{
"dtype": "float32",
"name": "rnet/fc1/weights",
"shape": [
576,
128
]
},
{
"dtype": "float32",
"name": "rnet/fc1/bias",
"shape": [
128
]
},
{
"dtype": "float32",
"name": "rnet/prelu4_alpha",
"shape": [
128
]
},
{
"dtype": "float32",
"name": "rnet/fc2_1/weights",
"shape": [
128,
2
]
},
{
"dtype": "float32",
"name": "rnet/fc2_1/bias",
"shape": [
2
]
},
{
"dtype": "float32",
"name": "rnet/fc2_2/weights",
"shape": [
128,
4
]
},
{
"dtype": "float32",
"name": "rnet/fc2_2/bias",
"shape": [
4
]
},
{
"dtype": "float32",
"name": "onet/conv1/weights",
"shape": [
3,
3,
3,
32
]
},
{
"dtype": "float32",
"name": "onet/conv1/bias",
"shape": [
32
]
},
{
"dtype": "float32",
"name": "onet/prelu1_alpha",
"shape": [
32
]
},
{
"dtype": "float32",
"name": "onet/conv2/weights",
"shape": [
3,
3,
32,
64
]
},
{
"dtype": "float32",
"name": "onet/conv2/bias",
"shape": [
64
]
},
{
"dtype": "float32",
"name": "onet/prelu2_alpha",
"shape": [
64
]
},
{
"dtype": "float32",
"name": "onet/conv3/weights",
"shape": [
3,
3,
64,
64
]
},
{
"dtype": "float32",
"name": "onet/conv3/bias",
"shape": [
64
]
},
{
"dtype": "float32",
"name": "onet/prelu3_alpha",
"shape": [
64
]
},
{
"dtype": "float32",
"name": "onet/conv4/weights",
"shape": [
2,
2,
64,
128
]
},
{
"dtype": "float32",
"name": "onet/conv4/bias",
"shape": [
128
]
},
{
"dtype": "float32",
"name": "onet/prelu4_alpha",
"shape": [
128
]
},
{
"dtype": "float32",
"name": "onet/fc1/weights",
"shape": [
1152,
256
]
},
{
"dtype": "float32",
"name": "onet/fc1/bias",
"shape": [
256
]
},
{
"dtype": "float32",
"name": "onet/prelu5_alpha",
"shape": [
256
]
},
{
"dtype": "float32",
"name": "onet/fc2_1/weights",
"shape": [
256,
2
]
},
{
"dtype": "float32",
"name": "onet/fc2_1/bias",
"shape": [
2
]
},
{
"dtype": "float32",
"name": "onet/fc2_2/weights",
"shape": [
256,
4
]
},
{
"dtype": "float32",
"name": "onet/fc2_2/bias",
"shape": [
4
]
},
{
"dtype": "float32",
"name": "onet/fc2_3/weights",
"shape": [
256,
10
]
},
{
"dtype": "float32",
"name": "onet/fc2_3/bias",
"shape": [
10
]
}
]
}
]

BIN
pos_face_recognition/static/src/js/weights/ssd_mobilenetv1_model-shard1

Binary file not shown.

137
pos_face_recognition/static/src/js/weights/ssd_mobilenetv1_model-shard2

File diff suppressed because one or more lines are too long

1936
pos_face_recognition/static/src/js/weights/ssd_mobilenetv1_model-weights_manifest.json

File diff suppressed because it is too large

BIN
pos_face_recognition/static/src/js/weights/tiny_face_detector_model-shard1

Binary file not shown.

273
pos_face_recognition/static/src/js/weights/tiny_face_detector_model-weights_manifest.json

@@ -0,0 +1,273 @@
[
{
"weights": [
{
"name": "conv0/filters",
"shape": [
3,
3,
3,
16
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.009007044399485869,
"min": -1.2069439495311063
}
},
{
"name": "conv0/bias",
"shape": [
16
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005263455241334205,
"min": -0.9211046672334858
}
},
{
"name": "conv1/depthwise_filter",
"shape": [
3,
3,
16,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.004001977630690033,
"min": -0.5042491814669441
}
},
{
"name": "conv1/pointwise_filter",
"shape": [
1,
1,
16,
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.013836609615999109,
"min": -1.411334180831909
}
},
{
"name": "conv1/bias",
"shape": [
32
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0015159862590771096,
"min": -0.30926119685173037
}
},
{
"name": "conv2/depthwise_filter",
"shape": [
3,
3,
32,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.002666276225856706,
"min": -0.317286870876948
}
},
{
"name": "conv2/pointwise_filter",
"shape": [
1,
1,
32,
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.015265831292844286,
"min": -1.6792414422128714
}
},
{
"name": "conv2/bias",
"shape": [
64
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0020280554598453,
"min": -0.37113414915168985
}
},
{
"name": "conv3/depthwise_filter",
"shape": [
3,
3,
64,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006100742489683862,
"min": -0.8907084034938438
}
},
{
"name": "conv3/pointwise_filter",
"shape": [
1,
1,
64,
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.016276211832083907,
"min": -2.0508026908425725
}
},
{
"name": "conv3/bias",
"shape": [
128
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.003394414279975143,
"min": -0.7637432129944072
}
},
{
"name": "conv4/depthwise_filter",
"shape": [
3,
3,
128,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006716050119961009,
"min": -0.8059260143953211
}
},
{
"name": "conv4/pointwise_filter",
"shape": [
1,
1,
128,
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.021875603993733724,
"min": -2.8875797271728514
}
},
{
"name": "conv4/bias",
"shape": [
256
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0041141652009066415,
"min": -0.8187188749804216
}
},
{
"name": "conv5/depthwise_filter",
"shape": [
3,
3,
256,
1
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.008423839597141042,
"min": -0.9013508368940915
}
},
{
"name": "conv5/pointwise_filter",
"shape": [
1,
1,
256,
512
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.030007277283014035,
"min": -3.8709387695088107
}
},
{
"name": "conv5/bias",
"shape": [
512
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.008402082966823203,
"min": -1.4871686851277068
}
},
{
"name": "conv8/filters",
"shape": [
1,
1,
512,
25
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.028336129469030042,
"min": -4.675461362389957
}
},
{
"name": "conv8/bias",
"shape": [
25
],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.002268134028303857,
"min": -0.41053225912299807
}
}
],
"paths": [
"tiny_face_detector_model-shard1"
]
}
]

36
pos_face_recognition/static/src/xml/LoginScreen.xml

@@ -0,0 +1,36 @@
<?xml version="1.0" encoding="UTF-8"?>
<templates xml:space="preserve">
<!-- Replacing LoginScreen to add a modal-->
<t t-inherit="pos_hr.LoginScreen" t-inherit-mode="extension">
<xpath expr="//div[hasclass('login-overlay')]" position="replace">
<div class="login-overlay" t-ref="LoginRoot">
<div id="WebCamModal"
style="display: block; margin-top: 250px; margin-left: 780px; left: 0; top: 260px; width: 500px; background-color: transparent">
<div class="container" style="z-index: 1">
<video id="video" width="400" height="400" autoplay=""
muted=""/>
<img id="employee_image" t-ref="ImageRoot"
style="visibility: hidden; height: 10px; width: 10px"/>
</div>
</div>
<div class="screen-login">
<div class="login-title"><small>Log in to </small>
<t t-esc="shopName" />
</div>
<div class="login-body">
<span class="login-element">
<img class="login-barcode-img"
src="/point_of_sale/static/img/barcode.png" />
<div class="login-barcode-text">Scan your badge</div>
</span>
<span class="login-or">or</span>
<span class="login-element">
<button class="login-button select-cashier"
t-on-click="() => this.selectCashier()">Select Cashier</button>
</span>
</div>
</div>
</div>
</xpath>
</t>
</templates>
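The t-ref="LoginRoot" and t-ref="ImageRoot" markers above are what the patched setup() binds via useRef; a minimal OWL sketch of that pairing (RefDemo is an illustrative component, not part of the module):

/** @odoo-module **/
// Illustrative OWL component showing the t-ref / useRef pairing used by
// LoginScreen.js and this template; not part of the commit.
import { Component, useRef, xml } from "@odoo/owl";

class RefDemo extends Component {
    static template = xml`
        <div>
            <img t-ref="ImageRoot"/>
            <button t-on-click="show">show</button>
        </div>`;
    setup() {
        // Resolves to the <img> element once the component is mounted,
        // which is how the login screen reaches its hidden reference image.
        this.imageRef = useRef("ImageRoot");
    }
    show() {
        console.log(this.imageRef.el); // the HTMLImageElement
    }
}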

14
pos_face_recognition/views/hr_employee_view.xml

@@ -0,0 +1,14 @@
<?xml version="1.0" encoding="utf-8"?>
<odoo>
<!-- inherit hr.employee view-->
<record id="view_employee_form" model="ir.ui.view">
<field name="name">hr.employee.view.form.inherit.pos.face.recognition</field>
<field name="model">hr.employee</field>
<field name="inherit_id" ref="hr.view_employee_form"/>
<field name="arch" type="xml">
<xpath expr="//field[@name='pin']" position="after">
<field name="image"/>
</xpath>
</field>
</record>
</odoo>