diff --git a/auto_database_backup/README.rst b/auto_database_backup/README.rst index e5e790503..8251c31f1 100644 --- a/auto_database_backup/README.rst +++ b/auto_database_backup/README.rst @@ -19,7 +19,7 @@ Company Credits ------- * Developer: -(v14) Midilaj @ Cybrosys +(v14) Midilaj @ Cybrosys, Farhana Jahan PT @ Cybrosys, Contacts diff --git a/auto_database_backup/__init__.py b/auto_database_backup/__init__.py index f2c5a4636..05a787b6a 100644 --- a/auto_database_backup/__init__.py +++ b/auto_database_backup/__init__.py @@ -19,5 +19,6 @@ # If not, see . # ############################################################################# - +from . import controllers from . import models +from . import wizard diff --git a/auto_database_backup/__manifest__.py b/auto_database_backup/__manifest__.py index 948ba609e..84e331821 100644 --- a/auto_database_backup/__manifest__.py +++ b/auto_database_backup/__manifest__.py @@ -19,13 +19,14 @@ # If not, see . # ############################################################################# - { 'name': "Automatic Database Backup", - 'version': '14.0.1.0.0', - 'summary': """Generate automatic backup of databases and store to local, google drive or remote server""", - 'description': """This module has been developed for creating database backups automatically - and store it to the different locations.""", + 'version': '14.0.1.0.1', + 'summary': 'Generate automatic backup of databases and store to local, ' + 'google drive or remote server', + 'description': 'This module has been developed for creating ' + 'database backups automatically ' + 'and store it to the different locations.', 'author': "Cybrosys Techno Solutions", 'website': "https://www.cybrosys.com", 'company': 'Cybrosys Techno Solutions', @@ -35,8 +36,12 @@ 'data': [ 'security/ir.model.access.csv', 'data/data.xml', - 'views/db_backup_configure_views.xml' + 'views/db_backup_configure_views.xml', + 'wizard/dropbox_auth_code_views.xml' ], + 'external_dependencies': { + 'python': 
['dropbox', 'pyncclient', 'boto3', 'nextcloud-api-wrapper', + 'paramiko']}, 'license': 'LGPL-3', 'images': ['static/description/banner.gif'], 'installable': True, diff --git a/auto_database_backup/controllers/__init__.py b/auto_database_backup/controllers/__init__.py new file mode 100644 index 000000000..be06f5727 --- /dev/null +++ b/auto_database_backup/controllers/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +############################################################################### +# +# Cybrosys Technologies Pvt. Ltd. +# +# Copyright (C) 2023-TODAY Cybrosys Technologies() +# Author: Cybrosys Techno Solutions (odoo@cybrosys.com) +# +# You can modify it under the terms of the GNU LESSER +# GENERAL PUBLIC LICENSE (LGPL v3), Version 3. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU LESSER GENERAL PUBLIC LICENSE (LGPL v3) for more details. +# +# You should have received a copy of the GNU LESSER GENERAL PUBLIC LICENSE +# (LGPL v3) along with this program. +# If not, see . +# +############################################################################### +from . import auto_database_backup diff --git a/auto_database_backup/controllers/auto_database_backup.py b/auto_database_backup/controllers/auto_database_backup.py new file mode 100644 index 000000000..376042e5e --- /dev/null +++ b/auto_database_backup/controllers/auto_database_backup.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +############################################################################### +# +# Cybrosys Technologies Pvt. Ltd. +# +# Copyright (C) 2023-TODAY Cybrosys Technologies() +# Author: Cybrosys Techno Solutions (odoo@cybrosys.com) +# +# You can modify it under the terms of the GNU LESSER +# GENERAL PUBLIC LICENSE (LGPL v3), Version 3. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU LESSER GENERAL PUBLIC LICENSE (LGPL v3) for more details. +# +# You should have received a copy of the GNU LESSER GENERAL PUBLIC LICENSE +# (LGPL v3) along with this program. +# If not, see . +# +############################################################################### +import json +from odoo import http +from odoo.http import request + + +class OnedriveAuth(http.Controller): + """Controller for handling authentication with OneDrive and Google Drive.""" + @http.route('/onedrive/authentication', type='http', auth="public") + def oauth2callback(self, **kw): + """ + Callback function for OneDrive authentication. + + :param kw: A dictionary of keyword arguments. + :return: A redirect response. + """ + state = json.loads(kw['state']) + backup_config = request.env['db.backup.configure'].sudo().browse( + state.get('backup_config_id')) + backup_config.get_onedrive_tokens(kw.get('code')) + backup_config.hide_active = True + backup_config.active = True + return http.local_redirect(state.get('url_return')) + + @http.route('/google_drive/authentication', type='http', auth="public") + def gdrive_oauth2callback(self, **kw): + """Callback function for Google Drive authentication.""" + state = json.loads(kw['state']) + backup_config = request.env['db.backup.configure'].sudo().browse( + state.get('backup_config_id')) + backup_config.get_gdrive_tokens(kw.get('code')) + backup_config.active = True + return http.local_redirect(state.get('url_return')) diff --git a/auto_database_backup/data/data.xml b/auto_database_backup/data/data.xml index 82b7e00af..87c0f77be 100644 --- a/auto_database_backup/data/data.xml +++ b/auto_database_backup/data/data.xml @@ -1,7 +1,5 @@ - - @@ -14,23 +12,28 @@ -1 True - - - - + + Database Backup: Notification Successful - - Database Backup 
Successful: ${object.db_name} - ${object.user_id.email_formatted | safe} + + Database Backup Successful: + ${object.db_name} + + ${object.user_id.email_formatted | safe} +

Dear ${object.user_id.sudo().name},

- Backup of the database ${object.db_name} has been successfully + Backup of the database + ${object.db_name} + has been successfully generated and stored to % if object.backup_destination in ('local'): Local @@ -56,7 +59,8 @@ % elif object.backup_destination in ('sftp'): SFTP Server % endif - % if object.backup_destination in ('local', 'ftp', 'sftp') + % if object.backup_destination in ('local', 'ftp', + 'sftp')
Backup Path: % if object.backup_destination in ('local'): @@ -75,20 +79,23 @@

- - + Database Backup: Notification Failed - - Database Backup Failed: ${object.db_name} - ${object.user_id.email_formatted | safe} + + Database Backup Failed: ${object.db_name} + + ${object.user_id.email_formatted | safe} +

Dear ${object.user_id.sudo().name},

- Backup generation of the database ${object.db_name} has been + Backup generation of the + database ${object.db_name} has been Failed.

@@ -104,7 +111,8 @@ % elif object.backup_destination in ('sftp'): SFTP Server % endif - % if object.backup_destination in ('local', 'ftp', 'sftp') + % if object.backup_destination in ('local', 'ftp', + 'sftp')
Backup Path: % if object.backup_destination in ('local'): @@ -127,5 +135,4 @@ - diff --git a/auto_database_backup/doc/RELEASE_NOTES.md b/auto_database_backup/doc/RELEASE_NOTES.md index d56c8af53..2b5e852a3 100644 --- a/auto_database_backup/doc/RELEASE_NOTES.md +++ b/auto_database_backup/doc/RELEASE_NOTES.md @@ -5,5 +5,22 @@ #### ADD - Initial commit for auto_database_backup +#### 16.02.2024 +#### Version 14.0.1.0.1 +#### ADD +- Dropbox integration added. Backup can be stored into Dropbox. +#### 16.02.2024 +#### Version 14.0.1.0.1 +#### ADD +- Onedrive integration added. Backup can be stored into Onedrive. +#### 16.02.2024 +#### Version 14.0.1.0.1 +#### ADD +- Google Drive authentication updated. + +#### 16.02.2024 +#### Version 14.0.1.0.1 +#### ADD +- Nextcloud and Amazon S3 integration added. Backup can be stored into Nextcloud and Amazon S3. diff --git a/auto_database_backup/models/__init__.py b/auto_database_backup/models/__init__.py index 90120bcea..d441b67c3 100644 --- a/auto_database_backup/models/__init__.py +++ b/auto_database_backup/models/__init__.py @@ -1,10 +1,10 @@ # -*- coding: utf-8 -*- -############################################################################# +############################################################################### # # Cybrosys Technologies Pvt. Ltd. # -# Copyright (C) 2021-TODAY Cybrosys Technologies() -# Author: Cybrosys Techno Solutions() +# Copyright (C) 2023-TODAY Cybrosys Technologies() +# Author: Cybrosys Techno Solutions (odoo@cybrosys.com) # # You can modify it under the terms of the GNU LESSER # GENERAL PUBLIC LICENSE (LGPL v3), Version 3. @@ -17,5 +17,6 @@ # You should have received a copy of the GNU LESSER GENERAL PUBLIC LICENSE # (LGPL v3) along with this program. # If not, see . - +# +############################################################################### from . 
import db_backup_configure diff --git a/auto_database_backup/models/db_backup_configure.py b/auto_database_backup/models/db_backup_configure.py index c009f7c6b..d93e5acc9 100644 --- a/auto_database_backup/models/db_backup_configure.py +++ b/auto_database_backup/models/db_backup_configure.py @@ -1,10 +1,10 @@ # -*- coding: utf-8 -*- -############################################################################# +############################################################################### # # Cybrosys Technologies Pvt. Ltd. # -# Copyright (C) 2021-TODAY Cybrosys Technologies() -# Author: Cybrosys Techno Solutions() +# Copyright (C) 2023-TODAY Cybrosys Technologies() +# Author: Cybrosys Techno Solutions (odoo@cybrosys.com) # # You can modify it under the terms of the GNU LESSER # GENERAL PUBLIC LICENSE (LGPL v3), Version 3. @@ -18,24 +18,37 @@ # (LGPL v3) along with this program. # If not, see . # -############################################################################# - -from odoo import models, fields, api, _ -from odoo.exceptions import UserError, ValidationError -import odoo -from odoo.service import db - +############################################################################### import datetime -import os -import paramiko +import errno import ftplib import json -import requests -import tempfile -import errno import logging +import os +import tempfile +from datetime import timedelta + +import boto3 +import dropbox +import nextcloud_client +import paramiko +import requests +from nextcloud import NextCloud +from requests.auth import HTTPBasicAuth +from werkzeug import urls + +import odoo +from odoo import api, fields, models, _ +from odoo.exceptions import UserError, ValidationError +from odoo.http import request +from odoo.service import db _logger = logging.getLogger(__name__) +ONEDRIVE_SCOPE = ['offline_access openid Files.ReadWrite.All'] +MICROSOFT_GRAPH_END_POINT = "https://graph.microsoft.com" +GOOGLE_AUTH_ENDPOINT = 
'https://accounts.google.com/o/oauth2/auth' +GOOGLE_TOKEN_ENDPOINT = 'https://accounts.google.com/o/oauth2/token' +GOOGLE_API_BASE_URL = 'https://www.googleapis.com' class AutoDatabaseBackup(models.Model): @@ -53,9 +66,14 @@ class AutoDatabaseBackup(models.Model): ('local', 'Local Storage'), ('google_drive', 'Google Drive'), ('ftp', 'FTP'), - ('sftp', 'SFTP') + ('sftp', 'SFTP'), + ('dropbox', 'Dropbox'), + ('onedrive', 'Onedrive'), + ('next_cloud', 'Next Cloud'), + ('amazon_s3', 'Amazon S3') ], string='Backup Destination') - backup_path = fields.Char(string='Backup Path', help='Local storage directory path') + backup_path = fields.Char(string='Backup Path', + help='Local storage directory path') sftp_host = fields.Char(string='SFTP Host') sftp_port = fields.Char(string='SFTP Port', default=22) sftp_user = fields.Char(string='SFTP User') @@ -66,22 +84,292 @@ class AutoDatabaseBackup(models.Model): ftp_user = fields.Char(string='FTP User') ftp_password = fields.Char(string='FTP Password') ftp_path = fields.Char(string='FTP Path') - active = fields.Boolean(default=True) + dropbox_client_key = fields.Char(string='Dropbox Client ID', copy=False, + help='Client id of the dropbox') + dropbox_client_secret = fields.Char(string='Dropbox Client Secret', + copy=False, + help='Client secret id of the dropbox') + dropbox_refresh_token = fields.Char(string='Dropbox Refresh Token', + copy=False, + help='Refresh token for the dropbox') + is_dropbox_token_generated = fields.Boolean( + string='Dropbox Token Generated', + compute='_compute_is_dropbox_token_generated', + copy=False, help='Is the dropbox token generated or not?') + dropbox_folder = fields.Char(string='Dropbox Folder', help='Dropbox folder') + active = fields.Boolean(default=True, string='Active', + help='Activate the Scheduled Action or not') + hide_active = fields.Boolean(string="Hide Active", + help="Make active field to readonly") save_to_drive = fields.Boolean() auto_remove = fields.Boolean(string='Remove Old 
Backups') days_to_remove = fields.Integer(string='Remove After', - help='Automatically delete stored backups after this specified number of days') - google_drive_folderid = fields.Char(string='Drive Folder ID') + help='Automatically delete stored backups ' + 'after this specified number of days') + google_drive_folder = fields.Char(string='Drive Folder ID') notify_user = fields.Boolean(string='Notify User', - help='Send an email notification to user when the backup operation is successful or failed') + help='Send an email notification to user when ' + 'the backup operation is successful or ' + 'failed') user_id = fields.Many2one('res.users', string='User') - backup_filename = fields.Char(string='Backup Filename', help='For Storing generated backup filename') - generated_exception = fields.Char(string='Exception', help='Exception Encountered while Backup generation') + backup_filename = fields.Char(string='Backup Filename', + help='For Storing generated backup filename') + generated_exception = fields.Char(string='Exception', + help='Exception Encountered while Backup ' + 'generation') + gdrive_refresh_token = fields.Char(string='Google drive Refresh Token', + copy=False, + help='Refresh token for google drive') + gdrive_access_token = fields.Char(string='Google Drive Access Token', + copy=False, + help='Access token for google drive') + domain = fields.Char(string='Domain Name', help="Field used to store the " + "name of a domain") + next_cloud_user_name = fields.Char(string='User Name', + help="Field used to store the user name" + " for a Nextcloud account.") + next_cloud_password = fields.Char(string='Password', + help="Field used to store the password" + " for a Nextcloud account.") + nextcloud_folder_key = fields.Char(string='Next Cloud Folder Id', + help="Field used to store the unique " + "identifier for a Nextcloud " + "folder.") + is_google_drive_token_generated = fields.Boolean( + string='Google drive Token Generated', + 
compute='_compute_is_google_drive_token_generated', copy=False, + help='Google drive token generated or not') + gdrive_client_key = fields.Char(string='Google Drive Client ID', copy=False, + help='Client id of the google drive') + gdrive_client_secret = fields.Char(string='Google Drive Client Secret', + copy=False, + help='Client secret id of the google' + ' drive') + gdrive_token_validity = fields.Datetime( + string='Google Drive Token Validity', copy=False, + help='Token validity of the google drive') + gdrive_redirect_uri = fields.Char(string='Google Drive Redirect URI', + compute='_compute_redirect_uri', + help='Redirect URI of the google drive') + onedrive_client_key = fields.Char(string='Onedrive Client ID', copy=False, + help='Client ID of the onedrive') + onedrive_client_secret = fields.Char(string='Onedrive Client Secret', + copy=False, help='Client secret id of' + ' the onedrive') + onedrive_access_token = fields.Char(string='Onedrive Access Token', + copy=False, + help='Access token for one drive') + onedrive_refresh_token = fields.Char(string='Onedrive Refresh Token', + copy=False, + help='Refresh token for one drive') + onedrive_token_validity = fields.Datetime(string='Onedrive Token Validity', + copy=False, + help='Token validity date') + onedrive_folder_key = fields.Char(string='Folder ID', + help='Folder id of the onedrive') + is_onedrive_token_generated = fields.Boolean( + string='onedrive Tokens Generated', + compute='_compute_is_onedrive_token_generated', + copy=False, help='Whether to generate onedrive token?') + onedrive_redirect_uri = fields.Char(string='Onedrive Redirect URI', + compute='_compute_redirect_uri', + help='Redirect URI of the onedrive') + aws_access_key = fields.Char(string="Amazon S3 Access Key", + help="Field used to store the Access Key" + " for an Amazon S3 bucket.") + aws_secret_access_key = fields.Char(string='Amazon S3 Secret Key', + help="Field used to store the Secret" + " Key for an Amazon S3 bucket.") + 
bucket_file_name = fields.Char(string='Bucket Name', + help="Field used to store the name of an" + " Amazon S3 bucket.") + aws_folder_name = fields.Char(string='File Name', + help="field used to store the name of a" + " folder in an Amazon S3 bucket.") + gdrive_backup_error_test = fields.Boolean(string="Google Drive Error Test") + onedrive_backup_error_test = fields.Boolean(string="OneDrive Error Test") + + @api.depends('dropbox_refresh_token') + def _compute_is_dropbox_token_generated(self): + """Set True if the dropbox refresh token is generated""" + for rec in self: + rec.is_dropbox_token_generated = bool(rec.dropbox_refresh_token) + + @api.onchange('backup_destination') + def _onchange_backup_destination(self): + self.write({ + "gdrive_backup_error_test": False, + "onedrive_backup_error_test": False + }) + + @api.onchange('gdrive_client_key', 'gdrive_client_secret', + 'google_drive_folder', 'onedrive_client_key', + 'onedrive_client_secret', 'onedrive_folder_key') + def _onchange_gdrive_backup_error_test(self): + if self.backup_destination == 'google_drive': + if self.gdrive_backup_error_test: + self.write({ + "gdrive_backup_error_test": False + }) + if self.backup_destination == 'onedrive': + if self.onedrive_backup_error_test: + self.write({ + "onedrive_backup_error_test": False + }) + + def action_get_dropbox_auth_code(self): + """Open a wizard to set up dropbox Authorization code""" + return { + 'type': 'ir.actions.act_window', + 'name': 'Dropbox Authorization Wizard', + 'res_model': 'dropbox.auth.code', + 'view_mode': 'form', + 'target': 'new', + 'context': {'dropbox_auth': True} + } + + def action_s3cloud(self): + """If it has aws_secret_access_key, which will perform s3 cloud + operations for connection test""" + if self.aws_access_key and self.aws_secret_access_key: + try: + bo3 = boto3.client( + 's3', + aws_access_key_id=self.aws_access_key, + aws_secret_access_key=self.aws_secret_access_key) + response = bo3.list_buckets() + for bucket in 
response['Buckets']: + if self.bucket_file_name == bucket['Name']: + self.active = True + self.hide_active = True + return { + 'type': 'ir.actions.client', + 'tag': 'display_notification', + 'params': { + 'type': 'success', + 'title': _("Connection Test Succeeded!"), + 'message': _( + "Everything seems properly set up!"), + 'sticky': False, + } + } + raise UserError( + _("Bucket not found. Please check the bucket name and" + " try again.")) + except Exception: + self.active = False + self.hide_active = False + return { + 'type': 'ir.actions.client', + 'tag': 'display_notification', + 'params': { + 'type': 'danger', + 'title': _("Connection Test Failed!"), + 'message': _("An error occurred while testing the " + "connection."), + 'sticky': False, + } + } + + def get_dropbox_auth_url(self): + """Return dropbox authorization url""" + dbx_auth = dropbox.oauth.DropboxOAuth2FlowNoRedirect( + self.dropbox_client_key, + self.dropbox_client_secret, + token_access_type='offline') + return dbx_auth.start() + + def set_dropbox_refresh_token(self, auth_code): + """Generate and set the dropbox refresh token from authorization code""" + try: + dbx_auth = dropbox.oauth.DropboxOAuth2FlowNoRedirect( + self.dropbox_client_key, + self.dropbox_client_secret, + token_access_type='offline') + outh_result = dbx_auth.finish(auth_code) + self.dropbox_refresh_token = outh_result.refresh_token + except Exception: + raise ValidationError( + 'Please Enter Valid Authentication Code') + + @api.depends('gdrive_access_token', 'gdrive_refresh_token') + def _compute_is_google_drive_token_generated(self): + """Set True if the Google Drive refresh token is generated""" + for rec in self: + rec.is_google_drive_token_generated = bool( + rec.gdrive_access_token) and bool(rec.gdrive_refresh_token) + + def action_nextcloud(self): + """If it has next_cloud_password, domain, and next_cloud_user_name + which will perform an action for nextcloud connection test""" + if self.domain and self.next_cloud_password 
and \ + self.next_cloud_user_name: + try: + ncx = NextCloud(self.domain, + auth=HTTPBasicAuth(self.next_cloud_user_name, + self.next_cloud_password)) + data = ncx.list_folders('/').__dict__ + if data['raw'].status_code == 207: + self.active = True + self.hide_active = True + return { + 'type': 'ir.actions.client', + 'tag': 'display_notification', + 'params': { + 'type': 'success', + 'title': _("Connection Test Succeeded!"), + 'message': _("Everything seems properly set up!"), + 'sticky': False, + } + } + else: + self.active = False + self.hide_active = False + return { + 'type': 'ir.actions.client', + 'tag': 'display_notification', + 'params': { + 'type': 'danger', + 'title': _("Connection Test Failed!"), + 'message': _("An error occurred while testing the " + "connection."), + 'sticky': False, + } + } + except Exception: + self.active = False + self.hide_active = False + return { + 'type': 'ir.actions.client', + 'tag': 'display_notification', + 'params': { + 'type': 'danger', + 'title': _("Connection Test Failed!"), + 'message': _("An error occurred while testing the " + "connection."), + 'sticky': False, + } + } + + def _compute_redirect_uri(self): + """Compute the redirect URI for onedrive and Google Drive""" + for rec in self: + base_url = request.env['ir.config_parameter'].get_param( + 'web.base.url') + rec.onedrive_redirect_uri = base_url + '/onedrive/authentication' + rec.gdrive_redirect_uri = base_url + '/google_drive/authentication' + + @api.depends('onedrive_access_token', 'onedrive_refresh_token') + def _compute_is_onedrive_token_generated(self): + """Set true if onedrive tokens are generated""" + for rec in self: + rec.is_onedrive_token_generated = bool( + rec.onedrive_access_token) and bool(rec.onedrive_refresh_token) @api.constrains('db_name', 'master_pwd') def _check_db_credentials(self): """ - Validate enetered database name and master password + Validate entered database name and master password """ database_list = db.list_dbs() if self.db_name not 
in database_list: @@ -91,6 +379,235 @@ class AutoDatabaseBackup(models.Model): except Exception: raise ValidationError(_("Invalid Master Password!")) + def action_get_onedrive_auth_code(self): + """Generate onedrive authorization code""" + AUTHORITY = \ + 'https://login.microsoftonline.com/common/oauth2/v2.0/authorize' + action = self.env.ref( + "auto_database_backup.action_db_backup_configure") + action_data = { + 'id': action.id, + 'name': action.name, + 'type': action.type, + 'xml_id': action.xml_id, + 'help': action.help, + 'binding_model_id': action.binding_model_id, + 'binding_type': action.binding_type, + 'binding_view_types': action.binding_view_types, + 'display_name': action.display_name, + 'res_model': action.res_model, + 'target': action.target, + 'view_mode': action.view_mode, + 'views': action.views, + 'groups_id': [(6, 0, action.groups_id.ids)], + 'search_view_id': action.search_view_id.id if action.search_view_id else False, + 'filter': action.filter, + 'search_view': action.search_view, + 'limit': action.limit, + } + base_url = request.env['ir.config_parameter'].get_param('web.base.url') + url_return = base_url + \ + '/web#id=%d&action=%d&view_type=form&model=%s' % ( + self.id, action_data['id'], 'db.backup.configure') + state = { + 'backup_config_id': self.id, + 'url_return': url_return + } + encoded_params = urls.url_encode({ + 'response_type': 'code', + 'client_id': self.onedrive_client_key, + 'state': json.dumps(state), + 'scope': ONEDRIVE_SCOPE, + 'redirect_uri': base_url + '/onedrive/authentication', + 'prompt': 'consent', + 'access_type': 'offline' + }) + auth_url = "%s?%s" % (AUTHORITY, encoded_params) + return { + 'type': 'ir.actions.act_url', + 'target': 'self', + 'url': auth_url, + } + + def action_get_gdrive_auth_code(self): + """Generate google drive authorization code""" + action = self.env.ref( + "auto_database_backup.action_db_backup_configure") + action_data = { + 'id': action.id, + 'name': action.name, + 'type': action.type, + 
'xml_id': action.xml_id, + 'help': action.help, + 'binding_model_id': action.binding_model_id, + 'binding_type': action.binding_type, + 'binding_view_types': action.binding_view_types, + 'display_name': action.display_name, + 'res_model': action.res_model, + 'target': action.target, + 'view_mode': action.view_mode, + 'views': action.views, + 'groups_id': [(6, 0, action.groups_id.ids)], + 'search_view_id': action.search_view_id.id if action.search_view_id else False, + 'filter': action.filter, + 'search_view': action.search_view, + 'limit': action.limit, + } + base_url = request.env['ir.config_parameter'].get_param('web.base.url') + url_return = base_url + \ + '/web#id=%d&action=%d&view_type=form&model=%s' % ( + self.id, action_data['id'], 'db.backup.configure') + state = { + 'backup_config_id': self.id, + 'url_return': url_return + } + encoded_params = urls.url_encode({ + 'response_type': 'code', + 'client_id': self.gdrive_client_key, + 'scope': 'https://www.googleapis.com/auth/drive https://www.googleapis.com/auth/drive.file', + 'redirect_uri': base_url + '/google_drive/authentication', + 'access_type': 'offline', + 'state': json.dumps(state), + 'approval_prompt': 'force', + }) + auth_url = "%s?%s" % (GOOGLE_AUTH_ENDPOINT, encoded_params) + return { + 'type': 'ir.actions.act_url', + 'target': 'self', + 'url': auth_url, + } + + def get_gdrive_tokens(self, authorize_code): + """Generate Google Drive tokens from authorization code.""" + base_url = request.env['ir.config_parameter'].get_param('web.base.url') + headers = {"content-type": "application/x-www-form-urlencoded"} + data = { + 'code': authorize_code, + 'client_id': self.gdrive_client_key, + 'client_secret': self.gdrive_client_secret, + 'grant_type': 'authorization_code', + 'redirect_uri': base_url + '/google_drive/authentication' + } + try: + res = requests.post(GOOGLE_TOKEN_ENDPOINT, params=data, + headers=headers) + res.raise_for_status() + response = res.content and res.json() or {} + if response: + expires_in 
= response.get('expires_in') + self.write({ + 'gdrive_access_token': response.get('access_token'), + 'gdrive_refresh_token': response.get('refresh_token'), + 'gdrive_token_validity': fields.Datetime.now() + timedelta( + seconds=expires_in) if expires_in else False, + }) + if self.gdrive_backup_error_test: + self.write({ + 'gdrive_backup_error_test': False + }) + except Exception: + if not self.gdrive_backup_error_test: + self.write({"gdrive_backup_error_test": True}) + + def generate_onedrive_refresh_token(self): + """Generate onedrive access token from refresh token if expired""" + base_url = request.env['ir.config_parameter'].get_param('web.base.url') + headers = {"Content-type": "application/x-www-form-urlencoded"} + data = { + 'client_id': self.onedrive_client_key, + 'client_secret': self.onedrive_client_secret, + 'scope': ONEDRIVE_SCOPE, + 'grant_type': "refresh_token", + 'redirect_uri': base_url + '/onedrive/authentication', + 'refresh_token': self.onedrive_refresh_token + } + res = requests.post( + "https://login.microsoftonline.com/common/oauth2/v2.0/token", + data=data, headers=headers) + try: + res.raise_for_status() + response = res.content and res.json() or {} + if response: + expires_in = response.get('expires_in') + self.write({ + 'onedrive_access_token': response.get('access_token'), + 'onedrive_refresh_token': response.get('refresh_token'), + 'onedrive_token_validity': fields.Datetime.now() + timedelta( + seconds=expires_in) if expires_in else False, + }) + except requests.HTTPError as error: + if res.status_code != 200: + raise ValidationError("Bad microsoft onedrive request..!") + _logger.exception("Bad microsoft onedrive request : %s !", + error.response.content) + raise error + + def get_onedrive_tokens(self, authorize_code): + """Generate onedrive tokens from authorization code.""" + headers = {"content-type": "application/x-www-form-urlencoded"} + base_url = request.env['ir.config_parameter'].get_param('web.base.url') + data = { + 'code': 
authorize_code, + 'client_id': self.onedrive_client_key, + 'client_secret': self.onedrive_client_secret, + 'grant_type': 'authorization_code', + 'scope': ONEDRIVE_SCOPE, + 'redirect_uri': base_url + '/onedrive/authentication' + } + res = requests.post( + "https://login.microsoftonline.com/common/oauth2/v2.0/token", + data=data, headers=headers) + try: + res.raise_for_status() + response = res.content and res.json() or {} + if response: + expires_in = response.get('expires_in') + self.write({ + 'onedrive_access_token': response.get('access_token'), + 'onedrive_refresh_token': response.get('refresh_token'), + 'onedrive_token_validity': fields.Datetime.now() + timedelta( + seconds=expires_in) if expires_in else False, + }) + if self.onedrive_backup_error_test: + self.write({ + 'onedrive_backup_error_test': False + }) + except Exception: + if not self.onedrive_backup_error_test: + self.write({"onedrive_backup_error_test": True}) + + def generate_gdrive_refresh_token(self): + """Generate Google Drive access token from refresh token if expired""" + headers = {"content-type": "application/x-www-form-urlencoded"} + data = { + 'refresh_token': self.gdrive_refresh_token, + 'client_id': self.gdrive_client_key, + 'client_secret': self.gdrive_client_secret, + 'grant_type': 'refresh_token', + } + try: + res = requests.post(GOOGLE_TOKEN_ENDPOINT, data=data, + headers=headers) + res.raise_for_status() + response = res.content and res.json() or {} + if response: + expires_in = response.get('expires_in') + self.write({ + 'gdrive_access_token': response.get('access_token'), + 'gdrive_token_validity': fields.Datetime.now() + timedelta( + seconds=expires_in) if expires_in else False, + }) + except requests.HTTPError as error: + error_key = error.response.json().get("error", "nc") + error_msg = _( + "An error occurred while generating the token. Your " + "authorization code may be invalid or has already expired [%s]. " 
+ "You should check your Client ID and secret on the Google APIs" + " platform or try to stop and restart your calendar" + " synchronisation.", + error_key) + raise UserError(error_msg) + def test_connection(self): """ Test the sftp and ftp connection using entered credentials @@ -99,11 +616,16 @@ class AutoDatabaseBackup(models.Model): client = paramiko.SSHClient() client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) try: - client.connect(hostname=self.sftp_host, username=self.sftp_user, password=self.sftp_password, port=self.sftp_port) + client.connect(hostname=self.sftp_host, username=self.sftp_user, + password=self.sftp_password, port=self.sftp_port) sftp = client.open_sftp() sftp.close() - except Exception as e: - raise UserError(_("SFTP Exception: %s", e)) + except Exception: + raise UserError( + _("It seems there was an issue with the connection, " + "possibly due to incorrect information provided. " + "Please double-check all the information you provided " + "for the connection to ensure it is correct.")) finally: client.close() elif self.backup_destination == 'ftp': @@ -112,8 +634,12 @@ class AutoDatabaseBackup(models.Model): ftp_server.connect(self.ftp_host, int(self.ftp_port)) ftp_server.login(self.ftp_user, self.ftp_password) ftp_server.quit() - except Exception as e: - raise UserError(_("FTP Exception: %s", e)) + except Exception: + raise UserError( + _("It seems there was an issue with the connection, " + "possibly due to incorrect information provided. 
" + "Please double-check all the information you provided " + "for the connection to ensure it is correct.")) title = _("Connection Test Succeeded!") message = _("Everything seems properly set up!") return { @@ -132,11 +658,15 @@ class AutoDatabaseBackup(models.Model): Database backup for all the active records in backup configuration model will be created """ records = self.search([]) - mail_template_success = self.env.ref('auto_database_backup.mail_template_data_db_backup_successful') - mail_template_failed = self.env.ref('auto_database_backup.mail_template_data_db_backup_failed') + mail_template_success = self.env.ref( + 'auto_database_backup.mail_template_data_db_backup_successful') + mail_template_failed = self.env.ref( + 'auto_database_backup.mail_template_data_db_backup_failed') for rec in records: - backup_time = datetime.datetime.utcnow().strftime("%Y-%m-%d_%H-%M-%S") - backup_filename = "%s_%s.%s" % (rec.db_name, backup_time, rec.backup_format) + backup_time = datetime.datetime.utcnow().strftime( + "%Y-%m-%d_%H-%M-%S") + backup_filename = "%s_%s.%s" % ( + rec.db_name, backup_time, rec.backup_format) rec.backup_filename = backup_filename # Local backup if rec.backup_destination == 'local': @@ -151,7 +681,8 @@ class AutoDatabaseBackup(models.Model): if rec.auto_remove: for filename in os.listdir(rec.backup_path): file = os.path.join(rec.backup_path, filename) - create_time = datetime.datetime.fromtimestamp(os.path.getctime(file)) + create_time = datetime.datetime.fromtimestamp( + os.path.getctime(file)) backup_duration = datetime.datetime.utcnow() - create_time if backup_duration.days >= rec.days_to_remove: os.remove(file) @@ -169,20 +700,26 @@ class AutoDatabaseBackup(models.Model): ftp_server.connect(rec.ftp_host, int(rec.ftp_port)) ftp_server.login(rec.ftp_user, rec.ftp_password) ftp_server.encoding = "utf-8" - temp = tempfile.NamedTemporaryFile(suffix='.%s' % rec.backup_format) + temp = tempfile.NamedTemporaryFile( + suffix='.%s' % rec.backup_format) 
try: ftp_server.cwd(rec.ftp_path) except ftplib.error_perm: ftp_server.mkd(rec.ftp_path) ftp_server.cwd(rec.ftp_path) with open(temp.name, "wb+") as tmp: - odoo.service.db.dump_db(rec.db_name, tmp, rec.backup_format) - ftp_server.storbinary('STOR %s' % backup_filename, open(temp.name, "rb")) + odoo.service.db.dump_db(rec.db_name, tmp, + rec.backup_format) + ftp_server.storbinary('STOR %s' % backup_filename, + open(temp.name, "rb")) if rec.auto_remove: files = ftp_server.nlst() for f in files: - create_time = datetime.datetime.strptime(ftp_server.sendcmd('MDTM ' + f)[4:], "%Y%m%d%H%M%S") - diff_days = (datetime.datetime.now() - create_time).days + create_time = datetime.datetime.strptime( + ftp_server.sendcmd('MDTM ' + f)[4:], + "%Y%m%d%H%M%S") + diff_days = ( + datetime.datetime.now() - create_time).days if diff_days >= rec.days_to_remove: ftp_server.delete(f) ftp_server.quit() @@ -198,11 +735,16 @@ class AutoDatabaseBackup(models.Model): client = paramiko.SSHClient() client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) try: - client.connect(hostname=rec.sftp_host, username=rec.sftp_user, password=rec.sftp_password, port=rec.sftp_port) + client.connect(hostname=rec.sftp_host, + username=rec.sftp_user, + password=rec.sftp_password, + port=rec.sftp_port) sftp = client.open_sftp() - temp = tempfile.NamedTemporaryFile(suffix='.%s' % rec.backup_format) + temp = tempfile.NamedTemporaryFile( + suffix='.%s' % rec.backup_format) with open(temp.name, "wb+") as tmp: - odoo.service.db.dump_db(rec.db_name, tmp, rec.backup_format) + odoo.service.db.dump_db(rec.db_name, tmp, + rec.backup_format) try: sftp.chdir(rec.sftp_path) except IOError as e: @@ -212,7 +754,11 @@ class AutoDatabaseBackup(models.Model): sftp.put(temp.name, backup_filename) if rec.auto_remove: files = sftp.listdir() - expired = list(filter(lambda fl: (datetime.datetime.now() - datetime.datetime.fromtimestamp(sftp.stat(fl).st_mtime)).days >= rec.days_to_remove, files)) + expired = list(filter(lambda fl: 
( + datetime.datetime.now() - datetime.datetime.fromtimestamp( + sftp.stat( + fl).st_mtime)).days >= rec.days_to_remove, + files)) for file in expired: sftp.unlink(file) sftp.close() @@ -227,39 +773,313 @@ class AutoDatabaseBackup(models.Model): client.close() # Google Drive backup elif rec.backup_destination == 'google_drive': - temp = tempfile.NamedTemporaryFile(suffix='.%s' % rec.backup_format) + try: + if rec.gdrive_token_validity <= fields.Datetime.now(): + rec.generate_gdrive_refresh_token() + temp = tempfile.NamedTemporaryFile( + suffix='.%s' % rec.backup_format) + with open(temp.name, "wb+") as tmp: + odoo.service.db.dump_db(rec.db_name, tmp, + rec.backup_format) + try: + headers = { + "Authorization": "Bearer %s" % rec.gdrive_access_token} + para = { + "name": backup_filename, + "parents": [rec.google_drive_folder], + } + files = { + 'data': ('metadata', json.dumps(para), + 'application/json; charset=UTF-8'), + 'file': open(temp.name, "rb") + } + requests.post( + "https://www.googleapis.com/upload/drive/v3/files?uploadType=multipart", + headers=headers, + files=files + ) + if rec.auto_remove: + query = "parents = '%s'" % rec.google_drive_folder_key + files_req = requests.get( + "https://www.googleapis.com/drive/v3/files?q=%s" % query, + headers=headers) + files = files_req.json()['files'] + for file in files: + file_date_req = requests.get( + "https://www.googleapis.com/drive/v3/files/%s?fields=createdTime" % + file['id'], headers=headers) + create_time = file_date_req.json()[ + 'createdTime'][ + :19].replace('T', ' ') + diff_days = ( + fields.datetime.now() - fields.datetime.strptime( + create_time, '%Y-%m-%d %H:%M:%S')).days + if diff_days >= rec.days_to_remove: + requests.delete( + "https://www.googleapis.com/drive/v3/files/%s" % + file['id'], headers=headers) + if rec.notify_user: + mail_template_success.send_mail(rec.id, + force_send=True) + except Exception as e: + rec.generated_exception = e + _logger.info('Google Drive Exception: %s', e) + if 
rec.notify_user: + mail_template_failed.send_mail(rec.id, + force_send=True) + except Exception: + if rec.notify_user: + mail_template_failed.send_mail(rec.id, force_send=True) + raise ValidationError( + 'Please check the credentials before activation') + else: + raise ValidationError('Please check connection') + # Dropbox backup + elif rec.backup_destination == 'dropbox': + temp = tempfile.NamedTemporaryFile( + suffix='.%s' % rec.backup_format) with open(temp.name, "wb+") as tmp: - odoo.service.db.dump_db(rec.db_name, tmp, rec.backup_format) + odoo.service.db.dump_db(rec.db_name, tmp, + rec.backup_format) try: - access_token = self.env['google.drive.config'].sudo().get_access_token() - headers = {"Authorization": "Bearer %s" % access_token} - para = { - "name": backup_filename, - "parents": [rec.google_drive_folderid], - } - files = { - 'data': ('metadata', json.dumps(para), 'application/json; charset=UTF-8'), - 'file': open(temp.name, "rb") - } - requests.post( - "https://www.googleapis.com/upload/drive/v3/files?uploadType=multipart", - headers=headers, - files=files - ) + dbx = dropbox.Dropbox( + app_key=rec.dropbox_client_key, + app_secret=rec.dropbox_client_secret, + oauth2_refresh_token=rec.dropbox_refresh_token) + dropbox_destination = (rec.dropbox_folder + '/' + + backup_filename) + dbx.files_upload(temp.read(), dropbox_destination) if rec.auto_remove: - query = "parents = '%s'" % rec.google_drive_folderid - files_req = requests.get("https://www.googleapis.com/drive/v3/files?q=%s" % query, headers=headers) - files = files_req.json()['files'] - for file in files: - file_date_req = requests.get("https://www.googleapis.com/drive/v3/files/%s?fields=createdTime" % file['id'], headers=headers) - create_time = file_date_req.json()['createdTime'][:19].replace('T', ' ') - diff_days = (datetime.datetime.now() - datetime.datetime.strptime(create_time, '%Y-%m-%d %H:%M:%S')).days - if diff_days >= rec.days_to_remove: - 
requests.delete("https://www.googleapis.com/drive/v3/files/%s" % file['id'], headers=headers) + files = dbx.files_list_folder( + rec.dropbox_folder) + file_entries = files.entries + expired_files = list(filter( + lambda fl: (fields.datetime.now() - + fl.client_modified).days >= + rec.days_to_remove, + file_entries)) + for file in expired_files: + dbx.files_delete_v2(file.path_display) if rec.notify_user: - mail_template_success.send_mail(rec.id, force_send=True) - except Exception as e: - rec.generated_exception = e - _logger.info('Google Drive Exception: %s', e) + mail_template_success.send_mail(rec.id, + force_send=True) + except Exception as error: + rec.generated_exception = error + _logger.info('Dropbox Exception: %s', error) if rec.notify_user: - mail_template_failed.send_mail(rec.id, force_send=True) + mail_template_failed.send_mail(rec.id, + force_send=True) + # Onedrive Backup + elif rec.backup_destination == 'onedrive': + try: + if rec.onedrive_token_validity <= fields.Datetime.now(): + rec.generate_onedrive_refresh_token() + temp = tempfile.NamedTemporaryFile( + suffix='.%s' % rec.backup_format) + with open(temp.name, "wb+") as tmp: + odoo.service.db.dump_db(rec.db_name, tmp, + rec.backup_format) + headers = { + 'Authorization': 'Bearer %s' % rec.onedrive_access_token, + 'Content-Type': 'application/json'} + upload_session_url = MICROSOFT_GRAPH_END_POINT + "/v1.0/me/drive/items/%s:/%s:/createUploadSession" % ( + rec.onedrive_folder_key, backup_filename) + try: + upload_session = requests.post( + upload_session_url, + headers=headers) + upload_url = upload_session.json().get( + 'uploadUrl') + requests.put(upload_url, data=temp.read()) + if rec.auto_remove: + list_url = MICROSOFT_GRAPH_END_POINT + "/v1.0/me/drive/items/%s/children" % rec.onedrive_folder_key + response = requests.get(list_url, + headers=headers) + files = response.json().get('value') + for file in files: + create_time = file['createdDateTime'][ + :19].replace( + 'T', + ' ') + diff_days = ( 
+ fields.datetime.now() - fields.datetime.strptime( + create_time, + '%Y-%m-%d %H:%M:%S')).days + if diff_days >= rec.days_to_remove: + delete_url = MICROSOFT_GRAPH_END_POINT + "/v1.0/me/drive/items/%s" % \ + file['id'] + requests.delete(delete_url, + headers=headers) + if rec.notify_user: + mail_template_success.send_mail(rec.id, + force_send=True) + except Exception as error: + rec.generated_exception = error + _logger.info('Onedrive Exception: %s', error) + if rec.notify_user: + mail_template_failed.send_mail(rec.id, + force_send=True) + except Exception: + if rec.notify_user: + mail_template_failed.send_mail(rec.id, + force_send=True) + raise ValidationError( + 'Please check the credentials before activation') + else: + raise ValidationError('Please check connection') + + # NextCloud Backup + elif rec.backup_destination == 'next_cloud': + try: + if rec.domain and rec.next_cloud_password and \ + rec.next_cloud_user_name: + try: + # Connect to NextCloud using the provided username + # and password + ncx = NextCloud(rec.domain, + auth=HTTPBasicAuth( + rec.next_cloud_user_name, + rec.next_cloud_password)) + # Connect to NextCloud again to perform additional + # operations + nc = nextcloud_client.Client(rec.domain) + nc.login(rec.next_cloud_user_name, + rec.next_cloud_password) + # Get the folder name from the NextCloud folder ID + folder_name = rec.nextcloud_folder_key + # If auto_remove is enabled, remove backup files + # older than specified days + if rec.auto_remove: + folder_path = "/" + folder_name + for item in nc.list(folder_path): + backup_file_name = item.path.split("/")[-1] + backup_date_str = \ + backup_file_name.split("_")[ + 2] + backup_date = fields.datetime.strptime( + backup_date_str, '%Y-%m-%d').date() + if (fields.date.today() - backup_date).days \ + >= rec.days_to_remove: + nc.delete(item.path) + # If notify_user is enabled, send a success email + # notification + if rec.notify_user: + mail_template_success.send_mail(rec.id, + force_send=True) + 
except Exception as error: + rec.generated_exception = error + _logger.info('NextCloud Exception: %s', error) + if rec.notify_user: + # If an exception occurs, send a failed email + # notification + mail_template_failed.send_mail(rec.id, + force_send=True) + # Get the list of folders in the root directory of NextCloud + data = ncx.list_folders('/').__dict__ + folders = [ + [file_name['href'].split('/')[-2], + file_name['file_id']] + for file_name in data['data'] if + file_name['href'].endswith('/')] + # If the folder name is not found in the list of folders, + # create the folder + if folder_name not in [file[0] for file in folders]: + nc.mkdir(folder_name) + # Dump the database to a temporary file + temp = tempfile.NamedTemporaryFile( + suffix='.%s' % rec.backup_format) + with open(temp.name, "wb+") as tmp: + odoo.service.db.dump_db(rec.db_name, tmp, + rec.backup_format) + backup_file_path = temp.name + remote_file_path = f"/{folder_name}/{rec.db_name}_" \ + f"{backup_time}.{rec.backup_format}" + nc.put_file(remote_file_path, backup_file_path) + else: + # Dump the database to a temporary file + temp = tempfile.NamedTemporaryFile( + suffix='.%s' % rec.backup_format) + with open(temp.name, "wb+") as tmp: + odoo.service.db.dump_db(rec.db_name, tmp, + rec.backup_format) + backup_file_path = temp.name + remote_file_path = f"/{folder_name}/{rec.db_name}_" \ + f"{backup_time}.{rec.backup_format}" + nc.put_file(remote_file_path, backup_file_path) + except Exception: + raise ValidationError('Please check connection') + # Amazon S3 Backup + elif rec.backup_destination == 'amazon_s3': + if rec.aws_access_key and rec.aws_secret_access_key: + try: + # Create a boto3 client for Amazon S3 with provided + # access key id and secret access key + bo3 = boto3.client( + 's3', + aws_access_key_id=rec.aws_access_key, + aws_secret_access_key=rec.aws_secret_access_key) + # If auto_remove is enabled, remove the backups that + # are older than specified days from the S3 bucket + if 
rec.auto_remove: + folder_path = rec.aws_folder_name + response = bo3.list_objects( + Bucket=rec.bucket_file_name, + Prefix=folder_path) + today = fields.date.today() + for file in response['Contents']: + file_path = file['Key'] + last_modified = file['LastModified'] + date = last_modified.date() + age_in_days = (today - date).days + if age_in_days >= rec.days_to_remove: + bo3.delete_object( + Bucket=rec.bucket_file_name, + Key=file_path) + # Create a boto3 resource for Amazon S3 with provided + # access key id and secret access key + s3 = boto3.resource( + 's3', + aws_access_key_id=rec.aws_access_key, + aws_secret_access_key=rec.aws_secret_access_key) + # Create a folder in the specified bucket, if it + # doesn't already exist + s3.Object(rec.bucket_file_name, + rec.aws_folder_name + '/').put() + bucket = s3.Bucket(rec.bucket_file_name) + # Get all the prefixes in the bucket + prefixes = set() + for obj in bucket.objects.all(): + key = obj.key + if key.endswith('/'): + prefix = key[:-1] # Remove the trailing slash + prefixes.add(prefix) + # If the specified folder is present in the bucket, + # take a backup of the database and upload it to the + # S3 bucket + if rec.aws_folder_name in prefixes: + temp = tempfile.NamedTemporaryFile( + suffix='.%s' % rec.backup_format) + with open(temp.name, "wb+") as tmp: + odoo.service.db.dump_db(rec.db_name, tmp, + rec.backup_format) + backup_file_path = temp.name + remote_file_path = f"{rec.aws_folder_name}/{rec.db_name}_" \ + f"{backup_time}.{rec.backup_format}" + s3.Object(rec.bucket_file_name, + remote_file_path).upload_file( + backup_file_path) + # If notify_user is enabled, send email to the + # user notifying them about the successful backup + if rec.notify_user: + mail_template_success.send_mail(rec.id, + force_send=True) + except Exception as error: + # If any error occurs, set the 'generated_exception' + # field to the error message and log the error + rec.generated_exception = error + _logger.info('Amazon S3 Exception: 
%s', error) + # If notify_user is enabled, send email to the user + # notifying them about the failed backup + if rec.notify_user: + mail_template_failed.send_mail(rec.id, + force_send=True) diff --git a/auto_database_backup/security/ir.model.access.csv b/auto_database_backup/security/ir.model.access.csv index bd7799217..db952ee15 100644 --- a/auto_database_backup/security/ir.model.access.csv +++ b/auto_database_backup/security/ir.model.access.csv @@ -1,2 +1,3 @@ id,name,model_id:id,group_id:id,perm_read,perm_write,perm_create,perm_unlink -access_db_backup_configure,access.db.backup.configure,model_db_backup_configure,base.group_user,1,1,1,1 \ No newline at end of file +access_db_backup_configure,access.db.backup.configure,model_db_backup_configure,base.group_user,1,1,1,1 +access_dropbox_auth_code_user,access.dropbox.auth.code.user,model_dropbox_auth_code,base.group_user,1,1,1,1 diff --git a/auto_database_backup/static/description/assets/screenshots/add.png b/auto_database_backup/static/description/assets/screenshots/add.png new file mode 100644 index 000000000..af240d4d9 Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/add.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/amazon_s3_7.png b/auto_database_backup/static/description/assets/screenshots/amazon_s3_7.png new file mode 100644 index 000000000..9a740ae0c Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/amazon_s3_7.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/amazon_s3_8.png b/auto_database_backup/static/description/assets/screenshots/amazon_s3_8.png new file mode 100644 index 000000000..c1dd06a9a Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/amazon_s3_8.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/amazon_s3_pci1.png 
b/auto_database_backup/static/description/assets/screenshots/amazon_s3_pci1.png new file mode 100644 index 000000000..5fcd336d6 Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/amazon_s3_pci1.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/amazon_s3_pic 2.png b/auto_database_backup/static/description/assets/screenshots/amazon_s3_pic 2.png new file mode 100644 index 000000000..d259da8a9 Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/amazon_s3_pic 2.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/amazons3-1.png b/auto_database_backup/static/description/assets/screenshots/amazons3-1.png new file mode 100644 index 000000000..c18d4146e Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/amazons3-1.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/amazons3_4.png b/auto_database_backup/static/description/assets/screenshots/amazons3_4.png new file mode 100644 index 000000000..c50a20cc8 Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/amazons3_4.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/amazons3_5.png b/auto_database_backup/static/description/assets/screenshots/amazons3_5.png new file mode 100644 index 000000000..415d521c4 Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/amazons3_5.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/amazons3_6.png b/auto_database_backup/static/description/assets/screenshots/amazons3_6.png new file mode 100644 index 000000000..284490127 Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/amazons3_6.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/amazons3_7.png 
b/auto_database_backup/static/description/assets/screenshots/amazons3_7.png new file mode 100644 index 000000000..f9345a6d8 Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/amazons3_7.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/amazons3_access.png b/auto_database_backup/static/description/assets/screenshots/amazons3_access.png new file mode 100644 index 000000000..270d27035 Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/amazons3_access.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/amazons3_pick 3.png b/auto_database_backup/static/description/assets/screenshots/amazons3_pick 3.png new file mode 100644 index 000000000..22bbce54c Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/amazons3_pick 3.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/amazons3_signup.png b/auto_database_backup/static/description/assets/screenshots/amazons3_signup.png new file mode 100644 index 000000000..52357df9a Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/amazons3_signup.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/backup7.png b/auto_database_backup/static/description/assets/screenshots/backup7.png index f262abd98..d7969211f 100644 Binary files a/auto_database_backup/static/description/assets/screenshots/backup7.png and b/auto_database_backup/static/description/assets/screenshots/backup7.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/backup8.png b/auto_database_backup/static/description/assets/screenshots/backup8.png index c0dc26f93..65e51f91c 100644 Binary files a/auto_database_backup/static/description/assets/screenshots/backup8.png and b/auto_database_backup/static/description/assets/screenshots/backup8.png differ diff --git 
a/auto_database_backup/static/description/assets/screenshots/backup9.png b/auto_database_backup/static/description/assets/screenshots/backup9.png index f4f4470f6..175540d5c 100644 Binary files a/auto_database_backup/static/description/assets/screenshots/backup9.png and b/auto_database_backup/static/description/assets/screenshots/backup9.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/drive1.png b/auto_database_backup/static/description/assets/screenshots/drive1.png new file mode 100644 index 000000000..29ea97a8d Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/drive1.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/drive2.png b/auto_database_backup/static/description/assets/screenshots/drive2.png new file mode 100644 index 000000000..0b5f7a388 Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/drive2.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/drive3.png b/auto_database_backup/static/description/assets/screenshots/drive3.png new file mode 100644 index 000000000..ab9333477 Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/drive3.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/drive4.png b/auto_database_backup/static/description/assets/screenshots/drive4.png new file mode 100644 index 000000000..e2803dd15 Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/drive4.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/drive5.png b/auto_database_backup/static/description/assets/screenshots/drive5.png new file mode 100644 index 000000000..40a3e5c81 Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/drive5.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/drop1.png 
b/auto_database_backup/static/description/assets/screenshots/drop1.png new file mode 100644 index 000000000..af02afc76 Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/drop1.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/drop2.png b/auto_database_backup/static/description/assets/screenshots/drop2.png new file mode 100644 index 000000000..488fb6cbc Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/drop2.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/drop3.png b/auto_database_backup/static/description/assets/screenshots/drop3.png new file mode 100644 index 000000000..1088638e0 Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/drop3.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/drop4.png b/auto_database_backup/static/description/assets/screenshots/drop4.png new file mode 100644 index 000000000..f8eeae60e Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/drop4.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/dropbox-1.png b/auto_database_backup/static/description/assets/screenshots/dropbox-1.png new file mode 100644 index 000000000..29b81027f Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/dropbox-1.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/dropbox-2.png b/auto_database_backup/static/description/assets/screenshots/dropbox-2.png new file mode 100644 index 000000000..490085b97 Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/dropbox-2.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/hero.gif b/auto_database_backup/static/description/assets/screenshots/hero.gif new file mode 100644 index 000000000..b3f15b03e Binary files /dev/null 
and b/auto_database_backup/static/description/assets/screenshots/hero.gif differ diff --git a/auto_database_backup/static/description/assets/screenshots/hero.png b/auto_database_backup/static/description/assets/screenshots/hero.png deleted file mode 100644 index 5e7d34238..000000000 Binary files a/auto_database_backup/static/description/assets/screenshots/hero.png and /dev/null differ diff --git a/auto_database_backup/static/description/assets/screenshots/newcloud1.png b/auto_database_backup/static/description/assets/screenshots/newcloud1.png new file mode 100644 index 000000000..89b6501ce Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/newcloud1.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/newcloud2.png b/auto_database_backup/static/description/assets/screenshots/newcloud2.png new file mode 100644 index 000000000..b213b1145 Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/newcloud2.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/newcloud2_ds.png b/auto_database_backup/static/description/assets/screenshots/newcloud2_ds.png new file mode 100644 index 000000000..2c97c2316 Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/newcloud2_ds.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/next_cloud-1.png b/auto_database_backup/static/description/assets/screenshots/next_cloud-1.png new file mode 100644 index 000000000..823c0c748 Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/next_cloud-1.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/next_cloud2.png b/auto_database_backup/static/description/assets/screenshots/next_cloud2.png new file mode 100644 index 000000000..bfa0ff1f6 Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/next_cloud2.png 
differ diff --git a/auto_database_backup/static/description/assets/screenshots/next_cloud_9.png b/auto_database_backup/static/description/assets/screenshots/next_cloud_9.png new file mode 100644 index 000000000..261d476f4 Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/next_cloud_9.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/nextcloud_4.png b/auto_database_backup/static/description/assets/screenshots/nextcloud_4.png new file mode 100644 index 000000000..a0fbb2f36 Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/nextcloud_4.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/nextcloud_5.png b/auto_database_backup/static/description/assets/screenshots/nextcloud_5.png new file mode 100644 index 000000000..14a69f1e0 Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/nextcloud_5.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/nextcloud_6.png b/auto_database_backup/static/description/assets/screenshots/nextcloud_6.png new file mode 100644 index 000000000..35259d927 Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/nextcloud_6.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/nextcloud_create_3.png b/auto_database_backup/static/description/assets/screenshots/nextcloud_create_3.png new file mode 100644 index 000000000..2548aa71e Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/nextcloud_create_3.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/onedrive-1.png b/auto_database_backup/static/description/assets/screenshots/onedrive-1.png new file mode 100644 index 000000000..7ddc88b58 Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/onedrive-1.png differ diff --git 
a/auto_database_backup/static/description/assets/screenshots/onedrive1.png b/auto_database_backup/static/description/assets/screenshots/onedrive1.png new file mode 100644 index 000000000..d62bc7826 Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/onedrive1.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/onedrive2.png b/auto_database_backup/static/description/assets/screenshots/onedrive2.png new file mode 100644 index 000000000..155d97204 Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/onedrive2.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/onedrive3.png b/auto_database_backup/static/description/assets/screenshots/onedrive3.png new file mode 100644 index 000000000..f4e564206 Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/onedrive3.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/onedrive4.png b/auto_database_backup/static/description/assets/screenshots/onedrive4.png new file mode 100644 index 000000000..4ae8a4c0f Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/onedrive4.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/onedrive5.png b/auto_database_backup/static/description/assets/screenshots/onedrive5.png new file mode 100644 index 000000000..44123fcd5 Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/onedrive5.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/onedrive6.png b/auto_database_backup/static/description/assets/screenshots/onedrive6.png new file mode 100644 index 000000000..4f241299c Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/onedrive6.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/onedrive7.png 
b/auto_database_backup/static/description/assets/screenshots/onedrive7.png new file mode 100644 index 000000000..2da4020c1 Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/onedrive7.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/onedrive8.png b/auto_database_backup/static/description/assets/screenshots/onedrive8.png new file mode 100644 index 000000000..61f5cb9bc Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/onedrive8.png differ diff --git a/auto_database_backup/static/description/assets/screenshots/onedrive9.png b/auto_database_backup/static/description/assets/screenshots/onedrive9.png new file mode 100644 index 000000000..b2894148f Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/onedrive9.png differ diff --git a/auto_database_backup/static/description/index.html b/auto_database_backup/static/description/index.html index 4de79931b..49f4f4d09 100644 --- a/auto_database_backup/static/description/index.html +++ b/auto_database_backup/static/description/index.html @@ -1,744 +1,1170 @@ -

-
-
-
- -
-
-
- Community -
-
- Enterprise -
- -
+
+
+
+
+ +
+
+
+ Community +
+
+ Enterprise +
+ +
+
-
-
-
-

- Automatic Database Backup

-

- A Module for generating database backup and storing backup to multiple locations. -

- -
-
- -
-
-

- Explore this module -

-
-
- -
-
-

- Overview

+
+
+

+ Automatic Database Backup

- Learn more about this module

-
-
- -
-
-
+ style="font-family: 'Montserrat', sans-serif !important; font-weight: 300 !important; color: #FFFFFF !important; font-size: 1.4rem !important; text-align: center !important;"> + A Module for generating database backup and storing backup to + multiple locations. +

+ +
-
- -
-
-

- Features

-

- View features of this module

-
-
- -
-
-
+ + -
- - + style="font-family: 'Roboto', sans-serif !important; font-weight: 400 !important; color: #282F33 !important; font-size: 1rem !important; line-height: 30px !important;"> + This module helps to generate backups of your databases + automatically on + regular interval of times. The generated backups can be stored + into + local storage, ftp server, sftp server, dropbox,nextcloud, + Google Drive + or Onedrive and Amazon S3. User can enable auto remove option to + automatically delete + old backups. User can enable email notification to be notified + about the + success and failure of the backup generation and storage. + Using Automatic Database Backup module user can generate and + store + database backups to multiple location.

+
+
+ This module uses an external python dependency 'dropbox'.Before + installing the module install the python + package + first.The required python package can be installed using the + following command, +
+ pip install dropbox +
+
+
+ This module uses an external python dependency + 'nextcloud'.Before + installing the module install the python + package + first.The required python package can be installed using the + following command, +
+ pip install pyncclient +
+
+
+
+ This module uses an external python dependency + 'nextcloud-api-wrapper'.Before + installing the module install the python + package + first.The required python package can be installed using the + following command, +
+ pip install nextcloud-api-wrapper +
+
+
+ This module uses an external python dependency 'Boto3'.Before + installing + the module install the python + package + first.The required python package can be installed using the + following + command, +
+ pip install boto3 +
-
-
-

- Overview -

+
+
+ This module uses an external python dependency 'paramiko'.Before + installing the module install the python + package + first.The required python package can be installed using the + following + command, +
+ pip install paramiko +
+
-
-

- This module helps to generate backups of your databases automatically on regular interval of times. - The generated backups can be stored into local storage, ftp server, sftp server or Google Drive. - User can enable auto remove option to automatically delete old backups. - User can enable email notification to be notified about the success and failure of the backup generation and storage. - Using Automatic Database Backup module user can generate and store database backups to multiple location.

+
+
+

+ Features +

+
+
+
+ +
+
+

+ Community & Enterprise Support

+

+ Available in Odoo 14.0 Community and Enterprise.

+
+
+
+
+ +
+
+

+ Generate Database Backup

+

+ Generate database backups on regular intervals.

+
-
+
+
+ +
+
+

+ Store Backup to FTP Server

+

+ Generated backup can be stored to remote FTP server.

+
+
-
-
-

- Features -

+
+
+ +
+
+

+ Store Backup to SFTP Server

+

+ Generated backup can be stored to remote SFTP server.

+
-
- -
-
-

- Community & Enterprise Support

-

- Available in Odoo 14.0 Community and Enterprise.

-
+
+ +
+
+

+ Store Backup to Google drive

+

+ Generated backup can be stored to google drive

+
+
-
- -
-
-

- Generate Database Backup

-

- Generate database backups on regular intervals.

-
+
+ +
+
+

+ Store Backup to Dropbox

+

+ Generated backup can be stored to Dropbox

+
-
- -
-
-

- Store Backup to FTP Server

-

- Generated backup can be stored to remote FTP server.

-
+
+ +
+
+

+ Store Backup to Onedrive

+

+ Generated backup can be stored to Onedrive

+
-
- -
-
-

- Store Backup to SFTP Server

-

- Generated backup can be stored to remote SFTP server.

-
+
+ +
+
+

+ Store Backup to Nextcloud

+

+ Generated backup can be stored to Nextcloud

+
-
- -
-
-

- Store Backup to Google drive

-

- Generated backup can be stored to google drive

-
+
+ +
+
+

+ Store Backup to Amazon S3

+

+ Generated backup can be stored to Amazon S3

+
-
- -
-
-

- Automatically remove old backups.

-

- Old backups files will be deleted automatically based on the obsolescence of backup.

-
+
+ +
+
+

+ Automatically remove old backups.

+

+ Old backups files will be deleted automatically based on the + obsolescence of backup.

+
-
- -
-
-

- Notify user on success and failure of backup generation

-

- An email notification send to user on successful backup generation also send an email notification when backup operation failed.

-
+
+ +
+
+

+ Notify user on success and failure of backup generation

+

+ An email notification send to user on successful backup + generation also send an email notification when backup operation + failed.

+
-
+
-
-
-

- Screenshots -

+
+
+

+ Screenshots +

-

- Database Backup Configuration Menu

-

- Got Setting --> Technical --> Backup Configuration to configure backups

- +

+ Database Backup Configuration Menu

+

+ Got Setting --> Technical --> Backup Configuration to configure + backups

+
-

- Crate New Database Backup Configuration

-

- Enter the database name and master password. specify backup type and destination. - Enter the backup directory path, if directory does not exist new directory will be created. -

- +

+ Crate New Database Backup Configuration

+

+ Enter the database name and master password. specify backup type and + destination. + Enter the backup directory path, if directory does not exist new + directory will be created. +

+
-

- Store Backup to Remote SFTP Server

-

- Select backup destination as SFTP, enter credentials. - Test connection button to check whether the connection is successful. -

- - -

- A successful message will be displayed if connection is successful -

- - -

- An error message will be displayed if test connection is failed -

- -
+

+ Store Backup to Remote SFTP Server

+

+ Select backup destination as SFTP, enter credentials. + Test connection button to check whether the connection is + successful. +

+ +

+ A successful message will be displayed if connection is successful +

+ -
-

- Store Backup to Remote FTP Server

-

- Select backup destination as FTP, enter credentials. - Test connection button to check whether the connection is successful. -

- +

+ An error message will be displayed if test connection is failed +

+
-

- Store Backup to Google Drive

-

- Select backup destination as Google Drive. Enter google drive folder ID. - Enable and configure Google Drive option from general settings. -

- - -

- Setup refresh token from general settings -

- +

+ Store Backup to Remote FTP Server

+

+ Select backup destination as FTP, enter credentials. + Test connection button to check whether the connection is + successful. +

+
-
-

- Automatically Remove Old Backups

-

- Enable auto remove option, specify number of days to remove backups. -

- +
+

+ Store Backup to Google Drive

+

+ You'll need to create a + new + Google API project and enabling the Google Drive API, Go to the + Google API Console and log into your + account. + While creating the project, for the Redirect URI restrictions, + copy your Odoo database URI followed by + /google_drive/authentication. Example:

+ +

+ ENABLE API AND SERVICES

+ +

+ ENABLE GOOGLE DRIVE API

+ +

+ Create Credentials, + Follow + the steps, select Website application for the Application + Type.

+ +

+ Under the Authorized + JavaScript Origins section, click + Add URI and type your + company's Odoo URL address. + Under the Authorized redirect URIs section, click + Add URI and + type your company's Odoo URL address + followed + by /google_drive/authentication. + After all the steps are completed, A Client ID and Client secret + will + be given, copy the credentials +

+ + +

+ Go to the "OAuth consent screen", then Test users and click on 'ADD + USERS' then add the user. + +

+ +
+

+ Configure Backup, Copy Client ID and Client Secret from Google Drive API + Credentials page into their respective fields. +

+ + +

+ Setup Token, it will be redirected to an authorization page. +

+ +
+
+ +
+
+

+ Store Backup to Dropbox +

+

+ To get the app key and secret key go to the App + Console. + Create a new app +

+ + + Once you created the App , you can get the App key and App Secret as seen in + the screenshot + + Choose your app's permission (files.content.write and files.content.read + permissions required).then click on Submit + + Choose Dropbox as that of the backup destination. Enter the app secret and + key and dropbox Folder. + +

+ Get the Authorization + Code + and click confirm.

+ +
-
-

+

+ Store Backup to + Onedrive +

+

+ Select Backup + Destination + as OneDrive. Enter the App key and App secret. + you'll need to register a new app in the Microsoft + Azure + portal. + While registering the app for the Redirect URI restrictions, + copy your Odoo database URI followed by + /onedrive/authentication. Example:

+ +

+ Copy the Client ID

+ +

+ Generate Client + Secret.

+ + +

+ Get OneDrive folder ID, + where need to store the backup files.

+ +

+ Configure the + Backup

+ +

+ Setup Token, it will + be + redirected to an authorization page.

+ +
+
+

+ Store Backup to + Nextcloud +

+ +

+ To create an account in Nextcloud go to + https://nextcloud.com/sign-up/, enter your email address and + sign up. + + + +

+ You will be redirected to the page as shown in the screenshot, + and it will ask you enter your email and password for the + Nextcloud.

+ + +

+ To get the Domain of the Nextcloud, go to Settings in the + Nextcloud and click on Mobile & desktop. You will see the server + address. Copy the link and paste it in your Domain Name.

+ + + +

+ Select the backup destination as Nextcloud. Enter the Domain + Name, UserName, Password and Folder Name where you want to store + your backup on the NextCloud server. Check the Connect button + to check if the connection is successful. + + + +

+ Every day, a Scheduled Action will take place to store a backup + on the Nextcloud Server. The backup will be stored as the folder + name provided in the Folder ID field in Odoo. + + + +

+ +
+

+ Store Backup to + Amazon S3 +

+ +

+ To create an account in Amazon S3 go to + https://portal.aws.amazon.com/billing/signup#/start/email, enter + your email address and sign up. + + + +

+ After you have created the account, you need to get the Access Key and + Secret Key. To get these, go to the account's Security credentials and + then to the Access Keys, and create new access keys; from there you + will get the Access Key and Secret Key.

+ + +

+ + +

+ + + +

+ Next, you need to create a Bucket Folder in the Amazon S3. To do + that, go to the Services in the top right and then to Storage and S3 + as shown in the screenshot.

+ + +

+ To create a Bucket folder, click on the Create bucket button.

+ + +

+ On creating a Bucket Folder, check the rules for naming the + Bucket folder, and select the region as well. After that, click on + the Create bucket button at the bottom of the page.

+ +

+ You will see the Bucket Folder as shown in the screenshot.

+ + +

+ Select Backup Destination as Amazon S3. Enter the Amazon S3 + Access Key, Amazon S3 Secret Key, Bucket Name (the Bucket folder you + have created in the Amazon S3) and the File Name (the folder where you + want to store your backup in the Amazon S3 Bucket Folder). Use the Test + connection button to check whether the connection is + successful.

+ + +

+ Every day, a Scheduled Action will take place to store a backup + on the Amazon S3 Server. The backup will be stored as the folder + name provided in the File Name field in Odoo.

+ + + + +
+ +
+

Notify User on Success and Failure of Backup Generation

-

- Enable notify user option, and select a user to notify. An email notification will be sent to the selected user on +

+ Enable notify user option, and select a user to notify. An email + notification will be sent to the selected user on backup successful and failure. - -

+ +
-
-

+

Successful backup notification email

- -
+ +
-
-

+

Notification email when backup generation failed

- -
+ +
-
-

+

Scheduled Action For Generating Backup

-

- - Enable the 'Automatic database Backup' scheduled action, and set up the execution interval. - Based on the scheduled action setup, backups will be generated on regular intervals. -

- - -
- -
+

+ + Enable the 'Automatic database Backup' scheduled action, and set up the + execution interval. + Based on the scheduled action setup, backups will be generated on + regular intervals. +

+ + +
- -
+ +
-

Suggested Products

-
- -