diff --git a/auto_database_backup/README.rst b/auto_database_backup/README.rst
index 484d7ce15..3d506359e 100644
--- a/auto_database_backup/README.rst
+++ b/auto_database_backup/README.rst
@@ -17,7 +17,8 @@ Company
Credits
=======
-* Developer: (v12) Ajmunnisa @ Cybrosys , Contact : odoo@cybrosys.com
+* Developer: (v12) Ajmunnisa @ Cybrosys, Farhana Jahan PT @ Cybrosys,
+  Contact : odoo@cybrosys.com
Contacts
========
diff --git a/auto_database_backup/__init__.py b/auto_database_backup/__init__.py
index 097bdb395..9b4ada7a5 100644
--- a/auto_database_backup/__init__.py
+++ b/auto_database_backup/__init__.py
@@ -19,4 +19,6 @@
# If not, see .
#
#############################################################################
+from . import controllers
from . import models
+from . import wizard
diff --git a/auto_database_backup/__manifest__.py b/auto_database_backup/__manifest__.py
index f6ba245b8..8f93281ae 100644
--- a/auto_database_backup/__manifest__.py
+++ b/auto_database_backup/__manifest__.py
@@ -21,7 +21,7 @@
#############################################################################
{
'name': "Auto Database Backup",
- 'version': '12.0.1.0.0',
+ 'version': '12.0.1.0.1',
'category': 'Extra Tools',
'summary': 'Generate automatic backup of databases and store to local, '
'google drive, dropbox, nextcloud, amazon S3, onedrive or '
@@ -39,8 +39,11 @@
'security/ir.model.access.csv',
'data/ir_cron_data.xml',
'data/mail_template_data.xml',
- 'views/db_backup_configure_views.xml'
+ 'views/db_backup_configure_views.xml',
+ 'wizard/dropbox_auth_code_views.xml'
],
+ 'external_dependencies': {
+ 'python': ['dropbox', 'pyncclient', 'boto3', 'nextcloud-api-wrapper', 'paramiko']},
'images': ['static/description/banner.jpg'],
'license': 'LGPL-3',
'installable': True,
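
Reviewer note: the manifest now declares external Python dependencies. As a quick sketch (not part of the patch), the snippet below checks that the corresponding import names are available before installing the module; the pip-name to import-name mapping is inferred from the imports added in models/db_backup_configure.py.

    # Sanity check: verify the declared external dependencies are importable
    # in the environment that runs Odoo.
    import importlib

    REQUIRED_IMPORTS = {
        'dropbox': 'dropbox',
        'pyncclient': 'nextcloud',
        'nextcloud-api-wrapper': 'nextcloud_client',
        'boto3': 'boto3',
        'paramiko': 'paramiko',
    }

    missing = []
    for pip_name, import_name in REQUIRED_IMPORTS.items():
        try:
            importlib.import_module(import_name)
        except ImportError:
            missing.append(pip_name)

    if missing:
        print("Missing packages, install with: pip install " + " ".join(missing))
    else:
        print("All external dependencies are importable.")
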
diff --git a/auto_database_backup/controllers/__init__.py b/auto_database_backup/controllers/__init__.py
new file mode 100644
index 000000000..e9b606a5a
--- /dev/null
+++ b/auto_database_backup/controllers/__init__.py
@@ -0,0 +1 @@
+from . import auto_database_backup
diff --git a/auto_database_backup/controllers/auto_database_backup.py b/auto_database_backup/controllers/auto_database_backup.py
new file mode 100644
index 000000000..7eac449bc
--- /dev/null
+++ b/auto_database_backup/controllers/auto_database_backup.py
@@ -0,0 +1,33 @@
+import json
+from odoo import http
+from odoo.http import request
+
+
+class OnedriveAuth(http.Controller):
+ """Controller for handling authentication with OneDrive and Google Drive."""
+ @http.route('/onedrive/authentication', type='http', auth="public")
+ def oauth2callback(self, **kw):
+ """
+ Callback function for OneDrive authentication.
+
+ :param kw: A dictionary of keyword arguments.
+ :return: A redirect response.
+ """
+ state = json.loads(kw['state'])
+ backup_config = request.env['db.backup.configure'].sudo().browse(
+ state.get('backup_config_id'))
+ backup_config.get_onedrive_tokens(kw.get('code'))
+ backup_config.hide_active = True
+ backup_config.active = True
+ return http.local_redirect(state.get('url_return'))
+
+ @http.route('/google_drive/authentication', type='http', auth="public")
+ def gdrive_oauth2callback(self, **kw):
+ """Callback function for Google Drive authentication."""
+ state = json.loads(kw['state'])
+ backup_config = request.env['db.backup.configure'].sudo().browse(
+ state.get('backup_config_id'))
+ backup_config.get_gdrive_tokens(kw.get('code'))
+ backup_config.hide_active = True
+ backup_config.active = True
+ return http.local_redirect(state.get('url_return'))
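
Reviewer note: both callbacks above rely on a JSON-encoded 'state' query parameter that carries the db.backup.configure record id and the URL to return to after authorization. The standalone sketch below (an illustration only, with an assumed local base URL and record id) shows that round trip using plain urllib instead of werkzeug.

    # Sketch of the OAuth 'state' round trip used by the callbacks above.
    import json
    from urllib.parse import urlencode, parse_qs, urlparse

    base_url = "http://localhost:8069"   # assumed Odoo base URL
    state = {"backup_config_id": 1,      # assumed db.backup.configure record id
             "url_return": base_url + "/web#id=1&model=db.backup.configure"}

    # Outgoing: the state dict is JSON-encoded into the authorization URL.
    auth_params = urlencode({"response_type": "code",
                             "client_id": "<client id>",
                             "state": json.dumps(state)})
    auth_url = ("https://login.microsoftonline.com/common/oauth2/v2.0/authorize?"
                + auth_params)

    # Incoming: the provider redirects back with ?code=...&state=...; the
    # controller recovers the record id exactly like oauth2callback() does.
    query = parse_qs(urlparse(auth_url).query)
    decoded = json.loads(query["state"][0])
    assert decoded["backup_config_id"] == 1
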
diff --git a/auto_database_backup/doc/RELEASE_NOTES.md b/auto_database_backup/doc/RELEASE_NOTES.md
index 6cf26b22d..32baa9288 100644
--- a/auto_database_backup/doc/RELEASE_NOTES.md
+++ b/auto_database_backup/doc/RELEASE_NOTES.md
@@ -1,6 +1,25 @@
## Module
-
#### 12.02.2024
#### Version 12.0.1.0.0
#### ADD
- Initial commit for Auto Database Backup
+
+#### 16.02.2024
+#### Version 12.0.1.0.1
+#### ADD
+- Dropbox integration added. Backups can be stored in Dropbox.
+- Onedrive integration added. Backups can be stored in Onedrive.
+- Google Drive authentication updated.
+- Nextcloud and Amazon S3 integration added. Backups can be stored in Nextcloud and Amazon S3.
diff --git a/auto_database_backup/models/db_backup_configure.py b/auto_database_backup/models/db_backup_configure.py
index 15230e059..a4361dfe6 100644
--- a/auto_database_backup/models/db_backup_configure.py
+++ b/auto_database_backup/models/db_backup_configure.py
@@ -19,21 +19,35 @@
# If not, see .
#
#############################################################################
+from odoo import api, fields, models, _
import datetime
+
+from odoo.http import request
+from odoo.service import db
+from datetime import timedelta
+import boto3
import errno
+import dropbox
import ftplib
import json
import logging
-import os
+import nextcloud_client
import odoo
+import os
import paramiko
import requests
+from nextcloud import NextCloud
+from requests.auth import HTTPBasicAuth
import tempfile
-from odoo import api, fields, models, _
+from werkzeug import urls
from odoo.exceptions import UserError, ValidationError
-from odoo.service import db
_logger = logging.getLogger(__name__)
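+# API endpoints and OAuth scope used by the Onedrive and Google Drive integrations.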
+ONEDRIVE_SCOPE = ['offline_access openid Files.ReadWrite.All']
+MICROSOFT_GRAPH_END_POINT = "https://graph.microsoft.com"
+GOOGLE_AUTH_ENDPOINT = 'https://accounts.google.com/o/oauth2/auth'
+GOOGLE_TOKEN_ENDPOINT = 'https://accounts.google.com/o/oauth2/token'
+GOOGLE_API_BASE_URL = 'https://www.googleapis.com'
class DbBackupConfigure(models.Model):
@@ -56,7 +70,11 @@ class DbBackupConfigure(models.Model):
('local', 'Local Storage'),
('google_drive', 'Google Drive'),
('ftp', 'FTP'),
- ('sftp', 'SFTP')
+ ('sftp', 'SFTP'),
+ ('dropbox', 'Dropbox'),
+ ('onedrive', 'Onedrive'),
+ ('next_cloud', 'Next Cloud'),
+ ('amazon_s3', 'Amazon S3')
], string='Backup Destination',
help='Specify the location that the backup need to store')
backup_path = fields.Char(string='Backup Path',
@@ -71,30 +89,483 @@ class DbBackupConfigure(models.Model):
ftp_user = fields.Char(string='FTP User', help='FTP User')
ftp_password = fields.Char(string='FTP Password', help='FTP password')
ftp_path = fields.Char(string='FTP Path', help='FTP Path')
- active = fields.Boolean(default=True,
- help='To know the configuration is active')
- save_to_drive = fields.Boolean(help='To know that the backup need '
- 'to save in drive')
+ dropbox_client_key = fields.Char(string='Dropbox Client ID', copy=False,
+ help='Client id of the dropbox')
+ dropbox_client_secret = fields.Char(string='Dropbox Client Secret',
+ copy=False,
+ help='Client secret of the Dropbox app')
+ dropbox_refresh_token = fields.Char(string='Dropbox Refresh Token',
+ copy=False,
+ help='Refresh token for the dropbox')
+ is_dropbox_token_generated = fields.Boolean(
+ string='Dropbox Token Generated',
+ compute='_compute_is_dropbox_token_generated',
+ copy=False, help='Is the dropbox token generated or not?')
+ dropbox_folder = fields.Char(string='Dropbox Folder', help='Dropbox folder')
+ active = fields.Boolean(default=True, help='Whether the configuration'
+ ' is active or not')
+ save_to_drive = fields.Boolean(help='Whether the backup needs to be '
+ 'stored in drive')
auto_remove = fields.Boolean(string='Remove Old Backups',
- help='Remove old backup if the value is true')
+ help='Automatically remove old backups')
days_to_remove = fields.Integer(string='Remove After',
help='Automatically delete stored backups '
'after this specified number of days')
google_drive_folder = fields.Char(string='Drive Folder ID')
notify_user = fields.Boolean(string='Notify User',
- help='Send an email notification to user '
- 'when the backup operation is successful'
- ' or failed')
+ help='Send an email notification to user when '
+ 'the backup operation is successful or '
+ 'failed')
user_id = fields.Many2one('res.users', string='User')
backup_filename = fields.Char(string='Backup Filename',
help='For Storing generated backup filename')
generated_exception = fields.Char(string='Exception',
help='Exception Encountered while Backup '
'generation')
+ gdrive_refresh_token = fields.Char(string='Google drive Refresh Token',
+ copy=False,
+ help='Refresh token for google drive')
+ gdrive_access_token = fields.Char(string='Google Drive Access Token',
+ copy=False,
+ help='Access token for google drive')
+ is_google_drive_token_generated = fields.Boolean(
+ string='Google drive Token Generated',
+ compute='_compute_is_google_drive_token_generated', copy=False,
+ help='Google drive token generated or not')
+ gdrive_client_key = fields.Char(string='Google Drive Client ID', copy=False,
+ help='Client id of the google drive')
+ gdrive_client_secret = fields.Char(string='Google Drive Client Secret',
+ copy=False,
+ help='Client secret of the Google Drive app')
+ gdrive_token_validity = fields.Datetime(
+ string='Google Drive Token Validity', copy=False,
+ help='Token validity of the google drive')
+ gdrive_redirect_uri = fields.Char(string='Google Drive Redirect URI',
+ compute='_compute_redirect_uri',
+ help='Redirect URI of the google drive')
+ domain = fields.Char(string='Domain Name', help="Field used to store the "
+ "name of a domain")
+ next_cloud_user_name = fields.Char(string='User Name',
+ help="Field used to store the user name"
+ " for a Nextcloud account.")
+ next_cloud_password = fields.Char(string='Password',
+ help="Field used to store the password"
+ " for a Nextcloud account.")
+ nextcloud_folder_key = fields.Char(string='Next Cloud Folder Id',
+ help="Field used to store the unique "
+ "identifier for a Nextcloud "
+ "folder.")
+ onedrive_client_key = fields.Char(string='Onedrive Client ID', copy=False,
+ help='Client ID of the onedrive')
+ onedrive_client_secret = fields.Char(string='Onedrive Client Secret',
+ copy=False, help='Client secret of the Onedrive app')
+ onedrive_access_token = fields.Char(string='Onedrive Access Token',
+ copy=False,
+ help='Access token for one drive')
+ onedrive_refresh_token = fields.Char(string='Onedrive Refresh Token',
+ copy=False,
+ help='Refresh token for one drive')
+ onedrive_token_validity = fields.Datetime(string='Onedrive Token Validity',
+ copy=False,
+ help='Token validity date')
+ onedrive_folder_key = fields.Char(string='Folder ID',
+ help='Folder id of the onedrive')
+ is_onedrive_token_generated = fields.Boolean(
+ string='Onedrive Tokens Generated',
+ compute='_compute_is_onedrive_token_generated',
+ copy=False, help='Whether the Onedrive tokens are generated or not')
+ onedrive_redirect_uri = fields.Char(string='Onedrive Redirect URI',
+ compute='_compute_redirect_uri',
+ help='Redirect URI of the onedrive')
+ aws_access_key = fields.Char(string="Amazon S3 Access Key",
+ help="Field used to store the Access Key"
+ " for an Amazon S3 bucket.")
+ aws_secret_access_key = fields.Char(string='Amazon S3 Secret Key',
+ help="Field used to store the Secret"
+ " Key for an Amazon S3 bucket.")
+ bucket_file_name = fields.Char(string='Bucket Name',
+ help="Field used to store the name of an"
+ " Amazon S3 bucket.")
+ aws_folder_name = fields.Char(string='Folder Name',
+ help="Field used to store the name of a"
+ " folder in an Amazon S3 bucket.")
+ success_message = fields.Char('Success Message', readonly=True)
+ success_test = fields.Boolean(string="Success Test")
+ fail_test = fields.Boolean(string="Fail Test")
+ gdrive_backup_error_test = fields.Boolean(string="Google Drive Error Test")
+ onedrive_backup_error_test = fields.Boolean(string="OneDrive Error Test")
+
+ @api.depends('gdrive_access_token', 'gdrive_refresh_token')
+ def _compute_is_google_drive_token_generated(self):
+ """Set True if the Google Drive refresh token is generated"""
+ for rec in self:
+ rec.is_google_drive_token_generated = bool(
+ rec.gdrive_access_token) and bool(rec.gdrive_refresh_token)
+
+ @api.onchange('backup_destination')
+ def _onchange_backup_destination(self):
+ self.write({
+ "fail_test": False,
+ "success_test": False,
+ "gdrive_backup_error_test": False,
+ "onedrive_backup_error_test": False
+ })
+
+ @api.onchange('gdrive_client_key', 'gdrive_client_secret',
+ 'google_drive_folder', 'onedrive_client_key',
+ 'onedrive_client_secret', 'onedrive_folder_key', 'sftp_host',
+ 'sftp_port', 'sftp_user', 'sftp_password', 'sftp_path',
+ 'ftp_host', 'ftp_port', 'ftp_user', 'ftp_password', 'ftp_path')
+ def _onchange_gdrive_backup_error_test(self):
+ if self.backup_destination == 'ftp':
+ self.write({"success_test": False, "fail_test": True})
+ if self.backup_destination == 'sftp':
+ self.write({"success_test": False, "fail_test": True})
+ if self.backup_destination == 'google_drive':
+ if self.gdrive_backup_error_test:
+ self.write({
+ "gdrive_backup_error_test": False
+ })
+ if self.backup_destination == 'onedrive':
+ if self.onedrive_backup_error_test:
+ self.write({
+ "onedrive_backup_error_test": False
+ })
+
+ def action_s3cloud(self):
+ """If it has aws_secret_access_key, which will perform s3_cloud
+ operations for connection test"""
+ if self.aws_access_key and self.aws_secret_access_key:
+ try:
+ bo3 = boto3.client(
+ 's3',
+ aws_access_key_id=self.aws_access_key,
+ aws_secret_access_key=self.aws_secret_access_key)
+ response = bo3.list_buckets()
+ for bucket in response['Buckets']:
+ if self.bucket_file_name == bucket['Name']:
+ self.active = True
+ self.hide_active = True
+ self.write({"success_test": True, "fail_test": False})
+ return
+ raise UserError(
+ _("Bucket not found. Please check the bucket name and"
+ " try again."))
+ except Exception:
+ self.write({"fail_test": True, "success_test": False})
+ self.active = False
+ self.hide_active = False
+
+ def action_nextcloud(self):
+ """If it has next_cloud_password, domain, and next_cloud_user_name
+ which will perform an action for nextcloud connection test"""
+ if self.domain and self.next_cloud_password and \
+ self.next_cloud_user_name:
+ try:
+ ncx = NextCloud(self.domain,
+ auth=HTTPBasicAuth(self.next_cloud_user_name,
+ self.next_cloud_password))
+
+ data = ncx.list_folders('/').__dict__
+ if data['raw'].status_code == 207:
+ self.active = True
+ self.hide_active = True
+ self.write({"fail_test": False, "success_test": True})
+ return
+ else:
+ self.active = False
+ self.hide_active = False
+ self.write({"fail_test": True, "success_test": False})
+ except Exception:
+ self.active = False
+ self.hide_active = False
+ self.write({"fail_test": True, "success_test": False})
+
+ def _compute_redirect_uri(self):
+ """Compute the redirect URI for onedrive and Google Drive"""
+ for rec in self:
+ base_url = request.env['ir.config_parameter'].get_param(
+ 'web.base.url')
+ rec.onedrive_redirect_uri = base_url + '/onedrive/authentication'
+ rec.gdrive_redirect_uri = base_url + '/google_drive/authentication'
+
+ @api.depends('onedrive_access_token', 'onedrive_refresh_token')
+ def _compute_is_onedrive_token_generated(self):
+ """Set true if onedrive tokens are generated"""
+ for rec in self:
+ rec.is_onedrive_token_generated = bool(
+ rec.onedrive_access_token) and bool(rec.onedrive_refresh_token)
+
+ @api.depends('dropbox_refresh_token')
+ def _compute_is_dropbox_token_generated(self):
+ """Set True if the dropbox refresh token is generated"""
+ for rec in self:
+ rec.is_dropbox_token_generated = bool(rec.dropbox_refresh_token)
+
+ def action_get_dropbox_auth_code(self):
+ """Open a wizards to set up dropbox Authorization code"""
+ return {
+ 'type': 'ir.actions.act_window',
+ 'name': 'Dropbox Authorization Wizard',
+ 'res_model': 'dropbox.auth.code',
+ 'view_mode': 'form',
+ 'target': 'new',
+ 'context': {'dropbox_auth': True}
+ }
+
+ def action_get_onedrive_auth_code(self):
+ """Generate onedrive authorization code"""
+ AUTHORITY = \
+ 'https://login.microsoftonline.com/common/oauth2/v2.0/authorize'
+ action = self.env.ref(
+ "auto_database_backup.action_db_backup_configure")
+ action_data = {
+ 'id': action.id,
+ 'name': action.name,
+ 'type': action.type,
+ 'xml_id': action.xml_id,
+ 'help': action.help,
+ 'binding_model_id': action.binding_model_id,
+ 'binding_type': action.binding_type,
+ 'display_name': action.display_name,
+ 'res_model': action.res_model,
+ 'target': action.target,
+ 'view_mode': action.view_mode,
+ 'views': action.views,
+ 'groups_id': [(6, 0, action.groups_id.ids)],
+ 'search_view_id': action.search_view_id.id if action.search_view_id else False,
+ 'filter': action.filter,
+ 'search_view': action.search_view,
+ 'limit': action.limit,
+ }
+ base_url = request.env['ir.config_parameter'].get_param('web.base.url')
+ url_return = base_url + \
+ '/web#id=%d&action=%d&view_type=form&model=%s' % (
+ self.id, action_data['id'], 'db.backup.configure')
+ state = {
+ 'backup_config_id': self.id,
+ 'url_return': url_return
+ }
+ encoded_params = urls.url_encode({
+ 'response_type': 'code',
+ 'client_id': self.onedrive_client_key,
+ 'state': json.dumps(state),
+ 'scope': ONEDRIVE_SCOPE,
+ 'redirect_uri': base_url + '/onedrive/authentication',
+ 'prompt': 'consent',
+ 'access_type': 'offline'
+ })
+ auth_url = "%s?%s" % (AUTHORITY, encoded_params)
+ return {
+ 'type': 'ir.actions.act_url',
+ 'target': 'self',
+ 'url': auth_url,
+ }
+
+ def action_get_gdrive_auth_code(self):
+ """Generate google drive authorization code"""
+ action = self.env.ref(
+ "auto_database_backup.action_db_backup_configure")
+ action_data = {
+ 'id': action.id,
+ 'name': action.name,
+ 'type': action.type,
+ 'xml_id': action.xml_id,
+ 'help': action.help,
+ 'binding_model_id': action.binding_model_id,
+ 'binding_type': action.binding_type,
+ 'display_name': action.display_name,
+ 'res_model': action.res_model,
+ 'target': action.target,
+ 'view_mode': action.view_mode,
+ 'views': action.views,
+ 'groups_id': [(6, 0, action.groups_id.ids)],
+ 'search_view_id': action.search_view_id.id if action.search_view_id else False,
+ 'filter': action.filter,
+ 'search_view': action.search_view,
+ 'limit': action.limit,
+ }
+ base_url = request.env['ir.config_parameter'].get_param('web.base.url')
+ url_return = base_url + \
+ '/web#id=%d&action=%d&view_type=form&model=%s' % (
+ self.id, action_data['id'], 'db.backup.configure')
+ state = {
+ 'backup_config_id': self.id,
+ 'url_return': url_return
+ }
+ encoded_params = urls.url_encode({
+ 'response_type': 'code',
+ 'client_id': self.gdrive_client_key,
+ 'scope': 'https://www.googleapis.com/auth/drive https://www.googleapis.com/auth/drive.file',
+ 'redirect_uri': base_url + '/google_drive/authentication',
+ 'access_type': 'offline',
+ 'state': json.dumps(state),
+ 'approval_prompt': 'force',
+ })
+ auth_url = "%s?%s" % (GOOGLE_AUTH_ENDPOINT, encoded_params)
+ return {
+ 'type': 'ir.actions.act_url',
+ 'target': 'self',
+ 'url': auth_url,
+ }
+
+ def get_gdrive_tokens(self, authorize_code):
+ """Generate onedrive tokens from authorization code."""
+ base_url = request.env['ir.config_parameter'].get_param('web.base.url')
+ headers = {"content-type": "application/x-www-form-urlencoded"}
+ data = {
+ 'code': authorize_code,
+ 'client_id': self.gdrive_client_key,
+ 'client_secret': self.gdrive_client_secret,
+ 'grant_type': 'authorization_code',
+ 'redirect_uri': base_url + '/google_drive/authentication'
+ }
+ try:
+ res = requests.post(GOOGLE_TOKEN_ENDPOINT, params=data,
+ headers=headers)
+ res.raise_for_status()
+ response = res.content and res.json() or {}
+ if response:
+ expires_in = response.get('expires_in')
+ self.write({
+ 'gdrive_access_token': response.get('access_token'),
+ 'gdrive_refresh_token': response.get('refresh_token'),
+ 'gdrive_token_validity': fields.Datetime.now() + timedelta(
+ seconds=expires_in) if expires_in else False,
+ })
+ if self.gdrive_backup_error_test:
+ self.write({
+ 'gdrive_backup_error_test': False
+ })
+ except Exception:
+ if not self.gdrive_backup_error_test:
+ self.write({"gdrive_backup_error_test": True})
+
+ def generate_onedrive_refresh_token(self):
+ """Generate onedrive access token from refresh token if expired"""
+ base_url = request.env['ir.config_parameter'].get_param('web.base.url')
+ headers = {"Content-type": "application/x-www-form-urlencoded"}
+ data = {
+ 'client_id': self.onedrive_client_key,
+ 'client_secret': self.onedrive_client_secret,
+ 'scope': ONEDRIVE_SCOPE,
+ 'grant_type': "refresh_token",
+ 'redirect_uri': base_url + '/onedrive/authentication',
+ 'refresh_token': self.onedrive_refresh_token
+ }
+ try:
+ res = requests.post(
+ "https://login.microsoftonline.com/common/oauth2/v2.0/token",
+ data=data, headers=headers)
+ res.raise_for_status()
+ response = res.content and res.json() or {}
+ if response:
+ expires_in = response.get('expires_in')
+ self.write({
+ 'onedrive_access_token': response.get('access_token'),
+ 'onedrive_refresh_token': response.get('refresh_token'),
+ 'onedrive_token_validity': fields.Datetime.now() + timedelta(
+ seconds=expires_in) if expires_in else False,
+ })
+ except requests.HTTPError as error:
+ _logger.exception("Bad microsoft onedrive request : %s !",
+ error.response.content)
+ raise error
+
+ def get_onedrive_tokens(self, authorize_code):
+ """Generate onedrive tokens from authorization code."""
+ headers = {"content-type": "application/x-www-form-urlencoded"}
+ base_url = request.env['ir.config_parameter'].get_param('web.base.url')
+ data = {
+ 'code': authorize_code,
+ 'client_id': self.onedrive_client_key,
+ 'client_secret': self.onedrive_client_secret,
+ 'grant_type': 'authorization_code',
+ 'scope': ONEDRIVE_SCOPE,
+ 'redirect_uri': base_url + '/onedrive/authentication'
+ }
+ try:
+ res = requests.post(
+ "https://login.microsoftonline.com/common/oauth2/v2.0/token",
+ data=data, headers=headers)
+ res.raise_for_status()
+ response = res.content and res.json() or {}
+ if response:
+ expires_in = response.get('expires_in')
+ self.write({
+ 'onedrive_access_token': response.get('access_token'),
+ 'onedrive_refresh_token': response.get('refresh_token'),
+ 'onedrive_token_validity': fields.Datetime.now() + timedelta(
+ seconds=expires_in) if expires_in else False,
+ })
+ if self.onedrive_backup_error_test:
+ self.write({
+ 'onedrive_backup_error_test': False
+ })
+ except Exception:
+ if not self.onedrive_backup_error_test:
+ self.write({"onedrive_backup_error_test": True})
+
+ def generate_gdrive_refresh_token(self):
+ """Generate Google Drive access token from refresh token if expired"""
+ headers = {"content-type": "application/x-www-form-urlencoded"}
+ data = {
+ 'refresh_token': self.gdrive_refresh_token,
+ 'client_id': self.gdrive_client_key,
+ 'client_secret': self.gdrive_client_secret,
+ 'grant_type': 'refresh_token',
+ }
+ try:
+ res = requests.post(GOOGLE_TOKEN_ENDPOINT, data=data,
+ headers=headers)
+ res.raise_for_status()
+ response = res.content and res.json() or {}
+ if response:
+ expires_in = response.get('expires_in')
+ self.write({
+ 'gdrive_access_token': response.get('access_token'),
+ 'gdrive_token_validity': fields.Datetime.now() + timedelta(
+ seconds=expires_in) if expires_in else False,
+ })
+ except requests.HTTPError as error:
+ error_key = error.response.json().get("error", "nc")
+ error_msg = _(
+ "An error occurred while generating the token. Your "
+ "authorization code may be invalid or may have already expired "
+ "[%s]. You should check your Client ID and Client Secret on the "
+ "Google API Console and try the authorization again.",
+ error_key)
+ raise UserError(error_msg)
+
+ def get_dropbox_auth_url(self):
+ """Return dropbox authorization url"""
+ dbx_auth = dropbox.oauth.DropboxOAuth2FlowNoRedirect(
+ self.dropbox_client_key,
+ self.dropbox_client_secret,
+ token_access_type='offline')
+ return dbx_auth.start()
+
+ def set_dropbox_refresh_token(self, auth_code):
+ """Generate and set the dropbox refresh token from authorization code"""
+ try:
+ dbx_auth = dropbox.oauth.DropboxOAuth2FlowNoRedirect(
+ self.dropbox_client_key,
+ self.dropbox_client_secret,
+ token_access_type='offline')
+ oauth_result = dbx_auth.finish(auth_code)
+ self.dropbox_refresh_token = oauth_result.refresh_token
+ except Exception:
+ raise ValidationError(
+ _('Please enter a valid authorization code'))
@api.constrains('db_name', 'master_pwd')
def _check_db_credentials(self):
- """Validate entered database name and master password"""
+ """
+ Validate entered database name and master password
+ """
database_list = db.list_dbs()
if self.db_name not in database_list:
raise ValidationError(_("Invalid Database Name!"))
@@ -115,8 +586,12 @@ class DbBackupConfigure(models.Model):
port=self.sftp_port)
sftp = client.open_sftp()
sftp.close()
- except Exception as e:
- raise UserError(_("SFTP Exception: %s", e))
+ except Exception:
+ raise UserError(
+ _("It seems there was an issue with the connection, "
+ "possibly due to incorrect information provided. "
+ "Please double-check all the information you provided "
+ "for the connection to ensure it is correct."))
finally:
client.close()
elif self.backup_destination == 'ftp':
@@ -125,19 +600,13 @@ class DbBackupConfigure(models.Model):
ftp_server.connect(self.ftp_host, int(self.ftp_port))
ftp_server.login(self.ftp_user, self.ftp_password)
ftp_server.quit()
- except Exception as e:
- raise UserError(_("FTP Exception: %s", e))
- title = _("Connection Test Succeeded!")
- message = _("Everything seems properly set up!")
- return {
- 'type': 'ir.actions.client',
- 'tag': 'display_notification',
- 'params': {
- 'title': title,
- 'message': message,
- 'sticky': False,
- }
- }
+ except Exception:
+ raise UserError(
+ _("It seems there was an issue with the connection, "
+ "possibly due to incorrect information provided. "
+ "Please double-check all the information you provided "
+ "for the connection to ensure it is correct."))
+ self.write({"success_test": True})
def _schedule_auto_backup(self):
"""Function for generating and storing backup
@@ -269,60 +738,315 @@ class DbBackupConfigure(models.Model):
client.close()
# Google Drive backup
elif rec.backup_destination == 'google_drive':
- temp = tempfile.NamedTemporaryFile(
- suffix='.%s' % rec.backup_format)
- with open(temp.name, "wb+") as tmp:
- odoo.service.db.dump_db(rec.db_name, tmp,
- rec.backup_format)
try:
- access_token = self.env[
- 'google.drive.config'].sudo().get_access_token()
- headers = {"Authorization": "Bearer %s" % access_token}
- para = {
- "name": backup_filename,
- "parents": [rec.google_drive_folder],
- }
- files = {
- 'data': ('metadata', json.dumps(para),
- 'application/json; charset=UTF-8'),
- 'file': open(temp.name, "rb")
- }
- requests.post(
- "https://www.googleapis.com/upload/drive/v3/files?"
- "uploadType=multipart",
- headers=headers,
- files=files)
- if rec.auto_remove:
- query = "parents = '%s'" % rec.google_drive_folder
- files_req = requests.get(
- "https://www.googleapis.com/drive/v3/files?q=%s"
- % query,
- headers=headers)
- files = files_req.json()['files']
- for file in files:
- file_date_req = requests.get(
- "https://www.googleapis.com/drive/v3/files/"
- "%s?fields=createdTime" %
- file['id'],
+ if rec.gdrive_token_validity <= fields.Datetime.now():
+ rec.generate_gdrive_refresh_token()
+ temp = tempfile.NamedTemporaryFile(
+ suffix='.%s' % rec.backup_format)
+ with open(temp.name, "wb+") as tmp:
+ odoo.service.db.dump_db(rec.db_name, tmp,
+ rec.backup_format)
+ try:
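+ # Upload the dump through the Drive v3 multipart endpoint with the
+ # stored access token; when auto_remove is set, files in the
+ # configured folder older than days_to_remove are deleted based on
+ # their createdTime.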
+ headers = {
+ "Authorization": "Bearer %s" % rec.gdrive_access_token}
+ para = {
+ "name": backup_filename,
+ "parents": [rec.google_drive_folder],
+ }
+ files = {
+ 'data': ('metadata', json.dumps(para),
+ 'application/json; charset=UTF-8'),
+ 'file': open(temp.name, "rb")
+ }
+ requests.post(
+ "https://www.googleapis.com/upload/drive/v3/files?uploadType=multipart",
+ headers=headers,
+ files=files
+ )
+ if rec.auto_remove:
+ query = "parents = '%s'" % rec.google_drive_folder_key
+ files_req = requests.get(
+ "https://www.googleapis.com/drive/v3/files?q=%s" % query,
headers=headers)
- create_time = file_date_req.json()['createdTime'][
- :19].replace('T', ' ')
- diff_days = (
- datetime.datetime.now() - datetime.
- datetime.strptime(create_time,
- '%Y-%m-%d '
- '%H:%M:%S')).days
- if diff_days >= rec.days_to_remove:
- requests.delete(
- "https://www.googleapis.com/drive/v3/"
- "files/%s" %
- file['id'],
- headers=headers)
+ files = files_req.json()['files']
+ for file in files:
+ file_date_req = requests.get(
+ "https://www.googleapis.com/drive/v3/files/%s?fields=createdTime" %
+ file['id'], headers=headers)
+ create_time = file_date_req.json()[
+ 'createdTime'][
+ :19].replace('T', ' ')
+ diff_days = (
+ fields.datetime.now() - fields.datetime.strptime(
+ create_time, '%Y-%m-%d %H:%M:%S')).days
+ if diff_days >= rec.days_to_remove:
+ requests.delete(
+ "https://www.googleapis.com/drive/v3/files/%s" %
+ file['id'], headers=headers)
+ if rec.notify_user:
+ mail_template_success.send_mail(rec.id,
+ force_send=True)
+ except Exception as e:
+ rec.generated_exception = e
+ _logger.info('Google Drive Exception: %s', e)
+ if rec.notify_user:
+ mail_template_failed.send_mail(rec.id,
+ force_send=True)
+ except Exception:
if rec.notify_user:
- mail_template_success.send_mail(rec.id,
- force_send=True)
- except Exception as e:
- rec.generated_exception = e
- _logger.info('Google Drive Exception: %s', e)
+ mail_template_failed.send_mail(rec.id,
+ force_send=True)
+ raise ValidationError(
+ 'Please check the credentials before activation')
+ else:
+ raise ValidationError('Please check connection')
+ # Dropbox backup
+ elif rec.backup_destination == 'dropbox':
+ try:
+ temp = tempfile.NamedTemporaryFile(
+ suffix='.%s' % rec.backup_format)
+ with open(temp.name, "wb+") as tmp:
+ odoo.service.db.dump_db(rec.db_name, tmp,
+ rec.backup_format)
+ try:
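+ # Open a Dropbox client with the stored refresh token, upload the
+ # dump, and, when auto_remove is set, delete files whose
+ # client_modified date is older than days_to_remove.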
+ dbx = dropbox.Dropbox(
+ app_key=rec.dropbox_client_key,
+ app_secret=rec.dropbox_client_secret,
+ oauth2_refresh_token=rec.dropbox_refresh_token)
+ dropbox_destination = (rec.dropbox_folder + '/' +
+ backup_filename)
+ dbx.files_upload(temp.read(), dropbox_destination)
+ if rec.auto_remove:
+ files = dbx.files_list_folder(
+ rec.dropbox_folder)
+ file_entries = files.entries
+ expired_files = list(filter(
+ lambda fl: (fields.datetime.now() -
+ fl.client_modified).days >=
+ rec.days_to_remove,
+ file_entries))
+ for file in expired_files:
+ dbx.files_delete_v2(file.path_display)
+ if rec.notify_user:
+ mail_template_success.send_mail(rec.id,
+ force_send=True)
+ except Exception as error:
+ rec.generated_exception = error
+ _logger.info('Dropbox Exception: %s', error)
+ if rec.notify_user:
+ mail_template_failed.send_mail(rec.id,
+ force_send=True)
+ except Exception:
if rec.notify_user:
mail_template_failed.send_mail(rec.id, force_send=True)
+ raise ValidationError(
+ 'Please check the credentials before activation')
+ else:
+ raise ValidationError('Please check connection')
+ # Onedrive Backup
+ elif rec.backup_destination == 'onedrive':
+ try:
+ if rec.onedrive_token_validity <= fields.Datetime.now():
+ rec.generate_onedrive_refresh_token()
+ temp = tempfile.NamedTemporaryFile(
+ suffix='.%s' % rec.backup_format)
+ with open(temp.name, "wb+") as tmp:
+ odoo.service.db.dump_db(rec.db_name, tmp,
+ rec.backup_format)
+ headers = {
+ 'Authorization': 'Bearer %s' % rec.onedrive_access_token,
+ 'Content-Type': 'application/json'}
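+ # Create an upload session in the configured Onedrive folder and PUT
+ # the dump to the returned uploadUrl; old files are removed by
+ # createdDateTime when auto_remove is set.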
+ upload_session_url = MICROSOFT_GRAPH_END_POINT + "/v1.0/me/drive/items/%s:/%s:/createUploadSession" % (
+ rec.onedrive_folder_key, backup_filename)
+ try:
+ upload_session = requests.post(upload_session_url,
+ headers=headers)
+ upload_url = upload_session.json().get('uploadUrl')
+ requests.put(upload_url, data=temp.read())
+ if rec.auto_remove:
+ list_url = MICROSOFT_GRAPH_END_POINT + "/v1.0/me/drive/items/%s/children" % rec.onedrive_folder_key
+ response = requests.get(list_url, headers=headers)
+ files = response.json().get('value')
+ for file in files:
+ create_time = file['createdDateTime'][
+ :19].replace(
+ 'T',
+ ' ')
+ diff_days = (
+ fields.datetime.now() - fields.datetime.strptime(
+ create_time, '%Y-%m-%d %H:%M:%S')).days
+ if diff_days >= rec.days_to_remove:
+ delete_url = MICROSOFT_GRAPH_END_POINT + "/v1.0/me/drive/items/%s" % \
+ file['id']
+ requests.delete(delete_url, headers=headers)
+ if rec.notify_user:
+ mail_template_success.send_mail(rec.id,
+ force_send=True)
+ except Exception as error:
+ rec.generated_exception = error
+ _logger.info('Onedrive Exception: %s', error)
+ if rec.notify_user:
+ mail_template_failed.send_mail(rec.id,
+ force_send=True)
+ except Exception:
+ if rec.notify_user:
+ mail_template_failed.send_mail(rec.id, force_send=True)
+ raise ValidationError(
+ 'Please check the credentials before activation')
+ else:
+ raise ValidationError('Please check connection')
+ # NextCloud Backup
+ elif rec.backup_destination == 'next_cloud':
+ try:
+ if rec.domain and rec.next_cloud_password and \
+ rec.next_cloud_user_name:
+ try:
+ # Connect to NextCloud using the provided username
+ # and password
+ ncx = NextCloud(rec.domain,
+ auth=HTTPBasicAuth(
+ rec.next_cloud_user_name,
+ rec.next_cloud_password))
+ # Connect to NextCloud again to perform additional
+ # operations
+ nc = nextcloud_client.Client(rec.domain)
+ nc.login(rec.next_cloud_user_name,
+ rec.next_cloud_password)
+ # Get the folder name from the NextCloud folder ID
+ folder_name = rec.nextcloud_folder_key
+ # If auto_remove is enabled, remove backup files
+ # older than specified days
+ if rec.auto_remove:
+ folder_path = "/" + folder_name
+ for item in nc.list(folder_path):
+ backup_file_name = item.path.split("/")[-1]
+ backup_date_str = \
+ backup_file_name.split("_")[
+ 2]
+ backup_date = fields.datetime.strptime(
+ backup_date_str, '%Y-%m-%d').date()
+ if (fields.date.today() - backup_date).days \
+ >= rec.days_to_remove:
+ nc.delete(item.path)
+ # If notify_user is enabled, send a success email
+ # notification
+ if rec.notify_user:
+ mail_template_success.send_mail(rec.id,
+ force_send=True)
+ except Exception as error:
+ rec.generated_exception = error
+ _logger.info('NextCloud Exception: %s', error)
+ if rec.notify_user:
+ # If an exception occurs, send a failed email
+ # notification
+ mail_template_failed.send_mail(rec.id,
+ force_send=True)
+ # Get the list of folders in the root directory of NextCloud
+ data = ncx.list_folders('/').__dict__
+ folders = [
+ [file_name['href'].split('/')[-2],
+ file_name['file_id']]
+ for file_name in data['data'] if
+ file_name['href'].endswith('/')]
+ # If the folder name is not found in the list of folders,
+ # create the folder
+ if folder_name not in [file[0] for file in folders]:
+ nc.mkdir(folder_name)
+ # Dump the database to a temporary file
+ temp = tempfile.NamedTemporaryFile(
+ suffix='.%s' % rec.backup_format)
+ with open(temp.name, "wb+") as tmp:
+ odoo.service.db.dump_db(rec.db_name, tmp,
+ rec.backup_format)
+ backup_file_path = temp.name
+ remote_file_path = f"/{folder_name}/{rec.db_name}_" \
+ f"{backup_time}.{rec.backup_format}"
+ nc.put_file(remote_file_path, backup_file_path)
+ else:
+ # Dump the database to a temporary file
+ temp = tempfile.NamedTemporaryFile(
+ suffix='.%s' % rec.backup_format)
+ with open(temp.name, "wb+") as tmp:
+ odoo.service.db.dump_db(rec.db_name, tmp,
+ rec.backup_format)
+ backup_file_path = temp.name
+ remote_file_path = f"/{folder_name}/{rec.db_name}_" \
+ f"{backup_time}.{rec.backup_format}"
+ nc.put_file(remote_file_path, backup_file_path)
+ except Exception:
+ raise ValidationError('Please check connection')
+ # Amazon S3 Backup
+ elif rec.backup_destination == 'amazon_s3':
+ if rec.aws_access_key and rec.aws_secret_access_key:
+ try:
+ # Create a boto3 client for Amazon S3 with provided
+ # access key id and secret access key
+ bo3 = boto3.client(
+ 's3',
+ aws_access_key_id=rec.aws_access_key,
+ aws_secret_access_key=rec.aws_secret_access_key)
+ # If auto_remove is enabled, remove the backups that
+ # are older than specified days from the S3 bucket
+ if rec.auto_remove:
+ folder_path = rec.aws_folder_name
+ response = bo3.list_objects(
+ Bucket=rec.bucket_file_name,
+ Prefix=folder_path)
+ today = fields.date.today()
+ for file in response['Contents']:
+ file_path = file['Key']
+ last_modified = file['LastModified']
+ date = last_modified.date()
+ age_in_days = (today - date).days
+ if age_in_days >= rec.days_to_remove:
+ bo3.delete_object(
+ Bucket=rec.bucket_file_name,
+ Key=file_path)
+ # Create a boto3 resource for Amazon S3 with provided
+ # access key id and secret access key
+ s3 = boto3.resource(
+ 's3',
+ aws_access_key_id=rec.aws_access_key,
+ aws_secret_access_key=rec.aws_secret_access_key)
+ # Create a folder in the specified bucket, if it
+ # doesn't already exist
+ s3.Object(rec.bucket_file_name,
+ rec.aws_folder_name + '/').put()
+ bucket = s3.Bucket(rec.bucket_file_name)
+ # Get all the prefixes in the bucket
+ prefixes = set()
+ for obj in bucket.objects.all():
+ key = obj.key
+ if key.endswith('/'):
+ prefix = key[:-1] # Remove the trailing slash
+ prefixes.add(prefix)
+ # If the specified folder is present in the bucket,
+ # take a backup of the database and upload it to the
+ # S3 bucket
+ if rec.aws_folder_name in prefixes:
+ temp = tempfile.NamedTemporaryFile(
+ suffix='.%s' % rec.backup_format)
+ with open(temp.name, "wb+") as tmp:
+ odoo.service.db.dump_db(rec.db_name, tmp,
+ rec.backup_format)
+ backup_file_path = temp.name
+ remote_file_path = f"{rec.aws_folder_name}/{rec.db_name}_" \
+ f"{backup_time}.{rec.backup_format}"
+ s3.Object(rec.bucket_file_name,
+ remote_file_path).upload_file(
+ backup_file_path)
+ # If notify_user is enabled, send email to the
+ # user notifying them about the successful backup
+ if rec.notify_user:
+ mail_template_success.send_mail(rec.id,
+ force_send=True)
+ except Exception as error:
+ # If any error occurs, set the 'generated_exception'
+ # field to the error message and log the error
+ rec.generated_exception = error
+ _logger.info('Amazon S3 Exception: %s', error)
+ # If notify_user is enabled, send email to the user
+ # notifying them about the failed backup
+ if rec.notify_user:
+ mail_template_failed.send_mail(rec.id,
+ force_send=True)
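
Reviewer note: the Dropbox wizard flow added above (get_dropbox_auth_url / set_dropbox_refresh_token) wraps the SDK's no-redirect OAuth flow. The standalone sketch below shows the same flow outside Odoo, assuming the 'dropbox' package is installed and the placeholder app credentials are substituted.

    # Sketch of the Dropbox offline-access flow used by the wizard.
    import dropbox

    APP_KEY = "<dropbox app key>"        # placeholder
    APP_SECRET = "<dropbox app secret>"  # placeholder

    flow = dropbox.oauth.DropboxOAuth2FlowNoRedirect(
        APP_KEY, APP_SECRET, token_access_type='offline')

    # Step 1: send the user to the authorization URL and let them copy the code.
    print("Visit:", flow.start())
    auth_code = input("Paste the authorization code here: ").strip()

    # Step 2: exchange the code for tokens; the refresh token is what the
    # module stores in dropbox_refresh_token and later passes to
    # dropbox.Dropbox(oauth2_refresh_token=...).
    result = flow.finish(auth_code)
    print("Refresh token:", result.refresh_token)

    dbx = dropbox.Dropbox(app_key=APP_KEY, app_secret=APP_SECRET,
                          oauth2_refresh_token=result.refresh_token)
    print(dbx.users_get_current_account().email)
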
diff --git a/auto_database_backup/static/description/assets/screenshots/add.png b/auto_database_backup/static/description/assets/screenshots/add.png
new file mode 100644
index 000000000..2263161ff
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/add.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/amazon_s3_7.png b/auto_database_backup/static/description/assets/screenshots/amazon_s3_7.png
new file mode 100644
index 000000000..9a740ae0c
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/amazon_s3_7.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/amazon_s3_8.png b/auto_database_backup/static/description/assets/screenshots/amazon_s3_8.png
new file mode 100644
index 000000000..c1dd06a9a
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/amazon_s3_8.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/amazon_s3_pci1.png b/auto_database_backup/static/description/assets/screenshots/amazon_s3_pci1.png
new file mode 100644
index 000000000..5fcd336d6
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/amazon_s3_pci1.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/amazon_s3_pic 2.png b/auto_database_backup/static/description/assets/screenshots/amazon_s3_pic 2.png
new file mode 100644
index 000000000..d259da8a9
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/amazon_s3_pic 2.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/amazons3-1.png b/auto_database_backup/static/description/assets/screenshots/amazons3-1.png
new file mode 100644
index 000000000..5fa2872ad
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/amazons3-1.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/amazons3_4.png b/auto_database_backup/static/description/assets/screenshots/amazons3_4.png
new file mode 100644
index 000000000..c50a20cc8
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/amazons3_4.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/amazons3_5.png b/auto_database_backup/static/description/assets/screenshots/amazons3_5.png
new file mode 100644
index 000000000..415d521c4
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/amazons3_5.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/amazons3_6.png b/auto_database_backup/static/description/assets/screenshots/amazons3_6.png
new file mode 100644
index 000000000..284490127
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/amazons3_6.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/amazons3_7.png b/auto_database_backup/static/description/assets/screenshots/amazons3_7.png
new file mode 100644
index 000000000..f9345a6d8
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/amazons3_7.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/amazons3_access.png b/auto_database_backup/static/description/assets/screenshots/amazons3_access.png
new file mode 100644
index 000000000..270d27035
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/amazons3_access.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/amazons3_pick 3.png b/auto_database_backup/static/description/assets/screenshots/amazons3_pick 3.png
new file mode 100644
index 000000000..22bbce54c
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/amazons3_pick 3.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/amazons3_signup.png b/auto_database_backup/static/description/assets/screenshots/amazons3_signup.png
new file mode 100644
index 000000000..52357df9a
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/amazons3_signup.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/backup.zip b/auto_database_backup/static/description/assets/screenshots/backup.zip
deleted file mode 100644
index 4376b8f7d..000000000
Binary files a/auto_database_backup/static/description/assets/screenshots/backup.zip and /dev/null differ
diff --git a/auto_database_backup/static/description/assets/screenshots/backup1.png b/auto_database_backup/static/description/assets/screenshots/backup1.png
index 770993add..62f5c2bd1 100644
Binary files a/auto_database_backup/static/description/assets/screenshots/backup1.png and b/auto_database_backup/static/description/assets/screenshots/backup1.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/backup10.png b/auto_database_backup/static/description/assets/screenshots/backup10.png
index b7ebe8c93..2d77aad17 100644
Binary files a/auto_database_backup/static/description/assets/screenshots/backup10.png and b/auto_database_backup/static/description/assets/screenshots/backup10.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/backup13.png b/auto_database_backup/static/description/assets/screenshots/backup13.png
index d2e1ba8e5..722de25fd 100644
Binary files a/auto_database_backup/static/description/assets/screenshots/backup13.png and b/auto_database_backup/static/description/assets/screenshots/backup13.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/backup14.png b/auto_database_backup/static/description/assets/screenshots/backup14.png
index 1b9641e58..3b4aff440 100644
Binary files a/auto_database_backup/static/description/assets/screenshots/backup14.png and b/auto_database_backup/static/description/assets/screenshots/backup14.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/backup2.png b/auto_database_backup/static/description/assets/screenshots/backup2.png
index 6507d59a5..0aee14ce2 100644
Binary files a/auto_database_backup/static/description/assets/screenshots/backup2.png and b/auto_database_backup/static/description/assets/screenshots/backup2.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/backup3.png b/auto_database_backup/static/description/assets/screenshots/backup3.png
index a2b4d651a..0d3732977 100644
Binary files a/auto_database_backup/static/description/assets/screenshots/backup3.png and b/auto_database_backup/static/description/assets/screenshots/backup3.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/backup4.png b/auto_database_backup/static/description/assets/screenshots/backup4.png
index c5139df9a..f4b28b68c 100644
Binary files a/auto_database_backup/static/description/assets/screenshots/backup4.png and b/auto_database_backup/static/description/assets/screenshots/backup4.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/backup5.png b/auto_database_backup/static/description/assets/screenshots/backup5.png
index 6d6486d13..5a939249f 100644
Binary files a/auto_database_backup/static/description/assets/screenshots/backup5.png and b/auto_database_backup/static/description/assets/screenshots/backup5.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/backup6.png b/auto_database_backup/static/description/assets/screenshots/backup6.png
index 9b0be5bfa..1992610d5 100644
Binary files a/auto_database_backup/static/description/assets/screenshots/backup6.png and b/auto_database_backup/static/description/assets/screenshots/backup6.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/backup7.png b/auto_database_backup/static/description/assets/screenshots/backup7.png
index 0d4e9b446..d340214f2 100644
Binary files a/auto_database_backup/static/description/assets/screenshots/backup7.png and b/auto_database_backup/static/description/assets/screenshots/backup7.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/backup8.png b/auto_database_backup/static/description/assets/screenshots/backup8.png
index c0dc26f93..856383732 100644
Binary files a/auto_database_backup/static/description/assets/screenshots/backup8.png and b/auto_database_backup/static/description/assets/screenshots/backup8.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/backup9.png b/auto_database_backup/static/description/assets/screenshots/backup9.png
index 259c703de..2376a7950 100644
Binary files a/auto_database_backup/static/description/assets/screenshots/backup9.png and b/auto_database_backup/static/description/assets/screenshots/backup9.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/drive1.png b/auto_database_backup/static/description/assets/screenshots/drive1.png
new file mode 100644
index 000000000..29ea97a8d
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/drive1.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/drive2.png b/auto_database_backup/static/description/assets/screenshots/drive2.png
new file mode 100644
index 000000000..0b5f7a388
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/drive2.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/drive3.png b/auto_database_backup/static/description/assets/screenshots/drive3.png
new file mode 100644
index 000000000..ab9333477
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/drive3.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/drive4.png b/auto_database_backup/static/description/assets/screenshots/drive4.png
new file mode 100644
index 000000000..e2803dd15
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/drive4.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/drive5.png b/auto_database_backup/static/description/assets/screenshots/drive5.png
new file mode 100644
index 000000000..633d465bc
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/drive5.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/drop1.png b/auto_database_backup/static/description/assets/screenshots/drop1.png
new file mode 100644
index 000000000..af02afc76
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/drop1.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/drop2.png b/auto_database_backup/static/description/assets/screenshots/drop2.png
new file mode 100644
index 000000000..488fb6cbc
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/drop2.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/drop3.png b/auto_database_backup/static/description/assets/screenshots/drop3.png
new file mode 100644
index 000000000..1088638e0
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/drop3.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/drop4.png b/auto_database_backup/static/description/assets/screenshots/drop4.png
new file mode 100644
index 000000000..f8eeae60e
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/drop4.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/dropbox-1.png b/auto_database_backup/static/description/assets/screenshots/dropbox-1.png
new file mode 100644
index 000000000..18675f61d
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/dropbox-1.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/dropbox-2.png b/auto_database_backup/static/description/assets/screenshots/dropbox-2.png
new file mode 100644
index 000000000..1f721d44e
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/dropbox-2.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/hero.gif b/auto_database_backup/static/description/assets/screenshots/hero.gif
index 5010451e3..0de198bc0 100644
Binary files a/auto_database_backup/static/description/assets/screenshots/hero.gif and b/auto_database_backup/static/description/assets/screenshots/hero.gif differ
diff --git a/auto_database_backup/static/description/assets/screenshots/newcloud1.png b/auto_database_backup/static/description/assets/screenshots/newcloud1.png
new file mode 100644
index 000000000..89b6501ce
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/newcloud1.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/newcloud2.png b/auto_database_backup/static/description/assets/screenshots/newcloud2.png
new file mode 100644
index 000000000..b213b1145
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/newcloud2.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/newcloud2_ds.png b/auto_database_backup/static/description/assets/screenshots/newcloud2_ds.png
new file mode 100644
index 000000000..2c97c2316
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/newcloud2_ds.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/next_cloud-1.png b/auto_database_backup/static/description/assets/screenshots/next_cloud-1.png
new file mode 100644
index 000000000..97f925568
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/next_cloud-1.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/next_cloud2.png b/auto_database_backup/static/description/assets/screenshots/next_cloud2.png
new file mode 100644
index 000000000..bfa0ff1f6
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/next_cloud2.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/next_cloud_9.png b/auto_database_backup/static/description/assets/screenshots/next_cloud_9.png
new file mode 100644
index 000000000..261d476f4
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/next_cloud_9.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/nextcloud_4.png b/auto_database_backup/static/description/assets/screenshots/nextcloud_4.png
new file mode 100644
index 000000000..a0fbb2f36
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/nextcloud_4.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/nextcloud_5.png b/auto_database_backup/static/description/assets/screenshots/nextcloud_5.png
new file mode 100644
index 000000000..14a69f1e0
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/nextcloud_5.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/nextcloud_6.png b/auto_database_backup/static/description/assets/screenshots/nextcloud_6.png
new file mode 100644
index 000000000..a7bb81056
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/nextcloud_6.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/nextcloud_create_3.png b/auto_database_backup/static/description/assets/screenshots/nextcloud_create_3.png
new file mode 100644
index 000000000..2548aa71e
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/nextcloud_create_3.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/onedrive-1.png b/auto_database_backup/static/description/assets/screenshots/onedrive-1.png
new file mode 100644
index 000000000..b39cbb2d5
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/onedrive-1.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/onedrive1.png b/auto_database_backup/static/description/assets/screenshots/onedrive1.png
new file mode 100644
index 000000000..009d6c7f6
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/onedrive1.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/onedrive2.png b/auto_database_backup/static/description/assets/screenshots/onedrive2.png
new file mode 100644
index 000000000..155d97204
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/onedrive2.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/onedrive3.png b/auto_database_backup/static/description/assets/screenshots/onedrive3.png
new file mode 100644
index 000000000..f4e564206
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/onedrive3.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/onedrive4.png b/auto_database_backup/static/description/assets/screenshots/onedrive4.png
new file mode 100644
index 000000000..4ae8a4c0f
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/onedrive4.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/onedrive5.png b/auto_database_backup/static/description/assets/screenshots/onedrive5.png
new file mode 100644
index 000000000..44123fcd5
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/onedrive5.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/onedrive6.png b/auto_database_backup/static/description/assets/screenshots/onedrive6.png
new file mode 100644
index 000000000..4f241299c
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/onedrive6.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/onedrive7.png b/auto_database_backup/static/description/assets/screenshots/onedrive7.png
new file mode 100644
index 000000000..2da4020c1
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/onedrive7.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/onedrive8.png b/auto_database_backup/static/description/assets/screenshots/onedrive8.png
new file mode 100644
index 000000000..61f5cb9bc
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/onedrive8.png differ
diff --git a/auto_database_backup/static/description/assets/screenshots/onedrive9.png b/auto_database_backup/static/description/assets/screenshots/onedrive9.png
new file mode 100644
index 000000000..b2894148f
Binary files /dev/null and b/auto_database_backup/static/description/assets/screenshots/onedrive9.png differ
diff --git a/auto_database_backup/static/description/index.html b/auto_database_backup/static/description/index.html
index a46b98955..a83306b14 100644
--- a/auto_database_backup/static/description/index.html
+++ b/auto_database_backup/static/description/index.html
@@ -31,7 +31,7 @@
Auto Database Backup
- A Module For Generating Database Backup And Storing Backup To Multiple Locations.
+ Generate automatic backups of databases and store them to local storage, Google Drive, Dropbox, Nextcloud, Amazon S3, OneDrive or a remote server, Odoo 12
@@ -112,12 +112,67 @@
- This module helps to generate backups of your databases automatically on regular interval of times.
- The generated backups can be stored into local storage, ftp server, sftp server or Google Drive.
- User can enable auto remove option to automatically delete old backups.
- User can enable email notification to be notified about the success and failure of the backup generation and storage.
- Using Automatic Database Backup module user can generate and store database backups to multiple location.
+ This module helps to generate backups of your databases automatically at
+ regular intervals. The generated backups can be stored into
+ local storage, FTP server, SFTP server, Dropbox, Nextcloud, Google Drive,
+ OneDrive or Amazon S3. Users can enable the auto remove option to
+ automatically delete old backups, and enable email notifications to be
+ notified about the success and failure of backup generation and storage.
+ Using the Automatic Database Backup module, users can generate and store
+ database backups to multiple locations.
+
+
+
+ This module uses an external Python dependency, 'dropbox'. Before
+ installing the module, install the Python package first. The required
+ package can be installed using the following command,
+
+ pip install dropbox
+
+
+
+ This module uses an external Python dependency, 'pyncclient' (Nextcloud
+ client). Before installing the module, install the Python package first.
+ The required package can be installed using the following command,
+
+ pip install pyncclient
+
+
+
+
+ This module uses an external Python dependency, 'nextcloud-api-wrapper'.
+ Before installing the module, install the Python package first. The
+ required package can be installed using the following command,
+
+ pip install nextcloud-api-wrapper
+
+
+
+
+ This module uses an external Python dependency, 'boto3'. Before installing
+ the module, install the Python package first. The required package can be
+ installed using the following command,
+
+ pip install boto3
+
+
+
+ This module uses an external Python dependency, 'paramiko'. Before
+ installing the module, install the Python package first. The required
+ package can be installed using the following command,
+
+ pip install paramiko
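If preferred, all of the packages listed above can be installed with a single command:

    pip install dropbox pyncclient nextcloud-api-wrapper boto3 paramiko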
@@ -143,7 +198,7 @@
Available in Odoo 12.0 Community and Enterprise.
-
+
@@ -178,7 +233,76 @@
- Automatically Remove Old Backups.
+ Store Backup to SFTP Server
+
+ Generated backups can be stored to a remote SFTP server.
+
+
+
+
+
+

+
+
+
+ Store Backup to Google Drive
+
+ Generated backups can be stored to Google Drive.
+
+
+
+
+

+
+
+
+ Store Backup to OneDrive
+
+ Generated backups can be stored to OneDrive.
+
+
+
+
+
+

+
+
+
+ Store Backup to Nextcloud
+
+ Generated backups can be stored to Nextcloud.
+
+
+
+
+
+

+
+
+
+ Store Backup to Amazon S3
+
+ Generated backups can be stored to Amazon S3.
+
+
+
+
+
+

+
+
+
+ Automatically Remove Old Backups
Old backup files will be deleted automatically based on the age of the backups.
@@ -192,7 +316,7 @@
- Notify User On Success And Failure Of Backup Generation
+ Notify User on Success and Failure of Backup Generation
An email notification is sent to the user on successful backup generation, and another notification is sent when the backup operation fails.
@@ -208,6 +332,7 @@
Screenshots
+
@@ -243,6 +368,20 @@
+
+
+ A success message will be displayed if the connection is successful.
+
+

+
+
+ An error message will be displayed if the test connection fails.
+
+
@@ -259,22 +398,63 @@
height="auto" />
-
-
+
Store Backup to Google Drive
+
+ You'll need to create a new Google API project and enable the Google
+ Drive API. Go to the Google API Console and log into your account.
+ While creating the project, for the Redirect URI restrictions,
+ copy your Odoo database URI followed by
+ /google_drive/authentication. Example:
+

+
+ ENABLE API AND SERVICES
+

+
+ ENABLE GOOGLE DRIVE API
+

+
+ Create Credentials. Follow the steps and select Web application as the
+ Application Type.
+

+
+ Under the Authorized JavaScript origins section, click + Add URI and
+ type your company's Odoo URL address.
+ Under the Authorized redirect URIs section, click + Add URI and type
+ your company's Odoo URL address followed by /google_drive/authentication.
+ After all the steps are completed, a Client ID and Client Secret will be
+ provided; copy the credentials.
+
+

+
+
+ Go to the "OAuth consent screen", open Test users, click on 'ADD USERS' and add the user.
+
+
+

+
+
- Select backup destination as Google Drive. Enter google drive folder ID.
- Enable and configure Google Drive option from general settings.
+ Configure the backup: copy the Client ID and Client Secret from the Google Drive API Credentials page into their respective fields.
- Setup refresh token from general settings
+ Set up the token; you will be redirected to an authorization page.
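For reference, this Setup Token step boils down to a standard OAuth 2.0 authorization-code exchange against Google's token endpoint. A minimal Python sketch with placeholder values (the module's own implementation is not part of this diff):

    import requests

    # Placeholder values: the Client ID/Secret come from the Credentials page,
    # the code is the one returned to the /google_drive/authentication redirect.
    client_id = 'your-client-id.apps.googleusercontent.com'
    client_secret = 'your-client-secret'
    auth_code = 'code-returned-by-google'
    redirect_uri = 'https://yourodoo.example.com/google_drive/authentication'

    # Exchange the authorization code for an access token and a refresh token.
    response = requests.post('https://oauth2.googleapis.com/token', data={
        'code': auth_code,
        'client_id': client_id,
        'client_secret': client_secret,
        'redirect_uri': redirect_uri,
        'grant_type': 'authorization_code',
    })
    tokens = response.json()  # contains 'access_token' and 'refresh_token'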

@@ -282,16 +462,192 @@
-
- Automatically Remove Old Backups
-
- Enable auto remove option, specify number of days to remove backups.
-
+
+
+ Store Backup to Dropbox
+
+
+ To get the App key and App secret, go to the Dropbox App Console and
+ create a new app.
+
+

+
+ Once you have created the app, you can get the App key and App secret as shown in the screenshot.
+

+ Choose your app's permissions (the files.content.write and files.content.read permissions are required), then click on Submit.
+

+ Choose Dropbox as the backup destination. Enter the App key, App secret and the Dropbox folder.
+

+
+ Get the Authorization Code and click Confirm.
+

+
+
+
+
+ Store Backup to OneDrive
+
+
+ Select the Backup Destination as OneDrive and enter the App key and
+ App secret. You'll need to register a new app in the Microsoft Azure
+ portal. While registering the app, for the Redirect URI restrictions,
+ copy your Odoo database URI followed by
+ /onedrive/authentication. Example:
+

+
+ Copy the Client ID
+

+
+ Generate Client
+ Secret.
+

+

+
+ Get the OneDrive folder ID where the backup files need to be stored.
+

+
+ Configure the
+ Backup
+

+
+ Set up the token; you will be redirected to an authorization page.
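The backend then has to push the generated backup into the selected OneDrive folder. As a rough illustration only (placeholder token, folder ID and file name; the module's actual upload code is not shown in this diff), a simple upload through the Microsoft Graph API looks like this:

    import requests

    access_token = 'token-obtained-after-authorization'  # placeholder
    folder_id = 'onedrive-folder-id'                      # placeholder
    backup_path = '/tmp/db_backup.zip'                    # placeholder

    # Simple upload (suitable for files up to about 4 MB) into a OneDrive
    # folder; larger backups would require a Graph upload session instead.
    with open(backup_path, 'rb') as backup_file:
        response = requests.put(
            'https://graph.microsoft.com/v1.0/me/drive/items/'
            + folder_id + ':/db_backup.zip:/content',
            headers={'Authorization': 'Bearer ' + access_token,
                     'Content-Type': 'application/octet-stream'},
            data=backup_file)
    response.raise_for_status()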
+

+
+
+
+ Store Backup to
+ Nextcloud
+
+
+
+ To create an account in Nextcloud, go to
+ https://nextcloud.com/sign-up/, enter your email address and sign up.
+
+
+
+
+ You will be redirected to the page shown in the screenshot, and it
+ will ask you to enter your email and password for Nextcloud.
+

+
+
+ To get the domain of your Nextcloud, go to Settings in Nextcloud and
+ click on Mobile & desktop. You will see the server address; copy the
+ link and paste it into your Domain Name.
+

+
+
+
+ Select the backup destination as Nextcloud. Enter the Domain Name,
+ Username, Password and the Folder Name where you want to store your
+ backup on the Nextcloud server. Use the Connect button to check whether
+ the connection is successful.
+
+
+
+
+ Every day, a Scheduled Action will take place to store a backup on the
+ Nextcloud server. The backup will be stored under the folder name
+ provided in the Folder ID field in Odoo.
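Behind the scenes the upload goes to Nextcloud's WebDAV endpoint. The module relies on the pyncclient / nextcloud-api-wrapper packages, but the underlying call is equivalent to the following sketch (all values are placeholders):

    import requests

    domain = 'https://cloud.example.com'  # Domain Name field (placeholder)
    user = 'backup_user'                  # Username (placeholder)
    password = 'secret'                   # Password (placeholder)
    folder = 'odoo_backups'               # Folder Name field (placeholder)

    # A backup can be uploaded with a single authenticated PUT request to the
    # user's WebDAV files endpoint.
    with open('/tmp/db_backup.zip', 'rb') as backup_file:
        response = requests.put(
            domain + '/remote.php/dav/files/' + user + '/' + folder
            + '/db_backup.zip',
            data=backup_file, auth=(user, password))
    response.raise_for_status()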
+
+
+
+
+
+
+
+ Store Backup to
+ Amazon S3
+
+
+
+ To create an account in Amazon S3, go to
+ https://portal.aws.amazon.com/billing/signup#/start/email, enter your
+ email address and sign up.
+
+
+
+
+ After you have created the account, you need to get the Access Key and
+ Secret Key. To get these, go to the account's Security credentials
+ page, open Access keys and create a new access key; from there you will
+ get the Access Key and Secret Key.
+

+
+
+

+
+
+

+
+
+
+ Next, you need to create a bucket in Amazon S3. To do that, go to
+ Services in the top right, then go to Storage and S3 as shown in the
+ screenshot.
+

+
+
+ To create a bucket, click on the Create bucket button.
+

+
+
+ When creating a bucket, check the rules for naming the bucket and
+ select the region as well. After that, click on the Create bucket
+ button at the bottom of the page.
+

+
+ You will see the bucket as shown in the screenshot.
+

+
+
+ Select the Backup Destination as Amazon S3. Enter the Amazon S3 Access
+ Key, Amazon S3 Secret Key, Bucket Name (the bucket you created in
+ Amazon S3) and File Name (the folder where you want to store your
+ backup inside the Amazon S3 bucket). Use the Test Connection button to
+ check whether the connection is successful.
+

+
+
+ Every day, a Scheduled Action will take place to store a backup on the
+ Amazon S3 server. The backup will be stored under the folder name
+ provided in the File Name field in Odoo.
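The corresponding upload uses the declared boto3 dependency. A minimal sketch with placeholder credentials and names (not necessarily the module's exact code):

    import boto3

    # Placeholder credentials; in the module they come from the backup
    # configuration record.
    s3_client = boto3.client('s3',
                             aws_access_key_id='YOUR_ACCESS_KEY',
                             aws_secret_access_key='YOUR_SECRET_KEY')

    # Upload the generated backup into <Bucket Name>/<File Name>/ on Amazon S3.
    s3_client.upload_file('/tmp/db_backup.zip',
                          'your-bucket-name',
                          'your-folder/db_backup.zip')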
+

+

+
+
+
-
-
@@ -701,5 +1055,3 @@
-
-
diff --git a/auto_database_backup/views/db_backup_configure_views.xml b/auto_database_backup/views/db_backup_configure_views.xml
index 3520f2603..55e7c3d14 100644
--- a/auto_database_backup/views/db_backup_configure_views.xml
+++ b/auto_database_backup/views/db_backup_configure_views.xml
@@ -19,6 +19,16 @@
db.backup.configure
diff --git a/auto_database_backup/wizard/__init__.py b/auto_database_backup/wizard/__init__.py
new file mode 100644
index 000000000..75b6f75ea
--- /dev/null
+++ b/auto_database_backup/wizard/__init__.py
@@ -0,0 +1,22 @@
+# -*- coding: utf-8 -*-
+###############################################################################
+#
+# Cybrosys Technologies Pvt. Ltd.
+#
+# Copyright (C) 2023-TODAY Cybrosys Technologies()
+# Author: Cybrosys Techno Solutions()
+#
+# You can modify it under the terms of the GNU LESSER
+# GENERAL PUBLIC LICENSE (LGPL v3), Version 3.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU LESSER GENERAL PUBLIC LICENSE (LGPL v3) for more details.
+#
+# You should have received a copy of the GNU LESSER GENERAL PUBLIC LICENSE
+# (LGPL v3) along with this program.
+# If not, see .
+#
+###############################################################################
+from . import dropbox_auth_code
diff --git a/auto_database_backup/wizard/dropbox_auth_code.py b/auto_database_backup/wizard/dropbox_auth_code.py
new file mode 100644
index 000000000..ce788d50e
--- /dev/null
+++ b/auto_database_backup/wizard/dropbox_auth_code.py
@@ -0,0 +1,60 @@
+# -*- coding: utf-8 -*-
+###############################################################################
+#
+# Cybrosys Technologies Pvt. Ltd.
+#
+# Copyright (C) 2023-TODAY Cybrosys Technologies()
+# Author: Cybrosys Techno Solutions()
+#
+# You can modify it under the terms of the GNU LESSER
+# GENERAL PUBLIC LICENSE (LGPL v3), Version 3.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU LESSER GENERAL PUBLIC LICENSE (LGPL v3) for more details.
+#
+# You should have received a copy of the GNU LESSER GENERAL PUBLIC LICENSE
+# (LGPL v3) along with this program.
+# If not, see .
+#
+###############################################################################
+from odoo import api, fields, models
+
+
+class DropboxAuthCode(models.TransientModel):
+ """Model for managing the Dropbox authentication code and URL.
+ Methods:
+ _compute_dropbox_auth_url:
+ Compute method to retrieve the Dropbox authentication URL
+ action_setup_dropbox_token:
+ Method to set up the Dropbox token using the provided authorization
+ code.
+ """
+ _name = 'dropbox.auth.code'
+ _description = 'Dropbox Authentication Code Wizard'
+
+ dropbox_authorization_code = fields.Char(string='Authorization Code',
+ help='Authorization code received '
+ 'from Dropbox')
+ dropbox_auth_url = fields.Char(string='Dropbox Authentication URL',
+ compute='_compute_dropbox_auth_url',
+ help='URL for Dropbox authentication')
+
+ @api.depends('dropbox_authorization_code')
+ def _compute_dropbox_auth_url(self):
+ """Compute method to retrieve the Dropbox authentication URL"""
+ backup_config = self.env['db.backup.configure'].browse(
+ self.env.context.get('active_id'))
+ dropbox_auth_url = backup_config.get_dropbox_auth_url()
+ for rec in self:
+ rec.dropbox_auth_url = dropbox_auth_url
+
+ def action_setup_dropbox_token(self):
+ """Method to set up the Dropbox token using the provided authorization
+ code."""
+ backup_config = self.env['db.backup.configure'].browse(
+ self.env.context.get('active_id'))
+ backup_config.hide_active = True
+ backup_config.active = True
+ backup_config.set_dropbox_refresh_token(self.dropbox_authorization_code)
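For context, the offline-token flow this wizard drives can be reproduced standalone with the dropbox SDK; get_dropbox_auth_url() and set_dropbox_refresh_token() are implemented on db.backup.configure outside this diff, so the sketch below (with placeholder keys) only illustrates the underlying mechanism:

    from dropbox import DropboxOAuth2FlowNoRedirect

    app_key = 'your-app-key'        # placeholder
    app_secret = 'your-app-secret'  # placeholder

    # token_access_type='offline' asks Dropbox for a long-lived refresh token
    # in addition to the short-lived access token.
    flow = DropboxOAuth2FlowNoRedirect(app_key, consumer_secret=app_secret,
                                       token_access_type='offline')
    print('Authorize here:', flow.start())           # URL shown by the wizard
    auth_code = input('Paste the authorization code: ').strip()
    result = flow.finish(auth_code)                   # exchange code for tokens
    print('Refresh token:', result.refresh_token)     # stored for later backups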
diff --git a/auto_database_backup/wizard/dropbox_auth_code_views.xml b/auto_database_backup/wizard/dropbox_auth_code_views.xml
new file mode 100644
index 000000000..b1533b7c6
--- /dev/null
+++ b/auto_database_backup/wizard/dropbox_auth_code_views.xml
@@ -0,0 +1,27 @@
+
+
+
+
+ dropbox.auth.code.view.form
+ dropbox.auth.code
+
+
+
+
+