@@ -78,6 +78,11 @@ class DbBackupConfigure(models.Model):
         ('next_cloud', 'Next Cloud'),
         ('amazon_s3', 'Amazon S3')
     ], string='Backup Destination', help='Destination of the backup')
+    backup_frequency = fields.Selection([
+        ('daily', 'Daily'),
+        ('weekly', 'Weekly'),
+        ('monthly', 'Monthly'),
+    ], default='daily', string='Backup Frequency', help='Frequency of Backup Scheduling')
     backup_path = fields.Char(string='Backup Path',
                               help='Local storage directory path')
     sftp_host = fields.Char(string='SFTP Host', help='SFTP host details')
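
The `backup_frequency` selection added above is what the per-frequency scheduler filters on (see the `_schedule_auto_backup(self, frequency)` change further down). A minimal, hypothetical sketch of a configuration record that uses the new field; every value other than `backup_frequency` is a placeholder and not taken from this diff:

    # Illustrative only: the new field is set like any other value at create time.
    env['db.backup.configure'].create({
        'db_name': 'production_db',          # database to back up (existing field)
        'backup_destination': 'local',       # one of the destinations listed above
        'backup_path': '/var/backups/odoo',  # placeholder local directory
        'backup_frequency': 'weekly',        # field introduced by this hunk
    })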
@@ -215,8 +220,7 @@ class DbBackupConfigure(models.Model):
                 )
                 response = s3_client.head_bucket(Bucket=self.bucket_file_name)
                 if response['ResponseMetadata']['HTTPStatusCode'] == 200:
-                    self.active = True
-                    self.hide_active = True
+                    self.active = self.hide_active = True
                     return {
                         'type': 'ir.actions.client',
                         'tag': 'display_notification',
@ -232,8 +236,7 @@ class DbBackupConfigure(models.Model):
_ ( " Bucket not found. Please check the bucket name and "
_ ( " Bucket not found. Please check the bucket name and "
" try again. " ) )
" try again. " ) )
except Exception :
except Exception :
self . active = False
self . active = self . hide_active = False
self . hide_active = False
return {
return {
' type ' : ' ir.actions.client ' ,
' type ' : ' ir.actions.client ' ,
' tag ' : ' display_notification ' ,
' tag ' : ' display_notification ' ,
@@ -255,11 +258,9 @@ class DbBackupConfigure(models.Model):
             ncx = NextCloud(self.domain,
                             auth=HTTPBasicAuth(self.next_cloud_user_name,
                                                self.next_cloud_password))
             data = ncx.list_folders('/').__dict__
             if data['raw'].status_code == 207:
-                self.active = True
-                self.hide_active = True
+                self.active = self.hide_active = True
                 return {
                     'type': 'ir.actions.client',
                     'tag': 'display_notification',
@ -271,8 +272,7 @@ class DbBackupConfigure(models.Model):
}
}
}
}
else :
else :
self . active = False
self . active = self . hide_active = False
self . hide_active = False
return {
return {
' type ' : ' ir.actions.client ' ,
' type ' : ' ir.actions.client ' ,
' tag ' : ' display_notification ' ,
' tag ' : ' display_notification ' ,
@@ -285,8 +285,7 @@ class DbBackupConfigure(models.Model):
                     }
                 }
         except Exception:
-            self.active = False
-            self.hide_active = False
+            self.active = self.hide_active = False
             return {
                 'type': 'ir.actions.client',
                 'tag': 'display_notification',
@@ -303,10 +302,8 @@ class DbBackupConfigure(models.Model):
     def _compute_redirect_uri(self):
         """Compute the redirect URI for onedrive and Google Drive"""
         for rec in self:
-            base_url = request.env['ir.config_parameter'].get_param(
-                'web.base.url')
-            rec.onedrive_redirect_uri = base_url + '/onedrive/authentication'
-            rec.gdrive_redirect_uri = base_url + '/google_drive/authentication'
+            rec.onedrive_redirect_uri = self.get_base_url() + '/onedrive/authentication'
+            rec.gdrive_redirect_uri = self.get_base_url() + '/google_drive/authentication'

     @api.depends('onedrive_access_token', 'onedrive_refresh_token')
     def _compute_is_onedrive_token_generated(self):
@@ -340,57 +337,50 @@ class DbBackupConfigure(models.Model):
             }

     def action_get_onedrive_auth_code(self):
-        """Generate onedrive authorization code"""
-        AUTHORITY = \
-            'https://login.microsoftonline.com/common/oauth2/v2.0/authorize'
-        action = self.env["ir.actions.act_window"].sudo()._for_xml_id(
-            "auto_database_backup.db_backup_configure_action")
-        base_url = request.env['ir.config_parameter'].get_param('web.base.url')
-        url_return = base_url + \
-            '/web#id=%d&action=%d&view_type=form&model=%s' % (
-                self.id, action['id'], 'db.backup.configure')
+        """Generate OneDrive authorization code."""
+        AUTHORITY = 'https://login.microsoftonline.com/common/oauth2/v2.0/authorize'
+        base_url = self.get_base_url()
+        action_id = self.env["ir.actions.act_window"].sudo()._for_xml_id(
+            "auto_database_backup.db_backup_configure_action")['id']
+        url_return = f"{base_url}/web#id={self.id}&action={action_id}&view_type=form&model=db.backup.configure"
         state = {
             'backup_config_id': self.id,
-            'url_return': url_return
-        }
-        encoded_params = urls.url_encode({
+            'url_return': url_return}
+        params = {
             'response_type': 'code',
             'client_id': self.onedrive_client_key,
             'state': json.dumps(state),
             'scope': ONEDRIVE_SCOPE,
-            'redirect_uri': base_url + '/onedrive/authentication',
+            'redirect_uri': f"{base_url}/onedrive/authentication",
             'prompt': 'consent',
             'access_type': 'offline'
-        })
-        auth_url = "%s?%s" % (AUTHORITY, encoded_params)
+        }
         return {
             'type': 'ir.actions.act_url',
             'target': 'self',
-            'url': auth_url,
+            'url': f"{AUTHORITY}?{urls.url_encode(params)}",
         }

     def action_get_gdrive_auth_code(self):
-        """Generate google drive authorization code"""
-        action = self.env["ir.actions.act_window"].sudo()._for_xml_id(
-            "auto_database_backup.db_backup_configure_action")
-        base_url = request.env['ir.config_parameter'].get_param('web.base.url')
-        url_return = base_url + \
-            '/web#id=%d&action=%d&view_type=form&model=%s' % (
-                self.id, action['id'], 'db.backup.configure')
+        """Generate Google Drive authorization code."""
+        GOOGLE_AUTH_ENDPOINT = "https://accounts.google.com/o/oauth2/auth"
+        base_url = self.get_base_url()
+        action_id = self.env["ir.actions.act_window"].sudo()._for_xml_id(
+            "auto_database_backup.db_backup_configure_action")['id']
+        url_return = f"{base_url}/web#id={self.id}&action={action_id}&view_type=form&model=db.backup.configure"
        state = {
             'backup_config_id': self.id,
-            'url_return': url_return
-        }
-        encoded_params = urls.url_encode({
+            'url_return': url_return}
+        params = {
             'response_type': 'code',
             'client_id': self.gdrive_client_key,
             'scope': 'https://www.googleapis.com/auth/drive https://www.googleapis.com/auth/drive.file',
-            'redirect_uri': base_url + '/google_drive/authentication',
+            'redirect_uri': f"{base_url}/google_drive/authentication",
             'access_type': 'offline',
             'state': json.dumps(state),
             'approval_prompt': 'force',
-        })
-        auth_url = "%s?%s" % (GOOGLE_TOKEN_ENDPOINT, encoded_params)
+        }
+        auth_url = f"{GOOGLE_AUTH_ENDPOINT}?{urls.url_encode(params)}"
         return {
             'type': 'ir.actions.act_url',
             'target': 'self',
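
For orientation, the refactor above drops the pre-encoded intermediate variable and builds a plain `params` dict that is percent-encoded inline in the returned URL. A rough sketch of the resulting URL, assuming `urls` is the usual `from werkzeug import urls` import (the hunk only shows the `urls.url_encode` call) and using placeholder values:

    from werkzeug import urls  # assumed import

    AUTHORITY = 'https://login.microsoftonline.com/common/oauth2/v2.0/authorize'
    params = {
        'response_type': 'code',
        'client_id': 'YOUR_CLIENT_ID',                                       # placeholder
        'redirect_uri': 'https://example.odoo.com/onedrive/authentication',  # placeholder
    }
    auth_url = f"{AUTHORITY}?{urls.url_encode(params)}"
    # Produces something like:
    # https://login.microsoftonline.com/common/oauth2/v2.0/authorize?response_type=code&client_id=YOUR_CLIENT_ID&redirect_uri=https%3A%2F%2Fexample.odoo.com%2Fonedrive%2Fauthentication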
@@ -398,70 +388,64 @@ class DbBackupConfigure(models.Model):
         }

     def generate_onedrive_refresh_token(self):
-        """Generate onedrive access token from refresh token if expired"""
-        base_url = request.env['ir.config_parameter'].get_param('web.base.url')
+        """Generate OneDrive access token from refresh token if expired."""
+        base_url = self.get_base_url()
+        token_url = "https://login.microsoftonline.com/common/oauth2/v2.0/token"
         headers = {"Content-type": "application/x-www-form-urlencoded"}
         data = {
             'client_id': self.onedrive_client_key,
             'client_secret': self.onedrive_client_secret,
             'scope': ONEDRIVE_SCOPE,
             'grant_type': "refresh_token",
-            'redirect_uri': base_url + '/onedrive/authentication',
-            'refresh_token': self.onedrive_refresh_token
+            'redirect_uri': f"{base_url}/onedrive/authentication",
+            'refresh_token': self.onedrive_refresh_token,
         }
         try:
-            res = requests.post(
-                "https://login.microsoftonline.com/common/oauth2/v2.0/token",
-                data=data, headers=headers)
+            res = requests.post(token_url, data=data, headers=headers)
             res.raise_for_status()
-            response = res.content and res.json() or {}
+            response = res.json() if res.ok else {}
             if response:
-                expires_in = response.get('expires_in')
+                expires_in = response.get('expires_in', 0)
                 self.write({
                     'onedrive_access_token': response.get('access_token'),
                     'onedrive_refresh_token': response.get('refresh_token'),
-                    'onedrive_token_validity': fields.Datetime.now() + timedelta(
-                        seconds=expires_in) if expires_in else False,
+                    'onedrive_token_validity': fields.Datetime.now() + timedelta(seconds=expires_in),
                 })
         except requests.HTTPError as error:
-            _logger.exception("Bad microsoft onedrive request : %s !",
-                              error.response.content)
+            _logger.exception("Bad Microsoft OneDrive request: %s", error.response.content)
             raise error

     def get_onedrive_tokens(self, authorize_code):
-        """Generate onedrive tokens from authorization code."""
-        headers = {"content-type": "application/x-www-form-urlencoded"}
-        base_url = request.env['ir.config_parameter'].get_param('web.base.url')
+        """Generate OneDrive tokens from authorization code."""
+        headers = {"Content-Type": "application/x-www-form-urlencoded"}
+        base_url = self.get_base_url()
         data = {
             'code': authorize_code,
             'client_id': self.onedrive_client_key,
             'client_secret': self.onedrive_client_secret,
             'grant_type': 'authorization_code',
             'scope': ONEDRIVE_SCOPE,
-            'redirect_uri': base_url + '/onedrive/authentication'
+            'redirect_uri': f"{base_url}/onedrive/authentication",
         }
+        token_url = "https://login.microsoftonline.com/common/oauth2/v2.0/token"
         try:
-            res = requests.post(
-                "https://login.microsoftonline.com/common/oauth2/v2.0/token",
-                data=data, headers=headers)
+            res = requests.post(token_url, data=data, headers=headers)
             res.raise_for_status()
-            response = res.content and res.json() or {}
+            response = res.json() if res.ok else {}
             if response:
-                expires_in = response.get('expires_in')
+                expires_in = response.get('expires_in', 0)
                 self.write({
                     'onedrive_access_token': response.get('access_token'),
                     'onedrive_refresh_token': response.get('refresh_token'),
-                    'onedrive_token_validity': fields.Datetime.now() + timedelta(
-                        seconds=expires_in) if expires_in else False,
+                    'onedrive_token_validity': fields.Datetime.now() + timedelta(seconds=expires_in),
                 })
         except requests.HTTPError as error:
-            _logger.exception("Bad microsoft onedrive request : %s !",
-                              error.response.content)
+            _logger.exception("Bad Microsoft OneDrive request: %s", error.response.content)
             raise error

     def generate_gdrive_refresh_token(self):
-        """Generate Google Drive access token from refresh token if expired"""
-        headers = {"content-type": "application/x-www-form-urlencoded"}
+        """Generate Google Drive access token from refresh token if expired."""
+        headers = {"Content-Type": "application/x-www-form-urlencoded"}
         data = {
             'refresh_token': self.gdrive_refresh_token,
             'client_id': self.gdrive_client_key,
@@ -469,46 +453,44 @@ class DbBackupConfigure(models.Model):
             'grant_type': 'refresh_token',
         }
         try:
-            res = requests.post(GOOGLE_TOKEN_ENDPOINT, data=data,
-                                headers=headers)
+            res = requests.post(GOOGLE_TOKEN_ENDPOINT, data=data, headers=headers)
             res.raise_for_status()
-            response = res.content and res.json() or {}
+            response = res.json() if res.ok else {}
             if response:
-                expires_in = response.get('expires_in')
+                expires_in = response.get('expires_in', 0)
                 self.write({
                     'gdrive_access_token': response.get('access_token'),
-                    'gdrive_token_validity': fields.Datetime.now() + timedelta(
-                        seconds=expires_in) if expires_in else False,
+                    'gdrive_token_validity': fields.Datetime.now() + timedelta(seconds=expires_in),
                 })
         except requests.HTTPError as error:
-            error_key = error.response.json().get("error", "nc")
+            error_key = error.response.json().get("error", "unknown error")
             error_msg = _(
                 "An error occurred while generating the token. Your "
                 "authorization code may be invalid or has already expired [%s]."
-                "You should check your Client ID and secret on the Google APIs "
-                "plateform or try to stop and restart your calendar "
-                "synchronisation.",
-                error_key)
+                "Please check your Client ID and secret on the Google APIs "
+                "platform or try stopping and restarting your calendar "
+                "synchronization.",
+                error_key
+            )
             raise UserError(error_msg)

     def get_gdrive_tokens(self, authorize_code):
-        """Generate onedrive tokens from authorization code."""
+        """Generate Google Drive tokens from authorization code."""
         base_url = request.env['ir.config_parameter'].get_param('web.base.url')
-        headers = {"content-type": "application/x-www-form-urlencoded"}
+        headers = {"Content-Type": "application/x-www-form-urlencoded"}
         data = {
             'code': authorize_code,
             'client_id': self.gdrive_client_key,
             'client_secret': self.gdrive_client_secret,
             'grant_type': 'authorization_code',
-            'redirect_uri': base_url + '/google_drive/authentication'
+            'redirect_uri': f"{base_url}/google_drive/authentication",
         }
         try:
-            res = requests.post(GOOGLE_TOKEN_ENDPOINT, params=data,
-                                headers=headers)
+            res = requests.post(GOOGLE_TOKEN_ENDPOINT, data=data, headers=headers)
             res.raise_for_status()
-            response = res.content and res.json() or {}
+            response = res.json() if res.ok else {}
             if response:
-                expires_in = response.get('expires_in')
+                expires_in = response.get('expires_in', 0)
                 self.write({
                     'gdrive_access_token': response.get('access_token'),
                     'gdrive_refresh_token': response.get('refresh_token'),
@@ -517,8 +499,9 @@ class DbBackupConfigure(models.Model):
                 })
         except requests.HTTPError:
             error_msg = _(
-                "Something went wrong during your token generation. Maybe your "
-                "Authorization Code is invalid")
+                "Something went wrong during your token generation. "
+                "Your authorization code may be invalid."
+            )
             raise UserError(error_msg)

     def get_dropbox_auth_url(self):
@@ -573,8 +556,7 @@ class DbBackupConfigure(models.Model):
                 ftp_server.quit()
             except Exception as e:
                 raise UserError(_("FTP Exception: %s", e))
-            self.hide_active = True
-            self.active = True
+            self.active = self.hide_active = True
             return {
                 'type': 'ir.actions.client',
                 'tag': 'display_notification',
@@ -596,20 +578,18 @@ class DbBackupConfigure(models.Model):
         if self.backup_destination == 'local':
             self.hide_active = True

-    def _schedule_auto_backup(self):
+    def _schedule_auto_backup(self, frequency):
         """Function for generating and storing backup.
         Database backup for all the active records in backup configuration
         model will be created."""
-        records = self.search([])
+        records = self.search([('backup_frequency', '=', frequency)])
         mail_template_success = self.env.ref(
             'auto_database_backup.mail_template_data_db_backup_successful')
         mail_template_failed = self.env.ref(
             'auto_database_backup.mail_template_data_db_backup_failed')
         for rec in records:
-            backup_time = fields.datetime.utcnow().strftime(
-                "%Y-%m-%d_%H-%M-%S")
-            backup_filename = "%s_%s.%s" % (
-                rec.db_name, backup_time, rec.backup_format)
+            backup_time = fields.datetime.utcnow().strftime("%Y-%m-%d_%H-%M-%S")
+            backup_filename = f"{rec.db_name}_{backup_time}.{rec.backup_format}"
             rec.backup_filename = backup_filename
             # Local backup
             if rec.backup_destination == 'local':
@@ -619,7 +599,7 @@ class DbBackupConfigure(models.Model):
                     backup_file = os.path.join(rec.backup_path,
                                                backup_filename)
                     f = open(backup_file, "wb")
-                    self.dump_data(rec.db_name, f, rec.backup_format)
+                    self.dump_data(rec.db_name, f, rec.backup_format, rec.backup_frequency)
                     f.close()
                     # Remove older backups
                     if rec.auto_remove:
@@ -631,8 +611,7 @@ class DbBackupConfigure(models.Model):
                             if backup_duration.days >= rec.days_to_remove:
                                 os.remove(file)
                     if rec.notify_user:
-                        mail_template_success.send_mail(rec.id,
-                                                        force_send=True)
+                        mail_template_success.send_mail(rec.id, force_send=True)
                 except Exception as e:
                     rec.generated_exception = e
                     _logger.info('FTP Exception: %s', e)
@@ -654,7 +633,7 @@ class DbBackupConfigure(models.Model):
                         ftp_server.cwd(rec.ftp_path)
                         with open(temp.name, "wb+") as tmp:
                             self.dump_data(rec.db_name, tmp,
-                                           rec.backup_format)
+                                           rec.backup_format, rec.backup_frequency)
                         ftp_server.storbinary('STOR %s' % backup_filename,
                                               open(temp.name, "rb"))
                         if rec.auto_remove:
@@ -689,7 +668,7 @@ class DbBackupConfigure(models.Model):
                     temp = tempfile.NamedTemporaryFile(
                         suffix='.%s' % rec.backup_format)
                     with open(temp.name, "wb+") as tmp:
-                        self.dump_data(rec.db_name, tmp, rec.backup_format)
+                        self.dump_data(rec.db_name, tmp, rec.backup_format, rec.backup_frequency)
                     try:
                         sftp.chdir(rec.sftp_path)
                     except IOError as e:
@@ -726,7 +705,7 @@ class DbBackupConfigure(models.Model):
                         suffix='.%s' % rec.backup_format)
                     with open(temp.name, "wb+") as tmp:
                         self.dump_data(rec.db_name, tmp,
-                                       rec.backup_format)
+                                       rec.backup_format, rec.backup_frequency)
                     try:
                         headers = {
                             "Authorization": "Bearer %s" % rec.gdrive_access_token}
@@ -749,8 +728,7 @@ class DbBackupConfigure(models.Model):
                             files_req = requests.get(
                                 "https://www.googleapis.com/drive/v3/files?q=%s" % query,
                                 headers=headers)
-                            files = files_req.json()['files']
-                            for file in files:
+                            for file in files_req.json()['files']:
                                 file_date_req = requests.get(
                                     "https://www.googleapis.com/drive/v3/files/%s?fields=createdTime" %
                                     file['id'], headers=headers)
@@ -787,7 +765,7 @@ class DbBackupConfigure(models.Model):
                         suffix='.%s' % rec.backup_format)
                     with open(temp.name, "wb+") as tmp:
                         self.dump_data(rec.db_name, tmp,
-                                       rec.backup_format)
+                                       rec.backup_format, rec.backup_frequency)
                     try:
                         dbx = dropbox.Dropbox(
                             app_key=rec.dropbox_client_key,
@@ -821,7 +799,7 @@ class DbBackupConfigure(models.Model):
                     temp = tempfile.NamedTemporaryFile(
                         suffix='.%s' % rec.backup_format)
                     with open(temp.name, "wb+") as tmp:
-                        self.dump_data(rec.db_name, tmp, rec.backup_format)
+                        self.dump_data(rec.db_name, tmp, rec.backup_format, rec.backup_frequency)
                     headers = {
                         'Authorization': 'Bearer %s' % rec.onedrive_access_token,
                         'Content-Type': 'application/json'}
@@ -916,22 +894,22 @@ class DbBackupConfigure(models.Model):
                                 suffix='.%s' % rec.backup_format)
                             with open(temp.name, "wb+") as tmp:
                                 self.dump_data(rec.db_name, tmp,
-                                               rec.backup_format)
-                            backup_file_path = temp.name
+                                               rec.backup_format, rec.backup_frequency)
+                            backup_file_name = temp.name
                             remote_file_path = f"/{folder_name}/{rec.db_name}_" \
                                                f"{backup_time}.{rec.backup_format}"
-                            nc.put_file(remote_file_path, backup_file_path)
+                            nc.put_file(remote_file_path, backup_file_name)
                         else:
                             # Dump the database to a temporary file
                             temp = tempfile.NamedTemporaryFile(
                                 suffix='.%s' % rec.backup_format)
                             with open(temp.name, "wb+") as tmp:
                                 self.dump_data(rec.db_name, tmp,
-                                               rec.backup_format)
-                            backup_file_path = temp.name
+                                               rec.backup_format, rec.backup_frequency)
+                            backup_file_name = temp.name
                             remote_file_path = f"/{folder_name}/{rec.db_name}_" \
                                                f"{backup_time}.{rec.backup_format}"
-                            nc.put_file(remote_file_path, backup_file_path)
+                            nc.put_file(remote_file_path, backup_file_name)
                     except Exception:
                         raise ValidationError('Please check connection')
                 # Amazon S3 Backup
@@ -981,19 +959,19 @@ class DbBackupConfigure(models.Model):
                             prefixes.add(prefix)
                     # If the specified folder is present in the bucket,
                     # take a backup of the database and upload it to the
                     # S3 bucket
                     if rec.aws_folder_name in prefixes:
                         temp = tempfile.NamedTemporaryFile(
                             suffix='.%s' % rec.backup_format)
                         with open(temp.name, "wb+") as tmp:
                             self.dump_data(rec.db_name, tmp,
-                                           rec.backup_format)
-                        backup_file_path = temp.name
+                                           rec.backup_format, rec.backup_frequency)
+                        backup_file_name = temp.name
                         remote_file_path = f"{rec.aws_folder_name}/{rec.db_name}_" \
                                            f"{backup_time}.{rec.backup_format}"
                         s3.Object(rec.bucket_file_name,
                                   remote_file_path).upload_file(
-                            backup_file_path)
+                            backup_file_name)
                         # If notify_user is enabled, send an email to the
                         # user notifying them about the successful backup
                         if rec.notify_user:
@@ -1009,16 +987,14 @@ class DbBackupConfigure(models.Model):
                 if rec.notify_user:
                     mail_template_failed.send_mail(rec.id, force_send=True)

-    def dump_data(self, db_name, stream, backup_format):
+    def dump_data(self, db_name, stream, backup_format, backup_frequency):
         """Dump database `db` into file-like object `stream` if stream is None
         return a file object with the dump."""
-        cron_user_id = self.env.ref('auto_database_backup.ir_cron_auto_db_backup').user_id.id
+        cron_user_id = self.env.ref(f'auto_database_backup.ir_cron_auto_db_backup_{backup_frequency}').user_id.id
         if cron_user_id != self.env.user.id:
             _logger.error(
                 'Unauthorized database operation. Backups should only be available from the cron job.')
             raise ValidationError("Unauthorized database operation. Backups should only be available from the cron job.")
         _logger.info('DUMP DB: %s format %s', db_name, backup_format)
         cmd = [find_pg_tool('pg_dump'), '--no-owner', db_name]
         env = exec_pg_environ()
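
Context for the guard in `dump_data` above: the lookup `env.ref(f'auto_database_backup.ir_cron_auto_db_backup_{backup_frequency}')` implies the module's single `ir_cron_auto_db_backup` scheduled action is being split into one cron per frequency. Those `ir.cron` records are not part of this diff; the sketch below only shows how each of them would be expected to invoke the scheduler, with the XML IDs inferred from the `env.ref` pattern:

    # Hypothetical cron code (`model` is the variable Odoo exposes to ir.cron code).
    # One scheduled action per frequency, matching the inferred XML IDs:
    model._schedule_auto_backup('daily')    # ir_cron_auto_db_backup_daily (assumed)
    model._schedule_auto_backup('weekly')   # ir_cron_auto_db_backup_weekly (assumed)
    model._schedule_auto_backup('monthly')  # ir_cron_auto_db_backup_monthly (assumed)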