Merge branch 'v2.0.3' into stable

This commit is contained in:
Usman Nasir
2020-12-08 22:56:08 +05:00
13 changed files with 771 additions and 241 deletions

View File

@@ -42,6 +42,17 @@ class cyberPanel:
from random import randint
externalApp = "".join(re.findall("[a-zA-Z]+", domainName))[:5] + str(randint(1000, 9999))
phpSelection = 'PHP ' + php
try:
counter = 0
_externalApp=externalApp
while True:
tWeb = Websites.objects.get(externalApp=externalApp)
externalApp = '%s%s' % (_externalApp, str(counter))
counter = counter + 1
except BaseException as msg:
logger.writeforCLI(str(msg), "Error", stack()[0][3])
time.sleep(2)
result = virtualHostUtilities.createVirtualHost(domainName, email, phpSelection, externalApp, ssl, dkim,
openBasedir, owner, package, 0)
@@ -1556,4 +1567,4 @@ def main():
if __name__ == "__main__":
main()
main()

View File

@@ -961,9 +961,12 @@ class CloudManager:
def forceRunAWSBackup(self, request):
    # Trigger an immediate run of the AWS backup plan named in
    # self.data['planName'] on behalf of the panel admin.
    # NOTE(review): this handler both starts an in-process S3Backups thread
    # AND spawns the IncScheduler CLI for the same plan — the two paths
    # appear to duplicate the work; confirm both are intended.
    try:
        # Impersonate the admin so downstream ACL checks pass.
        request.session['userID'] = self.admin.pk
        s3 = S3Backups(request, self.data, 'forceRunAWSBackup')
        s3.start()
        execPath = "/usr/local/CyberCP/bin/python /usr/local/CyberCP/plogical/IncScheduler.py forceRunAWSBackup --planName %s" % (self.data['planName'])
        ProcessUtilities.popenExecutioner(execPath)
        return self.ajaxPre(1, None)
    except BaseException as msg:
        return self.ajaxPre(0, str(msg))
@@ -1638,3 +1641,153 @@ class CloudManager:
except BaseException as msg:
return self.ajaxPre(0, str(msg))
def SubmitCloudBackup(self):
    """Launch a background cloud backup for the domain in self.data.

    Writes an initial status file for progress polling, coerces the
    data/emails/databases flags to numeric strings ('0' when absent or
    malformed), spawns backupUtilities.py in CloudBackup mode, and returns
    the status-file path to the client.
    """
    try:
        tempStatusPath = "/home/cyberpanel/" + str(randint(1000, 9999))
        with open(tempStatusPath, 'w') as statusFile:
            statusFile.write('Starting..,0')

        # Each optional flag falls back to '0' if missing or non-numeric.
        flags = {}
        for flagName in ('data', 'emails', 'databases'):
            try:
                flags[flagName] = str(int(self.data[flagName]))
            except:
                flags[flagName] = '0'

        execPath = "/usr/local/CyberCP/bin/python " + virtualHostUtilities.cyberPanel + "/plogical/backupUtilities.py"
        execPath = execPath + " CloudBackup --backupDomain %s --data %s --emails %s --databases %s --tempStoragePath %s" % (
            self.data['domain'], flags['data'], flags['emails'], flags['databases'], tempStatusPath)
        ProcessUtilities.popenExecutioner(execPath)

        return HttpResponse(json.dumps({'status': 1, 'tempStatusPath': tempStatusPath}))
    except BaseException as msg:
        return self.ajaxPre(0, str(msg))
def getCurrentCloudBackups(self):
    """Return a JSON listing of on-disk cloud backups for a domain.

    Reads self.data['domainName'], lists its backup directory (newest
    first), and returns entries carrying a 1-based id, file name and size
    in MBs. The list is serialised into the 'data' field as a JSON string,
    which is what the frontend expects.
    """
    try:
        backupDomain = self.data['domainName']
        backupsPath = '/home/cyberpanel/backups/%s/' % (backupDomain)
        try:
            backups = os.listdir(backupsPath)
            backups.reverse()
        except:
            # Missing directory simply means no backups yet.
            backups = []

        entries = []
        for position, fileName in enumerate(backups, start=1):
            sizeMB = str(int(int(os.path.getsize('%s/%s' % (backupsPath, fileName))) / int(1048576)))
            entries.append(json.dumps({
                'id': position,
                'file': fileName,
                'size': '%s MBs' % (sizeMB),
            }))
        json_data = '[' + ','.join(entries) + ']'

        final_json = json.dumps({'status': 1, 'fetchStatus': 1, 'error_message': "None", "data": json_data})
        return HttpResponse(final_json)
    except BaseException as msg:
        final_dic = {'status': 0, 'fetchStatus': 0, 'error_message': str(msg)}
        final_json = json.dumps(final_dic)
        return HttpResponse(final_json)
def fetchCloudBackupSettings(self):
    """Return cloud-backup throttle settings (nice, cpu, time) as JSON.

    Values come from the CloudBackup config file when it exists, otherwise
    from the defaults declared on backupUtilities.
    """
    try:
        from plogical.backupUtilities import backupUtilities
        if os.path.exists(backupUtilities.CloudBackupConfigPath):
            result = json.loads(open(backupUtilities.CloudBackupConfigPath, 'r').read())
            self.nice, self.cpu, self.time = result['nice'], result['cpu'], result['time']
        else:
            self.nice = backupUtilities.NiceDefault
            self.cpu = backupUtilities.CPUDefault
            self.time = backupUtilities.time
        return HttpResponse(json.dumps({'status': 1, 'nice': self.nice, 'cpu': self.cpu, 'time': self.time}))
    except BaseException as msg:
        return HttpResponse(json.dumps({'status': 0, 'abort': 0, 'installationProgress': "0", 'errorMessage': str(msg)}))
def saveCloudBackupSettings(self):
    """Persist self.data verbatim as the cloud-backup settings JSON file."""
    try:
        from plogical.backupUtilities import backupUtilities
        with open(backupUtilities.CloudBackupConfigPath, 'w') as settingsFile:
            settingsFile.write(json.dumps(self.data))
        return HttpResponse(json.dumps({'status': 1}))
    except BaseException as msg:
        return HttpResponse(json.dumps({'status': 0, 'abort': 0, 'installationProgress': "0", 'errorMessage': str(msg)}))
def deleteCloudBackup(self):
    """Delete a single cloud backup archive for the given domain.

    Reads 'domainName' and 'backupFile' from self.data and removes the
    matching archive under /home/cyberpanel/backups/<domain>/.

    Security: both values originate from the request and are interpolated
    into a shell command, so reject separators, parent references and
    whitespace before building the command — otherwise a crafted
    backupFile such as '../../etc/passwd' or 'x; rm -rf /' could escape
    the backup directory.
    """
    try:
        backupDomain = self.data['domainName']
        backupFile = self.data['backupFile']
        # Keep the rm strictly inside the domain's backup directory.
        for value in (backupDomain, backupFile):
            if not value or '/' in value or '..' in value or any(ch.isspace() for ch in value):
                return HttpResponse(json.dumps(
                    {'status': 0, 'fetchStatus': 0, 'error_message': 'Invalid file name.'}))
        backupsPathComplete = '/home/cyberpanel/backups/%s/%s' % (backupDomain, backupFile)
        command = 'rm -f %s' % (backupsPathComplete)
        ProcessUtilities.executioner(command)
        final_json = json.dumps({'status': 1, 'fetchStatus': 1, 'error_message': "None"})
        return HttpResponse(final_json)
    except BaseException as msg:
        final_dic = {'status': 0, 'fetchStatus': 0, 'error_message': str(msg)}
        final_json = json.dumps(final_dic)
        return HttpResponse(final_json)
def SubmitCloudBackupRestore(self):
    """Kick off an asynchronous restore of a cloud backup archive.

    Creates a status file for progress polling, spawns backupUtilities.py
    in SubmitCloudBackupRestore mode for the requested domain/backup file,
    and returns the status-file path to the client.
    """
    try:
        tempStatusPath = "/home/cyberpanel/" + str(randint(1000, 9999))
        with open(tempStatusPath, 'w') as statusFile:
            statusFile.write('Starting..,0')

        execPath = "/usr/local/CyberCP/bin/python " + virtualHostUtilities.cyberPanel + "/plogical/backupUtilities.py"
        execPath = execPath + " SubmitCloudBackupRestore --backupDomain %s --backupFile %s --tempStoragePath %s" % (
            self.data['domain'], self.data['backupFile'], tempStatusPath)
        ProcessUtilities.popenExecutioner(execPath)

        return HttpResponse(json.dumps({'status': 1, 'tempStatusPath': tempStatusPath}))
    except BaseException as msg:
        return self.ajaxPre(0, str(msg))

View File

@@ -51,6 +51,18 @@ def router(request):
return cm.ReadReportDNS()
elif controller == 'ResetDNSConfigurations':
return cm.ResetDNSConfigurations()
elif controller == 'SubmitCloudBackup':
return cm.SubmitCloudBackup()
elif controller == 'getCurrentCloudBackups':
return cm.getCurrentCloudBackups()
elif controller == 'fetchCloudBackupSettings':
return cm.fetchCloudBackupSettings()
elif controller == 'saveCloudBackupSettings':
return cm.saveCloudBackupSettings()
elif controller == 'deleteCloudBackup':
return cm.deleteCloudBackup()
elif controller == 'SubmitCloudBackupRestore':
return cm.SubmitCloudBackupRestore()
elif controller == 'fetchWebsites':
return cm.fetchWebsites()
elif controller == 'fetchWebsiteDataJSON':

View File

@@ -478,6 +478,7 @@ EOF
fi
ln -s /usr/bin/pip3 /usr/bin/pip
pip install virtualenv==16.7.9
#pip install virtualenv
check_return
fi
@@ -499,6 +500,7 @@ EOF
check_return
pip install virtualenv==16.7.9
#pip install virtualenv
check_return
fi

View File

@@ -276,9 +276,9 @@ class InstallCyberPanel:
if self.remotemysql == 'OFF':
if self.distro == ubuntu:
passwordCMD = "use mysql;GRANT ALL PRIVILEGES ON *.* TO 'root'@'localhost' IDENTIFIED BY '%s';UPDATE user SET plugin='' WHERE User='root';flush privileges;" % (InstallCyberPanel.mysql_Root_password)
passwordCMD = "use mysql;DROP DATABASE IF EXISTS test;DELETE FROM mysql.db WHERE Db='test' OR Db='test\\_%%';GRANT ALL PRIVILEGES ON *.* TO 'root'@'localhost' IDENTIFIED BY '%s';UPDATE user SET plugin='' WHERE User='root';flush privileges;" % (InstallCyberPanel.mysql_Root_password)
else:
passwordCMD = "use mysql;GRANT ALL PRIVILEGES ON *.* TO 'root'@'localhost' IDENTIFIED BY '%s';flush privileges;" % (InstallCyberPanel.mysql_Root_password)
passwordCMD = "use mysql;DROP DATABASE IF EXISTS test;DELETE FROM mysql.db WHERE Db='test' OR Db='test\\_%%';GRANT ALL PRIVILEGES ON *.* TO 'root'@'localhost' IDENTIFIED BY '%s';flush privileges;" % (InstallCyberPanel.mysql_Root_password)
command = 'mysql -u root -e "' + passwordCMD + '"'

View File

@@ -18,11 +18,16 @@ from googleapiclient.discovery import build
from googleapiclient.http import MediaFileUpload
from plogical.backupSchedule import backupSchedule
import requests
from websiteFunctions.models import NormalBackupJobs, NormalBackupSites, NormalBackupDests, NormalBackupJobLogs
from websiteFunctions.models import NormalBackupJobs, NormalBackupJobLogs
from boto3.s3.transfer import TransferConfig
try:
from s3Backups.models import BackupPlan, BackupLogs
import boto3
from plogical.virtualHostUtilities import virtualHostUtilities
from plogical.mailUtilities import mailUtilities
from plogical.CyberCPLogFileWriter import CyberCPLogFileWriter as logging
from plogical.processUtilities import ProcessUtilities
except:
pass
@@ -589,19 +594,141 @@ Automatic backup failed for %s on %s.
backupjob.config = json.dumps(jobConfig)
backupjob.save()
@staticmethod
def fetchAWSKeys(path='/home/cyberpanel/.aws'):
    """Read AWS credentials from an aws-cli style credentials file.

    Parses ``<path>/credentials`` as ``key = value`` lines instead of
    relying on fixed line numbers and fixed spacing, so comments, blank
    lines, extra whitespace or reordered keys no longer break parsing.
    Falls back to the legacy positional format when the expected keys are
    absent.

    :param path: directory holding the ``credentials`` file; defaults to
                 the CyberPanel location (new parameter, backward
                 compatible — existing callers pass no argument).
    :returns: (aws_access_key_id, aws_secret_access_key, region)
    """
    credentials = path + '/credentials'
    values = {}
    with open(credentials, 'r') as credsFile:
        for line in credsFile:
            if '=' in line:
                key, _, value = line.partition('=')
                values[key.strip()] = value.strip()
    try:
        return values['aws_access_key_id'], values['aws_secret_access_key'], values['region']
    except KeyError:
        # Legacy fixed-position format: value is the third space-separated
        # token on lines 2-4 of the file.
        data = open(credentials, 'r').readlines()
        aws_access_key_id = data[1].split(' ')[2].strip(' ').strip('\n')
        aws_secret_access_key = data[2].split(' ')[2].strip(' ').strip('\n')
        region = data[3].split(' ')[2].strip(' ').strip('\n')
        return aws_access_key_id, aws_secret_access_key, region
@staticmethod
def forceRunAWSBackup(planName):
    """Run the named backup plan now: back up each site in the plan,
    upload each archive to the plan's S3 bucket, then prune bucket
    objects older than the plan's retention window.

    :param planName: BackupPlan.name to execute.
    """
    try:
        plan = BackupPlan.objects.get(name=planName)
        bucketName = plan.bucket.strip('\n').strip(' ')
        runTime = time.strftime("%d:%m:%Y")

        # Multipart-upload tuning for large archives.
        config = TransferConfig(multipart_threshold=1024 * 25, max_concurrency=10,
                                multipart_chunksize=1024 * 25, use_threads=True)

        aws_access_key_id, aws_secret_access_key, region = IncScheduler.fetchAWSKeys()

        client = boto3.client(
            's3',
            aws_access_key_id = aws_access_key_id,
            aws_secret_access_key = aws_secret_access_key,
            #region_name=region
        )

        ##

        BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
                   msg='Starting backup process..').save()

        # Plan config holds the data/emails/databases flags chosen for this plan.
        PlanConfig = json.loads(plan.config)

        for items in plan.websitesinplan_set.all():
            from plogical.backupUtilities import backupUtilities
            tempStatusPath = "/home/cyberpanel/" + str(randint(1000, 9999))
            extraArgs = {}
            extraArgs['domain'] = items.domain
            extraArgs['tempStatusPath'] = tempStatusPath
            extraArgs['data'] = int(PlanConfig['data'])
            extraArgs['emails'] = int(PlanConfig['emails'])
            extraArgs['databases'] = int(PlanConfig['databases'])
            bu = backupUtilities(extraArgs)
            result, fileName = bu.CloudBackups()
            # Status file carries the human-readable outcome for logging.
            finalResult = open(tempStatusPath, 'r').read()

            if result == 1:
                # Object key layout: <plan>/<dd:mm:yyyy>/<archive-name>
                key = plan.name + '/' + runTime + '/' + fileName.split('/')[-1]
                client.upload_file(
                    fileName,
                    bucketName,
                    key,
                    Config=config
                )
                # Remove the local archive once uploaded.
                command = 'rm -f ' + fileName
                ProcessUtilities.executioner(command)
                BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
                           msg='Backup successful for ' + items.domain + '.').save()
            else:
                BackupLogs(owner=plan, level='ERROR', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
                           msg='Backup failed for ' + items.domain + '. Error: ' + finalResult).save()

        plan.lastRun = runTime
        plan.save()

        BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
                   msg='Backup Process Finished.').save()

        ###

        # Retention sweep: delete objects older than `retention` days.
        # NOTE(review): this inspects every object in the bucket, not just
        # this plan's prefix — confirm that is intended.
        s3 = boto3.resource(
            's3',
            aws_access_key_id=aws_access_key_id,
            aws_secret_access_key=aws_secret_access_key,
            region_name=region
        )

        ts = time.time()
        retentionSeconds = 86400 * plan.retention

        for bucket in s3.buckets.all():
            if bucket.name == plan.bucket:
                for file in bucket.objects.all():
                    result = float(ts - file.last_modified.timestamp())
                    if result > retentionSeconds:
                        file.delete()
                break

    except BaseException as msg:
        logging.writeToFile(str(msg) + ' [S3Backups.runBackupPlan]')
        plan = BackupPlan.objects.get(name=planName)
        BackupLogs(owner=plan, timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), level='ERROR', msg=str(msg)).save()
@staticmethod
def runAWSBackups(freq):
    """Run every backup plan matching the scheduler tick frequency.

    :param freq: frequency tag handed in by the scheduler CLI.
    """
    try:
        for plan in BackupPlan.objects.all():
            # NOTE(review): the chained comparison means this fires only
            # when BOTH plan.freq and freq equal 'Daily' — plans with any
            # other frequency are never run by this path. Confirm whether
            # `plan.freq == freq` was intended instead.
            if plan.freq == 'Daily' == freq:
                IncScheduler.forceRunAWSBackup(plan.name)
    except BaseException as msg:
        logging.writeToFile(str(msg) + ' [S3Backups.runAWSBackups]')
def main():
    """CLI entry point: dispatch the requested scheduler function.

    'forceRunAWSBackup' runs a single plan and returns immediately; any
    other value is treated as a frequency tag and handed to each of the
    scheduled maintenance tasks in order.
    """
    parser = argparse.ArgumentParser(description='CyberPanel Installer')
    parser.add_argument('function', help='Specific a function to call!')
    parser.add_argument('--planName', help='Plan name for AWS!')
    args = parser.parse_args()

    # Single-plan mode: run it and stop.
    if args.function == 'forceRunAWSBackup':
        IncScheduler.forceRunAWSBackup(args.planName)
        return 0

    # Frequency mode: fan the tag out to every scheduled task.
    for task in (IncScheduler.startBackup, IncScheduler.runGoogleDriveBackups, IncScheduler.git):
        task(args.function)
    IncScheduler.checkDiskUsage()
    for task in (IncScheduler.startNormalBackups, IncScheduler.runAWSBackups):
        task(args.function)
if __name__ == "__main__":

View File

@@ -54,10 +54,16 @@ BUILD = 3
class backupUtilities:
Server_root = "/usr/local/lsws"
completeKeyPath = "/home/cyberpanel/.ssh"
destinationsPath = "/home/cyberpanel/destinations"
licenseKey = '/usr/local/lsws/conf/license.key'
NiceDefault = '10'
CPUDefault = '1000'
CloudBackupConfigPath = '/home/cyberpanel/CloudBackup.json'
time = 10
def __init__(self, extraArgs):
self.extraArgs = extraArgs
@staticmethod
def prepareBackupMeta(backupDomain, backupName, tempStoragePath, backupPath):
@@ -1246,6 +1252,372 @@ class backupUtilities:
logging.CyberCPLogFileWriter.writeToFile(str(msg) + " [getAliases]")
print(0)
### Cloud Backup functions
def CheckIfSleepNeeded(self):
    """Throttle the backup: block while system CPU usage exceeds self.cpu.

    Polls psutil's CPU percentage, logging each reading. Whenever the
    reading is above the configured ceiling, writes a status message and
    sleeps self.time seconds before re-checking; returns as soon as usage
    is at or below the ceiling.
    """
    import psutil
    import time
    while True:
        logging.CyberCPLogFileWriter.writeToFile('Current CPU percent %s.' % (int(psutil.cpu_percent(interval=None))))
        if int(psutil.cpu_percent(interval=None)) <= int(self.cpu):
            return
        logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
                                                  'Current CPU usage exceeds %s percent. Backup process will sleep for %s seconds..,0' % (self.cpu, str(self.time)))
        time.sleep(self.time)
def BackupData(self):
    """Copy the site's /home/<domain> tree into the backup staging dir and
    write data.json describing any child domains.

    :returns: (1, None) on success, (0, error-message) on failure.
    """
    try:
        ### Creating the dir to store backups
        self.BackupDataPath = '%s/data' % (self.BackupPath)
        command = 'mkdir -p %s' % (self.BackupDataPath)
        ProcessUtilities.executioner(command)

        self.DataPath = '/home/%s' % (self.extraArgs['domain'])

        ## Backing up data
        # Respect the CPU throttle before the heavy recursive copy.
        self.CheckIfSleepNeeded()
        command = 'nice -n %s cp -Rp %s %s' % (self.nice, self.DataPath, self.BackupDataPath)
        ProcessUtilities.executioner(command)

        ## Store child domains if any in json format
        DataJson = {}
        childs = []
        import json

        for child in self.website.childdomains_set.all():
            childs.append({'domain': child.domain, 'path': child.path, 'php': child.phpSelection})

        DataJson['ChildDomains'] = childs

        DataJsonPath = '%s/%s' % (self.BackupPath, 'data.json')
        writeToFile = open(DataJsonPath, 'w')
        writeToFile.write(json.dumps(DataJson))
        writeToFile.close()

        return 1, None
    except BaseException as msg:
        return 0, str(msg)
def BackupEmails(self):
    """Copy the domain's /home/vmail mailboxes into the staging dir and
    write emails.json listing the accounts (address + stored password).

    :returns: (1, None) on success or when the domain has no mail domain
              configured (nothing to back up); (0, error-message) on failure.
    """
    try:
        from mailServer.models import Domains, EUsers

        try:
            emailDomain = Domains.objects.get(domainOwner=self.website)
        except:
            # No mail domain for this site — treat as success with no data.
            return 1, None

        ### Creating the dir to store backups
        self.BackupDataPath = '%s/emails' % (self.BackupPath)
        command = 'mkdir -p %s' % (self.BackupDataPath)
        ProcessUtilities.executioner(command)

        self.DataPath = '/home/vmail/%s' % (self.extraArgs['domain'])

        ## Backing up data
        self.CheckIfSleepNeeded()
        command = 'nice -n %s cp -Rp %s %s' % (self.nice, self.DataPath, self.BackupDataPath)
        ProcessUtilities.executioner(command)

        ## Store email accounts in json format
        DataJson = {}
        emailsList = []
        import json

        for emails in emailDomain.eusers_set.all():
            emailsList.append({'email': emails.email, 'password': emails.password})

        DataJson['emails'] = emailsList

        DataJsonPath = '%s/%s' % (self.BackupPath, 'emails.json')
        writeToFile = open(DataJsonPath, 'w')
        writeToFile.write(json.dumps(DataJson))
        writeToFile.close()

        return 1, None
    except BaseException as msg:
        return 0, str(msg)
def BackupDatabases(self):
    """Dump every database attached to the site into the staging dir and
    write databases.json with (name, user, password) for each.

    Databases whose owning DB user cannot be resolved are skipped with a
    logged warning rather than failing the whole backup.

    :returns: (1, None) on success, (0, error-message) on failure.
    """
    try:
        ### Creating the dir to store backups
        self.BackupDataPath = '%s/databases' % (self.BackupPath)
        command = 'mkdir -p %s' % (self.BackupDataPath)
        ProcessUtilities.executioner(command)

        ## Backing up data
        self.CheckIfSleepNeeded()

        DataJson = {}
        databases = []
        import json

        for items in self.website.databases_set.all():
            # Resolve the DB user record, falling through several lookups:
            # exact get -> filter -> fetch user from MySQL by db name.
            try:
                dbuser = DBUsers.objects.get(user=items.dbUser)
                userToTry = items.dbUser
            except:
                try:
                    dbusers = DBUsers.objects.all().filter(user=items.dbUser)
                    userToTry = items.dbUser
                    for it in dbusers:
                        dbuser = it
                        break
                    # NOTE(review): userToTry is unconditionally overwritten
                    # here even when the filter above found a user — confirm
                    # this is intended.
                    userToTry = mysqlUtilities.mysqlUtilities.fetchuser(items.dbName)
                    if userToTry == 0 or userToTry == 1:
                        # fetchuser signals failure with 0/1 — skip this db.
                        continue
                    try:
                        dbuser = DBUsers.objects.get(user=userToTry)
                    except:
                        try:
                            dbusers = DBUsers.objects.all().filter(user=userToTry)
                            for it in dbusers:
                                dbuser = it
                                break
                        except BaseException as msg:
                            logging.CyberCPLogFileWriter.writeToFile(
                                'While creating backup for %s, we failed to backup database %s. Error message: %s' % (
                                    self.website.domain, items.dbName, str(msg)))
                            continue
                except BaseException as msg:
                    logging.CyberCPLogFileWriter.writeToFile(
                        'While creating backup for %s, we failed to backup database %s. Error message: %s' % (
                            self.website.domain, items.dbName, str(msg)))
                    continue

            databases.append({'databaseName': str(items.dbName), 'databaseUser': str(userToTry), 'password': str(dbuser.password)})
            self.CheckIfSleepNeeded()
            mysqlUtilities.mysqlUtilities.createDatabaseBackup(items.dbName, self.BackupDataPath)

        DataJson['databases'] = databases

        DataJsonPath = '%s/%s' % (self.BackupPath, 'databases.json')
        writeToFile = open(DataJsonPath, 'w')
        writeToFile.write(json.dumps(DataJson))
        writeToFile.close()

        return 1, None
    except BaseException as msg:
        return 0, str(msg)
def CloudBackups(self):
    """Generate a local cloud backup archive for self.extraArgs['domain'].

    Depending on the data/emails/databases flags in extraArgs, stages the
    website files, mailboxes and database dumps under a timestamped
    directory, then tars it into <dir>.tar.gz owned by cyberpanel with
    mode 600. Progress is reported through the temp status file.

    Fixes: the final permission command was ``chmod 600:600`` — the
    colon owner:group form belongs to chown and is invalid chmod syntax,
    so tightening the archive's permissions silently failed. Also the
    databases progress value regressed to 30 after emails reported 60.

    :returns: (1, <archive path>) on success, (0, <staging dir>) on failure.
    """
    import json

    # Load throttle settings (nice level, CPU ceiling, sleep time) from
    # the CloudBackup config file, falling back to class defaults.
    if os.path.exists(backupUtilities.CloudBackupConfigPath):
        result = json.loads(open(backupUtilities.CloudBackupConfigPath, 'r').read())
        self.nice = result['nice']
        self.cpu = result['cpu']
        self.time = int(result['time'])
    else:
        self.nice = backupUtilities.NiceDefault
        self.cpu = backupUtilities.CPUDefault
        self.time = int(backupUtilities.time)

    self.BackupPath = '/home/cyberpanel/backups/%s/backup-' % (self.extraArgs['domain']) + self.extraArgs['domain'] + "-" + time.strftime("%m.%d.%Y_%H-%M-%S")
    self.website = Websites.objects.get(domain=self.extraArgs['domain'])

    command = 'mkdir -p %s' % (self.BackupPath)
    ProcessUtilities.executioner(command)

    logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
                                              'Starting backup generation..,0')

    if self.extraArgs['data']:
        logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
                                                  'Generating backup for your data,5')
        result = self.BackupData()
        if result[0] == 0:
            logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
                                                      'Failed to generate backups for data. Error: %s. [404], 0' % (result[1]))
            return 0, self.BackupPath
        logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
                                                  'Data backup successfully generated,30')

    if self.extraArgs['emails']:
        logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
                                                  'Generating backup for your emails,40')
        result = self.BackupEmails()
        if result[0] == 0:
            logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
                                                      'Failed to generate backups for emails. Error: %s. [404], 0' % (result[1]))
            return 0, self.BackupPath
        logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
                                                  'Emails backup successfully generated,60')

    if self.extraArgs['databases']:
        logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
                                                  'Generating backup for your databases,60')
        result = self.BackupDatabases()
        if result[0] == 0:
            logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
                                                      'Failed to generate backups for databases. Error: %s. [404], 0' % (result[1]))
            return 0, self.BackupPath
        # Progress fixed from 30 to 70 so the bar never moves backwards.
        logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
                                                  'Databases backups successfully generated,70')

    logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
                                              'Creating final archive..,80')

    command = 'nice -n %s tar czf %s.tar.gz -C %s %s' % (self.nice, self.BackupPath, '/home/cyberpanel/backups/%s' % (self.extraArgs['domain']), self.BackupPath.split('/')[-1])
    ProcessUtilities.executioner(command)

    # Staging directory is no longer needed once the archive exists.
    command = 'rm -rf %s' % (self.BackupPath)
    ProcessUtilities.executioner(command)

    finalPath = '%s.tar.gz' % (self.BackupPath)
    command = 'chown cyberpanel:cyberpanel %s' % (finalPath)
    ProcessUtilities.executioner(command)

    # Was 'chmod 600:600 %s' — invalid mode syntax, command always failed.
    command = 'chmod 600 %s' % (finalPath)
    ProcessUtilities.executioner(command)

    logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'], 'Completed [200].')
    return 1, self.BackupPath + '.tar.gz'
## Restore functions
def SubmitCloudBackupRestore(self):
    """Restore a cloud backup archive for self.extraArgs['domain'].

    Extracts the archive, then restores in order: website data (recreating
    missing child domains), email accounts/mailboxes, and databases.
    Progress is written to the temp status file.
    """
    import json

    # Throttle settings, mirroring CloudBackups().
    if os.path.exists(backupUtilities.CloudBackupConfigPath):
        result = json.loads(open(backupUtilities.CloudBackupConfigPath, 'r').read())
        self.nice = result['nice']
        self.cpu = result['cpu']
        self.time = int(result['time'])
    else:
        self.nice = backupUtilities.NiceDefault
        self.cpu = backupUtilities.CPUDefault
        self.time = int(backupUtilities.time)

    self.BackupPath = '/home/cyberpanel/backups/%s/%s' % (self.extraArgs['domain'], self.extraArgs['backupFile'])
    self.website = Websites.objects.get(domain=self.extraArgs['domain'])

    logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
                                              'Extracting main archive..,0')

    command = 'tar -xf %s -C %s' % (self.BackupPath, '/home/cyberpanel/backups/%s/' % (self.extraArgs['domain']))
    ProcessUtilities.executioner(command)

    logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
                                              'Main Archive extracted,20')

    # NOTE(review): rstrip('.tar.gz') strips any trailing run of the
    # characters . t a r g z — not the literal suffix. A stem ending in
    # one of those characters would be over-trimmed; consider
    # backupFile[:-len('.tar.gz')] instead.
    self.extractedPath = '/home/cyberpanel/backups/%s/%s' % (self.extraArgs['domain'], self.extraArgs['backupFile'].rstrip('.tar.gz'))
    self.dataPath = '%s/data' % (self.extractedPath)
    self.databasesPath = '%s/databases' % (self.extractedPath)
    self.emailsPath = '%s/emails' % (self.extractedPath)

    ## Data
    if os.path.exists(self.dataPath):
        try:
            logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
                                                      'Creating child domains if any..,20')
            childDomains = json.loads(open('%s/data.json' % (self.extractedPath), 'r').read())['ChildDomains']
            for child in childDomains:
                try:
                    # Skip child domains that already exist.
                    ch = ChildDomains.objects.get(domain=child['domain'])
                except:
                    logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
                                                              'Creating %s,20' % (child['domain']))
                    virtualHostUtilities.createDomain(self.website.domain, child['domain'], child['php'], child['path'], 1, 0, 0,
                                                     self.website.admin.userName, 0, "/home/cyberpanel/" + str(randint(1000, 9999)))
        except BaseException as msg:
            logging.CyberCPLogFileWriter.writeToFile('%s [SubmitCloudBackupRestore:1533]' % str(msg))

        # Replace the live document root with the backed-up copy.
        homePath = '/home/%s' % (self.website.domain)
        command = 'rm -rf %s' % (homePath)
        ProcessUtilities.executioner(command)

        command = 'mv %s/%s %s' % (self.dataPath, self.website.domain, '/home')
        ProcessUtilities.executioner(command)

        from filemanager.filemanager import FileManager
        fm = FileManager(None, None)
        fm.fixPermissions(self.website.domain)

    ## Emails
    if os.path.exists(self.emailsPath):
        try:
            logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
                                                      'Creating emails if any..,40')
            emails = json.loads(open('%s/emails.json' % (self.extractedPath), 'r').read())['emails']
            from mailServer.models import Domains, EUsers
            emailDomain = Domains.objects.get(domain=self.website.domain)
            for email in emails:
                try:
                    # Skip accounts that already exist.
                    eu = EUsers.objects.get(emailOwner=emailDomain, email=email['email'])
                except:
                    logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
                                                              'Creating %s,40' % (email['email']))
                    emailAcct = EUsers(emailOwner=emailDomain, email=email['email'], password=email['password'])
                    emailAcct.mail = 'maildir:/home/vmail/%s/%s/Maildir' % (self.website.domain, email['email'].split('@')[0])
                    emailAcct.save()

            # Replace the live mailbox tree with the backed-up copy.
            EmailsHome = '/home/vmail/%s' % (self.website.domain)
            command = 'rm -rf %s' % (EmailsHome)
            ProcessUtilities.executioner(command)

            command = 'mv %s/%s /home/vmail' % (self.emailsPath, self.website.domain)
            ProcessUtilities.executioner(command)

            command = 'chown -R vmail:vmail %s' % (EmailsHome)
            ProcessUtilities.executioner(command)
        except BaseException as msg:
            logging.CyberCPLogFileWriter.writeToFile('%s [SubmitCloudBackupRestore:1533]' % str(msg))

    ## Databases
    logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
                                              'Restoring databases if any..,70')
    # NOTE(review): unlike the data/emails sections this one has no
    # os.path.exists guard or try/except — a missing databases.json aborts
    # the whole restore. Confirm that is intended.
    databases = json.loads(open('%s/databases.json' % (self.extractedPath), 'r').read())['databases']
    for db in databases:
        logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'],
                                                  'Restoring database %s..,70' % (db['databaseName']))
        # Drop and recreate before importing the dump.
        mysqlUtilities.mysqlUtilities.submitDBDeletion(db['databaseName'])
        if mysqlUtilities.mysqlUtilities.createDatabase(db['databaseName'], db['databaseUser'], "cyberpanel") == 0:
            raise BaseException("Failed to create Databases!")
        newDB = Databases(website=self.website, dbName=db['databaseName'], dbUser=db['databaseUser'])
        newDB.save()
        mysqlUtilities.mysqlUtilities.restoreDatabaseBackup(db['databaseName'], self.databasesPath, db['password'])

    # Clean up the extracted staging tree.
    command = 'rm -rf %s' % (self.extractedPath)
    ProcessUtilities.executioner(command)

    logging.CyberCPLogFileWriter.statusWriter(self.extraArgs['tempStatusPath'], 'Completed [200].')
### Cloud Backup functions ends
def submitBackupCreation(tempStoragePath, backupName, backupPath, backupDomain):
try:
@@ -1386,7 +1758,6 @@ def submitBackupCreation(tempStoragePath, backupName, backupPath, backupDomain):
logging.CyberCPLogFileWriter.writeToFile(
str(msg) + " [submitBackupCreation]")
def cancelBackupCreation(backupCancellationDomain, fileName):
try:
@@ -1421,7 +1792,6 @@ def cancelBackupCreation(backupCancellationDomain, fileName):
str(msg) + " [cancelBackupCreation]")
print("0," + str(msg))
def submitRestore(backupFile, dir):
try:
@@ -1435,7 +1805,6 @@ def submitRestore(backupFile, dir):
str(msg) + " [cancelBackupCreation]")
print("0," + str(msg))
def submitDestinationCreation(ipAddress, password, port='22', user='root'):
setupKeys = backupUtilities.setupSSHKeys(ipAddress, password, port, user)
@@ -1445,7 +1814,6 @@ def submitDestinationCreation(ipAddress, password, port='22', user='root'):
else:
print(setupKeys[1])
def getConnectionStatus(ipAddress):
try:
checkCon = backupUtilities.checkConnection(ipAddress)
@@ -1460,8 +1828,8 @@ def getConnectionStatus(ipAddress):
def main():
parser = argparse.ArgumentParser(description='CyberPanel Installer')
parser.add_argument('function', help='Specific a function to call!')
parser = argparse.ArgumentParser(description='CyberPanel Backup Generator')
parser.add_argument('function', help='Specify a function to call!')
parser.add_argument('--tempStoragePath', help='')
parser.add_argument('--backupName', help='!')
parser.add_argument('--backupPath', help='')
@@ -1485,6 +1853,13 @@ def main():
parser.add_argument('--backupFile', help='')
parser.add_argument('--dir', help='')
### For Cloud Backups
parser.add_argument('--data', help='')
parser.add_argument('--emails', help='')
parser.add_argument('--databases', help='')
args = parser.parse_args()
if args.function == "submitBackupCreation":
@@ -1501,6 +1876,22 @@ def main():
backupUtilities.startBackup(args.tempStoragePath, args.backupName, args.backupPath, args.metaPath)
elif args.function == "BackupRoot":
backupUtilities.BackupRoot(args.tempStoragePath, args.backupName, args.backupPath, args.metaPath)
elif args.function == 'CloudBackup':
extraArgs = {}
extraArgs['domain'] = args.backupDomain
extraArgs['tempStatusPath'] = args.tempStoragePath
extraArgs['data'] = int(args.data)
extraArgs['emails'] = int(args.emails)
extraArgs['databases'] = int(args.databases)
bu = backupUtilities(extraArgs)
bu.CloudBackups()
elif args.function == 'SubmitCloudBackupRestore':
extraArgs = {}
extraArgs['domain'] = args.backupDomain
extraArgs['tempStatusPath'] = args.tempStoragePath
extraArgs['backupFile'] = args.backupFile
bu = backupUtilities(extraArgs)
bu.SubmitCloudBackupRestore()
if __name__ == "__main__":

View File

@@ -228,6 +228,7 @@ class mysqlUtilities:
cnfContent = """[mysqldump]
user=root
password=%s
max_allowed_packet=1024M
[mysql]
user=root
password=%s
@@ -288,6 +289,7 @@ password=%s
cnfContent = """[mysqldump]
user=root
password=%s
max_allowed_packet=1024M
[mysql]
user=root
password=%s

View File

@@ -765,6 +765,11 @@ imap_folder_list_limit = 0
except:
pass
try:
cursor.execute('ALTER TABLE s3Backups_backupplan ADD config longtext')
except:
pass
query = """CREATE TABLE `s3Backups_websitesinplan` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`domain` varchar(100) NOT NULL,

View File

@@ -5,8 +5,8 @@ Babel==2.8.0
backports.ssl-match-hostname==3.7.0.1
bcrypt==3.2.0
beautifulsoup4==4.9.3
boto3==1.16.13
botocore==1.19.13
boto3==1.16.30
botocore==1.19.30
cachetools==4.1.1
certifi==2020.11.8
cffi==1.14.3

View File

@@ -13,6 +13,7 @@ class BackupPlan(models.Model):
retention = models.IntegerField()
type = models.CharField(max_length=5, default='AWS')
lastRun = models.CharField(max_length=50, default='0:0:0')
config = models.TextField()
class WebsitesInPlan(models.Model):
owner = models.ForeignKey(BackupPlan,on_delete=models.CASCADE)

View File

@@ -97,43 +97,6 @@ class S3Backups(multi.Thread):
json_data = json_data + ']'
return json_data
def setupCron(self):
try:
command = "sudo cat /etc/crontab"
crons = ProcessUtilities.outputExecutioner(command).splitlines()
cronCheck = 1
for items in crons:
if items.find('s3Backups.py') > -1:
cronCheck = 0
tempPath = '/home/cyberpanel/' + str(randint(10000, 99999))
writeToFile = open(tempPath, "w")
for items in crons:
writeToFile.writelines(items + "\n")
if cronCheck:
writeToFile.writelines("0 0 * * * root /usr/local/CyberCP/bin/python /usr/local/CyberCP/s3Backups/s3Backups.py > /home/cyberpanel/error-logs.txt 2>&1\n")
writeToFile.close()
command = 'sudo mv ' + tempPath + " /etc/crontab"
ProcessUtilities.executioner(command)
command = 'chown root:root /etc/crontab'
ProcessUtilities.executioner(command)
try:
os.remove(tempPath)
except:
pass
except BaseException as msg:
logging.writeToFile(str(msg) + " [S3Backups.setupCron]")
def connectAccount(self):
try:
@@ -160,8 +123,6 @@ class S3Backups(multi.Thread):
##
self.setupCron()
return proc.ajax(1, None)
except BaseException as msg:
@@ -176,8 +137,9 @@ class S3Backups(multi.Thread):
aws_access_key_id = data[1].split(' ')[2].strip(' ').strip('\n')
aws_secret_access_key = data[2].split(' ')[2].strip(' ').strip('\n')
region = data[3].split(' ')[2].strip(' ').strip('\n')
return aws_access_key_id, aws_secret_access_key
return aws_access_key_id, aws_secret_access_key, region
def fetchBuckets(self):
try:
@@ -191,12 +153,13 @@ class S3Backups(multi.Thread):
return proc.ajax(0, 'Only administrators can use AWS S3 Backups.')
aws_access_key_id, aws_secret_access_key = self.fetchAWSKeys()
aws_access_key_id, aws_secret_access_key, region = self.fetchAWSKeys()
s3 = boto3.resource(
's3',
aws_access_key_id = aws_access_key_id,
aws_secret_access_key = aws_secret_access_key
aws_secret_access_key = aws_secret_access_key,
region_name=region
)
json_data = "["
@@ -232,8 +195,28 @@ class S3Backups(multi.Thread):
admin = Administrator.objects.get(pk=userID)
## What to backup
WTB = {}
try:
WTB['data'] = int(self.data['data'])
except:
WTB['data'] = 0
try:
WTB['databases'] = int(self.data['databases'])
except:
WTB['databases'] = 0
try:
WTB['emails'] = int(self.data['emails'])
except:
WTB['emails'] = 0
###
newPlan = BackupPlan(owner=admin, name=self.data['planName'].replace(' ', ''), freq=self.data['frequency'],
retention=self.data['retenion'], bucket=self.data['bucketName'])
retention=self.data['retenion'], bucket=self.data['bucketName'], config=json.dumps(WTB))
newPlan.save()
for items in self.data['websitesInPlan']:
@@ -263,12 +246,16 @@ class S3Backups(multi.Thread):
checker = 0
for plan in admin.backupplan_set.all():
config = json.loads(plan.config)
dic = {
'name': plan.name,
'bucket': plan.bucket,
'freq': plan.freq,
'retention': plan.retention,
'lastRun': plan.lastRun,
'data': config['data'],
'databases': config['databases'],
'emails': config['emails'],
}
if checker == 0:
@@ -374,9 +361,28 @@ class S3Backups(multi.Thread):
changePlan = BackupPlan.objects.get(name=self.data['planName'])
## What to backup
WTB = {}
try:
WTB['data'] = int(self.data['data'])
except:
WTB['data'] = 0
try:
WTB['databases'] = int(self.data['databases'])
except:
WTB['databases'] = 0
try:
WTB['emails'] = int(self.data['emails'])
except:
WTB['emails'] = 0
changePlan.bucket = self.data['bucketName']
changePlan.freq = self.data['frequency']
changePlan.retention = self.data['retention']
changePlan.config = json.dumps(WTB)
changePlan.save()
@@ -474,94 +480,6 @@ class S3Backups(multi.Thread):
items.delete()
return 0, status
def forceRunAWSBackup(self):
    """
    Execute the S3 backup plan named in self.data['planName'].

    For every website attached to the plan: create a backup archive,
    upload it to the plan's bucket under '<plan>/<dd:mm:YYYY>/', delete
    the local archive, and record progress in BackupLogs. A bucket
    lifecycle rule is (re)applied so objects expire after the plan's
    retention period. Only administrators may trigger a run.

    Returns 0 on an authorisation failure; otherwise returns None.
    """
    try:
        plan = BackupPlan.objects.get(name=self.data['planName'])
        bucketName = plan.bucket.strip('\n').strip(' ')
        runTime = time.strftime("%d:%m:%Y")

        # fetchAWSKeys returns (key, secret, region); region_name must be
        # passed so boto3 targets the bucket's endpoint (consistent with
        # the other S3 client construction in this class).
        aws_access_key_id, aws_secret_access_key, region = self.fetchAWSKeys()

        client = boto3.client(
            's3',
            aws_access_key_id=aws_access_key_id,
            aws_secret_access_key=aws_secret_access_key,
            region_name=region
        )

        config = TransferConfig(multipart_threshold=1024 * 25, max_concurrency=10,
                                multipart_chunksize=1024 * 25, use_threads=True)

        ## Apply an expiration rule so old backups under this plan's
        ## prefix are pruned after `plan.retention` days. Failure here is
        ## logged but does not abort the backup run.
        try:
            client.put_bucket_lifecycle_configuration(
                Bucket=bucketName,  # was the boto3-docs placeholder 'string'
                LifecycleConfiguration={
                    'Rules': [
                        {
                            'Expiration': {
                                'Days': plan.retention,
                                'ExpiredObjectDeleteMarker': True
                            },
                            'ID': plan.name,
                            'Prefix': '',
                            'Filter': {
                                'Prefix': plan.name + '/',
                            },
                            'Status': 'Enabled',
                        },
                    ]
                }
            )
        except BaseException as msg:
            BackupLogs(owner=plan, timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), level='ERROR',
                       msg=str(msg)).save()

        ## Only administrators may force-run AWS backups.
        userID = self.request.session['userID']
        currentACL = ACLManager.loadedACL(userID)

        if currentACL['admin'] == 0:
            BackupLogs(owner=plan, timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), level='INFO',
                       msg='Unauthorised user tried to run AWS Backups.').save()
            return 0

        BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
                   msg='Starting backup process..').save()

        for items in plan.websitesinplan_set.all():
            result = self.createBackup(items.domain)
            if result[0]:
                # createBackup returns (ok, pathWithoutExtension).
                key = plan.name + '/' + runTime + '/' + result[1].split('/')[-1] + ".tar.gz"
                client.upload_file(
                    result[1] + ".tar.gz",
                    bucketName,
                    key,
                    Config=config,
                )
                # Remove the local archive once it is safely uploaded.
                command = 'rm -f ' + result[1] + ".tar.gz"
                ProcessUtilities.executioner(command)
                BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
                           msg='Backup successful for ' + items.domain + '.').save()
            else:
                # On failure result[1] carries the error message instead of a path.
                BackupLogs(owner=plan, level='ERROR', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
                           msg='Backup failed for ' + items.domain + '. Error: ' + result[1]).save()

        plan.lastRun = runTime
        plan.save()

        BackupLogs(owner=plan, level='INFO', timeStamp=time.strftime("%b %d %Y, %H:%M:%S"),
                   msg='Backup Process Finished.').save()
    except BaseException as msg:
        # Tag corrected: this is forceRunAWSBackup, not runBackupPlan.
        logging.writeToFile(str(msg) + ' [S3Backups.forceRunAWSBackup]')
        plan = BackupPlan.objects.get(name=self.data['planName'])
        BackupLogs(owner=plan, timeStamp=time.strftime("%b %d %Y, %H:%M:%S"), level='ERROR', msg=str(msg)).save()
def connectAccountDO(self):
try:
@@ -587,9 +505,6 @@ class S3Backups(multi.Thread):
credFile.write(self.data['credData'])
credFile.close()
##
self.setupCron()
return proc.ajax(1, None)
@@ -961,8 +876,6 @@ class S3Backups(multi.Thread):
secretKey=self.data['secretKey'])
newNode.save()
self.setupCron()
return proc.ajax(1, None)
except BaseException as msg:
@@ -1299,93 +1212,3 @@ class S3Backups(multi.Thread):
except BaseException as msg:
proc = httpProc(self.request, None, None)
return proc.ajax(0, str(msg))
def runAWSBackups(self):
    """
    Cron entry point: run every due backup plan across all three
    providers (AWS S3, DigitalOcean Spaces, MINIO) as the 'admin' user.

    A plan is due when:
      * freq == 'Daily' and it has not already run today, or
      * otherwise (weekly cadence), 6 or more days have elapsed since its
        last run — month roll-overs are approximated with 30-day months,
        exactly as the original triplicated logic did.
    """
    try:
        admin = Administrator.objects.get(userName='admin')
        self.request.session['userID'] = admin.pk

        def planIsDue(lastRun, freq):
            # lastRun is stored as "%d:%m:%Y" by the force-run methods.
            lastRunDay = lastRun.split(':')[0]
            lastRunMonth = lastRun.split(':')[1]
            today = time.strftime("%d")

            if freq == 'Daily' and lastRunDay != today:
                return True

            # Weekly check (also reached by Daily plans that already ran
            # today, matching the original control flow).
            if lastRunMonth == time.strftime("%m"):
                return int(today) - int(lastRunDay) >= 6

            # Month changed since the last run; approximate the day gap.
            return (30 - int(lastRunDay)) + int(today) >= 6

        # Same due-ness rule for all three plan models; only the model
        # and the force-run method differ.
        for plan in BackupPlan.objects.all():
            if planIsDue(plan.lastRun, plan.freq):
                self.data = {'planName': plan.name}
                self.forceRunAWSBackup()

        for plan in BackupPlanDO.objects.all():
            if planIsDue(plan.lastRun, plan.freq):
                self.data = {'planName': plan.name}
                self.forceRunAWSBackupDO()

        for plan in BackupPlanMINIO.objects.all():
            if planIsDue(plan.lastRun, plan.freq):
                self.data = {'planName': plan.name}
                self.forceRunAWSBackupMINIO()
    except BaseException as msg:
        logging.writeToFile(str(msg) + ' [S3Backups.runAWSBackups]')
def main():
    """
    Script entry point: create a random marker file under /home/cyberpanel
    and ask the local CyberPanel API to run all due cloud backups.
    """
    pathToFile = "/home/cyberpanel/" + str(randint(1000, 9999))
    # Touch (create/truncate) the marker file; its path is handed to the API.
    with open(pathToFile, "w"):
        pass
    finalData = json.dumps({'randomFile': pathToFile})
    # Local API uses a self-signed certificate, hence verify=False.
    requests.post("https://localhost:8090/api/runAWSBackups", data=finalData, verify=False)


if __name__ == "__main__":
    main()

View File

@@ -4340,6 +4340,9 @@ StrictHostKeyChecking no
command = 'mkdir -p /home/%s/.ssh/' % (domain)
ProcessUtilities.executioner(command)
command = 'chown %s:%s /home/%s/.ssh/' % (website.externalApp, website.externalApp, domain)
ProcessUtilities.executioner(command)
tempPath = "/home/cyberpanel/" + str(randint(1000, 9999))
writeToFile = open(tempPath, "w")