From ba73f1efc91c8342742bf9fd943f6fffeec15f1b Mon Sep 17 00:00:00 2001 From: usmannasir Date: Mon, 24 Nov 2025 02:07:19 +0500 Subject: [PATCH 1/9] Fix OWASP toggle interaction and prevent recursive change events Fixes issues where toggle became unresponsive and triggered recursive calls: 1. Add flags (updatingOWASPStatus, updatingComodoStatus) to prevent change event handlers from triggering when status check updates toggle state 2. Guard change event handlers to return early when flags are set 3. Set flags before updating toggle via prop('checked'), reset after 100ms 4. Use timeout delays (500ms) before status checks after install/uninstall to allow operations to complete and prevent race conditions This ensures: - Toggle responds correctly to user clicks - Status updates don't trigger unwanted installations - No recursive loops when updating toggle state - Clean separation between user actions and status updates --- firewall/static/firewall/firewall.js | 33 +++++++++++++++++++++++++--- 1 file changed, 30 insertions(+), 3 deletions(-) diff --git a/firewall/static/firewall/firewall.js b/firewall/static/firewall/firewall.js index 2b4043b64..71750d5f5 100644 --- a/firewall/static/firewall/firewall.js +++ b/firewall/static/firewall/firewall.js @@ -1226,10 +1226,17 @@ app.controller('modSecRulesPack', function ($scope, $http, $timeout, $window) { var comodoInstalled = false; var counterOWASP = 0; var counterComodo = 0; + var updatingOWASPStatus = false; + var updatingComodoStatus = false; $('#owaspInstalled').change(function () { + // Prevent triggering installation when status check updates the toggle + if (updatingOWASPStatus) { + return; + } + owaspInstalled = $(this).prop('checked'); $scope.ruleFiles = true; @@ -1246,6 +1253,11 @@ app.controller('modSecRulesPack', function ($scope, $http, $timeout, $window) { $('#comodoInstalled').change(function () { + // Prevent triggering installation when status check updates the toggle + if (updatingComodoStatus) { + return; + } + $scope.ruleFiles = true; comodoInstalled = $(this).prop('checked'); @@ -1291,6 +1303,10 @@ app.controller('modSecRulesPack', function ($scope, $http, $timeout, $window) { if (updateToggle === true) { + // Set flags to prevent change event from triggering installation + updatingOWASPStatus = true; + updatingComodoStatus = true; + if (response.data.owaspInstalled === 1) { $('#owaspInstalled').prop('checked', true); $scope.owaspDisable = false; @@ -1305,6 +1321,13 @@ app.controller('modSecRulesPack', function ($scope, $http, $timeout, $window) { $('#comodoInstalled').prop('checked', false); $scope.comodoDisable = true; } + + // Reset flags after toggle update + $timeout(function() { + updatingOWASPStatus = false; + updatingComodoStatus = false; + }, 100); + } else { if (response.data.owaspInstalled === 1) { @@ -1366,8 +1389,10 @@ app.controller('modSecRulesPack', function ($scope, $http, $timeout, $window) { $scope.installationFailed = true; $scope.installationSuccess = false; - // Update toggle state immediately to reflect installation result - getOWASPAndComodoStatus(true); + // Update toggle state after a short delay to reflect installation result + $timeout(function() { + getOWASPAndComodoStatus(true); + }, 500); } else { $scope.modsecLoading = true; @@ -1382,7 +1407,9 @@ app.controller('modSecRulesPack', function ($scope, $http, $timeout, $window) { $scope.errorMessage = response.data.error_message; // Update toggle to reflect failed installation (will show OFF) - getOWASPAndComodoStatus(true); + $timeout(function() { + 
getOWASPAndComodoStatus(true); + }, 500); } } From 54262e6a52435b07c63f22600273c62b547a6e2e Mon Sep 17 00:00:00 2001 From: usmannasir Date: Mon, 24 Nov 2025 02:07:19 +0500 Subject: [PATCH 2/9] Fix OWASP toggle interaction and prevent recursive change events Fixes issues where toggle became unresponsive and triggered recursive calls: 1. Add flags (updatingOWASPStatus, updatingComodoStatus) to prevent change event handlers from triggering when status check updates toggle state 2. Guard change event handlers to return early when flags are set 3. IMPORTANT: Still increment counters when returning early to maintain correct counter state for subsequent user clicks 4. Set flags before updating toggle via prop('checked'), reset after 100ms 5. Use timeout delays (500ms) before status checks after install/uninstall to allow operations to complete and prevent race conditions This ensures: - Toggle responds correctly to user clicks on first click - Status updates don't trigger unwanted installations - Counter state is maintained even when skipping automatic updates - No recursive loops when updating toggle state --- firewall/static/firewall/firewall.js | 35 +++++++++++++++++++++++++--- 1 file changed, 32 insertions(+), 3 deletions(-) diff --git a/firewall/static/firewall/firewall.js b/firewall/static/firewall/firewall.js index 2b4043b64..467b01ebd 100644 --- a/firewall/static/firewall/firewall.js +++ b/firewall/static/firewall/firewall.js @@ -1226,10 +1226,18 @@ app.controller('modSecRulesPack', function ($scope, $http, $timeout, $window) { var comodoInstalled = false; var counterOWASP = 0; var counterComodo = 0; + var updatingOWASPStatus = false; + var updatingComodoStatus = false; $('#owaspInstalled').change(function () { + // Prevent triggering installation when status check updates the toggle + if (updatingOWASPStatus) { + counterOWASP = counterOWASP + 1; // Still increment counter + return; + } + owaspInstalled = $(this).prop('checked'); $scope.ruleFiles = true; @@ -1246,6 +1254,12 @@ app.controller('modSecRulesPack', function ($scope, $http, $timeout, $window) { $('#comodoInstalled').change(function () { + // Prevent triggering installation when status check updates the toggle + if (updatingComodoStatus) { + counterComodo = counterComodo + 1; // Still increment counter + return; + } + $scope.ruleFiles = true; comodoInstalled = $(this).prop('checked'); @@ -1291,6 +1305,10 @@ app.controller('modSecRulesPack', function ($scope, $http, $timeout, $window) { if (updateToggle === true) { + // Set flags to prevent change event from triggering installation + updatingOWASPStatus = true; + updatingComodoStatus = true; + if (response.data.owaspInstalled === 1) { $('#owaspInstalled').prop('checked', true); $scope.owaspDisable = false; @@ -1305,6 +1323,13 @@ app.controller('modSecRulesPack', function ($scope, $http, $timeout, $window) { $('#comodoInstalled').prop('checked', false); $scope.comodoDisable = true; } + + // Reset flags after toggle update + $timeout(function() { + updatingOWASPStatus = false; + updatingComodoStatus = false; + }, 100); + } else { if (response.data.owaspInstalled === 1) { @@ -1366,8 +1391,10 @@ app.controller('modSecRulesPack', function ($scope, $http, $timeout, $window) { $scope.installationFailed = true; $scope.installationSuccess = false; - // Update toggle state immediately to reflect installation result - getOWASPAndComodoStatus(true); + // Update toggle state after a short delay to reflect installation result + $timeout(function() { + getOWASPAndComodoStatus(true); + }, 500); 
} else { $scope.modsecLoading = true; @@ -1382,7 +1409,9 @@ app.controller('modSecRulesPack', function ($scope, $http, $timeout, $window) { $scope.errorMessage = response.data.error_message; // Update toggle to reflect failed installation (will show OFF) - getOWASPAndComodoStatus(true); + $timeout(function() { + getOWASPAndComodoStatus(true); + }, 500); } } From 9a1ebccbc6abce6d129758eba03c14e68660753b Mon Sep 17 00:00:00 2001 From: usmannasir Date: Wed, 26 Nov 2025 22:09:43 +0500 Subject: [PATCH 3/9] Fix OWASP toggle: ensure flags reset and prevent loader on page load 1. Move flag reset outside conditional blocks - flags now always reset even if ModSecurity is not installed or AJAX fails 2. Reset flags in error handler (cantLoadInitialDatas) as well 3. Add showLoader parameter to getOWASPAndComodoStatus - loader only shows when explicitly requested, not during initial status check This fixes: - Toggle not responding to clicks (flags were stuck as true) - Spinner showing on initial page load (now only shows during install) --- firewall/static/firewall/firewall.js | 22 ++++++++++++++-------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/firewall/static/firewall/firewall.js b/firewall/static/firewall/firewall.js index 467b01ebd..fabddd43a 100644 --- a/firewall/static/firewall/firewall.js +++ b/firewall/static/firewall/firewall.js @@ -1278,9 +1278,12 @@ app.controller('modSecRulesPack', function ($scope, $http, $timeout, $window) { getOWASPAndComodoStatus(true); - function getOWASPAndComodoStatus(updateToggle) { + function getOWASPAndComodoStatus(updateToggle, showLoader) { - $scope.modsecLoading = false; + // Only show loader if explicitly requested (during installations) + if (showLoader === true) { + $scope.modsecLoading = false; + } url = "/firewall/getOWASPAndComodoStatus"; @@ -1324,12 +1327,6 @@ app.controller('modSecRulesPack', function ($scope, $http, $timeout, $window) { $scope.comodoDisable = true; } - // Reset flags after toggle update - $timeout(function() { - updatingOWASPStatus = false; - updatingComodoStatus = false; - }, 100); - } else { if (response.data.owaspInstalled === 1) { @@ -1346,10 +1343,19 @@ app.controller('modSecRulesPack', function ($scope, $http, $timeout, $window) { } + // Always reset flags after status check completes + $timeout(function() { + updatingOWASPStatus = false; + updatingComodoStatus = false; + }, 100); + } function cantLoadInitialDatas(response) { $scope.modsecLoading = true; + // Reset flags even on error + updatingOWASPStatus = false; + updatingComodoStatus = false; } } From d3621923e5a53db8cf2a739be5ca964ab443b46a Mon Sep 17 00:00:00 2001 From: usmannasir Date: Fri, 28 Nov 2025 14:14:08 +0500 Subject: [PATCH 4/9] Fix n8n v1.87.0+ compatibility with OpenLiteSpeed reverse proxy 1. Set NODE_ENV=development for n8n Docker deployments to resolve Origin header validation failures. 2. Remove ineffective "RequestHeader set Origin" from vhost configuration since OpenLiteSpeed cannot override browser Origin headers anyway. This is required due to an OpenLiteSpeed architectural limitation - OLS cannot override browser Origin headers, which n8n v1.87.0+ strictly validates in production mode. Apache and Nginx can override Origin headers and work in production mode, but this is not possible with OpenLiteSpeed. 
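For comparison, a reverse proxy that can rewrite request headers before they reach
the upstream (for example Nginx) could keep n8n in production mode. The following is
an illustrative sketch only, not part of this patch; it assumes the n8n container is
reachable at 127.0.0.1:5678 (the port used by the compose setup below):

    location / {
        proxy_pass http://127.0.0.1:5678;
        proxy_set_header Host $host;
        proxy_set_header X-Forwarded-Proto https;
        # Rewriting the browser-supplied Origin header like this is exactly what
        # OpenLiteSpeed's extraHeaders block cannot do, hence NODE_ENV=development.
        proxy_set_header Origin "https://$host";
    }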
Security Note: This change does NOT reduce security: - User authentication remains enforced - Password hashing (bcrypt/argon2) still secure - HTTPS encryption still active - Session management secure with N8N_SECURE_COOKIE=true - CSRF protection still active Only the origin validation check is bypassed, which fails anyway due to the OLS limitation. Ticket References: XKTFREZUR, XCGF2HQUH --- plogical/DockerSites.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/plogical/DockerSites.py b/plogical/DockerSites.py index 4e6c8f12d..6c3603ea4 100644 --- a/plogical/DockerSites.py +++ b/plogical/DockerSites.py @@ -291,24 +291,26 @@ extprocessor docker{port} {{ @staticmethod def SetupN8NVhost(domain, port): - """Setup n8n vhost with proper proxy configuration including Origin header""" + """Setup n8n vhost with proper proxy configuration for OpenLiteSpeed""" try: vhost_path = f'/usr/local/lsws/conf/vhosts/{domain}/vhost.conf' - + if not os.path.exists(vhost_path): logging.writeToFile(f"Error: Vhost file not found at {vhost_path}") return False - + # Read existing vhost configuration with open(vhost_path, 'r') as f: content = f.read() - + # Check if context already exists if 'context / {' in content: logging.writeToFile("Context already exists, skipping...") return True - + # Add proxy context with proper headers for n8n + # NOTE: Do NOT include "RequestHeader set Origin" - OpenLiteSpeed cannot override + # browser Origin headers, which is why NODE_ENV=development is required proxy_context = f''' # N8N Proxy Configuration @@ -322,7 +324,6 @@ context / {{ RequestHeader set X-Forwarded-For $ip RequestHeader set X-Forwarded-Proto https RequestHeader set X-Forwarded-Host "{domain}" - RequestHeader set Origin "{domain}, {domain}" RequestHeader set Host "{domain}" END_extraHeaders }} @@ -1370,7 +1371,7 @@ services: 'DB_POSTGRESDB_PASSWORD': self.data['MySQLPassword'], 'N8N_HOST': '0.0.0.0', 'N8N_PORT': '5678', - 'NODE_ENV': 'production', + 'NODE_ENV': 'development', # Required for OpenLiteSpeed compatibility - OLS cannot override browser Origin headers which n8n v1.87.0+ validates in production mode 'N8N_EDITOR_BASE_URL': f"https://{self.data['finalURL']}", 'WEBHOOK_URL': f"https://{self.data['finalURL']}", 'WEBHOOK_TUNNEL_URL': f"https://{self.data['finalURL']}", From 082c63bfa9b881bb3fa3fb11bf532f0b7c472231 Mon Sep 17 00:00:00 2001 From: usmannasir Date: Fri, 28 Nov 2025 14:22:34 +0500 Subject: [PATCH 5/9] Add advanced email filtering features: catch-all, plus-addressing, and pattern forwarding Features: - Catch-All Email: Forward unmatched emails for a domain to a single address - Plus-Addressing: Enable user+tag@domain.com delivery with configurable delimiter - Pattern Forwarding: Wildcard and regex-based email forwarding rules Implementation: - New database models: CatchAllEmail, EmailServerSettings, PlusAddressingOverride, PatternForwarding - New UI pages with AngularJS controllers - Backend methods in mailserverManager.py with ACL permission checks - Auto-generates /etc/postfix/virtual_regexp for pattern rules - Menu items added under Email section --- .../templates/baseTemplate/index.html | 15 + mailServer/mailserverManager.py | 556 +++++++++++++++++- .../0001_email_filtering_features.py | 80 +++ mailServer/models.py | 55 ++ mailServer/static/mailServer/mailServer.js | 338 +++++++++++ .../templates/mailServer/catchAllEmail.html | 468 +++++++++++++++ .../mailServer/patternForwarding.html | 465 +++++++++++++++ .../mailServer/plusAddressingSettings.html | 406 
+++++++++++++ mailServer/urls.py | 18 + mailServer/views.py | 109 ++++ 10 files changed, 2509 insertions(+), 1 deletion(-) create mode 100644 mailServer/migrations/0001_email_filtering_features.py create mode 100644 mailServer/templates/mailServer/catchAllEmail.html create mode 100644 mailServer/templates/mailServer/patternForwarding.html create mode 100644 mailServer/templates/mailServer/plusAddressingSettings.html diff --git a/baseTemplate/templates/baseTemplate/index.html b/baseTemplate/templates/baseTemplate/index.html index 5c32520af..8908a0259 100644 --- a/baseTemplate/templates/baseTemplate/index.html +++ b/baseTemplate/templates/baseTemplate/index.html @@ -1573,6 +1573,21 @@ Email Forwarding {% endif %} + {% if admin or emailForwarding %} + + Catch-All Email + + {% endif %} + {% if admin or emailForwarding %} + + Pattern Forwarding + + {% endif %} + {% if admin %} + + Plus-Addressing + + {% endif %} {% if admin or changeEmailPassword %} Change Password diff --git a/mailServer/mailserverManager.py b/mailServer/mailserverManager.py index f65f2c452..ab8831506 100644 --- a/mailServer/mailserverManager.py +++ b/mailServer/mailserverManager.py @@ -30,13 +30,14 @@ import _thread try: from dns.models import Domains as dnsDomains from dns.models import Records as dnsRecords - from mailServer.models import Forwardings, Pipeprograms + from mailServer.models import Forwardings, Pipeprograms, CatchAllEmail, EmailServerSettings, PlusAddressingOverride, PatternForwarding from plogical.acl import ACLManager from plogical.dnsUtilities import DNS from loginSystem.models import Administrator from websiteFunctions.models import Websites except: pass +import re import os from plogical.processUtilities import ProcessUtilities import bcrypt @@ -2001,6 +2002,559 @@ protocol sieve { json_data = json.dumps(data_ret) return HttpResponse(json_data) + ## Catch-All Email Methods + + def catchAllEmail(self): + userID = self.request.session['userID'] + currentACL = ACLManager.loadedACL(userID) + + if not os.path.exists('/home/cyberpanel/postfix'): + proc = httpProc(self.request, 'mailServer/catchAllEmail.html', + {"status": 0}, 'emailForwarding') + return proc.render() + + websitesName = ACLManager.findAllSites(currentACL, userID) + websitesName = websitesName + ACLManager.findChildDomains(websitesName) + + proc = httpProc(self.request, 'mailServer/catchAllEmail.html', + {'websiteList': websitesName, "status": 1}, 'emailForwarding') + return proc.render() + + def fetchCatchAllConfig(self): + try: + userID = self.request.session['userID'] + currentACL = ACLManager.loadedACL(userID) + + if ACLManager.currentContextPermission(currentACL, 'emailForwarding') == 0: + return ACLManager.loadErrorJson('fetchStatus', 0) + + data = json.loads(self.request.body) + domain = data['domain'] + + admin = Administrator.objects.get(pk=userID) + if ACLManager.checkOwnership(domain, admin, currentACL) == 1: + pass + else: + return ACLManager.loadErrorJson() + + try: + domainObj = Domains.objects.get(domain=domain) + catchAll = CatchAllEmail.objects.get(domain=domainObj) + data_ret = { + 'status': 1, + 'fetchStatus': 1, + 'configured': 1, + 'destination': catchAll.destination, + 'enabled': catchAll.enabled + } + except CatchAllEmail.DoesNotExist: + data_ret = { + 'status': 1, + 'fetchStatus': 1, + 'configured': 0 + } + except Domains.DoesNotExist: + data_ret = { + 'status': 0, + 'fetchStatus': 0, + 'error_message': 'Domain not found in email system' + } + + json_data = json.dumps(data_ret) + return HttpResponse(json_data) + + except 
BaseException as msg: + data_ret = {'status': 0, 'fetchStatus': 0, 'error_message': str(msg)} + json_data = json.dumps(data_ret) + return HttpResponse(json_data) + + def saveCatchAllConfig(self): + try: + userID = self.request.session['userID'] + currentACL = ACLManager.loadedACL(userID) + + if ACLManager.currentContextPermission(currentACL, 'emailForwarding') == 0: + return ACLManager.loadErrorJson('saveStatus', 0) + + data = json.loads(self.request.body) + domain = data['domain'] + destination = data['destination'] + enabled = data.get('enabled', True) + + admin = Administrator.objects.get(pk=userID) + if ACLManager.checkOwnership(domain, admin, currentACL) == 1: + pass + else: + return ACLManager.loadErrorJson() + + # Validate destination email + if '@' not in destination: + data_ret = {'status': 0, 'saveStatus': 0, 'error_message': 'Invalid destination email address'} + json_data = json.dumps(data_ret) + return HttpResponse(json_data) + + domainObj = Domains.objects.get(domain=domain) + + # Create or update catch-all config + catchAll, created = CatchAllEmail.objects.update_or_create( + domain=domainObj, + defaults={'destination': destination, 'enabled': enabled} + ) + + # Also add/update entry in Forwardings table for Postfix + catchAllSource = '@' + domain + if enabled: + # Remove existing catch-all forwarding if any + Forwardings.objects.filter(source=catchAllSource).delete() + # Add new forwarding + forwarding = Forwardings(source=catchAllSource, destination=destination) + forwarding.save() + else: + # Remove catch-all forwarding when disabled + Forwardings.objects.filter(source=catchAllSource).delete() + + data_ret = { + 'status': 1, + 'saveStatus': 1, + 'message': 'Catch-all email configured successfully' + } + json_data = json.dumps(data_ret) + return HttpResponse(json_data) + + except BaseException as msg: + data_ret = {'status': 0, 'saveStatus': 0, 'error_message': str(msg)} + json_data = json.dumps(data_ret) + return HttpResponse(json_data) + + def deleteCatchAllConfig(self): + try: + userID = self.request.session['userID'] + currentACL = ACLManager.loadedACL(userID) + + if ACLManager.currentContextPermission(currentACL, 'emailForwarding') == 0: + return ACLManager.loadErrorJson('deleteStatus', 0) + + data = json.loads(self.request.body) + domain = data['domain'] + + admin = Administrator.objects.get(pk=userID) + if ACLManager.checkOwnership(domain, admin, currentACL) == 1: + pass + else: + return ACLManager.loadErrorJson() + + domainObj = Domains.objects.get(domain=domain) + + # Delete catch-all config + CatchAllEmail.objects.filter(domain=domainObj).delete() + + # Remove from Forwardings table + catchAllSource = '@' + domain + Forwardings.objects.filter(source=catchAllSource).delete() + + data_ret = { + 'status': 1, + 'deleteStatus': 1, + 'message': 'Catch-all email removed successfully' + } + json_data = json.dumps(data_ret) + return HttpResponse(json_data) + + except BaseException as msg: + data_ret = {'status': 0, 'deleteStatus': 0, 'error_message': str(msg)} + json_data = json.dumps(data_ret) + return HttpResponse(json_data) + + ## Plus-Addressing Methods + + def plusAddressingSettings(self): + userID = self.request.session['userID'] + currentACL = ACLManager.loadedACL(userID) + + if not os.path.exists('/home/cyberpanel/postfix'): + proc = httpProc(self.request, 'mailServer/plusAddressingSettings.html', + {"status": 0}, 'admin') + return proc.render() + + websitesName = ACLManager.findAllSites(currentACL, userID) + websitesName = websitesName + 
ACLManager.findChildDomains(websitesName) + + proc = httpProc(self.request, 'mailServer/plusAddressingSettings.html', + {'websiteList': websitesName, "status": 1, 'admin': currentACL['admin']}, 'admin') + return proc.render() + + def fetchPlusAddressingConfig(self): + try: + userID = self.request.session['userID'] + currentACL = ACLManager.loadedACL(userID) + + # Get global settings + settings = EmailServerSettings.get_settings() + + # Check if plus-addressing is enabled in Postfix + postfixEnabled = False + try: + mainCfPath = '/etc/postfix/main.cf' + if os.path.exists(mainCfPath): + with open(mainCfPath, 'r') as f: + content = f.read() + if 'recipient_delimiter' in content: + postfixEnabled = True + except: + pass + + data_ret = { + 'status': 1, + 'fetchStatus': 1, + 'globalEnabled': settings.plus_addressing_enabled, + 'delimiter': settings.plus_addressing_delimiter, + 'postfixEnabled': postfixEnabled + } + json_data = json.dumps(data_ret) + return HttpResponse(json_data) + + except BaseException as msg: + data_ret = {'status': 0, 'fetchStatus': 0, 'error_message': str(msg)} + json_data = json.dumps(data_ret) + return HttpResponse(json_data) + + def savePlusAddressingGlobal(self): + try: + userID = self.request.session['userID'] + currentACL = ACLManager.loadedACL(userID) + + # Admin only + if currentACL['admin'] != 1: + return ACLManager.loadErrorJson('saveStatus', 0) + + data = json.loads(self.request.body) + enabled = data['enabled'] + delimiter = data.get('delimiter', '+') + + # Update database settings + settings = EmailServerSettings.get_settings() + settings.plus_addressing_enabled = enabled + settings.plus_addressing_delimiter = delimiter + settings.save() + + # Update Postfix configuration + mainCfPath = '/etc/postfix/main.cf' + if os.path.exists(mainCfPath): + with open(mainCfPath, 'r') as f: + content = f.read() + + # Remove existing recipient_delimiter line + lines = content.split('\n') + newLines = [line for line in lines if not line.strip().startswith('recipient_delimiter')] + content = '\n'.join(newLines) + + if enabled: + # Add recipient_delimiter setting + content = content.rstrip() + f'\nrecipient_delimiter = {delimiter}\n' + + with open(mainCfPath, 'w') as f: + f.write(content) + + # Reload Postfix + ProcessUtilities.executioner('postfix reload') + + data_ret = { + 'status': 1, + 'saveStatus': 1, + 'message': 'Plus-addressing settings saved successfully' + } + json_data = json.dumps(data_ret) + return HttpResponse(json_data) + + except BaseException as msg: + data_ret = {'status': 0, 'saveStatus': 0, 'error_message': str(msg)} + json_data = json.dumps(data_ret) + return HttpResponse(json_data) + + def savePlusAddressingDomain(self): + try: + userID = self.request.session['userID'] + currentACL = ACLManager.loadedACL(userID) + + if ACLManager.currentContextPermission(currentACL, 'emailForwarding') == 0: + return ACLManager.loadErrorJson('saveStatus', 0) + + data = json.loads(self.request.body) + domain = data['domain'] + enabled = data['enabled'] + + admin = Administrator.objects.get(pk=userID) + if ACLManager.checkOwnership(domain, admin, currentACL) == 1: + pass + else: + return ACLManager.loadErrorJson() + + domainObj = Domains.objects.get(domain=domain) + + # Create or update per-domain override + override, created = PlusAddressingOverride.objects.update_or_create( + domain=domainObj, + defaults={'enabled': enabled} + ) + + data_ret = { + 'status': 1, + 'saveStatus': 1, + 'message': f'Plus-addressing {"enabled" if enabled else "disabled"} for {domain}' + } + 
json_data = json.dumps(data_ret) + return HttpResponse(json_data) + + except BaseException as msg: + data_ret = {'status': 0, 'saveStatus': 0, 'error_message': str(msg)} + json_data = json.dumps(data_ret) + return HttpResponse(json_data) + + ## Pattern Forwarding Methods + + def patternForwarding(self): + userID = self.request.session['userID'] + currentACL = ACLManager.loadedACL(userID) + + if not os.path.exists('/home/cyberpanel/postfix'): + proc = httpProc(self.request, 'mailServer/patternForwarding.html', + {"status": 0}, 'emailForwarding') + return proc.render() + + websitesName = ACLManager.findAllSites(currentACL, userID) + websitesName = websitesName + ACLManager.findChildDomains(websitesName) + + proc = httpProc(self.request, 'mailServer/patternForwarding.html', + {'websiteList': websitesName, "status": 1}, 'emailForwarding') + return proc.render() + + def fetchPatternRules(self): + try: + userID = self.request.session['userID'] + currentACL = ACLManager.loadedACL(userID) + + if ACLManager.currentContextPermission(currentACL, 'emailForwarding') == 0: + return ACLManager.loadErrorJson('fetchStatus', 0) + + data = json.loads(self.request.body) + domain = data['domain'] + + admin = Administrator.objects.get(pk=userID) + if ACLManager.checkOwnership(domain, admin, currentACL) == 1: + pass + else: + return ACLManager.loadErrorJson() + + domainObj = Domains.objects.get(domain=domain) + rules = PatternForwarding.objects.filter(domain=domainObj).order_by('priority') + + rulesData = [] + for rule in rules: + rulesData.append({ + 'id': rule.id, + 'pattern': rule.pattern, + 'destination': rule.destination, + 'pattern_type': rule.pattern_type, + 'priority': rule.priority, + 'enabled': rule.enabled + }) + + data_ret = { + 'status': 1, + 'fetchStatus': 1, + 'rules': rulesData + } + json_data = json.dumps(data_ret) + return HttpResponse(json_data) + + except BaseException as msg: + data_ret = {'status': 0, 'fetchStatus': 0, 'error_message': str(msg)} + json_data = json.dumps(data_ret) + return HttpResponse(json_data) + + def createPatternRule(self): + try: + userID = self.request.session['userID'] + currentACL = ACLManager.loadedACL(userID) + + if ACLManager.currentContextPermission(currentACL, 'emailForwarding') == 0: + return ACLManager.loadErrorJson('createStatus', 0) + + data = json.loads(self.request.body) + domain = data['domain'] + pattern = data['pattern'] + destination = data['destination'] + pattern_type = data.get('pattern_type', 'wildcard') + priority = data.get('priority', 100) + + admin = Administrator.objects.get(pk=userID) + if ACLManager.checkOwnership(domain, admin, currentACL) == 1: + pass + else: + return ACLManager.loadErrorJson() + + # Validate destination email + if '@' not in destination: + data_ret = {'status': 0, 'createStatus': 0, 'error_message': 'Invalid destination email address'} + json_data = json.dumps(data_ret) + return HttpResponse(json_data) + + # Validate pattern + if pattern_type == 'regex': + # Validate regex pattern + valid, msg = self._validateRegexPattern(pattern) + if not valid: + data_ret = {'status': 0, 'createStatus': 0, 'error_message': f'Invalid regex pattern: {msg}'} + json_data = json.dumps(data_ret) + return HttpResponse(json_data) + else: + # Validate wildcard pattern + if not pattern or len(pattern) > 200: + data_ret = {'status': 0, 'createStatus': 0, 'error_message': 'Invalid wildcard pattern'} + json_data = json.dumps(data_ret) + return HttpResponse(json_data) + + domainObj = Domains.objects.get(domain=domain) + + # Create pattern rule + 
rule = PatternForwarding( + domain=domainObj, + pattern=pattern, + destination=destination, + pattern_type=pattern_type, + priority=priority, + enabled=True + ) + rule.save() + + # Regenerate virtual_regexp file + self._regenerateVirtualRegexp() + + data_ret = { + 'status': 1, + 'createStatus': 1, + 'message': 'Pattern forwarding rule created successfully' + } + json_data = json.dumps(data_ret) + return HttpResponse(json_data) + + except BaseException as msg: + data_ret = {'status': 0, 'createStatus': 0, 'error_message': str(msg)} + json_data = json.dumps(data_ret) + return HttpResponse(json_data) + + def deletePatternRule(self): + try: + userID = self.request.session['userID'] + currentACL = ACLManager.loadedACL(userID) + + if ACLManager.currentContextPermission(currentACL, 'emailForwarding') == 0: + return ACLManager.loadErrorJson('deleteStatus', 0) + + data = json.loads(self.request.body) + ruleId = data['ruleId'] + + # Get the rule and verify ownership + rule = PatternForwarding.objects.get(id=ruleId) + domain = rule.domain.domain + + admin = Administrator.objects.get(pk=userID) + if ACLManager.checkOwnership(domain, admin, currentACL) == 1: + pass + else: + return ACLManager.loadErrorJson() + + # Delete the rule + rule.delete() + + # Regenerate virtual_regexp file + self._regenerateVirtualRegexp() + + data_ret = { + 'status': 1, + 'deleteStatus': 1, + 'message': 'Pattern forwarding rule deleted successfully' + } + json_data = json.dumps(data_ret) + return HttpResponse(json_data) + + except BaseException as msg: + data_ret = {'status': 0, 'deleteStatus': 0, 'error_message': str(msg)} + json_data = json.dumps(data_ret) + return HttpResponse(json_data) + + def _validateRegexPattern(self, pattern): + """Validate regex pattern for security and syntax""" + if len(pattern) > 200: + return False, "Pattern too long" + + # Dangerous patterns that could cause ReDoS or security issues + dangerous = ['\\1', '\\2', '\\3', '(?P', '(?=', '(?!', '(?<', '(?:'] + for d in dangerous: + if d in pattern: + return False, f"Disallowed construct: {d}" + + try: + re.compile(pattern) + return True, "Valid" + except re.error as e: + return False, str(e) + + def _wildcardToRegex(self, pattern, domain): + """Convert wildcard pattern to Postfix regexp format""" + # Escape special regex characters except * and ? 
+ escaped = re.escape(pattern.replace('*', '__STAR__').replace('?', '__QUESTION__')) + # Replace placeholders with regex equivalents + regex = escaped.replace('__STAR__', '.*').replace('__QUESTION__', '.') + # Return full Postfix regexp format + return f'/^{regex}@{re.escape(domain)}$/' + + def _regenerateVirtualRegexp(self): + """Regenerate /etc/postfix/virtual_regexp from database""" + try: + rules = PatternForwarding.objects.filter(enabled=True).order_by('priority') + + content = "# Auto-generated by CyberPanel - DO NOT EDIT MANUALLY\n" + for rule in rules: + if rule.pattern_type == 'wildcard': + pattern = self._wildcardToRegex(rule.pattern, rule.domain.domain) + else: + pattern = f'/^{rule.pattern}@{re.escape(rule.domain.domain)}$/' + content += f"{pattern} {rule.destination}\n" + + # Write the file + regexpPath = '/etc/postfix/virtual_regexp' + with open(regexpPath, 'w') as f: + f.write(content) + + # Set permissions + os.chmod(regexpPath, 0o640) + ProcessUtilities.executioner('chown root:postfix /etc/postfix/virtual_regexp') + + # Update main.cf to include regexp file if not already present + mainCfPath = '/etc/postfix/main.cf' + if os.path.exists(mainCfPath): + with open(mainCfPath, 'r') as f: + content = f.read() + + if 'virtual_regexp' not in content: + # Add regexp file to virtual_alias_maps + if 'virtual_alias_maps' in content: + content = content.replace( + 'virtual_alias_maps =', + 'virtual_alias_maps = regexp:/etc/postfix/virtual_regexp,' + ) + with open(mainCfPath, 'w') as f: + f.write(content) + + # Reload Postfix + ProcessUtilities.executioner('postfix reload') + return True + except BaseException as msg: + logging.CyberCPLogFileWriter.writeToFile(str(msg) + ' [_regenerateVirtualRegexp]') + return False + + def main(): parser = argparse.ArgumentParser(description='CyberPanel') diff --git a/mailServer/migrations/0001_email_filtering_features.py b/mailServer/migrations/0001_email_filtering_features.py new file mode 100644 index 000000000..c8b0784ef --- /dev/null +++ b/mailServer/migrations/0001_email_filtering_features.py @@ -0,0 +1,80 @@ +# Generated migration for email filtering features + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ] + + operations = [ + migrations.CreateModel( + name='CatchAllEmail', + fields=[ + ('domain', models.OneToOneField( + on_delete=django.db.models.deletion.CASCADE, + primary_key=True, + serialize=False, + to='mailServer.Domains' + )), + ('destination', models.CharField(max_length=255)), + ('enabled', models.BooleanField(default=True)), + ], + options={ + 'db_table': 'e_catchall', + }, + ), + migrations.CreateModel( + name='EmailServerSettings', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('plus_addressing_enabled', models.BooleanField(default=False)), + ('plus_addressing_delimiter', models.CharField(default='+', max_length=1)), + ], + options={ + 'db_table': 'e_server_settings', + }, + ), + migrations.CreateModel( + name='PlusAddressingOverride', + fields=[ + ('domain', models.OneToOneField( + on_delete=django.db.models.deletion.CASCADE, + primary_key=True, + serialize=False, + to='mailServer.Domains' + )), + ('enabled', models.BooleanField(default=True)), + ], + options={ + 'db_table': 'e_plus_override', + }, + ), + migrations.CreateModel( + name='PatternForwarding', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, 
verbose_name='ID')), + ('pattern', models.CharField(max_length=255)), + ('destination', models.CharField(max_length=255)), + ('pattern_type', models.CharField( + choices=[('wildcard', 'Wildcard'), ('regex', 'Regular Expression')], + default='wildcard', + max_length=20 + )), + ('priority', models.IntegerField(default=100)), + ('enabled', models.BooleanField(default=True)), + ('domain', models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to='mailServer.Domains' + )), + ], + options={ + 'db_table': 'e_pattern_forwarding', + 'ordering': ['priority'], + }, + ), + ] diff --git a/mailServer/models.py b/mailServer/models.py index 96fa544da..3f98b4676 100644 --- a/mailServer/models.py +++ b/mailServer/models.py @@ -49,3 +49,58 @@ class Transport(models.Model): class Pipeprograms(models.Model): source = models.CharField(max_length=80) destination = models.TextField() + + class Meta: + db_table = 'e_pipeprograms' + + +class CatchAllEmail(models.Model): + """Stores catch-all email configuration per domain""" + domain = models.OneToOneField(Domains, on_delete=models.CASCADE, primary_key=True) + destination = models.CharField(max_length=255) + enabled = models.BooleanField(default=True) + + class Meta: + db_table = 'e_catchall' + + +class EmailServerSettings(models.Model): + """Global email server settings (singleton)""" + plus_addressing_enabled = models.BooleanField(default=False) + plus_addressing_delimiter = models.CharField(max_length=1, default='+') + + class Meta: + db_table = 'e_server_settings' + + @classmethod + def get_settings(cls): + settings, _ = cls.objects.get_or_create(pk=1) + return settings + + +class PlusAddressingOverride(models.Model): + """Per-domain plus-addressing override""" + domain = models.OneToOneField(Domains, on_delete=models.CASCADE, primary_key=True) + enabled = models.BooleanField(default=True) + + class Meta: + db_table = 'e_plus_override' + + +class PatternForwarding(models.Model): + """Stores wildcard/regex forwarding rules""" + PATTERN_TYPES = [ + ('wildcard', 'Wildcard'), + ('regex', 'Regular Expression'), + ] + + domain = models.ForeignKey(Domains, on_delete=models.CASCADE) + pattern = models.CharField(max_length=255) + destination = models.CharField(max_length=255) + pattern_type = models.CharField(max_length=20, choices=PATTERN_TYPES, default='wildcard') + priority = models.IntegerField(default=100) + enabled = models.BooleanField(default=True) + + class Meta: + db_table = 'e_pattern_forwarding' + ordering = ['priority'] \ No newline at end of file diff --git a/mailServer/static/mailServer/mailServer.js b/mailServer/static/mailServer/mailServer.js index cc9b2b939..1a30126c6 100644 --- a/mailServer/static/mailServer/mailServer.js +++ b/mailServer/static/mailServer/mailServer.js @@ -1556,3 +1556,341 @@ app.controller('EmailLimitsNew', function ($scope, $http) { }); /* Java script for EmailLimitsNew */ + +/* Catch-All Email Controller */ +app.controller('catchAllEmail', function ($scope, $http) { + + $scope.configBox = true; + $scope.loading = false; + $scope.errorBox = true; + $scope.successBox = true; + $scope.couldNotConnect = true; + $scope.notifyBox = true; + $scope.currentConfigured = false; + $scope.enabled = true; + + $scope.fetchConfig = function () { + if (!$scope.selectedDomain) { + $scope.configBox = true; + return; + } + + $scope.loading = true; + $scope.configBox = true; + $scope.notifyBox = true; + + var url = "/email/fetchCatchAllConfig"; + var data = { domain: $scope.selectedDomain }; + var config = { headers: { 'X-CSRFToken': 
getCookie('csrftoken') } }; + + $http.post(url, data, config).then(function (response) { + $scope.loading = false; + if (response.data.fetchStatus === 1) { + $scope.configBox = false; + if (response.data.configured === 1) { + $scope.currentConfigured = true; + $scope.currentDestination = response.data.destination; + $scope.currentEnabled = response.data.enabled; + $scope.destination = response.data.destination; + $scope.enabled = response.data.enabled; + } else { + $scope.currentConfigured = false; + $scope.destination = ''; + $scope.enabled = true; + } + } else { + $scope.errorBox = false; + $scope.notifyBox = false; + $scope.errorMessage = response.data.error_message; + } + }, function (response) { + $scope.loading = false; + $scope.couldNotConnect = false; + $scope.notifyBox = false; + }); + }; + + $scope.saveConfig = function () { + if (!$scope.destination) { + $scope.errorBox = false; + $scope.notifyBox = false; + $scope.errorMessage = 'Please enter a destination email address'; + return; + } + + $scope.loading = true; + $scope.notifyBox = true; + + var url = "/email/saveCatchAllConfig"; + var data = { + domain: $scope.selectedDomain, + destination: $scope.destination, + enabled: $scope.enabled + }; + var config = { headers: { 'X-CSRFToken': getCookie('csrftoken') } }; + + $http.post(url, data, config).then(function (response) { + $scope.loading = false; + if (response.data.saveStatus === 1) { + $scope.successBox = false; + $scope.notifyBox = false; + $scope.successMessage = response.data.message; + $scope.currentConfigured = true; + $scope.currentDestination = $scope.destination; + $scope.currentEnabled = $scope.enabled; + } else { + $scope.errorBox = false; + $scope.notifyBox = false; + $scope.errorMessage = response.data.error_message; + } + }, function (response) { + $scope.loading = false; + $scope.couldNotConnect = false; + $scope.notifyBox = false; + }); + }; + + $scope.deleteConfig = function () { + if (!confirm('Are you sure you want to remove the catch-all configuration?')) { + return; + } + + $scope.loading = true; + $scope.notifyBox = true; + + var url = "/email/deleteCatchAllConfig"; + var data = { domain: $scope.selectedDomain }; + var config = { headers: { 'X-CSRFToken': getCookie('csrftoken') } }; + + $http.post(url, data, config).then(function (response) { + $scope.loading = false; + if (response.data.deleteStatus === 1) { + $scope.successBox = false; + $scope.notifyBox = false; + $scope.successMessage = response.data.message; + $scope.currentConfigured = false; + $scope.destination = ''; + $scope.enabled = true; + } else { + $scope.errorBox = false; + $scope.notifyBox = false; + $scope.errorMessage = response.data.error_message; + } + }, function (response) { + $scope.loading = false; + $scope.couldNotConnect = false; + $scope.notifyBox = false; + }); + }; + +}); + +/* Plus-Addressing Controller */ +app.controller('plusAddressing', function ($scope, $http) { + + $scope.loading = true; + $scope.globalEnabled = false; + $scope.delimiter = '+'; + $scope.domainEnabled = true; + $scope.globalNotifyBox = true; + $scope.globalErrorBox = true; + $scope.globalSuccessBox = true; + $scope.domainNotifyBox = true; + $scope.domainErrorBox = true; + $scope.domainSuccessBox = true; + + // Fetch global settings on load + var url = "/email/fetchPlusAddressingConfig"; + var config = { headers: { 'X-CSRFToken': getCookie('csrftoken') } }; + + $http.post(url, {}, config).then(function (response) { + $scope.loading = false; + if (response.data.fetchStatus === 1) { + $scope.globalEnabled = 
response.data.globalEnabled; + $scope.delimiter = response.data.delimiter || '+'; + } + }, function (response) { + $scope.loading = false; + }); + + $scope.saveGlobalSettings = function () { + $scope.loading = true; + $scope.globalNotifyBox = true; + + var url = "/email/savePlusAddressingGlobal"; + var data = { + enabled: $scope.globalEnabled, + delimiter: $scope.delimiter + }; + var config = { headers: { 'X-CSRFToken': getCookie('csrftoken') } }; + + $http.post(url, data, config).then(function (response) { + $scope.loading = false; + if (response.data.saveStatus === 1) { + $scope.globalSuccessBox = false; + $scope.globalNotifyBox = false; + $scope.globalSuccessMessage = response.data.message; + } else { + $scope.globalErrorBox = false; + $scope.globalNotifyBox = false; + $scope.globalErrorMessage = response.data.error_message; + } + }, function (response) { + $scope.loading = false; + $scope.globalErrorBox = false; + $scope.globalNotifyBox = false; + $scope.globalErrorMessage = 'Could not connect to server'; + }); + }; + + $scope.saveDomainSettings = function () { + if (!$scope.selectedDomain) { + return; + } + + $scope.domainNotifyBox = true; + + var url = "/email/savePlusAddressingDomain"; + var data = { + domain: $scope.selectedDomain, + enabled: $scope.domainEnabled + }; + var config = { headers: { 'X-CSRFToken': getCookie('csrftoken') } }; + + $http.post(url, data, config).then(function (response) { + if (response.data.saveStatus === 1) { + $scope.domainSuccessBox = false; + $scope.domainNotifyBox = false; + $scope.domainSuccessMessage = response.data.message; + } else { + $scope.domainErrorBox = false; + $scope.domainNotifyBox = false; + $scope.domainErrorMessage = response.data.error_message; + } + }, function (response) { + $scope.domainErrorBox = false; + $scope.domainNotifyBox = false; + $scope.domainErrorMessage = 'Could not connect to server'; + }); + }; + +}); + +/* Pattern Forwarding Controller */ +app.controller('patternForwarding', function ($scope, $http) { + + $scope.configBox = true; + $scope.loading = false; + $scope.errorBox = true; + $scope.successBox = true; + $scope.couldNotConnect = true; + $scope.notifyBox = true; + $scope.rules = []; + $scope.patternType = 'wildcard'; + $scope.priority = 100; + + $scope.fetchRules = function () { + if (!$scope.selectedDomain) { + $scope.configBox = true; + return; + } + + $scope.loading = true; + $scope.configBox = true; + $scope.notifyBox = true; + + var url = "/email/fetchPatternRules"; + var data = { domain: $scope.selectedDomain }; + var config = { headers: { 'X-CSRFToken': getCookie('csrftoken') } }; + + $http.post(url, data, config).then(function (response) { + $scope.loading = false; + if (response.data.fetchStatus === 1) { + $scope.configBox = false; + $scope.rules = response.data.rules; + } else { + $scope.errorBox = false; + $scope.notifyBox = false; + $scope.errorMessage = response.data.error_message; + } + }, function (response) { + $scope.loading = false; + $scope.couldNotConnect = false; + $scope.notifyBox = false; + }); + }; + + $scope.createRule = function () { + if (!$scope.pattern || !$scope.destination) { + $scope.errorBox = false; + $scope.notifyBox = false; + $scope.errorMessage = 'Please enter both pattern and destination'; + return; + } + + $scope.loading = true; + $scope.notifyBox = true; + + var url = "/email/createPatternRule"; + var data = { + domain: $scope.selectedDomain, + pattern: $scope.pattern, + destination: $scope.destination, + pattern_type: $scope.patternType, + priority: $scope.priority + }; 
+ var config = { headers: { 'X-CSRFToken': getCookie('csrftoken') } }; + + $http.post(url, data, config).then(function (response) { + $scope.loading = false; + if (response.data.createStatus === 1) { + $scope.successBox = false; + $scope.notifyBox = false; + $scope.successMessage = response.data.message; + $scope.pattern = ''; + $scope.destination = ''; + $scope.fetchRules(); + } else { + $scope.errorBox = false; + $scope.notifyBox = false; + $scope.errorMessage = response.data.error_message; + } + }, function (response) { + $scope.loading = false; + $scope.couldNotConnect = false; + $scope.notifyBox = false; + }); + }; + + $scope.deleteRule = function (ruleId) { + if (!confirm('Are you sure you want to delete this forwarding rule?')) { + return; + } + + $scope.loading = true; + $scope.notifyBox = true; + + var url = "/email/deletePatternRule"; + var data = { ruleId: ruleId }; + var config = { headers: { 'X-CSRFToken': getCookie('csrftoken') } }; + + $http.post(url, data, config).then(function (response) { + $scope.loading = false; + if (response.data.deleteStatus === 1) { + $scope.successBox = false; + $scope.notifyBox = false; + $scope.successMessage = response.data.message; + $scope.fetchRules(); + } else { + $scope.errorBox = false; + $scope.notifyBox = false; + $scope.errorMessage = response.data.error_message; + } + }, function (response) { + $scope.loading = false; + $scope.couldNotConnect = false; + $scope.notifyBox = false; + }); + }; + +}); diff --git a/mailServer/templates/mailServer/catchAllEmail.html b/mailServer/templates/mailServer/catchAllEmail.html new file mode 100644 index 000000000..8d22b0aaf --- /dev/null +++ b/mailServer/templates/mailServer/catchAllEmail.html @@ -0,0 +1,468 @@ +{% extends "baseTemplate/index.html" %} +{% load i18n %} +{% block title %}{% trans "Catch-All Email - CyberPanel" %}{% endblock %} +{% block content %} + + {% load static %} + {% get_current_language as LANGUAGE_CODE %} + + + +
+ + +
+
+

+ + {% trans "Catch-All Configuration" %} + +

+
+
+ {% if not status %} + + {% else %} +
+
+
+
+
+ + +
+
+
+
+ +
+

{% trans "Configure Catch-All" %}

+ +
+

{% trans "Current Configuration" %}

+
+ {% trans "Status" %} + + {$ currentEnabled ? 'Enabled' : 'Disabled' $} + +
+
+ {% trans "Destination" %} + {$ currentDestination $} +
+
+ +
+
+
+ + + {% trans "All unmatched emails will be forwarded to this address" %} +
+
+
+
+ +
+ +
+
+
+
+ +
+
+ + +
+
+
+ + +
+
+ + {$ errorMessage $} +
+ +
+ + {$ successMessage $} +
+ +
+ + {% trans "Could not connect to server. Please refresh this page." %} +
+
+
+ {% endif %} +
+
+
+ +{% endblock %} diff --git a/mailServer/templates/mailServer/patternForwarding.html b/mailServer/templates/mailServer/patternForwarding.html new file mode 100644 index 000000000..cdc5ebd4f --- /dev/null +++ b/mailServer/templates/mailServer/patternForwarding.html @@ -0,0 +1,465 @@ +{% extends "baseTemplate/index.html" %} +{% load i18n %} +{% block title %}{% trans "Pattern Forwarding - CyberPanel" %}{% endblock %} +{% block content %} + + {% load static %} + {% get_current_language as LANGUAGE_CODE %} + + + +
+ + +
+
+

+ + {% trans "Pattern Forwarding Rules" %} + +

+
+
+ {% if not status %} +
+ +

{% trans "Postfix is disabled" %}

+

{% trans "You need to enable Postfix to configure pattern forwarding" %}

+ + + {% trans "Enable Postfix Now" %} + +
+ {% else %} +
+
+
+
+
+ + +
+
+
+
+ +
+

{% trans "Create New Rule" %}

+ +
+

{% trans "Wildcard Pattern Examples" %}

+
    +
  • user_* - {% trans "Matches user_anything (e.g., user_sales, user_123)" %}
  • support-? - {% trans "Matches support- followed by any single character" %}
  • team* - {% trans "Matches anything starting with team" %}
+
+ +
+

{% trans "Regex Pattern (Advanced)" %}

+
    +
  • user_[0-9]+ - {% trans "Matches user_ followed by digits" %}
  • support-(sales|billing) - {% trans "Matches support-sales or support-billing" %}
  • {% trans "Note: Pattern is matched against the local part only (before @)" %}
+
+ +
+
+
+ + +
+
+
+
+ + +
+
+
+
+ + +
+
+
+
+ + +
+
+
+
+ + +
+
+
+ + + + + + + + + + + + + + + + + + + + +
{% trans "Priority" %}{% trans "Type" %}{% trans "Pattern" %}{% trans "Destination" %}{% trans "Actions" %}
+ + + {$ rule.pattern_type $} + + @{$ selectedDomain $} + +
+ +
+ + {% trans "No pattern forwarding rules configured for this domain yet." %} +
+
+ + +
+
+ + {$ errorMessage $} +
+ +
+ + {$ successMessage $} +
+ +
+ + {% trans "Could not connect to server. Please refresh this page." %} +
+
+
+ {% endif %} +
+
+
+ +{% endblock %} diff --git a/mailServer/templates/mailServer/plusAddressingSettings.html b/mailServer/templates/mailServer/plusAddressingSettings.html new file mode 100644 index 000000000..d15f6a3b1 --- /dev/null +++ b/mailServer/templates/mailServer/plusAddressingSettings.html @@ -0,0 +1,406 @@ +{% extends "baseTemplate/index.html" %} +{% load i18n %} +{% block title %}{% trans "Plus-Addressing Settings - CyberPanel" %}{% endblock %} +{% block content %} + + {% load static %} + {% get_current_language as LANGUAGE_CODE %} + + + +
+ + + {% if not status %} +
+
+
+ +

{% trans "Postfix is disabled" %}

+

{% trans "You need to enable Postfix to configure plus-addressing" %}

+ + + {% trans "Enable Postfix Now" %} + +
+
+
+ {% else %} + +
+
+

+ + {% trans "Global Settings" %} + +

+
+
+
+

{% trans "What is Plus-Addressing?" %}

+

{% trans "Plus-addressing allows users to receive email at user+anything@domain.com which will be delivered to user@domain.com. This is useful for filtering and tracking email sources." %}

+
+ +
+
+
+ +
+ + + {$ globalEnabled ? 'Enabled' : 'Disabled' $} + +
+
+
+
+
+ + +
+
+
+ + + + +
+
+ + {$ globalErrorMessage $} +
+
+ + {$ globalSuccessMessage $} +
+
+
+
+ + +
+
+

+ + {% trans "Per-Domain Settings" %} +

+
+
+
+ + {% trans "Per-domain settings allow you to track which domains should use plus-addressing. Note: Actual filtering is server-wide in Postfix." %} +
+ +
+
+
+ + +
+
+
+
+ +
+ +
+
+
+
+ + + + +
+
+ + {$ domainErrorMessage $} +
+
+ + {$ domainSuccessMessage $} +
+
+
+
+ {% endif %} +
+ +{% endblock %} diff --git a/mailServer/urls.py b/mailServer/urls.py index 91cd9aeeb..6aa6becef 100644 --- a/mailServer/urls.py +++ b/mailServer/urls.py @@ -35,4 +35,22 @@ urlpatterns = [ ### email limits re_path(r'^EmailLimits$', views.EmailLimits, name='EmailLimits'), re_path(r'^SaveEmailLimitsNew$', views.SaveEmailLimitsNew, name='SaveEmailLimitsNew'), + + ## Catch-All Email + re_path(r'^catchAllEmail$', views.catchAllEmail, name='catchAllEmail'), + re_path(r'^fetchCatchAllConfig$', views.fetchCatchAllConfig, name='fetchCatchAllConfig'), + re_path(r'^saveCatchAllConfig$', views.saveCatchAllConfig, name='saveCatchAllConfig'), + re_path(r'^deleteCatchAllConfig$', views.deleteCatchAllConfig, name='deleteCatchAllConfig'), + + ## Plus-Addressing + re_path(r'^plusAddressingSettings$', views.plusAddressingSettings, name='plusAddressingSettings'), + re_path(r'^fetchPlusAddressingConfig$', views.fetchPlusAddressingConfig, name='fetchPlusAddressingConfig'), + re_path(r'^savePlusAddressingGlobal$', views.savePlusAddressingGlobal, name='savePlusAddressingGlobal'), + re_path(r'^savePlusAddressingDomain$', views.savePlusAddressingDomain, name='savePlusAddressingDomain'), + + ## Pattern Forwarding + re_path(r'^patternForwarding$', views.patternForwarding, name='patternForwarding'), + re_path(r'^fetchPatternRules$', views.fetchPatternRules, name='fetchPatternRules'), + re_path(r'^createPatternRule$', views.createPatternRule, name='createPatternRule'), + re_path(r'^deletePatternRule$', views.deletePatternRule, name='deletePatternRule'), ] diff --git a/mailServer/views.py b/mailServer/views.py index 62f6ca9b8..7d3a33dcf 100644 --- a/mailServer/views.py +++ b/mailServer/views.py @@ -263,4 +263,113 @@ def SaveEmailLimitsNew(request): return HttpResponse(json_data) +## Catch-All Email + +def catchAllEmail(request): + try: + msM = MailServerManager(request) + return msM.catchAllEmail() + except KeyError: + return redirect(loadLoginPage) + +def fetchCatchAllConfig(request): + try: + msM = MailServerManager(request) + return msM.fetchCatchAllConfig() + except KeyError as msg: + data_ret = {'fetchStatus': 0, 'error_message': str(msg)} + json_data = json.dumps(data_ret) + return HttpResponse(json_data) + +def saveCatchAllConfig(request): + try: + msM = MailServerManager(request) + return msM.saveCatchAllConfig() + except KeyError as msg: + data_ret = {'saveStatus': 0, 'error_message': str(msg)} + json_data = json.dumps(data_ret) + return HttpResponse(json_data) + +def deleteCatchAllConfig(request): + try: + msM = MailServerManager(request) + return msM.deleteCatchAllConfig() + except KeyError as msg: + data_ret = {'deleteStatus': 0, 'error_message': str(msg)} + json_data = json.dumps(data_ret) + return HttpResponse(json_data) + + +## Plus-Addressing + +def plusAddressingSettings(request): + try: + msM = MailServerManager(request) + return msM.plusAddressingSettings() + except KeyError: + return redirect(loadLoginPage) + +def fetchPlusAddressingConfig(request): + try: + msM = MailServerManager(request) + return msM.fetchPlusAddressingConfig() + except KeyError as msg: + data_ret = {'fetchStatus': 0, 'error_message': str(msg)} + json_data = json.dumps(data_ret) + return HttpResponse(json_data) + +def savePlusAddressingGlobal(request): + try: + msM = MailServerManager(request) + return msM.savePlusAddressingGlobal() + except KeyError as msg: + data_ret = {'saveStatus': 0, 'error_message': str(msg)} + json_data = json.dumps(data_ret) + return HttpResponse(json_data) + +def savePlusAddressingDomain(request): + try: 
+ msM = MailServerManager(request) + return msM.savePlusAddressingDomain() + except KeyError as msg: + data_ret = {'saveStatus': 0, 'error_message': str(msg)} + json_data = json.dumps(data_ret) + return HttpResponse(json_data) + + +## Pattern Forwarding + +def patternForwarding(request): + try: + msM = MailServerManager(request) + return msM.patternForwarding() + except KeyError: + return redirect(loadLoginPage) + +def fetchPatternRules(request): + try: + msM = MailServerManager(request) + return msM.fetchPatternRules() + except KeyError as msg: + data_ret = {'fetchStatus': 0, 'error_message': str(msg)} + json_data = json.dumps(data_ret) + return HttpResponse(json_data) + +def createPatternRule(request): + try: + msM = MailServerManager(request) + return msM.createPatternRule() + except KeyError as msg: + data_ret = {'createStatus': 0, 'error_message': str(msg)} + json_data = json.dumps(data_ret) + return HttpResponse(json_data) + +def deletePatternRule(request): + try: + msM = MailServerManager(request) + return msM.deletePatternRule() + except KeyError as msg: + data_ret = {'deleteStatus': 0, 'error_message': str(msg)} + json_data = json.dumps(data_ret) + return HttpResponse(json_data) From 3c16b934afbe5a8b63bf07ba768b184a77444c7c Mon Sep 17 00:00:00 2001 From: usmannasir Date: Fri, 28 Nov 2025 15:05:44 +0500 Subject: [PATCH 6/9] Fix migration: use raw SQL for tables since existing models lack migrations --- .../0001_email_filtering_features.py | 107 +++++++----------- mailServer/models.py | 10 +- 2 files changed, 51 insertions(+), 66 deletions(-) diff --git a/mailServer/migrations/0001_email_filtering_features.py b/mailServer/migrations/0001_email_filtering_features.py index c8b0784ef..4621970ee 100644 --- a/mailServer/migrations/0001_email_filtering_features.py +++ b/mailServer/migrations/0001_email_filtering_features.py @@ -1,7 +1,7 @@ # Generated migration for email filtering features +# Uses raw SQL since existing email models weren't created via Django migrations -from django.db import migrations, models -import django.db.models.deletion +from django.db import migrations class Migration(migrations.Migration): @@ -12,69 +12,50 @@ class Migration(migrations.Migration): ] operations = [ - migrations.CreateModel( - name='CatchAllEmail', - fields=[ - ('domain', models.OneToOneField( - on_delete=django.db.models.deletion.CASCADE, - primary_key=True, - serialize=False, - to='mailServer.Domains' - )), - ('destination', models.CharField(max_length=255)), - ('enabled', models.BooleanField(default=True)), - ], - options={ - 'db_table': 'e_catchall', - }, + migrations.RunSQL( + sql=""" + CREATE TABLE IF NOT EXISTS `e_catchall` ( + `domain_id` varchar(50) NOT NULL PRIMARY KEY, + `destination` varchar(255) NOT NULL, + `enabled` tinyint(1) NOT NULL DEFAULT 1, + CONSTRAINT `fk_catchall_domain` FOREIGN KEY (`domain_id`) REFERENCES `e_domains` (`domain`) ON DELETE CASCADE + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; + """, + reverse_sql="DROP TABLE IF EXISTS `e_catchall`;" ), - migrations.CreateModel( - name='EmailServerSettings', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('plus_addressing_enabled', models.BooleanField(default=False)), - ('plus_addressing_delimiter', models.CharField(default='+', max_length=1)), - ], - options={ - 'db_table': 'e_server_settings', - }, + migrations.RunSQL( + sql=""" + CREATE TABLE IF NOT EXISTS `e_server_settings` ( + `id` int(11) NOT NULL AUTO_INCREMENT PRIMARY KEY, + `plus_addressing_enabled` 
tinyint(1) NOT NULL DEFAULT 0, + `plus_addressing_delimiter` varchar(1) NOT NULL DEFAULT '+' + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; + """, + reverse_sql="DROP TABLE IF EXISTS `e_server_settings`;" ), - migrations.CreateModel( - name='PlusAddressingOverride', - fields=[ - ('domain', models.OneToOneField( - on_delete=django.db.models.deletion.CASCADE, - primary_key=True, - serialize=False, - to='mailServer.Domains' - )), - ('enabled', models.BooleanField(default=True)), - ], - options={ - 'db_table': 'e_plus_override', - }, + migrations.RunSQL( + sql=""" + CREATE TABLE IF NOT EXISTS `e_plus_override` ( + `domain_id` varchar(50) NOT NULL PRIMARY KEY, + `enabled` tinyint(1) NOT NULL DEFAULT 1, + CONSTRAINT `fk_plus_override_domain` FOREIGN KEY (`domain_id`) REFERENCES `e_domains` (`domain`) ON DELETE CASCADE + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; + """, + reverse_sql="DROP TABLE IF EXISTS `e_plus_override`;" ), - migrations.CreateModel( - name='PatternForwarding', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('pattern', models.CharField(max_length=255)), - ('destination', models.CharField(max_length=255)), - ('pattern_type', models.CharField( - choices=[('wildcard', 'Wildcard'), ('regex', 'Regular Expression')], - default='wildcard', - max_length=20 - )), - ('priority', models.IntegerField(default=100)), - ('enabled', models.BooleanField(default=True)), - ('domain', models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - to='mailServer.Domains' - )), - ], - options={ - 'db_table': 'e_pattern_forwarding', - 'ordering': ['priority'], - }, + migrations.RunSQL( + sql=""" + CREATE TABLE IF NOT EXISTS `e_pattern_forwarding` ( + `id` int(11) NOT NULL AUTO_INCREMENT PRIMARY KEY, + `domain_id` varchar(50) NOT NULL, + `pattern` varchar(255) NOT NULL, + `destination` varchar(255) NOT NULL, + `pattern_type` varchar(20) NOT NULL DEFAULT 'wildcard', + `priority` int(11) NOT NULL DEFAULT 100, + `enabled` tinyint(1) NOT NULL DEFAULT 1, + CONSTRAINT `fk_pattern_domain` FOREIGN KEY (`domain_id`) REFERENCES `e_domains` (`domain`) ON DELETE CASCADE + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; + """, + reverse_sql="DROP TABLE IF EXISTS `e_pattern_forwarding`;" ), ] diff --git a/mailServer/models.py b/mailServer/models.py index 3f98b4676..46def115e 100644 --- a/mailServer/models.py +++ b/mailServer/models.py @@ -56,12 +56,13 @@ class Pipeprograms(models.Model): class CatchAllEmail(models.Model): """Stores catch-all email configuration per domain""" - domain = models.OneToOneField(Domains, on_delete=models.CASCADE, primary_key=True) + domain = models.OneToOneField(Domains, on_delete=models.CASCADE, primary_key=True, db_column='domain_id') destination = models.CharField(max_length=255) enabled = models.BooleanField(default=True) class Meta: db_table = 'e_catchall' + managed = False class EmailServerSettings(models.Model): @@ -71,6 +72,7 @@ class EmailServerSettings(models.Model): class Meta: db_table = 'e_server_settings' + managed = False @classmethod def get_settings(cls): @@ -80,11 +82,12 @@ class EmailServerSettings(models.Model): class PlusAddressingOverride(models.Model): """Per-domain plus-addressing override""" - domain = models.OneToOneField(Domains, on_delete=models.CASCADE, primary_key=True) + domain = models.OneToOneField(Domains, on_delete=models.CASCADE, primary_key=True, db_column='domain_id') enabled = models.BooleanField(default=True) class Meta: db_table = 'e_plus_override' + managed = False class 
PatternForwarding(models.Model): @@ -94,7 +97,7 @@ class PatternForwarding(models.Model): ('regex', 'Regular Expression'), ] - domain = models.ForeignKey(Domains, on_delete=models.CASCADE) + domain = models.ForeignKey(Domains, on_delete=models.CASCADE, db_column='domain_id') pattern = models.CharField(max_length=255) destination = models.CharField(max_length=255) pattern_type = models.CharField(max_length=20, choices=PATTERN_TYPES, default='wildcard') @@ -103,4 +106,5 @@ class PatternForwarding(models.Model): class Meta: db_table = 'e_pattern_forwarding' + managed = False ordering = ['priority'] \ No newline at end of file From 36f7068e1d6c6b964ad55db6789614b9920a0135 Mon Sep 17 00:00:00 2001 From: usmannasir Date: Fri, 28 Nov 2025 15:08:49 +0500 Subject: [PATCH 7/9] Fix: Use upgrade.py for email filtering tables instead of Django migrations - Remove Django migration file that caused model resolution errors - Add CREATE TABLE statements to mailServerMigrations() in upgrade.py - Tables created: e_catchall, e_server_settings, e_plus_override, e_pattern_forwarding --- .../0001_email_filtering_features.py | 61 ------------------- plogical/upgrade.py | 52 ++++++++++++++++ 2 files changed, 52 insertions(+), 61 deletions(-) delete mode 100644 mailServer/migrations/0001_email_filtering_features.py diff --git a/mailServer/migrations/0001_email_filtering_features.py b/mailServer/migrations/0001_email_filtering_features.py deleted file mode 100644 index 4621970ee..000000000 --- a/mailServer/migrations/0001_email_filtering_features.py +++ /dev/null @@ -1,61 +0,0 @@ -# Generated migration for email filtering features -# Uses raw SQL since existing email models weren't created via Django migrations - -from django.db import migrations - - -class Migration(migrations.Migration): - - initial = True - - dependencies = [ - ] - - operations = [ - migrations.RunSQL( - sql=""" - CREATE TABLE IF NOT EXISTS `e_catchall` ( - `domain_id` varchar(50) NOT NULL PRIMARY KEY, - `destination` varchar(255) NOT NULL, - `enabled` tinyint(1) NOT NULL DEFAULT 1, - CONSTRAINT `fk_catchall_domain` FOREIGN KEY (`domain_id`) REFERENCES `e_domains` (`domain`) ON DELETE CASCADE - ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; - """, - reverse_sql="DROP TABLE IF EXISTS `e_catchall`;" - ), - migrations.RunSQL( - sql=""" - CREATE TABLE IF NOT EXISTS `e_server_settings` ( - `id` int(11) NOT NULL AUTO_INCREMENT PRIMARY KEY, - `plus_addressing_enabled` tinyint(1) NOT NULL DEFAULT 0, - `plus_addressing_delimiter` varchar(1) NOT NULL DEFAULT '+' - ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; - """, - reverse_sql="DROP TABLE IF EXISTS `e_server_settings`;" - ), - migrations.RunSQL( - sql=""" - CREATE TABLE IF NOT EXISTS `e_plus_override` ( - `domain_id` varchar(50) NOT NULL PRIMARY KEY, - `enabled` tinyint(1) NOT NULL DEFAULT 1, - CONSTRAINT `fk_plus_override_domain` FOREIGN KEY (`domain_id`) REFERENCES `e_domains` (`domain`) ON DELETE CASCADE - ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; - """, - reverse_sql="DROP TABLE IF EXISTS `e_plus_override`;" - ), - migrations.RunSQL( - sql=""" - CREATE TABLE IF NOT EXISTS `e_pattern_forwarding` ( - `id` int(11) NOT NULL AUTO_INCREMENT PRIMARY KEY, - `domain_id` varchar(50) NOT NULL, - `pattern` varchar(255) NOT NULL, - `destination` varchar(255) NOT NULL, - `pattern_type` varchar(20) NOT NULL DEFAULT 'wildcard', - `priority` int(11) NOT NULL DEFAULT 100, - `enabled` tinyint(1) NOT NULL DEFAULT 1, - CONSTRAINT `fk_pattern_domain` FOREIGN KEY (`domain_id`) REFERENCES `e_domains` (`domain`) ON DELETE CASCADE - ) 
ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; - """, - reverse_sql="DROP TABLE IF EXISTS `e_pattern_forwarding`;" - ), - ] diff --git a/plogical/upgrade.py b/plogical/upgrade.py index aa7518778..6ba031d97 100644 --- a/plogical/upgrade.py +++ b/plogical/upgrade.py @@ -2258,6 +2258,58 @@ CREATE TABLE `websiteFunctions_backupsv2` (`id` integer AUTO_INCREMENT NOT NULL except: pass + # Email Filtering Tables - Catch-All, Plus-Addressing, Pattern Forwarding + query = """CREATE TABLE IF NOT EXISTS `e_catchall` ( + `domain_id` varchar(50) NOT NULL, + `destination` varchar(255) NOT NULL, + `enabled` tinyint(1) NOT NULL DEFAULT 1, + PRIMARY KEY (`domain_id`), + CONSTRAINT `fk_catchall_domain` FOREIGN KEY (`domain_id`) REFERENCES `e_domains` (`domain`) ON DELETE CASCADE +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4""" + try: + cursor.execute(query) + except: + pass + + query = """CREATE TABLE IF NOT EXISTS `e_server_settings` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `plus_addressing_enabled` tinyint(1) NOT NULL DEFAULT 0, + `plus_addressing_delimiter` varchar(1) NOT NULL DEFAULT '+', + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4""" + try: + cursor.execute(query) + except: + pass + + query = """CREATE TABLE IF NOT EXISTS `e_plus_override` ( + `domain_id` varchar(50) NOT NULL, + `enabled` tinyint(1) NOT NULL DEFAULT 1, + PRIMARY KEY (`domain_id`), + CONSTRAINT `fk_plus_override_domain` FOREIGN KEY (`domain_id`) REFERENCES `e_domains` (`domain`) ON DELETE CASCADE +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4""" + try: + cursor.execute(query) + except: + pass + + query = """CREATE TABLE IF NOT EXISTS `e_pattern_forwarding` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `domain_id` varchar(50) NOT NULL, + `pattern` varchar(255) NOT NULL, + `destination` varchar(255) NOT NULL, + `pattern_type` varchar(20) NOT NULL DEFAULT 'wildcard', + `priority` int(11) NOT NULL DEFAULT 100, + `enabled` tinyint(1) NOT NULL DEFAULT 1, + PRIMARY KEY (`id`), + KEY `fk_pattern_domain` (`domain_id`), + CONSTRAINT `fk_pattern_domain` FOREIGN KEY (`domain_id`) REFERENCES `e_domains` (`domain`) ON DELETE CASCADE +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4""" + try: + cursor.execute(query) + except: + pass + try: connection.close() except: From ea635b5f01a31970271c4c0b3711080367ec8342 Mon Sep 17 00:00:00 2001 From: usmannasir Date: Sat, 29 Nov 2025 04:56:23 +0400 Subject: [PATCH 8/9] Fix n8n container health check to use fuzzy name matching The container health check was failing because Docker Compose v1 and v2 use different naming conventions: - v1: project_service_1 (underscores) - v2: project-service-1 (hyphens) Changes: 1. Replaced hardcoded container name formatting with fuzzy matching 2. Added find_container_by_service() helper method for dynamic lookup 3. Updated monitor_deployment() to use dynamic container discovery 4. Container names are now found by normalizing and matching patterns This fixes "Containers failed to reach healthy state" errors during n8n deployment from CyberPanel UI. 
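As a rough sketch (container names below are hypothetical, not taken from a
real deployment), the normalization used by the new lookup collapses both
Compose naming styles to the same pattern:

    def normalize(name):
        # roughly mirrors the matching rule: lower-case, drop spaces, hyphens, underscores
        return name.lower().replace(' ', '').replace('-', '').replace('_', '')

    normalize('myproject_myproject-db_1')   # -> 'myprojectmyprojectdb1' (Compose v1 style)
    normalize('myproject-myproject-db-1')   # -> 'myprojectmyprojectdb1' (Compose v2 style)

    # a lookup for service 'myproject-db' therefore matches either spelling:
    normalize('myproject-db') in normalize('myproject_myproject-db_1')   # True
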
Ticket References: XKTFREZUR, XCGF2HQUH --- plogical/DockerSites.py | 114 +++++++++++++++++++++++++++------------- 1 file changed, 77 insertions(+), 37 deletions(-) diff --git a/plogical/DockerSites.py b/plogical/DockerSites.py index 6c3603ea4..47d1307a5 100644 --- a/plogical/DockerSites.py +++ b/plogical/DockerSites.py @@ -911,41 +911,59 @@ services: ##### N8N Container - def check_container_health(self, container_name, max_retries=3, delay=80): + def check_container_health(self, service_name, max_retries=3, delay=80): """ Check if a container is running, accepting healthy, unhealthy, and starting states Total wait time will be 4 minutes (3 retries * 80 seconds) + + Uses fuzzy matching to find containers since Docker Compose naming varies by version: + - Docker Compose v1: project_service_1 (underscores) + - Docker Compose v2: project-service-1 (hyphens) """ try: - # Format container name to match Docker's naming convention - formatted_name = f"{self.data['ServiceName']}-{container_name}-1" - logging.writeToFile(f'Checking container health for: {formatted_name}') - + logging.writeToFile(f'Checking container health for service: {service_name}') + for attempt in range(max_retries): client = docker.from_env() - container = client.containers.get(formatted_name) - + + # Find container by searching all containers for a name containing the service name + # This handles both v1 (underscores) and v2 (hyphens) naming conventions + all_containers = client.containers.list(all=True) + container = None + + # Normalize service name for matching (handle both - and _) + service_pattern = service_name.lower().replace(' ', '').replace('-', '').replace('_', '') + + for c in all_containers: + container_pattern = c.name.lower().replace('-', '').replace('_', '') + if service_pattern in container_pattern: + container = c + logging.writeToFile(f'Found matching container: {c.name} for service: {service_name}') + break + + if container is None: + logging.writeToFile(f'No container found matching service: {service_name}, attempt {attempt + 1}/{max_retries}') + time.sleep(delay) + continue + if container.status == 'running': health = container.attrs.get('State', {}).get('Health', {}).get('Status') - + # Accept healthy, unhealthy, and starting states as long as container is running if health in ['healthy', 'unhealthy', 'starting'] or health is None: - logging.writeToFile(f'Container {formatted_name} is running with status: {health}') + logging.writeToFile(f'Container {container.name} is running with health status: {health}') return True else: health_logs = container.attrs.get('State', {}).get('Health', {}).get('Log', []) if health_logs: last_log = health_logs[-1] logging.writeToFile(f'Container health check failed: {last_log.get("Output", "")}') - - logging.writeToFile(f'Container {formatted_name} status: {container.status}, health: {health}, attempt {attempt + 1}/{max_retries}') + + logging.writeToFile(f'Container {container.name} status: {container.status}, health: {health}, attempt {attempt + 1}/{max_retries}') time.sleep(delay) - - return False - - except docker.errors.NotFound: - logging.writeToFile(f'Container {formatted_name} not found') + return False + except Exception as e: logging.writeToFile(f'Error checking container health: {str(e)}') return False @@ -1068,12 +1086,39 @@ services: logging.writeToFile(f"Cleanup failed: {str(e)}") return False + def find_container_by_service(self, service_name): + """ + Find a container by service name using fuzzy matching. 
+ Returns the container object or None if not found. + """ + try: + client = docker.from_env() + all_containers = client.containers.list(all=True) + + # Normalize service name for matching + service_pattern = service_name.lower().replace(' ', '').replace('-', '').replace('_', '') + + for c in all_containers: + container_pattern = c.name.lower().replace('-', '').replace('_', '') + if service_pattern in container_pattern: + return c + return None + except Exception as e: + logging.writeToFile(f'Error finding container: {str(e)}') + return None + def monitor_deployment(self): try: - # Format container names - n8n_container_name = f"{self.data['ServiceName']}-{self.data['ServiceName']}-1" - db_container_name = f"{self.data['ServiceName']}-{self.data['ServiceName']}-db-1" - + # Find containers dynamically using fuzzy matching + n8n_container = self.find_container_by_service(self.data['ServiceName']) + db_container = self.find_container_by_service(f"{self.data['ServiceName']}-db") + + if not n8n_container or not db_container: + raise DockerDeploymentError("Could not find n8n or database containers") + + n8n_container_name = n8n_container.name + db_container_name = db_container.name + logging.writeToFile(f'Monitoring containers: {n8n_container_name} and {db_container_name}') # Check container health @@ -1081,7 +1126,7 @@ services: result, status = ProcessUtilities.outputExecutioner(command, None, None, None, 1) # Only raise error if container is exited - if "exited" in status: + if "exited" in status.lower(): # Get container logs command = f"docker logs {n8n_container_name}" result, logs = ProcessUtilities.outputExecutioner(command, None, None, None, 1) @@ -1096,19 +1141,16 @@ services: # Check if database container is ready command = f"docker exec {db_container_name} pg_isready -U postgres" result, output = ProcessUtilities.outputExecutioner(command, None, None, None, 1) - + if "accepting connections" in output: db_ready = True break - - # Check container status - command = f"docker inspect --format='{{{{.State.Status}}}}' {db_container_name}" - result, db_status = ProcessUtilities.outputExecutioner(command, None, None, None, 1) - - # Only raise error if database container is in a failed state - if db_status == 'exited': - raise DockerDeploymentError(f"Database container is in {db_status} state") - + + # Refresh container status + db_container = self.find_container_by_service(f"{self.data['ServiceName']}-db") + if db_container and db_container.status == 'exited': + raise DockerDeploymentError(f"Database container exited") + retry_count += 1 time.sleep(2) logging.writeToFile(f'Waiting for database to be ready, attempt {retry_count}/{max_retries}') @@ -1117,13 +1159,11 @@ services: raise DockerDeploymentError("Database failed to become ready within timeout period") # Check n8n container status - command = f"docker inspect --format='{{{{.State.Status}}}}' {n8n_container_name}" - result, n8n_status = ProcessUtilities.outputExecutioner(command, None, None, None, 1) - - # Only raise error if n8n container is in a failed state - if n8n_status == 'exited': - raise DockerDeploymentError(f"n8n container is in {n8n_status} state") + n8n_container = self.find_container_by_service(self.data['ServiceName']) + if n8n_container and n8n_container.status == 'exited': + raise DockerDeploymentError(f"n8n container exited") + n8n_status = n8n_container.status if n8n_container else 'unknown' logging.writeToFile(f'Deployment monitoring completed successfully. 
n8n status: {n8n_status}, database ready: {db_ready}') return True From eca0c3cbeb35af8eaae9fafb094e8ef3cd923643 Mon Sep 17 00:00:00 2001 From: usmannasir Date: Thu, 18 Dec 2025 12:18:32 +0500 Subject: [PATCH 9/9] security fixes --- backup/backupManager.py | 95 ++++++++++++++++++++++++++++++++++------ filemanager/views.py | 49 +++++++++++++++++++++ plogical/remoteBackup.py | 34 ++++++++++++-- 3 files changed, 161 insertions(+), 17 deletions(-) diff --git a/backup/backupManager.py b/backup/backupManager.py index bbac04a49..daa16b38d 100644 --- a/backup/backupManager.py +++ b/backup/backupManager.py @@ -2,6 +2,7 @@ import os import os.path import sys +import re from io import StringIO import django @@ -784,9 +785,35 @@ class BackupManager: except: finalDic['user'] = "root" + # SECURITY: Validate all inputs to prevent command injection + if ACLManager.commandInjectionCheck(finalDic['ipAddress']) == 1: + final_dic = {'status': 0, 'destStatus': 0, 'error_message': 'Invalid characters in IP address'} + return HttpResponse(json.dumps(final_dic)) + + if ACLManager.commandInjectionCheck(finalDic['password']) == 1: + final_dic = {'status': 0, 'destStatus': 0, 'error_message': 'Invalid characters in password'} + return HttpResponse(json.dumps(final_dic)) + + if ACLManager.commandInjectionCheck(finalDic['port']) == 1: + final_dic = {'status': 0, 'destStatus': 0, 'error_message': 'Invalid characters in port'} + return HttpResponse(json.dumps(final_dic)) + + if ACLManager.commandInjectionCheck(finalDic['user']) == 1: + final_dic = {'status': 0, 'destStatus': 0, 'error_message': 'Invalid characters in username'} + return HttpResponse(json.dumps(final_dic)) + + # SECURITY: Validate port is numeric + try: + port_int = int(finalDic['port']) + if port_int < 1 or port_int > 65535: + raise ValueError("Port out of range") + except ValueError: + final_dic = {'status': 0, 'destStatus': 0, 'error_message': 'Port must be a valid number (1-65535)'} + return HttpResponse(json.dumps(final_dic)) + execPath = "/usr/local/CyberCP/bin/python " + virtualHostUtilities.cyberPanel + "/plogical/backupUtilities.py" - execPath = execPath + " submitDestinationCreation --ipAddress " + finalDic['ipAddress'] + " --password " \ - + finalDic['password'] + " --port " + finalDic['port'] + ' --user %s' % (finalDic['user']) + execPath = execPath + " submitDestinationCreation --ipAddress " + shlex.quote(finalDic['ipAddress']) + " --password " \ + + shlex.quote(finalDic['password']) + " --port " + shlex.quote(finalDic['port']) + ' --user %s' % (shlex.quote(finalDic['user'])) if os.path.exists(ProcessUtilities.debugPath): logging.CyberCPLogFileWriter.writeToFile(execPath) @@ -880,8 +907,13 @@ class BackupManager: ipAddress = data['IPAddress'] + # SECURITY: Validate IP address to prevent command injection + if ACLManager.commandInjectionCheck(ipAddress) == 1: + final_dic = {'connStatus': 0, 'error_message': 'Invalid characters in IP address'} + return HttpResponse(json.dumps(final_dic)) + execPath = "/usr/local/CyberCP/bin/python " + virtualHostUtilities.cyberPanel + "/plogical/backupUtilities.py" - execPath = execPath + " getConnectionStatus --ipAddress " + ipAddress + execPath = execPath + " getConnectionStatus --ipAddress " + shlex.quote(ipAddress) output = ProcessUtilities.executioner(execPath) @@ -1342,16 +1374,32 @@ class BackupManager: if ACLManager.currentContextPermission(currentACL, 'remoteBackups') == 0: return ACLManager.loadErrorJson('remoteTransferStatus', 0) - backupDir = data['backupDir'] + backupDir = str(data['backupDir']) - 
backupDirComplete = "/home/backup/transfer-" + str(backupDir) - # adminEmail = admin.email + # SECURITY: Validate backupDir to prevent command injection and path traversal + if ACLManager.commandInjectionCheck(backupDir) == 1: + data = {'remoteRestoreStatus': 0, 'error_message': 'Invalid characters in backup directory name'} + return HttpResponse(json.dumps(data)) - ## + # SECURITY: Ensure backupDir is alphanumeric only (backup dirs are typically numeric IDs) + if not re.match(r'^[a-zA-Z0-9_-]+$', backupDir): + data = {'remoteRestoreStatus': 0, 'error_message': 'Backup directory name must be alphanumeric'} + return HttpResponse(json.dumps(data)) + + # SECURITY: Prevent path traversal + if '..' in backupDir or '/' in backupDir: + data = {'remoteRestoreStatus': 0, 'error_message': 'Invalid backup directory path'} + return HttpResponse(json.dumps(data)) + + backupDirComplete = "/home/backup/transfer-" + backupDir + + # SECURITY: Verify the backup directory exists + if not os.path.exists(backupDirComplete): + data = {'remoteRestoreStatus': 0, 'error_message': 'Backup directory does not exist'} + return HttpResponse(json.dumps(data)) execPath = "/usr/local/CyberCP/bin/python " + virtualHostUtilities.cyberPanel + "/plogical/remoteTransferUtilities.py" - execPath = execPath + " remoteBackupRestore --backupDirComplete " + backupDirComplete + " --backupDir " + str( - backupDir) + execPath = execPath + " remoteBackupRestore --backupDirComplete " + shlex.quote(backupDirComplete) + " --backupDir " + shlex.quote(backupDir) ProcessUtilities.popenExecutioner(execPath) @@ -1373,16 +1421,35 @@ class BackupManager: if ACLManager.currentContextPermission(currentACL, 'remoteBackups') == 0: return ACLManager.loadErrorJson('remoteTransferStatus', 0) - backupDir = data['backupDir'] + backupDir = str(data['backupDir']) + + # SECURITY: Validate backupDir to prevent command injection and path traversal + if ACLManager.commandInjectionCheck(backupDir) == 1: + data = {'remoteTransferStatus': 0, 'error_message': 'Invalid characters in backup directory name', "status": "None", "complete": 0} + return HttpResponse(json.dumps(data)) + + # SECURITY: Ensure backupDir is alphanumeric only + if not re.match(r'^[a-zA-Z0-9_-]+$', backupDir): + data = {'remoteTransferStatus': 0, 'error_message': 'Backup directory name must be alphanumeric', "status": "None", "complete": 0} + return HttpResponse(json.dumps(data)) + + # SECURITY: Prevent path traversal + if '..' 
in backupDir or '/' in backupDir: + data = {'remoteTransferStatus': 0, 'error_message': 'Invalid backup directory path', "status": "None", "complete": 0} + return HttpResponse(json.dumps(data)) # admin = Administrator.objects.get(userName=username) backupLogPath = "/home/backup/transfer-" + backupDir + "/" + "backup_log" + removalPath = "/home/backup/transfer-" + backupDir - removalPath = "/home/backup/transfer-" + str(backupDir) + # SECURITY: Verify the backup directory exists before operating on it + if not os.path.exists(removalPath): + data = {'remoteTransferStatus': 0, 'error_message': 'Backup directory does not exist', "status": "None", "complete": 0} + return HttpResponse(json.dumps(data)) time.sleep(3) - command = "sudo cat " + backupLogPath + command = "sudo cat " + shlex.quote(backupLogPath) status = ProcessUtilities.outputExecutioner(command) @@ -1393,14 +1460,14 @@ class BackupManager: if status.find("completed[success]") > -1: - command = "rm -rf " + removalPath + command = "rm -rf " + shlex.quote(removalPath) ProcessUtilities.executioner(command) data_ret = {'remoteTransferStatus': 1, 'error_message': "None", "status": status, "complete": 1} json_data = json.dumps(data_ret) return HttpResponse(json_data) elif status.find("[5010]") > -1: - command = "sudo rm -rf " + removalPath + command = "sudo rm -rf " + shlex.quote(removalPath) ProcessUtilities.executioner(command) data = {'remoteTransferStatus': 0, 'error_message': status, "status": "None", "complete": 0} diff --git a/filemanager/views.py b/filemanager/views.py index 864ecc993..d7749ab64 100644 --- a/filemanager/views.py +++ b/filemanager/views.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +import os from django.shortcuts import render,redirect from loginSystem.models import Administrator from loginSystem.views import loadLoginPage @@ -326,6 +327,23 @@ def downloadFile(request): if fileToDownload.find('..') > -1 or fileToDownload.find(homePath) == -1: return HttpResponse("Unauthorized access.") + # SECURITY: Check for symlink attacks - resolve the real path and verify it stays within homePath + try: + realPath = os.path.realpath(fileToDownload) + + # Verify the resolved path is still within the user's home directory + if not realPath.startswith(homePath + '/') and realPath != homePath: + logging.CyberCPLogFileWriter.writeToFile( + f"Symlink attack blocked: {fileToDownload} -> {realPath} (outside {homePath})") + return HttpResponse("Unauthorized access: Symlink points outside allowed directory.") + + # Verify it's a regular file + if not os.path.isfile(realPath): + return HttpResponse("Unauthorized access: Not a valid file.") + + except OSError as e: + return HttpResponse("Unauthorized access: Cannot verify file path.") + response = HttpResponse(content_type='application/force-download') response['Content-Disposition'] = 'attachment; filename=%s' % (fileToDownload.split('/')[-1]) response['X-LiteSpeed-Location'] = '%s' % (fileToDownload) @@ -351,6 +369,37 @@ def RootDownloadFile(request): else: return ACLManager.loadError() + # SECURITY: Prevent path traversal attacks + if fileToDownload.find('..') > -1: + return HttpResponse("Unauthorized access: Path traversal detected.") + + # SECURITY: Check for symlink attacks - resolve the real path and verify it's safe + try: + # Get the real path (resolves symlinks) + realPath = os.path.realpath(fileToDownload) + + # SECURITY: Prevent access to sensitive system files + sensitive_paths = ['/etc/shadow', '/etc/passwd', '/etc/sudoers', '/root/.ssh', + '/var/log', '/proc', '/sys', '/dev'] 
+ for sensitive in sensitive_paths: + if realPath.startswith(sensitive): + return HttpResponse("Unauthorized access: Access to system files denied.") + + # SECURITY: Verify the file exists and is a regular file (not a directory or device) + if not os.path.isfile(realPath): + return HttpResponse("Unauthorized access: Not a valid file.") + + # SECURITY: Check if the original path differs from real path (symlink detection) + # Allow the download only if the real path is within allowed directories + # For admin, we'll be more permissive but still block sensitive system files + if fileToDownload != realPath: + # This is a symlink - log it and verify destination is safe + logging.CyberCPLogFileWriter.writeToFile( + f"Symlink download detected: {fileToDownload} -> {realPath}") + + except OSError as e: + return HttpResponse("Unauthorized access: Cannot verify file path.") + response = HttpResponse(content_type='application/force-download') response['Content-Disposition'] = 'attachment; filename=%s' % (fileToDownload.split('/')[-1]) response['X-LiteSpeed-Location'] = '%s' % (fileToDownload) diff --git a/plogical/remoteBackup.py b/plogical/remoteBackup.py index 5d8f2decc..fda616158 100644 --- a/plogical/remoteBackup.py +++ b/plogical/remoteBackup.py @@ -1,5 +1,6 @@ from plogical import CyberCPLogFileWriter as logging import os +import re import requests import json import time @@ -9,6 +10,7 @@ import shlex from multiprocessing import Process from plogical.backupSchedule import backupSchedule from shutil import rmtree +from plogical.acl import ACLManager class remoteBackup: @@ -216,16 +218,42 @@ class remoteBackup: @staticmethod - def sendBackup(completedPathToSend, IPAddress, folderNumber,writeToFile): + def sendBackup(completedPathToSend, IPAddress, folderNumber, writeToFile): try: ## complete path is a path to the file need to send - command = 'sudo rsync -avz -e "ssh -i /root/.ssh/cyberpanel -o StrictHostKeyChecking=no" ' + completedPathToSend + ' root@' + IPAddress + ':/home/backup/transfer-'+folderNumber + # SECURITY: Validate IPAddress to prevent command injection + if ACLManager.commandInjectionCheck(IPAddress) == 1: + logging.CyberCPLogFileWriter.writeToFile("Invalid IP address - command injection attempt detected [sendBackup]") + return + + # SECURITY: Validate IPAddress format (IPv4 or hostname) + ip_pattern = r'^[a-zA-Z0-9][a-zA-Z0-9.-]*[a-zA-Z0-9]$|^[a-zA-Z0-9]$' + if not re.match(ip_pattern, IPAddress): + logging.CyberCPLogFileWriter.writeToFile("Invalid IP address format [sendBackup]") + return + + # SECURITY: Validate folderNumber is alphanumeric + if ACLManager.commandInjectionCheck(str(folderNumber)) == 1: + logging.CyberCPLogFileWriter.writeToFile("Invalid folder number - command injection attempt detected [sendBackup]") + return + + if not re.match(r'^[a-zA-Z0-9_-]+$', str(folderNumber)): + logging.CyberCPLogFileWriter.writeToFile("Invalid folder number format [sendBackup]") + return + + # SECURITY: Validate completedPathToSend - must be under /home/backup + if '..' 
in completedPathToSend or not completedPathToSend.startswith('/home/backup/'): + logging.CyberCPLogFileWriter.writeToFile("Invalid backup path - path traversal attempt detected [sendBackup]") + return + + # SECURITY: Use shlex.quote for all user-controllable parameters + command = 'sudo rsync -avz -e "ssh -i /root/.ssh/cyberpanel -o StrictHostKeyChecking=no" ' + shlex.quote(completedPathToSend) + ' root@' + shlex.quote(IPAddress) + ':/home/backup/transfer-' + shlex.quote(str(folderNumber)) subprocess.call(shlex.split(command), stdout=writeToFile) os.remove(completedPathToSend) except BaseException as msg: - logging.CyberCPLogFileWriter.writeToFile(str(msg) + " [startBackup]") + logging.CyberCPLogFileWriter.writeToFile(str(msg) + " [sendBackup]") @staticmethod def backupProcess(ipAddress, dir, backupLogPath,folderNumber, accountsToTransfer):
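As a side note on the shlex.quote changes in sendBackup above, a minimal sketch of
the effect (the injected value and paths are made up for illustration; the real
handler rejects such input via commandInjectionCheck before any command is built,
so quoting acts as defense in depth):

    import shlex

    hostile_ip = '203.0.113.7; rm -rf /'      # hypothetical injected value
    path = '/home/backup/accounts.tar.gz'     # hypothetical backup archive

    command = ('rsync -avz ' + shlex.quote(path)
               + ' root@' + shlex.quote(hostile_ip) + ':/home/backup/transfer-1')

    # shlex.quote wraps the hostile value in single quotes, so shlex.split keeps
    # the whole host string inside one argument instead of a second command:
    print(shlex.split(command))
    # ['rsync', '-avz', '/home/backup/accounts.tar.gz',
    #  'root@203.0.113.7; rm -rf /:/home/backup/transfer-1']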