Merge pull request #1746 from master3395/v2.5.5-dev

V2.5.5 dev
This commit is contained in:
Master3395
2026-03-27 21:14:23 +01:00
committed by GitHub
25 changed files with 1382 additions and 144 deletions

View File

@@ -47,9 +47,15 @@ fi
### OS Detection
Server_OS=""
Server_OS_Version=""
if grep -q -E "CentOS Linux 7|CentOS Linux 8" /etc/os-release ; then
if grep -q -E "CentOS Linux 7|CentOS Linux 8|CentOS Stream" /etc/os-release ; then
Server_OS="CentOS"
elif grep -q -E "AlmaLinux-8|AlmaLinux-9|AlmaLinux-10" /etc/os-release ; then
elif grep -q "Red Hat Enterprise Linux" /etc/os-release ; then
Server_OS="RedHat"
elif grep -q "AlmaLinux-8" /etc/os-release ; then
Server_OS="AlmaLinux"
elif grep -q "AlmaLinux-9" /etc/os-release ; then
Server_OS="AlmaLinux"
elif grep -q "AlmaLinux-10" /etc/os-release ; then
Server_OS="AlmaLinux"
elif grep -q -E "CloudLinux 7|CloudLinux 8" /etc/os-release ; then
Server_OS="CloudLinux"
@@ -57,11 +63,13 @@ elif grep -q -E "Rocky Linux" /etc/os-release ; then
Server_OS="RockyLinux"
elif grep -q -E "Ubuntu 18.04|Ubuntu 20.04|Ubuntu 20.10|Ubuntu 22.04|Ubuntu 24.04" /etc/os-release ; then
Server_OS="Ubuntu"
elif grep -q -E "Debian GNU/Linux 11|Debian GNU/Linux 12|Debian GNU/Linux 13" /etc/os-release ; then
Server_OS="Debian"
elif grep -q -E "openEuler 20.03|openEuler 22.03" /etc/os-release ; then
Server_OS="openEuler"
else
echo -e "Unable to detect your system..."
echo -e "\nCyberPanel is supported on x86_64 based Ubuntu 18.04, Ubuntu 20.04, Ubuntu 20.10, Ubuntu 22.04, Ubuntu 24.04, CentOS 7, CentOS 8, AlmaLinux 8, AlmaLinux 9, AlmaLinux 10, RockyLinux 8, CloudLinux 7, CloudLinux 8, openEuler 20.03, openEuler 22.03...\n"
echo -e "\nCyberPanel is supported on x86_64 based Ubuntu 18.04, Ubuntu 20.04, Ubuntu 20.10, Ubuntu 22.04, Ubuntu 24.04, Ubuntu 24.04.3, Debian 11, Debian 12, Debian 13, CentOS 7, CentOS 8, CentOS 9, RHEL 8, RHEL 9, AlmaLinux 8, AlmaLinux 9, AlmaLinux 10, RockyLinux 8, RockyLinux 9, CloudLinux 7, CloudLinux 8, openEuler 20.03, openEuler 22.03...\n"
exit
fi
@@ -69,10 +77,13 @@ Server_OS_Version=$(grep VERSION_ID /etc/os-release | awk -F[=,] '{print $2}' |
echo -e "System: $Server_OS $Server_OS_Version detected...\n"
if [[ $Server_OS = "CloudLinux" ]] || [[ "$Server_OS" = "AlmaLinux" ]] || [[ "$Server_OS" = "RockyLinux" ]] ; then
if [[ $Server_OS = "CloudLinux" ]] || [[ "$Server_OS" = "AlmaLinux" ]] || [[ "$Server_OS" = "RockyLinux" ]] || [[ "$Server_OS" = "RedHat" ]] ; then
Server_OS="CentOS"
#CloudLinux gives version id like 7.8, 7.9, so cut it to show first number only
#treat CloudLinux, Rocky and Alma as CentOS
#treat CloudLinux, Rocky, Alma and RedHat as CentOS
elif [[ "$Server_OS" = "Debian" ]] ; then
Server_OS="Ubuntu"
#Treat Debian as Ubuntu for package management (both use apt-get)
fi
if [[ $Server_OS = "CentOS" ]] && [[ "$Server_OS_Version" = "7" ]] ; then
@@ -114,6 +125,29 @@ elif [[ $Server_OS = "CentOS" ]] && [[ "$Server_OS_Version" = "8" ]] ; then
freshclam -v
elif [[ $Server_OS = "CentOS" ]] && [[ "$Server_OS_Version" = "9" ]] ; then
setenforce 0
dnf install -y perl dnf-utils perl-CPAN
dnf --enablerepo=crb install -y perl-IO-stringy
dnf install -y gcc cpp perl bzip2 zip make patch automake rpm-build perl-Archive-Zip perl-Filesys-Df perl-OLE-Storage_Lite perl-Net-CIDR perl-DBI perl-MIME-tools perl-DBD-SQLite binutils glibc-devel perl-Filesys-Df zlib unzip zlib-devel wget mlocate clamav clamav-update "perl(DBD::mysql)"
# Install unrar for AlmaLinux 9 (using EPEL)
dnf install -y unrar
export PERL_MM_USE_DEFAULT=1
curl -L https://cpanmin.us | perl - App::cpanminus
perl -MCPAN -e 'install Encoding::FixLatin'
perl -MCPAN -e 'install Digest::SHA1'
perl -MCPAN -e 'install Geo::IP'
perl -MCPAN -e 'install Razor2::Client::Agent'
perl -MCPAN -e 'install Sys::Hostname::Long'
perl -MCPAN -e 'install Sys::SigAction'
perl -MCPAN -e 'install Net::Patricia'
freshclam -v
elif [ "$CLNVERSION" = "ID=\"cloudlinux\"" ]; then
setenforce 0

View File

@@ -4,9 +4,15 @@
### OS Detection
Server_OS=""
Server_OS_Version=""
if grep -q -E "CentOS Linux 7|CentOS Linux 8" /etc/os-release ; then
if grep -q -E "CentOS Linux 7|CentOS Linux 8|CentOS Stream" /etc/os-release ; then
Server_OS="CentOS"
elif grep -q -E "AlmaLinux-8|AlmaLinux-9|AlmaLinux-10" /etc/os-release ; then
elif grep -q "Red Hat Enterprise Linux" /etc/os-release ; then
Server_OS="RedHat"
elif grep -q "AlmaLinux-8" /etc/os-release ; then
Server_OS="AlmaLinux"
elif grep -q "AlmaLinux-9" /etc/os-release ; then
Server_OS="AlmaLinux"
elif grep -q "AlmaLinux-10" /etc/os-release ; then
Server_OS="AlmaLinux"
elif grep -q -E "CloudLinux 7|CloudLinux 8" /etc/os-release ; then
Server_OS="CloudLinux"
@@ -14,11 +20,13 @@ elif grep -q -E "Rocky Linux" /etc/os-release ; then
Server_OS="RockyLinux"
elif grep -q -E "Ubuntu 18.04|Ubuntu 20.04|Ubuntu 20.10|Ubuntu 22.04|Ubuntu 24.04" /etc/os-release ; then
Server_OS="Ubuntu"
elif grep -q -E "Debian GNU/Linux 11|Debian GNU/Linux 12|Debian GNU/Linux 13" /etc/os-release ; then
Server_OS="Debian"
elif grep -q -E "openEuler 20.03|openEuler 22.03" /etc/os-release ; then
Server_OS="openEuler"
else
echo -e "Unable to detect your system..."
echo -e "\nCyberPanel is supported on x86_64 based Ubuntu 18.04, Ubuntu 20.04, Ubuntu 20.10, Ubuntu 22.04, Ubuntu 24.04, CentOS 7, CentOS 8, AlmaLinux 8, AlmaLinux 9, AlmaLinux 10, RockyLinux 8, CloudLinux 7, CloudLinux 8, openEuler 20.03, openEuler 22.03...\n"
echo -e "\nCyberPanel is supported on x86_64 based Ubuntu 18.04, Ubuntu 20.04, Ubuntu 20.10, Ubuntu 22.04, Ubuntu 24.04, Ubuntu 24.04.3, Debian 11, Debian 12, Debian 13, CentOS 7, CentOS 8, CentOS 9, RHEL 8, RHEL 9, AlmaLinux 8, AlmaLinux 9, AlmaLinux 10, RockyLinux 8, RockyLinux 9, CloudLinux 7, CloudLinux 8, openEuler 20.03, openEuler 22.03...\n"
exit
fi
@@ -26,10 +34,13 @@ Server_OS_Version=$(grep VERSION_ID /etc/os-release | awk -F[=,] '{print $2}' |
echo -e "System: $Server_OS $Server_OS_Version detected...\n"
if [[ $Server_OS = "CloudLinux" ]] || [[ "$Server_OS" = "AlmaLinux" ]] || [[ "$Server_OS" = "RockyLinux" ]] ; then
if [[ $Server_OS = "CloudLinux" ]] || [[ "$Server_OS" = "AlmaLinux" ]] || [[ "$Server_OS" = "RockyLinux" ]] || [[ "$Server_OS" = "RedHat" ]] ; then
Server_OS="CentOS"
#CloudLinux gives version id like 7.8, 7.9, so cut it to show first number only
#treat CloudLinux, Rocky and Alma as CentOS
#treat CloudLinux, Rocky, Alma and RedHat as CentOS
elif [[ "$Server_OS" = "Debian" ]] ; then
Server_OS="Ubuntu"
#Treat Debian as Ubuntu for package management (both use apt-get)
fi
systemctl stop mailscanner

View File

@@ -31,6 +31,11 @@ class DatabaseManager:
return proc.render()
def phpMyAdmin(self, request = None, userID = None):
    # Render the phpMyAdmin landing page for the Databases section.
    #
    # Before rendering, make a best-effort repair of the phpMyAdmin
    # single-sign-on bridge file (phpmyadminsignin.php) so the page does
    # not 404 after a partial install (see plogical.phpmyadmin_utils).
    try:
        from plogical.phpmyadmin_utils import ensure_phpmyadmin_signin_bridge
        ensure_phpmyadmin_signin_bridge()
    except BaseException:
        # Repair is strictly optional; never block page rendering on it.
        pass
    template = 'databases/phpMyAdmin.html'
    # httpProc applies the ACL check ('createDatabase') and renders the template.
    proc = httpProc(request, template, None, 'createDatabase')
    return proc.render()

View File

@@ -257,6 +257,11 @@ def generateAccess(request):
@csrf_exempt
def fetchDetailsPHPMYAdmin(request):
try:
try:
from plogical.phpmyadmin_utils import ensure_phpmyadmin_signin_bridge
ensure_phpmyadmin_signin_bridge()
except BaseException:
pass
userID = request.session['userID']
admin = Administrator.objects.get(id=userID)

View File

@@ -118,8 +118,8 @@ rm -f "$SCRIPT_PATH" "$TEMP_DIR/cyberpanel.sh" "$TEMP_DIR/install.tar.gz"
# Ensure temp directory exists and is writable
mkdir -p "$TEMP_DIR" 2>/dev/null || true
# For v2.5.5-dev, try to get the cyberpanel.sh from the branch
if [ "$BRANCH_NAME" = "v2.5.5-dev" ] || [ "$BRANCH_NAME" = "stable" ]; then
# Prefer master3395/cyberpanel raw cyberpanel.sh for known branches (includes AlmaLinux 10 etc.)
if [ "$BRANCH_NAME" = "v2.5.5-dev" ] || [ "$BRANCH_NAME" = "stable" ] || [ "$BRANCH_NAME" = "v2.4.5" ]; then
# Try to download from the branch-specific URL
if curl --silent -o "$SCRIPT_PATH" "https://raw.githubusercontent.com/master3395/cyberpanel/$BRANCH_NAME/cyberpanel.sh" 2>/dev/null; then
if [ -f "$SCRIPT_PATH" ] && [ -s "$SCRIPT_PATH" ]; then

View File

@@ -236,6 +236,11 @@ class preFlightsChecks:
os_info = self.detect_os_info()
return os_info['name'] == 'almalinux' and os_info['major_version'] == 9
def is_almalinux10(self):
    """Check if running on AlmaLinux 10 (GH usmannasir/cyberpanel#1736)"""
    # detect_os_info() parses OS identification; 'major_version' is compared
    # as an integer, matching the sibling is_almalinux9() check.
    os_info = self.detect_os_info()
    return os_info['name'] == 'almalinux' and os_info['major_version'] == 10
def is_ubuntu(self):
"""Check if running on Ubuntu"""
os_info = self.detect_os_info()
@@ -651,6 +656,35 @@ class preFlightsChecks:
except Exception as e:
self.stdOut(f"Error applying AlmaLinux 9 MariaDB fixes: {str(e)}", 0)
def fix_almalinux10_mariadb(self):
    """EPEL/CRB + MariaDB official repo for AlmaLinux 10 (installer prereqs, GH #1736)."""
    # No-op on every other distro/version.
    if not self.is_almalinux10():
        return
    try:
        self.stdOut("Applying AlmaLinux 10 MariaDB / repo fixes...", 1)
        # Base repos/tools; each shell command tolerates failure via
        # '|| true' so a missing repo name (crb vs powertools) does not
        # abort the installer.
        for cmd, desc in (
            ("dnf install -y epel-release", "EPEL"),
            ("dnf config-manager --set-enabled crb 2>/dev/null || dnf config-manager --set-enabled powertools 2>/dev/null || true", "CRB/PowerTools"),
            ("dnf install -y htop 2>/dev/null || true", "htop"),
            ("dnf install -y libxcrypt-compat 2>/dev/null || true", "libxcrypt-compat for lscpd"),
        ):
            self.call(cmd, self.distro, desc, desc, 1, 0, os.EX_OSERR)
        # Disable/remove MaxScale repo entries before adding the MariaDB repo.
        for cmd, desc in (
            ("dnf config-manager --disable mariadb-maxscale 2>/dev/null || true", "disable maxscale"),
            ("rm -f /etc/yum.repos.d/mariadb-maxscale.repo /etc/yum.repos.d/mariadb-maxscale.repo.rpmnew 2>/dev/null || true", "remove maxscale repo files"),
        ):
            self.call(cmd, self.distro, desc, desc, 1, 0, os.EX_OSERR)
        self.stdOut("Setting up MariaDB official repository (11.8 LTS, EL10)...", 1)
        # NOTE(review): pipes a downloaded vendor script straight into bash;
        # confirm whether the checksum-verified variant of mariadb_repo_setup
        # should be preferred here.
        cmd = "curl -sS https://downloads.mariadb.com/MariaDB/mariadb_repo_setup | bash -s -- --mariadb-server-version='11.8'"
        self.call(cmd, self.distro, cmd, cmd, 1, 0, os.EX_OSERR)
        # mariadb_repo_setup re-adds the maxscale repo; disable it again.
        self.call("dnf config-manager --disable mariadb-maxscale 2>/dev/null || true", self.distro, "disable maxscale after setup", "disable maxscale after setup", 1, 0, os.EX_OSERR)
        self.stdOut("Installing MariaDB packages from MariaDB.org repo...", 1)
        pkgs = "MariaDB-server MariaDB-client MariaDB-backup MariaDB-devel"
        # --nobest lets dnf resolve around dependency conflicts on EL10.
        self.call(f"dnf install -y --nobest {pkgs}", self.distro, "MariaDB packages", "MariaDB packages", 1, 0, os.EX_OSERR)
        self.stdOut("AlmaLinux 10 MariaDB fixes applied successfully", 1)
    except Exception as e:
        # Log and continue rather than aborting the installer.
        self.stdOut(f"Error applying AlmaLinux 10 MariaDB fixes: {str(e)}", 0)
def install_package_with_fallbacks(self, package_name, dev_package_name=None):
"""Install package with comprehensive fallback methods for AlmaLinux 9.6+"""
try:
@@ -826,7 +860,11 @@ class preFlightsChecks:
universal_fixes = UniversalOSFixes()
if universal_fixes.run_comprehensive_setup():
self.stdOut("Universal OS fixes applied successfully", 1)
return True
os_i = self.detect_os_info()
if os_i.get('name') == 'almalinux' and os_i.get('major_version') == 10:
self.stdOut("AlmaLinux 10: running legacy RHEL integration steps after universal fixes...", 1)
else:
return True
else:
self.stdOut("Universal OS fixes failed, falling back to legacy fixes...", 1)
except ImportError:
@@ -842,6 +880,8 @@ class preFlightsChecks:
for fix in fixes_needed:
if fix == 'mariadb' and self.is_almalinux9():
self.fix_almalinux9_mariadb()
elif fix == 'mariadb' and self.is_almalinux10():
self.fix_almalinux10_mariadb()
elif fix == 'ubuntu_specific' and self.is_ubuntu():
self.fix_ubuntu_specific()
elif fix == 'debian_specific' and self.is_debian():
@@ -889,6 +929,11 @@ class preFlightsChecks:
if any(distro in content for distro in ['red hat', 'almalinux', 'rocky', 'cloudlinux', 'centos']):
return 'rhel9'
# EL10: use rhel9 OLS/custom binaries until el10-specific builds ship (GLIBC-compatible)
if 'version="10.' in content or 'version_id="10.' in content or 'version_id="10"' in content:
if any(distro in content for distro in ['red hat', 'almalinux', 'rocky', 'cloudlinux', 'centos']):
return 'rhel9'
# Default to rhel8 if can't detect (safer default - rhel9 binaries may require GLIBC 2.35)
self.stdOut("WARNING: Could not detect platform, defaulting to rhel8", 1)
return 'rhel8'
@@ -1711,6 +1756,11 @@ module cyberpanel_ols {
if result.returncode != 0:
logging.InstallLog.writeToFile(f"Failed to setup MariaDB repository: {result.stderr}")
return False
try:
import install_utils
install_utils.strip_mariadb_maxscale_apt_repos()
except Exception:
pass
command = 'DEBIAN_FRONTEND=noninteractive apt-get update -y'
result = subprocess.run(command, shell=True, capture_output=True, universal_newlines=True)
@@ -3936,7 +3986,7 @@ $cfg['Servers'][$i]['LogoutURL'] = 'phpmyadminsignin.php?logout';
writeToFile.close()
os.mkdir('/usr/local/CyberCP/public/phpmyadmin/tmp')
os.makedirs('/usr/local/CyberCP/public/phpmyadmin/tmp', exist_ok=True)
command = 'chown -R lscpd:lscpd /usr/local/CyberCP/public/phpmyadmin'
preFlightsChecks.call(command, self.distro, '[chown -R lscpd:lscpd /usr/local/CyberCP/public/phpmyadmin]',
@@ -4946,6 +4996,21 @@ user_query = SELECT email as user, password, 'vmail' as uid, 'vmail' as gid, '/h
result = open('/etc/lsb-release', 'r').read()
if result.find('22.04') > -1 or result.find('24.04') > -1:
lscpdSelection = 'lscpd.0.4.0'
# AlmaLinux/RHEL 9 and 10: lscpd.0.4.0 (el9 binary on el10; origin/v2.4.5)
try:
cl_al_ver = FetchCloudLinuxAlmaVersionVersion()
if cl_al_ver in ('al-93', 'al-100'):
lscpdSelection = 'lscpd.0.4.0'
except Exception:
pass
if os.path.exists('/etc/os-release'):
with open('/etc/os-release', 'r') as f:
osrel = f.read()
if (('VERSION_ID="9"' in osrel or 'VERSION_ID="10"' in osrel or
'VERSION_ID="9.' in osrel or 'VERSION_ID="10.' in osrel) and
('AlmaLinux' in osrel or 'Rocky' in osrel or 'Red Hat' in osrel or
'CentOS' in osrel)):
lscpdSelection = 'lscpd.0.4.0'
else:
lscpdSelection = 'lscpd.aarch64'
@@ -4956,6 +5021,12 @@ user_query = SELECT email as user, password, 'vmail' as uid, 'vmail' as gid, '/h
result = open('/etc/lsb-release', 'r').read()
if result.find('22.04') > -1 or result.find('24.04') > -1:
lscpdSelection = 'lscpd.0.4.0'
try:
cl_al_ver = FetchCloudLinuxAlmaVersionVersion()
if cl_al_ver in ('al-93', 'al-100'):
lscpdSelection = 'lscpd.0.4.0'
except Exception:
pass
command = f'cp -f /usr/local/CyberCP/{lscpdSelection} /usr/local/lscp/bin/{lscpdSelection}'
@@ -5352,8 +5423,18 @@ user_query = SELECT email as user, password, 'vmail' as uid, 'vmail' as gid, '/h
##
command = 'systemctl daemon-reload'
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
command = 'systemctl start lscpd'
# preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
ret = preFlightsChecks.call(command, self.distro, command, command, 0, 0, os.EX_OSERR)
if ret != 0:
preFlightsChecks.stdOut("LSCPD start failed, reloading systemd and retrying...")
logging.InstallLog.writeToFile("LSCPD first start failed, retrying after daemon-reload")
preFlightsChecks.call('systemctl daemon-reload', self.distro, 'daemon-reload', 'daemon-reload', 1, 0, os.EX_OSERR)
ret = preFlightsChecks.call('systemctl start lscpd', self.distro, 'systemctl start lscpd', 'systemctl start lscpd', 0, 0, os.EX_OSERR)
if ret != 0:
preFlightsChecks.stdOut("[WARNING] LSCPD may not have started. Run: systemctl status lscpd")
logging.InstallLog.writeToFile("[WARNING] LSCPD start failed after retry - run systemctl status lscpd")
preFlightsChecks.stdOut("LSCPD Daemon Set!")

View File

@@ -38,6 +38,7 @@ def get_Ubuntu_code_name():
return "xenial"
# Using shared function from install_utils
FetchCloudLinuxAlmaVersionVersion = install_utils.FetchCloudLinuxAlmaVersionVersion
@@ -996,9 +997,11 @@ deb [arch=amd64,arm64,ppc64el,s390x signed-by=/usr/share/keyrings/mariadb-keyrin
install_utils.writeToFile("Manual MariaDB repository configuration completed.")
# GH #1740: strip broken MaxScale apt entries after mariadb_repo_setup (noble/jammy+)
if get_Ubuntu_release() > 21.00:
install_utils.strip_mariadb_maxscale_apt_repos()
command = 'DEBIAN_FRONTEND=noninteractive apt-get update -y'
command = 'DEBIAN_FRONTEND=noninteractive apt-get update -y'
install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR, True)

View File

@@ -5,6 +5,7 @@ This module contains shared functions used by both install.py and installCyberPa
"""
import os
import glob
import sys
import time
import logging
@@ -676,6 +677,61 @@ def generate_random_string(length=32, include_special=False):
return ''.join(secrets.choice(alphabet) for _ in range(length))
def strip_mariadb_maxscale_apt_repos():
    """
    MariaDB mariadb_repo_setup adds MaxScale apt repo; Ubuntu noble has no Release (GH usmannasir/cyberpanel#1740).
    """
    sources_dir = '/etc/apt/sources.list.d'
    try:
        if not os.path.isdir(sources_dir):
            return
        # 1) Delete dedicated MaxScale repo files outright.
        doomed = []
        for pattern in (
            'mariadb-maxscale*.list', 'mariadb-maxscale*.sources',
            '*maxscale*.list', '*maxscale*.sources',
        ):
            doomed.extend(glob.glob(os.path.join(sources_dir, pattern)))
        for path in doomed:
            try:
                os.remove(path)
            except OSError:
                pass
        # 2) Scrub MaxScale lines out of combined mariadb*.list files,
        #    rewriting a file only when something was actually dropped.
        for path in glob.glob(os.path.join(sources_dir, 'mariadb*.list')):
            try:
                with open(path, 'r', encoding='utf-8', errors='replace') as handle:
                    original = handle.readlines()
                cleaned = []
                for line in original:
                    if 'maxscale' in line.lower():
                        continue
                    if 'dlm.mariadb.com/repo/maxscale' in line:
                        continue
                    cleaned.append(line)
                if cleaned != original:
                    with open(path, 'w', encoding='utf-8') as handle:
                        handle.writelines(cleaned)
            except OSError:
                pass
        # 3) Scrub MaxScale stanzas out of deb822-style mariadb*.sources
        #    files (stanzas are blank-line separated).
        for path in glob.glob(os.path.join(sources_dir, 'mariadb*.sources')):
            try:
                with open(path, 'r', encoding='utf-8', errors='replace') as handle:
                    text = handle.read()
                if 'maxscale' not in text.lower() and 'dlm.mariadb.com/repo/maxscale' not in text:
                    continue
                surviving = [
                    stanza for stanza in text.split('\n\n')
                    if 'maxscale' not in stanza.lower()
                    and 'dlm.mariadb.com/repo/maxscale' not in stanza
                ]
                rewritten = '\n\n'.join(surviving)
                if rewritten.strip() != text.strip():
                    with open(path, 'w', encoding='utf-8') as handle:
                        handle.write(rewritten)
            except OSError:
                pass
    except Exception:
        # Best-effort cleanup; never let repo scrubbing break the installer.
        pass
def writeToFile(message):
"""
Write a message to the installation log file

View File

@@ -487,6 +487,12 @@ class UniversalOSFixes:
]
subprocess.run(' '.join(cmd), shell=True, check=True)
if os_id in ['ubuntu', 'debian']:
try:
import install_utils
install_utils.strip_mariadb_maxscale_apt_repos()
except Exception:
pass
self.logger.info("MariaDB repository setup completed")
return True

View File

@@ -68,6 +68,8 @@ rm -rf requirements.txt
wget -O requirements.txt https://raw.githubusercontent.com/usmannasir/cyberpanel/1.8.0/requirments.txt
# Install packages with robust error handling to prevent broken pipe errors
safe_pip_install "pip" "requirements.txt" "--ignore-installed"
# python-dotenv for Django .env loading (upstream f3437739; critical on some AlmaLinux 8 venvs)
pip install python-dotenv 2>/dev/null || echo "⚠️ python-dotenv install skipped or failed"
fi
if [[ $DEV == "ON" ]] ; then
@@ -100,6 +102,7 @@ EOF
fi
safe_pip_install "pip3.6" "requirements.txt" "--ignore-installed"
pip3.6 install python-dotenv 2>/dev/null || echo "⚠️ python-dotenv (pip3.6) install skipped or failed"
fi
if [ -f requirements.txt ] && [ -d cyberpanel ] ; then

View File

@@ -2,6 +2,17 @@
# install/venvsetup part 4 after_install
after_install() {
# Robust lscpd restart (origin/v2.4.5 e49ed16f; EL9/10)
_restart_lscpd_safe() {
systemctl daemon-reload 2>/dev/null || true
systemctl restart lscpd 2>/dev/null || true
if ! systemctl is-active --quiet lscpd 2>/dev/null; then
systemctl daemon-reload
systemctl restart lscpd
fi
systemctl restart fastapi_ssh_server 2>/dev/null || true
}
if [ ! -d "/var/lib/php" ]; then
mkdir /var/lib/php
fi
@@ -50,7 +61,8 @@ EOF
fi
safe_pip_install "pip3.6" "requirements.txt" "--ignore-installed"
systemctl restart lscpd
pip3.6 install python-dotenv 2>/dev/null || echo "⚠️ python-dotenv (after_install) skipped or failed"
_restart_lscpd_safe
fi
for version in $(ls /usr/local/lsws | grep lsphp);
@@ -112,7 +124,7 @@ ELAPSED="$(($SECONDS / 3600)) hrs $((($SECONDS / 60) % 60)) min $(($SECONDS % 60
MYSQLPASSWD=$(cat /etc/cyberpanel/mysqlPassword)
echo "$ADMIN_PASS" > /etc/cyberpanel/adminPass
/usr/local/CyberPanel/bin/python2 /usr/local/CyberCP/plogical/adminPass.py --password $ADMIN_PASS
systemctl restart lscpd
_restart_lscpd_safe
systemctl restart lsws
echo "/usr/local/CyberPanel/bin/python2 /usr/local/CyberCP/plogical/adminPass.py --password \"\$@\"" > /usr/bin/adminPass
echo "systemctl restart lscpd" >> /usr/bin/adminPass

View File

@@ -308,9 +308,8 @@ extprocessor docker{port} {{
logging.writeToFile("Context already exists, skipping...")
return True
# Add proxy context with proper headers for n8n
# NOTE: Do NOT include "RequestHeader set Origin" - OpenLiteSpeed cannot override
# browser Origin headers, which is why NODE_ENV=development is required
# Add proxy context with proper headers for n8n (OLS adds X-Forwarded-*; Origin set for n8n)
# NOTE: OpenLiteSpeed cannot override browser Origin headers; NODE_ENV=development may be required
proxy_context = f'''
# N8N Proxy Configuration
@@ -321,10 +320,7 @@ context / {{
websocket 1
extraHeaders <<<END_extraHeaders
RequestHeader set X-Forwarded-For $ip
RequestHeader set X-Forwarded-Proto https
RequestHeader set X-Forwarded-Host "{domain}"
RequestHeader set Host "{domain}"
RequestHeader set Origin "https://{domain}"
END_extraHeaders
}}
'''
@@ -1421,7 +1417,8 @@ services:
'N8N_ENFORCE_SETTINGS_FILE_PERMISSIONS': 'true',
'DB_POSTGRESDB_SCHEMA': 'public',
'N8N_PROTOCOL': 'https',
'N8N_SECURE_COOKIE': 'true'
'N8N_SECURE_COOKIE': 'true',
'N8N_PROXY_HOPS': '1'
}
}

View File

@@ -12,6 +12,7 @@ from plogical.virtualHostUtilities import virtualHostUtilities
import os
import tarfile
import shutil
import time
from plogical.mailUtilities import mailUtilities
from plogical.processUtilities import ProcessUtilities
from plogical.installUtilities import installUtilities
@@ -105,11 +106,166 @@ class modSec:
return False
@staticmethod
def installModSec():
def isCustomOLSBinaryInstalled():
    """Detect if custom OpenLiteSpeed binary is installed"""
    try:
        OLS_BINARY_PATH = "/usr/local/lsws/bin/openlitespeed"
        # No binary at the stock install path -> nothing custom to detect.
        if not os.path.exists(OLS_BINARY_PATH):
            return False
        # Check for PHPConfig function signature in binary
        # Scans the binary's printable strings for markers specific to the
        # custom build; 10s timeout guards against a hung 'strings' run.
        command = f'strings {OLS_BINARY_PATH}'
        result = subprocess.run(command, shell=True, capture_output=True, text=True, timeout=10)
        if result.returncode == 0:
            # Look for custom binary markers
            return 'set_php_config_value' in result.stdout or 'PHPConfig LSIAPI' in result.stdout
        return False
    except Exception as msg:
        # Any detection failure is treated as "stock binary"; log the reason.
        logging.CyberCPLogFileWriter.writeToFile(f"WARNING: Could not detect OLS binary type: {msg}")
        return False
@staticmethod
def detectBinarySuffix():
    """Detect which binary suffix to use based on OS distribution"""
    # Returns 'rhel' for EL8/EL9-family systems and 'ubuntu' otherwise,
    # including on any detection failure (safe default).
    try:
        # Check if we're on RHEL/CentOS/AlmaLinux 8+ (uses libcrypt.so.2)
        if os.path.exists('/etc/os-release'):
            with open('/etc/os-release', 'r') as f:
                os_release = f.read().lower()
            # AlmaLinux 9+, Rocky 9+, RHEL 9+, CentOS Stream 9+
            # NOTE(review): 'version="9' matches VERSION="9.x ..." lines in
            # os-release; confirm it also covers layouts that only carry
            # VERSION_ID (which would not match this substring).
            if any(x in os_release for x in ['almalinux', 'rocky', 'rhel']) and 'version="9' in os_release:
                return 'rhel'
            elif 'centos stream 9' in os_release:
                return 'rhel'
        # Check CentOS/RHEL path
        if os.path.exists('/etc/redhat-release'):
            data = open('/etc/redhat-release', 'r').read()
            # CentOS/AlmaLinux/Rocky 8+ → rhel suffix
            if 'release 8' in data or 'release 9' in data:
                return 'rhel'
        # Default to ubuntu
        return 'ubuntu'
    except Exception as msg:
        logging.CyberCPLogFileWriter.writeToFile(f"Error detecting OS: {msg}, defaulting to Ubuntu binaries")
        return 'ubuntu'
@staticmethod
def installCompatibleModSecurity():
    """Install ModSecurity compatible with custom OpenLiteSpeed binary"""
    # Downloads a prebuilt ABI-compatible mod_security.so for the detected
    # platform, verifies its SHA-256 checksum, backs up and replaces the
    # installed module, then restarts OpenLiteSpeed.
    # Returns 1 on success and 0 on any failure; progress is appended to
    # modSec.installLogPath with [200]/[404] completion markers.
    try:
        mailUtilities.checkHome()
        with open(modSec.installLogPath, 'w') as f:
            f.write("Installing ModSecurity compatible with custom OpenLiteSpeed binary...\n")
        MODSEC_PATH = "/usr/local/lsws/modules/mod_security.so"
        # Detect OS and select appropriate ModSecurity binary
        binary_suffix = modSec.detectBinarySuffix()
        if binary_suffix == 'rhel':
            MODSEC_URL = "https://cyberpanel.net/mod_security-compatible-rhel.so"
            EXPECTED_SHA256 = "db580afc431fda40d46bdae2249ac74690d9175ff6d8b1843f2837d86f8d602f"
        else:  # ubuntu
            MODSEC_URL = "https://cyberpanel.net/mod_security-compatible-ubuntu.so"
            EXPECTED_SHA256 = "115971fcd44b74bc7c7b097b9cec33ddcfb0fb07bb9b562ec9f4f0691c388a6b"
        # Download to temp location
        tmp_modsec = "/tmp/mod_security_custom.so"
        with open(modSec.installLogPath, 'a') as f:
            f.write(f"Downloading compatible ModSecurity for {binary_suffix}...\n")
        command = f'wget -q --show-progress {MODSEC_URL} -O {tmp_modsec}'
        result = subprocess.call(shlex.split(command))
        if result != 0 or not os.path.exists(tmp_modsec):
            with open(modSec.installLogPath, 'a') as f:
                f.write("ERROR: Failed to download ModSecurity\n")
                f.write("Can not be installed.[404]\n")
            logging.CyberCPLogFileWriter.writeToFile("[Could not download compatible ModSecurity]")
            return 0
        # Verify checksum
        # Pinned SHA-256 check rejects tampered or truncated downloads.
        with open(modSec.installLogPath, 'a') as f:
            f.write("Verifying checksum...\n")
        result = subprocess.run(f'sha256sum {tmp_modsec}', shell=True, capture_output=True, text=True)
        actual_sha256 = result.stdout.split()[0]
        if actual_sha256 != EXPECTED_SHA256:
            with open(modSec.installLogPath, 'a') as f:
                f.write(f"ERROR: Checksum verification failed\n")
                f.write(f" Expected: {EXPECTED_SHA256}\n")
                f.write(f" Got: {actual_sha256}\n")
                f.write("Can not be installed.[404]\n")
            os.remove(tmp_modsec)
            logging.CyberCPLogFileWriter.writeToFile("[ModSecurity checksum verification failed]")
            return 0
        # Backup existing ModSecurity if present
        if os.path.exists(MODSEC_PATH):
            backup_path = f"{MODSEC_PATH}.backup.{int(time.time())}"
            shutil.copy2(MODSEC_PATH, backup_path)
            with open(modSec.installLogPath, 'a') as f:
                f.write(f"Backed up existing ModSecurity to: {backup_path}\n")
        # Stop OpenLiteSpeed
        # Module must not be swapped while the server has it loaded.
        subprocess.run(['/usr/local/lsws/bin/lswsctrl', 'stop'], timeout=30)
        time.sleep(2)
        # Install compatible ModSecurity
        os.makedirs(os.path.dirname(MODSEC_PATH), exist_ok=True)
        shutil.copy2(tmp_modsec, MODSEC_PATH)
        os.chmod(MODSEC_PATH, 0o755)
        os.remove(tmp_modsec)
        # Start OpenLiteSpeed
        subprocess.run(['/usr/local/lsws/bin/lswsctrl', 'start'], timeout=30)
        with open(modSec.installLogPath, 'a') as f:
            f.write("Compatible ModSecurity installed successfully\n")
            f.write("ModSecurity Installed (ABI-compatible version).[200]\n")
        logging.CyberCPLogFileWriter.writeToFile("[Compatible ModSecurity installed successfully]")
        return 1
    except subprocess.TimeoutExpired:
        # lswsctrl stop/start exceeded its 30s timeout.
        with open(modSec.installLogPath, 'a') as f:
            f.write("ERROR: Timeout during OpenLiteSpeed restart\n")
            f.write("Can not be installed.[404]\n")
        logging.CyberCPLogFileWriter.writeToFile("[Timeout during ModSecurity installation]")
        return 0
    except Exception as msg:
        with open(modSec.installLogPath, 'a') as f:
            f.write(f"ERROR: {str(msg)}\n")
            f.write("Can not be installed.[404]\n")
        logging.CyberCPLogFileWriter.writeToFile(str(msg) + "[installCompatibleModSecurity]")
        return 0
@staticmethod
def installModSec():
try:
mailUtilities.checkHome()
# Check if custom OLS binary is installed
if modSec.isCustomOLSBinaryInstalled():
# Install compatible ModSecurity for custom OLS
with open(modSec.installLogPath, 'w') as f:
f.write("Detected custom OpenLiteSpeed binary\n")
f.write("Installing ABI-compatible ModSecurity...\n")
return modSec.installCompatibleModSecurity()
# Stock OLS binary - use package manager as usual
if ProcessUtilities.decideDistro() == ProcessUtilities.centos or ProcessUtilities.decideDistro() == ProcessUtilities.cent8:
command = 'sudo yum install ols-modsecurity -y'
else:

View File

@@ -0,0 +1,44 @@
# -*- coding: utf-8 -*-
"""
Ensure phpMyAdmin single-sign-on bridge files exist under public/phpmyadmin/.
Fixes 404 on /phpmyadmin/phpmyadminsignin.php when the file was lost after a partial install or manual change.
"""
from __future__ import annotations
import os
import shutil
from plogical.CyberCPLogFileWriter import CyberCPLogFileWriter as logging
PMA_DIR = '/usr/local/CyberCP/public/phpmyadmin'
SIGNIN_SRC = '/usr/local/CyberCP/plogical/phpmyadminsignin.php'
SIGNIN_NAME = 'phpmyadminsignin.php'
def ensure_phpmyadmin_signin_bridge() -> bool:
    """
    Copy plogical/phpmyadminsignin.php into the public phpMyAdmin tree if missing,
    ensure tmp/ exists, and fix ownership for lscpd.
    Returns True if the sign-in file is present afterward.
    """
    dst = os.path.join(PMA_DIR, SIGNIN_NAME)
    try:
        # phpMyAdmin itself is not installed -> nothing to repair.
        if not os.path.isdir(PMA_DIR):
            return False
        if not os.path.isfile(SIGNIN_SRC):
            # Source copy is gone too; report whether the public copy exists.
            logging.writeToFile('phpmyadmin_utils: source signin missing at ' + SIGNIN_SRC)
            return os.path.isfile(dst)
        # Re-copy when the file is absent or suspiciously small
        # (< 32 bytes is treated as truncated/corrupt).
        need_copy = (not os.path.isfile(dst)) or os.path.getsize(dst) < 32
        if need_copy:
            shutil.copy2(SIGNIN_SRC, dst)
        tmp_dir = os.path.join(PMA_DIR, 'tmp')
        os.makedirs(tmp_dir, exist_ok=True)
        try:
            # Fix ownership so the lscpd daemon can serve the tree.
            from plogical.processUtilities import ProcessUtilities
            ProcessUtilities.executioner('chown -R lscpd:lscpd ' + PMA_DIR)
        except Exception as ch_ex:
            logging.writeToFile('phpmyadmin_utils: chown skipped or failed (non-fatal): ' + str(ch_ex))
        return os.path.isfile(dst)
    except Exception as ex:
        logging.writeToFile('phpmyadmin_utils: ensure_phpmyadmin_signin_bridge failed: ' + str(ex))
        # Report whatever state we ended up in rather than raising.
        return os.path.isfile(dst)

View File

@@ -1388,7 +1388,7 @@ $cfg['Servers'][$i]['port'] = '3306';
writeToFile.writelines("$cfg['TempDir'] = '/usr/local/CyberCP/public/phpmyadmin/tmp';\n")
writeToFile.close()
os.mkdir('/usr/local/CyberCP/public/phpmyadmin/tmp')
os.makedirs('/usr/local/CyberCP/public/phpmyadmin/tmp', exist_ok=True)
if saved_signon and os.path.isfile(tmp_signon):
shutil.copy2(tmp_signon, os.path.join(pma_dir, 'phpmyadminsignin.php'))
@@ -1417,6 +1417,12 @@ $cfg['Servers'][$i]['port'] = '3306';
command = 'chown -R lscpd:lscpd /usr/local/CyberCP/public/phpmyadmin/tmp'
Upgrade.executioner_silent(command, 'chown phpMyAdmin tmp')
try:
from plogical.phpmyadmin_utils import ensure_phpmyadmin_signin_bridge
ensure_phpmyadmin_signin_bridge()
except Exception:
pass
os.chdir(cwd)
except Exception as e:

View File

@@ -0,0 +1 @@
#

View File

@@ -0,0 +1 @@
#

View File

@@ -0,0 +1,102 @@
# -*- coding: utf-8 -*-
import os
import time
from django.core.management.base import BaseCommand
class Command(BaseCommand):
    """Refresh the CyberPanel plugin store cache (hourly scheduler entry point).

    Concurrency-safe: an exclusive lock file prevents refresh stampedes,
    and locks abandoned by crashed runs are removed once older than
    --stale-lock-seconds.
    """

    help = "Refresh CyberPanel plugin store cache (hourly scheduler)."

    def add_arguments(self, parser):
        # --force: refresh regardless of cache freshness.
        parser.add_argument(
            "--force",
            action="store_true",
            help="Refresh even if cache is not expired.",
        )
        # --stale-lock-seconds: age threshold for discarding an abandoned lock.
        parser.add_argument(
            "--stale-lock-seconds",
            type=int,
            default=900,
            help="Remove the cache-refresh lock if it is older than this many seconds.",
        )

    def handle(self, *args, **options):
        force = bool(options.get("force", False))
        stale_lock_seconds = int(options.get("stale_lock_seconds", 900))

        try:
            from pluginHolder import views as plugin_views
        except Exception:
            # Avoid printing secrets; just show a minimal message.
            # BUGFIX: handle() must not return nonzero ints -- Django's
            # BaseCommand.execute() passes a truthy return value to
            # self.stdout.write(), which requires a string.
            self.stderr.write("Failed to import pluginHolder views for cache refresh.")
            return

        # Only refresh when needed (unless --force is used).
        # BUGFIX: initialize before the try block; if _get_cache_expiry_time()
        # raised, the original code hit a NameError when reading
        # cache_expiry_timestamp in the condition below.
        cache_expiry_timestamp = None
        try:
            cache_expiry_timestamp, _ = plugin_views._get_cache_expiry_time()
            cache_expired = plugin_views._is_cache_expired(cache_expiry_timestamp)
        except Exception:
            cache_expired = True
        if not force and cache_expiry_timestamp and not cache_expired:
            self.stdout.write("Plugin store cache is still fresh; no refresh needed.")
            return

        lock_path = plugin_views.PLUGIN_STORE_REFRESH_LOCK_FILE
        try:
            plugin_views._ensure_cache_dir()
        except Exception:
            pass

        # Remove stale lock left behind by a crashed/aborted refresh.
        if os.path.exists(lock_path):
            try:
                age_s = time.time() - os.path.getmtime(lock_path)
                if age_s > stale_lock_seconds:
                    os.remove(lock_path)
                    try:
                        plugin_views.logging.writeToFile(
                            f"Management refresh: removed stale plugin store refresh lock (age: {age_s:.0f}s)"
                        )
                    except Exception:
                        pass
            except Exception:
                pass

        # Acquire lock to avoid stampedes when multiple instances refresh.
        # O_CREAT|O_EXCL makes creation atomic: exactly one process wins.
        try:
            fd = os.open(lock_path, os.O_CREAT | os.O_EXCL | os.O_WRONLY, 0o644)
            with os.fdopen(fd, "w") as f:
                f.write(str(os.getpid()))
        except FileExistsError:
            self.stdout.write("Plugin store refresh skipped: lock already exists.")
            return
        except Exception:
            self.stderr.write("Plugin store refresh failed: could not acquire refresh lock.")
            return

        try:
            plugins = plugin_views._fetch_plugins_from_github()
            if not plugins:
                # Keep the previous cache rather than overwriting with nothing.
                self.stdout.write("Plugin store refresh fetched 0 plugins; cache not updated.")
                return
            plugin_views._save_plugins_cache(plugins)
            self.stdout.write(f"Plugin store cache refreshed successfully. plugins={len(plugins)}")
        except Exception as e:
            # Log error summary server-side; don't leak internal exception details to stdout.
            try:
                plugin_views.logging.writeToFile(f"Plugin store cache refresh failed: {str(e)}")
            except Exception:
                pass
            self.stderr.write("Plugin store cache refresh failed. Check error logs.")
        finally:
            # Always release the lock, on success and on failure alike.
            try:
                if os.path.exists(lock_path):
                    os.remove(lock_path)
            except Exception:
                pass

View File

@@ -3,4 +3,22 @@
from django.db import models
# Create your models here.
class PluginActivationKey(models.Model):
    """
    Optional ORM mirror for activation keys persisted in MariaDB.
    Runtime code uses raw SQL CREATE TABLE IF NOT EXISTS for migration safety.
    """
    # Natural key: one row per (plugin_name, user_identity) — see Meta.unique_together.
    plugin_name = models.CharField(max_length=191)
    user_identity = models.CharField(max_length=191)
    # SHA-256 hex digest of the raw key (64 hex chars); the raw key itself is never stored.
    activation_key_hash = models.CharField(max_length=64)
    # Last four characters of the raw key, kept as a non-secret hint.
    key_last4 = models.CharField(max_length=4, blank=True, default='')
    # Origin of the key (defaults to 'manual' in the runtime DDL as well).
    source = models.CharField(max_length=50, blank=True, default='manual')
    is_active = models.BooleanField(default=True)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        # managed=False: table is created by raw DDL at runtime, not by migrations.
        managed = False
        db_table = 'plugin_activation_keys'
        unique_together = (('plugin_name', 'user_identity'),)

View File

@@ -5,7 +5,170 @@ Checks if user has access to paid plugins
"""
from .patreon_verifier import PatreonVerifier
import logging
import hashlib
from django.db import connection
from plogical.CyberCPLogFileWriter import CyberCPLogFileWriter as logging
def _normalize_identity(value):
if not value:
return ''
return str(value).strip().lower()
def _hash_activation_key(raw_key):
return hashlib.sha256(raw_key.encode('utf-8')).hexdigest()
def _ensure_activation_table():
    """
    Create the plugin_activation_keys table on demand (idempotent DDL).

    Raw SQL instead of Django migrations so upgrade paths that skip
    migrations remain safe.
    """
    ddl = """
    CREATE TABLE IF NOT EXISTS plugin_activation_keys (
        id BIGINT UNSIGNED NOT NULL AUTO_INCREMENT,
        plugin_name VARCHAR(191) NOT NULL,
        user_identity VARCHAR(191) NOT NULL,
        activation_key_hash CHAR(64) NOT NULL,
        key_last4 VARCHAR(4) NOT NULL DEFAULT '',
        source VARCHAR(50) NOT NULL DEFAULT 'manual',
        is_active TINYINT(1) NOT NULL DEFAULT 1,
        created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
        updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
        PRIMARY KEY (id),
        UNIQUE KEY uniq_plugin_identity (plugin_name, user_identity),
        KEY idx_identity (user_identity),
        KEY idx_plugin (plugin_name)
    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
    """
    with connection.cursor() as cursor:
        cursor.execute(ddl)
def save_activation_key(plugin_name, user_identity, activation_key, source='manual'):
    """
    Upsert a hashed activation key keyed by (plugin_name, user_identity).

    Only the SHA-256 hash plus the last four characters are persisted; the
    raw key never reaches the database. Returns True on success, False on
    invalid input or any database error (errors are logged server-side).
    """
    plugin_name = _normalize_identity(plugin_name)
    user_identity = _normalize_identity(user_identity)
    activation_key = str(activation_key or '').strip()
    if not (plugin_name and user_identity and activation_key):
        return False
    try:
        _ensure_activation_table()
        hashed = _hash_activation_key(activation_key)
        suffix = activation_key[-4:] if len(activation_key) >= 4 else activation_key
        upsert_sql = """
            INSERT INTO plugin_activation_keys
            (plugin_name, user_identity, activation_key_hash, key_last4, source, is_active)
            VALUES (%s, %s, %s, %s, %s, 1)
            ON DUPLICATE KEY UPDATE
            activation_key_hash = VALUES(activation_key_hash),
            key_last4 = VALUES(key_last4),
            source = VALUES(source),
            is_active = 1
        """
        with connection.cursor() as cursor:
            cursor.execute(upsert_sql, [plugin_name, user_identity, hashed, suffix, source])
        return True
    except Exception as e:
        logging.writeToFile('plugin_access.save_activation_key failed: %s' % str(e))
        return False
def has_saved_activation(plugin_name, user_identity):
    """Return True when an active activation-key row exists for this pair.

    Best-effort: any database failure is logged and reported as False.
    """
    plugin_name = _normalize_identity(plugin_name)
    user_identity = _normalize_identity(user_identity)
    if not (plugin_name and user_identity):
        return False
    lookup_sql = """
        SELECT 1
        FROM plugin_activation_keys
        WHERE plugin_name = %s
          AND user_identity = %s
          AND is_active = 1
        LIMIT 1
    """
    try:
        _ensure_activation_table()
        with connection.cursor() as cursor:
            cursor.execute(lookup_sql, [plugin_name, user_identity])
            row = cursor.fetchone()
        return row is not None
    except Exception as e:
        logging.writeToFile('plugin_access.has_saved_activation failed: %s' % str(e))
        return False
def verify_saved_activation_key(plugin_name, user_identity, activation_key):
    """
    True when the supplied key hashes to the stored, active row for this
    plugin/identity pair; False for missing input, no match, or DB errors.
    """
    plugin_name = _normalize_identity(plugin_name)
    user_identity = _normalize_identity(user_identity)
    activation_key = str(activation_key or '').strip()
    if not (plugin_name and user_identity and activation_key):
        return False
    lookup_sql = """
        SELECT 1
        FROM plugin_activation_keys
        WHERE plugin_name = %s
          AND user_identity = %s
          AND activation_key_hash = %s
          AND is_active = 1
        LIMIT 1
    """
    try:
        _ensure_activation_table()
        hashed = _hash_activation_key(activation_key)
        with connection.cursor() as cursor:
            cursor.execute(lookup_sql, [plugin_name, user_identity, hashed])
            row = cursor.fetchone()
        return row is not None
    except Exception as e:
        logging.writeToFile('plugin_access.verify_saved_activation_key failed: %s' % str(e))
        return False
def _resolve_identity_for_request(request):
"""
CyberPanel often authenticates via session userID (not Django auth user).
Prefer Administrator email when available, otherwise username.
"""
candidates = []
try:
if getattr(request, 'user', None) and request.user.is_authenticated:
u = request.user
email = getattr(u, 'email', None) or ''
if email:
candidates.append(email)
uname = getattr(u, 'username', None) or ''
if uname:
candidates.append(uname)
except Exception:
pass
try:
uid = request.session.get('userID') if hasattr(request, 'session') else None
if uid:
from loginSystem.models import Administrator
admin = Administrator.objects.filter(pk=uid).only('email', 'userName').first()
if admin:
if getattr(admin, 'email', '') and str(admin.email).lower() != 'none':
candidates.append(str(admin.email))
if getattr(admin, 'userName', ''):
candidates.append(str(admin.userName))
except Exception:
pass
for item in candidates:
item = (item or '').strip()
if item:
return item.lower()
return ''
def check_plugin_access(request, plugin_name, plugin_meta=None):
"""
@@ -40,21 +203,7 @@ def check_plugin_access(request, plugin_name, plugin_meta=None):
if not plugin_meta or not plugin_meta.get('is_paid', False):
return default_response
# Plugin is paid - check Patreon membership
if not request.user or not request.user.is_authenticated:
return {
'has_access': False,
'is_paid': True,
'message': 'Please log in to access this plugin',
'patreon_url': plugin_meta.get('patreon_url')
}
# Get user email
user_email = getattr(request.user, 'email', None)
if not user_email:
# Try to get from username or other fields
user_email = getattr(request.user, 'username', '')
user_email = _resolve_identity_for_request(request)
if not user_email:
return {
'has_access': False,
@@ -63,7 +212,16 @@ def check_plugin_access(request, plugin_name, plugin_meta=None):
'patreon_url': plugin_meta.get('patreon_url')
}
# Check Patreon membership
# First allow DB-backed activation keys (survives upgrades)
if has_saved_activation(plugin_name, user_email):
return {
'has_access': True,
'is_paid': True,
'message': 'Access granted',
'patreon_url': None
}
# Fallback to Patreon membership
verifier = PatreonVerifier()
has_membership = verifier.check_membership_cached(user_email)

View File

@@ -1695,8 +1695,8 @@
<thead>
<tr>
<th>{% trans "Plugin Name" %}</th>
<th>{% trans "New Version" %}</th>
<th>{% trans "Your Version" %}</th>
<th>{% trans "New Version" %}</th>
<th>{% trans "Date" %}</th>
<th>{% trans "Status / Action" %}</th>
</tr>
@@ -1737,7 +1737,7 @@
<strong>{% trans "Cache Information:" %}</strong>
{% trans "Plugin store data is cached for 1 hour to improve performance and reduce GitHub API rate limits. New plugins may take up to 1 hour to appear after being published." %}
{% if cache_expiry_timestamp %}
<br><strong>{% trans "Next cache update:" %}</strong> <span id="cacheExpiryTime" style="font-family: monospace;" data-timestamp="{{ cache_expiry_timestamp }}">{% trans "Calculating..." %}</span>
<br><strong>{% trans "Next cache update:" %}</strong> <span id="cacheExpiryTime" style="font-family: monospace;" data-timestamp="{{ cache_expiry_timestamp }}" data-expired="{% if cache_expired %}1{% else %}0{% endif %}" data-refresh-started="{% if cache_refresh_started %}1{% else %}0{% endif %}">{% trans "Calculating..." %}</span>
{% endif %}
</p>
<p class="warning-text">
@@ -1850,7 +1850,7 @@
</div>
<script>
// Cache-busting version: 2026-02-15-v1 - Grid/Table: collapsible Category Filter (like A-Å in store)
// Cache-busting version: 2026-03-27-v2 - Modify date: browser-local via modify_timestamp + nb-NO style
let storePlugins = [];
let currentFilter = 'all';
let currentCategory = 'all';
@@ -2013,11 +2013,11 @@ function displayUpgradesAvailable() {
const name = escapeHtml(plugin.name || plugin.plugin_dir || '');
const newVer = escapeHtml(plugin.version || '');
const yourVer = escapeHtml(plugin.installed_version || 'Unknown');
const date = escapeHtml(plugin.modify_date || '');
const date = formatPluginModifyDateDisplay(plugin);
const curVer = (plugin.installed_version || 'Unknown').replace(/'/g, '&#39;');
const nVer = (plugin.version || 'Unknown').replace(/'/g, '&#39;');
const actionHtml = '<button type="button" class="btn-action btn-upgrade" data-plugin-dir="' + dir + '" data-current-version="' + escapeHtml(curVer) + '" data-new-version="' + escapeHtml(nVer) + '" onclick="upgradePlugin(this.getAttribute(\'data-plugin-dir\'), this.getAttribute(\'data-current-version\'), this.getAttribute(\'data-new-version\'))"><i class="fas fa-arrow-up"></i> Upgrade</button>';
html += '<tr><td><strong>' + name + '</strong></td><td>' + newVer + '</td><td>' + yourVer + '</td><td>' + date + '</td><td>' + actionHtml + '</td></tr>';
html += '<tr><td><strong>' + name + '</strong></td><td>' + yourVer + '</td><td>' + newVer + '</td><td>' + date + '</td><td>' + actionHtml + '</td></tr>';
});
tbody.innerHTML = html;
}
@@ -2109,12 +2109,70 @@ function escapeHtml(text) {
return div.innerHTML;
}
function getFreshnessBadgeHtml(freshnessFromApi, modifyDate) {
/**
 * Human-readable "modify date" for a plugin row.
 * Prefers the Unix-seconds `modify_timestamp` from the API (formatted in the
 * browser's locale/timezone, nb-NO style fallback: DD.MM.YYYY, 24h); falls
 * back to the legacy pre-formatted `modify_date` string. Output is
 * HTML-escaped in all paths.
 */
function formatPluginModifyDateDisplay(plugin) {
    if (!plugin) return '';
    const rawTs = plugin.modify_timestamp;
    if (rawTs !== null && rawTs !== undefined && rawTs !== '') {
        const seconds = Number(rawTs);
        if (!isNaN(seconds)) {
            const when = new Date(seconds * 1000);
            if (!isNaN(when.getTime())) {
                const opts = {
                    day: '2-digit',
                    month: '2-digit',
                    year: 'numeric',
                    hour: '2-digit',
                    minute: '2-digit',
                    second: '2-digit',
                    hourCycle: 'h23',
                };
                const locale = (typeof navigator !== 'undefined' && navigator.language)
                    ? navigator.language
                    : 'nb-NO';
                try {
                    return escapeHtml(when.toLocaleString(locale, opts));
                } catch (e) {
                    // Unknown/invalid browser locale — retry with the fixed fallback.
                    try {
                        return escapeHtml(when.toLocaleString('nb-NO', opts));
                    } catch (e2) {
                        /* fall through to legacy string */
                    }
                }
            }
        }
    }
    return escapeHtml(plugin.modify_date || '');
}
function getFreshnessBadgeHtml(freshnessFromApi, modifyDate, modifyTimestamp) {
// Use API data if available
if (freshnessFromApi && freshnessFromApi.badge && freshnessFromApi.class) {
return `<br><span class="${escapeHtml(freshnessFromApi.class)}" title="${escapeHtml(freshnessFromApi.title || '')}">${escapeHtml(freshnessFromApi.badge)}</span>`;
}
// Compute from modify_date (for cached data without freshness_badge)
// Compute from instant when we have Unix timestamp (correct vs browser)
if (modifyTimestamp !== null && modifyTimestamp !== undefined && modifyTimestamp !== '') {
const sec = Number(modifyTimestamp);
if (!isNaN(sec)) {
const d = new Date(sec * 1000);
if (!isNaN(d.getTime())) {
const daysAgo = Math.floor((Date.now() - d.getTime()) / (24 * 60 * 60 * 1000));
if (daysAgo <= 90) {
return '<br><span class="freshness-badge-new" title="This plugin was released/updated within the last 3 months">NEW</span>';
} else if (daysAgo <= 365) {
return '<br><span class="freshness-badge-stable" title="This plugin was updated within the last year">Stable</span>';
} else if (daysAgo < 730) {
return '<br><span class="freshness-badge-unstable" title="This plugin has not been updated in over 1 year">Unstable</span>';
}
return '<br><span class="freshness-badge-stale" title="This plugin has not been updated in over 2 years">STALE</span>';
}
}
}
// Compute from modify_date (for cached data without freshness_badge or timestamp)
if (!modifyDate || modifyDate === 'N/A') return '';
try {
const m = modifyDate.match(/^(\d{4})-(\d{2})-(\d{2})\s+(\d{2}):(\d{2}):(\d{2})/);
@@ -2247,10 +2305,12 @@ function displayStorePlugins() {
</a>`;
// Modify Date column - show N/A for store plugins (they're from GitHub, not local)
const modifyDateHtml = plugin.modify_date ? `<small style="color: var(--text-secondary, #64748b);">${escapeHtml(plugin.modify_date)}</small>` : '<small style="color: var(--text-secondary, #64748b);">N/A</small>';
const modifyDateHtml = (plugin.modify_timestamp != null || (plugin.modify_date && plugin.modify_date !== 'N/A'))
? `<small style="color: var(--text-secondary, #64748b);">${formatPluginModifyDateDisplay(plugin)}</small>`
: '<small style="color: var(--text-secondary, #64748b);">N/A</small>';
// Freshness badge (NEW/Stable/STALE) - use API data or compute from modify_date
const freshnessBadgeHtml = getFreshnessBadgeHtml(plugin.freshness_badge || null, plugin.modify_date);
const freshnessBadgeHtml = getFreshnessBadgeHtml(plugin.freshness_badge || null, plugin.modify_date, plugin.modify_timestamp);
// Pricing badge - ALWAYS show a badge (default to Free if is_paid is missing/undefined/null)
// Version: 2026-01-25-v4 - Normalize is_paid to handle all possible values
@@ -3250,8 +3310,8 @@ function updateCacheExpiryTime() {
return;
}
// Get user's locale preferences
const locale = navigator.language || navigator.userLanguage || 'en-US';
// Always use Norwegian format for NO-facing UI: DD.MM.YYYY kl. HH:MM
const locale = 'nb-NO';
const timezone = Intl.DateTimeFormat().resolvedOptions().timeZone;
// Format date and time according to user's locale
@@ -3264,7 +3324,6 @@ function updateCacheExpiryTime() {
const timeOptions = {
hour: '2-digit',
minute: '2-digit',
second: '2-digit',
hour12: false
};
@@ -3273,11 +3332,18 @@ function updateCacheExpiryTime() {
const timeStr = expiryDate.toLocaleTimeString(locale, timeOptions);
// Combine with timezone abbreviation
const formatted = dateStr + ' ' + timeStr;
// Display with timezone info
expiryElement.textContent = formatted;
expiryElement.title = 'Local time: ' + formatted + ' | Timezone: ' + timezone;
const formatted = dateStr + ' kl. ' + timeStr;
const expired = expiryElement.getAttribute('data-expired') === '1';
const refreshStarted = expiryElement.getAttribute('data-refresh-started') === '1';
if (expired) {
expiryElement.textContent = refreshStarted
? ('Expired (' + formatted + ') - updating in background')
: ('Expired (' + formatted + ')');
} else {
expiryElement.textContent = formatted;
}
expiryElement.title = 'Lokal tid: ' + formatted + ' | Tidssone: ' + timezone;
} catch (e) {
console.error('Error formatting cache expiry time:', e);
expiryElement.textContent = 'Error calculating time';

View File

@@ -104,6 +104,7 @@ urlpatterns = [
path('api/revert/<str:plugin_name>/', views.revert_plugin, name='revert_plugin'),
path('api/debug-plugins/', views.debug_loaded_plugins, name='debug_loaded_plugins'),
path('api/check-subscription/<str:plugin_name>/', views.check_plugin_subscription, name='check_plugin_subscription'),
path('api/store-activation/<str:plugin_name>/', views.store_plugin_activation_key, name='store_plugin_activation_key'),
path('<str:plugin_name>/settings/', views.plugin_settings_proxy, name='plugin_settings_proxy'),
path('<str:plugin_name>/help/', views.plugin_help, name='plugin_help'),
]

View File

@@ -33,6 +33,7 @@ PLUGIN_STORE_CACHE_FILE = os.path.join(PLUGIN_STORE_CACHE_DIR, 'plugins_cache.js
PLUGIN_STORE_CACHE_DURATION = 3600 # Base cache duration: 1 hour (3600 seconds)
PLUGIN_STORE_CACHE_RANDOM_OFFSET = 600 # Random offset: ±10 minutes (600 seconds) to prevent simultaneous requests
PLUGIN_STORE_REFRESH_LOCK_FILE = os.path.join(PLUGIN_STORE_CACHE_DIR, 'plugins_cache_refresh.lock')
PLUGIN_STORE_REFRESH_LOCK_STALE_SECONDS = 900 # 15 minutes; remove leftover lock if stuck
GITHUB_REPO_API = 'https://api.github.com/repos/master3395/cyberpanel-plugins/contents'
GITHUB_RAW_BASE = 'https://raw.githubusercontent.com/master3395/cyberpanel-plugins/main'
GITHUB_COMMITS_API = 'https://api.github.com/repos/master3395/cyberpanel-plugins/commits'
@@ -48,6 +49,42 @@ PLUGIN_SOURCE_PATHS = ['/home/cyberpanel/plugins', '/home/cyberpanel-plugins']
BUILTIN_PLUGINS = frozenset(['emailMarketing', 'emailPremium'])
def _resolve_logged_in_plugin_identity(request):
"""
CyberPanel often authenticates via session userID (not Django auth user).
Use Administrator email when available, otherwise username.
"""
candidates = []
try:
if getattr(request, 'user', None) and request.user.is_authenticated:
u = request.user
email = getattr(u, 'email', None) or ''
if email:
candidates.append(email)
uname = getattr(u, 'username', None) or ''
if uname:
candidates.append(uname)
except Exception:
pass
try:
uid = request.session.get('userID') if hasattr(request, 'session') else None
if uid:
from loginSystem.models import Administrator
admin = Administrator.objects.filter(pk=uid).only('email', 'userName').first()
if admin:
if getattr(admin, 'email', '') and str(admin.email).lower() != 'none':
candidates.append(str(admin.email))
if getattr(admin, 'userName', ''):
candidates.append(str(admin.userName))
except Exception:
pass
for item in candidates:
item = (item or '').strip()
if item:
return item.lower()
return ''
def _install_plugin_compat(plugin_name, zip_path_abs):
"""
Call pluginInstaller.installPlugin with zip_path when supported (newer CyberPanel).
@@ -125,11 +162,10 @@ def _get_plugin_source_path(plugin_name):
return path
return None
def _get_local_plugin_meta_modify_date(plugin_name):
def _get_local_plugin_meta_modify_pair(plugin_name):
"""
Compute plugin modify date from local meta.xml file timestamps.
This avoids per-plugin GitHub commits API calls while still providing
a useful "Modify date" column in the plugin store UI.
Return (modify_date string server-local, unix seconds) from first found meta.xml.
Unix seconds represent the same instant everywhere; UI formats in browser timezone.
"""
candidate_paths = []
@@ -143,11 +179,39 @@ def _get_local_plugin_meta_modify_date(plugin_name):
try:
if os.path.exists(meta_path) and os.path.isfile(meta_path):
modify_time = os.path.getmtime(meta_path)
return datetime.fromtimestamp(modify_time).strftime('%Y-%m-%d %H:%M:%S')
return (
datetime.fromtimestamp(modify_time).strftime('%Y-%m-%d %H:%M:%S'),
int(modify_time),
)
except Exception:
continue
return 'N/A'
return ('N/A', None)
def _get_local_plugin_meta_modify_date(plugin_name):
    """
    Legacy accessor: server-local 'modify date' string for a plugin.

    Thin wrapper around _get_local_plugin_meta_modify_pair() for callers
    that only need the formatted string. Derived from local meta.xml
    timestamps, so no per-plugin GitHub commits API calls are made.
    """
    date_str, _unused_ts = _get_local_plugin_meta_modify_pair(plugin_name)
    return date_str
def _apply_modify_date_from_meta_path(data_dict, meta_xml_path):
    """
    Populate modify_date, modify_timestamp and freshness_badge on a plugin
    data dict from the mtime of its meta.xml.

    On any failure (missing path, unreadable file) the fields fall back to
    'N/A' / None so callers never crash.
    """
    modify_date = 'N/A'
    modify_timestamp = None
    try:
        # isfile (not just exists) for consistency with
        # _get_local_plugin_meta_modify_pair and to skip stray directories
        # that happen to be named meta.xml.
        if meta_xml_path and os.path.isfile(meta_xml_path):
            mtime = os.path.getmtime(meta_xml_path)
            modify_date = datetime.fromtimestamp(mtime).strftime('%Y-%m-%d %H:%M:%S')
            modify_timestamp = int(mtime)
    except Exception:
        pass
    data_dict['modify_date'] = modify_date
    data_dict['modify_timestamp'] = modify_timestamp
    data_dict['freshness_badge'] = _get_freshness_badge(modify_date)
def _ensure_plugin_meta_xml(plugin_name):
"""
@@ -365,16 +429,7 @@ def installed(request):
# Get modify date from local file (fast, no API calls)
# GitHub commit dates are fetched in the plugin store, not here to avoid timeouts
modify_date = 'N/A'
try:
if os.path.exists(metaXmlPath):
modify_time = os.path.getmtime(metaXmlPath)
modify_date = datetime.fromtimestamp(modify_time).strftime('%Y-%m-%d %H:%M:%S')
except Exception:
modify_date = 'N/A'
data['modify_date'] = modify_date
data['freshness_badge'] = _get_freshness_badge(modify_date)
_apply_modify_date_from_meta_path(data, metaXmlPath)
# Extract settings URL or main URL for "Manage" button
settings_url_elem = root.find('settings_url')
@@ -500,16 +555,7 @@ def installed(request):
data['patreon_url'] = None
# Get modify date from installed location
modify_date = 'N/A'
try:
if os.path.exists(metaXmlPath):
modify_time = os.path.getmtime(metaXmlPath)
modify_date = datetime.fromtimestamp(modify_time).strftime('%Y-%m-%d %H:%M:%S')
except Exception:
modify_date = 'N/A'
data['modify_date'] = modify_date
data['freshness_badge'] = _get_freshness_badge(modify_date)
_apply_modify_date_from_meta_path(data, metaXmlPath)
# Extract settings URL or main URL
settings_url_elem = root.find('settings_url')
@@ -602,12 +648,7 @@ def installed(request):
'manage_url': f'/plugins/{plugin_name}/',
'author': root.find('author').text if root.find('author') is not None and root.find('author').text else 'Unknown',
}
try:
modify_time = os.path.getmtime(meta_xml_path)
data['modify_date'] = datetime.fromtimestamp(modify_time).strftime('%Y-%m-%d %H:%M:%S')
except Exception:
data['modify_date'] = 'N/A'
data['freshness_badge'] = _get_freshness_badge(data['modify_date'])
_apply_modify_date_from_meta_path(data, meta_xml_path)
paid_elem = root.find('paid')
if paid_elem is not None and paid_elem.text and paid_elem.text.lower() == 'true':
data['is_paid'] = True
@@ -650,8 +691,13 @@ def installed(request):
for p in pluginList:
logging.writeToFile(f" - {p.get('plugin_dir')}: installed={p.get('installed')}, enabled={p.get('enabled')}")
# Get cache expiry timestamp for display (will be converted to local time in browser)
# Get cache expiry timestamp for display (browser formats this as nb-NO)
cache_expiry_timestamp, _ = _get_cache_expiry_time()
cache_expired = _is_cache_expired(cache_expiry_timestamp)
refresh_started = False
if cache_expired:
# If cache is stale while on Installed page, trigger best-effort background refresh.
refresh_started = _try_start_plugin_store_refresh_background()
# Sort plugins A-Å by name (case-insensitive) for Grid and Table view
pluginList.sort(key=lambda p: (p.get('name') or '').lower())
@@ -671,9 +717,11 @@ def installed(request):
pass
proc = httpProc(request, 'pluginHolder/plugins.html',
{'plugins': pluginList, 'error_plugins': errorPlugins,
{'plugins': pluginList, 'error_plugins': errorPlugins,
'installed_count': installed_count, 'active_count': active_count,
'cache_expiry_timestamp': cache_expiry_timestamp}, 'managePlugins')
'cache_expiry_timestamp': cache_expiry_timestamp,
'cache_expired': cache_expired,
'cache_refresh_started': refresh_started}, 'managePlugins')
return proc.render()
@csrf_exempt
@@ -946,6 +994,16 @@ def _get_cache_expiry_time():
logging.writeToFile(f"Error getting cache expiry time: {str(e)}")
return None, None
def _is_cache_expired(expiry_timestamp):
"""Return True if provided cache expiry timestamp is in the past."""
try:
if not expiry_timestamp:
return False
return float(expiry_timestamp) <= time.time()
except Exception:
return False
def _get_cached_plugins(allow_expired=False):
"""Get plugins from cache if available and not expired
@@ -1029,6 +1087,22 @@ def _try_start_plugin_store_refresh_background():
try:
_ensure_cache_dir()
# If a previous refresh crashed and left the lock behind, remove it
# so background refresh can resume. This is critical for hourly updates.
try:
if os.path.exists(lock_path):
age_s = time.time() - os.path.getmtime(lock_path)
if age_s > PLUGIN_STORE_REFRESH_LOCK_STALE_SECONDS:
try:
os.remove(lock_path)
logging.writeToFile(
f"Removed stale plugin store refresh lock (age: {age_s:.0f}s)"
)
except Exception:
pass
except Exception:
pass
# Try to acquire a file lock so multiple workers don't stampede GitHub.
try:
fd = os.open(lock_path, os.O_CREAT | os.O_EXCL | os.O_WRONLY, 0o644)
@@ -1116,32 +1190,127 @@ def _get_installed_version(plugin_dir, plugin_install_dir):
return None
def _parse_version_from_meta_xml_bytes(content):
"""Return <version> text from meta.xml bytes, or None."""
if not content:
return None
try:
if isinstance(content, bytes):
content = content.decode('utf-8', errors='replace')
root = ElementTree.fromstring(content)
ve = root.find('version')
if ve is not None and ve.text:
return ve.text.strip()
except Exception as e:
logging.writeToFile('Parse meta.xml version: %s' % str(e))
return None
def _read_version_from_plugin_zip(zip_path, plugin_name):
    """
    Version string from <plugin_name>/meta.xml inside an upgrade ZIP, or None.

    Member lookup is exact-match first, then case-insensitive, to tolerate
    archives built on case-insensitive filesystems. Failures are logged.
    """
    import zipfile
    wanted = '%s/meta.xml' % plugin_name
    try:
        with zipfile.ZipFile(zip_path, 'r') as archive:
            members = archive.namelist()
            member = wanted if wanted in members else None
            if member is None:
                wanted_ci = wanted.lower()
                member = next((m for m in members if m.lower() == wanted_ci), None)
            if not member:
                return None
            return _parse_version_from_meta_xml_bytes(archive.read(member))
    except Exception as e:
        logging.writeToFile('read_version_from_plugin_zip: %s' % str(e))
        return None
def _write_meta_xml_from_plugin_zip(zip_path, plugin_name, plugin_install_dir='/usr/local/CyberCP'):
    """
    Restore <plugin_name>/meta.xml on disk from the upgrade ZIP.

    Fallback for when a later sync (or a stale CDN copy) overwrote the
    installed meta.xml with old data. Returns True when the file was
    written, False otherwise; failures are logged, never raised.
    """
    import zipfile
    wanted = '%s/meta.xml' % plugin_name
    try:
        with zipfile.ZipFile(zip_path, 'r') as archive:
            members = archive.namelist()
            member = wanted if wanted in members else None
            if member is None:
                wanted_ci = wanted.lower()
                member = next((m for m in members if m.lower() == wanted_ci), None)
            if not member:
                return False
            payload = archive.read(member)
        meta_path = os.path.join(plugin_install_dir, plugin_name, 'meta.xml')
        parent = os.path.dirname(meta_path)
        if parent and not os.path.exists(parent):
            os.makedirs(parent, mode=0o755, exist_ok=True)
        with open(meta_path, 'wb') as fh:
            fh.write(payload)
            fh.flush()
            # Push bytes to disk so a crash right after can't lose the restore.
            if hasattr(os, 'fsync'):
                try:
                    os.fsync(fh.fileno())
                except Exception:
                    pass
        logging.writeToFile('Restored %s/meta.xml from upgrade ZIP' % plugin_name)
        return True
    except Exception as e:
        logging.writeToFile('_write_meta_xml_from_plugin_zip: %s' % str(e))
        return False
def _invalidate_plugin_store_cache():
    """
    Drop the plugin-store cache file so the next view re-fetches versions.

    Called after an upgrade so the grid / 'upgrades available' table stop
    showing stale installed-vs-store data. Best-effort: failures are logged,
    never raised.
    """
    try:
        _ensure_cache_dir()
        if os.path.isfile(PLUGIN_STORE_CACHE_FILE):
            os.remove(PLUGIN_STORE_CACHE_FILE)
            logging.writeToFile('Plugin store cache invalidated after upgrade')
    except Exception as e:
        logging.writeToFile('Could not invalidate plugin store cache: %s' % str(e))
def _sync_meta_xml_from_github(plugin_name, plugin_install_dir='/usr/local/CyberCP'):
"""
Fetch meta.xml from GitHub raw (main) and overwrite installed meta.xml.
Ensures installed version matches store even when archive ZIP is cached/stale.
Verifies write by re-reading version. Returns True if synced and version readable, False otherwise.
Never overwrites with an *older* <version> than already on disk (stale raw.githubusercontent CDN).
"""
meta_url = f'{GITHUB_RAW_BASE}/{plugin_name}/meta.xml'
meta_url = '%s/%s/meta.xml?t=%s' % (GITHUB_RAW_BASE, plugin_name, int(time.time()))
meta_path = os.path.join(plugin_install_dir, plugin_name, 'meta.xml')
for attempt in (1, 2):
try:
req = urllib.request.Request(meta_url, headers={'User-Agent': 'CyberPanel-Plugin-Store/1.0'})
req = urllib.request.Request(
meta_url,
headers={'User-Agent': 'CyberPanel-Plugin-Store/1.0', 'Cache-Control': 'no-cache'},
)
with urllib.request.urlopen(req, timeout=15) as resp:
content = resp.read()
if not content:
if attempt == 2:
logging.writeToFile(f"Sync meta.xml for {plugin_name}: empty response from GitHub")
continue
remote_ver = _parse_version_from_meta_xml_bytes(content)
current_ver = _get_installed_version(plugin_name, plugin_install_dir)
if current_ver and remote_ver and _compare_versions(remote_ver, current_ver) < 0:
logging.writeToFile(
"Skip meta.xml sync for %s: remote %s older than installed %s (CDN/stale raw)"
% (plugin_name, remote_ver, current_ver)
)
return False
with open(meta_path, 'wb') as f:
f.write(content)
f.flush()
if hasattr(os, 'fsync'):
try:
f.fsync()
os.fsync(f.fileno())
except Exception:
pass
# Verify we can read version back (ensures file is valid and readable)
ver = _get_installed_version(plugin_name, plugin_install_dir)
if ver:
logging.writeToFile(f"Synced meta.xml for {plugin_name} from GitHub raw (version {ver})")
@@ -1400,7 +1569,7 @@ def _fetch_plugins_from_github():
# Performance: avoid per-plugin GitHub commits API calls.
# Instead, compute modify_date from local meta.xml timestamps
# (installed meta.xml if present, otherwise plugin source meta.xml).
modify_date = _get_local_plugin_meta_modify_date(plugin_name)
modify_date, modify_timestamp = _get_local_plugin_meta_modify_pair(plugin_name)
freshness = _get_freshness_badge(modify_date)
# Extract paid plugin information
@@ -1439,6 +1608,7 @@ def _fetch_plugins_from_github():
'github_url': f'https://github.com/master3395/cyberpanel-plugins/tree/main/{plugin_name}',
'about_url': f'https://github.com/master3395/cyberpanel-plugins/tree/main/{plugin_name}',
'modify_date': modify_date,
'modify_timestamp': modify_timestamp,
'freshness_badge': freshness,
'is_paid': is_paid,
'patreon_tier': patreon_tier,
@@ -1677,6 +1847,12 @@ def upgrade_plugin(request, plugin_name):
zip_path_abs = os.path.abspath(zip_path)
if not os.path.exists(zip_path_abs):
raise Exception(f'Zip file not found: {zip_path_abs}')
expected_from_zip = _read_version_from_plugin_zip(zip_path_abs, plugin_name)
if expected_from_zip:
logging.writeToFile(
'Plugin %s: version in upgrade archive meta.xml: %s' % (plugin_name, expected_from_zip)
)
logging.writeToFile(f"Upgrading plugin using pluginInstaller (zip={zip_path_abs})")
@@ -1698,18 +1874,43 @@ def upgrade_plugin(request, plugin_name):
if not os.path.exists(pluginInstalled):
raise Exception(f'Plugin upgrade failed: {pluginInstalled} does not exist after upgrade')
# Sync meta.xml from GitHub raw so version matches store (archive ZIP can be cached/stale)
# Sync meta.xml from GitHub raw (never downgrades vs disk — avoids stale CDN on raw.githubusercontent.com)
_sync_meta_xml_from_github(plugin_name, '/usr/local/CyberCP')
new_version = _get_installed_version(plugin_name, '/usr/local/CyberCP')
# If version unchanged, meta sync may have failed (e.g. network); retry once
if new_version == installed_version:
logging.writeToFile(f"Plugin {plugin_name}: version unchanged after first meta sync, retrying sync")
_sync_meta_xml_from_github(plugin_name, '/usr/local/CyberCP')
new_version = _get_installed_version(plugin_name, '/usr/local/CyberCP')
if new_version == installed_version:
logging.writeToFile(f"Plugin {plugin_name}: version still {installed_version} after upgrade; meta.xml may not have been updated from GitHub")
logging.writeToFile(f"Plugin {plugin_name} upgraded successfully from {installed_version} to {new_version}")
if (
new_version == installed_version
and expected_from_zip
and installed_version
and _compare_versions(expected_from_zip, installed_version) > 0
):
logging.writeToFile(
'Plugin %s: forcing meta.xml from upgrade ZIP (archive says %s, disk still %s)'
% (plugin_name, expected_from_zip, installed_version)
)
_write_meta_xml_from_plugin_zip(zip_path_abs, plugin_name, '/usr/local/CyberCP')
new_version = _get_installed_version(plugin_name, '/usr/local/CyberCP')
if (
new_version == installed_version
and expected_from_zip
and installed_version
and _compare_versions(expected_from_zip, installed_version) > 0
):
err = (
'Upgrade did not update version on disk (still %s; archive has %s). '
'Check ownership of /usr/local/CyberCP/%s and CyberPanel logs.'
% (installed_version, expected_from_zip, plugin_name)
)
logging.writeToFile('Plugin %s: %s' % (plugin_name, err))
return JsonResponse({'success': False, 'error': err}, status=500)
_invalidate_plugin_store_cache()
logging.writeToFile(
'Plugin %s upgraded successfully from %s to %s' % (plugin_name, installed_version, new_version)
)
backup_message = ''
if backup_path:
@@ -2106,7 +2307,8 @@ def plugin_settings_proxy(request, plugin_name):
for candidate in ('settings', 'settings_view', 'settings_simple', 'unified_settings'):
settings_view = getattr(views_mod, candidate, None)
if callable(settings_view):
return settings_view(request)
response = settings_view(request)
return _inject_activation_store_hook(response, plugin_name)
except ModuleNotFoundError as e:
last_err = str(e)
continue
@@ -2123,6 +2325,84 @@ def plugin_settings_proxy(request, plugin_name):
return HttpResponseNotFound('Plugin not found.')
def _inject_activation_store_hook(response, plugin_name):
"""
Wrap a plugin settings HTML response with a client-side activation-store hook.

The injected <script> monkey-patches window.fetch so that, when a request
whose URL matches /activate|activation|activate_key/ returns a JSON body
signalling success (has_access === true, status === 1, or success === true),
the activation key found in the request body is POSTed to
/plugins/api/store-activation/<plugin_name>/ and persisted server-side.

Parameters:
    response: Django-style response object; read via .get() and .content.
    plugin_name: plugin identifier, embedded into the script via json.dumps.

Returns the (possibly modified) response. Any failure is swallowed and the
original response is returned untouched.
"""
try:
# Only touch HTML pages; JSON/API/asset responses pass through unchanged.
content_type = (response.get('Content-Type', '') or '').lower()
if 'text/html' not in content_type:
return response
# NOTE(review): assumes a non-streaming response exposing .content — confirm
# no streaming responses reach this path.
# errors='ignore' so a mis-encoded page never raises here.
body = response.content.decode('utf-8', errors='ignore')
# The %s placeholder below is filled with json.dumps(plugin_name), which
# yields a quoted, escaped JS string literal. Backslashes are doubled
# (\\s, \\-) so the rendered JavaScript regexes read \s and \- .
hook_script = """
<script>
(function () {
if (window.__cpActivationStoreHookInstalled) return;
window.__cpActivationStoreHookInstalled = true;
var pluginName = %s;
function getCsrfToken() {
var m = document.cookie.match(/(?:^|; )csrftoken=([^;]+)/);
return m ? decodeURIComponent(m[1]) : '';
}
function tryParseBody(body) {
if (!body || typeof body !== 'string') return '';
try {
var obj = JSON.parse(body);
if (obj && typeof obj.activation_key === 'string') return obj.activation_key.trim();
} catch (e) {}
var rx = /activation_key\\s*[:=]\\s*["']?([A-Za-z0-9\\-_.]{6,})/i;
var m = body.match(rx);
return m ? m[1] : '';
}
async function persistActivationKey(activationKey) {
if (!activationKey) return;
try {
await window.__cpOriginalFetch('/plugins/api/store-activation/' + encodeURIComponent(pluginName) + '/', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'X-CSRFToken': getCsrfToken()
},
body: JSON.stringify({ activation_key: activationKey })
});
} catch (e) {}
}
if (!window.fetch) return;
window.__cpOriginalFetch = window.fetch.bind(window);
window.fetch = async function(input, init) {
var url = (typeof input === 'string') ? input : ((input && input.url) || '');
var body = init && init.body ? String(init.body) : '';
var activationKey = tryParseBody(body);
var resp = await window.__cpOriginalFetch(input, init);
try {
var looksLikeActivation = /activate|activation|activate_key/i.test(url || '');
if (!looksLikeActivation) return resp;
var clone = resp.clone();
var ct = (clone.headers.get('content-type') || '').toLowerCase();
if (ct.indexOf('application/json') === -1) return resp;
var data = await clone.json();
var ok = !!(data && (data.has_access === true || data.status === 1 || data.success === true));
if (ok && activationKey) {
persistActivationKey(activationKey);
}
} catch (e) {}
return resp;
};
})();
</script>
""" % json.dumps(plugin_name)
# Prefer inserting just before </body>; append as a fallback for
# fragments/partials that have no closing body tag.
if '</body>' in body:
body = body.replace('</body>', hook_script + '</body>')
else:
body += hook_script
response.content = body.encode('utf-8')
return response
except Exception:
# Injection is best-effort: never break the settings page over it.
return response
def plugin_help(request, plugin_name):
"""Plugin-specific help page - shows plugin information, version history, and help content"""
mailUtilities.checkHome()
@@ -2322,10 +2602,11 @@ def plugin_help(request, plugin_name):
return proc.render()
@csrf_exempt
@require_http_methods(["GET"])
@require_http_methods(["GET", "POST"])
def check_plugin_subscription(request, plugin_name):
"""
API endpoint to check if user has Patreon subscription for a paid plugin
API endpoint to check plugin premium access.
Supports optional activation key save/verify to persist entitlement in MariaDB.
Args:
request: Django request object
@@ -2342,21 +2623,56 @@ def check_plugin_subscription(request, plugin_name):
try:
if not user_can_manage_plugins(request):
return deny_plugin_manage_json_response(request)
# Check if user is authenticated
if not request.user or not request.user.is_authenticated:
# Load plugin metadata
from .plugin_access import (
check_plugin_access,
_load_plugin_meta,
save_activation_key,
verify_saved_activation_key
)
plugin_meta = _load_plugin_meta(plugin_name)
user_email = _resolve_logged_in_plugin_identity(request)
if not user_email:
return JsonResponse({
'success': False,
'has_access': False,
'is_paid': False,
'message': 'Please log in to check subscription status',
'message': 'Unable to determine user identity',
'patreon_url': None
}, status=401)
# Load plugin metadata
from .plugin_access import check_plugin_access, _load_plugin_meta
plugin_meta = _load_plugin_meta(plugin_name)
}, status=400)
activation_key = ''
if request.method == 'POST':
try:
payload = json.loads(request.body.decode('utf-8') or '{}')
except Exception:
payload = {}
activation_key = str(payload.get('activation_key', '')).strip()
if activation_key and user_email:
# If key is already known for this user/plugin -> immediate access
if verify_saved_activation_key(plugin_name, user_email, activation_key):
return JsonResponse({
'success': True,
'has_access': True,
'is_paid': bool(plugin_meta and plugin_meta.get('is_paid', False)),
'message': 'Access granted',
'patreon_url': None,
'activation_saved': True
})
# Save submitted key as persistent entitlement (admin-managed workflow)
saved = save_activation_key(plugin_name, user_email, activation_key, source='plugin_settings')
if saved:
return JsonResponse({
'success': True,
'has_access': True,
'is_paid': bool(plugin_meta and plugin_meta.get('is_paid', False)),
'message': 'Activation key saved',
'patreon_url': None,
'activation_saved': True
})
# Check access
access_result = check_plugin_access(request, plugin_name, plugin_meta)
@@ -2365,7 +2681,8 @@ def check_plugin_subscription(request, plugin_name):
'has_access': access_result['has_access'],
'is_paid': access_result['is_paid'],
'message': access_result['message'],
'patreon_url': access_result.get('patreon_url')
'patreon_url': access_result.get('patreon_url'),
'activation_saved': access_result['has_access'] and access_result['is_paid']
})
except Exception as e:
@@ -2374,6 +2691,40 @@ def check_plugin_subscription(request, plugin_name):
'success': False,
'has_access': False,
'is_paid': False,
'message': f'Error checking subscription: {str(e)}',
'message': 'Error checking subscription',
'patreon_url': None
}, status=500)
@csrf_exempt
@require_http_methods(["POST"])
def store_plugin_activation_key(request, plugin_name):
    """
    Persist a plugin activation key (POST JSON: {"activation_key": "<key>"})
    so the premium entitlement survives plugin upgrades.

    The caller must be permitted to manage plugins; the key is stored against
    the logged-in user's identity via plugin_access.save_activation_key.
    Returns JSON with 'success' and 'message'; 400 on missing key or unknown
    identity, 500 on persistence failure or unexpected error.
    """
    try:
        # Authorisation gate shared by all plugin-management endpoints.
        if not user_can_manage_plugins(request):
            return deny_plugin_manage_json_response(request)

        # Tolerate an empty or malformed JSON body — treat it as {}.
        try:
            submitted = json.loads(request.body.decode('utf-8') or '{}')
        except Exception:
            submitted = {}

        key_value = str(submitted.get('activation_key', '')).strip()
        if not key_value:
            return JsonResponse({'success': False, 'message': 'activation_key is required'}, status=400)

        identity = _resolve_logged_in_plugin_identity(request)
        if not identity:
            return JsonResponse({'success': False, 'message': 'Unable to determine user identity'}, status=400)

        from .plugin_access import save_activation_key
        if not save_activation_key(plugin_name, identity, key_value, source='api'):
            return JsonResponse({'success': False, 'message': 'Failed to persist activation key'}, status=500)
        return JsonResponse({'success': True, 'message': 'Activation key saved'})
    except Exception as e:
        logging.writeToFile('store_plugin_activation_key failed for %s: %s' % (plugin_name, str(e)))
        return JsonResponse({'success': False, 'message': 'Internal server error'}, status=500)

View File

@@ -58,6 +58,35 @@ class pluginInstaller:
pluginHome = '/usr/local/CyberCP/' + pluginName
return os.path.exists(pluginHome + '/enable_migrations')
@staticmethod
def shouldApplyPluginDatabaseMigrations(pluginName: str) -> bool:
    """
    Decide whether Django database migrations should run for a plugin.

    True when the plugin explicitly opts in (enable_migrations marker file)
    or when it ships a migrations/ directory containing at least one real
    migration module (a .py file other than __init__.py). Unreadable
    directories count as "no migrations".
    """
    # Explicit opt-in marker always wins.
    if pluginInstaller.migrationsEnabled(pluginName):
        return True
    migrations_path = '/usr/local/CyberCP/' + pluginName + '/migrations'
    if not os.path.isdir(migrations_path):
        return False
    try:
        entries = os.listdir(migrations_path)
    except OSError:
        return False
    return any(entry.endswith('.py') and entry != '__init__.py' for entry in entries)
@staticmethod
def _manage_python_executable():
    """
    Locate the Python interpreter for running manage.py commands.

    Prefers CyberPanel's virtualenv binaries; falls back to the plain
    'python3' command when neither venv interpreter is an executable file.
    """
    preferred = ('/usr/local/CyberCP/bin/python', '/usr/local/CyberCP/bin/python3')
    for interpreter in preferred:
        try:
            usable = os.path.isfile(interpreter) and os.access(interpreter, os.X_OK)
        except OSError:
            # Unreadable path — try the next candidate.
            continue
        if usable:
            return interpreter
    return 'python3'
@staticmethod
def _write_lines_to_protected_file(target_path, lines):
"""
@@ -338,12 +367,31 @@ class pluginInstaller:
@staticmethod
def installMigrations(pluginName):
currentDir = os.getcwd()
os.chdir('/usr/local/CyberCP')
command = "python3 /usr/local/CyberCP/manage.py makemigrations %s" % pluginName
subprocess.call(shlex.split(command))
command = "python3 /usr/local/CyberCP/manage.py migrate %s" % pluginName
subprocess.call(shlex.split(command))
os.chdir(currentDir)
manage_py = '/usr/local/CyberCP/manage.py'
py = pluginInstaller._manage_python_executable()
try:
os.chdir('/usr/local/CyberCP')
mk = subprocess.call(
[py, manage_py, 'makemigrations', pluginName],
stdin=subprocess.DEVNULL,
)
if mk != 0:
pluginInstaller.stdOut(
'makemigrations %s exited %s (ok if no model changes)' % (pluginName, mk)
)
mig = subprocess.call(
[py, manage_py, 'migrate', pluginName, '--noinput'],
stdin=subprocess.DEVNULL,
)
if mig != 0:
pluginInstaller.stdOut(
'migrate %s exited %s — check CyberPanel logs and DB permissions' % (pluginName, mig)
)
finally:
try:
os.chdir(currentDir)
except OSError:
pass
@staticmethod
@@ -427,12 +475,14 @@ class pluginInstaller:
##
if pluginInstaller.migrationsEnabled(pluginName):
pluginInstaller.stdOut('Running Migrations..')
if pluginInstaller.shouldApplyPluginDatabaseMigrations(pluginName):
pluginInstaller.stdOut('Running database migrations for %s..' % pluginName)
pluginInstaller.installMigrations(pluginName)
pluginInstaller.stdOut('Migrations Completed..')
pluginInstaller.stdOut('Database migrations step finished for %s.' % pluginName)
else:
pluginInstaller.stdOut('Migrations not enabled, add file \'enable_migrations\' to plugin to enable')
pluginInstaller.stdOut(
'No plugin migrations to apply (no migrations/ package and no enable_migrations marker).'
)
##
@@ -625,8 +675,11 @@ class pluginInstaller:
def removeMigrations(pluginName):
currentDir = os.getcwd()
os.chdir('/usr/local/CyberCP')
command = "python3 /usr/local/CyberCP/manage.py migrate %s zero" % pluginName
subprocess.call(shlex.split(command))
py = pluginInstaller._manage_python_executable()
subprocess.call(
[py, '/usr/local/CyberCP/manage.py', 'migrate', pluginName, 'zero', '--noinput'],
stdin=subprocess.DEVNULL,
)
os.chdir(currentDir)
@staticmethod
@@ -640,12 +693,12 @@ class pluginInstaller:
##
if pluginInstaller.migrationsEnabled(pluginName):
pluginInstaller.stdOut('Removing migrations..')
if pluginInstaller.shouldApplyPluginDatabaseMigrations(pluginName):
pluginInstaller.stdOut('Reverting database migrations for %s..' % pluginName)
pluginInstaller.removeMigrations(pluginName)
pluginInstaller.stdOut('Migrations removed..')
pluginInstaller.stdOut('Database migrations reverted for %s.' % pluginName)
else:
pluginInstaller.stdOut('Migrations not enabled, add file \'enable_migrations\' to plugin to enable')
pluginInstaller.stdOut('Skipping migrate zero (no migrations package / marker).')
##
@@ -711,4 +764,4 @@ def main():
pluginInstaller.removePlugin(args.pluginName)
if __name__ == "__main__":
main()
main()

View File

@@ -0,0 +1,68 @@
<?php
// phpMyAdmin single sign-on bridge (PMA "signon" auth mode).
// Three entry paths: (1) token+username -> POST-redirect to CyberPanel to
// fetch DB credentials; (2) logout -> destroy the signon session;
// (3) password POST -> seed the PMA signon session and redirect into PMA.
define("PMA_SIGNON_INDEX", 1);
try {
define('PMA_SIGNON_SESSIONNAME', 'SignonSession');
define('PMA_DISABLE_SSL_PEER_VALIDATION', TRUE);
// Handle both GET and POST parameters for token and username
$token = isset($_POST['token']) ? $_POST['token'] : (isset($_GET['token']) ? $_GET['token'] : null);
$username = isset($_POST['username']) ? $_POST['username'] : (isset($_GET['username']) ? $_GET['username'] : null);
if ($token && $username) {
### Get credentials using the token
// Escaped with htmlspecialchars because both values are echoed into HTML
// attribute values below.
$token = htmlspecialchars($token, ENT_QUOTES, 'UTF-8');
$username = htmlspecialchars($username, ENT_QUOTES, 'UTF-8');
//$url = "/dataBases/fetchDetailsPHPMYAdmin?token=" . $token . '&username=' . $username;
$url = "/dataBases/fetchDetailsPHPMYAdmin";
// header('Location: ' . $url);
// Redirect with POST data
// Auto-submitting form: converts this request into a POST so the token is
// not carried in the URL/query string.
echo '<form id="redirectForm" action="' . $url . '" method="post">';
echo '<input type="hidden" value="' . $token . '" name="token">';
echo '<input type="hidden" value="' . $username . '" name="username">';
echo '</form>';
echo '<script>document.getElementById("redirectForm").submit();</script>';
} else if (isset($_POST['logout']) || isset($_GET['logout'])) {
// Logout: wipe the signon session and expire its cookie, then send the
// browser back to the panel.
session_name(PMA_SIGNON_SESSIONNAME);
@session_start();
$_SESSION = array();
$params = session_get_cookie_params();
setcookie(session_name(), '', time() - 86400, $params["path"], $params["domain"], $params["secure"], $params["httponly"]);
session_destroy();
header('Location: /base/');
exit;
} else if (isset($_POST['password'])) {
// Credential hand-off: store user/password/host in the session keys that
// phpMyAdmin's signon auth reads, then jump into PMA.
session_name(PMA_SIGNON_SESSIONNAME);
@session_start();
$username = htmlspecialchars($_POST['username'], ENT_QUOTES, 'UTF-8');
$password = $_POST['password'];
// Force TCP: 'localhost' would make MySQL clients use the UNIX socket.
$host = isset($_POST['host']) ? trim($_POST['host']) : '127.0.0.1';
if ($host === 'localhost') { $host = '127.0.0.1'; }
$_SESSION['PMA_single_signon_user'] = $username;
$_SESSION['PMA_single_signon_password'] = $password;
$_SESSION['PMA_single_signon_host'] = $host;
@session_write_close();
// NOTE(review): no exit after this redirect — script ends here anyway,
// but confirm nothing is ever appended below this branch.
header('Location: /phpmyadmin/index.php?server=' . PMA_SIGNON_INDEX);
}
} catch (Exception $e) {
// On any failure, tear down the session cookie and bounce back to the
// panel's phpMyAdmin page.
echo 'Caught exception: ', $e->getMessage(), "\n";
$params = session_get_cookie_params();
setcookie(session_name(), '', time() - 86400, $params["path"], $params["domain"], $params["secure"], $params["httponly"]);
session_destroy();
header('Location: /dataBases/phpMyAdmin');
return;
}