Merge pull request #1744 from master3395/v2.5.5-dev

V2.5.5 dev
This commit is contained in:
Master3395
2026-03-25 19:10:43 +01:00
committed by GitHub
3 changed files with 275 additions and 62 deletions

View File

@@ -11,6 +11,7 @@ https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
import sys
from django.utils.translation import gettext_lazy as _
# Patreon OAuth (optional): for paid-plugin verification via Patreon membership.
@@ -281,6 +282,20 @@ DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
_cybercp_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
if os.path.isdir(_cybercp_root):
try:
# Allow importing plugin packages from the on-disk plugin store sources.
# This helps when /usr/local/CyberCP/<plugin>/ exists as an "installed marker"
# but the full plugin code is missing/incomplete.
_plugin_source_roots = [
'/home/cyberpanel/plugins',
'/home/cyberpanel-plugins',
]
for _src_root in _plugin_source_roots:
try:
if os.path.isdir(_src_root) and _src_root not in sys.path:
sys.path.append(_src_root)
except Exception:
pass
_existing_apps = set(INSTALLED_APPS)
for _name in os.listdir(_cybercp_root):
if _name.startswith('.'):
@@ -290,8 +305,27 @@ if os.path.isdir(_cybercp_root):
continue
if _name in _existing_apps:
continue
if (os.path.exists(os.path.join(_plugin_dir, 'meta.xml')) and
os.path.exists(os.path.join(_plugin_dir, 'urls.py'))):
_installed_has_meta_and_urls = (
os.path.exists(os.path.join(_plugin_dir, 'meta.xml')) and
os.path.exists(os.path.join(_plugin_dir, 'urls.py'))
)
# Fallback: if the installed directory exists but is incomplete,
# try to treat the plugin as installed if we can find meta.xml + urls.py in source.
_source_has_meta_and_urls = False
for _src_root in (
'/home/cyberpanel/plugins',
'/home/cyberpanel-plugins',
):
_src_dir = os.path.join(_src_root, _name)
if not os.path.isdir(_src_dir):
continue
if os.path.exists(os.path.join(_src_dir, 'meta.xml')) and os.path.exists(os.path.join(_src_dir, 'urls.py')):
_source_has_meta_and_urls = True
break
if _installed_has_meta_and_urls or _source_has_meta_and_urls:
INSTALLED_APPS.append(_name)
_existing_apps.add(_name)
except (OSError, IOError):

View File

@@ -1949,7 +1949,8 @@ function toggleView(view, updateHash = true) {
else if (document.getElementById('viewBtnUpgradesNoPlugins')) document.getElementById('viewBtnUpgradesNoPlugins').classList.add('active');
if (storePlugins.length === 0) {
loadPluginStore(true);
// Lazy-load: only fetch when upgrades view is opened by user action (not initial render).
if (updateHash) loadPluginStore(true);
} else {
displayUpgradesAvailable();
}
@@ -2580,7 +2581,9 @@ function upgradePlugin(pluginName, currentVersion, newVersion) {
}
// Refetch store so upgrades list and badge update (plugin no longer shows as upgradable)
if (typeof loadPluginStore === 'function') {
loadPluginStore(true);
const upgradesViewEl = document.getElementById('upgradesView');
const shouldShowUpgrades = upgradesViewEl && upgradesViewEl.style.display === 'block';
loadPluginStore(shouldShowUpgrades);
}
} else {
if (typeof PNotify !== 'undefined') {

View File

@@ -18,6 +18,7 @@ import sys
import urllib.request
import urllib.error
import time
import threading
import inspect
sys.path.append('/usr/local/CyberCP')
from pluginInstaller.pluginInstaller import pluginInstaller
@@ -31,6 +32,7 @@ PLUGIN_STORE_CACHE_DIR = '/home/cyberpanel/plugin_store_cache'
PLUGIN_STORE_CACHE_FILE = os.path.join(PLUGIN_STORE_CACHE_DIR, 'plugins_cache.json')
PLUGIN_STORE_CACHE_DURATION = 3600 # Base cache duration: 1 hour (3600 seconds)
PLUGIN_STORE_CACHE_RANDOM_OFFSET = 600 # Random offset: ±10 minutes (600 seconds) to prevent simultaneous requests
PLUGIN_STORE_REFRESH_LOCK_FILE = os.path.join(PLUGIN_STORE_CACHE_DIR, 'plugins_cache_refresh.lock')
GITHUB_REPO_API = 'https://api.github.com/repos/master3395/cyberpanel-plugins/contents'
GITHUB_RAW_BASE = 'https://raw.githubusercontent.com/master3395/cyberpanel-plugins/main'
GITHUB_COMMITS_API = 'https://api.github.com/repos/master3395/cyberpanel-plugins/commits'
@@ -123,6 +125,30 @@ def _get_plugin_source_path(plugin_name):
return path
return None
def _get_local_plugin_meta_modify_date(plugin_name):
    """
    Return a plugin's "modify date" derived from local meta.xml timestamps.

    Checks the installed copy (/usr/local/CyberCP/<plugin>/meta.xml) first,
    then each plugin source root in PLUGIN_SOURCE_PATHS. This avoids
    per-plugin GitHub commits API calls while still providing a useful
    "Modify date" column in the plugin store UI.

    Args:
        plugin_name: Directory name of the plugin (e.g. 'redisManager').

    Returns:
        str: mtime formatted as '%Y-%m-%d %H:%M:%S', or 'N/A' when no
        readable meta.xml is found in any candidate location.
    """
    # Installed location takes precedence over source checkouts.
    candidate_paths = [os.path.join('/usr/local/CyberCP', plugin_name, 'meta.xml')]
    candidate_paths.extend(
        os.path.join(base, plugin_name, 'meta.xml') for base in PLUGIN_SOURCE_PATHS
    )
    for meta_path in candidate_paths:
        try:
            # isfile() already implies existence; the original's separate
            # exists() check was redundant.
            if os.path.isfile(meta_path):
                modify_time = os.path.getmtime(meta_path)
                return datetime.fromtimestamp(modify_time).strftime('%Y-%m-%d %H:%M:%S')
        except Exception:
            # Unreadable or racy filesystem entry: fall through to the next candidate.
            continue
    return 'N/A'
def _ensure_plugin_meta_xml(plugin_name):
"""
If plugin is installed (directory exists) but meta.xml is missing,
@@ -142,10 +168,13 @@ def _ensure_plugin_meta_xml(plugin_name):
except Exception as e:
logging.writeToFile(f"Could not restore meta.xml for {plugin_name}: {e}")
return
try:
_sync_meta_xml_from_github(plugin_name)
except Exception:
pass
# Performance: do not call GitHub during /plugins/installed render path.
# If meta.xml is still missing, we just skip enrichment for this plugin.
logging.writeToFile(
f"meta.xml still missing for {plugin_name}; GitHub sync skipped for performance"
)
return
def _get_plugin_state_file(plugin_name):
"""Get the path to the plugin state file"""
@@ -217,6 +246,17 @@ def installed(request):
errorPlugins = []
processed_plugins = set() # Track which plugins we've already processed
# Timing instrumentation for /plugins/installed slowness
t_total_start = time.perf_counter()
t_repair_start = t_total_start
t_repair = 0.0
repair_attempts = 0
t_source_loop_start = None
t_source_loop = 0.0
t_installed_fallback_start = None
t_installed_fallback = 0.0
t_filesystem_count_start = None
# Repair pass: ensure every installed plugin dir has meta.xml (from source or GitHub) so counts and grid are correct
if os.path.exists(installedPath):
for plugin in os.listdir(installedPath):
@@ -224,10 +264,13 @@ def installed(request):
continue
plugin_dir = os.path.join(installedPath, plugin)
if os.path.isdir(plugin_dir):
repair_attempts += 1
_ensure_plugin_meta_xml(plugin)
t_repair = time.perf_counter() - t_repair_start
# First, process plugins from source directories (multiple paths: /home/cyberpanel/plugins, /home/cyberpanel-plugins)
# BUT: Skip plugins that are already installed - we'll process those from the installed location instead
t_source_loop_start = time.perf_counter()
for pluginPath in PLUGIN_SOURCE_PATHS:
if not os.path.exists(pluginPath):
continue
@@ -398,6 +441,8 @@ def installed(request):
# Also check for installed plugins that don't have source directories
# This handles plugins installed from the store that may not be in /home/cyberpanel/plugins/
if os.path.exists(installedPath):
t_source_loop = time.perf_counter() - t_source_loop_start
t_installed_fallback_start = time.perf_counter()
for plugin in os.listdir(installedPath):
# Skip if already processed
if plugin in processed_plugins:
@@ -517,6 +562,7 @@ def installed(request):
errorPlugins.append({'name': plugin, 'error': f'Error loading installed plugin: {str(e)}'})
logging.writeToFile(f"Installed plugin {plugin}: Error loading - {str(e)}")
continue
t_installed_fallback = time.perf_counter() - t_installed_fallback_start
# Ensure redisManager and memcacheManager load when present (fallback if missed by listdir)
for plugin_name in ('redisManager', 'memcacheManager'):
@@ -574,6 +620,7 @@ def installed(request):
logging.writeToFile(f"Plugin {plugin_name} fallback load error: {str(e)}")
# Calculate installed and active counts: only count real plugins (have meta.xml, not core apps)
t_filesystem_count_start = time.perf_counter()
installed_plugins_in_filesystem = set()
if os.path.exists(installedPath):
for plugin in os.listdir(installedPath):
@@ -609,6 +656,20 @@ def installed(request):
# Sort plugins A-Å by name (case-insensitive) for Grid and Table view
pluginList.sort(key=lambda p: (p.get('name') or '').lower())
# Summary timing log (keep it single-line to avoid huge logs)
try:
t_total = time.perf_counter() - t_total_start
# The individual phase durations were captured immediately after their loops.
t_filesystem_count = (time.perf_counter() - t_filesystem_count_start) if t_filesystem_count_start else 0.0
logging.writeToFile(
f"/plugins/installed timing: total={t_total:.3f}s repair_attempts={repair_attempts} "
f"repair={t_repair:.3f}s source_loop={t_source_loop:.3f}s installed_fallback={t_installed_fallback:.3f}s "
f"filesystem_count={t_filesystem_count:.3f}s pluginList={len(pluginList)} installed_count={installed_count} active_count={active_count}"
)
except Exception:
# Never break page render due to logging failure.
pass
proc = httpProc(request, 'pluginHolder/plugins.html',
{'plugins': pluginList, 'error_plugins': errorPlugins,
'installed_count': installed_count, 'active_count': active_count,
@@ -959,6 +1020,54 @@ def _save_plugins_cache(plugins):
except Exception as e:
logging.writeToFile(f"Error saving plugin store cache: {str(e)}")
def _try_start_plugin_store_refresh_background():
    """
    Best-effort background refresh of the plugin store cache.

    Acquires an exclusive lock file (O_CREAT | O_EXCL) so multiple workers
    don't stampede GitHub, then spawns a daemon thread that refetches the
    plugin list and saves it to the cache. The worker removes the lock file
    when it finishes, whether the fetch succeeds or fails.

    Returns:
        bool: True if a refresh thread was started, False otherwise
        (lock already held, lock acquisition failed, or thread start failed).
    """
    lock_path = PLUGIN_STORE_REFRESH_LOCK_FILE
    try:
        _ensure_cache_dir()
        # Try to acquire a file lock so multiple workers don't stampede GitHub.
        try:
            fd = os.open(lock_path, os.O_CREAT | os.O_EXCL | os.O_WRONLY, 0o644)
            with os.fdopen(fd, 'w') as f:
                # Record the owning PID for debugging stale locks.
                f.write(str(os.getpid()))
        except FileExistsError:
            # Another worker already holds the refresh lock.
            return False
        except Exception as e:
            logging.writeToFile(f"Plugin store refresh lock acquire failed: {str(e)}")
            return False

        def _worker():
            try:
                t0 = time.perf_counter()
                plugins = _fetch_plugins_from_github()
                if plugins:
                    _save_plugins_cache(plugins)
                    dt = time.perf_counter() - t0
                    logging.writeToFile(
                        f"Background plugin store refresh complete: plugins={len(plugins)} duration={dt:.3f}s"
                    )
                else:
                    logging.writeToFile("Background plugin store refresh: fetched 0 plugins")
            except Exception as e:
                # Avoid leaking secrets; just record the error summary.
                logging.writeToFile(f"Background plugin store refresh failed: {str(e)}")
            finally:
                # Always release the lock so future refreshes can run.
                try:
                    if os.path.exists(lock_path):
                        os.remove(lock_path)
                except Exception:
                    pass

        try:
            threading.Thread(target=_worker, daemon=True).start()
        except Exception:
            # Bug fix: the original leaked the lock file here. If the thread
            # cannot be started the worker's finally-block never runs, so the
            # stale lock would block every future background refresh. Release
            # it before falling through to the outer error handler.
            try:
                os.remove(lock_path)
            except OSError:
                pass
            raise
        return True
    except Exception as e:
        logging.writeToFile(f"Error starting plugin store background refresh: {str(e)}")
        return False
def _compare_versions(version1, version2):
"""
Compare two version strings (semantic versioning)
@@ -1288,33 +1397,11 @@ def _fetch_plugins_from_github():
# Parse meta.xml
root = ElementTree.fromstring(meta_xml_content)
# Fetch last commit date for this plugin from GitHub
modify_date = 'N/A'
try:
commits_url = f"{GITHUB_COMMITS_API}?path={plugin_name}&per_page=1"
commits_req = urllib.request.Request(
commits_url,
headers={
'User-Agent': 'CyberPanel-Plugin-Store/1.0',
'Accept': 'application/vnd.github.v3+json'
}
)
with urllib.request.urlopen(commits_req, timeout=10) as commits_response:
commits_data = json.loads(commits_response.read().decode('utf-8'))
if commits_data and len(commits_data) > 0:
commit_date = commits_data[0].get('commit', {}).get('author', {}).get('date', '')
if commit_date:
# Parse ISO 8601 date and format it
try:
from datetime import datetime
dt = datetime.fromisoformat(commit_date.replace('Z', '+00:00'))
modify_date = dt.strftime('%Y-%m-%d %H:%M:%S')
except Exception:
modify_date = commit_date[:19].replace('T', ' ') # Fallback formatting
except Exception as e:
logging.writeToFile(f"Could not fetch commit date for {plugin_name}: {str(e)}")
modify_date = 'N/A'
# Performance: avoid per-plugin GitHub commits API calls.
# Instead, compute modify_date from local meta.xml timestamps
# (installed meta.xml if present, otherwise plugin source meta.xml).
modify_date = _get_local_plugin_meta_modify_date(plugin_name)
freshness = _get_freshness_badge(modify_date)
# Extract paid plugin information
paid_elem = root.find('paid')
@@ -1340,7 +1427,6 @@ def _fetch_plugins_from_github():
logging.writeToFile(f"Plugin {plugin_name}: Invalid category '{type_elem.text}', skipping (use Utility, Security, Backup, or Performance)")
continue
freshness = _get_freshness_badge(modify_date)
plugin_data = {
'plugin_dir': plugin_name,
'name': root.find('name').text if root.find('name') is not None else plugin_name,
@@ -1415,20 +1501,46 @@ def fetch_plugin_store(request):
}, status=401)
try:
# Try to get from cache first
cached_plugins = _get_cached_plugins()
t_total_start = time.perf_counter()
# 1) Fast path: non-expired cache hit
cached_plugins = _get_cached_plugins(allow_expired=False)
if cached_plugins is not None:
# Enrich cached plugins with installed/enabled status
t_enrich_start = time.perf_counter()
enriched_plugins = _enrich_store_plugins(cached_plugins)
dt_total = time.perf_counter() - t_total_start
logging.writeToFile(
f"fetch_plugin_store: cache_hit plugins={len(cached_plugins)} duration={dt_total:.3f}s enrich={time.perf_counter() - t_enrich_start:.3f}s"
)
return JsonResponse({
'success': True,
'plugins': enriched_plugins,
'cached': True
})
# 2) Cache miss OR expired cache: return stale-but-available cache immediately (if present)
stale_plugins = _get_cached_plugins(allow_expired=True)
if stale_plugins is not None:
started_refresh = _try_start_plugin_store_refresh_background()
t_enrich_start = time.perf_counter()
enriched_plugins = _enrich_store_plugins(stale_plugins)
dt_total = time.perf_counter() - t_total_start
warning = 'Using stale plugin store cache (refreshing in background).' if started_refresh else 'Using stale plugin store cache.'
logging.writeToFile(
f"fetch_plugin_store: cache_stale_fallback plugins={len(stale_plugins)} refresh_started={started_refresh} duration={dt_total:.3f}s enrich={time.perf_counter() - t_enrich_start:.3f}s"
)
return JsonResponse({
'success': True,
'plugins': enriched_plugins,
'cached': True,
'warning': warning
})
# Cache miss or expired - fetch from GitHub
# 3) No cache available: fetch from GitHub (slow path)
t_fetch_start = time.perf_counter()
plugins = _fetch_plugins_from_github()
dt_fetch = time.perf_counter() - t_fetch_start
# Enrich plugins with installed/enabled status
enriched_plugins = _enrich_store_plugins(plugins)
@@ -1436,6 +1548,10 @@ def fetch_plugin_store(request):
if plugins:
_save_plugins_cache(plugins)
dt_total = time.perf_counter() - t_total_start
logging.writeToFile(
f"fetch_plugin_store: cache_miss_fetched plugins={len(plugins)} fetch_duration={dt_fetch:.3f}s total_duration={dt_total:.3f}s"
)
return JsonResponse({
'success': True,
'plugins': enriched_plugins,
@@ -1929,32 +2045,82 @@ def plugin_settings_proxy(request, plugin_name):
Proxy for /plugins/<plugin_name>/settings/ so plugin settings pages work even when
the plugin was installed after the worker started (dynamic URL list is built at import time).
"""
import re
import sys
import importlib
mailUtilities.checkHome()
if not user_can_manage_plugins(request):
from django.http import HttpResponseForbidden
return HttpResponseForbidden('You are not authorized to manage plugins.')
plugin_path = '/usr/local/CyberCP/' + plugin_name
urls_py = os.path.join(plugin_path, 'urls.py')
if not plugin_name or not os.path.isdir(plugin_path) or not os.path.exists(urls_py):
# Basic hardening against path traversal / unexpected module names.
if not plugin_name or not re.match(r'^[A-Za-z0-9_]+$', plugin_name):
from django.http import HttpResponseNotFound
return HttpResponseNotFound('Plugin not found or has no URL configuration.')
return HttpResponseNotFound('Invalid plugin.')
# Reserved internal directories / apps.
if plugin_name in RESERVED_PLUGIN_DIRS or plugin_name in (
'api', 'installed', 'help', 'emailMarketing', 'emailPremium', 'pluginHolder'
):
from django.http import HttpResponseNotFound
return HttpResponseNotFound('Invalid plugin.')
installed_plugin_path = os.path.join('/usr/local/CyberCP', plugin_name)
# Try to import plugin settings from either:
# - installed plugin directory (/usr/local/CyberCP/<plugin_name>/)
# - plugin source directories (/home/cyberpanel-plugins/<plugin_name>/ etc)
# This fixes 404s when the installed copy is incomplete.
source_candidates = []
for src_base in PLUGIN_SOURCE_PATHS:
src_dir = os.path.join(src_base, plugin_name)
if os.path.isdir(src_dir):
source_candidates.append((src_base, src_dir))
parents_to_try = ['/usr/local/CyberCP'] + [p for (p, _) in source_candidates]
orig_sys_path = list(sys.path)
last_err = None
try:
import importlib
views_mod = importlib.import_module(plugin_name + '.views')
settings_view = getattr(views_mod, 'settings', None)
if not callable(settings_view):
from django.http import HttpResponseNotFound
return HttpResponseNotFound('Plugin has no settings view.')
return settings_view(request)
except Exception as e:
logging.writeToFile(f"plugin_settings_proxy for {plugin_name}: {str(e)}")
from django.http import HttpResponseServerError
return HttpResponseServerError(f'Plugin settings error: {str(e)}')
for parent in parents_to_try:
if not parent or not os.path.isdir(parent):
continue
# Ensure import searches this candidate parent first.
if parent in sys.path:
sys.path.remove(parent)
sys.path.insert(0, parent)
# Clear partially imported modules so we can retry from a different path.
for mod_name in [plugin_name, plugin_name + '.views', plugin_name + '.urls']:
if mod_name in sys.modules:
del sys.modules[mod_name]
try:
views_mod = importlib.import_module(plugin_name + '.views')
# Different plugins use different view function names.
# Common ones are:
# - settings(request)
# - settings_view(request) (used by multiple first-party plugins)
for candidate in ('settings', 'settings_view', 'settings_simple', 'unified_settings'):
settings_view = getattr(views_mod, candidate, None)
if callable(settings_view):
return settings_view(request)
except ModuleNotFoundError as e:
last_err = str(e)
continue
except Exception as e:
last_err = str(e)
continue
finally:
sys.path = orig_sys_path
from django.http import HttpResponseNotFound
# If the plugin directory exists under /usr/local/CyberCP, treat this as a broken/incomplete install.
if os.path.isdir(installed_plugin_path):
return HttpResponseNotFound('Plugin settings not available (incomplete installation).')
return HttpResponseNotFound('Plugin not found.')
def plugin_help(request, plugin_name):
@@ -1962,11 +2128,21 @@ def plugin_help(request, plugin_name):
mailUtilities.checkHome()
# Paths for the plugin
plugin_path = '/usr/local/CyberCP/' + plugin_name
meta_xml_path = os.path.join(plugin_path, 'meta.xml')
# Check if plugin exists
if not os.path.exists(plugin_path) or not os.path.exists(meta_xml_path):
installed_plugin_path = '/usr/local/CyberCP/' + plugin_name
meta_xml_path = os.path.join(installed_plugin_path, 'meta.xml')
# If installed meta.xml is missing (e.g. incomplete install), fall back to plugin source.
plugin_path = installed_plugin_path
if not os.path.exists(meta_xml_path):
for src_base in PLUGIN_SOURCE_PATHS:
candidate = os.path.join(src_base, plugin_name, 'meta.xml')
if os.path.exists(candidate):
plugin_path = os.path.join(src_base, plugin_name)
meta_xml_path = candidate
break
# Check if plugin exists (at least meta.xml must exist)
if not os.path.exists(meta_xml_path):
proc = httpProc(request, 'pluginHolder/plugin_not_found.html', {
'plugin_name': plugin_name
}, 'managePlugins')
@@ -1985,7 +2161,7 @@ def plugin_help(request, plugin_name):
plugin_type = root.find('type').text if root.find('type') is not None else 'Plugin'
# Check if plugin is installed
installed = os.path.exists(plugin_path)
installed = os.path.exists(installed_plugin_path)
except Exception as e:
logging.writeToFile(f"Error parsing meta.xml for {plugin_name}: {str(e)}")