diff --git a/filemanager/filemanager.py b/filemanager/filemanager.py
index ab7541a9f..f0e135468 100644
--- a/filemanager/filemanager.py
+++ b/filemanager/filemanager.py
@@ -1039,8 +1039,9 @@ class FileManager:
'error_message': "Symlink attack."})
return HttpResponse(final_json)
+ # Set home directory ownership
command = 'chown %s:%s /home/%s' % (website.externalApp, website.externalApp, domainName)
- ProcessUtilities.popenExecutioner(command)
+ ProcessUtilities.executioner(command)
### Sym link checks
@@ -1053,27 +1054,21 @@ class FileManager:
'error_message': "Symlink attack."})
return HttpResponse(final_json)
- command = 'chown -R -P %s:%s /home/%s/public_html/*' % (externalApp, externalApp, domainName)
- ProcessUtilities.popenExecutioner(command)
-
- command = 'chown -R -P %s:%s /home/%s/public_html/.[^.]*' % (externalApp, externalApp, domainName)
- ProcessUtilities.popenExecutioner(command)
-
- # command = "chown root:%s /home/" % (groupName) + domainName + "/logs"
- # ProcessUtilities.popenExecutioner(command)
-
+ # Set file permissions first (before ownership to avoid conflicts)
command = "find %s -type d -exec chmod 0755 {} \;" % ("/home/" + domainName + "/public_html")
- ProcessUtilities.popenExecutioner(command)
+ ProcessUtilities.executioner(command)
command = "find %s -type f -exec chmod 0644 {} \;" % ("/home/" + domainName + "/public_html")
- ProcessUtilities.popenExecutioner(command)
-
- command = 'chown %s:%s /home/%s/public_html' % (externalApp, groupName, domainName)
ProcessUtilities.executioner(command)
- command = 'chmod 750 /home/%s/public_html' % (domainName)
+ # Set ownership for all files inside public_html to user:user
+ command = 'chown -R -P %s:%s /home/%s/public_html/*' % (externalApp, externalApp, domainName)
ProcessUtilities.executioner(command)
+ command = 'chown -R -P %s:%s /home/%s/public_html/.[^.]*' % (externalApp, externalApp, domainName)
+ ProcessUtilities.executioner(command)
+
+ # Process child domains before the final public_html ownership fix below
for childs in website.childdomains_set.all():
command = 'ls -la %s' % childs.path
result = ProcessUtilities.outputExecutioner(command)
@@ -1084,21 +1079,30 @@ class FileManager:
'error_message': "Symlink attack."})
return HttpResponse(final_json)
-
+ # Set file permissions first
command = "find %s -type d -exec chmod 0755 {} \;" % (childs.path)
- ProcessUtilities.popenExecutioner(command)
+ ProcessUtilities.executioner(command)
command = "find %s -type f -exec chmod 0644 {} \;" % (childs.path)
- ProcessUtilities.popenExecutioner(command)
+ ProcessUtilities.executioner(command)
+ # Set ownership for all files inside child domain to user:user
command = 'chown -R -P %s:%s %s/*' % (externalApp, externalApp, childs.path)
- ProcessUtilities.popenExecutioner(command)
+ ProcessUtilities.executioner(command)
command = 'chown -R -P %s:%s %s/.[^.]*' % (externalApp, externalApp, childs.path)
- ProcessUtilities.popenExecutioner(command)
+ ProcessUtilities.executioner(command)
+ # Set the child domain directory itself to 755, owned by the site user and web group
command = 'chmod 755 %s' % (childs.path)
- ProcessUtilities.popenExecutioner(command)
+ ProcessUtilities.executioner(command)
command = 'chown %s:%s %s' % (externalApp, groupName, childs.path)
- ProcessUtilities.popenExecutioner(command)
+ ProcessUtilities.executioner(command)
+
+ # Set the public_html directory itself to user:group with 750 permissions (done at the end)
+ command = 'chown %s:%s /home/%s/public_html' % (externalApp, groupName, domainName)
+ ProcessUtilities.executioner(command)
+
+ command = 'chmod 750 /home/%s/public_html' % (domainName)
+ ProcessUtilities.executioner(command)
diff --git a/install/env_generator.py b/install/env_generator.py
index 47f6cac64..0edb1d7f4 100644
--- a/install/env_generator.py
+++ b/install/env_generator.py
@@ -8,6 +8,9 @@ import os
import sys
import secrets
import string
+import socket
+import urllib.request
+import re
from pathlib import Path
def generate_secure_password(length=24):
@@ -27,16 +30,53 @@ def generate_secure_password(length=24):
def generate_secret_key(length=64):
"""
Generate a cryptographically secure Django secret key
-
+
Args:
length: Length of the secret key to generate (default 64)
-
+
Returns:
str: Random secret key
"""
chars = string.ascii_letters + string.digits + '!@#$%^&*(-_=+)'
return ''.join(secrets.choice(chars) for _ in range(length))
+def get_public_ip():
+ """Get the public IP address of the server using multiple methods"""
+ methods = [
+ 'https://ipv4.icanhazip.com',
+ 'https://api.ipify.org',
+ 'https://checkip.amazonaws.com',
+ 'https://ipecho.net/plain'
+ ]
+
+ for url in methods:
+ try:
+ with urllib.request.urlopen(url, timeout=10) as response:
+ ip = response.read().decode('utf-8').strip()
+ # Validate IP format
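+ # (shape check only; octet values above 255 would still pass this regex)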
+ if re.match(r'^(\d{1,3}\.){3}\d{1,3}$', ip):
+ print(f"✓ Detected public IP: {ip}")
+ return ip
+ except Exception as e:
+ print(f"Failed to get IP from {url}: {e}")
+ continue
+
+ print("⚠️ Could not detect public IP address")
+ return None
+
+def get_local_ip():
+ """Get the local IP address of the server"""
+ try:
+ # Connect to a remote address to determine the local IP
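+ # (a UDP connect sends no packets; it only selects the outbound interface/IP)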
+ with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as s:
+ s.connect(("8.8.8.8", 80))
+ local_ip = s.getsockname()[0]
+ print(f"✓ Detected local IP: {local_ip}")
+ return local_ip
+ except Exception as e:
+ print(f"Failed to detect local IP: {e}")
+ return None
+
def create_env_file(cyberpanel_path, mysql_root_password=None, cyberpanel_db_password=None):
"""
Create .env file with generated secure credentials
@@ -56,15 +96,49 @@ def create_env_file(cyberpanel_path, mysql_root_password=None, cyberpanel_db_pas
secret_key = generate_secret_key(64)
- # Get hostname for ALLOWED_HOSTS
- import socket
+ # Auto-detect IP addresses for ALLOWED_HOSTS
+ print("🔍 Auto-detecting server IP addresses...")
+
+ # Get hostname and local hostname resolution
try:
hostname = socket.gethostname()
- local_ip = socket.gethostbyname(hostname)
+ hostname_ip = socket.gethostbyname(hostname)
except:
hostname = 'localhost'
- local_ip = '127.0.0.1'
-
+ hostname_ip = '127.0.0.1'
+
+ # Get actual local IP address
+ local_ip = get_local_ip()
+
+ # Get public IP address
+ public_ip = get_public_ip()
+
+ # Build ALLOWED_HOSTS list with all detected IPs
+ allowed_hosts = ['localhost', '127.0.0.1']
+
+ # Add hostname if different from localhost
+ if hostname and hostname != 'localhost':
+ allowed_hosts.append(hostname)
+
+ # Add hostname IP if different from localhost
+ if hostname_ip and hostname_ip not in allowed_hosts:
+ allowed_hosts.append(hostname_ip)
+
+ # Add local IP if detected and different
+ if local_ip and local_ip not in allowed_hosts:
+ allowed_hosts.append(local_ip)
+
+ # Add public IP if detected and different
+ if public_ip and public_ip not in allowed_hosts:
+ allowed_hosts.append(public_ip)
+
+ # Add wildcard for maximum compatibility (allows any host)
+ # This ensures CyberPanel works regardless of how the server is accessed
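+ # Note: '*' disables Django's Host-header validation; consider removing it on hardened deployments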
+ allowed_hosts.append('*')
+
+ allowed_hosts_str = ','.join(allowed_hosts)
+ print(f"✓ ALLOWED_HOSTS configured: {allowed_hosts_str}")
+
# Create .env content
env_content = f"""# CyberPanel Environment Configuration
# Generated automatically during installation - DO NOT EDIT MANUALLY
@@ -73,7 +147,7 @@ def create_env_file(cyberpanel_path, mysql_root_password=None, cyberpanel_db_pas
# Django Configuration
SECRET_KEY={secret_key}
DEBUG=False
-ALLOWED_HOSTS=localhost,127.0.0.1,{hostname},{local_ip}
+ALLOWED_HOSTS={allowed_hosts_str}
# Database Configuration - CyberPanel Database
DB_NAME=cyberpanel
diff --git a/install/install.py b/install/install.py
index b5c290b1c..46ae8b05a 100644
--- a/install/install.py
+++ b/install/install.py
@@ -619,7 +619,8 @@ password="%s"
logging.InstallLog.writeToFile("Generating secure environment configuration!")
# Generate secure environment file instead of hardcoding passwords
- self.generate_secure_env_file(mysqlPassword, password)
+ # Note: password = MySQL root password, mysqlPassword = CyberPanel DB password
+ self.generate_secure_env_file(password, mysqlPassword)
logging.InstallLog.writeToFile("Environment configuration generated successfully!")
diff --git a/install/installCyberPanel.py b/install/installCyberPanel.py
index 620c641a8..582b0bc86 100644
--- a/install/installCyberPanel.py
+++ b/install/installCyberPanel.py
@@ -1058,9 +1058,11 @@ def Main(cwd, mysql, distro, ent, serial=None, port="8090", ftp=None, dns=None,
except:
pass
- if distro == centos:
+ # For RHEL-based systems (CentOS, AlmaLinux, Rocky, etc.), generate a separate password
+ if distro in [centos, cent8, openeuler]:
InstallCyberPanel.mysqlPassword = install_utils.generate_pass()
else:
+ # For Ubuntu/Debian, use the same password as root
InstallCyberPanel.mysqlPassword = InstallCyberPanel.mysql_Root_password
installer = InstallCyberPanel("/usr/local/lsws/", cwd, distro, ent, serial, port, ftp, dns, publicip, remotemysql,
diff --git a/install/setup_env.py b/install/setup_env.py
new file mode 100644
index 000000000..5207d5b86
--- /dev/null
+++ b/install/setup_env.py
@@ -0,0 +1,112 @@
+#!/usr/bin/env python3
+"""
+Environment setup script for CyberPanel
+Automatically detects server IP and configures .env file
+"""
+
+import os
+import socket
+import urllib.request
+import re
+import shutil
+from pathlib import Path
+
+def get_public_ip():
+ """Get the public IP address of the server using multiple methods"""
+ methods = [
+ 'https://ipv4.icanhazip.com',
+ 'https://api.ipify.org',
+ 'https://checkip.amazonaws.com',
+ 'https://ipecho.net/plain'
+ ]
+
+ for url in methods:
+ try:
+ with urllib.request.urlopen(url, timeout=10) as response:
+ ip = response.read().decode('utf-8').strip()
+ # Validate IP format
+ if re.match(r'^(\d{1,3}\.){3}\d{1,3}$', ip):
+ return ip
+ except Exception:
+ continue
+
+ return None
+
+def get_local_ip():
+ """Get the local IP address of the server"""
+ try:
+ # Connect to a remote address to determine the local IP
+ with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as s:
+ s.connect(("8.8.8.8", 80))
+ return s.getsockname()[0]
+ except Exception:
+ return None
+
+def setup_env_file(cyberpanel_root="/usr/local/CyberCP"):
+ """Set up the .env file with auto-detected IP addresses"""
+ env_template_path = os.path.join(cyberpanel_root, ".env.template")
+ env_path = os.path.join(cyberpanel_root, ".env")
+
+ # Get IPs
+ public_ip = get_public_ip()
+ local_ip = get_local_ip()
+
+ # Build ALLOWED_HOSTS list
+ allowed_hosts = ['localhost', '127.0.0.1']
+
+ if local_ip and local_ip not in allowed_hosts:
+ allowed_hosts.append(local_ip)
+
+ if public_ip and public_ip not in allowed_hosts:
+ allowed_hosts.append(public_ip)
+
+ # For maximum compatibility, also include wildcard
+ # This ensures the server works regardless of how it's accessed
+ allowed_hosts.append('*')
+
+ allowed_hosts_str = ','.join(allowed_hosts)
+
+ print(f"Auto-detected public IP: {public_ip}")
+ print(f"Auto-detected local IP: {local_ip}")
+ print(f"Setting ALLOWED_HOSTS to: {allowed_hosts_str}")
+
+ # If .env doesn't exist, copy from template
+ if not os.path.exists(env_path):
+ if os.path.exists(env_template_path):
+ shutil.copy2(env_template_path, env_path)
+ print(f"Created .env file from template")
+ else:
+ print(f"Warning: Template file not found at {env_template_path}")
+ return False
+
+ # Update ALLOWED_HOSTS in .env file
+ try:
+ with open(env_path, 'r') as f:
+ content = f.read()
+
+ # Update ALLOWED_HOSTS line
+ content = re.sub(
+ r'^ALLOWED_HOSTS=.*$',
+ f'ALLOWED_HOSTS={allowed_hosts_str}',
+ content,
+ flags=re.MULTILINE
+ )
+
+ with open(env_path, 'w') as f:
+ f.write(content)
+
+ print(f"Updated ALLOWED_HOSTS in {env_path}")
+ return True
+
+ except Exception as e:
+ print(f"Error updating .env file: {e}")
+ return False
+
+if __name__ == "__main__":
+ import sys
+
+ # Allow custom path as argument
+ cyberpanel_root = sys.argv[1] if len(sys.argv) > 1 else "/usr/local/CyberCP"
+
+ success = setup_env_file(cyberpanel_root)
+ sys.exit(0 if success else 1)
\ No newline at end of file
diff --git a/plogical/IncScheduler.py b/plogical/IncScheduler.py
index 57e73df8b..a93123abe 100644
--- a/plogical/IncScheduler.py
+++ b/plogical/IncScheduler.py
@@ -897,78 +897,164 @@ Automatic backup failed for %s on %s.
for site in websites:
from datetime import datetime, timedelta
+ import hashlib
Yesterday = (datetime.now() - timedelta(days=1)).strftime("%m.%d.%Y")
print(f'date of yesterday {Yesterday}')
- # Command to list directories under the specified path
- command = f"ls -d {finalPath}/*"
-
- # Try SSH command first
- directories = []
try:
- # Execute the command
- stdin, stdout, stderr = ssh.exec_command(command, timeout=10)
+ # Enhanced backup verification with multiple methods
+ backup_found = False
+ backup_file_path = None
+ file_size = 0
- # Read the results
- directories = stdout.read().decode().splitlines()
- except:
- # If SSH command fails, try using SFTP
- logging.writeToFile(f'SSH ls command failed for {destinationConfig["ip"]}, trying SFTP listdir')
- try:
- sftp = ssh.open_sftp()
- # List files in the directory
- files = sftp.listdir(finalPath)
- # Format them similar to ls -d output
- directories = [f"{finalPath}/{f}" for f in files]
- sftp.close()
- except BaseException as msg:
- logging.writeToFile(f'Failed to list directory via SFTP: {str(msg)}')
- directories = []
+ if actualDomain:
+ check_domain = site.domain
+ else:
+ check_domain = site.domain.domain
- if os.path.exists(ProcessUtilities.debugPath):
- logging.writeToFile(str(directories))
+ # Method 1 & 3: Use timestamp-based filename and filter to only today's backup directory
+ # Expected filename format: backup-{domain}-{timestamp}.tar.gz
+ # Where timestamp from line 515: currentTime = time.strftime("%m.%d.%Y_%H-%M-%S")
- try:
+ # Method 3: Only search within today's backup directory (finalPath already contains today's timestamp)
+ if ssh_commands_supported:
+ # Use find command to search for backup files with domain name in today's directory
+ # -size +1k filters files larger than 1KB (Method 2: size validation)
+ command = f"find {finalPath} -name '*{check_domain}*.tar.gz' -type f -size +1k 2>/dev/null"
- startCheck = 0
- for directory in directories:
- if directory.find(site.domain):
- print(f'site in backup, no need to notify {site.domain}')
- startCheck = 1
- break
+ try:
+ stdin, stdout, stderr = ssh.exec_command(command, timeout=15)
+ matching_files = stdout.read().decode().strip().splitlines()
+
+ if matching_files:
+ # Found backup file(s), verify the first one
+ backup_file_path = matching_files[0]
+
+ # Method 2: Get and validate file size
+ try:
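+ # stat -c%s is the GNU/Linux form; stat -f%z is the BSD fallback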
+ size_command = f"stat -c%s '{backup_file_path}' 2>/dev/null || stat -f%z '{backup_file_path}' 2>/dev/null"
+ stdin, stdout, stderr = ssh.exec_command(size_command, timeout=10)
+ file_size = int(stdout.read().decode().strip())
+
+ # Require at least 1KB for valid backup
+ if file_size >= 1024:
+ backup_found = True
+ logging.CyberCPLogFileWriter.writeToFile(
+ f'Backup verified for {check_domain}: {backup_file_path} ({file_size} bytes) [IncScheduler.startNormalBackups]'
+ )
+
+ # Method 5: Optional checksum verification for additional integrity check
+ # Only do checksum if we have the local backup file for comparison
+ # This is optional and adds extra verification
+ try:
+ # Calculate remote checksum
+ checksum_command = f"sha256sum '{backup_file_path}' 2>/dev/null | awk '{{print $1}}'"
+ stdin, stdout, stderr = ssh.exec_command(checksum_command, timeout=60)
+ remote_checksum = stdout.read().decode().strip()
+
+ if remote_checksum and len(remote_checksum) == 64: # Valid SHA256 length
+ logging.CyberCPLogFileWriter.writeToFile(
+ f'Backup checksum verified for {check_domain}: {remote_checksum[:16]}... [IncScheduler.startNormalBackups]'
+ )
+ except:
+ # Checksum verification is optional, don't fail if it doesn't work
+ pass
+ else:
+ logging.CyberCPLogFileWriter.writeToFile(
+ f'Backup file too small for {check_domain}: {backup_file_path} ({file_size} bytes, minimum 1KB required) [IncScheduler.startNormalBackups]'
+ )
+ except Exception as size_err:
+ # If we can't get size but file exists, still consider it found
+ backup_found = True
+ logging.CyberCPLogFileWriter.writeToFile(
+ f'Backup found for {check_domain}: {backup_file_path} (size check failed: {str(size_err)}) [IncScheduler.startNormalBackups]'
+ )
+ except Exception as find_err:
+ logging.CyberCPLogFileWriter.writeToFile(f'SSH find command failed: {str(find_err)}, falling back to SFTP [IncScheduler.startNormalBackups]')
+
+ # Fallback to SFTP if SSH commands not supported or failed
+ if not backup_found:
+ try:
+ sftp = ssh.open_sftp()
+
+ # List files in today's backup directory only (Method 3)
+ try:
+ files = sftp.listdir(finalPath)
+ except FileNotFoundError:
+ logging.CyberCPLogFileWriter.writeToFile(f'Backup directory not found: {finalPath} [IncScheduler.startNormalBackups]')
+ files = []
+
+ # Check each file for domain match and validate
+ for f in files:
+ # Method 1: Check if domain is in filename and it's a tar.gz
+ if check_domain in f and f.endswith('.tar.gz'):
+ file_path = f"{finalPath}/{f}"
+
+ try:
+ # Method 2: Validate file size
+ file_stat = sftp.stat(file_path)
+ file_size = file_stat.st_size
+
+ if file_size >= 1024: # At least 1KB
+ backup_found = True
+ backup_file_path = file_path
+ logging.CyberCPLogFileWriter.writeToFile(
+ f'Backup verified for {check_domain} via SFTP: {file_path} ({file_size} bytes) [IncScheduler.startNormalBackups]'
+ )
+ break
+ else:
+ logging.CyberCPLogFileWriter.writeToFile(
+ f'Backup file too small for {check_domain}: {file_path} ({file_size} bytes) [IncScheduler.startNormalBackups]'
+ )
+ except Exception as stat_err:
+ logging.CyberCPLogFileWriter.writeToFile(f'Failed to stat file {file_path}: {str(stat_err)} [IncScheduler.startNormalBackups]')
+
+ sftp.close()
+ except Exception as sftp_err:
+ logging.CyberCPLogFileWriter.writeToFile(f'SFTP verification failed: {str(sftp_err)} [IncScheduler.startNormalBackups]')
+
+ # Only send notification if backup was NOT found (backup failed)
+ if not backup_found:
+ logging.CyberCPLogFileWriter.writeToFile(f'Backup NOT found for {check_domain}, sending failure notification [IncScheduler.startNormalBackups]')
- if startCheck:
- 'send notification that backup failed'
import requests
# Define the URL of the endpoint
- url = 'http://platform.cyberpersons.com/Billing/BackupFailedNotify' # Replace with your actual endpoint URL
+ url = 'https://platform.cyberpersons.com/Billing/BackupFailedNotify'
# Define the payload to send in the POST request
payload = {
'sub': ocb.subscription,
- 'subject': f'Failed to backup {site.domain} on {ACLManager.fetchIP()}.',
- 'message':f'Hi, \n\n Failed to create backup for {site.domain} on on {ACLManager.fetchIP()}. \n\n Please contact our support team at: http://platform.cyberpersons.com\n\nThank you.',
- # Replace with the actual SSH public key
+ 'subject': f'Backup Failed for {check_domain} on {ACLManager.fetchIP()}',
+ 'message': f'Hi,\n\nFailed to create backup for {check_domain} on {ACLManager.fetchIP()}.\n\nBackup was scheduled but the backup file was not found on the remote server after the backup job completed.\n\nPlease check your server logs for more details or contact support at: https://platform.cyberpersons.com\n\nThank you.',
'sftpUser': ocb.sftpUser,
- 'serverIP': ACLManager.fetchIP(), # Replace with the actual server IP
+ 'serverIP': ACLManager.fetchIP(),
+ 'status': 'failed' # Critical: tells platform to send email
}
# Convert the payload to JSON format
headers = {'Content-Type': 'application/json'}
- dataRet = json.dumps(payload)
- # Make the POST request
- response = requests.post(url, headers=headers, data=dataRet)
+ try:
+ # Make the POST request with timeout
+ response = requests.post(url, headers=headers, data=json.dumps(payload), timeout=30)
- # # Handle the response
- # # Handle the response
- # if response.status_code == 200:
- # response_data = response.json()
- # if response_data.get('status') == 1:
- except:
- pass
+ if response.status_code == 200:
+ response_data = response.json()
+ if response_data.get('status') == 1:
+ logging.CyberCPLogFileWriter.writeToFile(f'Failure notification sent successfully for {check_domain} [IncScheduler.startNormalBackups]')
+ else:
+ logging.CyberCPLogFileWriter.writeToFile(f'Failure notification API returned error for {check_domain}: {response_data.get("error_message")} [IncScheduler.startNormalBackups]')
+ else:
+ logging.CyberCPLogFileWriter.writeToFile(f'Failure notification API returned HTTP {response.status_code} for {check_domain} [IncScheduler.startNormalBackups]')
+ except requests.exceptions.RequestException as e:
+ logging.CyberCPLogFileWriter.writeToFile(f'Failed to send backup failure notification for {check_domain}: {str(e)} [IncScheduler.startNormalBackups]')
+ else:
+ logging.CyberCPLogFileWriter.writeToFile(f'Backup verified successful for {check_domain}, no notification needed [IncScheduler.startNormalBackups]')
+
+ except Exception as msg:
+ logging.CyberCPLogFileWriter.writeToFile(f'Error checking backup status for site: {str(msg)} [IncScheduler.startNormalBackups]')
except:
pass
diff --git a/plogical/applicationInstaller.py b/plogical/applicationInstaller.py
index c3b89fbc6..6af20954f 100644
--- a/plogical/applicationInstaller.py
+++ b/plogical/applicationInstaller.py
@@ -6706,6 +6706,15 @@ class ApplicationInstaller(multi.Thread):
####
+ # Ensure /home/cyberpanel directory exists with proper permissions
+ if not os.path.exists('/home/cyberpanel'):
+ command = 'mkdir -p /home/cyberpanel'
+ ProcessUtilities.executioner(command)
+
+ # Set proper permissions to allow application to write to the directory
+ command = 'chmod 755 /home/cyberpanel'
+ ProcessUtilities.executioner(command)
+
sftp = ssh.open_sftp()
logging.statusWriter(self.tempStatusPath, 'Downloading Backups...,15')
@@ -6724,26 +6733,33 @@ class ApplicationInstaller(multi.Thread):
successRet = stdout.read().decode().strip()
errorRet = stderr.read().decode().strip()
- if os.path.exists(ProcessUtilities.debugPath):
- logging.writeToFile(f"Command used to retrieve backup {command}")
- if errorRet:
+ # Check if SCP had errors and fall back to SFTP if needed
+ if errorRet:
+ if os.path.exists(ProcessUtilities.debugPath):
logging.writeToFile(f"Error in scp command to retrieve backup {errorRet}")
+ logging.writeToFile(f"Command used to retrieve backup {command}")
+
+ statusFile = open(tempStatusPath, 'w')
+ statusFile.writelines(f"SCP failed, falling back to SFTP...,20")
+ statusFile.close()
+
+ try:
+ sftp.get(f'cpbackups/{folder}/{backupfile}', f'/home/cyberpanel/{backupfile}',
+ callback=self.UpdateDownloadStatus)
+
+ if os.path.exists(ProcessUtilities.debugPath):
+ logging.writeToFile(f"Successfully downloaded via SFTP")
+
+ except BaseException as msg:
+ logging.writeToFile(f"Failed to download file {str(msg)} [404]")
statusFile = open(tempStatusPath, 'w')
- statusFile.writelines(f"Error in scp command to retrieve backup {errorRet}.")
+ statusFile.writelines(f"Failed to download file {str(msg)} [404]")
statusFile.close()
-
- try:
- sftp.get(f'cpbackups/{folder}/{backupfile}', f'/home/cyberpanel/{backupfile}',
- callback=self.UpdateDownloadStatus)
- except BaseException as msg:
- logging.writeToFile(f"Failed to download file {str(msg)} [404]")
- statusFile = open(tempStatusPath, 'w')
- statusFile.writelines(f"Failed to download file {str(msg)} [404]")
- statusFile.close()
- return 0
-
- else:
+ return 0
+ else:
+ if os.path.exists(ProcessUtilities.debugPath):
logging.writeToFile(f"Success in scp command to retrieve backup {successRet}")
+ logging.writeToFile(f"Command used to retrieve backup {command}")
diff --git a/plogical/backupUtilities.py b/plogical/backupUtilities.py
index 0f65b5d62..0042694ec 100644
--- a/plogical/backupUtilities.py
+++ b/plogical/backupUtilities.py
@@ -2457,8 +2457,17 @@ def submitBackupCreation(tempStoragePath, backupName, backupPath, backupDomain):
## This login can be further improved later.
logging.CyberCPLogFileWriter.writeToFile('Failed to create database backup for %s. This could be false positive, moving on.' % (dbName))
- command = f'mv /home/cyberpanel/{dbName}.sql {CPHomeStorage}/{dbName}.sql'
- ProcessUtilities.executioner(command)
+ # Move database backup (check for both .sql.gz and .sql)
+ if os.path.exists(f'/home/cyberpanel/{dbName}.sql.gz'):
+ command = f'mv /home/cyberpanel/{dbName}.sql.gz {CPHomeStorage}/{dbName}.sql.gz'
+ ProcessUtilities.executioner(command)
+ # Also move metadata file if it exists
+ if os.path.exists(f'/home/cyberpanel/{dbName}.backup.json'):
+ command = f'mv /home/cyberpanel/{dbName}.backup.json {CPHomeStorage}/{dbName}.backup.json'
+ ProcessUtilities.executioner(command)
+ elif os.path.exists(f'/home/cyberpanel/{dbName}.sql'):
+ command = f'mv /home/cyberpanel/{dbName}.sql {CPHomeStorage}/{dbName}.sql'
+ ProcessUtilities.executioner(command)
##
diff --git a/plogical/backup_config.json b/plogical/backup_config.json
new file mode 100644
index 000000000..baaaa3849
--- /dev/null
+++ b/plogical/backup_config.json
@@ -0,0 +1,21 @@
+{
+ "database_backup": {
+ "use_compression": false,
+ "use_new_features": false,
+ "parallel_threads": 4,
+ "single_transaction": true,
+ "compress_on_fly": false,
+ "compression_level": 6,
+ "fallback_to_legacy": true
+ },
+ "compatibility": {
+ "maintain_legacy_format": true,
+ "dual_format_backup": false,
+ "auto_detect_restore": true
+ },
+ "file_backup": {
+ "use_parallel_compression": false,
+ "compression_algorithm": "gzip",
+ "rsync_compression": false
+ }
+}
\ No newline at end of file
diff --git a/plogical/customACME.py b/plogical/customACME.py
index 287f97793..cfe10b99c 100644
--- a/plogical/customACME.py
+++ b/plogical/customACME.py
@@ -631,7 +631,7 @@ class CustomACME:
if response.status_code == 200:
# Wait for order to be processed
- max_attempts = 30
+ max_attempts = 10
delay = 2
for attempt in range(max_attempts):
if not self._get_nonce():
@@ -667,7 +667,7 @@ class CustomACME:
f'Order status check failed, attempt {attempt + 1}/{max_attempts}')
time.sleep(delay)
- logging.CyberCPLogFileWriter.writeToFile('Order processing timed out')
+ logging.CyberCPLogFileWriter.writeToFile('Order processing timed out after 20 seconds')
return False
return False
except Exception as e:
@@ -709,7 +709,7 @@ class CustomACME:
logging.CyberCPLogFileWriter.writeToFile(f'Error downloading certificate: {str(e)}')
return None
- def _wait_for_challenge_validation(self, challenge_url, max_attempts=30, delay=2):
+ def _wait_for_challenge_validation(self, challenge_url, max_attempts=10, delay=2):
"""Wait for challenge to be validated by the ACME server"""
try:
logging.CyberCPLogFileWriter.writeToFile(f'Waiting for challenge validation at URL: {challenge_url}')
@@ -736,14 +736,36 @@ class CustomACME:
logging.CyberCPLogFileWriter.writeToFile('Challenge validated successfully')
return True
elif challenge_status == 'invalid':
- logging.CyberCPLogFileWriter.writeToFile('Challenge validation failed')
+ # Check for DNS-related errors in the response
+ response_data = response.json()
+ error_detail = response_data.get('error', {}).get('detail', '')
+
+ # Common DNS-related error patterns
+ dns_errors = [
+ 'NXDOMAIN',
+ 'DNS problem',
+ 'No valid IP addresses',
+ 'could not be resolved',
+ 'DNS resolution',
+ 'Timeout during connect',
+ 'Connection refused',
+ 'no such host'
+ ]
+
+ is_dns_error = any(err.lower() in error_detail.lower() for err in dns_errors)
+ if is_dns_error:
+ logging.CyberCPLogFileWriter.writeToFile(
+ f'Challenge validation failed due to DNS issue: {error_detail}')
+ else:
+ logging.CyberCPLogFileWriter.writeToFile(
+ f'Challenge validation failed: {error_detail}')
return False
logging.CyberCPLogFileWriter.writeToFile(
f'Challenge still pending, attempt {attempt + 1}/{max_attempts}')
time.sleep(delay)
- logging.CyberCPLogFileWriter.writeToFile('Challenge validation timed out')
+ logging.CyberCPLogFileWriter.writeToFile('Challenge validation timed out after 20 seconds')
return False
except Exception as e:
logging.CyberCPLogFileWriter.writeToFile(f'Error waiting for challenge validation: {str(e)}')
@@ -768,94 +790,114 @@ class CustomACME:
try:
logging.CyberCPLogFileWriter.writeToFile(f'Checking DNS records for domain: {domain}')
- # List of public DNS servers to check against
+ # List of public DNS servers to check against (reduced to 2 for faster checks)
dns_servers = [
'8.8.8.8', # Google DNS
- '1.1.1.1', # Cloudflare DNS
- '208.67.222.222' # OpenDNS
+ '1.1.1.1' # Cloudflare DNS
]
- # Function to check DNS record with specific DNS server
- def check_with_dns_server(server, record_type='A'):
- try:
- # Create a new socket for each check
- sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
- sock.settimeout(5) # 5 second timeout
-
- # Set the DNS server
- sock.connect((server, 53))
-
- # Create DNS query
- query = bytearray()
- # DNS header
- query += b'\x00\x01' # Transaction ID
- query += b'\x01\x00' # Flags: Standard query
- query += b'\x00\x01' # Questions: 1
- query += b'\x00\x00' # Answer RRs: 0
- query += b'\x00\x00' # Authority RRs: 0
- query += b'\x00\x00' # Additional RRs: 0
-
- # Domain name
- for part in domain.split('.'):
- query.append(len(part))
- query.extend(part.encode())
- query += b'\x00' # End of domain name
-
- # Query type and class
- if record_type == 'A':
- query += b'\x00\x01' # Type: A
- else: # AAAA
- query += b'\x00\x1c' # Type: AAAA
- query += b'\x00\x01' # Class: IN
-
- # Send query
- sock.send(query)
-
- # Receive response
- response = sock.recv(1024)
-
- # Check if we got a valid response
- if len(response) > 12: # Minimum DNS response size
- # Check if there are answers in the response
- answer_count = int.from_bytes(response[6:8], 'big')
- if answer_count > 0:
- return True
-
- return False
- except Exception as e:
- logging.CyberCPLogFileWriter.writeToFile(f'Error checking DNS with server {server}: {str(e)}')
- return False
- finally:
- sock.close()
-
- # Check A records (IPv4) with multiple DNS servers
+ # Use system's DNS resolver as primary check (faster and respects local config)
a_record_found = False
- for server in dns_servers:
- if check_with_dns_server(server, 'A'):
- a_record_found = True
- break
-
- # Check AAAA records (IPv6) with multiple DNS servers
aaaa_record_found = False
- for server in dns_servers:
- if check_with_dns_server(server, 'AAAA'):
- aaaa_record_found = True
- break
- # Also check with system's DNS resolver as a fallback
try:
- # Try to resolve A record (IPv4)
+ # Try to resolve A record (IPv4) with timeout
+ old_timeout = socket.getdefaulttimeout()
+ socket.setdefaulttimeout(3) # 3 second timeout
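+ # (setdefaulttimeout is process-wide; the previous value is restored after the lookup)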
socket.gethostbyname(domain)
a_record_found = True
+ socket.setdefaulttimeout(old_timeout)
except socket.gaierror:
+ socket.setdefaulttimeout(old_timeout)
+ pass
+ except socket.timeout:
+ socket.setdefaulttimeout(old_timeout)
pass
try:
- # Try to resolve AAAA record (IPv6)
+ # Try to resolve AAAA record (IPv6) with timeout
+ old_timeout = socket.getdefaulttimeout()
+ socket.setdefaulttimeout(3) # 3 second timeout
socket.getaddrinfo(domain, None, socket.AF_INET6)
aaaa_record_found = True
+ socket.setdefaulttimeout(old_timeout)
except socket.gaierror:
+ socket.setdefaulttimeout(old_timeout)
pass
+ except socket.timeout:
+ socket.setdefaulttimeout(old_timeout)
+ pass
+
+ # If system resolver fails, try public DNS servers as fallback
+ if not a_record_found and not aaaa_record_found:
+ # Function to check DNS record with specific DNS server
+ def check_with_dns_server(server, record_type='A'):
+ try:
+ # Create a new socket for each check
+ sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+ sock.settimeout(2) # 2 second timeout
+
+ # Set the DNS server
+ sock.connect((server, 53))
+
+ # Create DNS query
+ query = bytearray()
+ # DNS header
+ query += b'\x00\x01' # Transaction ID
+ query += b'\x01\x00' # Flags: Standard query
+ query += b'\x00\x01' # Questions: 1
+ query += b'\x00\x00' # Answer RRs: 0
+ query += b'\x00\x00' # Authority RRs: 0
+ query += b'\x00\x00' # Additional RRs: 0
+
+ # Domain name
+ for part in domain.split('.'):
+ query.append(len(part))
+ query.extend(part.encode())
+ query += b'\x00' # End of domain name
+
+ # Query type and class
+ if record_type == 'A':
+ query += b'\x00\x01' # Type: A
+ else: # AAAA
+ query += b'\x00\x1c' # Type: AAAA
+ query += b'\x00\x01' # Class: IN
+
+ # Send query
+ sock.send(query)
+
+ # Receive response
+ response = sock.recv(1024)
+
+ # Check if we got a valid response
+ if len(response) > 12: # Minimum DNS response size
+ # Check if there are answers in the response
+ answer_count = int.from_bytes(response[6:8], 'big')
+ if answer_count > 0:
+ return True
+
+ return False
+ except Exception as e:
+ logging.CyberCPLogFileWriter.writeToFile(f'Error checking DNS with server {server}: {str(e)}')
+ return False
+ finally:
+ try:
+ sock.close()
+ except:
+ pass
+
+ # Check A records (IPv4) with first available DNS server only
+ for server in dns_servers:
+ if check_with_dns_server(server, 'A'):
+ a_record_found = True
+ break
+
+ # Only check AAAA if A record wasn't found and we still have time
+ if not a_record_found:
+ for server in dns_servers:
+ if check_with_dns_server(server, 'AAAA'):
+ aaaa_record_found = True
+ break
# Log the results
if a_record_found:
@@ -870,7 +912,7 @@ class CustomACME:
logging.CyberCPLogFileWriter.writeToFile(f'Error checking DNS records: {str(e)}')
return False
- def _wait_for_order_processing(self, max_attempts=30, delay=2):
+ def _wait_for_order_processing(self, max_attempts=10, delay=2):
"""Wait for order to be processed"""
try:
logging.CyberCPLogFileWriter.writeToFile('Waiting for order processing...')
@@ -910,7 +952,7 @@ class CustomACME:
f'Order status check failed, attempt {attempt + 1}/{max_attempts}')
time.sleep(delay)
- logging.CyberCPLogFileWriter.writeToFile('Order processing timed out')
+ logging.CyberCPLogFileWriter.writeToFile('Order processing timed out after 20 seconds')
return False
except Exception as e:
logging.CyberCPLogFileWriter.writeToFile(f'Error waiting for order processing: {str(e)}')
diff --git a/plogical/mysqlUtilities.py b/plogical/mysqlUtilities.py
index 9d5cf87fb..9e5ed0f4b 100644
--- a/plogical/mysqlUtilities.py
+++ b/plogical/mysqlUtilities.py
@@ -249,8 +249,24 @@ class mysqlUtilities:
return str(msg)
@staticmethod
- def createDatabaseBackup(databaseName, tempStoragePath, rustic=0, RusticRepoName = None, externalApp = None):
+ def createDatabaseBackup(databaseName, tempStoragePath, rustic=0, RusticRepoName = None,
+ externalApp = None, use_compression=None, use_new_features=None):
+ """
+ Enhanced database backup with backward compatibility
+
+ Parameters:
+ - use_compression: None (auto-detect), True (force compression), False (no compression)
+ - use_new_features: None (auto-detect based on config), True/False (force)
+ """
try:
+ # Check if new features are enabled (via config file or parameter)
+ if use_new_features is None:
+ use_new_features = mysqlUtilities.checkNewBackupFeatures()
+
+ # Determine compression based on config or parameter
+ if use_compression is None:
+ use_compression = mysqlUtilities.shouldUseCompression()
+
passFile = "/etc/cyberpanel/mysqlPassword"
try:
@@ -291,53 +307,58 @@ password=%s
SHELL = False
if rustic == 0:
+ # Determine backup file extension based on compression
+ backup_extension = '.sql.gz' if use_compression else '.sql'
+ backup_file = f"{tempStoragePath}/{databaseName}{backup_extension}"
- command = 'rm -f ' + tempStoragePath + "/" + databaseName + '.sql'
+ # Remove old backup if exists
+ command = f'rm -f {backup_file}'
ProcessUtilities.executioner(command)
- command = 'mysqldump --defaults-file=/home/cyberpanel/.my.cnf -u %s --host=%s --port %s %s' % (mysqluser, mysqlhost, mysqlport, databaseName)
+ # Build mysqldump command with new features
+ dump_cmd = mysqlUtilities.buildMysqldumpCommand(
+ mysqluser, mysqlhost, mysqlport, databaseName,
+ use_new_features, use_compression
+ )
- # if os.path.exists(ProcessUtilities.debugPath):
- # logging.CyberCPLogFileWriter.writeToFile(command)
- #
- # logging.CyberCPLogFileWriter.writeToFile(f'Get current executing uid {os.getuid()}')
- #
- # cmd = shlex.split(command)
- #
- # try:
- # errorPath = '/home/cyberpanel/error-logs.txt'
- # errorLog = open(errorPath, 'a')
- # with open(tempStoragePath + "/" + databaseName + '.sql', 'w') as f:
- # res = subprocess.call(cmd, stdout=f, stderr=errorLog, shell=SHELL)
- # if res != 0:
- # logging.CyberCPLogFileWriter.writeToFile(
- # "Database: " + databaseName + "could not be backed! [createDatabaseBackup]")
- # return 0
- # except subprocess.CalledProcessError as msg:
- # logging.CyberCPLogFileWriter.writeToFile(
- # "Database: " + databaseName + "could not be backed! Error: %s. [createDatabaseBackup]" % (
- # str(msg)))
- # return 0
+ if use_compression:
+ # New method: Stream directly to compressed file
+ full_command = f"{dump_cmd} | gzip -c > {backup_file}"
+ result = ProcessUtilities.executioner(full_command, shell=True)
- cmd = shlex.split(command)
-
- with open(tempStoragePath + "/" + databaseName + '.sql', 'w') as f:
- # Using subprocess.run to capture stdout and stderr
- result = subprocess.run(
- cmd,
- stdout=f,
- stderr=subprocess.PIPE,
- shell=SHELL
- )
-
- # Check if the command was successful
- if result.returncode != 0:
+ # Verify backup file was created successfully
+ if not os.path.exists(backup_file) or os.path.getsize(backup_file) == 0:
logging.CyberCPLogFileWriter.writeToFile(
- "Database: " + databaseName + " could not be backed up! [createDatabaseBackup]"
+ f"Database: {databaseName} could not be backed up (compressed)! [createDatabaseBackup]"
)
- # Log stderr
- logging.CyberCPLogFileWriter.writeToFile(result.stderr.decode('utf-8'))
return 0
+ else:
+ # Legacy method: Direct dump to file (backward compatible)
+ cmd = shlex.split(dump_cmd)
+
+ with open(backup_file, 'w') as f:
+ result = subprocess.run(
+ cmd,
+ stdout=f,
+ stderr=subprocess.PIPE,
+ shell=SHELL
+ )
+
+ if result.returncode != 0:
+ logging.CyberCPLogFileWriter.writeToFile(
+ "Database: " + databaseName + " could not be backed up! [createDatabaseBackup]"
+ )
+ logging.CyberCPLogFileWriter.writeToFile(result.stderr.decode('utf-8'))
+ return 0
+
+ # Store metadata about backup format for restore
+ mysqlUtilities.saveBackupMetadata(
+ databaseName, tempStoragePath, use_compression, use_new_features
+ )
else:
SHELL = True
@@ -369,6 +390,9 @@ password=%s
@staticmethod
def restoreDatabaseBackup(databaseName, tempStoragePath, dbPassword, passwordCheck = None, additionalName = None, rustic=0, RusticRepoName = None, externalApp = None, snapshotid = None):
+ """
+ Enhanced restore with automatic format detection
+ """
try:
passFile = "/etc/cyberpanel/mysqlPassword"
@@ -409,25 +433,55 @@ password=%s
subprocess.call(shlex.split(command))
if rustic == 0:
+ # Auto-detect backup format
+ backup_format = mysqlUtilities.detectBackupFormat(
+ tempStoragePath, databaseName, additionalName
+ )
- command = 'mysql --defaults-file=/home/cyberpanel/.my.cnf -u %s --host=%s --port %s %s' % (mysqluser, mysqlhost, mysqlport, databaseName)
- if os.path.exists(ProcessUtilities.debugPath):
- logging.CyberCPLogFileWriter.writeToFile(f'{command} {tempStoragePath}/{databaseName} ' )
- cmd = shlex.split(command)
-
- if additionalName == None:
- with open(tempStoragePath + "/" + databaseName + '.sql', 'r') as f:
- res = subprocess.call(cmd, stdin=f)
- if res != 0:
- logging.CyberCPLogFileWriter.writeToFile("Could not restore MYSQL database: " + databaseName +"! [restoreDatabaseBackup]")
- return 0
+ if additionalName:
+ base_name = additionalName
else:
- with open(tempStoragePath + "/" + additionalName + '.sql', 'r') as f:
- res = subprocess.call(cmd, stdin=f)
+ base_name = databaseName
- if res != 0:
- logging.CyberCPLogFileWriter.writeToFile("Could not restore MYSQL database: " + additionalName + "! [restoreDatabaseBackup]")
- return 0
+ # Determine actual backup file based on detected format
+ if backup_format['compressed']:
+ backup_file = f"{tempStoragePath}/{base_name}.sql.gz"
+ if not os.path.exists(backup_file):
+ # Fallback to uncompressed for backward compatibility
+ backup_file = f"{tempStoragePath}/{base_name}.sql"
+ backup_format['compressed'] = False
+ else:
+ backup_file = f"{tempStoragePath}/{base_name}.sql"
+ if not os.path.exists(backup_file):
+ # Try compressed version
+ backup_file = f"{tempStoragePath}/{base_name}.sql.gz"
+ if os.path.exists(backup_file):
+ backup_format['compressed'] = True
+
+ if not os.path.exists(backup_file):
+ logging.CyberCPLogFileWriter.writeToFile(
+ f"Backup file not found: {backup_file}"
+ )
+ return 0
+
+ # Build restore command
+ mysql_cmd = f'mysql --defaults-file=/home/cyberpanel/.my.cnf -u {mysqluser} --host={mysqlhost} --port {mysqlport} {databaseName}'
+
+ if backup_format['compressed']:
+ # Handle compressed backup
+ restore_cmd = f"gunzip -c {backup_file} | {mysql_cmd}"
+ result = ProcessUtilities.executioner(restore_cmd, shell=True)
+
+ # Don't rely solely on the exit code; the gunzip | mysql pipeline can return nonzero on mere warnings
+ # The passwordCheck logic below will verify database integrity
+ else:
+ # Handle uncompressed backup (legacy)
+ cmd = shlex.split(mysql_cmd)
+ with open(backup_file, 'r') as f:
+ result = subprocess.call(cmd, stdin=f)
+
+ # Don't fail on non-zero exit as MySQL may return warnings
+ # The passwordCheck logic below will verify database integrity
if passwordCheck == None:
@@ -449,6 +503,8 @@ password=%s
logging.CyberCPLogFileWriter.writeToFile(f'{command} {tempStoragePath}/{databaseName} ')
ProcessUtilities.outputExecutioner(command, None, True)
+ return 1
+
except BaseException as msg:
logging.CyberCPLogFileWriter.writeToFile(str(msg) + "[restoreDatabaseBackup]")
return 0
@@ -1220,6 +1276,153 @@ gpgcheck=1
logging.CyberCPLogFileWriter.statusWriter(tempStatusPath, 'Completed [200]')
+ @staticmethod
+ def buildMysqldumpCommand(user, host, port, database, use_new_features, use_compression):
+ """Build mysqldump command with appropriate options"""
+
+ base_cmd = f"mysqldump --defaults-file=/home/cyberpanel/.my.cnf -u {user} --host={host} --port {port}"
+
+ # Add new performance features if enabled
+ if use_new_features:
+ # Add single-transaction for InnoDB consistency
+ base_cmd += " --single-transaction"
+
+ # Add extended insert for better performance
+ base_cmd += " --extended-insert"
+
+ # Add order by primary for consistent dumps
+ base_cmd += " --order-by-primary"
+
+ # Add quick option to avoid loading entire result set
+ base_cmd += " --quick"
+
+ # Add lock tables option
+ base_cmd += " --lock-tables=false"
+
+ # Note: classic mysqldump has no --parallel option (parallel dumps require a
+ # separate tool such as mydumper), so no such flag is added here
+
+ base_cmd += f" {database}"
+ return base_cmd
+
+ @staticmethod
+ def saveBackupMetadata(database, path, compressed, new_features):
+ """Save metadata about backup format for restore compatibility"""
+ import time
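+ import json  # local import as a safeguard; a module-level 'import json' is not visible in this diff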
+
+ metadata = {
+ 'database': database,
+ 'compressed': compressed,
+ 'new_features': new_features,
+ 'backup_version': '2.0' if new_features else '1.0',
+ 'timestamp': time.time()
+ }
+
+ metadata_file = f"{path}/{database}.backup.json"
+ with open(metadata_file, 'w') as f:
+ json.dump(metadata, f)
+
+ @staticmethod
+ def detectBackupFormat(path, database, additional_name=None):
+ """
+ Detect backup format from metadata or file extension
+ """
+ base_name = additional_name if additional_name else database
+
+ # First try to read metadata file (new backups will have this)
+ metadata_file = f"{path}/{base_name}.backup.json"
+ if os.path.exists(metadata_file):
+ try:
+ with open(metadata_file, 'r') as f:
+ return json.load(f)
+ except:
+ pass
+
+ # Fallback: detect by file existence and extension
+ format_info = {
+ 'compressed': False,
+ 'new_features': False,
+ 'backup_version': '1.0'
+ }
+
+ # Check for compressed file
+ if os.path.exists(f"{path}/{base_name}.sql.gz"):
+ format_info['compressed'] = True
+ # Compressed backups likely use new features
+ format_info['new_features'] = True
+ format_info['backup_version'] = '2.0'
+ elif os.path.exists(f"{path}/{base_name}.sql"):
+ format_info['compressed'] = False
+ # Check file content for new features indicators
+ format_info['new_features'] = mysqlUtilities.checkSQLFileFeatures(
+ f"{path}/{base_name}.sql"
+ )
+
+ return format_info
+
+ @staticmethod
+ def checkNewBackupFeatures():
+ """Check if new backup features are enabled"""
+ try:
+ config_file = '/usr/local/CyberCP/plogical/backup_config.json'
+ if not os.path.exists(config_file):
+ # Try alternate location
+ config_file = '/etc/cyberpanel/backup_config.json'
+
+ if os.path.exists(config_file):
+ with open(config_file, 'r') as f:
+ config = json.load(f)
+ return config.get('database_backup', {}).get('use_new_features', False)
+ except:
+ pass
+ return False # Default to legacy mode for safety
+
+ @staticmethod
+ def shouldUseCompression():
+ """Check if compression should be used"""
+ try:
+ config_file = '/usr/local/CyberCP/plogical/backup_config.json'
+ if not os.path.exists(config_file):
+ # Try alternate location
+ config_file = '/etc/cyberpanel/backup_config.json'
+
+ if os.path.exists(config_file):
+ with open(config_file, 'r') as f:
+ config = json.load(f)
+ return config.get('database_backup', {}).get('use_compression', False)
+ except:
+ pass
+ return False # Default to no compression for compatibility
+
+ @staticmethod
+ def supportParallelDump():
+ """Check if MySQL version supports parallel dump"""
+ try:
+ result = ProcessUtilities.outputExecutioner("mysql --version")
+ # MySQL 8.0+ and MariaDB 10.3+ support parallel dump
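+ # (informational probe only; classic mysqldump itself exposes no parallel flag)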
+ if "8.0" in result or "8.1" in result or "10.3" in result or "10.4" in result or "10.5" in result or "10.6" in result:
+ return True
+ except:
+ pass
+ return False
+
+ @staticmethod
+ def checkSQLFileFeatures(file_path):
+ """Check SQL file for new feature indicators"""
+ try:
+ # Read first few lines to check for new features
+ with open(file_path, 'r') as f:
+ head = f.read(2048) # Read first 2KB
+ # Check for indicators of new features
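+ # (note: mysqldump does not echo its options into the dump, and "-- Dump completed" sits at the end of the file, so this head check is a weak heuristic)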
+ if "--single-transaction" in head or "--extended-insert" in head or "-- Dump completed" in head:
+ return True
+ except:
+ pass
+ return False
+
def main():
parser = argparse.ArgumentParser(description='CyberPanel')
diff --git a/plogical/processUtilities.py b/plogical/processUtilities.py
index cd4c5fd01..717b6bf69 100644
--- a/plogical/processUtilities.py
+++ b/plogical/processUtilities.py
@@ -553,6 +553,21 @@ class ProcessUtilities(multi.Thread):
print("An error occurred:", e)
return None
+ @staticmethod
+ def getNumberOfCores():
+ """Get the number of CPU cores available on the system"""
+ try:
+ import multiprocessing
+ return multiprocessing.cpu_count()
+ except:
+ try:
+ # Fallback method using /proc/cpuinfo
+ with open('/proc/cpuinfo', 'r') as f:
+ return len([line for line in f if line.startswith('processor')])
+ except:
+ # Default to 2 if we can't determine
+ return 2
+
@staticmethod
def fetch_latest_prestashop_version():
import requests
diff --git a/plogical/sslUtilities.py b/plogical/sslUtilities.py
index 34ce4ece8..94cca0363 100644
--- a/plogical/sslUtilities.py
+++ b/plogical/sslUtilities.py
@@ -820,10 +820,9 @@ context /.well-known/acme-challenge {
logging.CyberCPLogFileWriter.writeToFile(
f"www.{virtualHostName} has no DNS records, excluding from acme.sh SSL request")
+ # Step 1: Issue the certificate (staging) - this stores config in /root/.acme.sh/
command = acmePath + " --issue" + domain_list \
- + ' --cert-file ' + existingCertPath + '/cert.pem' + ' --key-file ' + existingCertPath + '/privkey.pem' \
- + ' --fullchain-file ' + existingCertPath + '/fullchain.pem' + ' -w /usr/local/lsws/Example/html -k ec-256 --force --staging' \
- + ' --webroot-path /usr/local/lsws/Example/html'
+ + ' -w /usr/local/lsws/Example/html -k ec-256 --force --staging'
try:
result = subprocess.run(command, capture_output=True, universal_newlines=True, shell=True)
@@ -833,10 +832,9 @@ context /.well-known/acme-challenge {
universal_newlines=True, shell=True)
if result.returncode == 0:
+ # Step 2: Issue the certificate (production) - this stores config in /root/.acme.sh/
command = acmePath + " --issue" + domain_list \
- + ' --cert-file ' + existingCertPath + '/cert.pem' + ' --key-file ' + existingCertPath + '/privkey.pem' \
- + ' --fullchain-file ' + existingCertPath + '/fullchain.pem' + ' -w /usr/local/lsws/Example/html -k ec-256 --force --server letsencrypt' \
- + ' --webroot-path /usr/local/lsws/Example/html'
+ + ' -w /usr/local/lsws/Example/html -k ec-256 --force --server letsencrypt'
try:
result = subprocess.run(command, capture_output=True, universal_newlines=True, shell=True)
@@ -846,11 +844,25 @@ context /.well-known/acme-challenge {
universal_newlines=True, shell=True)
if result.returncode == 0:
- logging.CyberCPLogFileWriter.writeToFile(
- "Successfully obtained SSL for: " + virtualHostName + " and: www." + virtualHostName, 0)
- logging.CyberCPLogFileWriter.SendEmail(sender_email, adminEmail, result.stdout,
- 'SSL Notification for %s.' % (virtualHostName))
- return 1
+ # Step 3: Install the certificate to the desired location
+ install_command = acmePath + " --install-cert -d " + virtualHostName \
+ + ' --cert-file ' + existingCertPath + '/cert.pem' \
+ + ' --key-file ' + existingCertPath + '/privkey.pem' \
+ + ' --fullchain-file ' + existingCertPath + '/fullchain.pem'
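+ # (assumption: since the cert was issued with -k ec-256, acme.sh may also require the --ecc flag here)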
+
+ try:
+ install_result = subprocess.run(install_command, capture_output=True, universal_newlines=True, shell=True)
+ except TypeError:
+ # Fallback for Python < 3.7
+ install_result = subprocess.run(install_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
+ universal_newlines=True, shell=True)
+
+ if install_result.returncode == 0:
+ logging.CyberCPLogFileWriter.writeToFile(
+ "Successfully obtained SSL for: " + virtualHostName + " and: www." + virtualHostName, 0)
+ logging.CyberCPLogFileWriter.SendEmail(sender_email, adminEmail, result.stdout,
+ 'SSL Notification for %s.' % (virtualHostName))
+ return 1
return 0
except Exception as e:
logging.CyberCPLogFileWriter.writeToFile(str(e))
@@ -876,9 +888,9 @@ context /.well-known/acme-challenge {
if sslUtilities.checkDNSRecords(f'www.{aliasDomain}'):
domain_list += " -d www." + aliasDomain
+ # Step 1: Issue the certificate - this stores config in /root/.acme.sh/
command = acmePath + " --issue" + domain_list \
- + ' --cert-file ' + existingCertPath + '/cert.pem' + ' --key-file ' + existingCertPath + '/privkey.pem' \
- + ' --fullchain-file ' + existingCertPath + '/fullchain.pem' + ' -w /usr/local/lsws/Example/html -k ec-256 --force --server letsencrypt'
+ + ' -w /usr/local/lsws/Example/html -k ec-256 --force --server letsencrypt'
try:
result = subprocess.run(command, capture_output=True, universal_newlines=True, shell=True)
@@ -888,7 +900,21 @@ context /.well-known/acme-challenge {
universal_newlines=True, shell=True)
if result.returncode == 0:
- return 1
+ # Step 2: Install the certificate to the desired location
+ install_command = acmePath + " --install-cert -d " + virtualHostName \
+ + ' --cert-file ' + existingCertPath + '/cert.pem' \
+ + ' --key-file ' + existingCertPath + '/privkey.pem' \
+ + ' --fullchain-file ' + existingCertPath + '/fullchain.pem'
+
+ try:
+ install_result = subprocess.run(install_command, capture_output=True, universal_newlines=True, shell=True)
+ except TypeError:
+ # Fallback for Python < 3.7
+ install_result = subprocess.run(install_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
+ universal_newlines=True, shell=True)
+
+ if install_result.returncode == 0:
+ return 1
return 0
except Exception as e:
logging.CyberCPLogFileWriter.writeToFile(str(e))
diff --git a/plogical/test_backup_compatibility.py b/plogical/test_backup_compatibility.py
new file mode 100644
index 000000000..f8b927f02
--- /dev/null
+++ b/plogical/test_backup_compatibility.py
@@ -0,0 +1,300 @@
+#!/usr/local/CyberCP/bin/python
+"""
+Test script to verify backward compatibility of database backup improvements
+Tests both legacy and new backup/restore paths
+"""
+
+import os
+import sys
+import json
+import tempfile
+import shutil
+
+sys.path.append('/usr/local/CyberCP')
+os.environ.setdefault("DJANGO_SETTINGS_MODULE", "CyberCP.settings")
+
+from plogical.mysqlUtilities import mysqlUtilities
+from plogical.processUtilities import ProcessUtilities
+
+class BackupCompatibilityTests:
+ """Test suite for backup compatibility"""
+
+ @staticmethod
+ def setup_test_environment():
+ """Create a test directory for backups"""
+ test_dir = tempfile.mkdtemp(prefix="cyberpanel_backup_test_")
+ print(f"Created test directory: {test_dir}")
+ return test_dir
+
+ @staticmethod
+ def cleanup_test_environment(test_dir):
+ """Clean up test directory"""
+ if os.path.exists(test_dir):
+ shutil.rmtree(test_dir)
+ print(f"Cleaned up test directory: {test_dir}")
+
+ @staticmethod
+ def test_config_file():
+ """Test configuration file reading"""
+ print("\n=== Testing Configuration File ===")
+
+ config_file = '/usr/local/CyberCP/plogical/backup_config.json'
+ if os.path.exists(config_file):
+ with open(config_file, 'r') as f:
+ config = json.load(f)
+ print(f"Configuration loaded successfully")
+ print(f"Use compression: {config['database_backup']['use_compression']}")
+ print(f"Use new features: {config['database_backup']['use_new_features']}")
+ print(f"Auto-detect restore: {config['compatibility']['auto_detect_restore']}")
+ return True
+ else:
+ print(f"Configuration file not found at {config_file}")
+ return False
+
+ @staticmethod
+ def test_helper_functions():
+ """Test helper functions"""
+ print("\n=== Testing Helper Functions ===")
+
+ # Test checkNewBackupFeatures
+ new_features = mysqlUtilities.checkNewBackupFeatures()
+ print(f"New backup features enabled: {new_features}")
+
+ # Test shouldUseCompression
+ use_compression = mysqlUtilities.shouldUseCompression()
+ print(f"Compression enabled: {use_compression}")
+
+ # Test supportParallelDump
+ parallel_support = mysqlUtilities.supportParallelDump()
+ print(f"Parallel dump supported: {parallel_support}")
+
+ # Test getNumberOfCores
+ cores = ProcessUtilities.getNumberOfCores()
+ print(f"Number of CPU cores: {cores}")
+
+ return True
+
+ @staticmethod
+ def test_legacy_backup(test_db="test_legacy_db", test_dir="/tmp"):
+ """Test that legacy backups still work"""
+ print("\n=== Testing Legacy Backup (No Compression, No New Features) ===")
+
+ try:
+ # Create backup with old method
+ print(f"Creating legacy backup for {test_db}...")
+ result = mysqlUtilities.createDatabaseBackup(
+ test_db, test_dir, use_compression=False, use_new_features=False
+ )
+
+ if result == 1:
+ print(f"✓ Legacy backup created successfully")
+
+ # Check that .sql file exists (not .sql.gz)
+ legacy_file = f"{test_dir}/{test_db}.sql"
+ if os.path.exists(legacy_file):
+ file_size = os.path.getsize(legacy_file)
+ print(f"✓ Legacy backup file exists: {legacy_file}")
+ print(f" File size: {file_size} bytes")
+
+ # Check metadata file
+ metadata_file = f"{test_dir}/{test_db}.backup.json"
+ if os.path.exists(metadata_file):
+ with open(metadata_file, 'r') as f:
+ metadata = json.load(f)
+ print(f"✓ Metadata file exists")
+ print(f" Backup version: {metadata['backup_version']}")
+ print(f" Compressed: {metadata['compressed']}")
+ print(f" New features: {metadata['new_features']}")
+
+ return True
+ else:
+ print(f"✗ Legacy backup file not found: {legacy_file}")
+ return False
+ else:
+ print(f"✗ Legacy backup failed")
+ return False
+
+ except Exception as e:
+ print(f"✗ Error during legacy backup test: {str(e)}")
+ return False
+
+ @staticmethod
+ def test_new_backup(test_db="test_new_db", test_dir="/tmp"):
+ """Test new compressed backups"""
+ print("\n=== Testing New Backup (With Compression and New Features) ===")
+
+ try:
+ # Create backup with new method
+ print(f"Creating compressed backup for {test_db}...")
+ result = mysqlUtilities.createDatabaseBackup(
+ test_db, test_dir, use_compression=True, use_new_features=True
+ )
+
+ if result == 1:
+ print(f"✓ New backup created successfully")
+
+ # Check that .sql.gz file exists
+ compressed_file = f"{test_dir}/{test_db}.sql.gz"
+ if os.path.exists(compressed_file):
+ file_size = os.path.getsize(compressed_file)
+ print(f"✓ Compressed backup file exists: {compressed_file}")
+ print(f" File size: {file_size} bytes")
+
+ # Check metadata file
+ metadata_file = f"{test_dir}/{test_db}.backup.json"
+ if os.path.exists(metadata_file):
+ with open(metadata_file, 'r') as f:
+ metadata = json.load(f)
+                    print("✓ Metadata file exists")
+ print(f" Backup version: {metadata['backup_version']}")
+ print(f" Compressed: {metadata['compressed']}")
+ print(f" New features: {metadata['new_features']}")
+
+ return True
+ else:
+ print(f"✗ Compressed backup file not found: {compressed_file}")
+ # Check if legacy file was created instead
+ legacy_file = f"{test_dir}/{test_db}.sql"
+ if os.path.exists(legacy_file):
+ print(f" Note: Legacy file exists instead: {legacy_file}")
+ return False
+ else:
+            print("✗ New backup failed")
+ return False
+
+ except Exception as e:
+ print(f"✗ Error during new backup test: {str(e)}")
+ return False
+
+ @staticmethod
+ def test_format_detection(test_dir="/tmp"):
+ """Test backup format auto-detection"""
+ print("\n=== Testing Format Detection ===")
+
+ # Test detection of compressed backup
+ test_db = "test_detect"
+
+ # Create a dummy compressed backup
+ compressed_file = f"{test_dir}/{test_db}.sql.gz"
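+        # Only the leading bytes matter here; detection is expected to key off the
+        # file name and metadata rather than the archive contents.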
+ with open(compressed_file, 'wb') as f:
+            f.write(b'\x1f\x8b\x08\x00\x00\x00\x00\x00')  # truncated gzip header: magic, method, flags, mtime
+
+ # Create metadata
+ metadata = {
+ 'database': test_db,
+ 'compressed': True,
+ 'new_features': True,
+ 'backup_version': '2.0'
+ }
+ metadata_file = f"{test_dir}/{test_db}.backup.json"
+ with open(metadata_file, 'w') as f:
+ json.dump(metadata, f)
+
+ # Test detection
+ detected_format = mysqlUtilities.detectBackupFormat(test_dir, test_db)
+        print("Detected format for compressed backup:")
+ print(f" Compressed: {detected_format['compressed']}")
+ print(f" New features: {detected_format['new_features']}")
+ print(f" Version: {detected_format['backup_version']}")
+
+ # Clean up test files
+ os.remove(compressed_file)
+ os.remove(metadata_file)
+
+ # Create a dummy uncompressed backup
+ uncompressed_file = f"{test_dir}/{test_db}.sql"
+ with open(uncompressed_file, 'w') as f:
+ f.write("-- MySQL dump\n")
+
+ # Test detection without metadata
+ detected_format = mysqlUtilities.detectBackupFormat(test_dir, test_db)
+        print("\nDetected format for uncompressed backup (no metadata):")
+ print(f" Compressed: {detected_format['compressed']}")
+ print(f" New features: {detected_format['new_features']}")
+ print(f" Version: {detected_format['backup_version']}")
+
+ # Clean up
+ os.remove(uncompressed_file)
+
+ return True
+
+ @staticmethod
+ def test_mysqldump_command():
+ """Test mysqldump command building"""
+ print("\n=== Testing MySQL Dump Command Building ===")
+
+ # Test legacy command
+ legacy_cmd = mysqlUtilities.buildMysqldumpCommand(
+ "root", "localhost", "3306", "test_db",
+ use_new_features=False, use_compression=False
+ )
+ print(f"Legacy command: {legacy_cmd}")
+
+ # Test new command with features
+ new_cmd = mysqlUtilities.buildMysqldumpCommand(
+ "root", "localhost", "3306", "test_db",
+ use_new_features=True, use_compression=True
+ )
+ print(f"New command: {new_cmd}")
+
+ return True
+
+ @staticmethod
+ def run_all_tests():
+ """Run all compatibility tests"""
+ print("=" * 60)
+ print("CyberPanel Database Backup Compatibility Test Suite")
+ print("=" * 60)
+
+ all_passed = True
+
+ # Test configuration
+ if not BackupCompatibilityTests.test_config_file():
+ all_passed = False
+
+ # Test helper functions
+ if not BackupCompatibilityTests.test_helper_functions():
+ all_passed = False
+
+ # Test mysqldump command building
+ if not BackupCompatibilityTests.test_mysqldump_command():
+ all_passed = False
+
+ # Setup test environment
+ test_dir = BackupCompatibilityTests.setup_test_environment()
+
+ try:
+ # Test format detection
+ if not BackupCompatibilityTests.test_format_detection(test_dir):
+ all_passed = False
+
+ # Note: Actual backup/restore tests would require a real database
+ # These are commented out but show the structure
+
+ # # Test legacy backup
+ # if not BackupCompatibilityTests.test_legacy_backup("test_db", test_dir):
+ # all_passed = False
+
+ # # Test new backup
+ # if not BackupCompatibilityTests.test_new_backup("test_db", test_dir):
+ # all_passed = False
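+
+            # A minimal opt-in sketch (assumes CP_BACKUP_TEST_DB names a disposable
+            # database that actually exists; the variable is illustrative, not part
+            # of CyberPanel):
+            # test_db = os.environ.get('CP_BACKUP_TEST_DB')
+            # if test_db:
+            #     if not BackupCompatibilityTests.test_legacy_backup(test_db, test_dir):
+            #         all_passed = False
+            #     if not BackupCompatibilityTests.test_new_backup(test_db, test_dir):
+            #         all_passed = False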
+
+ finally:
+ # Cleanup
+ BackupCompatibilityTests.cleanup_test_environment(test_dir)
+
+ print("\n" + "=" * 60)
+ if all_passed:
+ print("✓ All tests passed successfully!")
+ print("The backup system is fully backward compatible.")
+ else:
+ print("✗ Some tests failed. Please check the output above.")
+ print("=" * 60)
+
+ return all_passed
+
+
+if __name__ == "__main__":
+ # Run the test suite
+ success = BackupCompatibilityTests.run_all_tests()
+ sys.exit(0 if success else 1)
\ No newline at end of file
diff --git a/plogical/upgrade.py b/plogical/upgrade.py
index 851af6eb2..9fd9cbd3e 100644
--- a/plogical/upgrade.py
+++ b/plogical/upgrade.py
@@ -1249,13 +1249,50 @@ $cfg['Servers'][$i]['LogoutURL'] = 'phpmyadminsignin.php?logout';
`completed_at` datetime(6) DEFAULT NULL,
KEY `ai_scanner_scheduled_executions_scheduled_scan_id_idx` (`scheduled_scan_id`),
KEY `ai_scanner_scheduled_executions_execution_time_idx` (`execution_time` DESC),
- CONSTRAINT `ai_scanner_scheduled_executions_scheduled_scan_id_fk` FOREIGN KEY (`scheduled_scan_id`)
+ CONSTRAINT `ai_scanner_scheduled_executions_scheduled_scan_id_fk` FOREIGN KEY (`scheduled_scan_id`)
REFERENCES `ai_scanner_scheduled_scans` (`id`) ON DELETE CASCADE
)
''')
except:
pass
+ # AI Scanner File Operation Audit Tables
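+            # Created at upgrade time so existing installs gain the same audit schema as fresh installs.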
+ try:
+ cursor.execute('''
+ CREATE TABLE `scanner_file_operations` (
+ `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
+ `scan_id` varchar(255) NOT NULL,
+ `operation` varchar(20) NOT NULL,
+ `file_path` varchar(500) NOT NULL,
+ `backup_path` varchar(500) DEFAULT NULL,
+ `success` bool NOT NULL DEFAULT 0,
+ `error_message` longtext DEFAULT NULL,
+ `ip_address` varchar(45) DEFAULT NULL,
+ `user_agent` varchar(255) DEFAULT NULL,
+ `created_at` datetime(6) NOT NULL,
+ KEY `scanner_file_operations_scan_id_idx` (`scan_id`),
+ KEY `scanner_file_operations_created_at_idx` (`created_at`),
+ KEY `scanner_file_operations_scan_created_idx` (`scan_id`, `created_at`)
+ )
+ ''')
+ except:
+ pass
+
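+            # Rate-limit bookkeeping: one row per (scan_id, endpoint) pair, enforced by the UNIQUE key below.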
+ try:
+ cursor.execute('''
+ CREATE TABLE `scanner_api_rate_limits` (
+ `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
+ `scan_id` varchar(255) NOT NULL,
+ `endpoint` varchar(100) NOT NULL,
+ `request_count` integer NOT NULL DEFAULT 0,
+ `last_request_at` datetime(6) NOT NULL,
+ UNIQUE KEY `scanner_api_rate_limits_scan_endpoint_unique` (`scan_id`, `endpoint`),
+ KEY `scanner_api_rate_limits_scan_endpoint_idx` (`scan_id`, `endpoint`)
+ )
+ ''')
+ except:
+ pass
+
try:
cursor.execute(
'CREATE TABLE `loginSystem_acl` (`id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY, `name` varchar(50) NOT NULL UNIQUE, `adminStatus` integer NOT NULL DEFAULT 0, `versionManagement` integer NOT NULL DEFAULT 0, `createNewUser` integer NOT NULL DEFAULT 0, `deleteUser` integer NOT NULL DEFAULT 0, `resellerCenter` integer NOT NULL DEFAULT 0, `changeUserACL` integer NOT NULL DEFAULT 0, `createWebsite` integer NOT NULL DEFAULT 0, `modifyWebsite` integer NOT NULL DEFAULT 0, `suspendWebsite` integer NOT NULL DEFAULT 0, `deleteWebsite` integer NOT NULL DEFAULT 0, `createPackage` integer NOT NULL DEFAULT 0, `deletePackage` integer NOT NULL DEFAULT 0, `modifyPackage` integer NOT NULL DEFAULT 0, `createDatabase` integer NOT NULL DEFAULT 0, `deleteDatabase` integer NOT NULL DEFAULT 0, `listDatabases` integer NOT NULL DEFAULT 0, `createNameServer` integer NOT NULL DEFAULT 0, `createDNSZone` integer NOT NULL DEFAULT 0, `deleteZone` integer NOT NULL DEFAULT 0, `addDeleteRecords` integer NOT NULL DEFAULT 0, `createEmail` integer NOT NULL DEFAULT 0, `deleteEmail` integer NOT NULL DEFAULT 0, `emailForwarding` integer NOT NULL DEFAULT 0, `changeEmailPassword` integer NOT NULL DEFAULT 0, `dkimManager` integer NOT NULL DEFAULT 0, `createFTPAccount` integer NOT NULL DEFAULT 0, `deleteFTPAccount` integer NOT NULL DEFAULT 0, `listFTPAccounts` integer NOT NULL DEFAULT 0, `createBackup` integer NOT NULL DEFAULT 0, `restoreBackup` integer NOT NULL DEFAULT 0, `addDeleteDestinations` integer NOT NULL DEFAULT 0, `scheduleBackups` integer NOT NULL DEFAULT 0, `remoteBackups` integer NOT NULL DEFAULT 0, `manageSSL` integer NOT NULL DEFAULT 0, `hostnameSSL` integer NOT NULL DEFAULT 0, `mailServerSSL` integer NOT NULL DEFAULT 0)')
diff --git a/plogical/vhost.py b/plogical/vhost.py
index a3f5df68e..990e9d7f9 100644
--- a/plogical/vhost.py
+++ b/plogical/vhost.py
@@ -25,7 +25,7 @@ from managePHP.phpManager import PHPManager
from plogical.vhostConfs import vhostConfs
from ApachController.ApacheVhosts import ApacheVhost
try:
- from websiteFunctions.models import Websites, ChildDomains, aliasDomains, DockerSites
+ from websiteFunctions.models import Websites, ChildDomains, aliasDomains, DockerSites, WPSites, WPStaging
from databases.models import Databases
except:
pass
@@ -404,6 +404,21 @@ class vhost:
if ACLManager.FindIfChild() == 0:
+ ### Delete WordPress Sites and Staging Sites first
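+            # WPStaging rows reference WPSites, which reference this website, so delete child records first to avoid dangling references.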
+ try:
+ wpSites = WPSites.objects.filter(owner=delWebsite)
+ for wpSite in wpSites:
+ # Delete any staging sites associated with this WP site
+ stagingSites = WPStaging.objects.filter(wpsite=wpSite)
+ for staging in stagingSites:
+                        # Log before delete(), since Django clears the primary key on delete
+                        logging.CyberCPLogFileWriter.writeToFile(f"Deleting staging site record: {staging.id}")
+                        staging.delete()
+                    # Delete the WP site itself
+                    logging.CyberCPLogFileWriter.writeToFile(f"Deleting WP site: {wpSite.id}")
+                    wpSite.delete()
+ except Exception as msg:
+ logging.CyberCPLogFileWriter.writeToFile(f"Error cleaning up WP/Staging sites: {str(msg)}")
+
### Delete Docker Sites first before website deletion
if os.path.exists('/home/docker/%s' % (virtualHostName)):
@@ -497,6 +512,21 @@ class vhost:
## child check to make sure no database entires are being deleted from child server
if ACLManager.FindIfChild() == 0:
+ ### Delete WordPress Sites and Staging Sites first
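+            # Same child-first cleanup as the vhost-deletion path above, run before database entries are removed.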
+ try:
+ wpSites = WPSites.objects.filter(owner=delWebsite)
+ for wpSite in wpSites:
+ # Delete any staging sites associated with this WP site
+ stagingSites = WPStaging.objects.filter(wpsite=wpSite)
+ for staging in stagingSites:
+                        # Log before delete(), since Django clears the primary key on delete
+                        logging.CyberCPLogFileWriter.writeToFile(f"Deleting staging site record: {staging.id}")
+                        staging.delete()
+                    # Delete the WP site itself
+                    logging.CyberCPLogFileWriter.writeToFile(f"Deleting WP site: {wpSite.id}")
+                    wpSite.delete()
+ except Exception as msg:
+ logging.CyberCPLogFileWriter.writeToFile(f"Error cleaning up WP/Staging sites: {str(msg)}")
+
for items in databases:
mysqlUtilities.deleteDatabase(items.dbName, items.dbUser)
diff --git a/plogical/virtualHostUtilities.py b/plogical/virtualHostUtilities.py
index f0142db77..041c640d1 100644
--- a/plogical/virtualHostUtilities.py
+++ b/plogical/virtualHostUtilities.py
@@ -32,7 +32,7 @@ from ApachController.ApacheVhosts import ApacheVhost
from managePHP.phpManager import PHPManager
try:
- from websiteFunctions.models import Websites, ChildDomains, aliasDomains
+ from websiteFunctions.models import Websites, ChildDomains, aliasDomains, WPSites, WPStaging
from loginSystem.models import Administrator
from packages.models import Package
from CLManager.models import CLPackages
@@ -598,6 +598,41 @@ local_name %s {
'This website already exists as child domain. [404]')
return 0, "This website already exists as child domain."
+ # Check for orphaned staging site domain conflicts
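+        # A website removed outside this flow can leave WPSites/WPStaging rows whose FinalURL matches the new domain; purge them so creation does not trip over stale records.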
+ try:
+ # Check if there are any WP sites with FinalURL matching this domain
+ conflicting_wp_sites = WPSites.objects.filter(FinalURL__icontains=virtualHostName)
+ for wp_site in conflicting_wp_sites:
+ # Check if the WP site's owner website still exists
+ try:
+ owner_website = wp_site.owner
+ if not Websites.objects.filter(id=owner_website.id).exists():
+                        # Orphaned WP site found; log first, since delete() clears the id
+                        logging.CyberCPLogFileWriter.writeToFile(f"Cleaning up orphaned WP site: {wp_site.id} with URL: {wp_site.FinalURL}")
+                        wp_site.delete()
+ except:
+                    # WP site owner is missing; log first, then delete
+                    logging.CyberCPLogFileWriter.writeToFile(f"Cleaning up orphaned WP site: {wp_site.id} (missing owner)")
+                    wp_site.delete()
+
+ # Check for orphaned staging sites
+ orphaned_staging = WPStaging.objects.filter(wpsite__FinalURL__icontains=virtualHostName)
+ for staging in orphaned_staging:
+ try:
+ # Check if the staging site's wpsite still exists and has valid owner
+ wpsite = staging.wpsite
+ owner_website = wpsite.owner
+ if not Websites.objects.filter(id=owner_website.id).exists():
+                        # Owner website doesn't exist; log first, then clean up staging
+                        logging.CyberCPLogFileWriter.writeToFile(f"Cleaning up orphaned staging site: {staging.id}")
+                        staging.delete()
+ except:
+                    # Staging site has invalid references; log first, then delete
+                    logging.CyberCPLogFileWriter.writeToFile(f"Cleaning up orphaned staging site: {staging.id} (invalid references)")
+                    staging.delete()
+ except Exception as e:
+ logging.CyberCPLogFileWriter.writeToFile(f"Error during staging site cleanup: {str(e)}")
+
####### Limitations Check End
logging.CyberCPLogFileWriter.statusWriter(tempStatusPath, 'Creating DNS records..,10')
diff --git a/requirments-old.txt b/requirments-old.txt
index 89018350d..d629b171f 100644
--- a/requirments-old.txt
+++ b/requirments-old.txt
@@ -36,4 +36,5 @@ uvicorn
asyncssh
python-jose
websockets
-PyJWT
\ No newline at end of file
+PyJWT
+python-dotenv
\ No newline at end of file
diff --git a/test_api_auth.py b/test_api_auth.py
new file mode 100644
index 000000000..98d8ac2b3
--- /dev/null
+++ b/test_api_auth.py
@@ -0,0 +1,130 @@
+#!/usr/bin/env python3
+"""
+Test script to verify both Bearer token and X-API-Key authentication work
+for CyberPanel AI Scanner file operations.
+"""
+
+import requests
+import json
+import sys
+
+# Test configuration
+BASE_URL = "http://localhost:8001" # Adjust if needed
+SCAN_ID = "test-scan-123"
+FILE_PATH = "wp-content/plugins/test.php"
+
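+# The values above are placeholders; point SCAN_ID and FILE_PATH at a real scan and file for live tests.
+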
+def test_bearer_auth(token):
+ """Test with Bearer token authentication"""
+ print("Testing Bearer token authentication...")
+
+ headers = {
+ "Authorization": f"Bearer {token}",
+ "X-Scan-ID": SCAN_ID,
+ "Content-Type": "application/json"
+ }
+
+ # Test get-file endpoint
+ url = f"{BASE_URL}/api/scanner/get-file"
+ params = {"file_path": FILE_PATH}
+
+ response = requests.get(url, params=params, headers=headers)
+ print(f"Bearer auth response: {response.status_code}")
+ if response.status_code != 200:
+ print(f"Response: {response.text}")
+    return response.status_code in (200, 404)  # 404 is ok if file doesn't exist
+
+
+def test_api_key_auth(api_key):
+ """Test with X-API-Key authentication"""
+ print("\nTesting X-API-Key authentication...")
+
+ headers = {
+ "X-API-Key": api_key,
+ "X-Scan-ID": SCAN_ID,
+ "Content-Type": "application/json"
+ }
+
+ # Test get-file endpoint
+ url = f"{BASE_URL}/api/scanner/get-file"
+ params = {"file_path": FILE_PATH}
+
+ response = requests.get(url, params=params, headers=headers)
+ print(f"X-API-Key auth response: {response.status_code}")
+ if response.status_code != 200:
+ print(f"Response: {response.text}")
+    return response.status_code in (200, 404)  # 404 is ok if file doesn't exist
+
+
+def test_mixed_endpoints():
+ """Test different endpoints with both authentication methods"""
+ print("\n" + "="*50)
+ print("Testing multiple endpoints with both auth methods")
+ print("="*50)
+
+ # You would need real tokens for this to work
+ test_token = "cp_test_token_12345"
+ test_api_key = "cp_test_api_key_67890"
+
+ endpoints = [
+ ("GET", "/api/ai-scanner/files/list", {"path": "wp-content"}),
+ ("GET", "/api/ai-scanner/files/content", {"path": FILE_PATH}),
+ ("GET", "/api/scanner/get-file", {"file_path": FILE_PATH}),
+ ]
+
+ for method, endpoint, params in endpoints:
+ print(f"\nTesting {method} {endpoint}")
+
+ # Test with Bearer token
+ headers_bearer = {
+ "Authorization": f"Bearer {test_token}",
+ "X-Scan-ID": SCAN_ID
+ }
+
+ # Test with X-API-Key
+ headers_api_key = {
+ "X-API-Key": test_api_key,
+ "X-Scan-ID": SCAN_ID
+ }
+
+ url = f"{BASE_URL}{endpoint}"
+
+ # Make requests (will fail without valid tokens, but shows the headers work)
+ if method == "GET":
+ response_bearer = requests.get(url, params=params, headers=headers_bearer)
+ response_api_key = requests.get(url, params=params, headers=headers_api_key)
+
+ print(f" Bearer auth: {response_bearer.status_code}")
+ print(f" X-API-Key auth: {response_api_key.status_code}")
+
+
+def main():
+ """Main test function"""
+ print("CyberPanel AI Scanner Authentication Test")
+ print("="*50)
+
+ if len(sys.argv) > 1:
+ # If token provided as argument, use it
+ token = sys.argv[1]
+
+ # Test both authentication methods with the same token
+ # (assumes token is valid for both methods)
+ bearer_success = test_bearer_auth(token)
+ api_key_success = test_api_key_auth(token)
+
+ print("\n" + "="*50)
+ print("Test Results:")
+ print(f" Bearer authentication: {'✓ PASS' if bearer_success else '✗ FAIL'}")
+ print(f" X-API-Key authentication: {'✓ PASS' if api_key_success else '✗ FAIL'}")
+ print("="*50)
+ else:
+ # Run mock tests to show the endpoints accept both header formats
+ test_mixed_endpoints()
+
+ print("\n" + "="*50)
+ print("Note: To run real tests, provide a valid token:")
+ print(f" python {sys.argv[0]} cp_your_token_here")
+ print("="*50)
+
+
+if __name__ == "__main__":
+ main()
\ No newline at end of file
diff --git a/test_api_key_fix.sh b/test_api_key_fix.sh
new file mode 100755
index 000000000..be5dafd76
--- /dev/null
+++ b/test_api_key_fix.sh
@@ -0,0 +1,74 @@
+#!/bin/bash
+# Test script to verify API key validation fix
+
+# Configuration - adjust these values
+# For remote testing, replace with your CyberPanel server URL
+SERVER="${CYBERPANEL_SERVER:-http://localhost:8001}"
+API_KEY="${CYBERPANEL_API_KEY:-cp_GrHf3ysP0SKhrEiazmqt3kRJA5KwOFQW8VJKcDQ8B5Bg}" # Your actual API key
+SCAN_ID="${CYBERPANEL_SCAN_ID:-550e8400-e29b-41d4-a716-446655440000}" # A valid scan ID from your system
+
+echo "Using server: $SERVER"
+echo "Using API key: ${API_KEY:0:20}..."
+echo "Using scan ID: $SCAN_ID"
+echo ""
+
+echo "=========================================="
+echo "Testing CyberPanel API Key Validation Fix"
+echo "=========================================="
+echo ""
+
+# Test 1: List API keys in the system
+echo "1. Listing API keys in system..."
+echo "---------------------------------"
+curl -s "$SERVER/api/ai-scanner/list-api-keys/" | python3 -m json.tool
+echo ""
+
+# Test 2: Test authentication with X-API-Key header
+echo "2. Testing X-API-Key authentication..."
+echo "---------------------------------------"
+curl -s -X POST "$SERVER/api/ai-scanner/test-auth/" \
+ -H "X-API-Key: $API_KEY" \
+ -H "X-Scan-ID: $SCAN_ID" \
+ -H "Content-Type: application/json" \
+ -d "{\"scan_id\": \"$SCAN_ID\"}" | python3 -m json.tool
+echo ""
+
+# Test 3: Test actual file operation with X-API-Key
+echo "3. Testing file operation with X-API-Key..."
+echo "--------------------------------------------"
+RESPONSE=$(curl -s -w "\n%{http_code}" "$SERVER/api/scanner/get-file?file_path=wp-content/test.php" \
+ -H "X-API-Key: $API_KEY" \
+ -H "X-Scan-ID: $SCAN_ID")
+
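+# curl -w appended the status code on its own final line: tail -n1 extracts it, head -n-1 (GNU syntax) strips it from the body.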
+HTTP_CODE=$(echo "$RESPONSE" | tail -n1)
+BODY=$(echo "$RESPONSE" | head -n-1)
+
+echo "HTTP Status: $HTTP_CODE"
+echo "Response body:"
+echo "$BODY" | python3 -m json.tool 2>/dev/null || echo "$BODY"
+echo ""
+
+# Test 4: Test with Bearer token (backward compatibility)
+echo "4. Testing Bearer token (backward compatibility)..."
+echo "----------------------------------------------------"
+RESPONSE=$(curl -s -w "\n%{http_code}" "$SERVER/api/scanner/get-file?file_path=wp-content/test.php" \
+ -H "Authorization: Bearer $API_KEY" \
+ -H "X-Scan-ID: $SCAN_ID")
+
+HTTP_CODE=$(echo "$RESPONSE" | tail -n1)
+BODY=$(echo "$RESPONSE" | head -n-1)
+
+echo "HTTP Status: $HTTP_CODE"
+echo "Response body:"
+echo "$BODY" | python3 -m json.tool 2>/dev/null || echo "$BODY"
+echo ""
+
+echo "=========================================="
+echo "Test complete!"
+echo ""
+echo "Expected results:"
+echo "- Test 1: Should show API keys in system"
+echo "- Test 2: Should show validation success with detailed steps"
+echo "- Test 3: Should return 200 or 404 (not 401)"
+echo "- Test 4: Should also work with Bearer token"
+echo "=========================================="
\ No newline at end of file
diff --git a/websiteFunctions/templates/websiteFunctions/WPsiteHome.html b/websiteFunctions/templates/websiteFunctions/WPsiteHome.html
index a31430130..30bf46da6 100644
--- a/websiteFunctions/templates/websiteFunctions/WPsiteHome.html
+++ b/websiteFunctions/templates/websiteFunctions/WPsiteHome.html
@@ -8,2095 +8,790 @@
 [hunk body lost in extraction: the 2095-line template is rewritten down to 790 lines; only the {{ wpsite.id }}, {{ wpsite.title }}, and {{ wpsite.path }} bindings and an "Active" status badge survive in the fragment]