Merge branch 'v2.4.3' into v2.4.2
commit f745bb558d
@@ -127,8 +127,38 @@ class secMiddleware:
logging.writeToFile(f'Value being scanned {str(value)}')
|
||||
|
||||
# Skip validation for ports key to allow port ranges with colons
|
||||
if key == 'ports':
|
||||
# but only for CSF modifyPorts endpoint
|
||||
if key == 'ports' and pathActual == '/firewall/modifyPorts':
|
||||
# Validate that ports only contain numbers, commas, and colons
|
||||
if type(value) == str:
|
||||
import re
|
||||
# Allow only: digits, commas, colons, and whitespace
|
||||
if re.match(r'^[\d,:,\s]+$', value):
|
||||
continue
|
||||
else:
|
||||
logging.writeToFile(f"Invalid port format in CSF configuration: {value}")
|
||||
final_dic = {
|
||||
'error_message': "Invalid port format. Only numbers, commas, and colons are allowed for port ranges.",
|
||||
"errorMessage": "Invalid port format. Only numbers, commas, and colons are allowed for port ranges."}
|
||||
final_json = json.dumps(final_dic)
|
||||
return HttpResponse(final_json)
|
||||
continue
|
||||
elif key == 'ports':
|
||||
# For other endpoints, ports key continues to skip validation
|
||||
continue
|
||||
|
||||
# Allow protocol parameter for CSF modifyPorts endpoint
|
||||
if key == 'protocol' and pathActual == '/firewall/modifyPorts':
|
||||
# Validate protocol values
|
||||
if value in ['TCP_IN', 'TCP_OUT', 'UDP_IN', 'UDP_OUT']:
|
||||
continue
|
||||
else:
|
||||
logging.writeToFile(f"Invalid protocol in CSF configuration: {value}")
|
||||
final_dic = {
|
||||
'error_message': "Invalid protocol. Only TCP_IN, TCP_OUT, UDP_IN, UDP_OUT are allowed.",
|
||||
"errorMessage": "Invalid protocol. Only TCP_IN, TCP_OUT, UDP_IN, UDP_OUT are allowed."}
|
||||
final_json = json.dumps(final_dic)
|
||||
return HttpResponse(final_json)
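The two CSF checks above boil down to a whitelist regex for port strings plus a fixed set of protocol names. A minimal standalone sketch of the same rules (a hypothetical helper, not part of the middleware; note that [\d,:\s] is equivalent to the committed [\d,:,\s], whose second comma is redundant):

import re

ALLOWED_PROTOCOLS = {'TCP_IN', 'TCP_OUT', 'UDP_IN', 'UDP_OUT'}

def valid_csf_ports(value):
    # digits, commas, colons and whitespace only, e.g. "20,21,22,1000:2000"
    return bool(re.match(r'^[\d,:\s]+$', value))

def valid_csf_protocol(value):
    return value in ALLOWED_PROTOCOLS

assert valid_csf_ports('20,21,22,1000:2000')
assert not valid_csf_ports('22; rm -rf /')
assert valid_csf_protocol('TCP_IN') and not valid_csf_protocol('ICMP_IN')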
|
||||
|
||||
if type(value) == str or type(value) == bytes:
|
||||
pass
|
||||
@@ -137,14 +167,14 @@ class secMiddleware:
if os.path.exists(ProcessUtilities.debugPath):
|
||||
logging.writeToFile(f'Item type detected as list')
|
||||
for items in value:
|
||||
if items.find('- -') > -1 or items.find('\n') > -1 or items.find(';') > -1 or items.find(
|
||||
if isinstance(items, str) and (items.find('- -') > -1 or items.find('\n') > -1 or items.find(';') > -1 or items.find(
|
||||
'&&') > -1 or items.find('|') > -1 or items.find('...') > -1 \
|
||||
or items.find("`") > -1 or items.find("$") > -1 or items.find(
|
||||
"(") > -1 or items.find(")") > -1 \
|
||||
or items.find("'") > -1 or items.find("[") > -1 or items.find(
|
||||
"]") > -1 or items.find("{") > -1 or items.find("}") > -1 \
|
||||
or items.find(":") > -1 or items.find("<") > -1 or items.find(
|
||||
">") > -1 or items.find("&") > -1:
|
||||
">") > -1 or items.find("&") > -1):
|
||||
logging.writeToFile(request.body)
|
||||
final_dic = {
|
||||
'error_message': "Data supplied is not accepted, following characters are not allowed in the input ` $ & ( ) [ ] { } ; : ‘ < >.",
|
||||
@@ -168,11 +198,11 @@ class secMiddleware:
pathActual.find('saveSpamAssassinConfigurations') > -1 or
|
||||
pathActual.find('docker') > -1 or pathActual.find('cloudAPI') > -1 or
|
||||
pathActual.find('verifyLogin') > -1 or pathActual.find('submitUserCreation') > -1 or
|
||||
pathActual.find('/api/') > -1)
|
||||
pathActual.find('/api/') > -1 or pathActual.find('aiscanner/scheduled-scans') > -1)
|
||||
|
||||
if isAPIEndpoint:
|
||||
# For API endpoints, still check for the most dangerous command injection characters
|
||||
if (value.find('- -') > -1 or value.find('\n') > -1 or value.find(';') > -1 or
|
||||
if isinstance(value, (str, bytes)) and (value.find('- -') > -1 or value.find('\n') > -1 or value.find(';') > -1 or
|
||||
value.find('&&') > -1 or value.find('||') > -1 or value.find('|') > -1 or
|
||||
value.find('...') > -1 or value.find("`") > -1 or value.find("$") > -1 or
|
||||
value.find('../') > -1 or value.find('../../') > -1):
|
||||
@@ -190,29 +220,33 @@ class secMiddleware:
or key == 'imageByPass' or key == 'passwordByPass' or key == 'PasswordByPass' or key == 'cronCommand' \
|
||||
or key == 'emailMessage' or key == 'configData' or key == 'rewriteRules' \
|
||||
or key == 'modSecRules' or key == 'recordContentTXT' or key == 'SecAuditLogRelevantStatus' \
|
||||
or key == 'fileContent' or key == 'commands' or key == 'gitHost' or key == 'ipv6' or key == 'contentNow':
|
||||
or key == 'fileContent' or key == 'commands' or key == 'gitHost' or key == 'ipv6' or key == 'contentNow' \
|
||||
or key == 'time_of_day' or key == 'notification_emails' or key == 'domains':
|
||||
continue
|
||||
|
||||
if valueAlreadyChecked == 0:
|
||||
if value.find('- -') > -1 or value.find('\n') > -1 or value.find(';') > -1 or value.find(
|
||||
# Skip validation for API endpoints that need JSON structure characters
|
||||
if not isAPIEndpoint and valueAlreadyChecked == 0:
|
||||
# Only check string values, skip lists and other types
|
||||
if (type(value) == str or type(value) == bytes) and (value.find('- -') > -1 or value.find('\n') > -1 or value.find(';') > -1 or value.find(
|
||||
'&&') > -1 or value.find('|') > -1 or value.find('...') > -1 \
|
||||
or value.find("`") > -1 or value.find("$") > -1 or value.find("(") > -1 or value.find(
|
||||
")") > -1 \
|
||||
or value.find("'") > -1 or value.find("[") > -1 or value.find("]") > -1 or value.find(
|
||||
"{") > -1 or value.find("}") > -1 \
|
||||
or value.find(":") > -1 or value.find("<") > -1 or value.find(">") > -1 or value.find(
|
||||
"&") > -1:
|
||||
"&") > -1):
|
||||
logging.writeToFile(request.body)
|
||||
final_dic = {
|
||||
'error_message': "Data supplied is not accepted, following characters are not allowed in the input ` $ & ( ) [ ] { } ; : ‘ < >.",
|
||||
"errorMessage": "Data supplied is not accepted, following characters are not allowed in the input ` $ & ( ) [ ] { } ; : ‘ < >."}
|
||||
final_json = json.dumps(final_dic)
|
||||
return HttpResponse(final_json)
|
||||
if key.find(';') > -1 or key.find('&&') > -1 or key.find('|') > -1 or key.find('...') > -1 \
|
||||
# Skip key validation for API endpoints that need JSON structure characters
|
||||
if not isAPIEndpoint and (key.find(';') > -1 or key.find('&&') > -1 or key.find('|') > -1 or key.find('...') > -1 \
|
||||
or key.find("`") > -1 or key.find("$") > -1 or key.find("(") > -1 or key.find(")") > -1 \
|
||||
or key.find("'") > -1 or key.find("[") > -1 or key.find("]") > -1 or key.find(
|
||||
"{") > -1 or key.find("}") > -1 \
|
||||
or key.find(":") > -1 or key.find("<") > -1 or key.find(">") > -1 or key.find("&") > -1:
|
||||
or key.find(":") > -1 or key.find("<") > -1 or key.find(">") > -1 or key.find("&") > -1):
|
||||
logging.writeToFile(request.body)
|
||||
final_dic = {'error_message': "Data supplied is not accepted.",
|
||||
"errorMessage": "Data supplied is not accepted following characters are not allowed in the input ` $ & ( ) [ ] { } ; : ‘ < >."}
|
||||
@@ -9,7 +9,7 @@ def main():
parser.add_argument('function', help='Specific a function to call!')
|
||||
args = parser.parse_args()
|
||||
|
||||
command = f"/usr/local/CyberCP/bin/python /usr/local/CyberCP/plogical/IncScheduler.py '{args.function}'" % ()
|
||||
command = f"/usr/local/CyberCP/bin/python /usr/local/CyberCP/plogical/IncScheduler.py '{args.function}'"
|
||||
ProcessUtilities.normalExecutioner(command)
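For context, the removed "% ()" suffix is why the old line could blow up: printf-style formatting on a string is a no-op only while the string contains no "%" placeholder, so a function argument containing one would raise at runtime. A minimal reproduction with illustrative values:

# "% ()" does nothing here...
print("IncScheduler.py 'dailyRun'" % ())
# ...but raises as soon as the interpolated value contains a placeholder:
try:
    "IncScheduler.py 'task%d'" % ()
except TypeError as e:
    print(e)  # not enough arguments for format string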
@@ -81,7 +81,7 @@ class AIScannerManager:
# Get user's websites for scan selection using ACL-aware method
|
||||
try:
|
||||
websites = ACLManager.findWebsiteObjects(currentACL, userID)
|
||||
self.logger.writeToFile(f'[AIScannerManager.scannerHome] Found {websites.count()} websites for {admin.userName}')
|
||||
self.logger.writeToFile(f'[AIScannerManager.scannerHome] Found {len(websites)} websites for {admin.userName}')
|
||||
except Exception as e:
|
||||
self.logger.writeToFile(f'[AIScannerManager.scannerHome] Error fetching websites: {str(e)}')
|
||||
websites = []
@@ -0,0 +1,453 @@
from django.core.management.base import BaseCommand
|
||||
from django.utils import timezone
|
||||
from datetime import datetime, timedelta
|
||||
import json
|
||||
import time
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = 'Run scheduled AI security scans'
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument(
|
||||
'--daemon',
|
||||
action='store_true',
|
||||
help='Run as daemon, checking for scheduled scans every minute',
|
||||
)
|
||||
parser.add_argument(
|
||||
'--scan-id',
|
||||
type=int,
|
||||
help='Run a specific scheduled scan by ID',
|
||||
)
|
||||
parser.add_argument(
|
||||
'--verbose',
|
||||
action='store_true',
|
||||
help='Show detailed information about all scheduled scans',
|
||||
)
|
||||
parser.add_argument(
|
||||
'--force',
|
||||
action='store_true',
|
||||
help='Force run all active scheduled scans immediately, ignoring schedule',
|
||||
)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
self.verbose = options.get('verbose', False)
|
||||
self.force = options.get('force', False)
|
||||
|
||||
if options['daemon']:
|
||||
self.stdout.write('Starting scheduled scan daemon...')
|
||||
self.run_daemon()
|
||||
elif options['scan_id']:
|
||||
self.stdout.write(f'Running scheduled scan ID {options["scan_id"]}...')
|
||||
self.run_scheduled_scan_by_id(options['scan_id'])
|
||||
elif options['force']:
|
||||
self.stdout.write('Force running all active scheduled scans...')
|
||||
self.force_run_all_scans()
|
||||
else:
|
||||
self.stdout.write('Checking for scheduled scans to run...')
|
||||
self.check_and_run_scans()
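A hedged usage sketch for the new management command; the actual command name comes from the module's file name, which this hunk does not show, so "run_scheduled_scans" below is only a placeholder:

#   python manage.py run_scheduled_scans                # one-off check for due scans
#   python manage.py run_scheduled_scans --verbose      # also dump each schedule's details
#   python manage.py run_scheduled_scans --scan-id 3    # run a single schedule immediately
#   python manage.py run_scheduled_scans --daemon       # poll for due scans every minute
#   python manage.py run_scheduled_scans --force        # run every active schedule now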
|
||||
|
||||
def run_daemon(self):
|
||||
"""Run as daemon, checking for scans every minute"""
|
||||
while True:
|
||||
try:
|
||||
self.stdout.write(f'\n[{timezone.now().strftime("%Y-%m-%d %H:%M:%S UTC")}] Checking for scheduled scans...')
|
||||
self.check_and_run_scans()
|
||||
time.sleep(60) # Check every minute
|
||||
except KeyboardInterrupt:
|
||||
self.stdout.write('\nDaemon stopped by user')
|
||||
break
|
||||
except Exception as e:
|
||||
self.stderr.write(f'Error in daemon: {str(e)}')
|
||||
from plogical.CyberCPLogFileWriter import CyberCPLogFileWriter as logging
|
||||
logging.writeToFile(f'[Scheduled Scan Daemon] Error: {str(e)}')
|
||||
time.sleep(60) # Continue after error
|
||||
|
||||
def force_run_all_scans(self):
|
||||
"""Force run all active scheduled scans immediately"""
|
||||
from aiScanner.models import ScheduledScan
|
||||
from plogical.CyberCPLogFileWriter import CyberCPLogFileWriter as logging
|
||||
|
||||
# Find all active scheduled scans
|
||||
active_scans = ScheduledScan.objects.filter(status='active')
|
||||
|
||||
if active_scans.count() == 0:
|
||||
self.stdout.write('No active scheduled scans found')
|
||||
logging.writeToFile('[Scheduled Scan Force] No active scheduled scans found')
|
||||
return
|
||||
|
||||
self.stdout.write(f'Found {active_scans.count()} active scheduled scans to force run')
|
||||
logging.writeToFile(f'[Scheduled Scan Force] Found {active_scans.count()} active scheduled scans to force run')
|
||||
|
||||
for scan in active_scans:
|
||||
self.stdout.write(f'Force running scheduled scan: {scan.name} (ID: {scan.id})')
|
||||
logging.writeToFile(f'[Scheduled Scan Force] Force running scheduled scan: {scan.name} (ID: {scan.id})')
|
||||
self.execute_scheduled_scan(scan)
|
||||
|
||||
def check_and_run_scans(self):
|
||||
"""Check for scheduled scans that need to run"""
|
||||
from aiScanner.models import ScheduledScan
|
||||
from plogical.CyberCPLogFileWriter import CyberCPLogFileWriter as logging
|
||||
|
||||
now = timezone.now()
|
||||
|
||||
# Log all scheduled scans and their status
|
||||
all_scans = ScheduledScan.objects.all()
|
||||
self.stdout.write(f'Total scheduled scans: {all_scans.count()}')
|
||||
logging.writeToFile(f'[Scheduled Scan Check] Total scheduled scans: {all_scans.count()}')
|
||||
|
||||
for scan in all_scans:
|
||||
if self.verbose:
|
||||
self.stdout.write(f'\n--- Scan Details: {scan.name} (ID: {scan.id}) ---')
|
||||
self.stdout.write(f' Owner: {scan.admin.userName}')
|
||||
self.stdout.write(f' Frequency: {scan.frequency}')
|
||||
self.stdout.write(f' Scan Type: {scan.scan_type}')
|
||||
self.stdout.write(f' Status: {scan.status}')
|
||||
self.stdout.write(f' Domains: {", ".join(scan.domain_list)}')
|
||||
self.stdout.write(f' Created: {scan.created_at.strftime("%Y-%m-%d %H:%M:%S UTC")}')
|
||||
if scan.last_run:
|
||||
self.stdout.write(f' Last Run: {scan.last_run.strftime("%Y-%m-%d %H:%M:%S UTC")}')
|
||||
else:
|
||||
self.stdout.write(f' Last Run: Never')
|
||||
|
||||
if scan.status != 'active':
|
||||
reason = f'Scan "{scan.name}" (ID: {scan.id}) is not active (status: {scan.status})'
|
||||
self.stdout.write(f' ❌ {reason}')
|
||||
logging.writeToFile(f'[Scheduled Scan Check] {reason}')
|
||||
continue
|
||||
|
||||
if scan.next_run is None:
|
||||
reason = f'Scan "{scan.name}" (ID: {scan.id}) has no next_run scheduled'
|
||||
self.stdout.write(f' ❌ {reason}')
|
||||
logging.writeToFile(f'[Scheduled Scan Check] {reason}')
|
||||
# Try to calculate next run
|
||||
if self.verbose:
|
||||
self.stdout.write(f' 🔧 Attempting to calculate next run time...')
|
||||
try:
|
||||
scan.next_run = scan.calculate_next_run()
|
||||
scan.save()
|
||||
self.stdout.write(f' ✅ Next run set to: {scan.next_run.strftime("%Y-%m-%d %H:%M:%S UTC")}')
|
||||
except Exception as e:
|
||||
self.stdout.write(f' ❌ Failed to calculate next run: {str(e)}')
|
||||
continue
|
||||
|
||||
if scan.next_run > now:
|
||||
time_until_run = scan.next_run - now
|
||||
days = int(time_until_run.total_seconds() // 86400)
|
||||
hours = int((time_until_run.total_seconds() % 86400) // 3600)
|
||||
minutes = int((time_until_run.total_seconds() % 3600) // 60)
|
||||
|
||||
time_str = ""
|
||||
if days > 0:
|
||||
time_str = f"{days}d {hours}h {minutes}m"
|
||||
else:
|
||||
time_str = f"{hours}h {minutes}m"
|
||||
|
||||
reason = f'Scan "{scan.name}" (ID: {scan.id}) scheduled to run in {time_str} at {scan.next_run.strftime("%Y-%m-%d %H:%M:%S UTC")}'
|
||||
self.stdout.write(f' ⏰ {reason}')
|
||||
logging.writeToFile(f'[Scheduled Scan Check] {reason}')
|
||||
continue
|
||||
|
||||
# Find scans that are due to run
|
||||
due_scans = ScheduledScan.objects.filter(
|
||||
status='active',
|
||||
next_run__lte=now
|
||||
)
|
||||
|
||||
if due_scans.count() == 0:
|
||||
self.stdout.write('No scheduled scans are due to run at this time')
|
||||
logging.writeToFile('[Scheduled Scan Check] No scheduled scans are due to run at this time')
|
||||
else:
|
||||
self.stdout.write(f'Found {due_scans.count()} scans due to run')
|
||||
logging.writeToFile(f'[Scheduled Scan Check] Found {due_scans.count()} scans due to run')
|
||||
|
||||
for scan in due_scans:
|
||||
self.stdout.write(f'Running scheduled scan: {scan.name} (ID: {scan.id})')
|
||||
logging.writeToFile(f'[Scheduled Scan Check] Running scheduled scan: {scan.name} (ID: {scan.id})')
|
||||
self.execute_scheduled_scan(scan)
|
||||
|
||||
def run_scheduled_scan_by_id(self, scan_id):
|
||||
"""Run a specific scheduled scan by ID"""
|
||||
from aiScanner.models import ScheduledScan
|
||||
|
||||
try:
|
||||
scan = ScheduledScan.objects.get(id=scan_id)
|
||||
self.stdout.write(f'Running scheduled scan: {scan.name}')
|
||||
self.execute_scheduled_scan(scan)
|
||||
except ScheduledScan.DoesNotExist:
|
||||
self.stderr.write(f'Scheduled scan with ID {scan_id} not found')
|
||||
|
||||
def execute_scheduled_scan(self, scheduled_scan):
|
||||
"""Execute a scheduled scan"""
|
||||
from aiScanner.models import ScheduledScanExecution, ScanHistory
|
||||
from aiScanner.aiScannerManager import AIScannerManager
|
||||
from loginSystem.models import Administrator
|
||||
from websiteFunctions.models import Websites
|
||||
from plogical.CyberCPLogFileWriter import CyberCPLogFileWriter as logging
|
||||
|
||||
# Create execution record
|
||||
execution = ScheduledScanExecution.objects.create(
|
||||
scheduled_scan=scheduled_scan,
|
||||
status='running',
|
||||
started_at=timezone.now()
|
||||
)
|
||||
|
||||
try:
|
||||
# Update last run time
|
||||
scheduled_scan.last_run = timezone.now()
|
||||
scheduled_scan.next_run = scheduled_scan.calculate_next_run()
|
||||
scheduled_scan.save()
|
||||
|
||||
# Get domains to scan
|
||||
domains_to_scan = []
|
||||
admin = scheduled_scan.admin
|
||||
|
||||
# Validate domains still exist and user has access
|
||||
for domain in scheduled_scan.domain_list:
|
||||
try:
|
||||
website = Websites.objects.get(domain=domain, admin=admin)
|
||||
domains_to_scan.append(domain)
|
||||
except Websites.DoesNotExist:
|
||||
logging.writeToFile(f'[Scheduled Scan] Domain {domain} no longer accessible for user {admin.userName}')
|
||||
continue
|
||||
|
||||
if not domains_to_scan:
|
||||
execution.status = 'failed'
|
||||
execution.error_message = 'No accessible domains found for scanning'
|
||||
execution.completed_at = timezone.now()
|
||||
execution.save()
|
||||
self.stderr.write(f'No accessible domains for scheduled scan {scheduled_scan.name}')
|
||||
return
|
||||
|
||||
execution.set_scanned_domains(domains_to_scan)
|
||||
execution.total_scans = len(domains_to_scan)
|
||||
execution.save()
|
||||
|
||||
# Initialize scanner manager
|
||||
sm = AIScannerManager()
|
||||
scan_ids = []
|
||||
successful_scans = 0
|
||||
failed_scans = 0
|
||||
total_cost = 0.0
|
||||
|
||||
# Execute scans for each domain
|
||||
for domain in domains_to_scan:
|
||||
try:
|
||||
self.stdout.write(f'Starting scan for domain: {domain}')
|
||||
|
||||
# Create a fake request object for the scanner manager
|
||||
class FakeRequest:
|
||||
def __init__(self, admin_id, domain, scan_type):
|
||||
self.session = {'userID': admin_id}
|
||||
self.method = 'POST'
|
||||
self.POST = {
|
||||
'domain': domain,
|
||||
'scan_type': scan_type
|
||||
}
|
||||
# Create JSON body that startScan expects
|
||||
import json
|
||||
self.body = json.dumps({
|
||||
'domain': domain,
|
||||
'scan_type': scan_type
|
||||
}).encode('utf-8')
|
||||
|
||||
def get_host(self):
|
||||
# Get the hostname from CyberPanel settings
|
||||
try:
|
||||
from plogical.acl import ACLManager
|
||||
server_ip = ACLManager.fetchIP()
|
||||
return f"{server_ip}:8090" # Default CyberPanel port
|
||||
except:
|
||||
return "localhost:8090" # Fallback
|
||||
|
||||
fake_request = FakeRequest(admin.pk, domain, scheduled_scan.scan_type)
|
||||
|
||||
# Start the scan
|
||||
result = sm.startScan(fake_request, admin.pk)
|
||||
|
||||
if hasattr(result, 'content'):
|
||||
# It's an HTTP response, parse the JSON
|
||||
import json
|
||||
response_data = json.loads(result.content.decode('utf-8'))
|
||||
else:
|
||||
# It's already a dict
|
||||
response_data = result
|
||||
|
||||
if response_data.get('success'):
|
||||
scan_id = response_data.get('scan_id')
|
||||
if scan_id:
|
||||
scan_ids.append(scan_id)
|
||||
successful_scans += 1
|
||||
|
||||
# Get cost estimate if available
|
||||
if 'cost_estimate' in response_data:
|
||||
total_cost += float(response_data['cost_estimate'])
|
||||
|
||||
logging.writeToFile(f'[Scheduled Scan] Successfully started scan {scan_id} for {domain}')
|
||||
else:
|
||||
failed_scans += 1
|
||||
logging.writeToFile(f'[Scheduled Scan] Failed to get scan ID for {domain}')
|
||||
else:
|
||||
failed_scans += 1
|
||||
error_msg = response_data.get('error', 'Unknown error')
|
||||
logging.writeToFile(f'[Scheduled Scan] Failed to start scan for {domain}: {error_msg}')
|
||||
|
||||
except Exception as e:
|
||||
failed_scans += 1
|
||||
error_msg = str(e)
|
||||
logging.writeToFile(f'[Scheduled Scan] Exception starting scan for {domain}: {error_msg}')
|
||||
|
||||
# Small delay between scans to avoid overwhelming the system
|
||||
time.sleep(2)
|
||||
|
||||
# Update execution record
|
||||
execution.successful_scans = successful_scans
|
||||
execution.failed_scans = failed_scans
|
||||
execution.total_cost = total_cost
|
||||
execution.set_scan_ids(scan_ids)
|
||||
execution.status = 'completed'  # marked completed even if some individual scans failed
|
||||
execution.completed_at = timezone.now()
|
||||
execution.save()
|
||||
|
||||
# Send notifications if configured
|
||||
if scheduled_scan.email_notifications:
|
||||
self.send_notifications(scheduled_scan, execution)
|
||||
|
||||
self.stdout.write(
|
||||
f'Scheduled scan completed: {successful_scans} successful, {failed_scans} failed'
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
# Update execution record with error
|
||||
execution.status = 'failed'
|
||||
execution.error_message = str(e)
|
||||
execution.completed_at = timezone.now()
|
||||
execution.save()
|
||||
|
||||
logging.writeToFile(f'[Scheduled Scan] Failed to execute scheduled scan {scheduled_scan.name}: {str(e)}')
|
||||
self.stderr.write(f'Failed to execute scheduled scan {scheduled_scan.name}: {str(e)}')
|
||||
|
||||
# Send failure notification
|
||||
if scheduled_scan.email_notifications and scheduled_scan.notify_on_failure:
|
||||
self.send_failure_notification(scheduled_scan, str(e))
|
||||
|
||||
def send_notifications(self, scheduled_scan, execution):
|
||||
"""Send email notifications for completed scan"""
|
||||
try:
|
||||
# Determine if we should send notification
|
||||
should_notify = False
|
||||
|
||||
if execution.status == 'failed' and scheduled_scan.notify_on_failure:
|
||||
should_notify = True
|
||||
elif execution.status == 'completed':
|
||||
if scheduled_scan.notify_on_completion:
|
||||
should_notify = True
|
||||
elif scheduled_scan.notify_on_threats and execution.successful_scans > 0:
|
||||
# Check if any scans found threats
|
||||
# This would require checking the scan results, which might not be available immediately
|
||||
# For now, we'll just send completion notifications
|
||||
should_notify = scheduled_scan.notify_on_completion
|
||||
|
||||
if should_notify:
|
||||
self.send_execution_notification(scheduled_scan, execution)
|
||||
|
||||
except Exception as e:
|
||||
from plogical.CyberCPLogFileWriter import CyberCPLogFileWriter as logging
|
||||
logging.writeToFile(f'[Scheduled Scan] Failed to send notification: {str(e)}')
|
||||
|
||||
def send_execution_notification(self, scheduled_scan, execution):
|
||||
"""Send notification email for scan execution"""
|
||||
try:
|
||||
# Get notification emails
|
||||
notification_emails = scheduled_scan.notification_email_list
|
||||
if not notification_emails:
|
||||
# Use admin email as fallback
|
||||
notification_emails = [scheduled_scan.admin.email] if scheduled_scan.admin.email else []
|
||||
|
||||
if not notification_emails:
|
||||
return
|
||||
|
||||
# Prepare email content
|
||||
subject = f'AI Scanner: Scheduled Scan "{scheduled_scan.name}" Completed'
|
||||
|
||||
status_text = execution.status.title()
|
||||
if execution.status == 'completed':
|
||||
if execution.failed_scans == 0:
|
||||
status_text = 'Completed Successfully'
|
||||
else:
|
||||
status_text = f'Completed with {execution.failed_scans} failures'
|
||||
|
||||
message = f"""
|
||||
Scheduled AI Security Scan Report
|
||||
|
||||
Scan Name: {scheduled_scan.name}
|
||||
Status: {status_text}
|
||||
Execution Time: {execution.execution_time.strftime('%Y-%m-%d %H:%M:%S UTC')}
|
||||
|
||||
Results:
|
||||
- Total Domains: {execution.total_scans}
|
||||
- Successful Scans: {execution.successful_scans}
|
||||
- Failed Scans: {execution.failed_scans}
|
||||
- Total Cost: ${execution.total_cost:.4f}
|
||||
|
||||
Domains Scanned: {', '.join(execution.scanned_domains)}
|
||||
|
||||
{f'Error Message: {execution.error_message}' if execution.error_message else ''}
|
||||
|
||||
Scan IDs: {', '.join(execution.scan_id_list)}
|
||||
|
||||
View detailed results in your CyberPanel AI Scanner dashboard.
|
||||
"""
|
||||
|
||||
# Send email using CyberPanel's email system
|
||||
from plogical.mailUtilities import mailUtilities
|
||||
sender = 'noreply@cyberpanel.local'
|
||||
mailUtilities.SendEmail(sender, notification_emails, message)
|
||||
|
||||
# Log notification sent
|
||||
from plogical.CyberCPLogFileWriter import CyberCPLogFileWriter as logging
|
||||
logging.writeToFile(f'[Scheduled Scan] Notification sent for {scheduled_scan.name} to {len(notification_emails)} recipients')
|
||||
|
||||
except Exception as e:
|
||||
from plogical.CyberCPLogFileWriter import CyberCPLogFileWriter as logging
|
||||
logging.writeToFile(f'[Scheduled Scan] Failed to send notification email: {str(e)}')
|
||||
|
||||
def send_failure_notification(self, scheduled_scan, error_message):
|
||||
"""Send notification email for scan failure"""
|
||||
try:
|
||||
# Get notification emails
|
||||
notification_emails = scheduled_scan.notification_email_list
|
||||
if not notification_emails:
|
||||
# Use admin email as fallback
|
||||
notification_emails = [scheduled_scan.admin.email] if scheduled_scan.admin.email else []
|
||||
|
||||
if not notification_emails:
|
||||
return
|
||||
|
||||
# Prepare email content
|
||||
subject = f'AI Scanner: Scheduled Scan "{scheduled_scan.name}" Failed'
|
||||
|
||||
message = f"""
|
||||
Scheduled AI Security Scan Failure
|
||||
|
||||
Scan Name: {scheduled_scan.name}
|
||||
Status: Failed
|
||||
Time: {timezone.now().strftime('%Y-%m-%d %H:%M:%S UTC')}
|
||||
|
||||
Error: {error_message}
|
||||
|
||||
Please check your CyberPanel AI Scanner configuration and try again.
|
||||
"""
|
||||
|
||||
# Send email using CyberPanel's email system
|
||||
from plogical.mailUtilities import mailUtilities
|
||||
sender = 'noreply@cyberpanel.local'
|
||||
mailUtilities.SendEmail(sender, notification_emails, message)
|
||||
|
||||
# Log notification sent
|
||||
from plogical.CyberCPLogFileWriter import CyberCPLogFileWriter as logging
|
||||
logging.writeToFile(f'[Scheduled Scan] Failure notification sent for {scheduled_scan.name}')
|
||||
|
||||
except Exception as e:
|
||||
from plogical.CyberCPLogFileWriter import CyberCPLogFileWriter as logging
|
||||
logging.writeToFile(f'[Scheduled Scan] Failed to send failure notification email: {str(e)}')
@@ -107,3 +107,216 @@ class FileAccessToken(models.Model):
def is_expired(self):
|
||||
from django.utils import timezone
|
||||
return timezone.now() > self.expires_at
|
||||
|
||||
|
||||
class ScheduledScan(models.Model):
|
||||
"""Store scheduled scan configurations"""
|
||||
FREQUENCY_CHOICES = [
|
||||
('daily', 'Daily'),
|
||||
('weekly', 'Weekly'),
|
||||
('monthly', 'Monthly'),
|
||||
('quarterly', 'Quarterly'),
|
||||
]
|
||||
|
||||
SCAN_TYPE_CHOICES = [
|
||||
('full', 'Full Scan'),
|
||||
('quick', 'Quick Scan'),
|
||||
('custom', 'Custom Scan'),
|
||||
]
|
||||
|
||||
STATUS_CHOICES = [
|
||||
('active', 'Active'),
|
||||
('paused', 'Paused'),
|
||||
('disabled', 'Disabled'),
|
||||
]
|
||||
|
||||
admin = models.ForeignKey(Administrator, on_delete=models.CASCADE, related_name='scheduled_scans')
|
||||
name = models.CharField(max_length=200, help_text="Name for this scheduled scan")
|
||||
domains = models.TextField(help_text="JSON array of domains to scan")
|
||||
frequency = models.CharField(max_length=20, choices=FREQUENCY_CHOICES, default='weekly')
|
||||
scan_type = models.CharField(max_length=20, choices=SCAN_TYPE_CHOICES, default='full')
|
||||
time_of_day = models.TimeField(help_text="Time of day to run the scan (UTC)")
|
||||
day_of_week = models.IntegerField(null=True, blank=True, help_text="Day of week for weekly scans (0=Monday, 6=Sunday)")
|
||||
day_of_month = models.IntegerField(null=True, blank=True, help_text="Day of month for monthly scans (1-31)")
|
||||
status = models.CharField(max_length=20, choices=STATUS_CHOICES, default='active')
|
||||
last_run = models.DateTimeField(null=True, blank=True)
|
||||
next_run = models.DateTimeField(null=True, blank=True)
|
||||
created_at = models.DateTimeField(auto_now_add=True)
|
||||
updated_at = models.DateTimeField(auto_now=True)
|
||||
|
||||
# Notification settings
|
||||
email_notifications = models.BooleanField(default=True)
|
||||
notification_emails = models.TextField(blank=True, help_text="JSON array of email addresses")
|
||||
notify_on_threats = models.BooleanField(default=True)
|
||||
notify_on_completion = models.BooleanField(default=False)
|
||||
notify_on_failure = models.BooleanField(default=True)
|
||||
|
||||
class Meta:
|
||||
db_table = 'ai_scanner_scheduled_scans'
|
||||
ordering = ['-created_at']
|
||||
|
||||
def __str__(self):
|
||||
return f"Scheduled Scan: {self.name} ({self.frequency})"
|
||||
|
||||
@property
|
||||
def domain_list(self):
|
||||
"""Parse domains JSON"""
|
||||
if self.domains:
|
||||
try:
|
||||
return json.loads(self.domains)
|
||||
except json.JSONDecodeError:
|
||||
return []
|
||||
return []
|
||||
|
||||
@property
|
||||
def notification_email_list(self):
|
||||
"""Parse notification emails JSON"""
|
||||
if self.notification_emails:
|
||||
try:
|
||||
return json.loads(self.notification_emails)
|
||||
except json.JSONDecodeError:
|
||||
return []
|
||||
return []
|
||||
|
||||
def set_domains(self, domain_list):
|
||||
"""Set domains from list"""
|
||||
self.domains = json.dumps(domain_list)
|
||||
|
||||
def set_notification_emails(self, email_list):
|
||||
"""Set notification emails from list"""
|
||||
self.notification_emails = json.dumps(email_list)
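The domains and notification_emails fields above rely on a plain JSON-in-TextField round trip. A minimal standalone sketch of what the setters write and the properties parse back (no Django required; values are illustrative):

import json

domains = ['example.com', 'shop.example.com']
stored = json.dumps(domains)           # what set_domains() stores in the TextField
assert json.loads(stored) == domains   # what the domain_list property returns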
|
||||
|
||||
def calculate_next_run(self):
|
||||
"""Calculate next run time based on frequency"""
|
||||
from django.utils import timezone
|
||||
from datetime import datetime, timedelta
|
||||
import calendar
|
||||
|
||||
now = timezone.now()
|
||||
|
||||
if self.frequency == 'daily':
|
||||
# Daily: next run is tomorrow at specified time
|
||||
next_run = now.replace(hour=self.time_of_day.hour, minute=self.time_of_day.minute, second=0, microsecond=0)
|
||||
if next_run <= now:
|
||||
next_run += timedelta(days=1)
|
||||
|
||||
elif self.frequency == 'weekly':
|
||||
# Weekly: next run is on specified day of week at specified time
|
||||
days_ahead = self.day_of_week - now.weekday()
|
||||
if days_ahead <= 0: # Target day already happened this week
|
||||
days_ahead += 7
|
||||
next_run = now + timedelta(days=days_ahead)
|
||||
next_run = next_run.replace(hour=self.time_of_day.hour, minute=self.time_of_day.minute, second=0, microsecond=0)
|
||||
|
||||
elif self.frequency == 'monthly':
|
||||
# Monthly: next run is on specified day of month at specified time
|
||||
year = now.year
|
||||
month = now.month
|
||||
day = min(self.day_of_month, calendar.monthrange(year, month)[1])
|
||||
|
||||
next_run = now.replace(day=day, hour=self.time_of_day.hour, minute=self.time_of_day.minute, second=0, microsecond=0)
|
||||
|
||||
if next_run <= now:
|
||||
# Move to next month
|
||||
if month == 12:
|
||||
year += 1
|
||||
month = 1
|
||||
else:
|
||||
month += 1
|
||||
day = min(self.day_of_month, calendar.monthrange(year, month)[1])
|
||||
next_run = next_run.replace(year=year, month=month, day=day)
|
||||
|
||||
elif self.frequency == 'quarterly':
|
||||
# Quarterly: next run is 3 months from now
|
||||
next_run = now.replace(hour=self.time_of_day.hour, minute=self.time_of_day.minute, second=0, microsecond=0)
|
||||
month = now.month
|
||||
year = now.year
|
||||
|
||||
# Add 3 months
|
||||
month += 3
|
||||
if month > 12:
|
||||
year += 1
|
||||
month -= 12
|
||||
|
||||
day = min(self.day_of_month or 1, calendar.monthrange(year, month)[1])
|
||||
next_run = next_run.replace(year=year, month=month, day=day)
|
||||
|
||||
if next_run <= now:
|
||||
# Add another 3 months
|
||||
month += 3
|
||||
if month > 12:
|
||||
year += 1
|
||||
month -= 12
|
||||
day = min(self.day_of_month or 1, calendar.monthrange(year, month)[1])
|
||||
next_run = next_run.replace(year=year, month=month, day=day)
|
||||
|
||||
else:
|
||||
# Default to weekly
|
||||
next_run = now + timedelta(weeks=1)
|
||||
|
||||
return next_run
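A minimal sketch of the weekly roll-forward arithmetic in calculate_next_run above, with hypothetical values (naive datetimes stand in for timezone.now(), which is timezone-aware in the real model):

from datetime import datetime, timedelta

now = datetime(2025, 1, 1, 12, 0)          # a Wednesday, weekday() == 2
day_of_week = 0                            # schedule configured for Monday
days_ahead = day_of_week - now.weekday()   # -2, i.e. Monday already passed this week
if days_ahead <= 0:
    days_ahead += 7                        # 5
next_run = (now + timedelta(days=days_ahead)).replace(hour=2, minute=0, second=0, microsecond=0)
print(next_run)                            # 2025-01-06 02:00:00, the following Monday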
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
"""Override save to calculate next_run"""
|
||||
if not self.next_run or self.status == 'active':
|
||||
self.next_run = self.calculate_next_run()
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
|
||||
class ScheduledScanExecution(models.Model):
|
||||
"""Track individual executions of scheduled scans"""
|
||||
STATUS_CHOICES = [
|
||||
('pending', 'Pending'),
|
||||
('running', 'Running'),
|
||||
('completed', 'Completed'),
|
||||
('failed', 'Failed'),
|
||||
('cancelled', 'Cancelled'),
|
||||
]
|
||||
|
||||
scheduled_scan = models.ForeignKey(ScheduledScan, on_delete=models.CASCADE, related_name='executions')
|
||||
execution_time = models.DateTimeField(auto_now_add=True)
|
||||
status = models.CharField(max_length=20, choices=STATUS_CHOICES, default='pending')
|
||||
domains_scanned = models.TextField(blank=True, help_text="JSON array of domains that were scanned")
|
||||
total_scans = models.IntegerField(default=0)
|
||||
successful_scans = models.IntegerField(default=0)
|
||||
failed_scans = models.IntegerField(default=0)
|
||||
total_cost = models.DecimalField(max_digits=10, decimal_places=6, default=0.0)
|
||||
scan_ids = models.TextField(blank=True, help_text="JSON array of scan IDs created")
|
||||
error_message = models.TextField(blank=True, null=True)
|
||||
started_at = models.DateTimeField(null=True, blank=True)
|
||||
completed_at = models.DateTimeField(null=True, blank=True)
|
||||
|
||||
class Meta:
|
||||
db_table = 'ai_scanner_scheduled_executions'
|
||||
ordering = ['-execution_time']
|
||||
|
||||
def __str__(self):
|
||||
return f"Execution of {self.scheduled_scan.name} at {self.execution_time}"
|
||||
|
||||
@property
|
||||
def scanned_domains(self):
|
||||
"""Parse domains scanned JSON"""
|
||||
if self.domains_scanned:
|
||||
try:
|
||||
return json.loads(self.domains_scanned)
|
||||
except json.JSONDecodeError:
|
||||
return []
|
||||
return []
|
||||
|
||||
@property
|
||||
def scan_id_list(self):
|
||||
"""Parse scan IDs JSON"""
|
||||
if self.scan_ids:
|
||||
try:
|
||||
return json.loads(self.scan_ids)
|
||||
except json.JSONDecodeError:
|
||||
return []
|
||||
return []
|
||||
|
||||
def set_scanned_domains(self, domain_list):
|
||||
"""Set scanned domains from list"""
|
||||
self.domains_scanned = json.dumps(domain_list)
|
||||
|
||||
def set_scan_ids(self, scan_id_list):
|
||||
"""Set scan IDs from list"""
|
||||
self.scan_ids = json.dumps(scan_id_list)
@@ -0,0 +1,270 @@
from django.shortcuts import render, redirect
|
||||
from django.http import JsonResponse
|
||||
from django.views.decorators.csrf import csrf_exempt
|
||||
from django.views.decorators.http import require_http_methods
|
||||
from loginSystem.views import loadLoginPage
|
||||
import json
|
||||
|
||||
|
||||
@require_http_methods(['GET', 'POST'])
|
||||
def scheduledScans(request):
|
||||
"""Manage scheduled scans"""
|
||||
try:
|
||||
userID = request.session['userID']
|
||||
from loginSystem.models import Administrator
|
||||
from .models import ScheduledScan
|
||||
from plogical.acl import ACLManager
|
||||
|
||||
admin = Administrator.objects.get(pk=userID)
|
||||
currentACL = ACLManager.loadedACL(userID)
|
||||
|
||||
if request.method == 'GET':
|
||||
# Get scheduled scans with ACL respect
|
||||
if currentACL['admin'] == 1:
|
||||
# Admin can see all scheduled scans
|
||||
scheduled_scans = ScheduledScan.objects.all()
|
||||
else:
|
||||
# Users can only see their own scheduled scans and their sub-users' scans
|
||||
user_admins = ACLManager.loadUserObjects(userID)
|
||||
scheduled_scans = ScheduledScan.objects.filter(admin__in=user_admins)
|
||||
|
||||
scan_data = []
|
||||
for scan in scheduled_scans:
|
||||
scan_data.append({
|
||||
'id': scan.id,
|
||||
'name': scan.name,
|
||||
'domains': scan.domain_list,
|
||||
'frequency': scan.frequency,
|
||||
'scan_type': scan.scan_type,
|
||||
'time_of_day': scan.time_of_day.strftime('%H:%M'),
|
||||
'day_of_week': scan.day_of_week,
|
||||
'day_of_month': scan.day_of_month,
|
||||
'status': scan.status,
|
||||
'last_run': scan.last_run.isoformat() if scan.last_run else None,
|
||||
'next_run': scan.next_run.isoformat() if scan.next_run else None,
|
||||
'email_notifications': scan.email_notifications,
|
||||
'notification_emails': scan.notification_email_list,
|
||||
'notify_on_threats': scan.notify_on_threats,
|
||||
'notify_on_completion': scan.notify_on_completion,
|
||||
'notify_on_failure': scan.notify_on_failure,
|
||||
'created_at': scan.created_at.isoformat()
|
||||
})
|
||||
|
||||
return JsonResponse({'success': True, 'scheduled_scans': scan_data})
|
||||
|
||||
elif request.method == 'POST':
|
||||
# Create new scheduled scan
|
||||
data = json.loads(request.body)
|
||||
|
||||
# Validate required fields
|
||||
required_fields = ['name', 'domains', 'frequency', 'scan_type', 'time_of_day']
|
||||
for field in required_fields:
|
||||
if field not in data or not data[field]:
|
||||
return JsonResponse({'success': False, 'error': f'Missing required field: {field}'})
|
||||
|
||||
# Validate domains
|
||||
if not isinstance(data['domains'], list) or len(data['domains']) == 0:
|
||||
return JsonResponse({'success': False, 'error': 'At least one domain must be selected'})
|
||||
|
||||
# Check if user has access to these domains
|
||||
if currentACL['admin'] != 1:
|
||||
from websiteFunctions.models import Websites
|
||||
user_domains = set(Websites.objects.filter(admin=admin).values_list('domain', flat=True))
|
||||
requested_domains = set(data['domains'])
|
||||
|
||||
if not requested_domains.issubset(user_domains):
|
||||
return JsonResponse({'success': False, 'error': 'You do not have access to some of the selected domains'})
|
||||
|
||||
# Parse time
|
||||
from datetime import datetime
|
||||
try:
|
||||
time_obj = datetime.strptime(data['time_of_day'], '%H:%M').time()
|
||||
except ValueError:
|
||||
return JsonResponse({'success': False, 'error': 'Invalid time format'})
|
||||
|
||||
# Create scheduled scan
|
||||
scheduled_scan = ScheduledScan(
|
||||
admin=admin,
|
||||
name=data['name'],
|
||||
frequency=data['frequency'],
|
||||
scan_type=data['scan_type'],
|
||||
time_of_day=time_obj,
|
||||
email_notifications=data.get('email_notifications', True),
|
||||
notify_on_threats=data.get('notify_on_threats', True),
|
||||
notify_on_completion=data.get('notify_on_completion', False),
|
||||
notify_on_failure=data.get('notify_on_failure', True)
|
||||
)
|
||||
|
||||
# Set domains
|
||||
scheduled_scan.set_domains(data['domains'])
|
||||
|
||||
# Set notification emails
|
||||
if data.get('notification_emails'):
|
||||
scheduled_scan.set_notification_emails(data['notification_emails'])
|
||||
|
||||
# Set frequency-specific fields
|
||||
if data['frequency'] == 'weekly' and 'day_of_week' in data:
|
||||
scheduled_scan.day_of_week = int(data['day_of_week'])
|
||||
elif data['frequency'] in ['monthly', 'quarterly'] and 'day_of_month' in data:
|
||||
scheduled_scan.day_of_month = int(data['day_of_month'])
|
||||
|
||||
scheduled_scan.save()
|
||||
|
||||
return JsonResponse({'success': True, 'id': scheduled_scan.id})
|
||||
|
||||
except KeyError:
|
||||
return JsonResponse({'success': False, 'error': 'Not authenticated'})
|
||||
except Exception as e:
|
||||
return JsonResponse({'success': False, 'error': str(e)})
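What a successful POST to this view amounts to on the model side; a hedged sketch with illustrative values, where field names follow the ScheduledScan model earlier in this diff and "admin" stands for the authenticated Administrator:

from datetime import time

scan = ScheduledScan(
    admin=admin,
    name='Weekly full scan',
    frequency='weekly',
    scan_type='full',
    time_of_day=time(2, 0),        # 02:00 UTC
)
scan.day_of_week = 6               # Sunday (0=Monday per the model's help_text)
scan.set_domains(['example.com'])
scan.set_notification_emails(['admin@example.com'])
scan.save()                        # save() computes next_run via calculate_next_run()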
|
||||
|
||||
|
||||
@require_http_methods(['GET', 'DELETE'])
|
||||
def scheduledScanDetail(request, scan_id):
|
||||
"""Get or delete a specific scheduled scan"""
|
||||
try:
|
||||
userID = request.session['userID']
|
||||
from loginSystem.models import Administrator
|
||||
from .models import ScheduledScan
|
||||
from plogical.acl import ACLManager
|
||||
|
||||
admin = Administrator.objects.get(pk=userID)
|
||||
currentACL = ACLManager.loadedACL(userID)
|
||||
|
||||
# Get scheduled scan with ACL respect
|
||||
try:
|
||||
scheduled_scan = ScheduledScan.objects.get(id=scan_id)
|
||||
|
||||
# Check if user has access to this scheduled scan
|
||||
if currentACL['admin'] != 1:
|
||||
user_admins = ACLManager.loadUserObjects(userID)
|
||||
if scheduled_scan.admin not in user_admins:
|
||||
return JsonResponse({'success': False, 'error': 'Access denied to this scheduled scan'})
|
||||
except ScheduledScan.DoesNotExist:
|
||||
return JsonResponse({'success': False, 'error': 'Scheduled scan not found'})
|
||||
|
||||
if request.method == 'GET':
|
||||
# Return scheduled scan details
|
||||
scan_data = {
|
||||
'id': scheduled_scan.id,
|
||||
'name': scheduled_scan.name,
|
||||
'domains': scheduled_scan.domain_list,
|
||||
'frequency': scheduled_scan.frequency,
|
||||
'scan_type': scheduled_scan.scan_type,
|
||||
'time_of_day': scheduled_scan.time_of_day.strftime('%H:%M'),
|
||||
'day_of_week': scheduled_scan.day_of_week,
|
||||
'day_of_month': scheduled_scan.day_of_month,
|
||||
'status': scheduled_scan.status,
|
||||
'last_run': scheduled_scan.last_run.isoformat() if scheduled_scan.last_run else None,
|
||||
'next_run': scheduled_scan.next_run.isoformat() if scheduled_scan.next_run else None,
|
||||
'email_notifications': scheduled_scan.email_notifications,
|
||||
'notification_emails': scheduled_scan.notification_email_list,
|
||||
'notify_on_threats': scheduled_scan.notify_on_threats,
|
||||
'notify_on_completion': scheduled_scan.notify_on_completion,
|
||||
'notify_on_failure': scheduled_scan.notify_on_failure,
|
||||
'created_at': scheduled_scan.created_at.isoformat()
|
||||
}
|
||||
|
||||
return JsonResponse({'success': True, 'scheduled_scan': scan_data})
|
||||
|
||||
elif request.method == 'DELETE':
|
||||
# Delete scheduled scan
|
||||
scheduled_scan.delete()
|
||||
return JsonResponse({'success': True})
|
||||
|
||||
except KeyError:
|
||||
return JsonResponse({'success': False, 'error': 'Not authenticated'})
|
||||
except Exception as e:
|
||||
return JsonResponse({'success': False, 'error': str(e)})
|
||||
|
||||
|
||||
@csrf_exempt
|
||||
@require_http_methods(['POST'])
|
||||
def toggleScheduledScan(request, scan_id):
|
||||
"""Toggle scheduled scan status (active/paused)"""
|
||||
try:
|
||||
userID = request.session['userID']
|
||||
from loginSystem.models import Administrator
|
||||
from .models import ScheduledScan
|
||||
from plogical.acl import ACLManager
|
||||
|
||||
admin = Administrator.objects.get(pk=userID)
|
||||
currentACL = ACLManager.loadedACL(userID)
|
||||
|
||||
# Get scheduled scan with ACL respect
|
||||
try:
|
||||
scheduled_scan = ScheduledScan.objects.get(id=scan_id)
|
||||
|
||||
# Check if user has access to this scheduled scan
|
||||
if currentACL['admin'] != 1:
|
||||
user_admins = ACLManager.loadUserObjects(userID)
|
||||
if scheduled_scan.admin not in user_admins:
|
||||
return JsonResponse({'success': False, 'error': 'Access denied to this scheduled scan'})
|
||||
except ScheduledScan.DoesNotExist:
|
||||
return JsonResponse({'success': False, 'error': 'Scheduled scan not found'})
|
||||
|
||||
# Toggle status
|
||||
if scheduled_scan.status == 'active':
|
||||
scheduled_scan.status = 'paused'
|
||||
else:
|
||||
scheduled_scan.status = 'active'
|
||||
|
||||
scheduled_scan.save()
|
||||
|
||||
return JsonResponse({'success': True, 'status': scheduled_scan.status})
|
||||
|
||||
except KeyError:
|
||||
return JsonResponse({'success': False, 'error': 'Not authenticated'})
|
||||
except Exception as e:
|
||||
return JsonResponse({'success': False, 'error': str(e)})
|
||||
|
||||
|
||||
@require_http_methods(['GET'])
|
||||
def scheduledScanExecutions(request, scan_id):
|
||||
"""Get execution history for a scheduled scan"""
|
||||
try:
|
||||
userID = request.session['userID']
|
||||
from loginSystem.models import Administrator
|
||||
from .models import ScheduledScan, ScheduledScanExecution
|
||||
from plogical.acl import ACLManager
|
||||
|
||||
admin = Administrator.objects.get(pk=userID)
|
||||
currentACL = ACLManager.loadedACL(userID)
|
||||
|
||||
# Get scheduled scan with ACL respect
|
||||
try:
|
||||
scheduled_scan = ScheduledScan.objects.get(id=scan_id)
|
||||
|
||||
# Check if user has access to this scheduled scan
|
||||
if currentACL['admin'] != 1:
|
||||
user_admins = ACLManager.loadUserObjects(userID)
|
||||
if scheduled_scan.admin not in user_admins:
|
||||
return JsonResponse({'success': False, 'error': 'Access denied to this scheduled scan'})
|
||||
except ScheduledScan.DoesNotExist:
|
||||
return JsonResponse({'success': False, 'error': 'Scheduled scan not found'})
|
||||
|
||||
# Get execution history
|
||||
executions = ScheduledScanExecution.objects.filter(scheduled_scan=scheduled_scan).order_by('-execution_time')[:20]
|
||||
|
||||
execution_data = []
|
||||
for execution in executions:
|
||||
execution_data.append({
|
||||
'id': execution.id,
|
||||
'execution_time': execution.execution_time.isoformat(),
|
||||
'status': execution.status,
|
||||
'domains_scanned': execution.scanned_domains,
|
||||
'total_scans': execution.total_scans,
|
||||
'successful_scans': execution.successful_scans,
|
||||
'failed_scans': execution.failed_scans,
|
||||
'total_cost': float(execution.total_cost),
|
||||
'scan_ids': execution.scan_id_list,
|
||||
'error_message': execution.error_message,
|
||||
'started_at': execution.started_at.isoformat() if execution.started_at else None,
|
||||
'completed_at': execution.completed_at.isoformat() if execution.completed_at else None
|
||||
})
|
||||
|
||||
return JsonResponse({'success': True, 'executions': execution_data})
|
||||
|
||||
except KeyError:
|
||||
return JsonResponse({'success': False, 'error': 'Not authenticated'})
|
||||
except Exception as e:
|
||||
return JsonResponse({'success': False, 'error': str(e)})
@@ -642,6 +642,27 @@ AI Security Scanner - CyberPanel
</div>
|
||||
</form>
|
||||
</div>
|
||||
|
||||
<!-- Scheduled Scans -->
|
||||
{% if is_payment_configured or vps_info.is_vps|default:False %}
|
||||
<div class="scanner-section">
|
||||
<div style="display: flex; justify-content: space-between; align-items: center; margin-bottom: 20px;">
|
||||
<h3 class="scanner-title" style="margin: 0;">
|
||||
<i class="fas fa-calendar-alt" style="color: #5856d6;"></i>
|
||||
Scheduled Scans
|
||||
</h3>
|
||||
<button class="btn-primary btn-xs" onclick="showScheduleModal()">
|
||||
<i class="fas fa-plus"></i> Schedule New Scan
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<div id="scheduledScansContainer">
|
||||
<!-- Scheduled scans will be loaded here -->
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<!-- Scan History -->
@@ -1539,5 +1560,454 @@ setInterval(() => {
refreshScanHistory();
|
||||
}
|
||||
}, 30000);
|
||||
|
||||
// Scheduled Scans Functions
|
||||
function showScheduleModal() {
|
||||
// Reset form for new schedule
|
||||
document.getElementById('scheduleForm').reset();
|
||||
document.getElementById('scheduleId').value = '';
|
||||
|
||||
// Uncheck all domain checkboxes
|
||||
document.querySelectorAll('input[name="domains"]').forEach(cb => cb.checked = false);
|
||||
|
||||
// Reset checkboxes to their defaults
|
||||
document.getElementById('emailNotifications').checked = true;
|
||||
document.getElementById('notifyOnThreats').checked = true;
|
||||
document.getElementById('notifyOnCompletion').checked = false;
|
||||
document.getElementById('notifyOnFailure').checked = true;
|
||||
|
||||
$('#scheduleModal').modal('show');
|
||||
}
|
||||
|
||||
function loadScheduledScans() {
|
||||
fetch('/aiscanner/scheduled-scans/')
|
||||
.then(response => response.json())
|
||||
.then(data => {
|
||||
if (data.success) {
|
||||
displayScheduledScans(data.scheduled_scans);
|
||||
} else {
|
||||
console.error('Failed to load scheduled scans:', data.error);
|
||||
}
|
||||
})
|
||||
.catch(error => {
|
||||
console.error('Error loading scheduled scans:', error);
|
||||
});
|
||||
}
|
||||
|
||||
function displayScheduledScans(scans) {
|
||||
const container = document.getElementById('scheduledScansContainer');
|
||||
|
||||
if (!scans || scans.length === 0) {
|
||||
container.innerHTML = '<p style="color: #94a3b8; text-align: center; padding: 20px;">No scheduled scans configured yet.</p>';
|
||||
return;
|
||||
}
|
||||
|
||||
let html = '<div class="scheduled-scans-grid">';
|
||||
|
||||
scans.forEach(scan => {
|
||||
const statusClass = scan.status === 'active' ? 'success' : scan.status === 'paused' ? 'warning' : 'danger';
|
||||
const nextRun = scan.next_run ? new Date(scan.next_run).toLocaleString() : 'Not scheduled';
|
||||
const lastRun = scan.last_run ? new Date(scan.last_run).toLocaleString() : 'Never';
|
||||
|
||||
html += `
|
||||
<div class="scheduled-scan-card">
|
||||
<div class="scheduled-scan-header">
|
||||
<h4>${scan.name}</h4>
|
||||
<span class="status-label ${statusClass}">${scan.status}</span>
|
||||
</div>
|
||||
<div class="scheduled-scan-info">
|
||||
<p><strong>Frequency:</strong> ${scan.frequency}</p>
|
||||
<p><strong>Scan Type:</strong> ${scan.scan_type}</p>
|
||||
<p><strong>Domains:</strong> ${scan.domains.join(', ')}</p>
|
||||
<p><strong>Next Run:</strong> ${nextRun}</p>
|
||||
<p><strong>Last Run:</strong> ${lastRun}</p>
|
||||
</div>
|
||||
<div class="scheduled-scan-actions">
|
||||
<button class="btn-xs btn-default" onclick="editScheduledScan(${scan.id})">
|
||||
<i class="fas fa-edit"></i> Edit
|
||||
</button>
|
||||
<button class="btn-xs btn-${scan.status === 'active' ? 'warning' : 'success'}"
|
||||
onclick="toggleScheduledScan(${scan.id})">
|
||||
<i class="fas fa-${scan.status === 'active' ? 'pause' : 'play'}"></i>
|
||||
${scan.status === 'active' ? 'Pause' : 'Activate'}
|
||||
</button>
|
||||
<button class="btn-xs btn-danger" onclick="deleteScheduledScan(${scan.id})">
|
||||
<i class="fas fa-trash"></i> Delete
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
`;
|
||||
});
|
||||
|
||||
html += '</div>';
|
||||
container.innerHTML = html;
|
||||
}
|
||||
|
||||
function saveScheduledScan() {
|
||||
const form = document.getElementById('scheduleForm');
|
||||
const formData = new FormData(form);
|
||||
const data = {};
|
||||
|
||||
// Process form data, excluding checkboxes and multi-select fields
|
||||
for (let [key, value] of formData.entries()) {
|
||||
if (!['email_notifications', 'notify_on_threats', 'notify_on_completion', 'notify_on_failure', 'domains'].includes(key)) {
|
||||
data[key] = value;
|
||||
}
|
||||
}
|
||||
|
||||
// Get selected domains
|
||||
const selectedDomains = Array.from(document.querySelectorAll('input[name="domains"]:checked'))
|
||||
.map(cb => cb.value);
|
||||
|
||||
if (selectedDomains.length === 0) {
|
||||
alert('Please select at least one domain to scan.');
|
||||
return;
|
||||
}
|
||||
|
||||
data.domains = selectedDomains;
|
||||
|
||||
// Get notification emails
|
||||
const notificationEmails = document.getElementById('notificationEmails').value.split(',')
|
||||
.map(email => email.trim())
|
||||
.filter(email => email.length > 0);
|
||||
|
||||
data.notification_emails = notificationEmails;
|
||||
|
||||
// Convert checkbox values to booleans explicitly
|
||||
data.email_notifications = document.getElementById('emailNotifications').checked;
|
||||
data.notify_on_threats = document.getElementById('notifyOnThreats').checked;
|
||||
data.notify_on_completion = document.getElementById('notifyOnCompletion').checked;
|
||||
data.notify_on_failure = document.getElementById('notifyOnFailure').checked;
|
||||
|
||||
fetch('/aiscanner/scheduled-scans/', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'X-CSRFToken': getCookie('csrftoken')
|
||||
},
|
||||
body: JSON.stringify(data)
|
||||
})
|
||||
.then(response => response.json())
|
||||
.then(data => {
|
||||
if (data.success) {
|
||||
$('#scheduleModal').modal('hide');
|
||||
loadScheduledScans();
|
||||
document.getElementById('scheduleForm').reset();
|
||||
} else {
|
||||
alert('Error: ' + data.error);
|
||||
}
|
||||
})
|
||||
.catch(error => {
|
||||
console.error('Error:', error);
|
||||
alert('Failed to save scheduled scan. Please try again.');
|
||||
});
|
||||
}
|
||||
|
||||
function editScheduledScan(id) {
|
||||
// Fetch scheduled scan details and populate form
|
||||
fetch(`/aiscanner/scheduled-scans/${id}/`)
|
||||
.then(response => response.json())
|
||||
.then(data => {
|
||||
if (data.success) {
|
||||
populateScheduleForm(data.scheduled_scan);
|
||||
$('#scheduleModal').modal('show');
|
||||
} else {
|
||||
alert('Error: ' + data.error);
|
||||
}
|
||||
})
|
||||
.catch(error => {
|
||||
console.error('Error:', error);
|
||||
alert('Failed to load scheduled scan details.');
|
||||
});
|
||||
}
|
||||
|
||||
function populateScheduleForm(scan) {
|
||||
document.getElementById('scheduleId').value = scan.id;
|
||||
document.getElementById('scheduleName').value = scan.name;
|
||||
document.getElementById('frequency').value = scan.frequency;
|
||||
document.getElementById('scanType').value = scan.scan_type;
|
||||
document.getElementById('timeOfDay').value = scan.time_of_day;
|
||||
|
||||
if (scan.day_of_week !== null) {
|
||||
document.getElementById('dayOfWeek').value = scan.day_of_week;
|
||||
}
|
||||
|
||||
if (scan.day_of_month !== null) {
|
||||
document.getElementById('dayOfMonth').value = scan.day_of_month;
|
||||
}
|
||||
|
||||
// Select domains
|
||||
scan.domains.forEach(domain => {
|
||||
const checkbox = document.querySelector(`input[name="domains"][value="${domain}"]`);
|
||||
if (checkbox) {
|
||||
checkbox.checked = true;
|
||||
}
|
||||
});
|
||||
|
||||
// Set notification settings
|
||||
document.getElementById('emailNotifications').checked = scan.email_notifications;
|
||||
document.getElementById('notifyOnThreats').checked = scan.notify_on_threats;
|
||||
document.getElementById('notifyOnCompletion').checked = scan.notify_on_completion;
|
||||
document.getElementById('notifyOnFailure').checked = scan.notify_on_failure;
|
||||
|
||||
if (scan.notification_emails && scan.notification_emails.length > 0) {
|
||||
document.getElementById('notificationEmails').value = scan.notification_emails.join(', ');
|
||||
}
|
||||
|
||||
updateFrequencyOptions();
|
||||
}
|
||||
|
||||
function toggleScheduledScan(id) {
|
||||
fetch(`/aiscanner/scheduled-scans/${id}/toggle/`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'X-CSRFToken': getCookie('csrftoken')
|
||||
}
|
||||
})
|
||||
.then(response => response.json())
|
||||
.then(data => {
|
||||
if (data.success) {
|
||||
loadScheduledScans();
|
||||
} else {
|
||||
alert('Error: ' + data.error);
|
||||
}
|
||||
})
|
||||
.catch(error => {
|
||||
console.error('Error:', error);
|
||||
alert('Failed to toggle scheduled scan.');
|
||||
});
|
||||
}
|
||||
|
||||
function deleteScheduledScan(id) {
|
||||
if (confirm('Are you sure you want to delete this scheduled scan?')) {
|
||||
fetch(`/aiscanner/scheduled-scans/${id}/`, {
|
||||
method: 'DELETE',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'X-CSRFToken': getCookie('csrftoken')
|
||||
}
|
||||
})
|
||||
.then(response => response.json())
|
||||
.then(data => {
|
||||
if (data.success) {
|
||||
loadScheduledScans();
|
||||
} else {
|
||||
alert('Error: ' + data.error);
|
||||
}
|
||||
})
|
||||
.catch(error => {
|
||||
console.error('Error:', error);
|
||||
alert('Failed to delete scheduled scan.');
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
function updateFrequencyOptions() {
|
||||
const frequency = document.getElementById('frequency').value;
|
||||
const dayOfWeekGroup = document.getElementById('dayOfWeekGroup');
|
||||
const dayOfMonthGroup = document.getElementById('dayOfMonthGroup');
|
||||
|
||||
// Hide all optional fields first
|
||||
dayOfWeekGroup.style.display = 'none';
|
||||
dayOfMonthGroup.style.display = 'none';
|
||||
|
||||
// Show relevant fields based on frequency
|
||||
if (frequency === 'weekly') {
|
||||
dayOfWeekGroup.style.display = 'block';
|
||||
} else if (frequency === 'monthly' || frequency === 'quarterly') {
|
||||
dayOfMonthGroup.style.display = 'block';
|
||||
}
|
||||
}
|
||||
|
||||
// Load scheduled scans when page loads
|
||||
document.addEventListener('DOMContentLoaded', function() {
|
||||
if (document.getElementById('scheduledScansContainer')) {
|
||||
loadScheduledScans();
|
||||
}
|
||||
});
|
||||
</script>
|
||||
|
||||
<!-- Scheduled Scan Modal -->
<div class="modal fade" id="scheduleModal" tabindex="-1" role="dialog" aria-labelledby="scheduleModalLabel" aria-hidden="true">
    <div class="modal-dialog modal-lg" role="document">
        <div class="modal-content">
            <div class="modal-header">
                <h5 class="modal-title" id="scheduleModalLabel">Schedule New Scan</h5>
                <button type="button" class="close" data-dismiss="modal" aria-label="Close">
                    <span aria-hidden="true">×</span>
                </button>
            </div>
            <form id="scheduleForm">
                <div class="modal-body">
                    <input type="hidden" id="scheduleId" name="id" value="">

                    <div class="form-group">
                        <label for="scheduleName">Schedule Name</label>
                        <input type="text" class="form-control" id="scheduleName" name="name" required>
                        <small class="form-text text-muted">Give this scheduled scan a descriptive name</small>
                    </div>

                    <div class="row">
                        <div class="col-md-6">
                            <div class="form-group">
                                <label for="frequency">Frequency</label>
                                <select class="form-control" id="frequency" name="frequency" onchange="updateFrequencyOptions()" required>
                                    <option value="daily">Daily</option>
                                    <option value="weekly" selected>Weekly</option>
                                    <option value="monthly">Monthly</option>
                                    <option value="quarterly">Quarterly</option>
                                </select>
                            </div>
                        </div>
                        <div class="col-md-6">
                            <div class="form-group">
                                <label for="scanType">Scan Type</label>
                                <select class="form-control" id="scanType" name="scan_type" required>
                                    <option value="full">Full Scan</option>
                                    <option value="quick">Quick Scan</option>
                                </select>
                            </div>
                        </div>
                    </div>

                    <div class="row">
                        <div class="col-md-4">
                            <div class="form-group">
                                <label for="timeOfDay">Time of Day (UTC)</label>
                                <input type="time" class="form-control" id="timeOfDay" name="time_of_day" value="02:00" required>
                                <small class="form-text text-muted">Time in UTC timezone</small>
                            </div>
                        </div>
                        <div class="col-md-4" id="dayOfWeekGroup" style="display: none;">
                            <div class="form-group">
                                <label for="dayOfWeek">Day of Week</label>
                                <select class="form-control" id="dayOfWeek" name="day_of_week">
                                    <option value="0">Monday</option>
                                    <option value="1">Tuesday</option>
                                    <option value="2">Wednesday</option>
                                    <option value="3">Thursday</option>
                                    <option value="4">Friday</option>
                                    <option value="5">Saturday</option>
                                    <option value="6" selected>Sunday</option>
                                </select>
                            </div>
                        </div>
                        <div class="col-md-4" id="dayOfMonthGroup" style="display: none;">
                            <div class="form-group">
                                <label for="dayOfMonth">Day of Month</label>
                                <input type="number" class="form-control" id="dayOfMonth" name="day_of_month" min="1" max="31" value="1">
                                <small class="form-text text-muted">1-31 (will adjust for shorter months)</small>
                            </div>
                        </div>
                    </div>

                    <div class="form-group">
                        <label>Websites to Scan</label>
                        <div class="checkbox-group" style="max-height: 200px; overflow-y: auto; border: 1px solid #ddd; padding: 10px; border-radius: 4px;">
                            {% for website in websites %}
                            <div class="form-check">
                                <input class="form-check-input" type="checkbox" name="domains" value="{{ website.domain }}" id="domain_{{ forloop.counter }}">
                                <label class="form-check-label" for="domain_{{ forloop.counter }}">
                                    {{ website.domain }}
                                </label>
                            </div>
                            {% endfor %}
                        </div>
                    </div>

                    <div class="form-group">
                        <div class="form-check">
                            <input class="form-check-input" type="checkbox" id="emailNotifications" name="email_notifications" checked>
                            <label class="form-check-label" for="emailNotifications">
                                Enable Email Notifications
                            </label>
                        </div>
                    </div>

                    <div class="form-group">
                        <label for="notificationEmails">Notification Email Addresses</label>
                        <input type="text" class="form-control" id="notificationEmails" name="notification_emails" placeholder="email1@example.com, email2@example.com">
                        <small class="form-text text-muted">Separate multiple emails with commas. Leave empty to use your account email.</small>
                    </div>

                    <div class="form-group">
                        <label>Notification Preferences</label>
                        <div class="form-check">
                            <input class="form-check-input" type="checkbox" id="notifyOnThreats" name="notify_on_threats" checked>
                            <label class="form-check-label" for="notifyOnThreats">
                                Notify when threats are found
                            </label>
                        </div>
                        <div class="form-check">
                            <input class="form-check-input" type="checkbox" id="notifyOnCompletion" name="notify_on_completion">
                            <label class="form-check-label" for="notifyOnCompletion">
                                Notify when scan completes successfully
                            </label>
                        </div>
                        <div class="form-check">
                            <input class="form-check-input" type="checkbox" id="notifyOnFailure" name="notify_on_failure" checked>
                            <label class="form-check-label" for="notifyOnFailure">
                                Notify when scan fails
                            </label>
                        </div>
                    </div>
                </div>
                <div class="modal-footer">
                    <button type="button" class="btn btn-secondary" data-dismiss="modal">Cancel</button>
                    <button type="button" class="btn btn-primary" onclick="saveScheduledScan()">Save Schedule</button>
                </div>
            </form>
        </div>
    </div>
</div>

<style>
    .scheduled-scans-grid {
        display: grid;
        grid-template-columns: repeat(auto-fill, minmax(350px, 1fr));
        gap: 20px;
    }

    .scheduled-scan-card {
        border: 1px solid #e8e9ff;
        border-radius: 8px;
        padding: 20px;
        background: white;
    }

    .scheduled-scan-header {
        display: flex;
        justify-content: space-between;
        align-items: center;
        margin-bottom: 15px;
    }

    .scheduled-scan-header h4 {
        margin: 0;
        color: #2f3640;
        font-size: 16px;
    }

    .scheduled-scan-info p {
        margin: 5px 0;
        color: #64748b;
        font-size: 14px;
    }

    .scheduled-scan-actions {
        display: flex;
        gap: 10px;
        margin-top: 15px;
        flex-wrap: wrap;
    }

    .checkbox-group {
        background: #f8f9fa;
    }

    .form-check {
        margin-bottom: 5px;
    }
</style>
{% endblock %}
@@ -1,5 +1,5 @@
from django.urls import path
from . import views, api
from . import views, api, scheduled_views

urlpatterns = [
    # Main AI Scanner pages
@@ -18,6 +18,12 @@ urlpatterns = [
    path('platform-monitor-url/<str:scan_id>/', views.getPlatformMonitorUrl, name='aiScannerPlatformMonitorUrl'),
    path('platform-status/<str:scan_id>/', views.getPlatformScanStatus, name='aiScannerPlatformStatus'),

    # Scheduled scans management
    path('scheduled-scans/', scheduled_views.scheduledScans, name='aiScannerScheduledScans'),
    path('scheduled-scans/<int:scan_id>/', scheduled_views.scheduledScanDetail, name='aiScannerScheduledScanDetail'),
    path('scheduled-scans/<int:scan_id>/toggle/', scheduled_views.toggleScheduledScan, name='aiScannerToggleScheduledScan'),
    path('scheduled-scans/<int:scan_id>/executions/', scheduled_views.scheduledScanExecutions, name='aiScannerScheduledScanExecutions'),

    # Note: RESTful API endpoints are in /api/urls.py for external access

    # Legacy API endpoints (for backward compatibility)
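
For reference, a minimal sketch of how the new scheduled-scans routes could be exercised from the command line once they are deployed. The host, port, cookie file, CSRF token variable and scan id below are placeholders, not part of this change; the panel is typically served on port 8090 and these views expect an authenticated session plus the X-CSRFToken header, mirroring the fetch() calls in the template above:

    # Toggle scheduled scan 5 on or off
    curl -X POST "https://<server>:8090/aiscanner/scheduled-scans/5/toggle/" \
         -b cookies.txt \
         -H 'Content-Type: application/json' \
         -H "X-CSRFToken: $CSRF_TOKEN"

    # Delete scheduled scan 5
    curl -X DELETE "https://<server>:8090/aiscanner/scheduled-scans/5/" \
         -b cookies.txt \
         -H 'Content-Type: application/json' \
         -H "X-CSRFToken: $CSRF_TOKEN"
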
cyberpanel.sh
@@ -106,6 +106,148 @@ Debug_Log2 "Starting installation..,1"
}

# Helper Functions for Package Management
install_package() {
    local package="$1"
    case "$Server_OS" in
        "CentOS"|"openEuler")
            if [[ "$Server_OS_Version" -ge 8 ]]; then
                dnf install -y "$package"
            else
                yum install -y "$package"
            fi
            ;;
        "Ubuntu")
            DEBIAN_FRONTEND=noninteractive apt install -y "$package"
            ;;
    esac
}

# Helper Function for Service Management
manage_service() {
    local service="$1"
    local action="$2"
    systemctl "$action" "$service"
}

# Helper Function for Development Tools Installation
install_dev_tools() {
    case "$Server_OS" in
        "CentOS"|"openEuler")
            yum groupinstall "Development Tools" -y
            yum install autoconf automake zlib-devel openssl-devel expat-devel pcre-devel libmemcached-devel cyrus-sasl* -y
            ;;
        "Ubuntu")
            DEBIAN_FRONTEND=noninteractive apt install build-essential zlib1g-dev libexpat1-dev openssl libssl-dev libsasl2-dev libpcre3-dev git -y
            ;;
    esac
}

# Helper Function for PHP Package Installation
install_php_packages() {
    local php_extension="$1"
    case "$Server_OS" in
        "CentOS"|"openEuler")
            install_package "lsphp??-${php_extension} lsphp??-pecl-${php_extension}"
            ;;
        "Ubuntu")
            install_package "lsphp*-${php_extension}"
            ;;
    esac
}

# Helper Function for configuring memcached
configure_memcached() {
    if [[ "$Server_OS" = "CentOS" ]] || [[ "$Server_OS" = "openEuler" ]]; then
        sed -i 's|OPTIONS=""|OPTIONS="-l 127.0.0.1 -U 0"|g' /etc/sysconfig/memcached
    fi
}

# Helper Function for EPEL repository setup
setup_epel_repo() {
    case "$Server_OS_Version" in
        "7")
            rpm --import https://cyberpanel.sh/dl.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-7
            yum install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm
            Check_Return "yum repo" "no_exit"
            ;;
        "8")
            rpm --import https://cyberpanel.sh/dl.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-8
            yum install -y https://cyberpanel.sh/dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm
            Check_Return "yum repo" "no_exit"
            ;;
        "9")
            yum install -y https://cyberpanel.sh/dl.fedoraproject.org/pub/epel/epel-release-latest-9.noarch.rpm
            Check_Return "yum repo" "no_exit"
            ;;
    esac
}

# Helper Function for MariaDB repository setup
setup_mariadb_repo() {
    if [[ "$Server_OS_Version" = "7" ]]; then
        cat <<EOF >/etc/yum.repos.d/MariaDB.repo
# MariaDB 10.4 CentOS repository list - created 2021-08-06 02:01 UTC
# http://downloads.mariadb.org/mariadb/repositories/
[mariadb]
name = MariaDB
baseurl = http://yum.mariadb.org/10.4/centos7-amd64
gpgkey=https://yum.mariadb.org/RPM-GPG-KEY-MariaDB
gpgcheck=1
EOF
    elif [[ "$Server_OS_Version" = "8" ]]; then
        cat <<EOF >/etc/yum.repos.d/MariaDB.repo
# MariaDB 10.11 RHEL8 repository list
# http://downloads.mariadb.org/mariadb/repositories/
[mariadb]
name = MariaDB
baseurl = http://yum.mariadb.org/10.11/rhel8-amd64
module_hotfixes=1
gpgkey=https://yum.mariadb.org/RPM-GPG-KEY-MariaDB
gpgcheck=1
EOF
    elif [[ "$Server_OS_Version" = "9" ]] && uname -m | grep -q 'x86_64'; then
        cat <<EOF >/etc/yum.repos.d/MariaDB.repo
# MariaDB 10.11 CentOS repository list - created 2021-08-06 02:01 UTC
# http://downloads.mariadb.org/mariadb/repositories/
[mariadb]
name = MariaDB
baseurl = http://yum.mariadb.org/10.11/rhel9-amd64/
gpgkey=https://yum.mariadb.org/RPM-GPG-KEY-MariaDB
enabled=1
gpgcheck=1
EOF
    fi
}

# Helper Function for PHP timezone configuration
configure_php_timezone() {
    local php_version="$1"
    local php_ini_path=$(find "$php_version" -name php.ini)

    # Common configuration
    "${php_version}/bin/phpize"
    ./configure --with-php-config="${php_version}/bin/php-config"
    make
    make install

    # OS-specific configuration
    if [[ "$Server_OS" = "CentOS" ]] || [[ "$Server_OS" = "openEuler" ]]; then
        if [[ ! -d "${php_version}/tmp" ]]; then
            mkdir "${php_version}/tmp"
        fi
        "${php_version}/bin/pecl" channel-update pecl.php.net
        "${php_version}/bin/pear" config-set temp_dir "${php_version}/tmp"
        echo "extension=timezonedb.so" > "${php_version}/etc/php.d/20-timezone.ini"
    else
        echo "extension=timezonedb.so" > "/usr/local/lsws/${php_version: 16:7}/etc/php/${php_version: 21:1}.${php_version: 22:1}/mods-available/20-timezone.ini"
    fi

    make clean
    sed -i 's|expose_php = On|expose_php = Off|g' "$php_ini_path"
    sed -i 's|mail.add_x_header = On|mail.add_x_header = Off|g' "$php_ini_path"
}

Debug_Log() {
    echo -e "\n${1}=${2}\n" >> "/var/log/cyberpanel_debug_$(date +"%Y-%m-%d")_${Random_Log_Name}.log"
}
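
A quick illustration of how these helpers are meant to compose; this sketch is not part of cyberpanel.sh itself, it assumes Server_OS and Server_OS_Version have already been detected earlier in the script, and the package and service names are examples only:

    install_package "wget curl"            # resolves to dnf/yum/apt based on $Server_OS
    install_php_packages "imagick"         # lsphp??-imagick lsphp??-pecl-imagick on CentOS/openEuler, lsphp*-imagick on Ubuntu
    manage_service "memcached" "enable"    # thin wrapper over: systemctl enable memcached
    manage_service "memcached" "start"
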
@@ -178,7 +320,15 @@ do
        echo "command $1 failed for 50 times, exit..."
        exit 2
    else
        $1 && break || echo -e "\n$1 has failed for $i times\nWait for 3 seconds and try again...\n"; sleep 3;
        $1 && break || {
            echo -e "\n$1 has failed for $i times\nWait and try again...\n"
            # Exponential backoff: 1s, 2s, 4s, 8s, then cap at 10s
            if [[ $i -le 4 ]]; then
                sleep $((2**($i-1)))
            else
                sleep 10
            fi
        }
    fi
done
}
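
The delays produced by the new backoff branch work out to 1s, 2s, 4s, 8s and then a flat 10s per attempt. A standalone sketch of just that schedule, with an illustrative loop bound and variable names, can be checked in isolation:

    for i in {1..6}; do
        if [[ $i -le 4 ]]; then
            delay=$((2**(i-1)))
        else
            delay=10
        fi
        echo "attempt $i -> sleep ${delay}s"
    done
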
@@ -359,30 +509,16 @@ fi
}

Check_Process() {
    if systemctl is-active --quiet httpd; then
        systemctl disable httpd
        systemctl stop httpd
        systemctl mask httpd
        echo -e "\nhttpd process detected, disabling...\n"
    fi
    if systemctl is-active --quiet apache2; then
        systemctl disable apache2
        systemctl stop apache2
        systemctl mask apache2
        echo -e "\napache2 process detected, disabling...\n"
    fi
    if systemctl is-active --quiet named; then
        systemctl stop named
        systemctl disable named
        systemctl mask named
        echo -e "\nnamed process detected, disabling...\n"
    fi
    if systemctl is-active --quiet exim; then
        systemctl stop exim
        systemctl disable exim
        systemctl mask exim
        echo -e "\nexim process detected, disabling...\n"
    fi
    local services=("httpd" "apache2" "named" "exim")

    for service in "${services[@]}"; do
        if systemctl is-active --quiet "$service"; then
            manage_service "$service" "stop"
            manage_service "$service" "disable"
            manage_service "$service" "mask"
            echo -e "\n$service process detected, disabling...\n"
        fi
    done
}
|
||||
Check_Provider() {
|
||||
|
|
@ -838,9 +974,14 @@ if [[ $Server_OS = "CentOS" ]] ; then
|
|||
yum autoremove -y epel-release
|
||||
rm -f /etc/yum.repos.d/epel.repo
|
||||
rm -f /etc/yum.repos.d/epel.repo.rpmsave
|
||||
|
||||
# Setup EPEL repository based on version
|
||||
setup_epel_repo
|
||||
|
||||
# Setup MariaDB repository
|
||||
setup_mariadb_repo
|
||||
|
||||
if [[ "$Server_OS_Version" = "9" ]]; then
|
||||
|
||||
# Check if architecture is aarch64
|
||||
if uname -m | grep -q 'aarch64' ; then
|
||||
# Run the following commands if architecture is aarch64
|
||||
|
|
@ -860,34 +1001,12 @@ if [[ $Server_OS = "CentOS" ]] ; then
|
|||
dnf config-manager --set-enabled crb
|
||||
fi
|
||||
|
||||
yum install -y https://cyberpanel.sh/dl.fedoraproject.org/pub/epel/epel-release-latest-9.noarch.rpm
|
||||
Check_Return "yum repo" "no_exit"
|
||||
yum install -y https://rpms.remirepo.net/enterprise/remi-release-9.rpm
|
||||
Check_Return "yum repo" "no_exit"
|
||||
#!/bin/bash
|
||||
|
||||
# Check if architecture is x86_64
|
||||
if uname -m | grep -q 'x86_64' ; then
|
||||
# Create the MariaDB repository configuration file for x86_64 architecture
|
||||
cat <<EOF >/etc/yum.repos.d/MariaDB.repo
|
||||
# MariaDB 10.11 CentOS repository list - created 2021-08-06 02:01 UTC
|
||||
# http://downloads.mariadb.org/mariadb/repositories/
|
||||
[mariadb]
|
||||
name = MariaDB
|
||||
baseurl = http://yum.mariadb.org/10.11/rhel9-amd64/
|
||||
gpgkey=https://yum.mariadb.org/RPM-GPG-KEY-MariaDB
|
||||
enabled=1
|
||||
gpgcheck=1
|
||||
EOF
|
||||
echo "MariaDB repository file created for x86_64 architecture."
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ "$Server_OS_Version" = "8" ]]; then
|
||||
rpm --import https://cyberpanel.sh/www.centos.org/keys/RPM-GPG-KEY-CentOS-Official
|
||||
rpm --import https://cyberpanel.sh/dl.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-8
|
||||
yum install -y https://cyberpanel.sh/dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm
|
||||
Check_Return "yum repo" "no_exit"
|
||||
|
||||
sed -i 's/mirrorlist/#mirrorlist/g' /etc/yum.repos.d/CentOS-* > /dev/null 2>&1
|
||||
sed -i 's|#baseurl=http://mirror.centos.org|baseurl=http://vault.centos.org|g' /etc/yum.repos.d/CentOS-* > /dev/null 2>&1
|
||||
|
|
@ -1042,8 +1161,8 @@ for i in {1..50} ;
|
|||
break
|
||||
else
|
||||
echo -e "\n Requirement list has failed to download for $i times..."
|
||||
echo -e "Wait for 30 seconds and try again...\n"
|
||||
sleep 30
|
||||
echo -e "Wait for 5 seconds and try again...\n"
|
||||
sleep 5
|
||||
fi
|
||||
done
|
||||
#special made function for Gitee.com , for whatever reason , sometimes it fails to download this file
|
||||
|
|
@ -1054,6 +1173,8 @@ Pre_Install_Required_Components() {
|
|||
Debug_Log2 "Installing necessary components..,3"
|
||||
|
||||
if [[ "$Server_OS" = "CentOS" ]] || [[ "$Server_OS" = "openEuler" ]] ; then
|
||||
# System-wide update - consider making this optional for faster installs
|
||||
# Could add a --skip-system-update flag to bypass this
|
||||
yum update -y
|
||||
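# A minimal sketch of the optional-update idea mentioned in the comment above;
# the SKIP_SYSTEM_UPDATE variable (or a --skip-system-update flag feeding it)
# is hypothetical and not an existing cyberpanel.sh option:
#   if [[ "$SKIP_SYSTEM_UPDATE" != "1" ]]; then
#       yum update -y
#   fi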
if [[ "$Server_OS_Version" = "7" ]] ; then
|
||||
yum install -y wget strace net-tools curl which bc telnet htop libevent-devel gcc libattr-devel xz-devel gpgme-devel curl-devel git socat openssl-devel MariaDB-shared mariadb-devel yum-utils python36u python36u-pip python36u-devel zip unzip bind-utils
|
||||
|
|
@ -1061,9 +1182,7 @@ if [[ "$Server_OS" = "CentOS" ]] || [[ "$Server_OS" = "openEuler" ]] ; then
|
|||
yum -y groupinstall development
|
||||
Check_Return
|
||||
elif [[ "$Server_OS_Version" = "8" ]] ; then
|
||||
dnf install -y libnsl zip wget strace net-tools curl which bc telnet htop libevent-devel gcc libattr-devel xz-devel mariadb-devel curl-devel git platform-python-devel tar socat python3 zip unzip bind-utils
|
||||
Check_Return
|
||||
dnf install -y gpgme-devel
|
||||
dnf install -y libnsl zip wget strace net-tools curl which bc telnet htop libevent-devel gcc libattr-devel xz-devel mariadb-devel curl-devel git platform-python-devel tar socat python3 zip unzip bind-utils gpgme-devel
|
||||
Check_Return
|
||||
elif [[ "$Server_OS_Version" = "9" ]] ; then
|
||||
|
||||
|
|
@ -1073,14 +1192,15 @@ if [[ "$Server_OS" = "CentOS" ]] || [[ "$Server_OS" = "openEuler" ]] ; then
|
|||
dnf install -y libnsl zip wget strace net-tools curl which bc telnet htop libevent-devel gcc libattr-devel xz-devel MariaDB-server MariaDB-client MariaDB-devel curl-devel git platform-python-devel tar socat python3 zip unzip bind-utils gpgme-devel openssl-devel
|
||||
Check_Return
|
||||
elif [[ "$Server_OS_Version" = "20" ]] || [[ "$Server_OS_Version" = "22" ]] ; then
|
||||
dnf install -y libnsl zip wget strace net-tools curl which bc telnet htop libevent-devel gcc libattr-devel xz-devel mariadb-devel curl-devel git python3-devel tar socat python3 zip unzip bind-utils
|
||||
Check_Return
|
||||
dnf install -y gpgme-devel
|
||||
dnf install -y libnsl zip wget strace net-tools curl which bc telnet htop libevent-devel gcc libattr-devel xz-devel mariadb-devel curl-devel git python3-devel tar socat python3 zip unzip bind-utils gpgme-devel
|
||||
Check_Return
|
||||
fi
|
||||
ln -s /usr/bin/pip3 /usr/bin/pip
|
||||
else
|
||||
# Update package lists (required for installations)
|
||||
apt update -y
|
||||
# System-wide upgrade - consider making this optional for faster installs
|
||||
# Could add a --skip-system-upgrade flag to bypass this
|
||||
DEBIAN_FRONTEND=noninteractive apt upgrade -y -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold"
|
||||
if [[ "$Server_Provider" = "Alibaba Cloud" ]] ; then
|
||||
apt install -y --allow-downgrades libgnutls30=3.6.13-2ubuntu1.3
|
||||
|
|
@ -1094,19 +1214,11 @@ else
|
|||
Check_Return
|
||||
fi
|
||||
|
||||
DEBIAN_FRONTEND=noninteractive apt install -y python3-pip
|
||||
DEBIAN_FRONTEND=noninteractive apt install -y python3-pip build-essential libssl-dev libffi-dev python3-dev python3-venv cron inetutils-ping
|
||||
Check_Return
|
||||
|
||||
ln -s /usr/bin/pip3 /usr/bin/pip3.6
|
||||
ln -s /usr/bin/pip3.6 /usr/bin/pip
|
||||
|
||||
DEBIAN_FRONTEND=noninteractive apt install -y build-essential libssl-dev libffi-dev python3-dev
|
||||
Check_Return
|
||||
DEBIAN_FRONTEND=noninteractive apt install -y python3-venv
|
||||
Check_Return
|
||||
|
||||
DEBIAN_FRONTEND=noninteractive apt install -y cron inetutils-ping
|
||||
Check_Return
|
||||
# Oracle Ubuntu ARM misses ping and cron
|
||||
|
||||
DEBIAN_FRONTEND=noninteractive apt install -y locales
|
||||
|
|
@ -1145,10 +1257,6 @@ Debug_Log2 "Installing requirments..,3"
|
|||
Retry_Command "pip install --default-timeout=3600 -r /usr/local/requirments.txt"
|
||||
Check_Return "requirments" "no_exit"
|
||||
|
||||
if [[ "$Server_OS" = "Ubuntu" ]] && [[ "$Server_OS_Version" = "22" ]] ; then
|
||||
cp /usr/bin/python3.10 /usr/local/CyberCP/bin/python3
|
||||
fi
|
||||
|
||||
rm -rf cyberpanel
|
||||
echo -e "\nFetching files from ${Git_Clone_URL}...\n"
|
||||
|
||||
|
|
@ -1311,8 +1419,14 @@ if ! grep -q "pid_max" /etc/rc.local 2>/dev/null ; then
|
|||
fi
|
||||
|
||||
systemctl restart systemd-networkd >/dev/null 2>&1
|
||||
sleep 3
|
||||
#take a break ,or installer will break
|
||||
# Wait for network to come up, but check more frequently
|
||||
for j in {1..6}; do
|
||||
sleep 0.5
|
||||
# Check if network is ready by trying to resolve DNS
|
||||
if ping -c 1 -W 1 8.8.8.8 >/dev/null 2>&1 || nslookup cyberpanel.sh >/dev/null 2>&1; then
|
||||
break
|
||||
fi
|
||||
done
|
||||
|
||||
# Check Connectivity
|
||||
if ping -q -c 1 -W 1 cyberpanel.sh >/dev/null; then
|
||||
|
|
@ -1326,14 +1440,22 @@ if ! grep -q "pid_max" /etc/rc.local 2>/dev/null ; then
|
|||
systemctl restart systemd-networkd >/dev/null 2>&1
|
||||
echo -e "\nReturns the nameservers settings to default..\n"
|
||||
echo -e "\nContinue installation..\n"
|
||||
sleep 3
|
||||
# Brief pause for network stabilization
|
||||
sleep 1
|
||||
fi
|
||||
|
||||
cp /etc/resolv.conf /etc/resolv.conf-tmp
|
||||
|
||||
# Find the line containing nameserver 8.8.8.8 pattern
|
||||
Line1="$(grep -n "f.write('nameserver 8.8.8.8')" installCyberPanel.py | head -n 1 | cut -d: -f1)"
|
||||
sed -i "${Line1}i\ \ \ \ \ \ \ \ \ \ \ \ \ \ \ \ subprocess.call\(command, shell=True)" installCyberPanel.py
|
||||
sed -i "${Line1}i\ \ \ \ \ \ \ \ \ \ \ \ \ \ \ \ command = 'cat /etc/resolv.conf-tmp > /etc/resolv.conf'" installCyberPanel.py
|
||||
|
||||
# Only modify the file if the pattern was found
|
||||
if [[ -n "$Line1" ]] && [[ "$Line1" =~ ^[0-9]+$ ]]; then
|
||||
sed -i "${Line1}i\ \ \ \ \ \ \ \ \ \ \ \ \ \ \ \ subprocess.call\(command, shell=True)" installCyberPanel.py
|
||||
sed -i "${Line1}i\ \ \ \ \ \ \ \ \ \ \ \ \ \ \ \ command = 'cat /etc/resolv.conf-tmp > /etc/resolv.conf'" installCyberPanel.py
|
||||
else
|
||||
echo "Warning: Could not find 'nameserver 8.8.8.8' pattern in installCyberPanel.py - skipping resolv.conf modification"
|
||||
fi
|
||||
}
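
The guarded insertion above generalizes to any marker-based patch of a file: find the line number first, confirm it is non-empty and numeric, and only then run sed. A small self-contained sketch of the same idiom, where the file name and marker are illustrative only:

    line_no="$(grep -n 'MARKER' /tmp/example.py | head -n 1 | cut -d: -f1)"
    if [[ -n "$line_no" ]] && [[ "$line_no" =~ ^[0-9]+$ ]]; then
        sed -i "${line_no}i\\    # inserted before the marker" /tmp/example.py
    else
        echo "Warning: marker not found, skipping edit"
    fi
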
|
||||
|
||||
License_Validation() {
|
||||
|
|
@ -1352,7 +1474,8 @@ tar xzvf "lsws-$LSWS_Stable_Version-ent-x86_64-linux.tar.gz" >/dev/null
|
|||
cd "/root/cyberpanel-tmp/lsws-$LSWS_Stable_Version/conf" || exit
|
||||
if [[ "$License_Key" = "Trial" ]]; then
|
||||
Retry_Command "wget -q https://cyberpanel.sh/license.litespeedtech.com/reseller/trial.key"
|
||||
sed -i "s|writeSerial = open('lsws-6.0/serial.no', 'w')|command = 'wget -q --output-document=./lsws-$LSWS_Stable_Version/trial.key https://cyberpanel.sh/license.litespeedtech.com/reseller/trial.key'|g" "$Current_Dir/installCyberPanel.py"
|
||||
# Update the serial number handling to use trial key
|
||||
sed -i "s|writeSerial = open('lsws-[0-9.]\+/serial.no', 'w')|command = 'wget -q --output-document=./lsws-$LSWS_Stable_Version/trial.key https://cyberpanel.sh/license.litespeedtech.com/reseller/trial.key'|g" "$Current_Dir/installCyberPanel.py"
|
||||
sed -i 's|writeSerial.writelines(self.serial)|subprocess.call(command, shell=True)|g' "$Current_Dir/installCyberPanel.py"
|
||||
sed -i 's|writeSerial.close()||g' "$Current_Dir/installCyberPanel.py"
|
||||
else
|
||||
|
|
@ -1526,22 +1649,8 @@ fi
|
|||
}
|
||||
|
||||
Post_Install_Addon_Mecached_LSMCD() {
|
||||
if [[ $Server_OS = "CentOS" ]] || [[ $Server_OS = "openEuler" ]]; then
|
||||
yum groupinstall "Development Tools" -y
|
||||
yum install autoconf automake zlib-devel openssl-devel expat-devel pcre-devel libmemcached-devel cyrus-sasl* -y
|
||||
wget -O lsmcd-master.zip https://cyberpanel.sh/codeload.github.com/litespeedtech/lsmcd/zip/master
|
||||
unzip lsmcd-master.zip
|
||||
Current_Dir=$(pwd)
|
||||
cd "$Current_Dir/lsmcd-master" || exit
|
||||
./fixtimestamp.sh
|
||||
./configure CFLAGS=" -O3" CXXFLAGS=" -O3"
|
||||
make
|
||||
make install
|
||||
systemctl enable lsmcd
|
||||
systemctl start lsmcd
|
||||
cd "$Current_Dir" || exit
|
||||
else
|
||||
DEBIAN_FRONTEND=noninteractive apt install build-essential zlib1g-dev libexpat1-dev openssl libssl-dev libsasl2-dev libpcre3-dev git -y
|
||||
install_dev_tools
|
||||
|
||||
wget -O lsmcd-master.zip https://cyberpanel.sh/codeload.github.com/litespeedtech/lsmcd/zip/master
|
||||
unzip lsmcd-master.zip
|
||||
Current_Dir=$(pwd)
|
||||
|
|
@ -1551,98 +1660,75 @@ else
|
|||
make
|
||||
make install
|
||||
cd "$Current_Dir" || exit
|
||||
systemctl enable lsmcd
|
||||
systemctl start lsmcd
|
||||
fi
|
||||
|
||||
manage_service "lsmcd" "enable"
|
||||
manage_service "lsmcd" "start"
|
||||
}
|
||||
|
||||
Post_Install_Addon_Memcached() {
|
||||
if [[ $Server_OS = "CentOS" ]]; then
|
||||
yum install -y lsphp??-memcached lsphp??-pecl-memcached
|
||||
if [[ $Total_RAM -eq "2048" ]] || [[ $Total_RAM -gt "2048" ]]; then
|
||||
install_php_packages "memcached"
|
||||
|
||||
if [[ $Total_RAM -ge 2048 ]]; then
|
||||
Post_Install_Addon_Mecached_LSMCD
|
||||
else
|
||||
yum install -y memcached
|
||||
sed -i 's|OPTIONS=""|OPTIONS="-l 127.0.0.1 -U 0"|g' /etc/sysconfig/memcached
|
||||
#turn off UDP and bind to 127.0.0.1 only
|
||||
systemctl enable memcached
|
||||
systemctl start memcached
|
||||
install_package "memcached"
|
||||
configure_memcached
|
||||
manage_service "memcached" "enable"
|
||||
manage_service "memcached" "start"
|
||||
fi
|
||||
fi
|
||||
if [[ $Server_OS = "Ubuntu" ]]; then
|
||||
DEBIAN_FRONTEND=noninteractive apt install -y "lsphp*-memcached"
|
||||
|
||||
if [[ "$Total_RAM" -eq "2048" ]] || [[ "$Total_RAM" -gt "2048" ]]; then
|
||||
Post_Install_Addon_Mecached_LSMCD
|
||||
else
|
||||
DEBIAN_FRONTEND=noninteractive apt install -y memcached
|
||||
systemctl enable memcached
|
||||
systemctl start memcached
|
||||
if pgrep "lsmcd" ; then
|
||||
echo -e "\n\nLiteSpeed Memcached installed and running..."
|
||||
fi
|
||||
fi
|
||||
if [[ $Server_OS = "openEuler" ]]; then
|
||||
yum install -y lsphp??-memcached lsphp??-pecl-memcached
|
||||
if [[ $Total_RAM -eq "2048" ]] || [[ $Total_RAM -gt "2048" ]]; then
|
||||
Post_Install_Addon_Mecached_LSMCD
|
||||
else
|
||||
yum install -y memcached
|
||||
sed -i 's|OPTIONS=""|OPTIONS="-l 127.0.0.1 -U 0"|g' /etc/sysconfig/memcached
|
||||
#turn off UDP and bind to 127.0.0.1 only
|
||||
systemctl enable memcached
|
||||
systemctl start memcached
|
||||
|
||||
if pgrep "memcached" ; then
|
||||
echo -e "\n\nMemcached installed and running..."
|
||||
fi
|
||||
fi
|
||||
|
||||
if pgrep "lsmcd" ; then
|
||||
echo -e "\n\nLiteSpeed Memcached installed and running..."
|
||||
fi
|
||||
|
||||
if pgrep "memcached" ; then
|
||||
echo -e "\n\nMemcached installed and running..."
|
||||
fi
|
||||
}
|
||||
|
||||
Post_Install_Addon_Redis() {
|
||||
if [[ "$Server_OS" = "CentOS" ]]; then
|
||||
if [[ "$Server_OS_Version" = "8" || "$Server_OS_Version" = "9" ]]; then
|
||||
yum install -y lsphp??-redis redis
|
||||
else
|
||||
yum -y install http://rpms.remirepo.net/enterprise/remi-release-7.rpm
|
||||
yum-config-manager --disable remi
|
||||
yum-config-manager --disable remi-safe
|
||||
yum -y --enablerepo=remi install redis
|
||||
# Install PHP Redis extension
|
||||
install_php_packages "redis"
|
||||
|
||||
# Install Redis server
|
||||
if [[ "$Server_OS" = "CentOS" ]]; then
|
||||
if [[ "$Server_OS_Version" = "8" || "$Server_OS_Version" = "9" ]]; then
|
||||
install_package "redis"
|
||||
else
|
||||
yum -y install http://rpms.remirepo.net/enterprise/remi-release-7.rpm
|
||||
yum-config-manager --disable remi
|
||||
yum-config-manager --disable remi-safe
|
||||
yum -y --enablerepo=remi install redis
|
||||
fi
|
||||
elif [[ "$Server_OS" = "Ubuntu" ]]; then
|
||||
install_package "redis"
|
||||
elif [[ "$Server_OS" = "openEuler" ]]; then
|
||||
install_package "redis6"
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ $Server_OS = "Ubuntu" ]]; then
|
||||
DEBIAN_FRONTEND=noninteractive apt install -y "lsphp*-redis" redis
|
||||
fi
|
||||
# Configure Redis for IPv6
|
||||
if ifconfig -a | grep inet6; then
|
||||
echo -e "\nIPv6 detected...\n"
|
||||
else
|
||||
sed -i 's|bind 127.0.0.1 ::1|bind 127.0.0.1|g' /etc/redis/redis.conf
|
||||
echo -e "\n no IPv6 detected..."
|
||||
fi
|
||||
|
||||
if ifconfig -a | grep inet6; then
|
||||
echo -e "\nIPv6 detected...\n"
|
||||
else
|
||||
sed -i 's|bind 127.0.0.1 ::1|bind 127.0.0.1|g' /etc/redis/redis.conf
|
||||
echo -e "\n no IPv6 detected..."
|
||||
fi
|
||||
# Start Redis service
|
||||
if [[ $Server_OS = "Ubuntu" ]]; then
|
||||
manage_service "redis-server" "stop"
|
||||
rm -f /var/run/redis/redis-server.pid
|
||||
manage_service "redis-server" "enable"
|
||||
manage_service "redis-server" "start"
|
||||
else
|
||||
manage_service "redis" "enable"
|
||||
manage_service "redis" "start"
|
||||
fi
|
||||
|
||||
if [[ $Server_OS = "Ubuntu" ]]; then
|
||||
systemctl stop redis-server
|
||||
rm -f /var/run/redis/redis-server.pid
|
||||
systemctl enable redis-server
|
||||
systemctl start redis-server
|
||||
else
|
||||
systemctl enable redis
|
||||
systemctl start redis
|
||||
fi
|
||||
|
||||
if [[ "$Server_OS" = "openEuler" ]]; then
|
||||
yum install -y lsphp??-redis redis6
|
||||
fi
|
||||
|
||||
if pgrep "redis" ; then
|
||||
echo -e "\n\nRedis installed and running..."
|
||||
touch /home/cyberpanel/redis
|
||||
fi
|
||||
if pgrep "redis" ; then
|
||||
echo -e "\n\nRedis installed and running..."
|
||||
touch /home/cyberpanel/redis
|
||||
fi
|
||||
}
|
||||
|
||||
Post_Install_PHP_Session_Setup() {
|
||||
|
|
@ -1663,44 +1749,19 @@ wget -O timezonedb.tgz https://cyberpanel.sh/pecl.php.net/get/timezonedb
|
|||
tar xzvf timezonedb.tgz
|
||||
cd timezonedb-* || exit
|
||||
|
||||
# Install required packages for building PHP extensions
|
||||
if [[ "$Server_OS" = "Ubuntu" ]] ; then
|
||||
DEBIAN_FRONTEND=noninteractive apt install libmagickwand-dev pkg-config build-essential -y
|
||||
DEBIAN_FRONTEND=noninteractive apt install -y lsphp*-dev
|
||||
install_package "libmagickwand-dev pkg-config build-essential lsphp*-dev"
|
||||
else
|
||||
install_package "lsphp??-mysqlnd lsphp??-devel make gcc glibc-devel libmemcached-devel zlib-devel"
|
||||
yum remove -y lsphp??-mysql
|
||||
yum install -y lsphp??-mysqlnd
|
||||
yum install -y lsphp??-devel make gcc glibc-devel libmemcached-devel zlib-devel
|
||||
fi
|
||||
|
||||
for PHP_Version in /usr/local/lsws/lsphp?? ;
|
||||
do
|
||||
PHP_INI_Path=$(find "$PHP_Version" -name php.ini)
|
||||
# Configure timezone extension for each PHP version
|
||||
for PHP_Version in /usr/local/lsws/lsphp?? ; do
|
||||
configure_php_timezone "$PHP_Version"
|
||||
done
|
||||
|
||||
if [[ "$Server_OS" = "CentOS" ]] || [[ "$Server_OS" = "openEuler" ]]; then
|
||||
if [[ ! -d "${PHP_Version}/tmp" ]]; then
|
||||
mkdir "${PHP_Version}/tmp"
|
||||
fi
|
||||
"${PHP_Version}"/bin/pecl channel-update pecl.php.net
|
||||
"${PHP_Version}"/bin/pear config-set temp_dir "${PHP_Version}/tmp"
|
||||
"${PHP_Version}"/bin/phpize
|
||||
./configure --with-php-config="${PHP_Version}"/bin/php-config
|
||||
make
|
||||
make install
|
||||
echo "extension=timezonedb.so" > "${PHP_Version}/etc/php.d/20-timezone.ini"
|
||||
make clean
|
||||
sed -i 's|expose_php = On|expose_php = Off|g' "$PHP_INI_Path"
|
||||
sed -i 's|mail.add_x_header = On|mail.add_x_header = Off|g' "$PHP_INI_Path"
|
||||
else
|
||||
"${PHP_Version}"/bin/phpize
|
||||
./configure --with-php-config="${PHP_Version}"/bin/php-config
|
||||
make
|
||||
make install
|
||||
echo "extension=timezonedb.so" > "/usr/local/lsws/${PHP_Version: 16:7}/etc/php/${PHP_Version: 21:1}.${PHP_Version: 22:1}/mods-available/20-timezone.ini"
|
||||
make clean
|
||||
sed -i 's|expose_php = On|expose_php = Off|g' "$PHP_INI_Path"
|
||||
sed -i 's|mail.add_x_header = On|mail.add_x_header = Off|g' "$PHP_INI_Path"
|
||||
fi
|
||||
done
|
||||
rm -rf /usr/local/lsws/cyberpanel-tmp
|
||||
cd "$Current_Dir" || exit
|
||||
Debug_Log2 "Installing timezoneDB...,95"
|
||||
|
|
|
|||
|
|
@ -459,10 +459,10 @@ elif [[ "$Server_OS" = "Ubuntu" ]] ; then
|
|||
export DEBIAN_FRONTEND=noninteractive ; apt-get -o Dpkg::Options::="--force-confold" upgrade -y
|
||||
|
||||
if [[ "$Server_OS_Version" = "22" ]] ; then
|
||||
DEBIAN_FRONTEND=noninteracitve apt install -y dnsutils net-tools htop telnet libcurl4-gnutls-dev libgnutls28-dev libgcrypt20-dev libattr1 libattr1-dev liblzma-dev libgpgme-dev libcurl4-gnutls-dev libssl-dev nghttp2 libnghttp2-dev idn2 libidn2-dev libidn2-0-dev librtmp-dev libpsl-dev nettle-dev libgnutls28-dev libldap2-dev libgssapi-krb5-2 libk5crypto3 libkrb5-dev libcomerr2 libldap2-dev virtualenv git socat vim unzip zip libmariadb-dev-compat libmariadb-dev
|
||||
DEBIAN_FRONTEND=noninteractive apt install -y dnsutils net-tools htop telnet libcurl4-gnutls-dev libgnutls28-dev libgcrypt20-dev libattr1 libattr1-dev liblzma-dev libgpgme-dev libcurl4-gnutls-dev libssl-dev nghttp2 libnghttp2-dev idn2 libidn2-dev libidn2-0-dev librtmp-dev libpsl-dev nettle-dev libgnutls28-dev libldap2-dev libgssapi-krb5-2 libk5crypto3 libkrb5-dev libcomerr2 libldap2-dev virtualenv git socat vim unzip zip libmariadb-dev-compat libmariadb-dev
|
||||
|
||||
else
|
||||
DEBIAN_FRONTEND=noninteracitve apt install -y htop telnet libcurl4-gnutls-dev libgnutls28-dev libgcrypt20-dev libattr1 libattr1-dev liblzma-dev libgpgme-dev libmariadbclient-dev libcurl4-gnutls-dev libssl-dev nghttp2 libnghttp2-dev idn2 libidn2-dev libidn2-0-dev librtmp-dev libpsl-dev nettle-dev libgnutls28-dev libldap2-dev libgssapi-krb5-2 libk5crypto3 libkrb5-dev libcomerr2 libldap2-dev virtualenv git dnsutils
|
||||
DEBIAN_FRONTEND=noninteractive apt install -y htop telnet libcurl4-gnutls-dev libgnutls28-dev libgcrypt20-dev libattr1 libattr1-dev liblzma-dev libgpgme-dev libmariadbclient-dev libcurl4-gnutls-dev libssl-dev nghttp2 libnghttp2-dev idn2 libidn2-dev libidn2-0-dev librtmp-dev libpsl-dev nettle-dev libgnutls28-dev libldap2-dev libgssapi-krb5-2 libk5crypto3 libkrb5-dev libcomerr2 libldap2-dev virtualenv git dnsutils
|
||||
fi
|
||||
DEBIAN_FRONTEND=noninteractive apt install -y python3-pip
|
||||
DEBIAN_FRONTEND=noninteractive apt install -y build-essential libssl-dev libffi-dev python3-dev
|
||||
|
|
|
|||
|
|
@ -432,7 +432,7 @@
|
|||
ng-model="ftpUserName" placeholder="john_ftp" required>
|
||||
<span class="username-suffix">{{ OwnerFTP }}_{$ ftpUserName $}</span>
|
||||
</div>
|
||||
<small style="color: #64748b; margin-top: 0.5rem; display: block;">
|
||||
<small style="color: #64748b; margin-top: 0.5rem; display: block; font-size: 0.875rem;">
|
||||
<i class="fas fa-info-circle"></i>
|
||||
{% trans "Your FTP username will be prefixed with the owner username" %}
|
||||
</small>
|
||||
|
|
@ -467,7 +467,7 @@
|
|||
{% trans "Use This Password" %}
|
||||
</button>
|
||||
</div>
|
||||
<small style="color: #64748b; margin-top: 0.5rem; display: block;">
|
||||
<small style="color: #64748b; margin-top: 0.5rem; display: block; font-size: 0.875rem;">
|
||||
<i class="fas fa-info-circle"></i>
|
||||
{% trans "Make sure to save this password in a secure location" %}
|
||||
</small>
|
||||
|
|
|
|||
|
|
@ -424,7 +424,7 @@
|
|||
{% trans "Use This Password" %}
|
||||
</button>
|
||||
</div>
|
||||
<small style="color: #64748b; margin-top: 0.5rem; display: block;">
|
||||
<small style="color: #64748b; margin-top: 0.5rem; display: block; font-size: 0.875rem;">
|
||||
<i class="fas fa-info-circle"></i>
|
||||
{% trans "Make sure to save this password in a secure location" %}
|
||||
</small>
|
||||
|
|
|
|||
|
|
@ -14,112 +14,42 @@ from os.path import *
|
|||
from stat import *
|
||||
import stat
|
||||
import secrets
|
||||
import install_utils
|
||||
|
||||
VERSION = '2.4'
|
||||
BUILD = 2
|
||||
|
||||
char_set = {'small': 'abcdefghijklmnopqrstuvwxyz', 'nums': '0123456789', 'big': 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'}
|
||||
# Using shared char_set from install_utils
|
||||
char_set = install_utils.char_set
|
||||
|
||||
|
||||
def generate_pass(length=14):
|
||||
chars = string.ascii_uppercase + string.ascii_lowercase + string.digits
|
||||
size = length
|
||||
return ''.join(random.choice(chars) for x in range(size))
|
||||
# Using shared function from install_utils
|
||||
generate_pass = install_utils.generate_pass
|
||||
|
||||
|
||||
# There can not be peace without first a great suffering.
|
||||
|
||||
# distros
|
||||
# distros - using from install_utils
|
||||
centos = install_utils.centos
|
||||
ubuntu = install_utils.ubuntu
|
||||
cent8 = install_utils.cent8
|
||||
openeuler = install_utils.openeuler
|
||||
cent9 = 4 # Not in install_utils yet
|
||||
CloudLinux8 = 0 # Not in install_utils yet
|
||||
|
||||
centos = 0
|
||||
ubuntu = 1
|
||||
cent8 = 2
|
||||
cent9 = 4
|
||||
openeuler = 3
|
||||
CloudLinux8 = 0
|
||||
|
||||
def FetchCloudLinuxAlmaVersionVersion():
|
||||
if os.path.exists('/etc/os-release'):
|
||||
data = open('/etc/os-release', 'r').read()
|
||||
if (data.find('CloudLinux') > -1 or data.find('cloudlinux') > -1) and (data.find('8.9') > -1 or data.find('Anatoly Levchenko') > -1 or data.find('VERSION="8.') > -1):
|
||||
return 'cl-89'
|
||||
elif (data.find('CloudLinux') > -1 or data.find('cloudlinux') > -1) and (data.find('8.8') > -1 or data.find('Anatoly Filipchenko') > -1):
|
||||
return 'cl-88'
|
||||
elif (data.find('CloudLinux') > -1 or data.find('cloudlinux') > -1) and (data.find('9.4') > -1 or data.find('VERSION="9.') > -1):
|
||||
return 'cl-88'
|
||||
elif (data.find('AlmaLinux') > -1 or data.find('almalinux') > -1) and (data.find('8.9') > -1 or data.find('Midnight Oncilla') > -1 or data.find('VERSION="8.') > -1):
|
||||
return 'al-88'
|
||||
elif (data.find('AlmaLinux') > -1 or data.find('almalinux') > -1) and (data.find('8.7') > -1 or data.find('Stone Smilodon') > -1):
|
||||
return 'al-87'
|
||||
elif (data.find('AlmaLinux') > -1 or data.find('almalinux') > -1) and (data.find('9.4') > -1 or data.find('9.3') > -1 or data.find('Shamrock Pampas') > -1 or data.find('Seafoam Ocelot') > -1 or data.find('VERSION="9.') > -1):
|
||||
return 'al-93'
|
||||
else:
|
||||
return -1
|
||||
# Using shared function from install_utils
|
||||
FetchCloudLinuxAlmaVersionVersion = install_utils.FetchCloudLinuxAlmaVersionVersion
|
||||
|
||||
|
||||
def get_distro():
|
||||
distro = -1
|
||||
distro_file = ""
|
||||
if exists("/etc/lsb-release"):
|
||||
distro_file = "/etc/lsb-release"
|
||||
with open(distro_file) as f:
|
||||
for line in f:
|
||||
if line == "DISTRIB_ID=Ubuntu\n":
|
||||
distro = ubuntu
|
||||
|
||||
elif exists("/etc/redhat-release"):
|
||||
distro_file = "/etc/redhat-release"
|
||||
distro = centos
|
||||
|
||||
data = open('/etc/redhat-release', 'r').read()
|
||||
|
||||
|
||||
if data.find('CentOS Linux release 8') > -1:
|
||||
return cent8
|
||||
## if almalinux 9 then pretty much same as cent8
|
||||
if data.find('AlmaLinux release 8') > -1 or data.find('AlmaLinux release 9') > -1:
|
||||
return cent8
|
||||
if data.find('Rocky Linux release 8') > -1 or data.find('Rocky Linux 8') > -1 or data.find('rocky:8') > -1:
|
||||
return cent8
|
||||
if data.find('CloudLinux 8') or data.find('cloudlinux 8'):
|
||||
return cent8
|
||||
|
||||
else:
|
||||
if exists("/etc/openEuler-release"):
|
||||
distro_file = "/etc/openEuler-release"
|
||||
distro = openeuler
|
||||
|
||||
else:
|
||||
logging.InstallLog.writeToFile("Can't find linux release file - fatal error")
|
||||
preFlightsChecks.stdOut("Can't find linux release file - fatal error")
|
||||
os._exit(os.EX_UNAVAILABLE)
|
||||
|
||||
if distro == -1:
|
||||
logging.InstallLog.writeToFile("Can't find distro name in " + distro_file + " - fatal error")
|
||||
preFlightsChecks.stdOut("Can't find distro name in " + distro_file + " - fatal error")
|
||||
os._exit(os.EX_UNAVAILABLE)
|
||||
|
||||
return distro
|
||||
# Using shared function from install_utils
|
||||
get_distro = install_utils.get_distro
|
||||
|
||||
|
||||
def get_Ubuntu_release():
|
||||
release = -1
|
||||
if exists("/etc/lsb-release"):
|
||||
distro_file = "/etc/lsb-release"
|
||||
with open(distro_file) as f:
|
||||
for line in f:
|
||||
if line[:16] == "DISTRIB_RELEASE=":
|
||||
release = float(line[16:])
|
||||
|
||||
if release == -1:
|
||||
preFlightsChecks.stdOut("Can't find distro release name in " + distro_file + " - fatal error", 1, 1,
|
||||
os.EX_UNAVAILABLE)
|
||||
|
||||
else:
|
||||
logging.InstallLog.writeToFile("Can't find linux release file - fatal error")
|
||||
preFlightsChecks.stdOut("Can't find linux release file - fatal error")
|
||||
os._exit(os.EX_UNAVAILABLE)
|
||||
|
||||
release = install_utils.get_Ubuntu_release(use_print=False, exit_on_error=True)
|
||||
if release == -1:
|
||||
preFlightsChecks.stdOut("Can't find distro release name in /etc/lsb-release - fatal error", 1, 1,
|
||||
os.EX_UNAVAILABLE)
|
||||
return release
|
||||
|
||||
|
||||
|
|
@ -128,6 +58,34 @@ class preFlightsChecks:
|
|||
cyberPanelMirror = "mirror.cyberpanel.net/pip"
|
||||
cdn = 'cyberpanel.sh'
|
||||
SnappyVersion = '2.38.2'
|
||||
apt_updated = False # Track if apt update has been run
|
||||
|
||||
def install_package(self, package_name, options="", silent=False):
|
||||
"""Unified package installation across distributions"""
|
||||
command, shell = install_utils.get_package_install_command(self.distro, package_name, options)
|
||||
|
||||
if not silent:
|
||||
return preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR, shell)
|
||||
else:
|
||||
return preFlightsChecks.call(command, self.distro, command, command, 0, 0, os.EX_OSERR, shell)
|
||||
|
||||
def is_centos_family(self):
|
||||
"""Check if distro is CentOS, CentOS 8, or OpenEuler"""
|
||||
return self.distro in [centos, cent8, openeuler]
|
||||
|
||||
def manage_service(self, service_name, action="start"):
|
||||
"""Unified service management"""
|
||||
command = f'systemctl {action} {service_name}'
|
||||
return preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
|
||||
def remove_package(self, package_name, silent=False):
|
||||
"""Unified package removal across distributions"""
|
||||
command, shell = install_utils.get_package_remove_command(self.distro, package_name)
|
||||
|
||||
if not silent:
|
||||
return preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR, shell)
|
||||
else:
|
||||
return preFlightsChecks.call(command, self.distro, command, command, 0, 0, os.EX_OSERR, shell)
|
||||
|
||||
def __init__(self, rootPath, ip, path, cwd, cyberPanelPath, distro, remotemysql=None, mysqlhost=None, mysqldb=None,
|
||||
mysqluser=None, mysqlpassword=None, mysqlport=None):
|
||||
|
|
@ -148,11 +106,8 @@ class preFlightsChecks:
|
|||
def installQuota(self,):
|
||||
try:
|
||||
|
||||
if self.distro == centos or self.distro == cent8 or self.distro == openeuler:
|
||||
command = "yum install quota -y"
|
||||
preFlightsChecks.call(command, self.distro, command,
|
||||
command,
|
||||
1, 0, os.EX_OSERR)
|
||||
if self.is_centos_family():
|
||||
self.install_package("quota", silent=True)
|
||||
|
||||
if self.edit_fstab('/', '/') == 0:
|
||||
preFlightsChecks.stdOut("Quotas will not be abled as we failed to modify fstab file.")
|
||||
|
|
@ -184,18 +139,14 @@ class preFlightsChecks:
|
|||
|
||||
if self.distro == ubuntu:
|
||||
self.stdOut("Install Quota on Ubuntu")
|
||||
command = 'apt update -y'
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
|
||||
command = 'DEBIAN_FRONTEND=noninteractive apt install quota -y'
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR, True)
|
||||
# Skip apt update as it was already done in cyberpanel.sh
|
||||
self.install_package("quota", silent=True)
|
||||
|
||||
command = "find /lib/modules/ -type f -name '*quota_v*.ko*'"
|
||||
|
||||
|
||||
if subprocess.check_output(command,shell=True).decode("utf-8").find("quota/") == -1:
|
||||
command = "DEBIAN_FRONTEND=noninteractive apt install linux-image-extra-virtual -y"
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR, True)
|
||||
self.install_package("linux-image-extra-virtual", silent=True)
|
||||
|
||||
if self.edit_fstab('/', '/') == 0:
|
||||
preFlightsChecks.stdOut("Quotas will not be abled as we are are failed to modify fstab file.")
|
||||
|
|
@ -334,18 +285,7 @@ class preFlightsChecks:
|
|||
|
||||
@staticmethod
|
||||
def stdOut(message, log=0, do_exit=0, code=os.EX_OK):
|
||||
print("\n\n")
|
||||
print(("[" + time.strftime(
|
||||
"%m.%d.%Y_%H-%M-%S") + "] #########################################################################\n"))
|
||||
print(("[" + time.strftime("%m.%d.%Y_%H-%M-%S") + "] " + message + "\n"))
|
||||
print(("[" + time.strftime(
|
||||
"%m.%d.%Y_%H-%M-%S") + "] #########################################################################\n"))
|
||||
|
||||
if log:
|
||||
logging.InstallLog.writeToFile(message)
|
||||
if do_exit:
|
||||
logging.InstallLog.writeToFile(message)
|
||||
sys.exit(code)
|
||||
install_utils.stdOut(message, log, do_exit, code)
|
||||
|
||||
def mountTemp(self):
|
||||
try:
|
||||
|
|
@ -357,10 +297,7 @@ class preFlightsChecks:
|
|||
|
||||
if result.stdout.find('openvz') > -1:
|
||||
if self.distro == ubuntu:
|
||||
command = 'DEBIAN_FRONTEND=noninteractive apt install inetutils-inetd -y'
|
||||
preFlightsChecks.call(command, self.distro, command,
|
||||
command,
|
||||
1, 0, os.EX_OSERR)
|
||||
self.install_package("inetutils-inetd")
|
||||
|
||||
# ## On OpenVZ there is an issue using .tempdisk for /tmp as it breaks network on container after reboot.
|
||||
#
|
||||
|
|
@ -437,42 +374,15 @@ class preFlightsChecks:
|
|||
return 'pure-ftpd-mysql'
|
||||
return 'pure-ftpd'
|
||||
|
||||
# Using shared function from install_utils
|
||||
@staticmethod
|
||||
def resFailed(distro, res):
|
||||
if distro == ubuntu and res != 0:
|
||||
return True
|
||||
elif distro == centos and res != 0:
|
||||
return True
|
||||
return False
|
||||
return install_utils.resFailed(distro, res)
|
||||
|
||||
# Using shared function from install_utils
|
||||
@staticmethod
|
||||
def call(command, distro, bracket, message, log=0, do_exit=0, code=os.EX_OK, shell=False):
|
||||
finalMessage = 'Running: %s' % (message)
|
||||
preFlightsChecks.stdOut(finalMessage, log)
|
||||
count = 0
|
||||
while True:
|
||||
if shell == False:
|
||||
res = subprocess.call(shlex.split(command))
|
||||
else:
|
||||
res = subprocess.call(command, shell=True)
|
||||
|
||||
if preFlightsChecks.resFailed(distro, res):
|
||||
count = count + 1
|
||||
finalMessage = 'Running %s failed. Running again, try number %s' % (message, str(count))
|
||||
preFlightsChecks.stdOut(finalMessage)
|
||||
if count == 3:
|
||||
fatal_message = ''
|
||||
if do_exit:
|
||||
fatal_message = '. Fatal error, see /var/log/installLogs.txt for full details'
|
||||
|
||||
preFlightsChecks.stdOut("[ERROR] We are not able to run " + message + ' return code: ' + str(res) +
|
||||
fatal_message + ".", 1, do_exit, code)
|
||||
return False
|
||||
else:
|
||||
preFlightsChecks.stdOut('Successfully ran: %s.' % (message), log)
|
||||
break
|
||||
|
||||
return True
|
||||
return install_utils.call(command, distro, bracket, message, log, do_exit, code, shell)
|
||||
|
||||
def checkIfSeLinuxDisabled(self):
|
||||
try:
|
||||
|
|
@ -505,11 +415,8 @@ class preFlightsChecks:
|
|||
def setup_account_cyberpanel(self):
|
||||
try:
|
||||
|
||||
if self.distro == centos or self.distro == cent8 or self.distro == openeuler:
|
||||
command = "yum install sudo -y"
|
||||
preFlightsChecks.call(command, self.distro, command,
|
||||
command,
|
||||
1, 0, os.EX_OSERR)
|
||||
if self.is_centos_family():
|
||||
self.install_package("sudo", silent=True)
|
||||
|
||||
##
|
||||
|
||||
|
|
@ -595,13 +502,7 @@ class preFlightsChecks:
|
|||
|
||||
def install_psmisc(self):
|
||||
self.stdOut("Install psmisc")
|
||||
|
||||
if self.distro == centos or self.distro == cent8 or self.distro == openeuler:
|
||||
command = "yum -y install psmisc"
|
||||
else:
|
||||
command = "DEBIAN_FRONTEND=noninteractive apt-get -y install psmisc"
|
||||
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR, True)
|
||||
self.install_package("psmisc")
|
||||
|
||||
def download_install_CyberPanel(self, mysqlPassword, mysql):
|
||||
##
|
||||
|
|
@ -879,7 +780,7 @@ password="%s"
|
|||
command = "find /usr/local/CyberCP/ -name '*.pyc' -delete"
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
|
||||
if self.distro == cent8 or self.distro == centos or self.distro == openeuler:
|
||||
if self.is_centos_family():
|
||||
command = 'chown root:pdns /etc/pdns/pdns.conf'
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
|
||||
|
|
@ -934,24 +835,14 @@ password="%s"
|
|||
def install_unzip(self):
|
||||
self.stdOut("Install unzip")
|
||||
try:
|
||||
if self.distro == centos or self.distro == cent8 or self.distro == openeuler:
|
||||
command = 'yum -y install unzip'
|
||||
else:
|
||||
command = 'DEBIAN_FRONTEND=noninteractive apt-get -y install unzip'
|
||||
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR, True)
|
||||
self.install_package("unzip")
|
||||
except BaseException as msg:
|
||||
logging.InstallLog.writeToFile('[ERROR] ' + str(msg) + " [install_unzip]")
|
||||
|
||||
def install_zip(self):
|
||||
self.stdOut("Install zip")
|
||||
try:
|
||||
if self.distro == centos or self.distro == cent8 or self.distro == openeuler:
|
||||
command = 'yum -y install zip'
|
||||
else:
|
||||
command = 'DEBIAN_FRONTEND=noninteractive apt-get -y install zip'
|
||||
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR, True)
|
||||
self.install_package("zip")
|
||||
except BaseException as msg:
|
||||
logging.InstallLog.writeToFile('[ERROR] ' + str(msg) + " [install_zip]")
|
||||
|
||||
|
|
@ -979,7 +870,7 @@ password="%s"
|
|||
|
||||
## Write secret phrase
|
||||
|
||||
rString = ''.join([random.choice(string.ascii_letters + string.digits) for n in range(32)])
|
||||
rString = install_utils.generate_random_string(32)
|
||||
|
||||
data = open('/usr/local/CyberCP/public/phpmyadmin/config.sample.inc.php', 'r').readlines()
|
||||
|
||||
|
|
@ -1044,11 +935,9 @@ $cfg['Servers'][$i]['LogoutURL'] = 'phpmyadminsignin.php?logout';
|
|||
|
||||
try:
|
||||
if self.distro == centos:
|
||||
command = 'yum remove postfix -y'
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
self.remove_package("postfix")
|
||||
elif self.distro == ubuntu:
|
||||
command = 'DEBIAN_FRONTEND=noninteractive apt-get -y remove postfix'
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR, True)
|
||||
self.remove_package("postfix")
|
||||
|
||||
self.stdOut("Install dovecot - do the install")
|
||||
|
||||
|
|
@ -1075,8 +964,7 @@ $cfg['Servers'][$i]['LogoutURL'] = 'phpmyadminsignin.php?logout';
|
|||
preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
|
||||
else:
|
||||
command = 'DEBIAN_FRONTEND=noninteractive apt-get -y install debconf-utils'
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR, True)
|
||||
self.install_package("debconf-utils", silent=True)
|
||||
file_name = self.cwd + '/pf.unattend.text'
|
||||
pf = open(file_name, 'w')
|
||||
pf.write('postfix postfix/mailname string ' + str(socket.getfqdn() + '\n'))
|
||||
|
|
@ -1413,13 +1301,8 @@ $cfg['Servers'][$i]['LogoutURL'] = 'phpmyadminsignin.php?logout';
|
|||
|
||||
################################### Restart postix
|
||||
|
||||
command = 'systemctl enable postfix.service'
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
|
||||
##
|
||||
|
||||
command = 'systemctl start postfix.service'
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
self.manage_service('postfix', 'enable')
|
||||
self.manage_service('postfix', 'start')
|
||||
|
||||
######################################## Permissions
|
||||
|
||||
|
|
@ -1433,18 +1316,12 @@ $cfg['Servers'][$i]['LogoutURL'] = 'phpmyadminsignin.php?logout';
|
|||
|
||||
################################### Restart dovecot
|
||||
|
||||
command = 'systemctl enable dovecot.service'
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
self.manage_service('dovecot', 'enable')
|
||||
self.manage_service('dovecot', 'start')
|
||||
|
||||
##
|
||||
|
||||
command = 'systemctl start dovecot.service'
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
|
||||
##
|
||||
|
||||
command = 'systemctl restart postfix.service'
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
self.manage_service('postfix', 'restart')
|
||||
|
||||
## chaging permissions for main.cf
|
||||
|
||||
|
|
@ -1479,8 +1356,7 @@ $cfg['Servers'][$i]['LogoutURL'] = 'phpmyadminsignin.php?logout';
|
|||
writeToFile.writelines(items)
|
||||
writeToFile.close()
|
||||
|
||||
command = "systemctl restart dovecot"
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
self.manage_service('dovecot', 'restart')
|
||||
|
||||
logging.InstallLog.writeToFile("Postfix and Dovecot configured")
|
||||
except BaseException as msg:
|
||||
|
|
@ -1653,7 +1529,7 @@ $cfg['Servers'][$i]['LogoutURL'] = 'phpmyadminsignin.php?logout';
|
|||
###################################################### Email setup ends!
|
||||
|
||||
def reStartLiteSpeed(self):
|
||||
command = '%sbin/lswsctrl restart' % (self.server_root_path)
|
||||
command = install_utils.format_restart_litespeed_command(self.server_root_path)
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
|
||||
def removeUfw(self):
|
||||
|
|
@ -1695,29 +1571,17 @@ $cfg['Servers'][$i]['LogoutURL'] = 'phpmyadminsignin.php?logout';
|
|||
try:
|
||||
preFlightsChecks.stdOut("Enabling Firewall!")
|
||||
|
||||
if self.distro == ubuntu:
|
||||
command = 'DEBIAN_FRONTEND=noninteractive apt-get -y install firewalld'
|
||||
else:
|
||||
command = 'yum -y install firewalld'
|
||||
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR, True)
|
||||
self.install_package("firewalld")
|
||||
|
||||
######
|
||||
if self.distro == centos:
|
||||
# Not available in ubuntu
|
||||
command = 'systemctl restart dbus'
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
self.manage_service('dbus', 'restart')
|
||||
|
||||
command = 'systemctl restart systemd-logind'
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
self.manage_service('systemd-logind', 'restart')
|
||||
|
||||
command = 'systemctl start firewalld'
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
|
||||
##########
|
||||
|
||||
command = 'systemctl enable firewalld'
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
self.manage_service('firewalld', 'start')
|
||||
self.manage_service('firewalld', 'enable')
|
||||
|
||||
FirewallUtilities.addRule("tcp", "8090")
|
||||
FirewallUtilities.addRule("tcp", "7080")
|
||||
|
|
@@ -1768,19 +1632,14 @@ $cfg['Servers'][$i]['LogoutURL'] = 'phpmyadminsignin.php?logout';
|
|||
os.chdir(self.cwd)
|
||||
|
||||
if self.distro == ubuntu:
|
||||
command = "DEBIAN_FRONTEND=noninteractive apt-get -y install gcc g++ make autoconf rcs"
|
||||
self.install_package("gcc g++ make autoconf rcs")
|
||||
else:
|
||||
command = 'yum -y install gcc gcc-c++ make autoconf glibc'
|
||||
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR, True)
|
||||
self.install_package("gcc gcc-c++ make autoconf glibc")
|
||||
|
||||
if self.distro == ubuntu:
|
||||
command = "DEBIAN_FRONTEND=noninteractive apt-get -y install libpcre3 libpcre3-dev openssl libexpat1 libexpat1-dev libgeoip-dev" \
|
||||
" zlib1g zlib1g-dev libudns-dev whichman curl"
|
||||
self.install_package("libpcre3 libpcre3-dev openssl libexpat1 libexpat1-dev libgeoip-dev zlib1g zlib1g-dev libudns-dev whichman curl")
|
||||
else:
|
||||
command = 'yum -y install pcre-devel openssl-devel expat-devel geoip-devel zlib-devel udns-devel'
|
||||
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR, True)
|
||||
self.install_package("pcre-devel openssl-devel expat-devel geoip-devel zlib-devel udns-devel")
|
||||
|
||||
command = 'tar zxf lscp.tar.gz -C /usr/local/'
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
|
|
@@ -1847,14 +1706,14 @@ $cfg['Servers'][$i]['LogoutURL'] = 'phpmyadminsignin.php?logout';
|
|||
except:
|
||||
pass
|
||||
|
||||
if self.distro == centos or self.distro == cent8 or self.distro == openeuler:
|
||||
if self.is_centos_family():
|
||||
command = 'adduser lscpd -M -d /usr/local/lscp'
|
||||
else:
|
||||
command = 'useradd lscpd -M -d /usr/local/lscp'
|
||||
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
|
||||
if self.distro == centos or self.distro == cent8 or self.distro == openeuler:
|
||||
if self.is_centos_family():
|
||||
command = 'groupadd lscpd'
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
# Added group in useradd for Ubuntu
|
||||
|
|
@@ -2044,8 +1903,7 @@ $cfg['Servers'][$i]['LogoutURL'] = 'phpmyadminsignin.php?logout';
|
|||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
|
||||
##
|
||||
command = 'systemctl enable lscpd.service'
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
self.manage_service('lscpd', 'enable')
|
||||
|
||||
##
|
||||
count = 0
|
||||
|
|
@@ -2075,26 +1933,17 @@ $cfg['Servers'][$i]['LogoutURL'] = 'phpmyadminsignin.php?logout';
|
|||
try:
|
||||
## first install crontab
|
||||
|
||||
if self.distro == centos or self.distro == cent8 or self.distro == openeuler:
|
||||
command = 'yum install cronie -y'
|
||||
if self.is_centos_family():
|
||||
self.install_package('cronie')
|
||||
else:
|
||||
command = 'DEBIAN_FRONTEND=noninteractive apt-get -y install cron'
|
||||
self.install_package('cron')
|
||||
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR, True)
|
||||
|
||||
if self.distro == centos or self.distro == cent8 or self.distro == openeuler:
|
||||
command = 'systemctl enable crond'
|
||||
if self.is_centos_family():
|
||||
self.manage_service('crond', 'enable')
|
||||
self.manage_service('crond', 'start')
|
||||
else:
|
||||
command = 'systemctl enable cron'
|
||||
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
|
||||
if self.distro == centos or self.distro == cent8 or self.distro == openeuler:
|
||||
command = 'systemctl start crond'
|
||||
else:
|
||||
command = 'systemctl start cron'
|
||||
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
self.manage_service('cron', 'enable')
|
||||
self.manage_service('cron', 'start')
|
||||
|
||||
##
|
||||
|
||||
|
|
@@ -2152,12 +2001,10 @@ $cfg['Servers'][$i]['LogoutURL'] = 'phpmyadminsignin.php?logout';
|
|||
command = 'chmod 600 %s' % (cronPath)
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
|
||||
if self.distro == centos or self.distro == cent8 or self.distro == openeuler:
|
||||
command = 'systemctl restart crond.service'
|
||||
if self.is_centos_family():
|
||||
self.manage_service('crond', 'restart')
|
||||
else:
|
||||
command = 'systemctl restart cron.service'
|
||||
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
self.manage_service('cron', 'restart')
|
||||
|
||||
except BaseException as msg:
|
||||
logging.InstallLog.writeToFile('[ERROR] ' + str(msg) + " [setup_cron]")
|
||||
|
|
@@ -2179,12 +2026,7 @@ $cfg['Servers'][$i]['LogoutURL'] = 'phpmyadminsignin.php?logout';
|
|||
|
||||
def install_rsync(self):
|
||||
try:
|
||||
if self.distro == centos or self.distro == cent8 or self.distro == openeuler:
|
||||
command = 'yum -y install rsync'
|
||||
else:
|
||||
command = 'DEBIAN_FRONTEND=noninteractive apt-get -y install rsync'
|
||||
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR, True)
|
||||
self.install_package('rsync')
|
||||
|
||||
except BaseException as msg:
|
||||
logging.InstallLog.writeToFile('[ERROR] ' + str(msg) + " [install_rsync]")
|
||||
|
|
@@ -2235,22 +2077,10 @@ $cfg['Servers'][$i]['LogoutURL'] = 'phpmyadminsignin.php?logout';
|
|||
|
||||
def installOpenDKIM(self):
|
||||
try:
|
||||
if self.distro == centos:
|
||||
command = 'yum -y install opendkim'
|
||||
elif self.distro == cent8 or self.distro == openeuler:
|
||||
command = 'dnf install opendkim -y'
|
||||
if self.distro == cent8 or self.distro == openeuler or self.distro == ubuntu:
|
||||
self.install_package('opendkim opendkim-tools')
|
||||
else:
|
||||
command = 'DEBIAN_FRONTEND=noninteractive apt-get -y install opendkim'
|
||||
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR, True)
|
||||
|
||||
if self.distro == cent8 or self.distro == openeuler:
|
||||
command = 'dnf install opendkim-tools -y'
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
|
||||
if self.distro == ubuntu:
|
||||
command = 'apt install opendkim-tools -y'
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
self.install_package('opendkim')
|
||||
|
||||
command = 'mkdir -p /etc/opendkim/keys/'
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
|
|
@@ -2307,16 +2137,9 @@ milter_default_action = accept
|
|||
|
||||
#### Restarting Postfix and OpenDKIM
|
||||
|
||||
command = "systemctl start opendkim"
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
|
||||
command = "systemctl enable opendkim"
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
|
||||
##
|
||||
|
||||
command = "systemctl start postfix"
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
self.manage_service('opendkim', 'start')
|
||||
self.manage_service('opendkim', 'enable')
|
||||
self.manage_service('postfix', 'start')
|
||||
|
||||
except BaseException as msg:
|
||||
logging.InstallLog.writeToFile('[ERROR] ' + str(msg) + " [configureOpenDKIM]")
|
||||
|
|
@@ -2513,9 +2336,8 @@ milter_default_action = accept
|
|||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
|
||||
else:
|
||||
command = 'DEBIAN_FRONTEND=noninteractive apt-get update -y'
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR, True)
|
||||
|
||||
# Skip apt-get update as it was already done in cyberpanel.sh
|
||||
# Just install the package directly
|
||||
command = 'DEBIAN_FRONTEND=noninteractive apt-get install restic -y'
|
||||
preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR, True)
|
||||
|
||||
|
|
@@ -2847,8 +2669,6 @@ admin_password = "12345"
|
|||
""")
|
||||
writeToFile.close()
|
||||
|
||||
import randomPassword
|
||||
|
||||
content = """<?php
|
||||
|
||||
$_ENV['snappymail_INCLUDE_AS_API'] = true;
|
||||
|
|
@@ -2858,7 +2678,7 @@ $oConfig = \snappymail\Api::Config();
|
|||
$oConfig->SetPassword('%s');
|
||||
echo $oConfig->Save() ? 'Done' : 'Error';
|
||||
|
||||
?>""" % (randomPassword.generate_pass())
|
||||
?>""" % (generate_pass())
|
||||
|
||||
writeToFile = open('/usr/local/CyberCP/public/snappymail.php', 'w')
|
||||
writeToFile.write(content)
|
||||
|
|
|
|||
|
|
@@ -3,63 +3,107 @@ import subprocess
|
|||
import os
|
||||
from mysqlUtilities import mysqlUtilities
|
||||
import installLog as logging
|
||||
import randomPassword
|
||||
import errno
|
||||
import MySQLdb as mariadb
|
||||
import install
|
||||
from os.path import exists
|
||||
import time
|
||||
import install_utils
|
||||
|
||||
# distros
|
||||
centos = 0
|
||||
ubuntu = 1
|
||||
cent8 = 2
|
||||
openeuler = 3
|
||||
# distros - shared constants imported from install_utils
|
||||
centos = install_utils.centos
|
||||
ubuntu = install_utils.ubuntu
|
||||
cent8 = install_utils.cent8
|
||||
openeuler = install_utils.openeuler
|
||||
|
||||
|
||||
def get_Ubuntu_release():
|
||||
release = -1
|
||||
if exists("/etc/lsb-release"):
|
||||
distro_file = "/etc/lsb-release"
|
||||
with open(distro_file) as f:
|
||||
for line in f:
|
||||
if line[:16] == "DISTRIB_RELEASE=":
|
||||
release = float(line[16:])
|
||||
|
||||
if release == -1:
|
||||
print("Can't find distro release name in " + distro_file + " - fatal error")
|
||||
|
||||
else:
|
||||
logging.InstallLog.writeToFile("Can't find linux release file - fatal error")
|
||||
print("Can't find linux release file - fatal error")
|
||||
os._exit(os.EX_UNAVAILABLE)
|
||||
|
||||
return release
|
||||
return install_utils.get_Ubuntu_release(use_print=True, exit_on_error=True)
|
||||
|
||||
|
||||
def FetchCloudLinuxAlmaVersionVersion():
|
||||
if os.path.exists('/etc/os-release'):
|
||||
data = open('/etc/os-release', 'r').read()
|
||||
if (data.find('CloudLinux') > -1 or data.find('cloudlinux') > -1) and (data.find('8.9') > -1 or data.find('Anatoly Levchenko') > -1 or data.find('VERSION="8.') > -1):
|
||||
return 'cl-89'
|
||||
elif (data.find('CloudLinux') > -1 or data.find('cloudlinux') > -1) and (data.find('8.8') > -1 or data.find('Anatoly Filipchenko') > -1):
|
||||
return 'cl-88'
|
||||
elif (data.find('CloudLinux') > -1 or data.find('cloudlinux') > -1) and (data.find('9.4') > -1 or data.find('VERSION="9.') > -1):
|
||||
return 'cl-88'
|
||||
elif (data.find('AlmaLinux') > -1 or data.find('almalinux') > -1) and (data.find('8.9') > -1 or data.find('Midnight Oncilla') > -1 or data.find('VERSION="8.') > -1):
|
||||
return 'al-88'
|
||||
elif (data.find('AlmaLinux') > -1 or data.find('almalinux') > -1) and (data.find('8.7') > -1 or data.find('Stone Smilodon') > -1):
|
||||
return 'al-87'
|
||||
elif (data.find('AlmaLinux') > -1 or data.find('almalinux') > -1) and (data.find('9.4') > -1 or data.find('9.3') > -1 or data.find('Shamrock Pampas') > -1 or data.find('Seafoam Ocelot') > -1 or data.find('VERSION="9.') > -1):
|
||||
return 'al-93'
|
||||
else:
|
||||
return -1
|
||||
# Using shared function from install_utils
|
||||
FetchCloudLinuxAlmaVersionVersion = install_utils.FetchCloudLinuxAlmaVersionVersion
|
||||
|
||||
class InstallCyberPanel:
|
||||
mysql_Root_password = ""
|
||||
mysqlPassword = ""
|
||||
CloudLinux8 = 0
|
||||
|
||||
def install_package(self, package_name, options=""):
|
||||
"""Unified package installation across distributions"""
|
||||
command, shell = install_utils.get_package_install_command(self.distro, package_name, options)
|
||||
|
||||
# InstallCyberPanel always uses verbose mode (no silent option)
|
||||
if self.distro == ubuntu:
|
||||
return install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR, shell)
|
||||
else:
|
||||
# For non-Ubuntu, original code didn't pass shell parameter
|
||||
return install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
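# Usage sketch (illustrative, not part of the commit): a single call covers every
# supported distro; apt-get, yum or dnf is chosen internally by install_utils.
#     self.install_package('pure-ftpd')
#     self.install_package('lsphp81*', '--skip-broken')   # options are appended to the package manager call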
|
||||
|
||||
def manage_service(self, service_name, action="start"):
|
||||
"""Unified service management"""
|
||||
service_map = {
|
||||
'mariadb': 'mariadb',
|
||||
'pureftpd': 'pure-ftpd-mysql' if self.distro == ubuntu else 'pure-ftpd',
|
||||
'pdns': 'pdns'
|
||||
}
|
||||
|
||||
actual_service = service_map.get(service_name, service_name)
|
||||
command = f'systemctl {action} {actual_service}'
|
||||
return install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
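# Usage sketch (illustrative): the 'pureftpd' key resolves through service_map, so on
# Ubuntu the call below runs `systemctl restart pure-ftpd-mysql` and on other distros
# `systemctl restart pure-ftpd`.
#     self.manage_service('pureftpd', 'restart')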
|
||||
|
||||
def modify_file_content(self, file_path, replacements):
|
||||
"""Generic file content modification"""
|
||||
try:
|
||||
with open(file_path, 'r') as f:
|
||||
data = f.readlines()
|
||||
|
||||
with open(file_path, 'w') as f:
|
||||
for line in data:
|
||||
modified_line = line
|
||||
for old, new in replacements.items():
|
||||
if old in line:
|
||||
modified_line = line.replace(old, new)
|
||||
break
|
||||
f.write(modified_line)
|
||||
return True
|
||||
except IOError as e:
|
||||
logging.InstallLog.writeToFile(f'[ERROR] {str(e)} [modify_file_content]')
|
||||
return False
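# Usage sketch (illustrative): rewrite the OpenLiteSpeed listener port in place, as
# changePortTo80() does further down; only the first matching replacement is applied
# per line before the loop breaks. Path shown only as an example.
#     self.modify_file_content('/usr/local/lsws/conf/httpd_config.conf',
#                              {"*:8088": "*:80"})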
|
||||
|
||||
def copy_config_file(self, source_dir, dest_path, mysql_mode='One'):
|
||||
"""Handle configuration file copying with mode selection"""
|
||||
# For directories like 'dns' vs 'dns-one', 'pure-ftpd' vs 'pure-ftpd-one'
|
||||
# Default mode is 'One' which uses the -one directories
|
||||
if mysql_mode == 'Two':
|
||||
source_path = source_dir
|
||||
else:
|
||||
# Default mode 'One' uses directories with -one suffix
|
||||
source_path = f"{source_dir}-one"
|
||||
|
||||
# Determine the actual file to copy
|
||||
if os.path.isdir(source_path):
|
||||
# If it's a directory, we need to copy the whole directory
|
||||
if os.path.exists(dest_path):
|
||||
if os.path.isdir(dest_path):
|
||||
shutil.rmtree(dest_path)
|
||||
shutil.copytree(source_path, dest_path)
|
||||
else:
|
||||
# If source is a directory but dest is a file, find the config file
|
||||
if os.path.isdir(source_dir) or os.path.isdir(f"{source_dir}-one"):
|
||||
# Look for pdns.conf or similar config file
|
||||
if dest_path.endswith('pdns.conf'):
|
||||
source_file = os.path.join(source_path, 'pdns.conf')
|
||||
elif dest_path.endswith('pureftpd-mysql.conf'):
|
||||
source_file = os.path.join(source_path, 'pureftpd-mysql.conf')
|
||||
else:
|
||||
# Generic case - use basename of dest
|
||||
source_file = os.path.join(source_path, os.path.basename(dest_path))
|
||||
|
||||
if os.path.exists(dest_path):
|
||||
os.remove(dest_path)
|
||||
shutil.copy(source_file, dest_path)
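# Usage sketch (illustrative): with the default mysql_mode 'One' the source comes from
# the '-one' variant (e.g. dns-one/pdns.conf); passing 'Two' uses dns/pdns.conf instead.
#     self.copy_config_file("dns", "/etc/pdns/pdns.conf", mysql)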
|
||||
|
||||
@staticmethod
|
||||
def ISARM():
|
||||
|
||||
|
|
@@ -109,31 +153,23 @@ class InstallCyberPanel:
|
|||
|
||||
@staticmethod
|
||||
def stdOut(message, log=0, exit=0, code=os.EX_OK):
|
||||
install.preFlightsChecks.stdOut(message, log, exit, code)
|
||||
install_utils.stdOut(message, log, exit, code)
|
||||
|
||||
def installLiteSpeed(self):
|
||||
if self.ent == 0:
|
||||
if self.distro == ubuntu:
|
||||
command = "DEBIAN_FRONTEND=noninteractive apt-get -y install openlitespeed"
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR, True)
|
||||
elif self.distro == centos:
|
||||
command = 'yum install -y openlitespeed'
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
else:
|
||||
command = 'dnf install -y openlitespeed'
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
self.install_package('openlitespeed')
|
||||
|
||||
else:
|
||||
try:
|
||||
try:
|
||||
command = 'groupadd nobody'
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
install_utils.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
command = 'usermod -a -G nobody nobody'
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
install_utils.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
except:
|
||||
pass
|
||||
|
||||
|
|
@@ -142,20 +178,20 @@ class InstallCyberPanel:
|
|||
else:
|
||||
command = 'wget https://www.litespeedtech.com/packages/6.0/lsws-6.2-ent-x86_64-linux.tar.gz'
|
||||
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
|
||||
if InstallCyberPanel.ISARM():
|
||||
command = 'tar zxf lsws-6.2-ent-aarch64-linux.tar.gz'
|
||||
else:
|
||||
command = 'tar zxf lsws-6.2-ent-x86_64-linux.tar.gz'
|
||||
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
|
||||
if str.lower(self.serial) == 'trial':
|
||||
command = 'wget -q --output-document=lsws-6.2/trial.key http://license.litespeedtech.com/reseller/trial.key'
|
||||
if self.serial == '1111-2222-3333-4444':
|
||||
command = 'wget -q --output-document=/root/cyberpanel/install/lsws-6.2/trial.key http://license.litespeedtech.com/reseller/trial.key'
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
else:
|
||||
writeSerial = open('lsws-6.2/serial.no', 'w')
|
||||
writeSerial.writelines(self.serial)
|
||||
|
|
@@ -167,13 +203,13 @@ class InstallCyberPanel:
|
|||
os.chdir('lsws-6.2')
|
||||
|
||||
command = 'chmod +x install.sh'
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
|
||||
command = 'chmod +x functions.sh'
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
|
||||
command = './install.sh'
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
|
||||
os.chdir(self.cwd)
|
||||
confPath = '/usr/local/lsws/conf/'
|
||||
|
|
@@ -182,7 +218,7 @@ class InstallCyberPanel:
|
|||
shutil.copy('litespeed/httpd.conf', confPath)
|
||||
|
||||
command = 'chown -R lsadm:lsadm ' + confPath
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
install_utils.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
|
||||
except BaseException as msg:
|
||||
logging.InstallLog.writeToFile('[ERROR] ' + str(msg) + " [installLiteSpeed]")
|
||||
|
|
@@ -191,8 +227,8 @@ class InstallCyberPanel:
|
|||
return 1
|
||||
|
||||
def reStartLiteSpeed(self):
|
||||
command = self.server_root_path + "bin/lswsctrl restart"
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
command = install_utils.format_restart_litespeed_command(self.server_root_path)
|
||||
install_utils.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
|
||||
def fix_ols_configs(self):
|
||||
try:
|
||||
|
|
@@ -224,97 +260,63 @@ class InstallCyberPanel:
|
|||
try:
|
||||
InstallCyberPanel.stdOut("Changing default port to 80..", 1)
|
||||
|
||||
data = open(self.server_root_path + "conf/httpd_config.conf").readlines()
|
||||
file_path = self.server_root_path + "conf/httpd_config.conf"
|
||||
if self.modify_file_content(file_path, {"*:8088": "*:80"}):
|
||||
InstallCyberPanel.stdOut("Default port is now 80 for OpenLiteSpeed!", 1)
|
||||
else:
|
||||
return 0
|
||||
|
||||
writeDataToFile = open(self.server_root_path + "conf/httpd_config.conf", 'w')
|
||||
|
||||
for items in data:
|
||||
if (items.find("*:8088") > -1):
|
||||
writeDataToFile.writelines(items.replace("*:8088", "*:80"))
|
||||
else:
|
||||
writeDataToFile.writelines(items)
|
||||
|
||||
writeDataToFile.close()
|
||||
|
||||
InstallCyberPanel.stdOut("Default port is now 80 for OpenLiteSpeed!", 1)
|
||||
|
||||
except IOError as msg:
|
||||
except Exception as msg:
|
||||
logging.InstallLog.writeToFile('[ERROR] ' + str(msg) + " [changePortTo80]")
|
||||
return 0
|
||||
|
||||
return self.reStartLiteSpeed()
|
||||
|
||||
def installAllPHPVersions(self):
|
||||
|
||||
php_versions = ['71', '72', '73', '74', '80', '81', '82', '83']
|
||||
|
||||
if self.distro == ubuntu:
|
||||
# Install base PHP 7.x packages
|
||||
command = 'DEBIAN_FRONTEND=noninteractive apt-get -y install ' \
|
||||
'lsphp7? lsphp7?-common lsphp7?-curl lsphp7?-dev lsphp7?-imap lsphp7?-intl lsphp7?-json ' \
|
||||
'lsphp7?-ldap lsphp7?-mysql lsphp7?-opcache lsphp7?-pspell lsphp7?-recode ' \
|
||||
'lsphp7?-sqlite3 lsphp7?-tidy'
|
||||
|
||||
os.system(command)
|
||||
|
||||
command = 'DEBIAN_FRONTEND=noninteractive apt-get -y install lsphp80*'
|
||||
os.system(command)
|
||||
|
||||
command = 'DEBIAN_FRONTEND=noninteractive apt-get -y install lsphp81*'
|
||||
os.system(command)
|
||||
|
||||
command = 'DEBIAN_FRONTEND=noninteractive apt-get -y install lsphp82*'
|
||||
os.system(command)
|
||||
|
||||
command = 'DEBIAN_FRONTEND=noninteractive apt-get -y install lsphp83*'
|
||||
os.system(command)
|
||||
|
||||
|
||||
# Install PHP 8.x versions
|
||||
for version in php_versions[4:]: # 80, 81, 82, 83
|
||||
self.install_package(f'lsphp{version}*')
|
||||
|
||||
elif self.distro == centos:
|
||||
# First install the group
|
||||
command = 'yum -y groupinstall lsphp-all'
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
|
||||
InstallCyberPanel.stdOut("LiteSpeed PHPs successfully installed!", 1)
|
||||
|
||||
## only php 71
|
||||
if self.distro == centos:
|
||||
command = 'yum install -y lsphp71* --skip-broken'
|
||||
|
||||
subprocess.call(command, shell=True)
|
||||
|
||||
## only php 72
|
||||
command = 'yum install -y lsphp72* --skip-broken'
|
||||
|
||||
subprocess.call(command, shell=True)
|
||||
|
||||
## only php 73
|
||||
command = 'yum install -y lsphp73* --skip-broken'
|
||||
|
||||
subprocess.call(command, shell=True)
|
||||
|
||||
## only php 74
|
||||
command = 'yum install -y lsphp74* --skip-broken'
|
||||
|
||||
subprocess.call(command, shell=True)
|
||||
|
||||
command = 'yum install lsphp80* -y --skip-broken'
|
||||
subprocess.call(command, shell=True)
|
||||
|
||||
command = 'yum install lsphp81* -y --skip-broken'
|
||||
subprocess.call(command, shell=True)
|
||||
|
||||
command = 'yum install lsphp82* -y --skip-broken'
|
||||
subprocess.call(command, shell=True)
|
||||
|
||||
command = 'yum install lsphp83* -y --skip-broken'
|
||||
subprocess.call(command, shell=True)
|
||||
|
||||
if self.distro == cent8:
|
||||
command = 'dnf install lsphp71* lsphp72* lsphp73* lsphp74* lsphp80* --exclude lsphp73-pecl-zip --exclude *imagick* -y --skip-broken'
|
||||
subprocess.call(command, shell=True)
|
||||
|
||||
command = 'dnf install lsphp81* lsphp82* lsphp83* --exclude *imagick* -y --skip-broken'
|
||||
subprocess.call(command, shell=True)
|
||||
|
||||
if self.distro == openeuler:
|
||||
command = 'dnf install lsphp71* lsphp72* lsphp73* lsphp74* lsphp80* lsphp81* lsphp82* lsphp83* -y'
|
||||
subprocess.call(command, shell=True)
|
||||
install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
|
||||
InstallCyberPanel.stdOut("LiteSpeed PHPs successfully installed!", 1)
|
||||
|
||||
# Install individual PHP versions
|
||||
for version in php_versions:
|
||||
self.install_package(f'lsphp{version}*', '--skip-broken')
|
||||
|
||||
elif self.distro == cent8:
|
||||
# Install PHP versions in batches with exclusions
|
||||
exclude_flags = "--exclude lsphp73-pecl-zip --exclude *imagick*"
|
||||
|
||||
# First batch: PHP 7.x and 8.0
|
||||
versions_batch1 = ' '.join([f'lsphp{v}*' for v in php_versions[:5]])
|
||||
self.install_package(versions_batch1, f'{exclude_flags} --skip-broken')
|
||||
|
||||
# Second batch: PHP 8.1+
|
||||
versions_batch2 = ' '.join([f'lsphp{v}*' for v in php_versions[5:]])
|
||||
self.install_package(versions_batch2, f'{exclude_flags} --skip-broken')
|
||||
|
||||
elif self.distro == openeuler:
|
||||
# Install all PHP versions at once
|
||||
all_versions = ' '.join([f'lsphp{v}*' for v in php_versions])
|
||||
self.install_package(all_versions)
|
||||
|
||||
if self.distro != ubuntu:
|
||||
InstallCyberPanel.stdOut("LiteSpeed PHPs successfully installed!", 1)
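# For reference (illustrative), the batching above expands to plain package globs,
# e.g. on cent8:
#     ' '.join([f'lsphp{v}*' for v in php_versions[:5]])
#     # -> 'lsphp71* lsphp72* lsphp73* lsphp74* lsphp80*'
# which install_package() turns into one dnf call with
# '--exclude lsphp73-pecl-zip --exclude *imagick* --skip-broken' appended.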
|
||||
|
||||
def installMySQL(self, mysql):
|
||||
|
||||
|
|
@@ -322,17 +324,14 @@ class InstallCyberPanel:
|
|||
|
||||
if self.distro == ubuntu:
|
||||
|
||||
command = 'DEBIAN_FRONTEND=noninteractive apt-get install software-properties-common -y'
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR, True)
|
||||
|
||||
command = "DEBIAN_FRONTEND=noninteractive apt-get install apt-transport-https curl -y"
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR, True)
|
||||
command = 'DEBIAN_FRONTEND=noninteractive apt-get install software-properties-common apt-transport-https curl -y'
|
||||
install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR, True)
|
||||
|
||||
command = "mkdir -p /etc/apt/keyrings"
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
|
||||
command = "curl -o /etc/apt/keyrings/mariadb-keyring.pgp 'https://mariadb.org/mariadb_release_signing_key.pgp'"
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
RepoPath = '/etc/apt/sources.list.d/mariadb.sources'
|
||||
RepoContent = f"""
|
||||
# MariaDB 10.11 repository list - created 2023-12-11 07:53 UTC
|
||||
|
|
@@ -349,15 +348,27 @@ Signed-By: /etc/apt/keyrings/mariadb-keyring.pgp
|
|||
|
||||
if get_Ubuntu_release() > 21.00:
|
||||
command = 'curl -LsS https://downloads.mariadb.com/MariaDB/mariadb_repo_setup | sudo bash -s -- --mariadb-server-version=10.11'
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR, True)
|
||||
# WriteToFile = open(RepoPath, 'w')
|
||||
# WriteToFile.write(RepoContent)
|
||||
# WriteToFile.close()
|
||||
result = install_utils.call(command, self.distro, command, command, 1, 0, os.EX_OSERR, True)
|
||||
|
||||
# If the download fails, use manual repo configuration as fallback
|
||||
if result != 1:
|
||||
install_utils.writeToFile("MariaDB repo setup script failed, using manual configuration...")
|
||||
RepoPath = '/etc/apt/sources.list.d/mariadb.list'
|
||||
RepoContent = f"""# MariaDB 10.11 repository list - manual fallback
|
||||
deb [arch=amd64,arm64,ppc64el,s390x signed-by=/usr/share/keyrings/mariadb-keyring.pgp] https://mirror.mariadb.org/repo/10.11/ubuntu {get_Ubuntu_code_name()} main
|
||||
"""
|
||||
# Download and add MariaDB signing key
|
||||
command = 'mkdir -p /usr/share/keyrings && curl -fsSL https://mariadb.org/mariadb_release_signing_key.pgp | gpg --dearmor -o /usr/share/keyrings/mariadb-keyring.pgp'
|
||||
install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR, True)
|
||||
|
||||
WriteToFile = open(RepoPath, 'w')
|
||||
WriteToFile.write(RepoContent)
|
||||
WriteToFile.close()
|
||||
|
||||
|
||||
|
||||
command = 'DEBIAN_FRONTEND=noninteractive apt-get update -y'
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR, True)
|
||||
install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR, True)
|
||||
|
||||
|
||||
command = "DEBIAN_FRONTEND=noninteractive apt-get install mariadb-server -y"
|
||||
|
|
@@ -387,34 +398,34 @@ gpgcheck=1
|
|||
if type == 'cl' and version >= 88:
|
||||
|
||||
command = 'yum remove db-governor db-governor-mysql -y'
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR, True)
|
||||
install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR, True)
|
||||
|
||||
command = 'yum install governor-mysql -y'
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR, True)
|
||||
install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR, True)
|
||||
|
||||
command = '/usr/share/lve/dbgovernor/mysqlgovernor.py --mysql-version=mariadb106'
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR, True)
|
||||
install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR, True)
|
||||
|
||||
command = '/usr/share/lve/dbgovernor/mysqlgovernor.py --install --yes'
|
||||
|
||||
else:
|
||||
|
||||
command = 'curl -LsS https://downloads.mariadb.com/MariaDB/mariadb_repo_setup | sudo bash -s -- --mariadb-server-version=10.11'
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR, True)
|
||||
install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR, True)
|
||||
|
||||
command = 'yum remove mariadb* -y'
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR, True)
|
||||
install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR, True)
|
||||
|
||||
command = 'sudo dnf -qy module disable mariadb'
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR, True)
|
||||
install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR, True)
|
||||
|
||||
command = 'sudo dnf module reset mariadb -y'
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR, True)
|
||||
install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR, True)
|
||||
|
||||
|
||||
command = 'dnf install MariaDB-server MariaDB-client MariaDB-backup -y'
|
||||
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR, True)
|
||||
install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR, True)
|
||||
|
||||
############## Start mariadb ######################
|
||||
|
||||
|
|
@@ -431,18 +442,13 @@ gpgcheck=1
|
|||
|
||||
command = 'mariadb -u root -e "' + passwordCMD + '"'
|
||||
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 0, 0, os.EX_OSERR)
|
||||
install_utils.call(command, self.distro, command, command, 0, 0, os.EX_OSERR)
|
||||
|
||||
def startMariaDB(self):
|
||||
|
||||
if self.remotemysql == 'OFF':
|
||||
############## Start mariadb ######################
|
||||
if self.distro == cent8 or self.distro == ubuntu:
|
||||
command = 'systemctl start mariadb'
|
||||
else:
|
||||
command = "systemctl start mariadb"
|
||||
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
self.manage_service('mariadb', 'start')
|
||||
|
||||
############## Enable mariadb at system startup ######################
|
||||
|
||||
|
|
@@ -451,12 +457,7 @@ gpgcheck=1
|
|||
if os.path.exists('/etc/systemd/system/mariadb.service'):
|
||||
os.remove('/etc/systemd/system/mariadb.service')
|
||||
|
||||
if self.distro == ubuntu:
|
||||
command = "systemctl enable mariadb"
|
||||
else:
|
||||
command = "systemctl enable mariadb"
|
||||
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
self.manage_service('mariadb', 'enable')
|
||||
|
||||
def fixMariaDB(self):
|
||||
self.stdOut("Setup MariaDB so it can support Cyberpanel's needs")
|
||||
|
|
@@ -489,49 +490,38 @@ gpgcheck=1
|
|||
|
||||
def installPureFTPD(self):
|
||||
if self.distro == ubuntu:
|
||||
command = 'DEBIAN_FRONTEND=noninteractive apt install pure-ftpd-mysql -y'
|
||||
os.system(command)
|
||||
self.install_package('pure-ftpd-mysql')
|
||||
|
||||
if get_Ubuntu_release() == 18.10:
|
||||
command = 'wget https://rep.cyberpanel.net/pure-ftpd-common_1.0.47-3_all.deb'
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
|
||||
command = 'wget https://rep.cyberpanel.net/pure-ftpd-mysql_1.0.47-3_amd64.deb'
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
|
||||
command = 'dpkg --install --force-confold pure-ftpd-common_1.0.47-3_all.deb'
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
|
||||
command = 'dpkg --install --force-confold pure-ftpd-mysql_1.0.47-3_amd64.deb'
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
|
||||
elif self.distro == centos:
|
||||
command = "yum install -y pure-ftpd"
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
elif self.distro == cent8 or self.distro == openeuler:
|
||||
command = 'dnf install pure-ftpd -y'
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
# Special handling for Ubuntu 18.10
|
||||
packages = [
|
||||
('pure-ftpd-common_1.0.47-3_all.deb', 'wget https://rep.cyberpanel.net/pure-ftpd-common_1.0.47-3_all.deb'),
|
||||
('pure-ftpd-mysql_1.0.47-3_amd64.deb', 'wget https://rep.cyberpanel.net/pure-ftpd-mysql_1.0.47-3_amd64.deb')
|
||||
]
|
||||
|
||||
for filename, wget_cmd in packages:
|
||||
install_utils.call(wget_cmd, self.distro, wget_cmd, wget_cmd, 1, 1, os.EX_OSERR)
|
||||
command = f'dpkg --install --force-confold {filename}'
|
||||
install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
else:
|
||||
self.install_package('pure-ftpd')
|
||||
|
||||
####### Install pureftpd to system startup
|
||||
|
||||
command = "systemctl enable " + install.preFlightsChecks.pureFTPDServiceName(self.distro)
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
|
||||
###### FTP Groups and user settings
|
||||
|
||||
command = 'groupadd -g 2001 ftpgroup'
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
|
||||
command = 'useradd -u 2001 -s /bin/false -d /bin/null -c "pureftpd user" -g ftpgroup ftpuser'
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
|
||||
def startPureFTPD(self):
|
||||
############## Start pureftpd ######################
|
||||
if self.distro == ubuntu:
|
||||
command = 'systemctl start pure-ftpd-mysql'
|
||||
else:
|
||||
command = 'systemctl start pure-ftpd'
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
self.manage_service('pureftpd', 'start')
|
||||
|
||||
def installPureFTPDConfigurations(self, mysql):
|
||||
try:
|
||||
|
|
@@ -550,22 +540,12 @@ gpgcheck=1
|
|||
else:
|
||||
command = 'openssl req -x509 -nodes -days 7300 -newkey rsa:2048 -subj "/C=US/ST=Denial/L=Springfield/O=Dis/CN=www.example.com" -keyout /etc/ssl/private/pure-ftpd.pem -out /etc/ssl/private/pure-ftpd.pem'
|
||||
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
install_utils.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
|
||||
os.chdir(self.cwd)
|
||||
ftpdPath = "/etc/pure-ftpd"
|
||||
|
||||
if os.path.exists(ftpdPath):
|
||||
shutil.rmtree(ftpdPath)
|
||||
if mysql == 'Two':
|
||||
shutil.copytree("pure-ftpd", ftpdPath)
|
||||
else:
|
||||
shutil.copytree("pure-ftpd-one", ftpdPath)
|
||||
else:
|
||||
if mysql == 'Two':
|
||||
shutil.copytree("pure-ftpd", ftpdPath)
|
||||
else:
|
||||
shutil.copytree("pure-ftpd-one", ftpdPath)
|
||||
self.copy_config_file("pure-ftpd", ftpdPath, mysql)
|
||||
|
||||
if self.distro == ubuntu:
|
||||
try:
|
||||
|
|
@@ -592,13 +572,13 @@ gpgcheck=1
|
|||
|
||||
if self.remotemysql == 'ON':
|
||||
command = "sed -i 's|localhost|%s|g' %s" % (self.mysqlhost, ftpConfPath)
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
|
||||
command = "sed -i 's|3306|%s|g' %s" % (self.mysqlport, ftpConfPath)
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
|
||||
command = "sed -i 's|MYSQLSocket /var/lib/mysql/mysql.sock||g' %s" % (ftpConfPath)
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
|
||||
if self.distro == ubuntu:
|
||||
|
||||
|
|
@@ -624,13 +604,13 @@ gpgcheck=1
|
|||
subprocess.call(command, shell=True)
|
||||
|
||||
command = 'ln -s /etc/pure-ftpd/conf/MySQLConfigFile /etc/pure-ftpd/auth/30mysql'
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
|
||||
command = 'ln -s /etc/pure-ftpd/conf/UnixAuthentication /etc/pure-ftpd/auth/65unix'
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
|
||||
command = 'systemctl restart pure-ftpd-mysql.service'
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
|
||||
|
||||
|
||||
|
|
@@ -639,10 +619,10 @@ gpgcheck=1
|
|||
### change mysql md5 to crypt
|
||||
|
||||
command = "sed -i 's/MYSQLCrypt md5/MYSQLCrypt crypt/g' /etc/pure-ftpd/db/mysql.conf"
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
|
||||
command = "systemctl restart pure-ftpd-mysql.service"
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
else:
|
||||
|
||||
try:
|
||||
|
|
@@ -652,7 +632,7 @@ gpgcheck=1
|
|||
|
||||
if type == 'al' and version >= 90:
|
||||
command = "sed -i 's/MYSQLCrypt md5/MYSQLCrypt crypt/g' /etc/pure-ftpd/pureftpd-mysql.conf"
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
except:
|
||||
pass
|
||||
|
||||
|
|
@@ -666,43 +646,33 @@ gpgcheck=1
|
|||
|
||||
def installPowerDNS(self):
|
||||
try:
|
||||
|
||||
if self.distro == ubuntu or self.distro == cent8 or self.distro == openeuler:
|
||||
command = 'systemctl stop systemd-resolved'
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
command = 'systemctl disable systemd-resolved.service'
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
# Stop and disable systemd-resolved
|
||||
self.manage_service('systemd-resolved', 'stop')
|
||||
self.manage_service('systemd-resolved.service', 'disable')
|
||||
|
||||
try:
|
||||
os.rename('/etc/resolv.conf', 'etc/resolved.conf')
|
||||
os.rename('/etc/resolv.conf', '/etc/resolv.conf.bak')
|
||||
except OSError as e:
|
||||
if e.errno != errno.EEXIST and e.errno != errno.ENOENT:
|
||||
InstallCyberPanel.stdOut("[ERROR] Unable to rename /etc/resolv.conf to install PowerDNS: " +
|
||||
str(e), 1, 1, os.EX_OSERR)
|
||||
try:
|
||||
os.remove('/etc/resolv.conf')
|
||||
except OSError as e1:
|
||||
InstallCyberPanel.stdOut(
|
||||
"[ERROR] Unable to remove existing /etc/resolv.conf to install PowerDNS: " +
|
||||
str(e1), 1, 1, os.EX_OSERR)
|
||||
|
||||
# try:
|
||||
# f = open('/etc/resolv.conf', 'a')
|
||||
# f.write('nameserver 8.8.8.8')
|
||||
# f.close()
|
||||
# except IOError as e:
|
||||
# InstallCyberPanel.stdOut("[ERROR] Unable to create /etc/resolv.conf: " + str(e) +
|
||||
# ". This may need to be fixed manually as 'echo \"nameserver 8.8.8.8\"> "
|
||||
# "/etc/resolv.conf'", 1, 1, os.EX_OSERR)
|
||||
|
||||
# Create a temporary resolv.conf with Google DNS for package installation
|
||||
try:
|
||||
with open('/etc/resolv.conf', 'w') as f:
|
||||
f.write('nameserver 8.8.8.8\n')
|
||||
f.write('nameserver 8.8.4.4\n')
|
||||
InstallCyberPanel.stdOut("Created temporary /etc/resolv.conf with Google DNS", 1)
|
||||
except IOError as e:
|
||||
InstallCyberPanel.stdOut("[ERROR] Unable to create /etc/resolv.conf: " + str(e), 1, 1, os.EX_OSERR)
|
||||
|
||||
# Install PowerDNS packages
|
||||
if self.distro == ubuntu:
|
||||
command = "DEBIAN_FRONTEND=noninteractive apt-get -y install pdns-server pdns-backend-mysql"
|
||||
os.system(command)
|
||||
self.install_package('pdns-server pdns-backend-mysql')
|
||||
return 1
|
||||
else:
|
||||
command = 'yum -y install pdns pdns-backend-mysql'
|
||||
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
self.install_package('pdns pdns-backend-mysql')
|
||||
|
||||
except BaseException as msg:
|
||||
logging.InstallLog.writeToFile('[ERROR] ' + str(msg) + " [powerDNS]")
|
||||
|
|
@@ -718,17 +688,7 @@ gpgcheck=1
|
|||
else:
|
||||
dnsPath = "/etc/powerdns/pdns.conf"
|
||||
|
||||
if os.path.exists(dnsPath):
|
||||
os.remove(dnsPath)
|
||||
if mysql == 'Two':
|
||||
shutil.copy("dns/pdns.conf", dnsPath)
|
||||
else:
|
||||
shutil.copy("dns-one/pdns.conf", dnsPath)
|
||||
else:
|
||||
if mysql == 'Two':
|
||||
shutil.copy("dns/pdns.conf", dnsPath)
|
||||
else:
|
||||
shutil.copy("dns-one/pdns.conf", dnsPath)
|
||||
self.copy_config_file("dns", dnsPath, mysql)
|
||||
|
||||
data = open(dnsPath, "r").readlines()
|
||||
|
||||
|
|
@@ -749,10 +709,10 @@ gpgcheck=1
|
|||
|
||||
if self.remotemysql == 'ON':
|
||||
command = "sed -i 's|gmysql-host=localhost|gmysql-host=%s|g' %s" % (self.mysqlhost, dnsPath)
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
|
||||
command = "sed -i 's|gmysql-port=3306|gmysql-port=%s|g' %s" % (self.mysqlport, dnsPath)
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
install_utils.call(command, self.distro, command, command, 1, 1, os.EX_OSERR)
|
||||
|
||||
InstallCyberPanel.stdOut("PowerDNS configured!", 1)
|
||||
|
||||
|
|
@@ -765,17 +725,14 @@ gpgcheck=1
|
|||
|
||||
############## Start PowerDNS ######################
|
||||
|
||||
command = 'systemctl enable pdns'
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
|
||||
command = 'systemctl start pdns'
|
||||
install.preFlightsChecks.call(command, self.distro, command, command, 1, 0, os.EX_OSERR)
|
||||
self.manage_service('pdns', 'enable')
|
||||
self.manage_service('pdns', 'start')
|
||||
|
||||
|
||||
def Main(cwd, mysql, distro, ent, serial=None, port="8090", ftp=None, dns=None, publicip=None, remotemysql=None,
|
||||
mysqlhost=None, mysqldb=None, mysqluser=None, mysqlpassword=None, mysqlport=None):
|
||||
InstallCyberPanel.mysqlPassword = randomPassword.generate_pass()
|
||||
InstallCyberPanel.mysql_Root_password = randomPassword.generate_pass()
|
||||
InstallCyberPanel.mysqlPassword = install_utils.generate_pass()
|
||||
InstallCyberPanel.mysql_Root_password = install_utils.generate_pass()
|
||||
|
||||
file_name = '/etc/cyberpanel/mysqlPassword'
|
||||
|
||||
|
|
@@ -802,18 +759,18 @@ def Main(cwd, mysql, distro, ent, serial=None, port="8090", ftp=None, dns=None,
|
|||
|
||||
try:
|
||||
command = 'chmod 640 %s' % (file_name)
|
||||
install.preFlightsChecks.call(command, distro, '[chmod]',
|
||||
install_utils.call(command, distro, '[chmod]',
|
||||
'',
|
||||
1, 0, os.EX_OSERR)
|
||||
command = 'chown root:cyberpanel %s' % (file_name)
|
||||
install.preFlightsChecks.call(command, distro, '[chmod]',
|
||||
install_utils.call(command, distro, '[chmod]',
|
||||
'',
|
||||
1, 0, os.EX_OSERR)
|
||||
except:
|
||||
pass
|
||||
|
||||
if distro == centos:
|
||||
InstallCyberPanel.mysqlPassword = randomPassword.generate_pass()
|
||||
InstallCyberPanel.mysqlPassword = install_utils.generate_pass()
|
||||
else:
|
||||
InstallCyberPanel.mysqlPassword = InstallCyberPanel.mysql_Root_password
|
||||
|
||||
|
|
|
|||
|
|
@@ -0,0 +1,384 @@
|
|||
#!/usr/bin/env python
|
||||
"""
|
||||
Common utility functions for CyberPanel installation scripts.
|
||||
This module contains shared functions used by both install.py and installCyberPanel.py
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import logging
|
||||
import subprocess
|
||||
import shlex
|
||||
import secrets
|
||||
import string
|
||||
from os.path import exists
|
||||
|
||||
|
||||
def FetchCloudLinuxAlmaVersionVersion():
|
||||
"""
|
||||
Detect CloudLinux or AlmaLinux version by parsing /etc/os-release
|
||||
Returns: version string or -1 if not found
|
||||
"""
|
||||
if os.path.exists('/etc/os-release'):
|
||||
data = open('/etc/os-release', 'r').read()
|
||||
if (data.find('CloudLinux') > -1 or data.find('cloudlinux') > -1) and (data.find('8.9') > -1 or data.find('Anatoly Levchenko') > -1 or data.find('VERSION="8.') > -1):
|
||||
return 'cl-89'
|
||||
elif (data.find('CloudLinux') > -1 or data.find('cloudlinux') > -1) and (data.find('8.8') > -1 or data.find('Anatoly Filipchenko') > -1):
|
||||
return 'cl-88'
|
||||
elif (data.find('CloudLinux') > -1 or data.find('cloudlinux') > -1) and (data.find('9.4') > -1 or data.find('VERSION="9.') > -1):
|
||||
return 'cl-88'
|
||||
elif (data.find('AlmaLinux') > -1 or data.find('almalinux') > -1) and (data.find('8.9') > -1 or data.find('Midnight Oncilla') > -1 or data.find('VERSION="8.') > -1):
|
||||
return 'al-88'
|
||||
elif (data.find('AlmaLinux') > -1 or data.find('almalinux') > -1) and (data.find('8.7') > -1 or data.find('Stone Smilodon') > -1):
|
||||
return 'al-87'
|
||||
elif (data.find('AlmaLinux') > -1 or data.find('almalinux') > -1) and (data.find('9.4') > -1 or data.find('9.3') > -1 or data.find('Shamrock Pampas') > -1 or data.find('Seafoam Ocelot') > -1 or data.find('VERSION="9.') > -1):
|
||||
return 'al-93'
|
||||
else:
|
||||
return -1
|
||||
|
||||
|
||||
def get_Ubuntu_release(use_print=False, exit_on_error=True):
|
||||
"""
|
||||
Get Ubuntu release version from /etc/lsb-release
|
||||
|
||||
Args:
|
||||
use_print: If True, use print() for errors, otherwise use the provided output function
|
||||
exit_on_error: If True, exit on error
|
||||
|
||||
Returns: float release number or -1 if not found
|
||||
"""
|
||||
release = -1
|
||||
if exists("/etc/lsb-release"):
|
||||
distro_file = "/etc/lsb-release"
|
||||
with open(distro_file) as f:
|
||||
for line in f:
|
||||
if line[:16] == "DISTRIB_RELEASE=":
|
||||
release = float(line[16:])
|
||||
|
||||
if release == -1:
|
||||
error_msg = "Can't find distro release name in " + distro_file + " - fatal error"
|
||||
if use_print:
|
||||
print(error_msg)
|
||||
else:
|
||||
# This will be overridden by the calling module
|
||||
return -1
|
||||
|
||||
else:
|
||||
error_msg = "Can't find linux release file - fatal error"
|
||||
if hasattr(logging, 'InstallLog'):
|
||||
logging.InstallLog.writeToFile(error_msg)
|
||||
if use_print:
|
||||
print(error_msg)
|
||||
if exit_on_error:
|
||||
os._exit(os.EX_UNAVAILABLE)
|
||||
|
||||
return release
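# Example (illustrative): a line such as 'DISTRIB_RELEASE=22.04\n' in /etc/lsb-release
# yields a numeric release that callers can compare directly:
#     line = 'DISTRIB_RELEASE=22.04\n'
#     release = float(line[16:])   # -> 22.04, so get_Ubuntu_release() > 21.00 holds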
|
||||
|
||||
|
||||
# ANSI color codes
|
||||
class Colors:
|
||||
HEADER = '\033[95m' # Purple
|
||||
INFO = '\033[94m' # Blue
|
||||
SUCCESS = '\033[92m' # Green
|
||||
WARNING = '\033[93m' # Yellow
|
||||
ERROR = '\033[91m' # Red
|
||||
ENDC = '\033[0m' # Reset
|
||||
BOLD = '\033[1m' # Bold
|
||||
UNDERLINE = '\033[4m' # Underline
|
||||
|
||||
|
||||
def get_message_color(message):
|
||||
"""
|
||||
Determine the appropriate color based on message content
|
||||
|
||||
Args:
|
||||
message: The message to analyze
|
||||
|
||||
Returns:
|
||||
str: ANSI color code
|
||||
"""
|
||||
message_lower = message.lower()
|
||||
|
||||
# Error messages
|
||||
if any(word in message_lower for word in ['error', 'failed', 'fatal', 'critical', 'unable']):
|
||||
return Colors.ERROR
|
||||
|
||||
# Warning messages
|
||||
elif any(word in message_lower for word in ['warning', 'warn', 'caution', 'alert']):
|
||||
return Colors.WARNING
|
||||
|
||||
# Success messages
|
||||
elif any(word in message_lower for word in ['success', 'completed', 'installed', 'finished', 'done', 'enabled']):
|
||||
return Colors.SUCCESS
|
||||
|
||||
# Running/Processing messages
|
||||
elif any(word in message_lower for word in ['running', 'installing', 'downloading', 'processing', 'starting', 'configuring']):
|
||||
return Colors.INFO
|
||||
|
||||
# Default color
|
||||
else:
|
||||
return Colors.HEADER
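# A few sample classifications (illustrative), assuming the keyword lists above:
#     get_message_color('Unable to install firewalld')    # -> Colors.ERROR
#     get_message_color('MariaDB installed successfully')  # -> Colors.SUCCESS
#     get_message_color('Downloading lscp.tar.gz')         # -> Colors.INFO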
|
||||
|
||||
|
||||
def stdOut(message, log=0, do_exit=0, code=os.EX_OK):
|
||||
"""
|
||||
Standard output function with timestamps, coloring, and logging
|
||||
|
||||
Args:
|
||||
message: Message to output
|
||||
log: If 1, write to log file
|
||||
do_exit: If 1, exit after outputting
|
||||
code: Exit code to use if do_exit is 1
|
||||
"""
|
||||
# Get appropriate color for the message
|
||||
color = get_message_color(message)
|
||||
|
||||
# Check if terminal supports color
|
||||
try:
|
||||
# Check if output is to a terminal
|
||||
if not sys.stdout.isatty():
|
||||
color = ''
|
||||
color_end = ''
|
||||
else:
|
||||
color_end = Colors.ENDC
|
||||
except:
|
||||
color = ''
|
||||
color_end = ''
|
||||
|
||||
# Format timestamps
|
||||
timestamp = time.strftime("%m.%d.%Y_%H-%M-%S")
|
||||
|
||||
print("\n\n")
|
||||
print(f"{color}[{timestamp}] #########################################################################{color_end}\n")
|
||||
print(f"{color}[{timestamp}] {message}{color_end}\n")
|
||||
print(f"{color}[{timestamp}] #########################################################################{color_end}\n")
|
||||
|
||||
if log and hasattr(logging, 'InstallLog'):
|
||||
logging.InstallLog.writeToFile(message)
|
||||
if do_exit:
|
||||
if hasattr(logging, 'InstallLog'):
|
||||
logging.InstallLog.writeToFile(message)
|
||||
sys.exit(code)
|
||||
|
||||
|
||||
def format_restart_litespeed_command(server_root_path):
|
||||
"""
|
||||
Format the LiteSpeed restart command
|
||||
|
||||
Args:
|
||||
server_root_path: Root path of the server installation
|
||||
|
||||
Returns: Formatted command string
|
||||
"""
|
||||
return '%sbin/lswsctrl restart' % (server_root_path)
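# Example (illustrative; server_root_path is expected to end with a slash):
#     format_restart_litespeed_command('/usr/local/lsws/')
#     # -> '/usr/local/lsws/bin/lswsctrl restart'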
|
||||
|
||||
|
||||
# Distribution constants
|
||||
ubuntu = 0
|
||||
centos = 1
|
||||
cent8 = 2
|
||||
openeuler = 3
|
||||
|
||||
|
||||
def get_distro():
|
||||
"""
|
||||
Detect Linux distribution
|
||||
|
||||
Returns: Distribution constant (ubuntu, centos, cent8, or openeuler)
|
||||
"""
|
||||
distro = -1
|
||||
distro_file = ""
|
||||
if exists("/etc/lsb-release"):
|
||||
distro_file = "/etc/lsb-release"
|
||||
with open(distro_file) as f:
|
||||
for line in f:
|
||||
if line == "DISTRIB_ID=Ubuntu\n":
|
||||
distro = ubuntu
|
||||
|
||||
elif exists("/etc/redhat-release"):
|
||||
distro_file = "/etc/redhat-release"
|
||||
distro = centos
|
||||
|
||||
data = open('/etc/redhat-release', 'r').read()
|
||||
|
||||
if data.find('CentOS Linux release 8') > -1:
|
||||
return cent8
|
||||
## if almalinux 9 then pretty much same as cent8
|
||||
if data.find('AlmaLinux release 8') > -1 or data.find('AlmaLinux release 9') > -1:
|
||||
return cent8
|
||||
if data.find('Rocky Linux release 8') > -1 or data.find('Rocky Linux 8') > -1 or data.find('rocky:8') > -1:
|
||||
return cent8
|
||||
if data.find('CloudLinux 8') > -1 or data.find('cloudlinux 8') > -1:
|
||||
return cent8
|
||||
|
||||
else:
|
||||
if exists("/etc/openEuler-release"):
|
||||
distro_file = "/etc/openEuler-release"
|
||||
distro = openeuler
|
||||
|
||||
else:
|
||||
if hasattr(logging, 'InstallLog'):
|
||||
logging.InstallLog.writeToFile("Can't find linux release file - fatal error")
|
||||
print("Can't find linux release file - fatal error")
|
||||
os._exit(os.EX_UNAVAILABLE)
|
||||
|
||||
if distro == -1:
|
||||
error_msg = "Can't find distro name in " + distro_file + " - fatal error"
|
||||
if hasattr(logging, 'InstallLog'):
|
||||
logging.InstallLog.writeToFile(error_msg)
|
||||
print(error_msg)
|
||||
os._exit(os.EX_UNAVAILABLE)
|
||||
|
||||
return distro
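# Example (illustrative): on an AlmaLinux 8/9 or Rocky 8 host /etc/redhat-release
# matches one of the checks above, so get_distro() returns cent8 and the caller
# treats the system like an EL8-family distribution.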
|
||||
|
||||
|
||||
def get_package_install_command(distro, package_name, options=""):
|
||||
"""
|
||||
Get the package installation command for a specific distribution
|
||||
|
||||
Args:
|
||||
distro: Distribution constant
|
||||
package_name: Name of the package to install
|
||||
options: Additional options for the package manager
|
||||
|
||||
Returns:
|
||||
tuple: (command, shell) where shell indicates if shell=True is needed
|
||||
"""
|
||||
if distro == ubuntu:
|
||||
command = f"DEBIAN_FRONTEND=noninteractive apt-get -y install {package_name} {options}"
|
||||
shell = True
|
||||
elif distro == centos:
|
||||
command = f"yum install -y {package_name} {options}"
|
||||
shell = False
|
||||
else: # cent8, openeuler
|
||||
command = f"dnf install -y {package_name} {options}"
|
||||
shell = False
|
||||
|
||||
return command, shell
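# Examples (illustrative): the same package spec maps to the distro's native tool.
#     get_package_install_command(ubuntu, 'rsync')
#     # -> ('DEBIAN_FRONTEND=noninteractive apt-get -y install rsync ', True)   note the trailing space from empty options
#     get_package_install_command(cent8, 'rsync', '--skip-broken')
#     # -> ('dnf install -y rsync --skip-broken', False)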
|
||||
|
||||
|
||||
def get_package_remove_command(distro, package_name):
|
||||
"""
|
||||
Get the package removal command for a specific distribution
|
||||
|
||||
Args:
|
||||
distro: Distribution constant
|
||||
package_name: Name of the package to remove
|
||||
|
||||
Returns:
|
||||
tuple: (command, shell) where shell indicates if shell=True is needed
|
||||
"""
|
||||
if distro == ubuntu:
|
||||
command = f"DEBIAN_FRONTEND=noninteractive apt-get -y remove {package_name}"
|
||||
shell = True
|
||||
elif distro == centos:
|
||||
command = f"yum remove -y {package_name}"
|
||||
shell = False
|
||||
else: # cent8, openeuler
|
||||
command = f"dnf remove -y {package_name}"
|
||||
shell = False
|
||||
|
||||
return command, shell
|
||||
|
||||
|
||||
def resFailed(distro, res):
|
||||
"""
|
||||
Check if a command execution result indicates failure
|
||||
|
||||
Args:
|
||||
distro: Distribution constant
|
||||
res: Return code from subprocess
|
||||
|
||||
Returns:
|
||||
bool: True if failed, False if successful
|
||||
"""
|
||||
if distro == ubuntu and res != 0:
|
||||
return True
|
||||
elif (distro == centos or distro == cent8 or distro == openeuler) and res != 0:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def call(command, distro, bracket, message, log=0, do_exit=0, code=os.EX_OK, shell=False):
|
||||
"""
|
||||
Execute a shell command with retry logic and error handling
|
||||
|
||||
Args:
|
||||
command: Command to execute
|
||||
distro: Distribution constant
|
||||
bracket: Not used (kept for compatibility)
|
||||
message: Description of the command for logging
|
||||
log: If 1, write to log file
|
||||
do_exit: If 1, exit on failure
|
||||
code: Exit code to use if do_exit is 1
|
||||
shell: If True, execute through shell
|
||||
|
||||
Returns:
|
||||
bool: True if successful, False if failed
|
||||
"""
|
||||
finalMessage = 'Running: %s' % (message)
|
||||
stdOut(finalMessage, log)
|
||||
count = 0
|
||||
while True:
|
||||
if shell == False:
|
||||
res = subprocess.call(shlex.split(command))
|
||||
else:
|
||||
res = subprocess.call(command, shell=True)
|
||||
|
||||
if resFailed(distro, res):
|
||||
count = count + 1
|
||||
finalMessage = 'Running %s failed. Running again, try number %s' % (message, str(count))
|
||||
stdOut(finalMessage)
|
||||
if count == 3:
|
||||
fatal_message = ''
|
||||
if do_exit:
|
||||
fatal_message = '. Fatal error, see /var/log/installLogs.txt for full details'
|
||||
|
||||
stdOut("[ERROR] We are not able to run " + message + ' return code: ' + str(res) +
|
||||
fatal_message + ".", 1, do_exit, code)
|
||||
return False
|
||||
else:
|
||||
stdOut('Successfully ran: %s.' % (message), log)
|
||||
break
|
||||
|
||||
return True
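# Usage sketch (illustrative; constants and functions as defined in this module):
#     ok = call('systemctl status lscpd', cent8, 'lscpd status', 'lscpd status',
#               log=1, do_exit=0)
#     # reruns the command up to 3 times on a non-zero exit; with do_exit=0 it then
#     # returns False instead of exiting, and returns True on success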
|
||||
|
||||
|
||||
# Character sets for password generation (kept for backward compatibility)
char_set = {
    'small': 'abcdefghijklmnopqrstuvwxyz',
    'nums': '0123456789',
    'big': 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
}


def generate_pass(length=14):
    """
    Generate a cryptographically secure random password

    Args:
        length: Length of the password to generate (default 14)

    Returns:
        str: Random password containing uppercase, lowercase letters and digits
    """
    alphabet = string.ascii_letters + string.digits
    return ''.join(secrets.choice(alphabet) for _ in range(length))


def generate_random_string(length=32, include_special=False):
    """
    Generate a random string with optional special characters

    Args:
        length: Length of the string to generate
        include_special: If True, include special characters

    Returns:
        str: Random string
    """
    alphabet = string.ascii_letters + string.digits
    if include_special:
        alphabet += string.punctuation
    return ''.join(secrets.choice(alphabet) for _ in range(length))
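Typical usage, for reference (outputs are random; the values here are only indicative):

# Illustrative: a 14-character database password and a 32-character token
# with punctuation; both draw from the secrets module.
db_password = generate_pass()
api_token = generate_random_string(32, include_special=True)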
@@ -579,7 +579,7 @@ context /.well-known/acme-challenge {
            return 1

    @staticmethod
    def obtainSSLForADomain(virtualHostName, adminEmail, sslpath, aliasDomain=None):
    def obtainSSLForADomain(virtualHostName, adminEmail, sslpath, aliasDomain=None, isHostname=False):
        from plogical.acl import ACLManager
        from plogical.sslv2 import sslUtilities as sslv2
        from plogical.customACME import CustomACME
@@ -609,11 +609,11 @@ context /.well-known/acme-challenge {
        # Start with just the main domain
        domains = [virtualHostName]

        # Check if www subdomain has DNS records before adding it
        if sslUtilities.checkDNSRecords(f'www.{virtualHostName}'):
        # Check if www subdomain has DNS records before adding it (skip for hostnames)
        if not isHostname and sslUtilities.checkDNSRecords(f'www.{virtualHostName}'):
            domains.append(f'www.{virtualHostName}')
            logging.CyberCPLogFileWriter.writeToFile(f"www.{virtualHostName} has DNS records, including in SSL request")
        else:
        elif not isHostname:
            logging.CyberCPLogFileWriter.writeToFile(f"www.{virtualHostName} has no DNS records, excluding from SSL request")

        if aliasDomain:
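The effect of the new isHostname flag on the certificate's domain list can be shown in isolation; a hedged sketch using the same checkDNSRecords helper (domain names are illustrative):

# Illustrative: for a regular site, www. is added only when it resolves;
# for a server hostname the www. lookup is skipped entirely.
def build_domain_list(virtualHostName, isHostname):
    domains = [virtualHostName]
    if not isHostname and sslUtilities.checkDNSRecords(f'www.{virtualHostName}'):
        domains.append(f'www.{virtualHostName}')
    return domains

build_domain_list('example.com', isHostname=False)      # may include 'www.example.com'
build_domain_list('srv1.example.com', isHostname=True)  # always just the hostname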
@@ -648,11 +648,11 @@ context /.well-known/acme-challenge {
        # Start with just the main domain
        domains = [virtualHostName]

        # Check if www subdomain has DNS records before adding it
        if sslUtilities.checkDNSRecords(f'www.{virtualHostName}'):
        # Check if www subdomain has DNS records before adding it (skip for hostnames)
        if not isHostname and sslUtilities.checkDNSRecords(f'www.{virtualHostName}'):
            domains.append(f'www.{virtualHostName}')
            logging.CyberCPLogFileWriter.writeToFile(f"www.{virtualHostName} has DNS records, including in SSL request")
        else:
        elif not isHostname:
            logging.CyberCPLogFileWriter.writeToFile(f"www.{virtualHostName} has no DNS records, excluding from SSL request")

        if aliasDomain:
@@ -692,16 +692,17 @@ context /.well-known/acme-challenge {
        # Build domain list for acme.sh
        domain_list = " -d " + virtualHostName

        # Check if www subdomain has DNS records
        if sslUtilities.checkDNSRecords(f'www.{virtualHostName}'):
        # Check if www subdomain has DNS records (skip for hostnames)
        if not isHostname and sslUtilities.checkDNSRecords(f'www.{virtualHostName}'):
            domain_list += " -d www." + virtualHostName
            logging.CyberCPLogFileWriter.writeToFile(f"www.{virtualHostName} has DNS records, including in acme.sh SSL request")
        else:
        elif not isHostname:
            logging.CyberCPLogFileWriter.writeToFile(f"www.{virtualHostName} has no DNS records, excluding from acme.sh SSL request")

        command = acmePath + " --issue" + domain_list \
                  + ' --cert-file ' + existingCertPath + '/cert.pem' + ' --key-file ' + existingCertPath + '/privkey.pem' \
                  + ' --fullchain-file ' + existingCertPath + '/fullchain.pem' + ' -w /usr/local/lsws/Example/html -k ec-256 --force --staging'
                  + ' --fullchain-file ' + existingCertPath + '/fullchain.pem' + ' -w /usr/local/lsws/Example/html -k ec-256 --force --staging' \
                  + ' --webroot-path /usr/local/lsws/Example/html'

        if ProcessUtilities.decideServer() == ProcessUtilities.OLS:
            result = subprocess.run(command, capture_output=True, universal_newlines=True, shell=True)
@@ -711,7 +712,8 @@ context /.well-known/acme-challenge {
        if result.returncode == 0:
            command = acmePath + " --issue" + domain_list \
                      + ' --cert-file ' + existingCertPath + '/cert.pem' + ' --key-file ' + existingCertPath + '/privkey.pem' \
                      + ' --fullchain-file ' + existingCertPath + '/fullchain.pem' + ' -w /usr/local/lsws/Example/html -k ec-256 --force --server letsencrypt'
                      + ' --fullchain-file ' + existingCertPath + '/fullchain.pem' + ' -w /usr/local/lsws/Example/html -k ec-256 --force --server letsencrypt' \
                      + ' --webroot-path /usr/local/lsws/Example/html'

            result = subprocess.run(command, capture_output=True, universal_newlines=True, shell=True)
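The two-step pattern above, issuing against the Let's Encrypt staging CA first and re-issuing against production only when staging succeeds, condensed into a standalone, hypothetical sketch (issue_with_acme is not a function in this file; the paths and domain are illustrative):

# Hedged sketch of the staging-then-production issuance flow used above.
import subprocess

def issue_with_acme(acme_path, domain_list, cert_dir, server_args):
    command = (acme_path + " --issue" + domain_list
               + ' --cert-file ' + cert_dir + '/cert.pem'
               + ' --key-file ' + cert_dir + '/privkey.pem'
               + ' --fullchain-file ' + cert_dir + '/fullchain.pem'
               + ' -w /usr/local/lsws/Example/html -k ec-256 --force ' + server_args)
    return subprocess.run(command, capture_output=True, universal_newlines=True, shell=True)

staging = issue_with_acme('/root/.acme.sh/acme.sh', ' -d example.com',
                          '/etc/letsencrypt/live/example.com', '--staging')
if staging.returncode == 0:
    issue_with_acme('/root/.acme.sh/acme.sh', ' -d example.com',
                    '/etc/letsencrypt/live/example.com', '--server letsencrypt')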
@@ -763,9 +765,37 @@ context /.well-known/acme-challenge {
        return 0


def issueSSLForDomain(domain, adminEmail, sslpath, aliasDomain=None):
def issueSSLForDomain(domain, adminEmail, sslpath, aliasDomain=None, isHostname=False):
    try:
        if sslUtilities.obtainSSLForADomain(domain, adminEmail, sslpath, aliasDomain) == 1:
        # Check if certificate already exists and try to renew it first
        existingCertPath = '/etc/letsencrypt/live/' + domain + '/fullchain.pem'
        if os.path.exists(existingCertPath):
            logging.CyberCPLogFileWriter.writeToFile(f"Certificate exists for {domain}, attempting renewal...")

            # Try to renew using acme.sh
            acmePath = '/root/.acme.sh/acme.sh'
            if os.path.exists(acmePath):
                # First set the webroot path for the domain
                command = f'{acmePath} --update-account --accountemail {adminEmail}'
                subprocess.call(command, shell=True)

                # Build domain list for renewal
                renewal_domains = f'-d {domain}'
                if not isHostname and sslUtilities.checkDNSRecords(f'www.{domain}'):
                    renewal_domains += f' -d www.{domain}'

                # Try to renew with explicit webroot
                command = f'{acmePath} --renew {renewal_domains} --webroot /usr/local/lsws/Example/html --force'
                result = subprocess.run(command, capture_output=True, text=True, shell=True)

                if result.returncode == 0:
                    logging.CyberCPLogFileWriter.writeToFile(f"Successfully renewed SSL for {domain}")
                    if sslUtilities.installSSLForDomain(domain, adminEmail) == 1:
                        return [1, "None"]
                else:
                    logging.CyberCPLogFileWriter.writeToFile(f"Renewal failed for {domain}, falling back to new issuance")

        if sslUtilities.obtainSSLForADomain(domain, adminEmail, sslpath, aliasDomain, isHostname) == 1:
            if sslUtilities.installSSLForDomain(domain, adminEmail) == 1:
                return [1, "None"]
        else:
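From a caller's perspective the contract is unchanged by this edit: a two-element list whose first item signals success. A hedged sketch mirroring the hostname call sites further down (domain, email and path are illustrative; the [0, message] failure shape is an assumption implied by those call sites):

# Illustrative caller: issueSSLForDomain is assumed to return [1, "None"] on
# success and [0, <error>] on failure, which is how the hostname-SSL call
# sites below read it.
retValues = sslUtilities.issueSSLForDomain('example.com', 'admin@example.com',
                                           '/home/example.com/public_html', None, isHostname=False)
if retValues[0] == 0:
    print("0," + str(retValues[1]))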
@@ -894,6 +894,63 @@ $cfg['Servers'][$i]['LogoutURL'] = 'phpmyadminsignin.php?logout';
        except:
            pass

        # AI Scanner Scheduled Scans Tables
        try:
            cursor.execute('''
                CREATE TABLE `ai_scanner_scheduled_scans` (
                    `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
                    `admin_id` integer NOT NULL,
                    `name` varchar(200) NOT NULL,
                    `domains` longtext NOT NULL,
                    `frequency` varchar(20) NOT NULL DEFAULT 'weekly',
                    `scan_type` varchar(20) NOT NULL DEFAULT 'full',
                    `time_of_day` time NOT NULL,
                    `day_of_week` integer DEFAULT NULL,
                    `day_of_month` integer DEFAULT NULL,
                    `status` varchar(20) NOT NULL DEFAULT 'active',
                    `last_run` datetime(6) DEFAULT NULL,
                    `next_run` datetime(6) DEFAULT NULL,
                    `created_at` datetime(6) NOT NULL,
                    `updated_at` datetime(6) NOT NULL,
                    `email_notifications` bool NOT NULL DEFAULT 1,
                    `notification_emails` longtext NOT NULL DEFAULT '',
                    `notify_on_threats` bool NOT NULL DEFAULT 1,
                    `notify_on_completion` bool NOT NULL DEFAULT 0,
                    `notify_on_failure` bool NOT NULL DEFAULT 1,
                    KEY `ai_scanner_scheduled_scans_admin_id_idx` (`admin_id`),
                    KEY `ai_scanner_scheduled_scans_status_next_run_idx` (`status`, `next_run`),
                    CONSTRAINT `ai_scanner_scheduled_scans_admin_id_fk` FOREIGN KEY (`admin_id`)
                        REFERENCES `loginSystem_administrator` (`id`) ON DELETE CASCADE
                )
            ''')
        except:
            pass

        try:
            cursor.execute('''
                CREATE TABLE `ai_scanner_scheduled_executions` (
                    `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
                    `scheduled_scan_id` integer NOT NULL,
                    `execution_time` datetime(6) NOT NULL,
                    `status` varchar(20) NOT NULL DEFAULT 'pending',
                    `domains_scanned` longtext NOT NULL DEFAULT '',
                    `total_scans` integer NOT NULL DEFAULT 0,
                    `successful_scans` integer NOT NULL DEFAULT 0,
                    `failed_scans` integer NOT NULL DEFAULT 0,
                    `total_cost` decimal(10,6) NOT NULL DEFAULT 0.000000,
                    `scan_ids` longtext NOT NULL DEFAULT '',
                    `error_message` longtext DEFAULT NULL,
                    `started_at` datetime(6) DEFAULT NULL,
                    `completed_at` datetime(6) DEFAULT NULL,
                    KEY `ai_scanner_scheduled_executions_scheduled_scan_id_idx` (`scheduled_scan_id`),
                    KEY `ai_scanner_scheduled_executions_execution_time_idx` (`execution_time` DESC),
                    CONSTRAINT `ai_scanner_scheduled_executions_scheduled_scan_id_fk` FOREIGN KEY (`scheduled_scan_id`)
                        REFERENCES `ai_scanner_scheduled_scans` (`id`) ON DELETE CASCADE
                )
            ''')
        except:
            pass

        try:
            cursor.execute(
                'CREATE TABLE `loginSystem_acl` (`id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY, `name` varchar(50) NOT NULL UNIQUE, `adminStatus` integer NOT NULL DEFAULT 0, `versionManagement` integer NOT NULL DEFAULT 0, `createNewUser` integer NOT NULL DEFAULT 0, `deleteUser` integer NOT NULL DEFAULT 0, `resellerCenter` integer NOT NULL DEFAULT 0, `changeUserACL` integer NOT NULL DEFAULT 0, `createWebsite` integer NOT NULL DEFAULT 0, `modifyWebsite` integer NOT NULL DEFAULT 0, `suspendWebsite` integer NOT NULL DEFAULT 0, `deleteWebsite` integer NOT NULL DEFAULT 0, `createPackage` integer NOT NULL DEFAULT 0, `deletePackage` integer NOT NULL DEFAULT 0, `modifyPackage` integer NOT NULL DEFAULT 0, `createDatabase` integer NOT NULL DEFAULT 0, `deleteDatabase` integer NOT NULL DEFAULT 0, `listDatabases` integer NOT NULL DEFAULT 0, `createNameServer` integer NOT NULL DEFAULT 0, `createDNSZone` integer NOT NULL DEFAULT 0, `deleteZone` integer NOT NULL DEFAULT 0, `addDeleteRecords` integer NOT NULL DEFAULT 0, `createEmail` integer NOT NULL DEFAULT 0, `deleteEmail` integer NOT NULL DEFAULT 0, `emailForwarding` integer NOT NULL DEFAULT 0, `changeEmailPassword` integer NOT NULL DEFAULT 0, `dkimManager` integer NOT NULL DEFAULT 0, `createFTPAccount` integer NOT NULL DEFAULT 0, `deleteFTPAccount` integer NOT NULL DEFAULT 0, `listFTPAccounts` integer NOT NULL DEFAULT 0, `createBackup` integer NOT NULL DEFAULT 0, `restoreBackup` integer NOT NULL DEFAULT 0, `addDeleteDestinations` integer NOT NULL DEFAULT 0, `scheduleBackups` integer NOT NULL DEFAULT 0, `remoteBackups` integer NOT NULL DEFAULT 0, `manageSSL` integer NOT NULL DEFAULT 0, `hostnameSSL` integer NOT NULL DEFAULT 0, `mailServerSSL` integer NOT NULL DEFAULT 0)')
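The (status, next_run) composite index on ai_scanner_scheduled_scans hints at how the run_scheduled_scans management command (wired into cron below) is expected to find due work; a hypothetical sketch of that query, where only the table and column names come from the DDL above and the SELECT itself is an assumption:

# Hypothetical: select scans that are active and due, leaning on the
# (status, next_run) index created above. Connection handling is omitted.
from datetime import datetime

def fetch_due_scans(cursor):
    cursor.execute(
        "SELECT id, name, domains FROM ai_scanner_scheduled_scans "
        "WHERE status = 'active' AND next_run <= %s",
        (datetime.now(),)
    )
    return cursor.fetchall()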
@@ -3076,6 +3133,13 @@ vmail
        command = """sed -i '/CyberCP/d' /etc/crontab"""
        Upgrade.executioner(command, command, 0, True)

        # Ensure log directory exists for scheduled scans
        if not os.path.exists('/usr/local/lscp/logs'):
            try:
                os.makedirs('/usr/local/lscp/logs', mode=0o755)
            except:
                pass

        if os.path.exists('/usr/local/lsws/conf/httpd.conf'):
            # Setup /usr/local/lsws/conf/httpd.conf to use new Logformat standard for better stats and accesslogs
            command = """sed -i "s|^LogFormat.*|LogFormat '%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\"' combined|g" /usr/local/lsws/conf/httpd.conf"""
@@ -3109,6 +3173,7 @@ vmail
0 0 * * 4 /usr/local/CyberCP/bin/python /usr/local/CyberCP/plogical/renew.py >/dev/null 2>&1
7 0 * * * "/root/.acme.sh"/acme.sh --cron --home "/root/.acme.sh" > /dev/null
*/3 * * * * if ! find /home/*/public_html/ -maxdepth 2 -type f -newer /usr/local/lsws/cgid -name '.htaccess' -exec false {} +; then /usr/local/lsws/bin/lswsctrl restart; fi
* * * * * /usr/local/CyberCP/bin/python /usr/local/CyberCP/manage.py run_scheduled_scans >/usr/local/lscp/logs/scheduled_scans.log 2>&1
"""

        writeToFile = open(cronPath, 'w')
@@ -3133,6 +3198,15 @@ vmail
0 1 * * * /usr/local/CyberCP/bin/python /usr/local/CyberCP/IncBackups/IncScheduler.py '1 Day'
0 0 */3 * * /usr/local/CyberCP/bin/python /usr/local/CyberCP/IncBackups/IncScheduler.py '3 Days'
0 0 * * 0 /usr/local/CyberCP/bin/python /usr/local/CyberCP/IncBackups/IncScheduler.py '1 Week'
"""
            writeToFile = open(cronPath, 'a')
            writeToFile.write(content)
            writeToFile.close()

        # Add AI Scanner scheduled scans cron job if missing
        if data.find('run_scheduled_scans') == -1:
            content = """
* * * * * /usr/local/CyberCP/bin/python /usr/local/CyberCP/manage.py run_scheduled_scans >/usr/local/lscp/logs/scheduled_scans.log 2>&1
"""
            writeToFile = open(cronPath, 'a')
            writeToFile.write(content)
@@ -3149,6 +3223,7 @@ vmail
7 0 * * * "/root/.acme.sh"/acme.sh --cron --home "/root/.acme.sh" > /dev/null
0 0 * * * /usr/local/CyberCP/bin/python /usr/local/CyberCP/IncBackups/IncScheduler.py Daily
0 0 * * 0 /usr/local/CyberCP/bin/python /usr/local/CyberCP/IncBackups/IncScheduler.py Weekly
* * * * * /usr/local/CyberCP/bin/python /usr/local/CyberCP/manage.py run_scheduled_scans >/usr/local/lscp/logs/scheduled_scans.log 2>&1
"""
        writeToFile = open(cronPath, 'w')
        writeToFile.write(content)
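The guard used above, appending the run_scheduled_scans line only when the existing crontab does not already contain it, is an idempotent pattern that generalises to any cron entry; a hedged, self-contained sketch (the crontab path is an illustrative stand-in for cronPath):

# Illustrative: append a cron entry only if its marker is missing, mirroring
# the data.find('run_scheduled_scans') check above.
def ensure_cron_entry(cron_path, marker, entry):
    with open(cron_path) as f:
        data = f.read()
    if data.find(marker) == -1:
        with open(cron_path, 'a') as f:
            f.write('\n' + entry + '\n')

ensure_cron_entry(
    '/var/spool/cron/root',  # stand-in for cronPath
    'run_scheduled_scans',
    '* * * * * /usr/local/CyberCP/bin/python /usr/local/CyberCP/manage.py run_scheduled_scans >/usr/local/lscp/logs/scheduled_scans.log 2>&1'
)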
@@ -966,7 +966,7 @@ local_name %s {

        adminEmail = "email@" + virtualHost

        retValues = sslUtilities.issueSSLForDomain(virtualHost, adminEmail, path)
        retValues = sslUtilities.issueSSLForDomain(virtualHost, adminEmail, path, None, isHostname=True)

        if retValues[0] == 0:
            print("0," + str(retValues[1]))
@@ -1042,7 +1042,7 @@ local_name %s {
        srcPrivKey = '/etc/letsencrypt/live/' + virtualHost + '/privkey.pem'

        adminEmail = "email@" + virtualHost
        retValues = sslUtilities.issueSSLForDomain(virtualHost, adminEmail, path)
        retValues = sslUtilities.issueSSLForDomain(virtualHost, adminEmail, path, None, isHostname=True)

        if retValues[0] == 0:
            print("0," + str(retValues[1]))
File diff suppressed because one or more lines are too long
@@ -681,7 +681,7 @@
<div class="website-details" ng-if="isExpanded(web.domain)">
    <div class="details-content">
        <div class="screenshot-section">
            <img ng-src="https://api.microlink.io/?url=https://{$ web.domain $}&screenshot=true&meta=false&embed=screenshot.url"
            <img ng-src="https://api.microlink.io/?url=https://{$ web.domain $}&screenshot=true&meta=false&embed=screenshot.url&ttl=86400000"
                 alt="{$ web.domain $}"
                 class="website-screenshot"
                 onerror="this.onerror=null; this.src='{% static 'baseTemplate/assets/image-resources/webPanel.png' %}';">