CloudPanel analysis scripts

I’ve created a Bash script that:

  1. Analyzes Nginx logs across all user directories
  2. Tracks outbound traffic using netstat/ss
  3. Monitors user activities and authentication attempts
  4. Checks for recent file changes

To use the script:

  1. Save it to a file (e.g., server_analyzer.sh):
touch server_analyzer.sh
nano server_analyzer.sh
  2. Make it executable:
chmod +x server_analyzer.sh
  3. Run it as root or with sudo:
sudo ./server_analyzer.sh

The script will provide:

  • Top IP addresses accessing your servers
  • HTTP status code distribution
  • Error requests (4xx and 5xx)
  • Outbound connection statistics
  • Recent user login activities
  • Failed login attempts
  • Recent file modifications
#!/bin/bash

set -euo pipefail

# Terminal color palette used by log().
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'

# Well-known CloudPanel locations.
CLP_PATH='/home/clp'
CLP_DB="${CLP_PATH}/htdocs/app/data/db.sq3"
COMMON_NGINX='/etc/nginx'
COMMON_PHP='/etc/php'

# Each run writes into its own timestamped report directory.
TIMESTAMP="$(date +%Y%m%d_%H%M%S)"
OUTPUT_DIR="cloudpanel_analysis_${TIMESTAMP}"
mkdir -p "${OUTPUT_DIR}"

# log MESSAGE COLOR
# Print a timestamped, colorized message to stdout and append a copy to
# the run's analysis.log.
log() {
    local msg="$1" color="$2"
    echo -e "${color}[$(date '+%Y-%m-%d %H:%M:%S')] ${msg}${NC}" \
        | tee -a "${OUTPUT_DIR}/analysis.log"
}

# Dump CloudPanel user and event data into ${OUTPUT_DIR}/database and
# build a combined report.  Uses the CloudPanel PHP console (Doctrine)
# for user data and sqlite3 against db.sq3 for the event history.
# Reads globals: OUTPUT_DIR, CLP_DB, color codes.
analyze_clp_database() {
    log "Analyzing CloudPanel Database..." "${BLUE}"

    mkdir -p "${OUTPUT_DIR}/database"

    # Method 1: Doctrine console query, run as the clp user.
    log "Fetching user data from Doctrine..." "${BLUE}"
    sudo -u clp /usr/bin/php8.1 /home/clp/htdocs/app/files/bin/console doctrine:query:sql "SELECT * from user;" \
        > "${OUTPUT_DIR}/database/user_data.txt" 2>/dev/null || \
        log "Error fetching user data from Doctrine" "${RED}"

    # Method 2: direct SQLite query for the event history.
    log "Fetching event data from SQLite..." "${BLUE}"
    if [ -f "${CLP_DB}" ]; then
        sqlite3 "${CLP_DB}" <<EOF > "${OUTPUT_DIR}/database/event_history.txt"
.mode column
.headers on
SELECT 
    event_name,
    user_name,
    datetime(created_at, 'unixepoch') as event_time
FROM event
ORDER BY event_time DESC;
EOF
        log "Database analysis completed" "${GREEN}"
    else
        log "CloudPanel database not found at ${CLP_DB}" "${RED}"
    fi

    # Combined report.  BUG FIX: cat of a missing input (e.g.
    # event_history.txt when the DB was absent) made the brace group
    # exit non-zero and, with `set -e`, aborted the whole script.
    {
        echo "CloudPanel Database Analysis Report"
        echo "================================="
        echo "Generated at: $(date)"
        echo ""
        echo "1. User Data (from Doctrine):"
        echo "----------------------------"
        if [ -f "${OUTPUT_DIR}/database/user_data.txt" ]; then
            cat "${OUTPUT_DIR}/database/user_data.txt"
        fi
        echo ""
        echo "2. Event History (from SQLite):"
        echo "-----------------------------"
        if [ -f "${OUTPUT_DIR}/database/event_history.txt" ]; then
            cat "${OUTPUT_DIR}/database/event_history.txt"
        fi
    } > "${OUTPUT_DIR}/database/database_analysis_report.txt"
}

# Diff the clp user's bash startup files against the /etc/skel defaults
# and record their modification times.  Every step is best-effort: a
# missing file must not abort the run under `set -e`.
analyze_bash_configs() {
    log "Analyzing Bash configurations..." "${BLUE}"

    # Compare with default Ubuntu configs (diff exits non-zero when the
    # files differ, so it is explicitly tolerated).
    diff "/etc/skel/.bashrc" "${CLP_PATH}/.bashrc" > "${OUTPUT_DIR}/bashrc_changes.diff" 2>&1 || true
    diff "/etc/skel/.bash_logout" "${CLP_PATH}/.bash_logout" > "${OUTPUT_DIR}/bash_logout_changes.diff" 2>&1 || true

    # Get last modification times.  BUG FIX: stat had no `|| true`, so a
    # missing .bashrc/.bash_logout killed the whole script under set -e.
    stat "${CLP_PATH}/.bashrc" "${CLP_PATH}/.bash_logout" > "${OUTPUT_DIR}/bash_config_stats.txt" || true
}

# Walk every /home/<user>/ directory (except clp) and collect site
# structure, WordPress installs, and nginx/php/varnish log summaries
# into ${OUTPUT_DIR}/users/<user>/.
analyze_user_sites() {
    log "Analyzing user sites structure..." "${BLUE}"

    for user_dir in /home/*/; do
        # The clp home is CloudPanel itself, not a hosted site.
        if [ "$user_dir" = "/home/clp/" ]; then
            continue
        fi

        username=$(basename "$user_dir")
        log "Analyzing user: ${username}" "${GREEN}"

        user_report_dir="${OUTPUT_DIR}/users/${username}"
        mkdir -p "${user_report_dir}"

        # Site structure.  FIX: -maxdepth must precede tests like -type
        # (GNU find warns and may misbehave with the old ordering).
        if [ -d "${user_dir}htdocs" ]; then
            find "${user_dir}htdocs" -maxdepth 2 -type d > "${user_report_dir}/site_structure.txt"

            # Check WordPress installations
            find "${user_dir}htdocs" -name "wp-config.php" > "${user_report_dir}/wordpress_installations.txt"
        fi

        if [ -d "${user_dir}logs" ]; then
            # Nginx logs.  FIX: truncate once before the loop and append
            # (>>) inside it — the original truncated per iteration, so
            # only the last *access.log's results survived.
            if [ -d "${user_dir}logs/nginx" ]; then
                log "Analyzing Nginx logs for ${username}..." "${BLUE}"
                : > "${user_report_dir}/nginx_ip_stats.txt"
                : > "${user_report_dir}/nginx_errors.txt"
                : > "${user_report_dir}/wordpress_access_attempts.txt"
                for nginx_log in "${user_dir}logs/nginx"/*access.log; do
                    if [ -f "$nginx_log" ]; then
                        # Unique IPs with request counts
                        awk '{print $1}' "$nginx_log" | sort | uniq -c | sort -rn >> "${user_report_dir}/nginx_ip_stats.txt"

                        # Error requests (4xx and 5xx)
                        awk '$9 ~ /^[45]/' "$nginx_log" >> "${user_report_dir}/nginx_errors.txt"

                        # Common attack vectors.  FIX: grep exits 1 on no
                        # match, which aborted the script under set -e.
                        grep -i "wp-login.php\|wp-admin\|xmlrpc.php" "$nginx_log" >> "${user_report_dir}/wordpress_access_attempts.txt" || true
                    fi
                done
            fi

            # PHP logs: collect errors/warnings/notices.
            if [ -d "${user_dir}logs/php" ]; then
                log "Analyzing PHP logs for ${username}..." "${BLUE}"
                : > "${user_report_dir}/php_errors.txt"
                for php_log in "${user_dir}logs/php"/*error.log; do
                    if [ -f "$php_log" ]; then
                        grep -i "error\|warning\|notice" "$php_log" >> "${user_report_dir}/php_errors.txt" || true
                    fi
                done
            fi

            # Varnish cache: just a directory listing for now.
            if [ -d "${user_dir}logs/varnish-cache" ]; then
                log "Analyzing Varnish cache logs for ${username}..." "${BLUE}"
                ls -lah "${user_dir}logs/varnish-cache" > "${user_report_dir}/varnish_cache_stats.txt"
            fi
        fi

        # Backup inventory.
        if [ -d "${user_dir}backups" ]; then
            ls -lah "${user_dir}backups" > "${user_report_dir}/backup_stats.txt"
        fi
    done
}

# Snapshot the system-wide nginx and PHP configuration.
# Reads globals: COMMON_NGINX, COMMON_PHP, OUTPUT_DIR.
analyze_common_configs() {
    log "Analyzing common configurations..." "${BLUE}"

    # Nginx config analysis.
    if [ -d "$COMMON_NGINX" ]; then
        mkdir -p "${OUTPUT_DIR}/common_configs/nginx"
        # FIX: sites-enabled does not exist on every install; an
        # unguarded cp aborted the script under set -e.
        if [ -d "$COMMON_NGINX/sites-enabled" ]; then
            cp -r "$COMMON_NGINX/sites-enabled" "${OUTPUT_DIR}/common_configs/nginx/"
        fi
        nginx -T 2> "${OUTPUT_DIR}/common_configs/nginx/nginx_config_test.txt" || true
    fi

    # PHP config analysis, one report per installed version directory.
    if [ -d "$COMMON_PHP" ]; then
        mkdir -p "${OUTPUT_DIR}/common_configs/php"
        for version in "$COMMON_PHP"/*; do
            if [ -d "$version" ]; then
                version_num=$(basename "$version")
                php_info_file="${OUTPUT_DIR}/common_configs/php/phpinfo_${version_num}.txt"
                # BUG FIX: the original ran the generic `php` binary for
                # every version directory, so all reports were identical.
                # Prefer the versioned binary (e.g. php8.1) when present,
                # and never let a missing binary abort the run.
                php_bin="php${version_num}"
                command -v "$php_bin" >/dev/null 2>&1 || php_bin="php"
                "$php_bin" -v > "$php_info_file" 2>&1 || true
                "$php_bin" -i >> "$php_info_file" 2>&1 || true
            fi
        done
    fi
}

# Entry point: run every analysis stage, then write a summary file.
main() {
    log "Starting CloudPanel analysis..." "${GREEN}"

    # Per-user reports land under users/.
    mkdir -p "${OUTPUT_DIR}/users"

    analyze_clp_database
    analyze_bash_configs
    analyze_user_sites
    analyze_common_configs

    # Human-readable summary of what this run produced.
    cat > "${OUTPUT_DIR}/summary.txt" <<EOF
CloudPanel Analysis Summary
==========================
Analysis Date: $(date)

Analysis Components:
1. CloudPanel Database
2. Bash Configurations
3. User Sites Structure
4. Common Configurations

Output Directory: ${OUTPUT_DIR}
EOF

    log "Analysis complete. Results are in: ${OUTPUT_DIR}" "${GREEN}"
}

# Invoke the entry point, forwarding any CLI arguments.
main "$@"

Script 2: traffic, security, and file-change analyzer

touch analyze_logs.sh
nano analyze_logs.sh
# paste the script below into the editor, then save
chmod +x analyze_logs.sh
sudo ./analyze_logs.sh
#!/bin/bash

set -euo pipefail

# ANSI colors for log() output.
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'

# BUG FIX: CLP_DB is referenced by analyze_security() but was never
# defined in this script; under `set -u` that aborted the run.
CLP_DB="/home/clp/htdocs/app/data/db.sq3"

# Timestamp and per-run output directory tree.
TIMESTAMP=$(date +%Y%m%d_%H%M%S)
OUTPUT_DIR="cloudpanel_analysis_${TIMESTAMP}"
mkdir -p "${OUTPUT_DIR}/traffic_analysis"
mkdir -p "${OUTPUT_DIR}/security"
mkdir -p "${OUTPUT_DIR}/file_changes"

# Timestamped, colorized logger; mirrors every message into the run's
# analysis.log.  $1 = message, $2 = ANSI color prefix.
log() {
    local ts
    ts=$(date '+%Y-%m-%d %H:%M:%S')
    echo -e "${2}[${ts}] ${1}${NC}" | tee -a "${OUTPUT_DIR}/analysis.log"
}

# analyze_traffic_patterns USER_DIR USERNAME
# Summarize the nginx access logs under USER_DIR/logs/nginx (USER_DIR
# must end with '/').  Writes reports to
# ${OUTPUT_DIR}/traffic_analysis/USERNAME/.
analyze_traffic_patterns() {
    local user_dir="$1"
    local username="$2"
    local analysis_dir="${OUTPUT_DIR}/traffic_analysis/${username}"
    mkdir -p "$analysis_dir"

    log "Analyzing traffic patterns for user: ${username}" "${BLUE}"

    if [ -d "${user_dir}logs/nginx" ]; then
        # FIX: truncate once up front, then append per log file — the
        # original truncated (>) inside the loop, so with several
        # access logs only the last one's results survived.
        : > "${analysis_dir}/top_ips.txt"
        : > "${analysis_dir}/http_status_distribution.txt"
        : > "${analysis_dir}/error_requests.txt"
        : > "${analysis_dir}/traffic_by_hour.txt"
        : > "${analysis_dir}/large_transfers.txt"
        : > "${analysis_dir}/suspicious_patterns.txt"

        for access_log in "${user_dir}logs/nginx"/*access.log; do
            if [ -f "$access_log" ]; then
                # Top IP addresses
                log "Extracting top IP addresses..." "${GREEN}"
                awk '{print $1}' "$access_log" | sort | uniq -c | sort -rn | head -n 20 >> \
                    "${analysis_dir}/top_ips.txt"

                # HTTP Status Distribution
                log "Analyzing HTTP status codes..." "${GREEN}"
                awk '{print $9}' "$access_log" | sort | uniq -c | sort -rn >> \
                    "${analysis_dir}/http_status_distribution.txt"

                # Error requests (4xx/5xx).  BUG FIX: the original used
                # $time_local inside single-quoted awk; an uninitialized
                # awk variable is 0, so $time_local meant $0 (the whole
                # record).  $4 is the timestamp field in combined format.
                log "Analyzing error requests..." "${GREEN}"
                awk '$9 ~ /^[45]/ {print $1,$9,$7,$4}' "$access_log" | \
                    tail -n 100 >> "${analysis_dir}/error_requests.txt"

                # Traffic by hour of day
                log "Analyzing traffic patterns by hour..." "${GREEN}"
                awk '{print $4}' "$access_log" | cut -d: -f2 | sort | uniq -c | \
                    sort -n >> "${analysis_dir}/traffic_by_hour.txt"

                # Responses larger than ~1 MB
                log "Identifying large file transfers..." "${GREEN}"
                awk '$10 > 1000000 {print $7,$10,$1}' "$access_log" | \
                    sort -nr -k2 | head -n 20 >> "${analysis_dir}/large_transfers.txt"

                # Common attack signatures.  FIX: each grep gets
                # `|| true` — "no matches" (exit 1) must not abort the
                # script under set -e / pipefail.
                log "Checking for suspicious patterns..." "${GREEN}"
                {
                    echo "=== SQL Injection Attempts ==="
                    grep -i "union\|select\|insert\|delete\|update" "$access_log" | tail -n 20 || true
                    echo -e "\n=== Script Injection Attempts ==="
                    grep -i "<script\|alert(" "$access_log" | tail -n 20 || true
                    echo -e "\n=== File Upload Attempts ==="
                    grep -i "\.php\|\.jsp\|\.asp" "$access_log" | tail -n 20 || true
                } >> "${analysis_dir}/suspicious_patterns.txt"
            fi
        done
    fi
}

# Snapshot currently-established outbound connections (preferring ss
# over netstat) and list each user's varnish-cache log directory.
analyze_outbound_traffic() {
    log "Analyzing outbound connections..." "${BLUE}"
    local outbound_dir="${OUTPUT_DIR}/traffic_analysis/outbound"
    mkdir -p "$outbound_dir"

    # BUG FIX: grep exits 1 when there are no established connections;
    # with `set -e` + pipefail that killed the script, hence `|| true`.
    if command -v ss &> /dev/null; then
        ss -ntp 2>/dev/null | grep ESTAB > "${outbound_dir}/current_connections.txt" || true
    elif command -v netstat &> /dev/null; then
        netstat -ntp 2>/dev/null | grep ESTABLISHED > "${outbound_dir}/current_connections.txt" || true
    fi

    # Varnish cache directories, one listing per user.
    for user_dir in /home/*/; do
        if [ -d "${user_dir}logs/varnish-cache" ]; then
            username=$(basename "$user_dir")
            log "Analyzing Varnish cache connections for ${username}..." "${GREEN}"
            ls -lah "${user_dir}logs/varnish-cache" > "${outbound_dir}/varnish_${username}.txt"
        fi
    done
}

# Collect recent logins, failed authentication attempts, and
# security-relevant CloudPanel events.
analyze_security() {
    log "Analyzing security events..." "${BLUE}"
    local security_dir="${OUTPUT_DIR}/security"
    # BUG FIX: CLP_DB was never defined in this script; under `set -u`
    # the reference below aborted.  Default to the standard location.
    local clp_db="${CLP_DB:-/home/clp/htdocs/app/data/db.sq3}"

    # Recent user logins (best-effort: `last` may be unavailable).
    last -n 100 > "${security_dir}/recent_logins.txt" 2>/dev/null || true

    # Failed login attempts.  FIX: every grep tolerates "no matches"
    # (exit 1), which otherwise aborted under set -e / pipefail.
    {
        echo "=== Failed SSH Attempts ==="
        grep "Failed password" /var/log/auth.log 2>/dev/null | tail -n 50 || true
        echo -e "\n=== Invalid Users ==="
        grep "Invalid user" /var/log/auth.log 2>/dev/null | tail -n 50 || true
        echo -e "\n=== Failed sudo ==="
        grep "sudo.*COMMAND" /var/log/auth.log 2>/dev/null | tail -n 50 || true
    } > "${security_dir}/failed_attempts.txt"

    # CloudPanel-specific security events (login/fail/error) from the
    # panel database; skipped when the DB or sqlite3 is absent.
    if [ -f "${clp_db}" ] && command -v sqlite3 >/dev/null 2>&1; then
        sqlite3 "${clp_db}" <<EOF > "${security_dir}/clp_security_events.txt"
.mode column
.headers on
SELECT event_name, user_name, datetime(created_at, 'unixepoch') as event_time
FROM event
WHERE event_name LIKE '%login%' OR event_name LIKE '%fail%' OR event_name LIKE '%error%'
ORDER BY created_at DESC LIMIT 100;
EOF
    fi
}

# Report recently modified/created files and very large files for each
# hosted user, plus recent changes to CloudPanel's own files.
analyze_file_changes() {
    log "Analyzing file modifications..." "${BLUE}"
    local changes_dir="${OUTPUT_DIR}/file_changes"

    for user_dir in /home/*/; do
        username=$(basename "$user_dir")
        if [ "$username" != "clp" ]; then
            log "Analyzing file changes for user: ${username}" "${GREEN}"
            # FIX: under pipefail a find permission error fails the whole
            # pipeline even though stderr is discarded, so each pipeline
            # is tolerated with `|| true`.
            {
                echo "=== Recently Modified Files ==="
                find "${user_dir}" -type f -mtime -1 -ls 2>/dev/null | \
                    awk '{print $11,$7,$8,$9,$10}' | tail -n 50 || true

                echo -e "\n=== Recently Created Files ==="
                find "${user_dir}" -type f -ctime -1 -ls 2>/dev/null | \
                    awk '{print $11,$7,$8,$9,$10}' | tail -n 50 || true

                echo -e "\n=== Large Files (>100MB) ==="
                find "${user_dir}" -type f -size +100M -ls 2>/dev/null | \
                    awk '{print $11,$7,$8,$9,$10}' || true
            } > "${changes_dir}/${username}_file_changes.txt"
        fi
    done

    # CloudPanel's own files.
    if [ -d "/home/clp" ]; then
        log "Analyzing CloudPanel file changes..." "${GREEN}"
        {
            echo "=== CloudPanel Configuration Changes ==="
            find "/home/clp/services" -type f -mtime -7 -ls 2>/dev/null || true

            echo -e "\n=== Recent Database Changes ==="
            # BUG FIX: ls on a missing db.sq3 exited non-zero and, with
            # set -e, aborted the analysis; tolerate its absence.
            ls -la "/home/clp/htdocs/app/data/db.sq3" 2>/dev/null || true
        } > "${changes_dir}/cloudpanel_changes.txt"
    fi
}

# Entry point: per-user traffic analysis, then system-wide security,
# outbound-connection, and file-change sweeps, ending with a summary.
main() {
    log "Starting enhanced log analysis..." "${GREEN}"

    # Traffic analysis for every hosted user (clp is the panel itself).
    local home_dir account
    for home_dir in /home/*/; do
        account=$(basename "$home_dir")
        if [ "$account" != "clp" ]; then
            analyze_traffic_patterns "$home_dir" "$account"
        fi
    done

    analyze_outbound_traffic
    analyze_security
    analyze_file_changes

    # Human-readable summary of what this run produced.
    cat > "${OUTPUT_DIR}/analysis_summary.txt" <<EOF
CloudPanel Analysis Summary Report
=================================
Generated at: $(date)

Analysis Components:
1. Traffic Analysis
   - Top IP addresses
   - HTTP status distribution
   - Error requests (4xx and 5xx)
   - Traffic patterns by hour

2. Security Analysis
   - Recent logins
   - Failed login attempts
   - Suspicious activities

3. File Changes
   - Recent modifications
   - New files
   - Large files

4. Outbound Traffic
   - Current connections
   - Varnish cache analysis
EOF

    log "Analysis complete. Results are in: ${OUTPUT_DIR}" "${GREEN}"
}

# Invoke the entry point, forwarding any CLI arguments.
main "$@"