Configuration for L7 DDoS Mitigation

This is an example SEnginx configuration that combines L7 DDoS mitigation with protection against sensitive-URL vulnerability scanning.

Configuration File

http {
    # Original nginx configuration
    ... 
         
    statistics_zone 10m; # Allocate 10MB of shared memory for SEnginx access statistics
    ip_blacklist on; # Enable the global IP blacklist
    ip_blacklist_size 10240; # Maximum number of IP addresses the blacklist can hold
    ip_blacklist_timeout 120; # How long an IP stays on the blacklist (seconds)
    ip_blacklist_mode local; # Keep the blacklist locally inside SEnginx
    limit_req_zone $binary_remote_addr zone=mudoom:10m rate=10r/s; # Define a 10MB rate-limiting zone; each client address is limited to 10 requests per second (tokens are replenished at 10 per second)
    ip_behavior_zone zone=brand:10m sample_base=10 sample_cycle=2s; # Define a 10MB behavior-recognition zone; at least 10 requests are required for a valid sample, with a 2-second sampling cycle
 
    # Define IP whitelist
    geo $ip_wl {
        # Entries are defined as address ranges
        ranges;
        default 0;
        127.0.0.1-127.0.0.1 1;
    }

    # Define a User-Agent whitelist so common web crawlers are exempted
    whitelist_ua $ua_wl {
        # Match case-insensitively
        caseless;
        "Baiduspider" ".*\.baidu\.com";
        "Baidu-YunGuanCe-SLABot" ".*\.baidu\.com";
        "Googlebot" ".*\.google\.com";
        "360Spider" ".*\.360\.cn";
        "360JK" ".*\.360\.cn";
        "Sosospider" ".*\.soso\.com";
        "Sogou web spider" ".*\.sogou\.com";
        "bingbot" ".*\.bing\.com";
    }

    server {
 
        virtual_server_name www.mudoom.com; # Virtual server name used in the SEnginx access-statistics display
        ip_behavior zone=brand type=sensitive_url; # Enable the behavior-recognition zone "brand" on this server and track sensitive-URL access behavior
 
        # Enable the robot challenge
        location @process {
            robot_mitigation_global_whitelist ua_var_name=ua_wl ip_var_name=ip_wl ip_var_value=1; # Use the global IP and User-Agent whitelists as the robot-challenge whitelist
            robot_mitigation on; # Enable the Robot Mitigation module
            robot_mitigation_mode js; # Challenge mode: JavaScript
            robot_mitigation_blacklist 50; # Number of failed challenges before an IP is blacklisted
            robot_mitigation_timeout 600; # How long a successful challenge remains valid (seconds)
            robot_mitigation_challenge_ajax off; # Do not challenge AJAX requests
            
            # Original configuration such as php-fpm
            fastcgi_pass 127.0.0.1:9000;
            ...
        }

        # View the current blacklist; for security, this must be restricted to internal access
        location /blacklist_show {            
            allow 127.0.0.1;
            deny all;
            ip_blacklist_show;
        }
 
        # Clear the blacklist; for security, this must be restricted to internal access
        location /blacklist_flush {
            allow 127.0.0.1;
            deny all;
            ip_blacklist_flush;
        }

        # Display SEnginx access statistics; for security, this must be restricted to internal access
        location /stats {
            allow 127.0.0.1;
            deny all;
            statistics;
        }

        # Location to which blocked requests are sent
        location /RequestDenied {
           return 403;
        }
 
        # Location protected by the security modules
        location /mudoom {
            ip_behavior_sensitive; # Mark requests to this location as sensitive-resource accesses
            limit_req zone=mudoom burst=100 forbid_action=@process condition=$cond; # Allow a burst of up to 100 requests; when the limit is exceeded and $cond is set, the request is redirected to @process for a robot challenge

            ifall ($insensitive_percent >= 0) ($insensitive_percent < 30) { # When less than 30% of the client's requests go to non-sensitive resources, arm the robot challenge
                set $cond 1;
            }
 
            # Use the global IP and User-Agent whitelists as the cookie anti-tampering whitelist
            cookie_poisoning_whitelist ua_var_name=ua_wl ip_var_name=ip_wl ip_var_value=1;
            cookie_poisoning on; # Enable cookie anti-tampering
            cookie_poisoning_action block; # Action on a tampered cookie: block rejects the request, pass lets it through, remove strips the tampered cookie value
            cookie_poisoning_log on; # Write to the error log when tampering is detected
 
            # Use the global IP and User-Agent whitelists as the Naxsi whitelist
            naxsi_whitelist ua_var_name=ua_wl ip_var_name=ip_wl ip_var_value=1;
            # Naxsi parameters                         
            LearningMode; # Learning mode: matched rules are logged but requests are not blocked
            SecRulesEnabled; # Enable the Naxsi rules
            #SecRulesDisabled;
            DeniedUrl "/RequestDenied"; # Internal location to which blocked requests are redirected
            CheckRule "$XSS >= 4" BLOCK;
            CheckRule "$TRAVERSAL >= 4" BLOCK;
            CheckRule "$EVADE >= 8" BLOCK;
            CheckRule "$UPLOAD >= 8" BLOCK;
            CheckRule "$RFI >= 8" BLOCK;
            CheckRule "$SQL >= 8" BLOCK;
 
            # Original configuration such as php-fpm
            fastcgi_pass 127.0.0.1:9000;
            ...
        }
    }
}
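
Checking the Management Locations

The /stats, /blacklist_show and /blacklist_flush locations above only answer requests from 127.0.0.1. The following is a minimal sketch, not part of SEnginx itself, for exercising them from the server; it assumes SEnginx is listening on port 80 of localhost and reuses www.mudoom.com from the example as the Host header.

# Probe the internal-only management endpoints from localhost.
# Assumption: SEnginx listens on 127.0.0.1:80 with the www.mudoom.com virtual server name.
import urllib.error
import urllib.request

ENDPOINTS = ["/stats", "/blacklist_show", "/blacklist_flush"]

def probe(path):
    req = urllib.request.Request(
        "http://127.0.0.1" + path,
        headers={"Host": "www.mudoom.com"},  # match virtual_server_name above
    )
    try:
        with urllib.request.urlopen(req, timeout=5) as resp:
            print("%s: HTTP %d" % (path, resp.status))
            print(resp.read().decode(errors="replace")[:200])
    except urllib.error.HTTPError as e:
        # A 403 here means the allow/deny rules rejected the request.
        print("%s: HTTP %d" % (path, e.code))

for endpoint in ENDPOINTS:
    probe(endpoint)

Run from the machine hosting SEnginx, /stats should return the statistics page and /blacklist_flush should empty the blacklist; run from any other address, all three requests should be answered with 403.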