######################################
## Reporting options
######################################

# Output to this directory
output = CHANGEME

# Toggle mixed content reporting
log-mixed-content = false

# Toggle logging of all external URLs
log-external-urls = false

# Enable extra cache related logging
log-cache-mismatches = false

# Turn off console statistics reporting
#quiet = false

# Increase verbosity of runtime reporting
#verbose = false
|
######################################
## Crawler user agent options
######################################

# Pretend that 'domain' resolves to 'IP'
#host = domain=IP

# Specify header values that will be sent with every request
#header = headername=value
#header = X-Scanner=skipfish

# Specify which one of the pre-defined user agents to use (i|p|f).
user-agent = i

# Set cookie value and send it with every request
#cookie = name1=value1
#cookie = name2=value3

# Reject any new cookies
reject-cookies = false
|
######################################
## Authentication options
######################################

# Specify the location of the login form
#auth-form = http://example.org/login.php

# Specify the username and password that you want to authenticate
# with. It's advised to use throw away (test) accounts.
#auth-user = myuser
#auth-pass = mypass

# Specify the credential field names when not detected by skipfish.
#auth-user-field = user-field-name
#auth-pass-field = pass-field-name

# The URL used to test whether the scan is authenticated.
#auth-verify-url = http://example.org/show-profile.php

# In some cases, you might have to specify the location to which the
# form data has to be submitted.
#auth-form-target

# Specify credentials for basic HTTP authentication
#auth = user:pass
|
######################################
## Crawler scope / depth options
######################################

# Maximum crawl tree depth
max-crawl-depth = 16

# Maximum children to index per node
max-crawl-child = 512

# Maximum descendants to index per branch
max-crawl-descendants = 8192

# Max total number of requests to send
max-request-total = 100000000

# Max requests per second
#max-request-rate = 200

# Node and link crawl probability
crawl-probability = 100

# Repeat probabilistic scan with given seed
#seed = 0xXXXXXX

# Only follow URLs matching 'string'
#include-string = /want/

# Exclude URLs matching 'string'
#exclude-string = /want-not/

# Crawl cross-site links to another domain
#include-domain = scan.also.example.org

# Trust, but do not crawl, another domain
#trust-domain = .google-analytics.com

# Do not parse HTML, etc, to find new links
#no-html-parsing = false

# Do not descend into 5xx locations
skip-error-pages = false

# Add new form auto-fill rule
#form-value = field=value
|
######################################
## Dictionary management
######################################

# The read-only wordlist that is used for bruteforcing
wordlist = dictionaries/medium.wl

# The read-write wordlist and where learned keywords will be written
# for future scans.
#rw-wordlist = my-wordlist.wl

# Disable extension fuzzing
no-extension-brute = false

# Disable keyword learning
no-keyword-learning = false
|
######################################
## Performance options
######################################

# Max simultaneous TCP connections, global
max-connections = 40

# Max simultaneous connections, per target IP
max-host-connections = 10

# Max number of consecutive HTTP errors
max-failed-requests = 100

# Total request response timeout
request-timeout = 20

# Individual network I/O timeout
network-timeout = 10

# Timeout on idle HTTP connections
idle-timeout = 10

# Response size limit in bytes
response-size = 400000

# Do not keep binary responses for reporting
discard-binary = true

# Flush request / response data immediately to disk
flush-to-disk = false

# Stop scanning after the given duration h:m:s
#scan-timeout = h:m:s
|
######################################
## Detection / inject options
######################################

# Specify the signatures file location. To disable signatures, specify /dev/null.
signatures = signatures/signatures.conf

# Enable or disable specific injection tests
#checks-toggle

# Disable all injection tests, which means the scan will focus on crawling,
# bruteforcing and passively detecting security issues via signatures.
no-injection-tests = false

# Ignore this parameter in the scan
#skip-parameter = search

# Do not submit forms
no-form-submits = false