From 093800c9de4116706258982376abce01928a3f7b Mon Sep 17 00:00:00 2001
From: Steve Pinkham
Date: Fri, 21 Dec 2012 23:32:24 -0500
Subject: [PATCH] 2.10b: Many updates, see changelog
---
ChangeLog | 41 +++++
Makefile | 23 ++-
README | 10 +-
assets/index.html | 4 +-
config/example.conf | 188 +++++++++++++++++++
doc/signatures.txt | 63 +++++--
doc/skipfish.1 | 61 +++++--
signatures/apps.sigs | 23 ++-
signatures/files.sigs | 122 ++++++++++---
signatures/messages.sigs | 16 +-
src/analysis.c | 137 ++++++++++----
src/analysis.h | 1 +
src/auth.c | 85 +++++----
src/checks.c | 381 ++++++++++++++++++++++++++++-----------
src/checks.h | 48 +++--
src/config.h | 22 +--
src/crawler.c | 31 ++--
src/crawler.h | 8 +
src/database.c | 12 +-
src/database.h | 16 +-
src/http_client.c | 45 ++++-
src/http_client.h | 15 +-
src/options.c | 120 ++++++++++++
src/options.h | 117 ++++++++++++
src/report.c | 271 +++++++++++++++++++++-------
src/report.h | 5 +
src/signatures.c | 219 +++++++++++++++++++---
src/signatures.h | 63 +++++--
src/skipfish.c | 171 ++++++++----------
src/string-inl.h | 7 +
30 files changed, 1782 insertions(+), 543 deletions(-)
create mode 100644 config/example.conf
create mode 100644 src/options.c
create mode 100644 src/options.h
diff --git a/ChangeLog b/ChangeLog
index 610b355..a5c841a 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,44 @@
+Version 2.10b:
+ - Updated HTML tags and attributes that are checked for URL XSS
+ injections to also include a few HTML5 specific ones
+
+ - Updated test and description for semi-colon injection in HTML meta
+ refresh tags (this is IE6 specific)
+
+ - Relaxed HTML parsing a bit to allow spaces between HTML tag attributes
+ and their values (e.g. "foo =bar").
+
+ - Major update of LFI tests by adding more dynamic tests (double
+ encoding, dynamic amount of ../'s for web.xml). The total amount of
+ tests for this vulnerability is now 40 per injection point.
+
+ - The RFI test is now a separate test and no longer requires special
+ compile options. The default RFI URL and its payload check are
+ still defined in src/config.h.
+
+ - Using the --flush-to-disk flag will cause requests and responses
+ to be flushed to disk which reduces the memory footprint. (especially
+ noticeable in large scans)
+
+ - Fixed a bug where in some conditions (e.g. a page looks similar to
+ another) links were not scraped from responses, which caused links
+ to be missed (thanks to Anurag Chaurasia for reporting)
+
+ - Added configuration file support with the --config flag. In
+ config/example.conf you can find flags and examples.
+
+ - Several signature keyword enhancements have been made. Most
+ significant are the "header" keyword, which allows header matching
+ and the "depend" keyword which allows signature chaining.
+
+ - Fixed basic authentication which was broken per 2.08b. Cheers to
+ Michael Stevens for reporting.
+
+ - Fixed -k scheduling where 1:0:0 would count as a second instead of
+ an hour (and vice versa). Cheers to Claudio Criscione for reporting.
+
+ - Small fix to compile time warnings
+
Version 2.09b:
- Fixed a crash that could be triggered during 404 fingerprint failures
diff --git a/Makefile b/Makefile
index 5d135b3..ab41c3c 100644
--- a/Makefile
+++ b/Makefile
@@ -2,7 +2,8 @@
# skipfish - Makefile
# -------------------
#
-# Author: Michal Zalewski
+# Author: Michal Zalewski ,
+# Niels Heinen
#
# Copyright 2009, 2010, 2011 by Google Inc. All Rights Reserved.
#
@@ -20,21 +21,22 @@
#
PROGNAME = skipfish
-VERSION = 2.09b
+VERSION = 2.10b
SRCDIR = src
SFILES = http_client.c database.c crawler.c analysis.c report.c \
- checks.c signatures.c auth.c
+ checks.c signatures.c auth.c options.c
IFILES = alloc-inl.h string-inl.h debug.h types.h http_client.h \
database.h crawler.h analysis.h config.h report.h \
- checks.h signatures.h auth.h
+ checks.h signatures.h auth.h options.h
OBJFILES = $(patsubst %,$(SRCDIR)/%,$(SFILES))
INCFILES = $(patsubst %,$(SRCDIR)/%,$(IFILES))
CFLAGS_GEN = -Wall -funsigned-char -g -ggdb -I/usr/local/include/ \
-I/opt/local/include/ $(CFLAGS) -DVERSION=\"$(VERSION)\"
-CFLAGS_DBG = -DLOG_STDERR=1 -DDEBUG_ALLOCATOR=1 $(CFLAGS_GEN)
+CFLAGS_DBG = -DLOG_STDERR=1 -DDEBUG_ALLOCATOR=1 \
+ $(CFLAGS_GEN)
CFLAGS_OPT = -O3 -Wno-format $(CFLAGS_GEN)
LDFLAGS += -L/usr/local/lib/ -L/opt/local/lib
@@ -55,6 +57,12 @@ $(PROGNAME): $(SRCDIR)/$(PROGNAME).c $(OBJFILES) $(INCFILES)
debug: $(SRCDIR)/$(PROGNAME).c $(OBJFILES) $(INCFILES)
$(CC) $(LDFLAGS) $(SRCDIR)/$(PROGNAME).c -o $(PROGNAME) \
$(CFLAGS_DBG) $(OBJFILES) $(LIBS)
+ @echo
+ @echo "The debug build prints runtime information to stderr. You"
+ @echo "probably want to redirect this output to a file. like:"
+ @echo
+ @echo " $ ./skipfish [.option.] 2> debug.log"
+ @echo
clean:
rm -f $(PROGNAME) *.exe *.o *~ a.out core core.[1-9][0-9]* *.stackdump \
@@ -66,6 +74,7 @@ same_test: $(SRCDIR)/same_test.c $(OBJFILES) $(INCFILES)
$(LIBS)
publish: clean
- cd ..; rm -rf skipfish-$(VERSION); cp -pr skipfish skipfish-$(VERSION); \
- tar cfvz ~/www/skipfish.tgz skipfish-$(VERSION)
+ cd ..; rm -rf skipfish-$(VERSION); \
+ cp -pr skipfish-release skipfish-$(VERSION); \
+ tar cfvz ~/www/skipfish.tgz skipfish-$(VERSION); \
chmod 644 ~/www/skipfish.tgz
diff --git a/README b/README
index f068618..77fde30 100644
--- a/README
+++ b/README
@@ -238,7 +238,7 @@ behavior there.
To compile it, simply unpack the archive and try make. Chances are, you will
need to install libidn first.
-Next, you need to read the instructions provided in dictionaries/README-FIRST
+Next, you need to read the instructions provided in doc/dictionaries.txt
to select the right dictionary file and configure it correctly. This step has a
profound impact on the quality of scan results later on, so don't skip it.
@@ -278,7 +278,7 @@ new or changed nodes; and blue background to all new or changed issues
found.
Some sites may require authentication for which our support is described
-in docs/authentication.txt. In most cases, you'll be wanting to use the
+in doc/authentication.txt. In most cases, you'll be wanting to use the
form authentication method which is capable of detecting broken sessions
in order to re-authenticate.
@@ -398,7 +398,7 @@ HTTP links seen, even if they have no immediate security impact. Use the -U
option to have these logged.
Dictionary management is a special topic, and - as mentioned - is covered in
-more detail in dictionaries/README-FIRST. Please read that file before
+more detail in doc/dictionaries.txt. Please read that file before
proceeding. Some of the relevant options include -S and -W (covered earlier),
-L to suppress auto-learning, -G to limit the keyword guess jar size, -R to
drop old dictionary entries, and -Y to inhibit expensive $keyword.$extension
@@ -531,12 +531,8 @@ know:
* Scan resume option, better runtime info.
- * Option to limit document sampling or save samples directly to disk.
-
* Standalone installation (make install) support.
- * Config file support.
-
* Scheduling and management web UI.
-------------------------------------
diff --git a/assets/index.html b/assets/index.html
index e4d1a27..e827336 100644
--- a/assets/index.html
+++ b/assets/index.html
@@ -261,7 +261,7 @@ var issue_desc= {
"10402": "HTTP authentication required",
"10403": "Server error triggered",
"10404": "Directory listing enabled",
- "10405": "Discovered files / directories",
+ "10405": "Hidden files / directories",
"10501": "All external links",
"10502": "External URL redirector",
@@ -304,6 +304,7 @@ var issue_desc= {
"30503": "HTTPS form submitting to a HTTP URL",
"30601": "HTML form with no apparent XSRF protection",
"30602": "JSON response with no apparent XSSI protection",
+ "30603": "Auth form leaks credentials via HTTP GET",
"30701": "Incorrect caching directives (lower risk)",
"30801": "User-controlled response prefix (BOM / plugin attacks)",
"30901": "HTTP header injection vector",
@@ -333,6 +334,7 @@ var issue_desc= {
"50104": "Format string vector",
"50105": "Integer overflow vector",
"50106": "File inclusion",
+ "50107": "Remote file inclusion",
"50201": "SQL query or similar syntax in parameters",
"50301": "PUT request accepted",
"50909": "Signature match detected (high risk)"
diff --git a/config/example.conf b/config/example.conf
new file mode 100644
index 0000000..93b70d6
--- /dev/null
+++ b/config/example.conf
@@ -0,0 +1,188 @@
+
+######################################
+## Reporting options
+##################################
+
+# Output to this directory
+output = CHANGEME
+
+# Toggle mixed content reporting
+log-mixed-content = false
+
+# Toggle logging of all external URLs
+log-external-urls = false
+
+# Enable extra cache related logging
+log-cache-mismatches = false
+
+# Turn off console statistics reporting
+#quiet = false
+
+# Increase verbosity of runtime reporting
+#verbose = false
+
+######################################
+## Crawler user agent options
+##################################
+
+# Pretend that 'domain' resolves to 'IP'
+#host = domain=IP
+
+# Specify header values that will be send with every request
+#header = headername=value
+#header = X-Scanner=skipfish
+
+# Specify which one of the pre-defined user agents to use (i|p|f).
+user-agent = i
+
+# Set cookie value and send it with every request
+#cookie = name1=value1
+#cookie = name2=value3
+
+# Reject any new cookies
+reject-cookies = false
+
+######################################
+## Authentication options
+##################################
+
+# Specify the location of the login form
+#auth-form = http://example.org/login.php
+
+# Specify the username and password that you want to authenticate
+# with. It's advised to use throw away (test) accounts.
+#auth-user = myuser
+#auth-pass = mypass
+
+# Specify the credential field names when not detected by skipfish.
+#auth-user-field = user-field-name
+#auth-pass-field = pass-field-name
+
+# The URL to test if the scan is authenticated.
+#auth-verify-url = http://example.org/show-profile.php
+
+# In some cases, you might have to specify the location to which the
+# form data has to be submitted.
+#auth-form-target
+
+# Specify credentials for basic HTTP authentication
+#auth = user:pass
+
+
+######################################
+## Crawler scope / depth options
+##################################
+
+# Maximum crawl tree depth
+max-crawl-depth = 16
+
+# Maximum children to index per node
+max-crawl-child = 512
+
+# Maximum descendants to index per branch
+max-crawl-descendants = 8192
+
+# Max total number of requests to send
+max-request-total = 100000000
+
+# Max requests per second
+#max-request-rate = 200
+
+# Node and link crawl probability
+crawl-probability = 100
+
+# Repeat probabilistic scan with given seed
+#seed = 0xXXXXXX
+
+# Only follow URLs matching 'string'
+#include-string = /want/
+
+# Exclude URLs matching 'string'
+#exclude-string = /want-not/
+
+# Crawl cross-site links to another domain
+#include-domain = scan.also.example.org
+
+# Trust, but do not crawl, another domain
+#trust-domain = .google-analytics.com
+
+# Do not parse HTML, etc, to find new links
+#no-html-parsing = false
+
+# Do not descend into 5xx locations
+skip-error-pages = false
+
+# Add new form auto-fill rule
+#form-value = field=value
+
+######################################
+## Dictionary management
+##################################
+
+# The read-only wordlist that is used for bruteforcing
+wordlist = dictionaries/medium.wl
+
+# The read-write wordlist and where learned keywords will be written
+# for future scans.
+#rw-wordlist = my-wordlist.wl
+
+# Disable extension fuzzing
+no-extension-brute = false
+
+# Disable keyword learning
+no-keyword-learning = false
+
+######################################
+## Performance options
+##################################
+
+# Max simultaneous TCP connections, global
+max-connections = 40
+
+# Max simultaneous connections, per target IP
+max-host-connections = 10
+
+# Max number of consecutive HTTP errors
+max-failed-requests = 100
+
+# Total request response timeout
+request-timeout = 20
+
+# Individual network I/O timeout
+network-timeout = 10
+
+# Timeout on idle HTTP connections
+idle-timeout = 10
+
+# Response size limit in bytes
+response-size = 400000
+
+# Do not keep binary responses for reporting
+discard-binary = true
+
+# Flush request / response data immediately to disk
+flush-to-disk = false
+
+# Stop scanning after the given duration h:m:s
+#scan-timeout = h:m:s
+
+######################################
+## Detection / inject options
+##################################
+
+# Specify the signatures file location. To disable signatures, specify /dev/null.
+signatures = signatures/signatures.conf
+
+# Enable or disable specific injection tests
+#checks-toggle
+
+# Disable all injection tests which means the scan will focus on crawling,
+# bruteforcing and passively detect security issues via signatures.
+no-injection-tests = false
+
+# Ignore this parameter in the scan
+#skip-parameter = search
+
+# Do not submit forms
+no-form-submits = false
+
diff --git a/doc/signatures.txt b/doc/signatures.txt
index c648bb7..c8a7b85 100644
--- a/doc/signatures.txt
+++ b/doc/signatures.txt
@@ -85,6 +85,32 @@ In a signature that has multiple content strings, static strings can be
mixed with regular expressions. You'll likely get the best performance
by starting with a static string before applying a regular expression.
+=== content modifier: regex_match:""
+
+Regular expressions can capture substrings and with regex_match, it is
+possible to compare with the first substring that is returned
+by the regular expression.
+
+Given "Apache/2.2.14" as payload and "Apache\/([\d\.]+)" as regex,
+you could use regex_match:"2.2.14" to find this specific Apache version.
+
+=== content modifier: nocase
+
+When "nocase" is specified, the content string is matched without case
+sensitivity.
+
+This keyword requires no value.
+
+=== header:""
+
+By default signature matching is performed on the response body. By
+specifying a header name using the "header" keyword, this behavior is
+changed: the matching will occur on the header value.
+
+The header name is not case sensitive and header signatures are treated
+exactly the same as content signatures meaning that you can use multiple
+content strings and their modifiers.
+
=== mime:""
The given value will be compared with the MIME type specified by the
@@ -133,20 +159,35 @@ errors are related to our tests and report them as such.
=== id:
-The unique signature ID. Currently this is for documentation purpose only
-but in the future we'll probably add signature chaining which requires
-unique ID's as well.
+The unique signature ID. This is for documentation purpose and for using
+the depend keyword which allows signature chaining.
+Note that the signature ID is also included in the report files
+(e.g. samples.js).
+=== depend:
------------------------
- 3. Upcoming keywords
------------------------
+A signature can be made dependent on another signature by specifying its
+signature ID as the value of this keyword. This means that the signature
+will be skipped unless the dependent signature was successfully matched
+already.
-Amongst other changes, it's likely that the next release will have the
-following keywords implemented:
+One example use case could be a global signature that identifies a
+framework, say Wordpress, and dependent signatures that detect wordpress
+specific issues.
-1) nocase - for case insensitive matching
-2) ssl - match against SSL responses only
-3) header - match against a specific header
+=== proto:"[http|https]"
+
+The "proto" keyword can be used to make a signature only applicable for
+either "http" or "https" type URLs.
+
+This changes the default behavior where every signature is applied to
+both http and https URLs.
+
+=== report:"[once|always]"
+
+Some signatures are meant to find host-specific problems and only need to be
+reported once. This can be achieved by using report:"once";
+
+This keyword's default value is "always".
diff --git a/doc/skipfish.1 b/doc/skipfish.1
index dfde30f..5859b48 100644
--- a/doc/skipfish.1
+++ b/doc/skipfish.1
@@ -72,16 +72,14 @@ Performance settings:
\-s s_limit \- response size limit (200000 B)
\-e \- do not keep binary responses for reporting
-Safety settings:
+Other settings:
\-k duration \- stop scanning after the given duration h:m:s
+ \--config file \- load specified configuration file
.SH AUTHENTICATION AND ACCESS
.PP
Some sites require authentication, and skipfish supports this in different ways. First there is basic HTTP authentication, for which you can use the \-A flag. Second, and more common, are sites that require authentication on a web application level. For these sites, the best approach is to capture authenticated session cookies and provide them to skipfish using the \-C flag (multiple if needed). Last, you'll need to put some effort in protecting the session from being destroyed by excluding logout links with \-X and/or by rejecting new cookies with \-N.
-.IP "-A/--auth "
-For sites requiring basic HTTP authentication, you can use this flag to specify your credentials.
-
.IP "-F/--host "
Using this flag, you can set the \'\fIHost:\fP\' header value to define a custom mapping between a host and an IP (bypassing the resolver). This feature is particularly useful for not-yet-launched or legacy services that don't have the necessary DNS entries.
@@ -97,30 +95,46 @@ This flag allows the user-agent to be specified where \'\fIi\fP\' stands for Int
.IP "-N/--reject-cookies"
This flag causes skipfish to ignore cookies that are being set by the site. This helps to enforce stateless tests and also prevent that cookies set with \'-C\' are not overwritten.
+.IP "-A/--auth "
+For sites requiring basic HTTP authentication, you can use this flag to specify your credentials.
+
+.IP "--auth-form "
+The login form to use with form authentication. By default skipfish will use the form's action URL to submit the credentials. If this is missing then the login data is sent to the form URL. In case that is wrong, you can set the form handler URL with --auth-form-target .
+
+.IP "--auth-user "
+The username to be used during form authentication. Skipfish will try to detect the correct form field to use but if it fails to do so (and gives an error), then you can specify the form field name with --auth-user-field.
+
+.IP "--auth-pass "
+The password to be used during form authentication. Similar to auth-user, the form field name can (optionally) be set with --auth-pass-field.
+
+.IP "--auth-verify-url "
+This URL allows skipfish to verify whether authentication was successful. This requires a URL where anonymous and authenticated requests are answered with a different response.
+
+
.SH CRAWLING SCOPE
.PP
Some sites may be too big to scan in a reasonable timeframe. If the site features well-defined tarpits - for example, 100,000 nearly identical user profiles as a part of a social network - these specific locations can be excluded with -X or -S. In other cases, you may need to resort to other settings: -d limits crawl depth to a specified number of subdirectories; -c limits the number of children per directory; -x limits the total number of descendants per crawl tree branch; and -r limits the total number of requests to send in a scan.
-.IP "-d/--max-depth "
+.IP "-d/--max-crawl-depth "
Limit the depth of subdirectories being crawled (see above).
-.IP "-c/--max-child "
+.IP "-c/--max-crawl-child "
Limit the amount of subdirectories per directory we crawl into (see above).
-.IP "-x/--max-descendants "
+.IP "-x/--max-crawl-descendants "
Limit the total number of descendants per crawl tree branch (see above).
-.IP "-r/--max-requests "
+.IP "-r/--max-request-total "
The maximum number of requests can be limited with this flag.
-.IP "-p/--probability <0-100>"
+.IP "-p/--crawl-probability <0-100>"
By specifying a percentage between 1 and 100%, it is possible to tell the crawler to follow fewer than 100% of all links, and try fewer than 100% of all dictionary entries. This \- naturally \- limits the completeness of a scan, but unlike most other settings, it does so in a balanced, non-deterministic manner. It is extremely useful when you are setting up time-bound, but periodic assessments of your infrastructure.
.IP "-q/--seed "
This flag sets the initial random seed for the crawler to a specified value. This can be used to exactly reproduce a previous scan to compare results. Randomness is relied upon most heavily in the -p mode, but also influences a couple of other scan management decisions.
-.IP "-I/--include "
+.IP "-I/--include-string "
With this flag, you can tell skipfish to only crawl and test URLs that match a certain string. This can help to narrow down the scope of a scan by only whitelisting certain sections of a web site (e.g. \-I /shop).
-.IP "-X/--exclude "
+.IP "-X/--exclude-string "
The \-X option can be used to exclude files / directories from the scan. This is useful to avoid session termination (i.e. by excluding /logout) or just for speeding up your scans by excluding static content directories like /icons/, /doc/, /manuals/, and other standard, mundane locations along these lines.
-.IP "-K/--skip-param "
+.IP "-K/--skip-parameter "
This flag allows you to specify parameter names not to fuzz. (useful for applications that put session IDs in the URL, to minimize noise).
.IP "-D/--include-domain "
@@ -132,10 +146,10 @@ In some cases, you do not want to actually crawl a third-party domain, but you t
.IP "-Z/--skip-error-pages"
Do not crawl into pages / directories that give an error 5XX.
-.IP "-O/--skip-forms"
+.IP "-O/--no-form-submits"
Using this flag will cause forms to be ignored during the scan.
-.IP "-P/--ignore-links"
+.IP "-P/--no-html-parsing"
This flag will disable link extracting and effectively disables crawling. Using \-P is useful when you want to test one specific URL or when you want to feed skipfish a list of URLs that were collected with an external crawler.
.SH TESTING SCOPE
@@ -147,7 +161,7 @@ EXPERIMENTAL: Displays the crawler injection tests. The output shows the index n
.IP "--checks-toggle "
EXPERIMENTAL: Every injection test can be enabled/disabled with using this flag. As value, you need to provide the check numbers which can be obtained with the \-\-checks flag. Multiple checks can be toggled via a comma separated value (i.e. \-\-checks\-toggle 1,2 )
-.IP "--no-checks"
+.IP "--no-injection-tests"
EXPERIMENTAL: Disables all injection tests for this scan and limits the scan to crawling and, optionally, bruteforcing. As with all scans, the output directory will contain a pivots.txt file. This file can be used to feed future scans.
.SH REPORTING OPTIONS
@@ -176,7 +190,7 @@ EXPERIMENTAL: Use this flag to enable runtime reporting of, for example, problem
.SH DICTIONARY MANAGEMENT
.PP
-Make sure you've read the instructions provided in dictionaries/README-FIRST to select the right dictionary file and configure it correctly. This step has a profound impact on the quality of scan results later on.
+Make sure you've read the instructions provided in doc/dictionaries.txt to select the right dictionary file and configure it correctly. This step has a profound impact on the quality of scan results later on.
.IP "-S/--wordlist "
Load the specified (read-only) wordlist for use during the scan. This flag is optional but use of a dictionary is highly recommended when performing a blackbox scan as it will highlight hidden files and directories.
@@ -187,7 +201,7 @@ Specify an initially empty file for any newly learned site-specific keywords (wh
.IP "-L/--no-keyword-learning"
During the scan, skipfish will try to learn and use new keywords. This flag disables that behavior and should be used when any form of brute-forcing is not desired.
-.IP "-Y/--no-ext-fuzzing"
+.IP "-Y/--no-extension-brute"
This flag will disable extension guessing during directory bruteforcing.
.IP "-R "
@@ -202,7 +216,7 @@ During the scan, a temporary buffer of newly detected keywords is maintained. Th
.SH PERFORMANCE OPTIONS
The default performance setting should be fine for most servers but when the report indicates there were connection problems, you might want to tweak some of the values here. For unstable servers, the scan coverage is likely to improve when using low values for rate and connection flags.
-.IP "-l/--max-rate "
+.IP "-l/--max-request-rate "
This flag can be used to limit the amount of requests per second. This is very useful when the target server can't keep up with the high amount of requests that are generated by skipfish. Keeping the amount requests per second low can also help preventing some rate-based DoS protection mechanisms from kicking in and ruining the scan.
.IP "-g/--max-connections "
@@ -211,7 +225,7 @@ The max simultaneous TCP connections (global) can be set with this flag.
.IP "-m/--max-host-connections "
The max simultaneous TCP connections, per target IP, can be set with this flag.
-.IP "-f/--max-fail "
+.IP "-f/--max-failed-requests "
Controls the maximum number of consecutive HTTP errors you are willing to see before aborting the scan. For large scans, you probably want to set a higher value here.
.IP "-t/--request-timeout "
@@ -229,7 +243,16 @@ Sets the maximum length of a response to fetch and parse (longer responses will
.IP "-e/--discard-binary"
This prevents binary documents from being kept in memory for reporting purposes, and frees up a lot of RAM.
+.IP "--flush-to-disk"
+This causes request / response data to be flushed to disk instead of being kept in memory. As a result, the memory usage for large scans will be significantly lower.
+
.SH EXAMPLES
+\fBScan type: config\fP
+.br
+skipfish \-\-config config/example.conf http://example.com
+.br
+
+.br
\fBScan type: quick\fP
.br
skipfish \-o output/dir/ http://example.com
diff --git a/signatures/apps.sigs b/signatures/apps.sigs
index 05f34f6..d6a3582 100644
--- a/signatures/apps.sigs
+++ b/signatures/apps.sigs
@@ -5,14 +5,27 @@
# default.
# A phpinfo() page
-id:11001; sev:3; content:"phpinfo()phpinfo()phpMyAdmin '; depth:1024; content:''; depth:2048; memo:"phpMyAdmin";
+id:11002; sev:3; content:'phpMyAdmin '; depth:1024; \
+ content:''; depth:2048; \
+ memo:"phpMyAdmin detected";
-id:11003; sev:3; content:"Parallels Plesk Panel"; depth:1024; content:'action="/login_up.php3" method="post"'; memo:"Plesk administrative interface";
+# Plesk admin interface
+id:11003; sev:3; content:"Parallels Plesk Panel"; depth:1024; \
+ content:'action="/login_up.php3" method="post"'; \
+ memo:"Plesk administrative interface";
# Reference: http://httpd.apache.org/docs/2.2/mod/mod_status.html
-id:11004; sev:3; mime:"text/html"; content:"Apache Status"; depth:100; content:"