diff --git a/ChangeLog b/ChangeLog
index 46161ee..0355fde 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,41 @@
+Version 2.08b:
+
+ - Added Host header XSS testing.
+
+  - Added HTML encoding XSS tests to detect scenarios where our
+    injection string ends up in an attribute that executes HTML encoded
+    Javascript. For example: onclick.
+
+ - Bruteforcing is now disabled for URLs that gave a directory listing.
+
+  - Added subject alternate name checking for SSL certificates (cheers
+    to Matt Caroll for his feedback).
+
+ - Added signature matching (see doc/signatures.txt) which means a lot
+ of the content based issues are no longer hardcoded.
+
+  - Added active XSSI test. The passive XSSI stays (for now) but this
+    active check is more accurate and will remove issues detected by the
+    passive one if they cannot be confirmed. This reduces false positives.
+
+ - Added HTML tag XSS test which triggers when our payload is used
+ as a tag attribute value but without quotes (courtesy of wavsep).
+
+ - Added javascript: scheme XSS testing (courtesy of wavsep).
+
+ - Added form based authentication. During these authenticated
+ scans, skipfish will check if the session has ended and re-authenticates
+ if necessary.
+
+ - Fixed a bug where in slow scans the console output could mess up
+ due to the high(er) refresh rate.
+
+ - Fixed a bug where a missed response during the injection tests could
+ result in a crash. (courtesy of Sebastian Roschke)
+
+  - Restructured the source package a bit by adding a src/, doc/ and
+    tools/ directory.
+
Version 2.07b:
--------------
diff --git a/Makefile b/Makefile
index 039065f..b824797 100644
--- a/Makefile
+++ b/Makefile
@@ -20,45 +20,49 @@
#
PROGNAME = skipfish
-VERSION = 2.07b
+VERSION = 2.08b
-OBJFILES = http_client.c database.c crawler.c analysis.c report.c \
- checks.c
-INCFILES = alloc-inl.h string-inl.h debug.h types.h http_client.h \
+SRCDIR = src
+SFILES = http_client.c database.c crawler.c analysis.c report.c \
+ checks.c signatures.c auth.c
+IFILES = alloc-inl.h string-inl.h debug.h types.h http_client.h \
database.h crawler.h analysis.h config.h report.h \
- checks.h
+ checks.h signatures.h auth.h
+
+OBJFILES = $(patsubst %,$(SRCDIR)/%,$(SFILES))
+INCFILES = $(patsubst %,$(SRCDIR)/%,$(IFILES))
CFLAGS_GEN = -Wall -funsigned-char -g -ggdb -I/usr/local/include/ \
-I/opt/local/include/ $(CFLAGS) -DVERSION=\"$(VERSION)\"
CFLAGS_DBG = -DLOG_STDERR=1 -DDEBUG_ALLOCATOR=1 $(CFLAGS_GEN)
-CFLAGS_OPT = -O3 -Wno-format $(CFLAGS_GEN)
+CFLAGS_OPT = -O3 -Wno-format $(CFLAGS_GEN)
LDFLAGS += -L/usr/local/lib/ -L/opt/local/lib
-LIBS += -lcrypto -lssl -lidn -lz
+LIBS += -lcrypto -lssl -lidn -lz -lpcre
all: $(PROGNAME)
-$(PROGNAME): $(PROGNAME).c $(OBJFILES) $(INCFILES)
- $(CC) $(LDFLAGS) $(PROGNAME).c -o $(PROGNAME) $(CFLAGS_OPT) \
- $(OBJFILES) $(LIBS)
+$(PROGNAME): $(SRCDIR)/$(PROGNAME).c $(OBJFILES) $(INCFILES)
+ $(CC) $(LDFLAGS) $(SRCDIR)/$(PROGNAME).c -o $(PROGNAME) \
+ $(CFLAGS_OPT) $(OBJFILES) $(LIBS)
@echo
- @echo "See dictionaries/README-FIRST to pick a dictionary for the tool."
+ @echo "See doc/dictionaries.txt to pick a dictionary for the tool."
@echo
@echo "Having problems with your scans? Be sure to visit:"
@echo "http://code.google.com/p/skipfish/wiki/KnownIssues"
@echo
-debug: $(PROGNAME).c $(OBJFILES) $(INCFILES)
- $(CC) $(LDFLAGS) $(PROGNAME).c -o $(PROGNAME) $(CFLAGS_DBG) \
- $(OBJFILES) $(LIBS)
+debug: $(SRCDIR)/$(PROGNAME).c $(OBJFILES) $(INCFILES)
+ $(CC) $(LDFLAGS) $(SRCDIR)/$(PROGNAME).c -o $(PROGNAME) \
+ $(CFLAGS_DBG) $(OBJFILES) $(LIBS)
clean:
rm -f $(PROGNAME) *.exe *.o *~ a.out core core.[1-9][0-9]* *.stackdump \
LOG same_test
rm -rf tmpdir
-same_test: same_test.c $(OBJFILES) $(INCFILES)
- $(CC) same_test.c -o same_test $(CFLAGS_DBG) $(OBJFILES) $(LDFLAGS) \
+same_test: $(SRCDIR)/same_test.c $(OBJFILES) $(INCFILES)
+ $(CC) $(SRCDIR)/same_test.c -o same_test $(CFLAGS_DBG) $(OBJFILES) $(LDFLAGS) \
$(LIBS)
publish: clean
diff --git a/README b/README
index 569cabd..f068618 100644
--- a/README
+++ b/README
@@ -85,6 +85,9 @@ associated with web security scanners. Specific advantages include:
stored XSS (path, parameters, headers), blind SQL or XML injection,
or blind shell injection.
+ * Snort style content signatures which will highlight server errors,
+ information leaks or potentially dangerous web applications.
+
* Report post-processing drastically reduces the noise caused by any
remaining false positives or server gimmicks by identifying repetitive
patterns.
@@ -274,23 +277,15 @@ report will be non-destructively annotated by adding red background to all
new or changed nodes; and blue background to all new or changed issues
found.
-Some sites may require authentication; for simple HTTP credentials, you can
-try:
+Some sites may require authentication for which our support is described
+in doc/authentication.txt. In most cases, you'll want to use the
+form authentication method which is capable of detecting broken sessions
+in order to re-authenticate.
-$ ./skipfish -A user:pass ...other parameters...
-
-Alternatively, if the site relies on HTTP cookies instead, log in in your
-browser or using a simple curl script, and then provide skipfish with a
-session cookie:
-
-$ ./skipfish -C name=val ...other parameters...
-
-Other session cookies may be passed the same way, one per each -C option.
-
-Certain URLs on the site may log out your session; you can combat this in two
-ways: by using the -N option, which causes the scanner to reject attempts to
-set or delete cookies; or with the -X parameter, which prevents matching URLs
-from being fetched:
+Once authenticated, certain URLs on the site may log out your session;
+you can combat this in two ways: by using the -N option, which causes
+the scanner to reject attempts to set or delete cookies; or with the -X
+parameter, which prevents matching URLs from being fetched:
$ ./skipfish -X /logout/logout.aspx ...other parameters...
@@ -544,8 +539,6 @@ know:
* Scheduling and management web UI.
- * A database for banner / version checks or other configurable rules?
-
-------------------------------------
9. Oy! Something went horribly wrong!
-------------------------------------
diff --git a/assets/index.html b/assets/index.html
index f50e54c..ce036f9 100644
--- a/assets/index.html
+++ b/assets/index.html
@@ -278,6 +278,7 @@ var issue_desc= {
"10804": "Conflicting MIME / charset info (low risk)",
"10901": "Numerical filename - consider enumerating",
"10902": "OGNL-like parameter behavior",
+ "10909": "Signature match (informational)",
"20101": "Resource fetch failed",
"20102": "Limits exceeded, fetch suppressed",
@@ -294,6 +295,7 @@ var issue_desc= {
"30203": "SSL certificate host name mismatch",
"30204": "No SSL certificate data found",
"30205": "Weak SSL cipher negotiated",
+ "30206": "Host name length mismatch (name string has null byte)",
"30301": "Directory listing restrictions bypassed",
"30401": "Redirection to attacker-supplied URLs",
"30402": "Attacker-supplied URLs in embedded content (lower risk)",
@@ -305,11 +307,13 @@ var issue_desc= {
"30701": "Incorrect caching directives (lower risk)",
"30801": "User-controlled response prefix (BOM / plugin attacks)",
"30901": "HTTP header injection vector",
+ "30909": "Signature match detected",
"40101": "XSS vector in document body",
"40102": "XSS vector via arbitrary URLs",
"40103": "HTTP response header splitting",
"40104": "Attacker-supplied URLs in embedded content (higher risk)",
+ "40105": "XSS vector via injected HTML tag attribute",
"40201": "External content embedded on a page (higher risk)",
"40202": "Mixed content embedded on a page (higher risk)",
"40301": "Incorrect or missing MIME type (higher risk)",
@@ -321,6 +325,7 @@ var issue_desc= {
"40501": "Directory traversal / file inclusion possible",
"40601": "Incorrect caching directives (higher risk)",
"40701": "Password form submits from or to non-HTTPS page",
+ "40909": "Signature match detected (higher risk)",
"50101": "Server-side XML injection vector",
"50102": "Shell injection vector",
@@ -329,7 +334,8 @@ var issue_desc= {
"50105": "Integer overflow vector",
"50106": "File inclusion",
"50201": "SQL query or similar syntax in parameters",
- "50301": "PUT request accepted"
+ "50301": "PUT request accepted",
+ "50909": "Signature match detected (high risk)"
};
diff --git a/dictionaries/extensions-only.wl b/dictionaries/extensions-only.wl
index 16605ed..45c183f 100644
--- a/dictionaries/extensions-only.wl
+++ b/dictionaries/extensions-only.wl
@@ -86,7 +86,6 @@ e 1 1 1 sql
e 1 1 1 stackdump
e 1 1 1 svn-base
e 1 1 1 swf
-e 1 1 1 swp
e 1 1 1 tar
e 1 1 1 tar.bz2
e 1 1 1 tar.gz
@@ -107,4 +106,3 @@ e 1 1 1 xsl
e 1 1 1 xslt
e 1 1 1 yml
e 1 1 1 zip
-e 1 1 1 ~
diff --git a/doc/authentication.txt b/doc/authentication.txt
new file mode 100644
index 0000000..b06210a
--- /dev/null
+++ b/doc/authentication.txt
@@ -0,0 +1,98 @@
+
+
+This document describes 3 different methods you can use to run
+authenticated skipfish scans.
+
+ 1) Form authentication
+ 2) Cookie authentication
+ 3) Basic HTTP authentication
+
+
+
+-----------------------
+1. Form authentication
+----------------------
+
+With form authentication, skipfish will submit credentials using the
+given login form. The server is expected to reply with authenticated
+cookies which will then be used during the rest of the scan.
+
+An example to login using this feature:
+
+$ ./skipfish --auth-form http://example.org/login \
+ --auth-user myuser \
+ --auth-pass mypass \
+ --auth-verify-url http://example.org/profile \
+ [...other options...]
+
+This is how it works:
+
+1. Upon start of the scan, the authentication form at /login will be
+ fetched by skipfish. We will try to complete the username and password
+ fields and submit the form.
+
+2. Once a server response is obtained, skipfish will fetch the
+ verification URL twice: once with the new session cookies and once
+ without any cookies. Both responses are expected to be different.
+
+3. During the scan, the verification URL will be used many times to
+ test whether we are authenticated. If at some point our session has
+ been terminated server-side, skipfish will re-authenticate using the
+   --auth-form (/login in our example).
+
+Verifying whether the session is still active requires a good verification
+URL where an authenticated request is going to get a different response
+than an anonymous request. For example a 'profile' or 'my account' page.
+
+Troubleshooting:
+----------------
+
+1. Login field names not recognized
+
+ If the username and password form fields are not recognized, skipfish
+ will complain. In this case, you should specify the field names using
+ the --auth-user-field and --auth-pass-field flags.
+
+2. The form is not submitted to the right location
+
+ If the login form doesn't specify an action="" location, skipfish
+ will submit the form's content to the form URL. This will fail in some
+ occasions. For example, when the login page uses Javascript to submit
+ the form to a different location.
+
+ Use the --auth-form-target flag to specify the URL where you want skipfish
+ to submit the form to.
+
+3. Skipfish keeps getting logged out
+
+   Make sure you blacklist any URLs that will log you out. For example,
+   using the "-X /logout" flag.
+
+
+-------------------------
+2. Cookie authentication
+-------------------------
+
+Alternatively, if the site relies on HTTP cookies you can also feed these
+to skipfish manually. To do this log in using your browser or using a
+simple curl script, and then provide skipfish with a session cookie:
+
+$ ./skipfish -C name=val [...other options...]
+
+Other session cookies may be passed the same way, one per each -C option.
+
+The -N option, which causes new cookies to be rejected by skipfish,
+is almost always a good choice when running cookie authenticated scans
+(e.g. to avoid your precious cookies from being overwritten).
+
+$ ./skipfish -N -C name=val [...other options...]
+
+-----------------------------
+3. Basic HTTP authentication
+-----------------------------
+
+For simple HTTP credentials, you can use the -A option to pass the
+credentials.
+
+$ ./skipfish -A user:pass [...other options...]
+
diff --git a/dictionaries/README-FIRST b/doc/dictionaries.txt
similarity index 100%
rename from dictionaries/README-FIRST
rename to doc/dictionaries.txt
diff --git a/doc/signatures.txt b/doc/signatures.txt
new file mode 100644
index 0000000..c648bb7
--- /dev/null
+++ b/doc/signatures.txt
@@ -0,0 +1,152 @@
+
+-----------------
+ 1. Introduction
+-----------------
+
+With skipfish signatures it is possible to find interesting content,
+or even vulnerabilities, in server responses. The signatures follow
+a Snort-like syntax and most keywords behave similarly as well.
+
+Signatures focus on detecting web application vulnerabilities, information
+leaks and can recognize interesting web applications, such as phpmyadmin
+or phpinfo() pages.
+
+Signatures could also detect vulnerable software packages (e.g. old
+WordPress instances) but this is a task that fits vulnerability scanners,
+like Nessus and Nikto, better.
+
+-----------------
+ 2. Contributing
+-----------------
+
+The current signature list is nice but far from complete. If you have
+new signatures or can optimize existing ones, please help out by reporting
+this via our issue tracker:
+
+https://code.google.com/p/skipfish/issues/entry?template=Content%20signatures
+
+-----------------------
+ 3. Signature keywords
+-----------------------
+
+=== content:[!]"Server Error in '"; memo:"ASP.NET Yellow Screen of Death";
+id:22002; prob:40402; content:"error '"; memo:"Microsoft runtime error";
+id:22003; prob:40402; content:"[an error occurred while processing"; memo:"SHTML error";
+id:22004; prob:40402; content:"Traceback (most recent call last):"; memo:"Python error";
+id:22005; prob:40402; content:"
"; content:"
description The server "; depth:512; memo:"Java server exception";
+
+# PHP HTML and text errors. The text and HTML sigs can perhaps be merged.
+id:22008; prob:40402; content:"Fatal error: "; content:" on line "; depth:512; memo:"PHP error (HTML)";
+id:22009; prob:40402; content:"Fatal error: "; content:" on line "; depth:512; memo:"PHP error (text)";
+id:22010; prob:40402; content:"Parse error: "; content:" on line "; depth:512; memo:"PHP parse error (HTML)";
+id:22011; prob:40402; content:"Parse error: "; content:" on line "; depth:512; memo:"PHP parse error (text)";
+id:22012; prob:40402; content:"Notice: "; content:" on line "; depth:512; memo:"PHP notice (HTML)";
+id:22013; prob:40402; content:"Notice: "; content:" on line "; depth:512; memo:"PHP notice (text)";
+id:22014; prob:40402; content:"Strict Standards: "; content:" on line "; depth:512; memo:"PHP warning (HTML)";
+id:22015; prob:40402; content:"Strict Standards: "; content:" on line "; depth:512; memo:"PHP warning (text)";
+id:22016; prob:40402; content:"Catchable fatal error: "; content:" on line "; depth:512; memo:"PHP error (HTML)";
+id:22017; prob:40402; content:"Catchable fatal error: "; content:" on line "; depth:512; memo:"PHP error (text)";
+id:22018; prob:40402; content:"Warning: "; content:" on line "; depth:512; memo:"PHP warning (HTML)";
+id:22019; prob:40402; content:"Warning: "; content:" on line "; depth:512; memo:"PHP warning (text)";
+
diff --git a/signatures/mime.sigs b/signatures/mime.sigs
new file mode 100644
index 0000000..8a2d54c
--- /dev/null
+++ b/signatures/mime.sigs
@@ -0,0 +1,13 @@
+
+####################################
+# INTERESTING MIME TYPES
+
+id:41001; sev:4; mime:"application/vnd.ms-excel"; memo:"Microsoft Excel spreadsheet (mime)";
+id:41002; sev:4; mime:"application/vnd.ms-project"; memo:"Microsoft Project file (mime)";
+id:41003; sev:4; mime:"application/x-httpd-php-source"; memo:"PHP source file (mime)";
+id:41004; sev:4; mime:"application/x-msdos-program"; memo:"DOS executable or script (mime)";
+id:41005; sev:4; mime:"application/x-msi"; memo:"MSI installer (mime)";
+id:41006; sev:3; mime:"application/x-python-code"; memo:"Python code (mime)";
+id:41007; sev:3; mime:"application/x-shellscript"; memo:"Shell script (mime)";
+
+
diff --git a/signatures/signatures.conf b/signatures/signatures.conf
new file mode 100644
index 0000000..bba169d
--- /dev/null
+++ b/signatures/signatures.conf
@@ -0,0 +1,28 @@
+#############################################
+##
+## Master signature file.
+##
+
+# The mime signatures warn about server responses that have an interesting
+# mime. For example anything that is presented as php-source will likely
+# be interesting
+include signatures/mime.sigs
+
+# The files signature will use the content to determine if a response
+# is an interesting file. For example, a SVN file.
+include signatures/files.sigs
+
+# The messages signatures look for interesting server messages. Most
+# are based on errors, such as caused by incorrect SQL queries or PHP
+# execution failures.
+include signatures/messages.sigs
+
+# The apps signatures will help to find pages and applications whose
+# functionality is a security risk by default. For example, phpinfo()
+# pages that leak information or CMS admin interfaces.
+include signatures/apps.sigs
+
+# Context signatures are linked to injection tests. They look for strings
+# that are relevant to the current injection test and help to highlight
+# potential vulnerabilities.
+include signatures/context.sigs
diff --git a/alloc-inl.h b/src/alloc-inl.h
similarity index 100%
rename from alloc-inl.h
rename to src/alloc-inl.h
diff --git a/analysis.c b/src/analysis.c
similarity index 87%
rename from analysis.c
rename to src/analysis.c
index 2b29aa1..9f3ff34 100644
--- a/analysis.c
+++ b/src/analysis.c
@@ -29,6 +29,8 @@
#include "database.h"
#include "crawler.h"
#include "analysis.h"
+#include "signatures.h"
+#include "pcre.h"
u8 no_parse, /* Disable HTML link detection */
warn_mixed, /* Warn on mixed content */
@@ -448,10 +450,10 @@ static u8 maybe_xsrf(u8* token) {
/* Another helper for scrape_response(): examines all tags
up until , then adds them as parameters to current request. */
-static void collect_form_data(struct http_request* req,
- struct http_request* orig_req,
- struct http_response* orig_res,
- u8* cur_str, u8 is_post) {
+void collect_form_data(struct http_request* req,
+ struct http_request* orig_req,
+ struct http_response* orig_res,
+ u8* cur_str, u8 is_post) {
u8 has_xsrf = 0, pass_form = 0, file_form = 0;
u32 tag_cnt = 0;
@@ -637,6 +639,70 @@ static u8 is_mostly_ascii(struct http_response* res) {
}
+
+
+struct http_request* make_form_req(struct http_request *req,
+ struct http_request *base,
+ u8* cur_str, u8* target) {
+
+ u8 *method, *clean_url;
+ u8 *dirty_url;
+ struct http_request* n;
+ u8 parse_form = 1;
+
+ FIND_AND_MOVE(dirty_url, cur_str, "action=");
+ FIND_AND_MOVE(method, cur_str, "method=");
+
+ /* See if we need to POST this form or not. */
+
+ if (method && *method) {
+ if (strchr("\"'", *method)) method++;
+ if (tolower(method[0]) == 'p') parse_form = 2;
+ }
+
+ /* If a form target is specified, we need to use that */
+
+ if (target) {
+ dirty_url = ck_strdup(target);
+ } else if (!dirty_url || !*dirty_url || !prefix(dirty_url, "\"\"") ||
+ !prefix(dirty_url, "''")) {
+
+ /* Forms with no URL submit to current location. */
+ dirty_url = serialize_path(req, 1, 0);
+ } else {
+ /* Last, extract the URL from the tag */
+ EXTRACT_ALLOC_VAL(dirty_url, dirty_url);
+ }
+
+ clean_url = html_decode_param(dirty_url, 0);
+ ck_free(dirty_url);
+
+ n = ck_alloc(sizeof(struct http_request));
+
+ n->pivot = req->pivot;
+ if (parse_form == 2) {
+ ck_free(n->method);
+ n->method = ck_strdup((u8*)"POST");
+ } else {
+ /* On GET forms, strip existing query params to get a submission
+ target. */
+ u8* qmark = (u8*)strchr((char*)clean_url, '?');
+ if (qmark) *qmark = 0;
+ }
+
+ if (parse_url(clean_url, n, base ? base : req)) {
+ DEBUG("Unable to parse_url from form: %s\n", clean_url);
+ ck_free(clean_url);
+ destroy_request(n);
+ return NULL;
+ }
+
+ ck_free(clean_url);
+ return n;
+
+}
+
+
/* Analyzes response headers (Location, etc), body to extract new links,
keyword guesses. This code is designed to be simple and fast, but it
does not even try to understand the intricacies of HTML or whatever
@@ -687,7 +753,8 @@ void scrape_response(struct http_request* req, struct http_response* res) {
if (*cur_str == '<' && (tag_end = (u8*)strchr((char*)cur_str + 1, '>'))) {
u32 link_type = 0;
- u8 set_base = 0, parse_form = 0;
+ u8 set_base = 0;
+ u8 is_post = 0;
u8 *dirty_url = NULL, *clean_url = NULL, *meta_url = NULL,
*delete_dirty = NULL;
@@ -747,25 +814,16 @@ void scrape_response(struct http_request* req, struct http_response* res) {
} else if (ISTAG(cur_str, "form")) {
- u8* method;
- parse_form = 1;
- FIND_AND_MOVE(dirty_url, cur_str, "action=");
+ /* Parse the form and kick off a new pivot for further testing */
+ struct http_request* n = make_form_req(req, base, cur_str, NULL);
+ if (n) {
+ if (url_allowed(n) && R(100) < crawl_prob && !no_forms) {
+ is_post = (n->method && !strcmp((char*)n->method, "POST"));
- /* See if we need to POST this form or not. */
-
- FIND_AND_MOVE(method, cur_str, "method=");
-
- if (method && *method) {
- if (strchr("\"'", *method)) method++;
- if (tolower(method[0]) == 'p') parse_form = 2;
- }
-
- /* Forms with no URL submit to current location. */
-
- if (!dirty_url || !*dirty_url || !prefix(dirty_url, (char*)"\"\"") ||
- !prefix(dirty_url, (char*)"''")) {
- dirty_url = serialize_path(req, 1, 0);
- delete_dirty = dirty_url;
+ collect_form_data(n, req, res, tag_end + 1, is_post);
+ maybe_add_pivot(n, NULL, 5);
+ }
+ destroy_request(n);
}
} else {
@@ -806,38 +864,6 @@ void scrape_response(struct http_request* req, struct http_response* res) {
n->pivot = req->pivot;
if (!parse_url(clean_url, n, base ? base : req)) base = n;
- } else if (parse_form) {
-
- /* : ") ||
- !prefix(tmp + 3, "Fatal error: ") ||
- !prefix(tmp + 3, "Parse error: ") ||
- !prefix(tmp + 3, "Deprecated: ") ||
- !prefix(tmp + 3, "Strict Standards: ") ||
- !prefix(tmp + 3, "Catchable fatal error: "))) {
- problem(PROB_ERROR_POI, req, res, (u8*)"PHP error (HTML)", req->pivot, 0);
- return;
- }
-
- }
-
- if (strstr((char*)res->payload, "Warning: MySQL: ") ||
- strstr((char*)res->payload, "Unclosed quotation mark") ||
- strstr((char*)res->payload, "Syntax error in string in query expression") ||
- strstr((char*)res->payload, "java.sql.SQLException") ||
- strstr((char*)res->payload, "SqlClient.SqlException: Syntax error") ||
- strstr((char*)res->payload, "Incorrect syntax near") ||
- strstr((char*)res->payload, "PostgreSQL query failed") ||
- strstr((char*)res->payload, "Dynamic SQL Error") ||
- strstr((char*)res->payload, "unable to perform query") ||
- strstr((char*)res->payload, "Microsoft OLE DB Provider for ODBC Drivers") ||
- strstr((char*)res->payload, "[Microsoft][ODBC SQL Server Driver]") ||
- strstr((char*)res->payload, "You have an error in your SQL syntax; ") ||
- strstr((char*)res->payload, "[DM_QUERY_E_SYNTAX]")) {
- problem(PROB_ERROR_POI, req, res, (u8*)"SQL server error", req->pivot, 0);
- return;
- }
-
if (((tmp = (u8*)strstr((char*)res->payload, "ORA-")) ||
(tmp = (u8*)strstr((char*)res->payload, "FRM-"))) &&
isdigit(tmp[4]) && tmp[9] == ':') {
@@ -2383,54 +2335,6 @@ static void check_for_stuff(struct http_request* req,
return;
}
- if (strstr((char*)res->payload, "[an error occurred while processing")) {
- problem(PROB_ERROR_POI, req, res, (u8*)"SHTML error", req->pivot, 0);
- return;
- }
-
- if (strstr((char*)res->payload, "Traceback (most recent call last):")) {
- problem(PROB_ERROR_POI, req, res, (u8*)"Python error", req->pivot, 0);
- return;
- }
-
- /* Interesting files. */
-
- if (strstr((char*)res->payload, "ADDRESS=(PROTOCOL=")) {
- problem(PROB_FILE_POI, req, res, (u8*)"SQL configuration or logs", req->pivot, 0);
- return;
- }
-
- if (inl_strcasestr(res->payload, (u8*)";database=") &&
- inl_strcasestr(res->payload, (u8*)";pwd=")) {
- problem(PROB_FILE_POI, req, res, (u8*)"ODBC connect string", req->pivot, 0);
- return;
- }
-
- if (strstr((char*)sniffbuf, "Index of /") ||
strstr((char*)sniffbuf, ">[To Parent Directory]<")) {
problem(PROB_DIR_LIST, req, res, (u8*)"Directory listing", req->pivot, 0);
+
+ /* Since we have the listing, we'll skip bruteforcing directory */
+ req->pivot->no_fuzz = 3;
return;
}
@@ -2516,61 +2418,12 @@ static void check_for_stuff(struct http_request* req,
}
- if (strstr((char*)sniffbuf, "