1.74b:Non-HTTPS password form analysis added.

This commit is contained in:
Steve Pinkham 2010-11-21 07:37:01 -05:00
parent 8f1f9b0e0f
commit 514ec354db
7 changed files with 24 additions and 8 deletions

View File

@ -1,3 +1,8 @@
Version 1.74b:
--------------
- Non-HTTPS password form analysis added.
Version 1.73b:
--------------

View File

@ -20,7 +20,7 @@
#
PROGNAME = skipfish
VERSION = 1.73b
VERSION = 1.74b
OBJFILES = http_client.c database.c crawler.c analysis.c report.c
INCFILES = alloc-inl.h string-inl.h debug.h types.h http_client.h \

12
README
View File

@ -116,6 +116,7 @@ A rough list of the security checks offered by the tool is outlined below.
* Attacker-supplied script and CSS inclusion vectors (stored and reflected).
* External untrusted script and CSS inclusion vectors.
* Mixed content problems on script and CSS resources (optional).
* Password forms submitting from or to non-SSL pages (optional).
* Incorrect or missing MIME types on renderables.
* Generic MIME types on renderables.
* Incorrect or missing charsets on renderables.
@ -365,11 +366,12 @@ noise; if so, you may use -J to mark these issues as "low risk" unless the
scanner can explicitly see its own user input being echoed back on the
resulting page. This may miss many subtle attack vectors, though.
Some sites that handle sensitive user data care about SSL - and about getting
it right. Skipfish may optionally assist you in figuring out problematic
mixed content scenarios - use the -M option to enable this. The scanner will
complain about situations such as http:// scripts being loaded on https://
pages - but will disregard non-risk scenarios such as images.
Some sites that handle sensitive user data care about SSL - and about getting
it right. Skipfish may optionally assist you in figuring out problematic
mixed content or password submission scenarios - use the -M option to enable
this. The scanner will complain about situations such as http:// scripts
being loaded on https:// pages - but will disregard non-risk scenarios such
as images.
Likewise, certain pedantic sites may care about cases where caching is
restricted on HTTP/1.1 level, but no explicit HTTP/1.0 caching directive is

View File

@ -580,7 +580,12 @@ next_tag:
final_checks:
if (pass_form) {
problem(PROB_PASS_FORM, req, orig_res, NULL, req->pivot, 0);
if (warn_mixed && (req->proto != PROTO_HTTPS || orig_req->proto != PROTO_HTTPS))
problem(PROB_PASS_NOSSL, req, orig_res, NULL, req->pivot, 0);
else
problem(PROB_PASS_FORM, req, orig_res, NULL, req->pivot, 0);
} else {
if (tag_cnt && !has_xsrf) {

View File

@ -312,6 +312,7 @@ var issue_desc= {
"40402": "Interesting server message",
"40501": "Directory traversal possible",
"40601": "Incorrect caching directives (higher risk)",
"40701": "Password form submits from or to non-HTTPS page",
"50101": "Server-side XML injection vector",
"50102": "Shell injection vector",

View File

@ -262,6 +262,7 @@ u8 is_c_sens(struct pivot_desc* pv);
#define PROB_CACHE_LOW 30701 /* Cache nit-picking */
/* - Moderate severity issues (data compromise): */
#define PROB_BODY_XSS 40101 /* Document body XSS */
@ -284,6 +285,8 @@ u8 is_c_sens(struct pivot_desc* pv);
#define PROB_CACHE_HI 40601 /* Serious caching issues */
#define PROB_PASS_NOSSL 40701 /* Password form, no HTTPS */
/* - High severity issues (system compromise): */
#define PROB_XML_INJECT 50101 /* Backend XML injection */

View File

@ -101,7 +101,7 @@ static void usage(char* argv0) {
" -o dir - write output to specified directory (required)\n"
" -J - be less picky about MIME / charset mismatches\n"
" -M - log warnings about mixed content\n"
" -M - log warnings about mixed content / non-SSL passwords\n"
" -E - log all HTTP/1.0 / HTTP/1.1 caching intent mismatches\n"
" -U - log all external URLs and e-mails seen\n"
" -Q - completely suppress duplicate nodes in reports\n"