1.80b: option to not save binary responses, and make charset errors less noisy by default

  - New option (-e) to delete binary payloads.
  - -J option is now obsolete (on by default).
Steve Pinkham 2010-12-03 15:30:00 -05:00
parent ffee2aec54
commit 35607dcb58
9 changed files with 89 additions and 37 deletions

ChangeLog
View File

@@ -1,3 +1,10 @@
+Version 1.80b:
+--------------
+
+  - New option (-e) to delete binary payloads.
+
+  - -J option is now obsolete (on by default).
+
 Version 1.79b:
 --------------

Makefile
View File

@@ -20,7 +20,7 @@
 #
 PROGNAME = skipfish
-VERSION  = 1.79b
+VERSION  = 1.80b
 OBJFILES = http_client.c database.c crawler.c analysis.c report.c
 INCFILES = alloc-inl.h string-inl.h debug.h types.h http_client.h \

README
View File

@@ -358,15 +358,6 @@ the test dramatically. Another similarly crippling option that reduces the
 risk of persistent effects of a scan is -O, which inhibits all form parsing
 and submission steps.
 
-By default, skipfish complains loudly about all MIME or character set
-mismatches on renderable documents, and classifies many of them as "medium
-risk"; this is because, if any user-controlled content is returned, the
-situation could lead to cross-site scripting attacks in certain browsers. On
-some poorly designed and maintained sites, this may contribute too much
-noise; if so, you may use -J to mark these issues as "low risk" unless the
-scanner explicitly sees its own user input being echoed back on the
-resulting page. This may miss many subtle attack vectors, though.
-
 Some sites that handle sensitive user data care about SSL - and about getting
 it right. Skipfish may optionally assist you in figuring out problematic
 mixed content or password submission scenarios - use the -M option to enable
@@ -412,9 +403,12 @@ sites.
 Lastly, -f controls the maximum number of consecutive HTTP errors you are
 willing to see before aborting the scan; and -s sets the maximum length of a
-response to fetch and parse (longer responses will be truncated).
+response to fetch and parse (longer responses will be truncated). When
+scanning large, multimedia-heavy sites, you may also want to specify -e -
+preventing binary documents from being kept verbatim for the report, and
+freeing up a lot of RAM.
 
-Further rate-limiting is available through third-party user mode tools such
+Further rate-limiting is available through third-party user mode tools such
 as trickle, or kernel-level traffic shaping.
 
 Oh, and real-time scan statistics can be suppressed with -u.
@@ -430,11 +424,10 @@ issues):
 $ ./skipfish -MEU -C "AuthCookie=value" -X /logout.aspx -o output_dir \
   http://www.example.com/
 
-Five-connection crawl, but no brute-force; pretending to be MSIE and caring
-less about ambiguous MIME or character set mismatches, and trusting
-example.com links:
+Five-connection crawl, but no brute-force; pretending to be MSIE and
+trusting example.com content:
 
-$ ./skipfish -m 5 -LVJ -W /dev/null -o output_dir -b ie -B example.com \
+$ ./skipfish -m 5 -LV -W /dev/null -o output_dir -b ie -B example.com \
   http://www.example.com/
 
 Brute force only (no HTML link extraction), limited to a single directory and

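For illustration (this example is not part of the commit), a scan of a large,
multimedia-heavy site using the new option could look like:

$ ./skipfish -e -m 5 -o output_dir http://www.example.com/

Here -e keeps the report and memory footprint small by replacing binary
payloads with short placeholders, as the README hunk above describes.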
analysis.c
View File

@@ -34,7 +34,6 @@ u8  no_parse,        /* Disable HTML link detection */
     warn_mixed,      /* Warn on mixed content       */
     log_ext_urls,    /* Log all external URLs       */
     no_forms,        /* Do not submit forms         */
-    relaxed_mime,    /* Relax cset / mime checks    */
     pedantic_cache;  /* Match HTTP/1.0 and HTTP/1.1 */
 
 /* Form autofill hints: */
@@ -1789,16 +1788,18 @@ binary_checks:
   if ((tmp = GET_HDR((u8*)"Content-Disposition", &res->hdr)) &&
       inl_strcasestr(tmp, (u8*)"attachment")) return;
 
-  if (!relaxed_mime) {
+//  if (!relaxed_mime) {
+//
+//    /* CHECK 5A: Renderable documents that are not CSS or static JS are of
+//       particular interest when it comes to MIME / charset mistakes. */
+//
+//    if (is_mostly_ascii(res) && !is_css(res) && (!is_javascript(res) ||
+//        (!strstr((char*)res->payload, "function ") &&
+//         !strstr((char*)res->payload, "function(")))) high_risk = 1;
+//
+//  } else
 
   /* CHECK 5A: Renderable documents that are not CSS or static JS are of
      particular interest when it comes to MIME / charset mistakes. */
 
   if (is_mostly_ascii(res) && !is_css(res) && (!is_javascript(res) ||
       (!strstr((char*)res->payload, "function ") &&
        !strstr((char*)res->payload, "function(")))) high_risk = 1;
 
-  } else {
+  {
 
   /* CHECK 5B: Documents with skipfish signature strings echoed back
      are of particular interest when it comes to MIME / charset mistakes. */
@@ -2544,3 +2545,34 @@ static void check_for_stuff(struct http_request* req,
   }
 }
+
+
+/* Deletes payload of binary responses if requested. This is called when pivot
+   enters PSTATE_DONE. */
+
+void maybe_delete_payload(struct pivot_desc* pv) {
+  u8  tmp[64];
+  u32 i;
+
+  if (pv->res->pay_len > 256 && !is_mostly_ascii(pv->res)) {
+    ck_free(pv->res->payload);
+    sprintf((char*)tmp, "[Deleted binary payload (%u bytes)]", pv->res->pay_len);
+    pv->res->payload = ck_strdup(tmp);
+    pv->res->pay_len = strlen((char*)tmp);
+  }
+
+  for (i=0;i<pv->issue_cnt;i++) {
+
+    if (pv->issue[i].res->pay_len > 256 &&
+        !is_mostly_ascii(pv->issue[i].res)) {
+      ck_free(pv->issue[i].res->payload);
+      sprintf((char*)tmp, "[Deleted binary payload (%u bytes)]",
+              pv->issue[i].res->pay_len);
+      pv->issue[i].res->payload = ck_strdup(tmp);
+      pv->issue[i].res->pay_len = strlen((char*)tmp);
+    }
+
+  }
+
+}
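The deletion is gated on is_mostly_ascii(), which is defined elsewhere in
analysis.c and not shown in this diff. As a rough, hypothetical stand-in (an
assumption for illustration, not skipfish's actual heuristic), such a check
might classify a buffer as text when nearly all of its bytes are printable:

/* Hypothetical stand-in for is_mostly_ascii(); the real heuristic lives
   elsewhere in analysis.c and takes a struct http_response*. Treats a buffer
   as text when at least ~90% of its bytes are printable ASCII or common
   whitespace. */
static int mostly_ascii_sketch(const unsigned char* buf, unsigned int len) {
  unsigned int i, printable = 0;
  if (!len) return 1;
  for (i = 0; i < len; i++)
    if ((buf[i] >= 0x20 && buf[i] <= 0x7E) ||
        buf[i] == '\t' || buf[i] == '\r' || buf[i] == '\n') printable++;
  return printable >= len - len / 10;  /* roughly 90% or more printable */
}

Responses that pass such a check are kept verbatim even with -e; only
payloads over 256 bytes that fail it are replaced with the placeholder.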

analysis.h
View File

@@ -31,7 +31,6 @@ extern u8 no_parse,   /* Disable HTML link detection */
           warn_mixed,      /* Warn on mixed content       */
           log_ext_urls,    /* Log all external URLs       */
           no_forms,        /* Do not submit forms         */
-          relaxed_mime,    /* Relax cset / mime checks    */
           pedantic_cache;  /* Match HTTP/1.0 and HTTP/1.1 */
 
 /* Helper macros to group various useful checks: */
@@ -74,6 +73,10 @@ void scrape_response(struct http_request* req, struct http_response* res);
 void content_checks(struct http_request* req, struct http_response* res);
 
+/* Deletes payload of binary responses if requested. */
+
+void maybe_delete_payload(struct pivot_desc* pv);
+
 /* MIME detector output codes: */
 
 #define MIME_NONE 0 /* Checks missing or failed */

crawler.c
View File

@@ -35,6 +35,7 @@
 u32 crawl_prob = 100;  /* Crawl probability (1-100%)     */
 u8  no_fuzz_ext;       /* Don't fuzz extensions for dirs */
 u8  no_500_dir;        /* Don't crawl 500 directories    */
+u8  delete_bin;        /* Don't keep binary responses    */
 
 /*
@@ -1453,6 +1454,7 @@ static void end_injection_checks(struct pivot_desc* pv) {
   } else {
     pv->state = PSTATE_DONE;
+    if (delete_bin) maybe_delete_payload(pv);
     return;
   }
@@ -1461,6 +1463,7 @@ static void end_injection_checks(struct pivot_desc* pv) {
   if (pv->bogus_par || pv->res_varies) {
     pv->state = PSTATE_DONE;
+    if (delete_bin) maybe_delete_payload(pv);
   } else {
     crawl_par_numerical_init(pv);
   }
@@ -1492,6 +1495,7 @@ static void crawl_parametric_init(struct pivot_desc* pv) {
   if (pv->fuzz_par < 0 || !url_allowed(pv->req) || !param_allowed(pv->name)) {
     pv->state = PSTATE_DONE;
+    if (delete_bin) maybe_delete_payload(pv);
     return;
   }
@@ -1787,6 +1791,8 @@ static u8 par_numerical_callback(struct http_request* req,
     secondary_ext_init(orig_pv, req, res, 1);
 
+  if (delete_bin) maybe_delete_payload(n);
+
 schedule_next:
 
   if (!(--(orig_pv->num_pending))) {
@@ -1979,6 +1985,8 @@ static u8 par_dict_callback(struct http_request* req,
   if (!req->user_val)
     secondary_ext_init(orig_pv, req, res, 1);
 
+  if (delete_bin) maybe_delete_payload(n);
+
 schedule_next:
 
   if (!req->user_val)
@@ -2000,7 +2008,10 @@ void crawl_par_trylist_init(struct pivot_desc* pv) {
   if (pv->fuzz_par == -1 || pv->bogus_par || pv->res_varies
       || !descendants_ok(pv)) {
     pv->state = PSTATE_DONE;
+    if (delete_bin) maybe_delete_payload(pv);
     return;
   } else
     pv->state = PSTATE_PAR_TRYLIST;
@@ -2046,6 +2057,7 @@ void crawl_par_trylist_init(struct pivot_desc* pv) {
   if (!pv->try_pending) {
     pv->state = PSTATE_DONE;
+    if (delete_bin) maybe_delete_payload(pv);
     return;
   }
@@ -2112,10 +2124,14 @@ static u8 par_trylist_callback(struct http_request* req,
     secondary_ext_init(orig_pv, req, res, 1);
 
+  if (delete_bin) maybe_delete_payload(n);
+
 schedule_next:
 
-  if (!(--(orig_pv->try_pending)))
+  if (!(--(orig_pv->try_pending))) {
     orig_pv->state = PSTATE_DONE;
+    if (delete_bin) maybe_delete_payload(orig_pv);
+  }
 
   /* Copied over to pivot. */
 
   return n ? 1 : 0;
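Note the recurring shape of these hunks: every place a pivot transitions to
PSTATE_DONE now gains the same two-line sequence. A condensed, self-contained
sketch of that pattern (the commit inlines the two lines at each site rather
than introducing a helper like this; type and state names below are stand-ins,
not skipfish's real definitions):

/* Stand-in types and values for illustration only. */
enum { PSTATE_DONE = 1 };
struct pivot { int state; };

static unsigned char delete_bin;   /* set by the new -e option */

static void maybe_delete_payload(struct pivot* pv) {
  (void)pv;                        /* stub; the real logic is in analysis.c */
}

static void finish_pivot(struct pivot* pv) {
  pv->state = PSTATE_DONE;                    /* pivot is finished...       */
  if (delete_bin) maybe_delete_payload(pv);   /* ...optionally shed payload */
}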

crawler.h
View File

@@ -30,7 +30,8 @@ extern u32 crawl_prob;  /* Crawl probability (1-100%) */
 extern u8  no_parse,     /* Disable HTML link detection */
            warn_mixed,   /* Warn on mixed content?      */
            no_fuzz_ext,  /* Don't fuzz ext in dirs?     */
-           no_500_dir,   /* Don't assume dirs on 500    */
+           no_500_dir,   /* Don't assume dirs on 500    */
+           delete_bin,   /* Don't keep binary responses */
            log_ext_urls; /* Log external URLs?          */
 
 /* Provisional debugging callback. */

dictionaries/README-FIRST
View File

@@ -38,7 +38,7 @@ The basic modes you should be aware of (in order of request cost):
    This method has a cost of about 1,700 requests per fuzzed location, and is
    recommended for rapid assessments, especially when working with slow
-   servers.
+   servers or very large services.
 
 4) Normal dictionary fuzzing. In this mode, every ${filename}.${extension}
    pair will be attempted. This mode is significantly slower, but offers

skipfish.c
View File

@@ -100,7 +100,6 @@ static void usage(char* argv0) {
 "Reporting options:\n\n"
 "  -o dir         - write output to specified directory (required)\n"
-"  -J             - be less picky about MIME / charset mismatches\n"
 "  -M             - log warnings about mixed content / non-SSL passwords\n"
 "  -E             - log all HTTP/1.0 / HTTP/1.1 caching intent mismatches\n"
 "  -U             - log all external URLs and e-mails seen\n"
@@ -125,7 +124,8 @@ static void usage(char* argv0) {
 "  -t req_tmout   - total request response timeout (%u s)\n"
 "  -w rw_tmout    - individual network I/O timeout (%u s)\n"
 "  -i idle_tmout  - timeout on idle HTTP connections (%u s)\n"
-"  -s s_limit     - response size limit (%u B)\n\n"
+"  -s s_limit     - response size limit (%u B)\n"
+"  -e             - do not keep binary responses for reporting\n\n"
 "Send comments and complaints to <lcamtuf@google.com>.\n", argv0,
 max_depth, max_children, max_descendants, max_requests, DEF_WORDLIST,
@@ -207,7 +207,7 @@ int main(int argc, char** argv) {
   SAY("skipfish version " VERSION " by <lcamtuf@google.com>\n");
 
   while ((opt = getopt(argc, argv,
-        "+A:F:C:H:b:Nd:c:x:r:p:I:X:S:D:PJOYQMZUEK:W:LVT:G:R:B:q:g:m:f:t:w:i:s:o:hu")) > 0)
+        "+A:F:C:H:b:Nd:c:x:r:p:I:X:S:D:POYQMZUEK:W:LVT:G:R:B:q:g:m:f:t:w:i:s:o:hue")) > 0)
 
   switch (opt) {
@@ -277,10 +277,6 @@ int main(int argc, char** argv) {
       APPEND_FILTER(deny_urls, num_deny_urls, optarg);
       break;
 
-    case 'J':
-      relaxed_mime = 1;
-      break;
-
     case 'S':
       if (*optarg == '*') optarg++;
       APPEND_FILTER(deny_strings, num_deny_strings, optarg);
@@ -436,6 +432,10 @@ int main(int argc, char** argv) {
       be_quiet = 1;
       break;
 
+    case 'e':
+      delete_bin = 1;
+      break;
+
     case 'Z':
       no_500_dir = 1;
       break;
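For readers less familiar with getopt(), the optstring change above is the
whole wiring: 'J' disappears and 'e' is added without a trailing colon, i.e.
as a boolean switch taking no argument. A minimal, self-contained sketch of
the same mechanism (not code from the tree):

#include <stdio.h>
#include <unistd.h>

static unsigned char delete_bin;  /* mirrors skipfish's global */

int main(int argc, char** argv) {
  int opt;
  /* 'e' has no ':' suffix in the optstring, so getopt() treats it as a
     plain flag rather than an option expecting an argument. */
  while ((opt = getopt(argc, argv, "e")) > 0)
    if (opt == 'e') delete_bin = 1;
  printf("delete_bin = %u\n", delete_bin);
  return 0;
}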