1.84b: Option -S removed.

Steve Pinkham 2011-01-10 14:22:09 -05:00
parent 0f835b3def
commit baf9921f42
7 changed files with 9 additions and 18 deletions

ChangeLog

@@ -1,3 +1,8 @@
+Version 1.84b:
+--------------
+- Option -S removed.
 Version 1.83b:
 --------------

Makefile

@@ -20,7 +20,7 @@
 #
 PROGNAME = skipfish
-VERSION = 1.83b
+VERSION = 1.84b
 OBJFILES = http_client.c database.c crawler.c analysis.c report.c
 INCFILES = alloc-inl.h string-inl.h debug.h types.h http_client.h \

README

@@ -280,8 +280,7 @@ $ ./skipfish -X /logout/logout.aspx ...other parameters...
 The -X option is also useful for speeding up your scans by excluding /icons/,
 /doc/, /manuals/, and other standard, mundane locations along these lines. In
-general, you can use -X, plus -I (only spider URLs matching a substring) and
--S (ignore links on pages where a substring appears in response body) to
+general, you can use -X and -I (only spider URLs matching a substring) to
 limit the scope of a scan any way you like - including restricting it only to
 a specific protocol and port:
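
For illustration, an invocation of the kind this paragraph describes might look like the following (the host, port, and trailing parameters are placeholders, not taken from this commit):

$ ./skipfish -I http://example.com:1234/ ...other parameters...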

database.c

@@ -39,14 +39,12 @@
 struct pivot_desc root_pivot;
 u8 **deny_urls, /* List of banned URL substrings */
-**deny_strings, /* List of banned page substrings */
 **allow_urls, /* List of required URL substrings */
 **allow_domains, /* List of allowed vhosts */
 **trust_domains, /* List of trusted vhosts */
 **skip_params; /* List of parameters to ignore */
 u32 num_deny_urls,
-num_deny_strings,
 num_allow_urls,
 num_allow_domains,
 num_trust_domains,
@@ -1398,7 +1396,6 @@ void destroy_database() {
 dealloc_pivots(0);
 ck_free(deny_urls);
-ck_free(deny_strings);
 ck_free(allow_urls);
 ck_free(allow_domains);
 ck_free(trust_domains);

database.h

@@ -334,11 +334,10 @@ u8 same_page(struct http_sig* sig1, struct http_sig* sig2);
 (_cnt)++; \
 } while (0)
-extern u8 **deny_urls, **deny_strings, **allow_urls, **allow_domains,
+extern u8 **deny_urls, **allow_urls, **allow_domains,
 **trust_domains, **skip_params;
 extern u32 num_deny_urls,
-num_deny_strings,
 num_allow_urls,
 num_allow_domains,
 num_trust_domains,
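
The two context lines above the removed extern declaration are the tail of the APPEND_FILTER macro that grows these lists (the same macro the removed case 'S' handler called). A rough, self-contained sketch of how such an append macro works, assuming skipfish's ck_realloc() behaves like realloc(); the sketch name and the demo main() are illustrative, not from the source:

#include <stdio.h>
#include <stdlib.h>

typedef unsigned char u8;
typedef unsigned int  u32;

/* Sketch only: grow a heap-allocated pointer list by one entry, mirroring
   the (_cnt)++ / } while (0) shape visible in the hunk above. Plain
   realloc() stands in for skipfish's ck_realloc(); error handling omitted. */
#define APPEND_FILTER_SKETCH(_ptr, _cnt, _val) do { \
    (_ptr) = realloc((_ptr), ((_cnt) + 1) * sizeof(u8*)); \
    (_ptr)[(_cnt)] = (u8*)(_val); \
    (_cnt)++; \
  } while (0)

int main(void) {
  u8 **deny_urls = NULL;   /* illustrative list, as in database.c */
  u32 num_deny_urls = 0;

  APPEND_FILTER_SKETCH(deny_urls, num_deny_urls, "/logout/");
  printf("%u filter(s), first = %s\n", num_deny_urls, (char*)deny_urls[0]);

  free(deny_urls);
  return 0;
}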

skipfish.1

@@ -60,9 +60,6 @@ only follow URLs matching 'string'
 .B \-X string
 exclude URLs matching 'string'
 .TP
-.B \-S string
-exclude pages containing 'string'
-.TP
 .B \-K string
 do not fuzz query parameters or form fields named 'string'
 .TP

skipfish.c

@@ -89,7 +89,6 @@ static void usage(char* argv0) {
 " -q hex - repeat probabilistic scan with given seed\n"
 " -I string - only follow URLs matching 'string'\n"
 " -X string - exclude URLs matching 'string'\n"
-" -S string - exclude pages containing 'string'\n"
 " -K string - do not fuzz parameters named 'string'\n"
 " -D domain - crawl cross-site links to another domain\n"
 " -B domain - trust, but do not crawl, another domain\n"
@@ -207,7 +206,7 @@ int main(int argc, char** argv) {
 SAY("skipfish version " VERSION " by <lcamtuf@google.com>\n");
 while ((opt = getopt(argc, argv,
-"+A:F:C:H:b:Nd:c:x:r:p:I:X:S:D:POYQMZUEK:W:LVT:G:R:B:q:g:m:f:t:w:i:s:o:hue")) > 0)
+"+A:F:C:H:b:Nd:c:x:r:p:I:X:D:POYQMZUEK:W:LVT:G:R:B:q:g:m:f:t:w:i:s:o:hue")) > 0)
 switch (opt) {
@@ -277,11 +276,6 @@ int main(int argc, char** argv) {
 APPEND_FILTER(deny_urls, num_deny_urls, optarg);
 break;
-case 'S':
-if (*optarg == '*') optarg++;
-APPEND_FILTER(deny_strings, num_deny_strings, optarg);
-break;
 case 'T': {
 u8* x = (u8*)strchr(optarg, '=');
 if (!x) FATAL("Rules must be in 'name=value' form.");
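
The removal works at two levels: "S:" disappears from the getopt() option string, and the case 'S' branch disappears from the switch, so -S is now reported as an unrecognized option. A minimal, self-contained sketch of that mechanism using POSIX getopt(); only the two neighbouring options -I and -X are kept, and the messages are illustrative, not skipfish's:

#include <stdio.h>
#include <unistd.h>

int main(int argc, char** argv) {
  int opt;

  /* With "S:" absent from the option string, getopt() returns '?' for -S
     and the switch falls through to the error branch, which is the net
     effect of this commit on option parsing. The leading '+' mirrors the
     original string (GNU extension: stop at the first non-option). */
  while ((opt = getopt(argc, argv, "+I:X:")) > 0)
    switch (opt) {
      case 'I': printf("only follow URLs matching '%s'\n", optarg); break;
      case 'X': printf("exclude URLs matching '%s'\n", optarg); break;
      default:  fprintf(stderr, "Unrecognized option.\n"); return 1;
    }

  return 0;
}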