Add support for configuration files for ffuf (#308)
* Refactor config and job creation * ConfigOptions defaults * Structure ConfigOptions for config file parser * Sort options * Finalize the configuration file reading and add examples and documentation * Fix issues with opts -> config translation
This commit is contained in:
parent
f2aa824f5c
commit
bde943cc5d
28
README.md
28
README.md
@ -14,12 +14,14 @@ A fast web fuzzer written in Go.
|
|||||||
## Installation
|
## Installation
|
||||||
|
|
||||||
- [Download](https://github.com/ffuf/ffuf/releases/latest) a prebuilt binary from [releases page](https://github.com/ffuf/ffuf/releases/latest), unpack and run!
|
- [Download](https://github.com/ffuf/ffuf/releases/latest) a prebuilt binary from [releases page](https://github.com/ffuf/ffuf/releases/latest), unpack and run!
|
||||||
or
|
|
||||||
- If you have recent go compiler installed: `go get github.com/ffuf/ffuf`
|
|
||||||
or
|
|
||||||
- git clone https://github.com/ffuf/ffuf ; cd ffuf ; go build
|
|
||||||
|
|
||||||
The only dependency of ffuf is Go 1.13. No dependencies outside of Go standard library are needed.
|
_or_
|
||||||
|
- If you have recent go compiler installed: `go get -u github.com/ffuf/ffuf` (the same command works for updating)
|
||||||
|
|
||||||
|
_or_
|
||||||
|
- git clone https://github.com/ffuf/ffuf ; cd ffuf ; go get ; go build
|
||||||
|
|
||||||
|
Ffuf depends on Go 1.13 or greater.
|
||||||
|
|
||||||
## Example usage
|
## Example usage
|
||||||
|
|
||||||
@ -110,6 +112,21 @@ radamsa -n 1000 -o %n.txt example1.txt example2.txt
|
|||||||
ffuf --input-cmd 'cat $FFUF_NUM.txt' -H "Content-Type: application/json" -X POST -u https://ffuf.io.fi/ -mc all -fc 400
|
ffuf --input-cmd 'cat $FFUF_NUM.txt' -H "Content-Type: application/json" -X POST -u https://ffuf.io.fi/ -mc all -fc 400
|
||||||
```
|
```
|
||||||
|
|
||||||
|
### Configuration files
|
||||||
|
|
||||||
|
When running ffuf, it first checks if a default configuration file exists. The file path for it is `~/.ffufrc` / `$HOME/.ffufrc`
|
||||||
|
for most *nixes (for example `/home/joohoi/.ffufrc`) and `%USERPROFILE%\.ffufrc` for Windows. You can configure one or
|
||||||
|
multiple options in this file, and they will be applied on every subsequent ffuf job. An example of .ffufrc file can be
|
||||||
|
found [here](https://github.com/ffuf/ffuf/blob/master/ffufrc.example).
|
||||||
|
|
||||||
|
The configuration options provided on the command line override the ones loaded from `~/.ffufrc`.
|
||||||
|
Note: this does not apply for CLI flags that can be provided more than once. One of such examples is `-H` (header) flag.
|
||||||
|
In this case, the `-H` values provided on the command line will be _appended_ to the ones from the config file instead.
|
||||||
|
|
||||||
|
Additionally, in case you wish to use bunch of configuration files for different use cases, you can do this by defining
|
||||||
|
the configuration file path using `-config` command line flag that takes the file path to the configuration file as its
|
||||||
|
parameter.
|
||||||
|
|
||||||
## Usage
|
## Usage
|
||||||
|
|
||||||
To define the test case for ffuf, use the keyword `FUZZ` anywhere in the URL (`-u`), headers (`-H`), or POST data (`-d`).
|
To define the test case for ffuf, use the keyword `FUZZ` anywhere in the URL (`-u`), headers (`-H`), or POST data (`-d`).
|
||||||
@ -136,6 +153,7 @@ GENERAL OPTIONS:
|
|||||||
-ac Automatically calibrate filtering options (default: false)
|
-ac Automatically calibrate filtering options (default: false)
|
||||||
-acc Custom auto-calibration string. Can be used multiple times. Implies -ac
|
-acc Custom auto-calibration string. Can be used multiple times. Implies -ac
|
||||||
-c Colorize output. (default: false)
|
-c Colorize output. (default: false)
|
||||||
|
-config Load configuration from a file
|
||||||
-maxtime Maximum running time in seconds for entire process. (default: 0)
|
-maxtime Maximum running time in seconds for entire process. (default: 0)
|
||||||
-maxtime-job Maximum running time in seconds per job. (default: 0)
|
-maxtime-job Maximum running time in seconds per job. (default: 0)
|
||||||
-p Seconds of `delay` between requests, or a range of random delay. For example "0.1" or "0.1-2.0"
|
-p Seconds of `delay` between requests, or a range of random delay. For example "0.1" or "0.1-2.0"
|
||||||
|
|||||||
76
ffufrc.example
Normal file
76
ffufrc.example
Normal file
@ -0,0 +1,76 @@
|
|||||||
|
# This is an example of a ffuf configuration file.
|
||||||
|
# https://github.com/ffuf/ffuf
|
||||||
|
|
||||||
|
[http]
|
||||||
|
cookies = [
|
||||||
|
"cookiename=cookievalue"
|
||||||
|
]
|
||||||
|
data = "post=data&key=value"
|
||||||
|
followredirects = false
|
||||||
|
headers = [
|
||||||
|
"X-Header-Name: value",
|
||||||
|
"X-Another-Header: value"
|
||||||
|
]
|
||||||
|
ignorebody = false
|
||||||
|
method = "GET"
|
||||||
|
proxyurl = "http://127.0.0.1:8080"
|
||||||
|
recursion = false
|
||||||
|
recursiondepth = 0
|
||||||
|
replayproxyurl = "http://127.0.0.1:8080"
|
||||||
|
timeout = 10
|
||||||
|
url = "https://example.org/FUZZ"
|
||||||
|
|
||||||
|
[general]
|
||||||
|
autocalibration = false
|
||||||
|
autocalibrationstrings = [
|
||||||
|
"randomtest",
|
||||||
|
"admin"
|
||||||
|
]
|
||||||
|
colors = false
|
||||||
|
delay = ""
|
||||||
|
maxtime = 0
|
||||||
|
maxtimejob = 0
|
||||||
|
quiet = false
|
||||||
|
rate = 0
|
||||||
|
stopon403 = false
|
||||||
|
stoponall = false
|
||||||
|
stoponerrors = false
|
||||||
|
threads = 40
|
||||||
|
verbose = false
|
||||||
|
|
||||||
|
[input]
|
||||||
|
dirsearchcompat = false
|
||||||
|
extensions = ""
|
||||||
|
ignorewordlistcomments = false
|
||||||
|
inputmode = "clusterbomb"
|
||||||
|
inputnum = 100
|
||||||
|
inputcommands = [
|
||||||
|
"seq 1 100:CUSTOMKEYWORD"
|
||||||
|
]
|
||||||
|
request = "requestfile.txt"
|
||||||
|
requestproto = "https"
|
||||||
|
wordlists = [
|
||||||
|
"/path/to/wordlist:FUZZ",
|
||||||
|
"/path/to/hostlist:HOST"
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
[output]
|
||||||
|
debuglog = "debug.log"
|
||||||
|
outputdirectory = "/tmp/rawoutputdir"
|
||||||
|
outputfile = "output.json"
|
||||||
|
outputformat = "json"
|
||||||
|
|
||||||
|
[filter]
|
||||||
|
lines = ""
|
||||||
|
regexp = ""
|
||||||
|
size = ""
|
||||||
|
status = ""
|
||||||
|
words = ""
|
||||||
|
|
||||||
|
[matcher]
|
||||||
|
lines = ""
|
||||||
|
regexp = ""
|
||||||
|
size = ""
|
||||||
|
status = "200,204,301,302,307,401,403"
|
||||||
|
words = ""
|
||||||
4
go.mod
4
go.mod
@ -1,3 +1,5 @@
|
|||||||
module github.com/ffuf/ffuf
|
module github.com/ffuf/ffuf
|
||||||
|
|
||||||
go 1.11
|
go 1.13
|
||||||
|
|
||||||
|
require github.com/pelletier/go-toml v1.8.1
|
||||||
|
|||||||
3
go.sum
Normal file
3
go.sum
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
|
github.com/pelletier/go-toml v1.8.1 h1:1Nf83orprkJyknT6h7zbuEGUEjcyVlCxSUGTENmNCRM=
|
||||||
|
github.com/pelletier/go-toml v1.8.1/go.mod h1:T2/BmBdy8dvIRq1a/8aqjN41wvWlN4lrapLU/GW4pbc=
|
||||||
2
help.go
2
help.go
@ -61,7 +61,7 @@ func Usage() {
|
|||||||
Description: "",
|
Description: "",
|
||||||
Flags: make([]UsageFlag, 0),
|
Flags: make([]UsageFlag, 0),
|
||||||
Hidden: false,
|
Hidden: false,
|
||||||
ExpectedFlags: []string{"ac", "acc", "c", "maxtime", "maxtime-job", "p", "rate", "s", "sa", "se", "sf", "t", "v", "V"},
|
ExpectedFlags: []string{"ac", "acc", "c", "config", "maxtime", "maxtime-job", "p", "rate", "s", "sa", "se", "sf", "t", "v", "V"},
|
||||||
}
|
}
|
||||||
u_compat := UsageSection{
|
u_compat := UsageSection{
|
||||||
Name: "COMPATIBILITY OPTIONS",
|
Name: "COMPATIBILITY OPTIONS",
|
||||||
|
|||||||
632
main.go
632
main.go
@ -1,17 +1,12 @@
|
|||||||
package main
|
package main
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bufio"
|
|
||||||
"context"
|
"context"
|
||||||
"flag"
|
"flag"
|
||||||
"fmt"
|
"fmt"
|
||||||
"io/ioutil"
|
"io/ioutil"
|
||||||
"log"
|
"log"
|
||||||
"net/textproto"
|
|
||||||
"net/url"
|
|
||||||
"os"
|
"os"
|
||||||
"runtime"
|
|
||||||
"strconv"
|
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"github.com/ffuf/ffuf/pkg/ffuf"
|
"github.com/ffuf/ffuf/pkg/ffuf"
|
||||||
@ -21,36 +16,6 @@ import (
|
|||||||
"github.com/ffuf/ffuf/pkg/runner"
|
"github.com/ffuf/ffuf/pkg/runner"
|
||||||
)
|
)
|
||||||
|
|
||||||
type cliOptions struct {
|
|
||||||
extensions string
|
|
||||||
delay string
|
|
||||||
filterStatus string
|
|
||||||
filterSize string
|
|
||||||
filterRegexp string
|
|
||||||
filterWords string
|
|
||||||
filterLines string
|
|
||||||
matcherStatus string
|
|
||||||
matcherSize string
|
|
||||||
matcherRegexp string
|
|
||||||
matcherWords string
|
|
||||||
matcherLines string
|
|
||||||
proxyURL string
|
|
||||||
rate int
|
|
||||||
replayProxyURL string
|
|
||||||
request string
|
|
||||||
requestProto string
|
|
||||||
URL string
|
|
||||||
outputFormat string
|
|
||||||
ignoreBody bool
|
|
||||||
wordlists wordlistFlag
|
|
||||||
inputcommands multiStringFlag
|
|
||||||
headers multiStringFlag
|
|
||||||
cookies multiStringFlag
|
|
||||||
AutoCalibrationStrings multiStringFlag
|
|
||||||
showVersion bool
|
|
||||||
debugLog string
|
|
||||||
}
|
|
||||||
|
|
||||||
type multiStringFlag []string
|
type multiStringFlag []string
|
||||||
type wordlistFlag []string
|
type wordlistFlag []string
|
||||||
|
|
||||||
@ -79,76 +44,101 @@ func (m *wordlistFlag) Set(value string) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func main() {
|
//ParseFlags parses the command line flags and (re)populates the ConfigOptions struct
|
||||||
ctx, cancel := context.WithCancel(context.Background())
|
func ParseFlags(opts *ffuf.ConfigOptions) *ffuf.ConfigOptions {
|
||||||
defer cancel()
|
|
||||||
conf := ffuf.NewConfig(ctx, cancel)
|
|
||||||
opts := cliOptions{}
|
|
||||||
var ignored bool
|
var ignored bool
|
||||||
flag.BoolVar(&conf.IgnoreWordlistComments, "ic", false, "Ignore wordlist comments")
|
var cookies, autocalibrationstrings, headers, inputcommands multiStringFlag
|
||||||
flag.StringVar(&opts.extensions, "e", "", "Comma separated list of extensions. Extends FUZZ keyword.")
|
var wordlists wordlistFlag
|
||||||
flag.BoolVar(&conf.DirSearchCompat, "D", false, "DirSearch wordlist compatibility mode. Used in conjunction with -e flag.")
|
|
||||||
flag.Var(&opts.headers, "H", "Header `\"Name: Value\"`, separated by colon. Multiple -H flags are accepted.")
|
cookies = opts.HTTP.Cookies
|
||||||
flag.StringVar(&opts.URL, "u", "", "Target URL")
|
autocalibrationstrings = opts.General.AutoCalibrationStrings
|
||||||
flag.Var(&opts.wordlists, "w", "Wordlist file path and (optional) keyword separated by colon. eg. '/path/to/wordlist:KEYWORD'")
|
headers = opts.HTTP.Headers
|
||||||
flag.BoolVar(&ignored, "k", false, "Dummy flag for backwards compatibility")
|
inputcommands = opts.Input.Inputcommands
|
||||||
flag.StringVar(&opts.delay, "p", "", "Seconds of `delay` between requests, or a range of random delay. For example \"0.1\" or \"0.1-2.0\"")
|
|
||||||
flag.StringVar(&opts.filterStatus, "fc", "", "Filter HTTP status codes from response. Comma separated list of codes and ranges")
|
|
||||||
flag.StringVar(&opts.filterSize, "fs", "", "Filter HTTP response size. Comma separated list of sizes and ranges")
|
|
||||||
flag.StringVar(&opts.filterRegexp, "fr", "", "Filter regexp")
|
|
||||||
flag.StringVar(&opts.filterWords, "fw", "", "Filter by amount of words in response. Comma separated list of word counts and ranges")
|
|
||||||
flag.StringVar(&opts.filterLines, "fl", "", "Filter by amount of lines in response. Comma separated list of line counts and ranges")
|
|
||||||
flag.StringVar(&conf.Data, "d", "", "POST data")
|
|
||||||
flag.StringVar(&conf.Data, "data", "", "POST data (alias of -d)")
|
|
||||||
flag.StringVar(&conf.Data, "data-ascii", "", "POST data (alias of -d)")
|
|
||||||
flag.StringVar(&conf.Data, "data-binary", "", "POST data (alias of -d)")
|
|
||||||
flag.BoolVar(&conf.Colors, "c", false, "Colorize output.")
|
|
||||||
flag.BoolVar(&ignored, "compressed", true, "Dummy flag for copy as curl functionality (ignored)")
|
flag.BoolVar(&ignored, "compressed", true, "Dummy flag for copy as curl functionality (ignored)")
|
||||||
flag.Var(&opts.inputcommands, "input-cmd", "Command producing the input. --input-num is required when using this input method. Overrides -w.")
|
|
||||||
flag.IntVar(&conf.InputNum, "input-num", 100, "Number of inputs to test. Used in conjunction with --input-cmd.")
|
|
||||||
flag.StringVar(&conf.InputMode, "mode", "clusterbomb", "Multi-wordlist operation mode. Available modes: clusterbomb, pitchfork")
|
|
||||||
flag.BoolVar(&ignored, "i", true, "Dummy flag for copy as curl functionality (ignored)")
|
flag.BoolVar(&ignored, "i", true, "Dummy flag for copy as curl functionality (ignored)")
|
||||||
flag.Var(&opts.cookies, "b", "Cookie data `\"NAME1=VALUE1; NAME2=VALUE2\"` for copy as curl functionality.")
|
flag.BoolVar(&ignored, "k", false, "Dummy flag for backwards compatibility")
|
||||||
flag.Var(&opts.cookies, "cookie", "Cookie data (alias of -b)")
|
flag.BoolVar(&opts.General.AutoCalibration, "ac", opts.General.AutoCalibration, "Automatically calibrate filtering options")
|
||||||
flag.StringVar(&opts.matcherStatus, "mc", "200,204,301,302,307,401,403", "Match HTTP status codes, or \"all\" for everything.")
|
flag.BoolVar(&opts.General.Colors, "c", opts.General.Colors, "Colorize output.")
|
||||||
flag.StringVar(&opts.matcherSize, "ms", "", "Match HTTP response size")
|
flag.BoolVar(&opts.General.Quiet, "s", opts.General.Quiet, "Do not print additional information (silent mode)")
|
||||||
flag.StringVar(&opts.matcherRegexp, "mr", "", "Match regexp")
|
flag.BoolVar(&opts.General.ShowVersion, "V", opts.General.ShowVersion, "Show version information.")
|
||||||
flag.StringVar(&opts.matcherWords, "mw", "", "Match amount of words in response")
|
flag.BoolVar(&opts.General.StopOn403, "sf", opts.General.StopOn403, "Stop when > 95% of responses return 403 Forbidden")
|
||||||
flag.StringVar(&opts.matcherLines, "ml", "", "Match amount of lines in response")
|
flag.BoolVar(&opts.General.StopOnAll, "sa", opts.General.StopOnAll, "Stop on all error cases. Implies -sf and -se.")
|
||||||
flag.StringVar(&opts.proxyURL, "x", "", "HTTP Proxy URL")
|
flag.BoolVar(&opts.General.StopOnErrors, "se", opts.General.StopOnErrors, "Stop on spurious errors")
|
||||||
flag.IntVar(&opts.rate, "rate", 0, "Rate of requests per second")
|
flag.BoolVar(&opts.General.Verbose, "v", opts.General.Verbose, "Verbose output, printing full URL and redirect location (if any) with the results.")
|
||||||
flag.StringVar(&opts.request, "request", "", "File containing the raw http request")
|
flag.BoolVar(&opts.HTTP.FollowRedirects, "r", opts.HTTP.FollowRedirects, "Follow redirects")
|
||||||
flag.StringVar(&opts.requestProto, "request-proto", "https", "Protocol to use along with raw request")
|
flag.BoolVar(&opts.HTTP.IgnoreBody, "ignore-body", opts.HTTP.IgnoreBody, "Do not fetch the response content.")
|
||||||
flag.StringVar(&conf.Method, "X", "GET", "HTTP method to use")
|
flag.BoolVar(&opts.HTTP.Recursion, "recursion", opts.HTTP.Recursion, "Scan recursively. Only FUZZ keyword is supported, and URL (-u) has to end in it.")
|
||||||
flag.StringVar(&conf.OutputFile, "o", "", "Write output to file")
|
flag.BoolVar(&opts.Input.DirSearchCompat, "D", opts.Input.DirSearchCompat, "DirSearch wordlist compatibility mode. Used in conjunction with -e flag.")
|
||||||
flag.StringVar(&opts.outputFormat, "of", "json", "Output file format. Available formats: json, ejson, html, md, csv, ecsv (or, 'all' for all formats)")
|
flag.BoolVar(&opts.Input.IgnoreWordlistComments, "ic", opts.Input.IgnoreWordlistComments, "Ignore wordlist comments")
|
||||||
flag.StringVar(&conf.OutputDirectory, "od", "", "Directory path to store matched results to.")
|
flag.IntVar(&opts.General.MaxTime, "maxtime", opts.General.MaxTime, "Maximum running time in seconds for entire process.")
|
||||||
flag.BoolVar(&conf.IgnoreBody, "ignore-body", false, "Do not fetch the response content.")
|
flag.IntVar(&opts.General.MaxTimeJob, "maxtime-job", opts.General.MaxTimeJob, "Maximum running time in seconds per job.")
|
||||||
flag.BoolVar(&conf.Quiet, "s", false, "Do not print additional information (silent mode)")
|
flag.IntVar(&opts.General.Rate, "rate", opts.General.Rate, "Rate of requests per second")
|
||||||
flag.BoolVar(&conf.StopOn403, "sf", false, "Stop when > 95% of responses return 403 Forbidden")
|
flag.IntVar(&opts.General.Threads, "t", opts.General.Threads, "Number of concurrent threads.")
|
||||||
flag.BoolVar(&conf.StopOnErrors, "se", false, "Stop on spurious errors")
|
flag.IntVar(&opts.HTTP.RecursionDepth, "recursion-depth", opts.HTTP.RecursionDepth, "Maximum recursion depth.")
|
||||||
flag.BoolVar(&conf.StopOnAll, "sa", false, "Stop on all error cases. Implies -sf and -se.")
|
flag.IntVar(&opts.HTTP.Timeout, "timeout", opts.HTTP.Timeout, "HTTP request timeout in seconds.")
|
||||||
flag.BoolVar(&conf.FollowRedirects, "r", false, "Follow redirects")
|
flag.IntVar(&opts.Input.InputNum, "input-num", opts.Input.InputNum, "Number of inputs to test. Used in conjunction with --input-cmd.")
|
||||||
flag.BoolVar(&conf.Recursion, "recursion", false, "Scan recursively. Only FUZZ keyword is supported, and URL (-u) has to end in it.")
|
flag.StringVar(&opts.General.ConfigFile, "config", "", "Load configuration from a file")
|
||||||
flag.IntVar(&conf.RecursionDepth, "recursion-depth", 0, "Maximum recursion depth.")
|
flag.StringVar(&opts.Filter.Lines, "fl", opts.Filter.Lines, "Filter by amount of lines in response. Comma separated list of line counts and ranges")
|
||||||
flag.StringVar(&opts.replayProxyURL, "replay-proxy", "", "Replay matched requests using this proxy.")
|
flag.StringVar(&opts.Filter.Regexp, "fr", opts.Filter.Regexp, "Filter regexp")
|
||||||
flag.BoolVar(&conf.AutoCalibration, "ac", false, "Automatically calibrate filtering options")
|
flag.StringVar(&opts.Filter.Size, "fs", opts.Filter.Size, "Filter HTTP response size. Comma separated list of sizes and ranges")
|
||||||
flag.Var(&opts.AutoCalibrationStrings, "acc", "Custom auto-calibration string. Can be used multiple times. Implies -ac")
|
flag.StringVar(&opts.Filter.Status, "fc", opts.Filter.Status, "Filter HTTP status codes from response. Comma separated list of codes and ranges")
|
||||||
flag.IntVar(&conf.Threads, "t", 40, "Number of concurrent threads.")
|
flag.StringVar(&opts.Filter.Words, "fw", opts.Filter.Words, "Filter by amount of words in response. Comma separated list of word counts and ranges")
|
||||||
flag.IntVar(&conf.Timeout, "timeout", 10, "HTTP request timeout in seconds.")
|
flag.StringVar(&opts.General.Delay, "p", opts.General.Delay, "Seconds of `delay` between requests, or a range of random delay. For example \"0.1\" or \"0.1-2.0\"")
|
||||||
flag.IntVar(&conf.MaxTime, "maxtime", 0, "Maximum running time in seconds for entire process.")
|
flag.StringVar(&opts.HTTP.Data, "d", opts.HTTP.Data, "POST data")
|
||||||
flag.IntVar(&conf.MaxTimeJob, "maxtime-job", 0, "Maximum running time in seconds per job.")
|
flag.StringVar(&opts.HTTP.Data, "data", opts.HTTP.Data, "POST data (alias of -d)")
|
||||||
flag.BoolVar(&conf.Verbose, "v", false, "Verbose output, printing full URL and redirect location (if any) with the results.")
|
flag.StringVar(&opts.HTTP.Data, "data-ascii", opts.HTTP.Data, "POST data (alias of -d)")
|
||||||
flag.BoolVar(&opts.showVersion, "V", false, "Show version information.")
|
flag.StringVar(&opts.HTTP.Data, "data-binary", opts.HTTP.Data, "POST data (alias of -d)")
|
||||||
flag.StringVar(&opts.debugLog, "debug-log", "", "Write all of the internal logging to the specified file.")
|
flag.StringVar(&opts.HTTP.Method, "X", opts.HTTP.Method, "HTTP method to use")
|
||||||
|
flag.StringVar(&opts.HTTP.ProxyURL, "x", opts.HTTP.ProxyURL, "HTTP Proxy URL")
|
||||||
|
flag.StringVar(&opts.HTTP.ReplayProxyURL, "replay-proxy", opts.HTTP.ReplayProxyURL, "Replay matched requests using this proxy.")
|
||||||
|
flag.StringVar(&opts.HTTP.URL, "u", opts.HTTP.URL, "Target URL")
|
||||||
|
flag.StringVar(&opts.Input.Extensions, "e", opts.Input.Extensions, "Comma separated list of extensions. Extends FUZZ keyword.")
|
||||||
|
flag.StringVar(&opts.Input.InputMode, "mode", opts.Input.InputMode, "Multi-wordlist operation mode. Available modes: clusterbomb, pitchfork")
|
||||||
|
flag.StringVar(&opts.Input.Request, "request", opts.Input.Request, "File containing the raw http request")
|
||||||
|
flag.StringVar(&opts.Input.RequestProto, "request-proto", opts.Input.RequestProto, "Protocol to use along with raw request")
|
||||||
|
flag.StringVar(&opts.Matcher.Lines, "ml", opts.Matcher.Lines, "Match amount of lines in response")
|
||||||
|
flag.StringVar(&opts.Matcher.Regexp, "mr", opts.Matcher.Regexp, "Match regexp")
|
||||||
|
flag.StringVar(&opts.Matcher.Size, "ms", opts.Matcher.Size, "Match HTTP response size")
|
||||||
|
flag.StringVar(&opts.Matcher.Status, "mc", opts.Matcher.Status, "Match HTTP status codes, or \"all\" for everything.")
|
||||||
|
flag.StringVar(&opts.Matcher.Words, "mw", opts.Matcher.Words, "Match amount of words in response")
|
||||||
|
flag.StringVar(&opts.Output.DebugLog, "debug-log", opts.Output.DebugLog, "Write all of the internal logging to the specified file.")
|
||||||
|
flag.StringVar(&opts.Output.OutputDirectory, "od", opts.Output.OutputDirectory, "Directory path to store matched results to.")
|
||||||
|
flag.StringVar(&opts.Output.OutputFile, "o", opts.Output.OutputFile, "Write output to file")
|
||||||
|
flag.StringVar(&opts.Output.OutputFormat, "of", opts.Output.OutputFormat, "Output file format. Available formats: json, ejson, html, md, csv, ecsv (or, 'all' for all formats)")
|
||||||
|
flag.Var(&autocalibrationstrings, "acc", "Custom auto-calibration string. Can be used multiple times. Implies -ac")
|
||||||
|
flag.Var(&cookies, "b", "Cookie data `\"NAME1=VALUE1; NAME2=VALUE2\"` for copy as curl functionality.")
|
||||||
|
flag.Var(&cookies, "cookie", "Cookie data (alias of -b)")
|
||||||
|
flag.Var(&headers, "H", "Header `\"Name: Value\"`, separated by colon. Multiple -H flags are accepted.")
|
||||||
|
flag.Var(&inputcommands, "input-cmd", "Command producing the input. --input-num is required when using this input method. Overrides -w.")
|
||||||
|
flag.Var(&wordlists, "w", "Wordlist file path and (optional) keyword separated by colon. eg. '/path/to/wordlist:KEYWORD'")
|
||||||
flag.Usage = Usage
|
flag.Usage = Usage
|
||||||
flag.Parse()
|
flag.Parse()
|
||||||
if opts.showVersion {
|
|
||||||
|
opts.General.AutoCalibrationStrings = autocalibrationstrings
|
||||||
|
opts.HTTP.Cookies = cookies
|
||||||
|
opts.HTTP.Headers = headers
|
||||||
|
opts.Input.Inputcommands = inputcommands
|
||||||
|
opts.Input.Wordlists = wordlists
|
||||||
|
return opts
|
||||||
|
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
|
||||||
|
var err, optserr error
|
||||||
|
|
||||||
|
// prepare the default config options from default config file
|
||||||
|
var opts *ffuf.ConfigOptions
|
||||||
|
opts, optserr = ffuf.ReadDefaultConfig()
|
||||||
|
|
||||||
|
opts = ParseFlags(opts)
|
||||||
|
|
||||||
|
if opts.General.ShowVersion {
|
||||||
fmt.Printf("ffuf version: %s\n", ffuf.VERSION)
|
fmt.Printf("ffuf version: %s\n", ffuf.VERSION)
|
||||||
os.Exit(0)
|
os.Exit(0)
|
||||||
}
|
}
|
||||||
if len(opts.debugLog) != 0 {
|
if len(opts.Output.DebugLog) != 0 {
|
||||||
f, err := os.OpenFile(opts.debugLog, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
|
f, err := os.OpenFile(opts.Output.DebugLog, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
fmt.Fprintf(os.Stderr, "Disabling logging, encountered error(s): %s\n", err)
|
fmt.Fprintf(os.Stderr, "Disabling logging, encountered error(s): %s\n", err)
|
||||||
log.SetOutput(ioutil.Discard)
|
log.SetOutput(ioutil.Discard)
|
||||||
@ -159,20 +149,42 @@ func main() {
|
|||||||
} else {
|
} else {
|
||||||
log.SetOutput(ioutil.Discard)
|
log.SetOutput(ioutil.Discard)
|
||||||
}
|
}
|
||||||
if err := prepareConfig(&opts, &conf); err != nil {
|
if optserr != nil {
|
||||||
fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err)
|
log.Printf("Error while opening default config file: %s", optserr)
|
||||||
Usage()
|
|
||||||
fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err)
|
|
||||||
os.Exit(1)
|
|
||||||
}
|
}
|
||||||
job, err := prepareJob(&conf)
|
|
||||||
|
if opts.General.ConfigFile != "" {
|
||||||
|
opts, err = ffuf.ReadConfig(opts.General.ConfigFile)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "Encoutered error(s): %s\n", err)
|
||||||
|
Usage()
|
||||||
|
fmt.Fprintf(os.Stderr, "Encoutered error(s): %s\n", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
// Reset the flag package state
|
||||||
|
flag.CommandLine = flag.NewFlagSet(os.Args[0], flag.ExitOnError)
|
||||||
|
// Re-parse the cli options
|
||||||
|
opts = ParseFlags(opts)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Prepare context and set up Config struct
|
||||||
|
ctx, cancel := context.WithCancel(context.Background())
|
||||||
|
defer cancel()
|
||||||
|
conf, err := ffuf.ConfigFromOptions(opts, ctx, cancel)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err)
|
fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err)
|
||||||
Usage()
|
Usage()
|
||||||
fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err)
|
fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err)
|
||||||
os.Exit(1)
|
os.Exit(1)
|
||||||
}
|
}
|
||||||
if err := prepareFilters(&opts, &conf); err != nil {
|
job, err := prepareJob(conf)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err)
|
||||||
|
Usage()
|
||||||
|
fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
if err := filter.SetupFilters(opts, conf); err != nil {
|
||||||
fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err)
|
fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err)
|
||||||
Usage()
|
Usage()
|
||||||
fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err)
|
fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err)
|
||||||
@ -190,423 +202,15 @@ func main() {
|
|||||||
|
|
||||||
func prepareJob(conf *ffuf.Config) (*ffuf.Job, error) {
|
func prepareJob(conf *ffuf.Config) (*ffuf.Job, error) {
|
||||||
job := ffuf.NewJob(conf)
|
job := ffuf.NewJob(conf)
|
||||||
errs := ffuf.NewMultierror()
|
var errs ffuf.Multierror
|
||||||
var err error
|
job.Input, errs = input.NewInputProvider(conf)
|
||||||
inputprovider, err := input.NewInputProvider(conf)
|
|
||||||
if err != nil {
|
|
||||||
errs.Add(err)
|
|
||||||
}
|
|
||||||
// TODO: implement error handling for runnerprovider and outputprovider
|
// TODO: implement error handling for runnerprovider and outputprovider
|
||||||
// We only have http runner right now
|
// We only have http runner right now
|
||||||
job.Runner = runner.NewRunnerByName("http", conf, false)
|
job.Runner = runner.NewRunnerByName("http", conf, false)
|
||||||
if len(conf.ReplayProxyURL) > 0 {
|
if len(conf.ReplayProxyURL) > 0 {
|
||||||
job.ReplayRunner = runner.NewRunnerByName("http", conf, true)
|
job.ReplayRunner = runner.NewRunnerByName("http", conf, true)
|
||||||
}
|
}
|
||||||
// Initialize the correct inputprovider
|
|
||||||
for _, v := range conf.InputProviders {
|
|
||||||
err = inputprovider.AddProvider(v)
|
|
||||||
if err != nil {
|
|
||||||
errs.Add(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
job.Input = inputprovider
|
|
||||||
// We only have stdout outputprovider right now
|
// We only have stdout outputprovider right now
|
||||||
job.Output = output.NewOutputProviderByName("stdout", conf)
|
job.Output = output.NewOutputProviderByName("stdout", conf)
|
||||||
return job, errs.ErrorOrNil()
|
return job, errs.ErrorOrNil()
|
||||||
}
|
}
|
||||||
|
|
||||||
func prepareFilters(parseOpts *cliOptions, conf *ffuf.Config) error {
|
|
||||||
errs := ffuf.NewMultierror()
|
|
||||||
// If any other matcher is set, ignore -mc default value
|
|
||||||
matcherSet := false
|
|
||||||
statusSet := false
|
|
||||||
warningIgnoreBody := false
|
|
||||||
flag.Visit(func(f *flag.Flag) {
|
|
||||||
if f.Name == "mc" {
|
|
||||||
statusSet = true
|
|
||||||
}
|
|
||||||
if f.Name == "ms" {
|
|
||||||
matcherSet = true
|
|
||||||
warningIgnoreBody = true
|
|
||||||
}
|
|
||||||
if f.Name == "ml" {
|
|
||||||
matcherSet = true
|
|
||||||
warningIgnoreBody = true
|
|
||||||
}
|
|
||||||
if f.Name == "mr" {
|
|
||||||
matcherSet = true
|
|
||||||
}
|
|
||||||
if f.Name == "mw" {
|
|
||||||
matcherSet = true
|
|
||||||
warningIgnoreBody = true
|
|
||||||
}
|
|
||||||
})
|
|
||||||
if statusSet || !matcherSet {
|
|
||||||
if err := filter.AddMatcher(conf, "status", parseOpts.matcherStatus); err != nil {
|
|
||||||
errs.Add(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if parseOpts.filterStatus != "" {
|
|
||||||
if err := filter.AddFilter(conf, "status", parseOpts.filterStatus); err != nil {
|
|
||||||
errs.Add(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if parseOpts.filterSize != "" {
|
|
||||||
warningIgnoreBody = true
|
|
||||||
if err := filter.AddFilter(conf, "size", parseOpts.filterSize); err != nil {
|
|
||||||
errs.Add(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if parseOpts.filterRegexp != "" {
|
|
||||||
if err := filter.AddFilter(conf, "regexp", parseOpts.filterRegexp); err != nil {
|
|
||||||
errs.Add(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if parseOpts.filterWords != "" {
|
|
||||||
warningIgnoreBody = true
|
|
||||||
if err := filter.AddFilter(conf, "word", parseOpts.filterWords); err != nil {
|
|
||||||
errs.Add(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if parseOpts.filterLines != "" {
|
|
||||||
warningIgnoreBody = true
|
|
||||||
if err := filter.AddFilter(conf, "line", parseOpts.filterLines); err != nil {
|
|
||||||
errs.Add(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if parseOpts.matcherSize != "" {
|
|
||||||
if err := filter.AddMatcher(conf, "size", parseOpts.matcherSize); err != nil {
|
|
||||||
errs.Add(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if parseOpts.matcherRegexp != "" {
|
|
||||||
if err := filter.AddMatcher(conf, "regexp", parseOpts.matcherRegexp); err != nil {
|
|
||||||
errs.Add(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if parseOpts.matcherWords != "" {
|
|
||||||
if err := filter.AddMatcher(conf, "word", parseOpts.matcherWords); err != nil {
|
|
||||||
errs.Add(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if parseOpts.matcherLines != "" {
|
|
||||||
if err := filter.AddMatcher(conf, "line", parseOpts.matcherLines); err != nil {
|
|
||||||
errs.Add(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if conf.IgnoreBody && warningIgnoreBody {
|
|
||||||
fmt.Printf("*** Warning: possible undesired combination of -ignore-body and the response options: fl,fs,fw,ml,ms and mw.\n")
|
|
||||||
}
|
|
||||||
return errs.ErrorOrNil()
|
|
||||||
}
|
|
||||||
|
|
||||||
func prepareConfig(parseOpts *cliOptions, conf *ffuf.Config) error {
|
|
||||||
//TODO: refactor in a proper flag library that can handle things like required flags
|
|
||||||
errs := ffuf.NewMultierror()
|
|
||||||
|
|
||||||
var err error
|
|
||||||
var err2 error
|
|
||||||
if len(parseOpts.URL) == 0 && parseOpts.request == "" {
|
|
||||||
errs.Add(fmt.Errorf("-u flag or -request flag is required"))
|
|
||||||
}
|
|
||||||
|
|
||||||
// prepare extensions
|
|
||||||
if parseOpts.extensions != "" {
|
|
||||||
extensions := strings.Split(parseOpts.extensions, ",")
|
|
||||||
conf.Extensions = extensions
|
|
||||||
}
|
|
||||||
|
|
||||||
// Convert cookies to a header
|
|
||||||
if len(parseOpts.cookies) > 0 {
|
|
||||||
parseOpts.headers.Set("Cookie: " + strings.Join(parseOpts.cookies, "; "))
|
|
||||||
}
|
|
||||||
|
|
||||||
//Prepare inputproviders
|
|
||||||
for _, v := range parseOpts.wordlists {
|
|
||||||
var wl []string
|
|
||||||
if runtime.GOOS == "windows" {
|
|
||||||
// Try to ensure that Windows file paths like C:\path\to\wordlist.txt:KEYWORD are treated properly
|
|
||||||
if ffuf.FileExists(v) {
|
|
||||||
// The wordlist was supplied without a keyword parameter
|
|
||||||
wl = []string{v}
|
|
||||||
} else {
|
|
||||||
filepart := v[:strings.LastIndex(v, ":")]
|
|
||||||
if ffuf.FileExists(filepart) {
|
|
||||||
wl = []string{filepart, v[strings.LastIndex(v, ":")+1:]}
|
|
||||||
} else {
|
|
||||||
// The file was not found. Use full wordlist parameter value for more concise error message down the line
|
|
||||||
wl = []string{v}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
wl = strings.SplitN(v, ":", 2)
|
|
||||||
}
|
|
||||||
if len(wl) == 2 {
|
|
||||||
conf.InputProviders = append(conf.InputProviders, ffuf.InputProviderConfig{
|
|
||||||
Name: "wordlist",
|
|
||||||
Value: wl[0],
|
|
||||||
Keyword: wl[1],
|
|
||||||
})
|
|
||||||
} else {
|
|
||||||
conf.InputProviders = append(conf.InputProviders, ffuf.InputProviderConfig{
|
|
||||||
Name: "wordlist",
|
|
||||||
Value: wl[0],
|
|
||||||
Keyword: "FUZZ",
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for _, v := range parseOpts.inputcommands {
|
|
||||||
ic := strings.SplitN(v, ":", 2)
|
|
||||||
if len(ic) == 2 {
|
|
||||||
conf.InputProviders = append(conf.InputProviders, ffuf.InputProviderConfig{
|
|
||||||
Name: "command",
|
|
||||||
Value: ic[0],
|
|
||||||
Keyword: ic[1],
|
|
||||||
})
|
|
||||||
conf.CommandKeywords = append(conf.CommandKeywords, ic[0])
|
|
||||||
} else {
|
|
||||||
conf.InputProviders = append(conf.InputProviders, ffuf.InputProviderConfig{
|
|
||||||
Name: "command",
|
|
||||||
Value: ic[0],
|
|
||||||
Keyword: "FUZZ",
|
|
||||||
})
|
|
||||||
conf.CommandKeywords = append(conf.CommandKeywords, "FUZZ")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(conf.InputProviders) == 0 {
|
|
||||||
errs.Add(fmt.Errorf("Either -w or --input-cmd flag is required"))
|
|
||||||
}
|
|
||||||
|
|
||||||
// Prepare the request using body
|
|
||||||
if parseOpts.request != "" {
|
|
||||||
err := parseRawRequest(parseOpts, conf)
|
|
||||||
if err != nil {
|
|
||||||
errmsg := fmt.Sprintf("Could not parse raw request: %s", err)
|
|
||||||
errs.Add(fmt.Errorf(errmsg))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
//Prepare URL
|
|
||||||
if parseOpts.URL != "" {
|
|
||||||
conf.Url = parseOpts.URL
|
|
||||||
}
|
|
||||||
|
|
||||||
//Prepare headers and make canonical
|
|
||||||
for _, v := range parseOpts.headers {
|
|
||||||
hs := strings.SplitN(v, ":", 2)
|
|
||||||
if len(hs) == 2 {
|
|
||||||
// trim and make canonical
|
|
||||||
// except if used in custom defined header
|
|
||||||
var CanonicalNeeded bool = true
|
|
||||||
for _, a := range conf.CommandKeywords {
|
|
||||||
if a == hs[0] {
|
|
||||||
CanonicalNeeded = false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// check if part of InputProviders
|
|
||||||
if CanonicalNeeded {
|
|
||||||
for _, b := range conf.InputProviders {
|
|
||||||
if b.Keyword == hs[0] {
|
|
||||||
CanonicalNeeded = false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if CanonicalNeeded {
|
|
||||||
var CanonicalHeader string = textproto.CanonicalMIMEHeaderKey(strings.TrimSpace(hs[0]))
|
|
||||||
conf.Headers[CanonicalHeader] = strings.TrimSpace(hs[1])
|
|
||||||
} else {
|
|
||||||
conf.Headers[strings.TrimSpace(hs[0])] = strings.TrimSpace(hs[1])
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
errs.Add(fmt.Errorf("Header defined by -H needs to have a value. \":\" should be used as a separator"))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
//Prepare delay
|
|
||||||
d := strings.Split(parseOpts.delay, "-")
|
|
||||||
if len(d) > 2 {
|
|
||||||
errs.Add(fmt.Errorf("Delay needs to be either a single float: \"0.1\" or a range of floats, delimited by dash: \"0.1-0.8\""))
|
|
||||||
} else if len(d) == 2 {
|
|
||||||
conf.Delay.IsRange = true
|
|
||||||
conf.Delay.HasDelay = true
|
|
||||||
conf.Delay.Min, err = strconv.ParseFloat(d[0], 64)
|
|
||||||
conf.Delay.Max, err2 = strconv.ParseFloat(d[1], 64)
|
|
||||||
if err != nil || err2 != nil {
|
|
||||||
errs.Add(fmt.Errorf("Delay range min and max values need to be valid floats. For example: 0.1-0.5"))
|
|
||||||
}
|
|
||||||
} else if len(parseOpts.delay) > 0 {
|
|
||||||
conf.Delay.IsRange = false
|
|
||||||
conf.Delay.HasDelay = true
|
|
||||||
conf.Delay.Min, err = strconv.ParseFloat(parseOpts.delay, 64)
|
|
||||||
if err != nil {
|
|
||||||
errs.Add(fmt.Errorf("Delay needs to be either a single float: \"0.1\" or a range of floats, delimited by dash: \"0.1-0.8\""))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Verify proxy url format
|
|
||||||
if len(parseOpts.proxyURL) > 0 {
|
|
||||||
_, err := url.Parse(parseOpts.proxyURL)
|
|
||||||
if err != nil {
|
|
||||||
errs.Add(fmt.Errorf("Bad proxy url (-x) format: %s", err))
|
|
||||||
} else {
|
|
||||||
conf.ProxyURL = parseOpts.proxyURL
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Verify replayproxy url format
|
|
||||||
if len(parseOpts.replayProxyURL) > 0 {
|
|
||||||
_, err := url.Parse(parseOpts.replayProxyURL)
|
|
||||||
if err != nil {
|
|
||||||
errs.Add(fmt.Errorf("Bad replay-proxy url (-replay-proxy) format: %s", err))
|
|
||||||
} else {
|
|
||||||
conf.ReplayProxyURL = parseOpts.replayProxyURL
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
//Check the output file format option
|
|
||||||
if conf.OutputFile != "" {
|
|
||||||
//No need to check / error out if output file isn't defined
|
|
||||||
outputFormats := []string{"all", "json", "ejson", "html", "md", "csv", "ecsv"}
|
|
||||||
found := false
|
|
||||||
for _, f := range outputFormats {
|
|
||||||
if f == parseOpts.outputFormat {
|
|
||||||
conf.OutputFormat = f
|
|
||||||
found = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if !found {
|
|
||||||
errs.Add(fmt.Errorf("Unknown output file format (-of): %s", parseOpts.outputFormat))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Auto-calibration strings
|
|
||||||
if len(parseOpts.AutoCalibrationStrings) > 0 {
|
|
||||||
conf.AutoCalibrationStrings = parseOpts.AutoCalibrationStrings
|
|
||||||
}
|
|
||||||
// Using -acc implies -ac
|
|
||||||
if len(conf.AutoCalibrationStrings) > 0 {
|
|
||||||
conf.AutoCalibration = true
|
|
||||||
}
|
|
||||||
|
|
||||||
// Handle copy as curl situation where POST method is implied by --data flag. If method is set to anything but GET, NOOP
|
|
||||||
if len(conf.Data) > 0 &&
|
|
||||||
conf.Method == "GET" &&
|
|
||||||
//don't modify the method automatically if a request file is being used as input
|
|
||||||
len(parseOpts.request) == 0 {
|
|
||||||
|
|
||||||
conf.Method = "POST"
|
|
||||||
}
|
|
||||||
|
|
||||||
conf.CommandLine = strings.Join(os.Args, " ")
|
|
||||||
|
|
||||||
for _, provider := range conf.InputProviders {
|
|
||||||
if !keywordPresent(provider.Keyword, conf) {
|
|
||||||
errmsg := fmt.Sprintf("Keyword %s defined, but not found in headers, method, URL or POST data.", provider.Keyword)
|
|
||||||
errs.Add(fmt.Errorf(errmsg))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Do checks for recursion mode
|
|
||||||
if conf.Recursion {
|
|
||||||
if !strings.HasSuffix(conf.Url, "FUZZ") {
|
|
||||||
errmsg := fmt.Sprintf("When using -recursion the URL (-u) must end with FUZZ keyword.")
|
|
||||||
errs.Add(fmt.Errorf(errmsg))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if parseOpts.rate < 0 {
|
|
||||||
conf.Rate = 0
|
|
||||||
} else {
|
|
||||||
conf.Rate = int64(parseOpts.rate)
|
|
||||||
}
|
|
||||||
|
|
||||||
return errs.ErrorOrNil()
|
|
||||||
}
|
|
||||||
|
|
||||||
// parseRawRequest reads a raw HTTP request (e.g. one saved from a proxy tool)
// from the file named by parseOpts.request and populates conf.Method,
// conf.Headers, conf.Url and conf.Data from it.
func parseRawRequest(parseOpts *cliOptions, conf *ffuf.Config) error {
	file, err := os.Open(parseOpts.request)
	if err != nil {
		return fmt.Errorf("could not open request file: %s", err)
	}
	defer file.Close()

	r := bufio.NewReader(file)

	// The first line is the request line: METHOD PATH PROTOCOL.
	s, err := r.ReadString('\n')
	if err != nil {
		return fmt.Errorf("could not read request: %s", err)
	}
	parts := strings.Split(s, " ")
	if len(parts) < 3 {
		return fmt.Errorf("malformed request supplied")
	}
	// Set the request Method
	conf.Method = parts[0]

	// Read header lines until the first blank line (or a read error), which
	// separates the headers from the body. Lines without a ":" are skipped.
	for {
		line, err := r.ReadString('\n')
		line = strings.TrimSpace(line)

		if err != nil || line == "" {
			break
		}

		p := strings.SplitN(line, ":", 2)
		if len(p) != 2 {
			continue
		}

		// Content-Length is dropped — presumably because the body length
		// changes once keywords are substituted; verify the HTTP client
		// recalculates it.
		if strings.EqualFold(p[0], "content-length") {
			continue
		}

		conf.Headers[strings.TrimSpace(p[0])] = strings.TrimSpace(p[1])
	}

	// Handle case with the full http url in path. In that case,
	// ignore any host header that we encounter and use the path as request URL
	if strings.HasPrefix(parts[1], "http") {
		parsed, err := url.Parse(parts[1])
		if err != nil {
			return fmt.Errorf("could not parse request URL: %s", err)
		}
		conf.Url = parts[1]
		conf.Headers["Host"] = parsed.Host
	} else {
		// Build the request URL from the request: scheme comes from the
		// -request-proto option, host from the Host header read above.
		conf.Url = parseOpts.requestProto + "://" + conf.Headers["Host"] + parts[1]
	}

	// Set the request body: everything remaining in the reader after the
	// blank line that ended the header loop.
	b, err := ioutil.ReadAll(r)
	if err != nil {
		return fmt.Errorf("could not read request body: %s", err)
	}
	conf.Data = string(b)

	return nil
}
|
|
||||||
|
|
||||||
func keywordPresent(keyword string, conf *ffuf.Config) bool {
|
|
||||||
//Search for keyword from HTTP method, URL and POST data too
|
|
||||||
if strings.Index(conf.Method, keyword) != -1 {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
if strings.Index(conf.Url, keyword) != -1 {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
if strings.Index(conf.Data, keyword) != -1 {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
for k, v := range conf.Headers {
|
|
||||||
if strings.Index(k, keyword) != -1 {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
if strings.Index(v, keyword) != -1 {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|||||||
@ -5,46 +5,47 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
type Config struct {
|
type Config struct {
|
||||||
Headers map[string]string `json:"headers"`
|
AutoCalibration bool `json:"autocalibration"`
|
||||||
Extensions []string `json:"extensions"`
|
AutoCalibrationStrings []string `json:"autocalibration_strings"`
|
||||||
DirSearchCompat bool `json:"dirsearch_compatibility"`
|
Cancel context.CancelFunc `json:"-"`
|
||||||
Method string `json:"method"`
|
|
||||||
Url string `json:"url"`
|
|
||||||
Data string `json:"postdata"`
|
|
||||||
Quiet bool `json:"quiet"`
|
|
||||||
Colors bool `json:"colors"`
|
Colors bool `json:"colors"`
|
||||||
InputProviders []InputProviderConfig `json:"inputproviders"`
|
|
||||||
CommandKeywords []string `json:"-"`
|
CommandKeywords []string `json:"-"`
|
||||||
InputNum int `json:"cmd_inputnum"`
|
CommandLine string `json:"cmdline"`
|
||||||
|
ConfigFile string `json:"configfile"`
|
||||||
|
Context context.Context `json:"-"`
|
||||||
|
Data string `json:"postdata"`
|
||||||
|
Delay optRange `json:"delay"`
|
||||||
|
DirSearchCompat bool `json:"dirsearch_compatibility"`
|
||||||
|
Extensions []string `json:"extensions"`
|
||||||
|
Filters map[string]FilterProvider `json:"filters"`
|
||||||
|
FollowRedirects bool `json:"follow_redirects"`
|
||||||
|
Headers map[string]string `json:"headers"`
|
||||||
|
IgnoreBody bool `json:"ignorebody"`
|
||||||
|
IgnoreWordlistComments bool `json:"ignore_wordlist_comments"`
|
||||||
InputMode string `json:"inputmode"`
|
InputMode string `json:"inputmode"`
|
||||||
|
InputNum int `json:"cmd_inputnum"`
|
||||||
|
InputProviders []InputProviderConfig `json:"inputproviders"`
|
||||||
|
Matchers map[string]FilterProvider `json:"matchers"`
|
||||||
|
MaxTime int `json:"maxtime"`
|
||||||
|
MaxTimeJob int `json:"maxtime_job"`
|
||||||
|
Method string `json:"method"`
|
||||||
OutputDirectory string `json:"outputdirectory"`
|
OutputDirectory string `json:"outputdirectory"`
|
||||||
OutputFile string `json:"outputfile"`
|
OutputFile string `json:"outputfile"`
|
||||||
OutputFormat string `json:"outputformat"`
|
OutputFormat string `json:"outputformat"`
|
||||||
IgnoreBody bool `json:"ignorebody"`
|
|
||||||
IgnoreWordlistComments bool `json:"ignore_wordlist_comments"`
|
|
||||||
StopOn403 bool `json:"stop_403"`
|
|
||||||
StopOnErrors bool `json:"stop_errors"`
|
|
||||||
StopOnAll bool `json:"stop_all"`
|
|
||||||
FollowRedirects bool `json:"follow_redirects"`
|
|
||||||
AutoCalibration bool `json:"autocalibration"`
|
|
||||||
AutoCalibrationStrings []string `json:"autocalibration_strings"`
|
|
||||||
Timeout int `json:"timeout"`
|
|
||||||
ProgressFrequency int `json:"-"`
|
ProgressFrequency int `json:"-"`
|
||||||
Delay optRange `json:"delay"`
|
|
||||||
Filters map[string]FilterProvider `json:"filters"`
|
|
||||||
Matchers map[string]FilterProvider `json:"matchers"`
|
|
||||||
Threads int `json:"threads"`
|
|
||||||
Context context.Context `json:"-"`
|
|
||||||
Cancel context.CancelFunc `json:"-"`
|
|
||||||
ProxyURL string `json:"proxyurl"`
|
ProxyURL string `json:"proxyurl"`
|
||||||
ReplayProxyURL string `json:"replayproxyurl"`
|
Quiet bool `json:"quiet"`
|
||||||
CommandLine string `json:"cmdline"`
|
Rate int64 `json:"rate"`
|
||||||
Verbose bool `json:"verbose"`
|
|
||||||
MaxTime int `json:"maxtime"`
|
|
||||||
MaxTimeJob int `json:"maxtime_job"`
|
|
||||||
Recursion bool `json:"recursion"`
|
Recursion bool `json:"recursion"`
|
||||||
RecursionDepth int `json:"recursion_depth"`
|
RecursionDepth int `json:"recursion_depth"`
|
||||||
Rate int64 `json:"rate"`
|
ReplayProxyURL string `json:"replayproxyurl"`
|
||||||
|
StopOn403 bool `json:"stop_403"`
|
||||||
|
StopOnAll bool `json:"stop_all"`
|
||||||
|
StopOnErrors bool `json:"stop_errors"`
|
||||||
|
Threads int `json:"threads"`
|
||||||
|
Timeout int `json:"timeout"`
|
||||||
|
Url string `json:"url"`
|
||||||
|
Verbose bool `json:"verbose"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type InputProviderConfig struct {
|
type InputProviderConfig struct {
|
||||||
@ -55,37 +56,41 @@ type InputProviderConfig struct {
|
|||||||
|
|
||||||
func NewConfig(ctx context.Context, cancel context.CancelFunc) Config {
|
func NewConfig(ctx context.Context, cancel context.CancelFunc) Config {
|
||||||
var conf Config
|
var conf Config
|
||||||
|
conf.AutoCalibrationStrings = make([]string, 0)
|
||||||
|
conf.CommandKeywords = make([]string, 0)
|
||||||
conf.Context = ctx
|
conf.Context = ctx
|
||||||
conf.Cancel = cancel
|
conf.Cancel = cancel
|
||||||
conf.Headers = make(map[string]string)
|
|
||||||
conf.Method = "GET"
|
|
||||||
conf.Url = ""
|
|
||||||
conf.Data = ""
|
conf.Data = ""
|
||||||
conf.Quiet = false
|
|
||||||
conf.IgnoreWordlistComments = false
|
|
||||||
conf.StopOn403 = false
|
|
||||||
conf.StopOnErrors = false
|
|
||||||
conf.StopOnAll = false
|
|
||||||
conf.FollowRedirects = false
|
|
||||||
conf.InputProviders = make([]InputProviderConfig, 0)
|
|
||||||
conf.CommandKeywords = make([]string, 0)
|
|
||||||
conf.AutoCalibrationStrings = make([]string, 0)
|
|
||||||
conf.InputNum = 0
|
|
||||||
conf.InputMode = "clusterbomb"
|
|
||||||
conf.ProxyURL = ""
|
|
||||||
conf.Filters = make(map[string]FilterProvider)
|
|
||||||
conf.Matchers = make(map[string]FilterProvider)
|
|
||||||
conf.Delay = optRange{0, 0, false, false}
|
conf.Delay = optRange{0, 0, false, false}
|
||||||
conf.Extensions = make([]string, 0)
|
|
||||||
conf.Timeout = 10
|
|
||||||
// Progress update frequency, in milliseconds
|
|
||||||
conf.ProgressFrequency = 125
|
|
||||||
conf.DirSearchCompat = false
|
conf.DirSearchCompat = false
|
||||||
conf.Verbose = false
|
conf.Extensions = make([]string, 0)
|
||||||
|
conf.Filters = make(map[string]FilterProvider)
|
||||||
|
conf.FollowRedirects = false
|
||||||
|
conf.Headers = make(map[string]string)
|
||||||
|
conf.IgnoreWordlistComments = false
|
||||||
|
conf.InputMode = "clusterbomb"
|
||||||
|
conf.InputNum = 0
|
||||||
|
conf.InputProviders = make([]InputProviderConfig, 0)
|
||||||
|
conf.Matchers = make(map[string]FilterProvider)
|
||||||
conf.MaxTime = 0
|
conf.MaxTime = 0
|
||||||
conf.MaxTimeJob = 0
|
conf.MaxTimeJob = 0
|
||||||
|
conf.Method = "GET"
|
||||||
|
conf.ProgressFrequency = 125
|
||||||
|
conf.ProxyURL = ""
|
||||||
|
conf.Quiet = false
|
||||||
|
conf.Rate = 0
|
||||||
conf.Recursion = false
|
conf.Recursion = false
|
||||||
conf.RecursionDepth = 0
|
conf.RecursionDepth = 0
|
||||||
conf.Rate = 0
|
conf.StopOn403 = false
|
||||||
|
conf.StopOnAll = false
|
||||||
|
conf.StopOnErrors = false
|
||||||
|
conf.Timeout = 10
|
||||||
|
conf.Url = ""
|
||||||
|
conf.Verbose = false
|
||||||
return conf
|
return conf
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (c *Config) SetContext(ctx context.Context, cancel context.CancelFunc) {
|
||||||
|
c.Context = ctx
|
||||||
|
c.Cancel = cancel
|
||||||
|
}
|
||||||
|
|||||||
499
pkg/ffuf/optionsparser.go
Normal file
499
pkg/ffuf/optionsparser.go
Normal file
@ -0,0 +1,499 @@
|
|||||||
|
package ffuf
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"io/ioutil"
|
||||||
|
"net/textproto"
|
||||||
|
"net/url"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"runtime"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/pelletier/go-toml"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ConfigOptions is the serializable, user-facing option set. Its shape mirrors
// the sections of the TOML configuration file (and the CLI flag groups), and it
// is later translated into the runtime Config by ConfigFromOptions.
type ConfigOptions struct {
	Filter  FilterOptions
	General GeneralOptions
	HTTP    HTTPOptions
	Input   InputOptions
	Matcher MatcherOptions
	Output  OutputOptions
}

// HTTPOptions holds the options that shape the outgoing HTTP requests:
// method, target URL, headers/cookies, body data, proxies and recursion.
type HTTPOptions struct {
	Cookies         []string
	Data            string
	FollowRedirects bool
	Headers         []string
	IgnoreBody      bool
	Method          string
	ProxyURL        string
	Recursion       bool
	RecursionDepth  int
	ReplayProxyURL  string
	Timeout         int
	URL             string
}

// GeneralOptions holds run-wide behavior options: calibration, output
// verbosity, rate limiting, stop conditions and concurrency. Fields tagged
// `toml:"-"` are CLI-only and never read from a configuration file.
type GeneralOptions struct {
	AutoCalibration        bool
	AutoCalibrationStrings []string
	Colors                 bool
	ConfigFile             string `toml:"-"`
	Delay                  string
	MaxTime                int
	MaxTimeJob             int
	Quiet                  bool
	Rate                   int
	ShowVersion            bool `toml:"-"`
	StopOn403              bool
	StopOnAll              bool
	StopOnErrors           bool
	Threads                int
	Verbose                bool
}

// InputOptions holds the options describing where fuzzing input comes from:
// wordlists, input commands, raw request files and input mode.
type InputOptions struct {
	DirSearchCompat        bool
	Extensions             string
	IgnoreWordlistComments bool
	InputMode              string
	InputNum               int
	Inputcommands          []string
	Request                string
	RequestProto           string
	Wordlists              []string
}

// OutputOptions holds the options controlling where and in which format
// results (and debug logs) are written.
type OutputOptions struct {
	DebugLog        string
	OutputDirectory string
	OutputFile      string
	OutputFormat    string
}

// FilterOptions holds the response-filtering criteria (responses matching
// these are dropped). Each field is the raw string form of a filter value.
type FilterOptions struct {
	Lines  string
	Regexp string
	Size   string
	Status string
	Words  string
}

// MatcherOptions holds the response-matching criteria (responses matching
// these are kept). Each field is the raw string form of a matcher value.
type MatcherOptions struct {
	Lines  string
	Regexp string
	Size   string
	Status string
	Words  string
}
|
||||||
|
|
||||||
|
//NewConfigOptions returns a newly created ConfigOptions struct with default values
|
||||||
|
func NewConfigOptions() *ConfigOptions {
|
||||||
|
c := &ConfigOptions{}
|
||||||
|
c.Filter.Lines = ""
|
||||||
|
c.Filter.Regexp = ""
|
||||||
|
c.Filter.Size = ""
|
||||||
|
c.Filter.Status = ""
|
||||||
|
c.Filter.Words = ""
|
||||||
|
c.General.AutoCalibration = false
|
||||||
|
c.General.Colors = false
|
||||||
|
c.General.Delay = ""
|
||||||
|
c.General.MaxTime = 0
|
||||||
|
c.General.MaxTimeJob = 0
|
||||||
|
c.General.Quiet = false
|
||||||
|
c.General.Rate = 0
|
||||||
|
c.General.ShowVersion = false
|
||||||
|
c.General.StopOn403 = false
|
||||||
|
c.General.StopOnAll = false
|
||||||
|
c.General.StopOnErrors = false
|
||||||
|
c.General.Threads = 40
|
||||||
|
c.General.Verbose = false
|
||||||
|
c.HTTP.Data = ""
|
||||||
|
c.HTTP.FollowRedirects = false
|
||||||
|
c.HTTP.IgnoreBody = false
|
||||||
|
c.HTTP.Method = "GET"
|
||||||
|
c.HTTP.ProxyURL = ""
|
||||||
|
c.HTTP.Recursion = false
|
||||||
|
c.HTTP.RecursionDepth = 0
|
||||||
|
c.HTTP.ReplayProxyURL = ""
|
||||||
|
c.HTTP.Timeout = 10
|
||||||
|
c.HTTP.URL = ""
|
||||||
|
c.Input.DirSearchCompat = false
|
||||||
|
c.Input.Extensions = ""
|
||||||
|
c.Input.IgnoreWordlistComments = false
|
||||||
|
c.Input.InputMode = "clusterbomb"
|
||||||
|
c.Input.InputNum = 100
|
||||||
|
c.Input.Request = ""
|
||||||
|
c.Input.RequestProto = "https"
|
||||||
|
c.Matcher.Lines = ""
|
||||||
|
c.Matcher.Regexp = ""
|
||||||
|
c.Matcher.Size = ""
|
||||||
|
c.Matcher.Status = "200,204,301,302,307,401,403"
|
||||||
|
c.Matcher.Words = ""
|
||||||
|
c.Output.DebugLog = ""
|
||||||
|
c.Output.OutputDirectory = ""
|
||||||
|
c.Output.OutputFile = ""
|
||||||
|
c.Output.OutputFormat = "json"
|
||||||
|
return c
|
||||||
|
}
|
||||||
|
|
||||||
|
//ConfigFromOptions parses the values in ConfigOptions struct, ensures that the values are sane,
// and creates a Config struct out of them.
func ConfigFromOptions(parseOpts *ConfigOptions, ctx context.Context, cancel context.CancelFunc) (*Config, error) {
	//TODO: refactor in a proper flag library that can handle things like required flags
	errs := NewMultierror()
	conf := NewConfig(ctx, cancel)

	var err error
	var err2 error
	// Either a target URL or a raw request file must be supplied.
	if len(parseOpts.HTTP.URL) == 0 && parseOpts.Input.Request == "" {
		errs.Add(fmt.Errorf("-u flag or -request flag is required"))
	}

	// prepare extensions
	if parseOpts.Input.Extensions != "" {
		extensions := strings.Split(parseOpts.Input.Extensions, ",")
		conf.Extensions = extensions
	}

	// Convert cookies to a header
	if len(parseOpts.HTTP.Cookies) > 0 {
		parseOpts.HTTP.Headers = append(parseOpts.HTTP.Headers, "Cookie: "+strings.Join(parseOpts.HTTP.Cookies, "; "))
	}

	//Prepare inputproviders from the wordlist parameters (FILE[:KEYWORD]).
	for _, v := range parseOpts.Input.Wordlists {
		var wl []string
		if runtime.GOOS == "windows" {
			// Try to ensure that Windows file paths like C:\path\to\wordlist.txt:KEYWORD are treated properly
			if FileExists(v) {
				// The wordlist was supplied without a keyword parameter
				wl = []string{v}
			} else {
				filepart := v[:strings.LastIndex(v, ":")]
				if FileExists(filepart) {
					wl = []string{filepart, v[strings.LastIndex(v, ":")+1:]}
				} else {
					// The file was not found. Use full wordlist parameter value for more concise error message down the line
					wl = []string{v}
				}
			}
		} else {
			wl = strings.SplitN(v, ":", 2)
		}
		if len(wl) == 2 {
			conf.InputProviders = append(conf.InputProviders, InputProviderConfig{
				Name:    "wordlist",
				Value:   wl[0],
				Keyword: wl[1],
			})
		} else {
			// No keyword given — default to FUZZ.
			conf.InputProviders = append(conf.InputProviders, InputProviderConfig{
				Name:    "wordlist",
				Value:   wl[0],
				Keyword: "FUZZ",
			})
		}
	}
	// Input providers from input commands (CMD[:KEYWORD]).
	for _, v := range parseOpts.Input.Inputcommands {
		ic := strings.SplitN(v, ":", 2)
		if len(ic) == 2 {
			conf.InputProviders = append(conf.InputProviders, InputProviderConfig{
				Name:    "command",
				Value:   ic[0],
				Keyword: ic[1],
			})
			// NOTE(review): appends the command value (ic[0]) rather than the
			// keyword (ic[1]) — verify that this is intentional.
			conf.CommandKeywords = append(conf.CommandKeywords, ic[0])
		} else {
			conf.InputProviders = append(conf.InputProviders, InputProviderConfig{
				Name:    "command",
				Value:   ic[0],
				Keyword: "FUZZ",
			})
			conf.CommandKeywords = append(conf.CommandKeywords, "FUZZ")
		}
	}

	if len(conf.InputProviders) == 0 {
		errs.Add(fmt.Errorf("Either -w or --input-cmd flag is required"))
	}

	// Prepare the request using body
	if parseOpts.Input.Request != "" {
		err := parseRawRequest(parseOpts, &conf)
		if err != nil {
			errmsg := fmt.Sprintf("Could not parse raw request: %s", err)
			errs.Add(fmt.Errorf(errmsg))
		}
	}

	//Prepare URL
	if parseOpts.HTTP.URL != "" {
		conf.Url = parseOpts.HTTP.URL
	}

	//Prepare headers and make canonical, except when the header name is itself
	// a fuzzing keyword (command keyword or input provider keyword).
	for _, v := range parseOpts.HTTP.Headers {
		hs := strings.SplitN(v, ":", 2)
		if len(hs) == 2 {
			// trim and make canonical
			// except if used in custom defined header
			var CanonicalNeeded = true
			for _, a := range conf.CommandKeywords {
				if a == hs[0] {
					CanonicalNeeded = false
				}
			}
			// check if part of InputProviders
			if CanonicalNeeded {
				for _, b := range conf.InputProviders {
					if b.Keyword == hs[0] {
						CanonicalNeeded = false
					}
				}
			}
			if CanonicalNeeded {
				var CanonicalHeader = textproto.CanonicalMIMEHeaderKey(strings.TrimSpace(hs[0]))
				conf.Headers[CanonicalHeader] = strings.TrimSpace(hs[1])
			} else {
				conf.Headers[strings.TrimSpace(hs[0])] = strings.TrimSpace(hs[1])
			}
		} else {
			errs.Add(fmt.Errorf("Header defined by -H needs to have a value. \":\" should be used as a separator"))
		}
	}

	//Prepare delay: a single float or a dash-separated min-max range.
	d := strings.Split(parseOpts.General.Delay, "-")
	if len(d) > 2 {
		errs.Add(fmt.Errorf("Delay needs to be either a single float: \"0.1\" or a range of floats, delimited by dash: \"0.1-0.8\""))
	} else if len(d) == 2 {
		conf.Delay.IsRange = true
		conf.Delay.HasDelay = true
		conf.Delay.Min, err = strconv.ParseFloat(d[0], 64)
		conf.Delay.Max, err2 = strconv.ParseFloat(d[1], 64)
		if err != nil || err2 != nil {
			errs.Add(fmt.Errorf("Delay range min and max values need to be valid floats. For example: 0.1-0.5"))
		}
	} else if len(parseOpts.General.Delay) > 0 {
		conf.Delay.IsRange = false
		conf.Delay.HasDelay = true
		conf.Delay.Min, err = strconv.ParseFloat(parseOpts.General.Delay, 64)
		if err != nil {
			errs.Add(fmt.Errorf("Delay needs to be either a single float: \"0.1\" or a range of floats, delimited by dash: \"0.1-0.8\""))
		}
	}

	// Verify proxy url format
	if len(parseOpts.HTTP.ProxyURL) > 0 {
		_, err := url.Parse(parseOpts.HTTP.ProxyURL)
		if err != nil {
			errs.Add(fmt.Errorf("Bad proxy url (-x) format: %s", err))
		} else {
			conf.ProxyURL = parseOpts.HTTP.ProxyURL
		}
	}

	// Verify replayproxy url format
	if len(parseOpts.HTTP.ReplayProxyURL) > 0 {
		_, err := url.Parse(parseOpts.HTTP.ReplayProxyURL)
		if err != nil {
			errs.Add(fmt.Errorf("Bad replay-proxy url (-replay-proxy) format: %s", err))
		} else {
			conf.ReplayProxyURL = parseOpts.HTTP.ReplayProxyURL
		}
	}

	//Check the output file format option
	if parseOpts.Output.OutputFile != "" {
		//No need to check / error out if output file isn't defined
		outputFormats := []string{"all", "json", "ejson", "html", "md", "csv", "ecsv"}
		found := false
		for _, f := range outputFormats {
			if f == parseOpts.Output.OutputFormat {
				conf.OutputFormat = f
				found = true
			}
		}
		if !found {
			errs.Add(fmt.Errorf("Unknown output file format (-of): %s", parseOpts.Output.OutputFormat))
		}
	}

	// Auto-calibration strings
	if len(parseOpts.General.AutoCalibrationStrings) > 0 {
		conf.AutoCalibrationStrings = parseOpts.General.AutoCalibrationStrings
	}
	// Using -acc implies -ac
	if len(parseOpts.General.AutoCalibrationStrings) > 0 {
		conf.AutoCalibration = true
	}

	// A negative rate means unlimited, stored as zero.
	if parseOpts.General.Rate < 0 {
		conf.Rate = 0
	} else {
		conf.Rate = int64(parseOpts.General.Rate)
	}

	// Common stuff: direct one-to-one copies from the option struct.
	conf.IgnoreWordlistComments = parseOpts.Input.IgnoreWordlistComments
	conf.DirSearchCompat = parseOpts.Input.DirSearchCompat
	conf.Data = parseOpts.HTTP.Data
	conf.Colors = parseOpts.General.Colors
	conf.InputNum = parseOpts.Input.InputNum
	conf.InputMode = parseOpts.Input.InputMode
	// NOTE(review): this overwrites the Method parsed from a raw request file
	// by parseRawRequest above — confirm the ordering is intentional.
	conf.Method = parseOpts.HTTP.Method
	conf.OutputFile = parseOpts.Output.OutputFile
	conf.OutputDirectory = parseOpts.Output.OutputDirectory
	conf.IgnoreBody = parseOpts.HTTP.IgnoreBody
	conf.Quiet = parseOpts.General.Quiet
	conf.StopOn403 = parseOpts.General.StopOn403
	conf.StopOnAll = parseOpts.General.StopOnAll
	conf.StopOnErrors = parseOpts.General.StopOnErrors
	conf.FollowRedirects = parseOpts.HTTP.FollowRedirects
	conf.Recursion = parseOpts.HTTP.Recursion
	conf.RecursionDepth = parseOpts.HTTP.RecursionDepth
	conf.AutoCalibration = parseOpts.General.AutoCalibration
	conf.Threads = parseOpts.General.Threads
	conf.Timeout = parseOpts.HTTP.Timeout
	conf.MaxTime = parseOpts.General.MaxTime
	conf.MaxTimeJob = parseOpts.General.MaxTimeJob
	conf.Verbose = parseOpts.General.Verbose

	// Handle copy as curl situation where POST method is implied by --data flag. If method is set to anything but GET, NOOP
	if len(conf.Data) > 0 &&
		conf.Method == "GET" &&
		//don't modify the method automatically if a request file is being used as input
		len(parseOpts.Input.Request) == 0 {

		conf.Method = "POST"
	}

	conf.CommandLine = strings.Join(os.Args, " ")

	// Every defined keyword must actually appear somewhere in the request.
	for _, provider := range conf.InputProviders {
		if !keywordPresent(provider.Keyword, &conf) {
			errmsg := fmt.Sprintf("Keyword %s defined, but not found in headers, method, URL or POST data.", provider.Keyword)
			errs.Add(fmt.Errorf(errmsg))
		}
	}

	// Do checks for recursion mode
	if parseOpts.HTTP.Recursion {
		if !strings.HasSuffix(conf.Url, "FUZZ") {
			errmsg := fmt.Sprintf("When using -recursion the URL (-u) must end with FUZZ keyword.")
			errs.Add(fmt.Errorf(errmsg))
		}
	}
	return &conf, errs.ErrorOrNil()
}
|
||||||
|
|
||||||
|
// parseRawRequest reads a raw HTTP request from the file named by
// parseOpts.Input.Request and populates conf from it: the request method,
// headers, target URL and request body (conf.Data).
//
// The file is parsed in wire order: request line first, then headers up to
// the first blank line, then the remainder as the body.
func parseRawRequest(parseOpts *ConfigOptions, conf *Config) error {
	file, err := os.Open(parseOpts.Input.Request)
	if err != nil {
		return fmt.Errorf("could not open request file: %s", err)
	}
	defer file.Close()

	r := bufio.NewReader(file)

	// Request line: "METHOD request-target HTTP-version"
	s, err := r.ReadString('\n')
	if err != nil {
		return fmt.Errorf("could not read request: %s", err)
	}
	parts := strings.Split(s, " ")
	if len(parts) < 3 {
		return fmt.Errorf("malformed request supplied")
	}
	// Set the request Method
	conf.Method = parts[0]

	// Header block: read until the first blank line or a read error (EOF).
	for {
		line, err := r.ReadString('\n')
		line = strings.TrimSpace(line)

		if err != nil || line == "" {
			break
		}

		p := strings.SplitN(line, ":", 2)
		if len(p) != 2 {
			// Not a "Name: value" header; skip it silently.
			continue
		}

		// Content-Length is dropped here — presumably it is recalculated
		// when the fuzzed request is sent (stale lengths would corrupt
		// mutated bodies). NOTE(review): confirm against the runner.
		if strings.EqualFold(p[0], "content-length") {
			continue
		}

		conf.Headers[strings.TrimSpace(p[0])] = strings.TrimSpace(p[1])
	}

	// Handle case with the full http url in path. In that case,
	// ignore any host header that we encounter and use the path as request URL
	if strings.HasPrefix(parts[1], "http") {
		parsed, err := url.Parse(parts[1])
		if err != nil {
			return fmt.Errorf("could not parse request URL: %s", err)
		}
		conf.Url = parts[1]
		conf.Headers["Host"] = parsed.Host
	} else {
		// Build the request URL from the request-target plus the Host header,
		// using the protocol given on the command line (-request-proto).
		conf.Url = parseOpts.Input.RequestProto + "://" + conf.Headers["Host"] + parts[1]
	}

	// Set the request body: everything left in the reader after the headers.
	b, err := ioutil.ReadAll(r)
	if err != nil {
		return fmt.Errorf("could not read request body: %s", err)
	}
	conf.Data = string(b)

	return nil
}
|
||||||
|
|
||||||
|
func keywordPresent(keyword string, conf *Config) bool {
|
||||||
|
//Search for keyword from HTTP method, URL and POST data too
|
||||||
|
if strings.Index(conf.Method, keyword) != -1 {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
if strings.Index(conf.Url, keyword) != -1 {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
if strings.Index(conf.Data, keyword) != -1 {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
for k, v := range conf.Headers {
|
||||||
|
if strings.Index(k, keyword) != -1 {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
if strings.Index(v, keyword) != -1 {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
func ReadConfig(configFile string) (*ConfigOptions, error) {
|
||||||
|
conf := NewConfigOptions()
|
||||||
|
configData, err := ioutil.ReadFile(configFile)
|
||||||
|
if err == nil {
|
||||||
|
err = toml.Unmarshal(configData, conf)
|
||||||
|
}
|
||||||
|
return conf, err
|
||||||
|
}
|
||||||
|
|
||||||
|
func ReadDefaultConfig() (*ConfigOptions, error) {
|
||||||
|
userhome, err := os.UserHomeDir()
|
||||||
|
if err != nil {
|
||||||
|
return NewConfigOptions(), err
|
||||||
|
}
|
||||||
|
defaultconf := filepath.Join(userhome, ".ffufrc")
|
||||||
|
return ReadConfig(defaultconf)
|
||||||
|
}
|
||||||
@ -1,6 +1,7 @@
|
|||||||
package filter
|
package filter
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"flag"
|
||||||
"fmt"
|
"fmt"
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
@ -95,3 +96,89 @@ func calibrateFilters(j *ffuf.Job, responses []ffuf.Response) {
|
|||||||
AddFilter(j.Config, "line", strings.Join(lineCalib, ","))
|
AddFilter(j.Config, "line", strings.Join(lineCalib, ","))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func SetupFilters(parseOpts *ffuf.ConfigOptions, conf *ffuf.Config) error {
|
||||||
|
errs := ffuf.NewMultierror()
|
||||||
|
// If any other matcher is set, ignore -mc default value
|
||||||
|
matcherSet := false
|
||||||
|
statusSet := false
|
||||||
|
warningIgnoreBody := false
|
||||||
|
flag.Visit(func(f *flag.Flag) {
|
||||||
|
if f.Name == "mc" {
|
||||||
|
statusSet = true
|
||||||
|
}
|
||||||
|
if f.Name == "ms" {
|
||||||
|
matcherSet = true
|
||||||
|
warningIgnoreBody = true
|
||||||
|
}
|
||||||
|
if f.Name == "ml" {
|
||||||
|
matcherSet = true
|
||||||
|
warningIgnoreBody = true
|
||||||
|
}
|
||||||
|
if f.Name == "mr" {
|
||||||
|
matcherSet = true
|
||||||
|
}
|
||||||
|
if f.Name == "mw" {
|
||||||
|
matcherSet = true
|
||||||
|
warningIgnoreBody = true
|
||||||
|
}
|
||||||
|
})
|
||||||
|
if statusSet || !matcherSet {
|
||||||
|
if err := AddMatcher(conf, "status", parseOpts.Matcher.Status); err != nil {
|
||||||
|
errs.Add(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if parseOpts.Filter.Status != "" {
|
||||||
|
if err := AddFilter(conf, "status", parseOpts.Filter.Status); err != nil {
|
||||||
|
errs.Add(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if parseOpts.Filter.Size != "" {
|
||||||
|
warningIgnoreBody = true
|
||||||
|
if err := AddFilter(conf, "size", parseOpts.Filter.Size); err != nil {
|
||||||
|
errs.Add(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if parseOpts.Filter.Regexp != "" {
|
||||||
|
if err := AddFilter(conf, "regexp", parseOpts.Filter.Regexp); err != nil {
|
||||||
|
errs.Add(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if parseOpts.Filter.Words != "" {
|
||||||
|
warningIgnoreBody = true
|
||||||
|
if err := AddFilter(conf, "word", parseOpts.Filter.Words); err != nil {
|
||||||
|
errs.Add(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if parseOpts.Filter.Lines != "" {
|
||||||
|
warningIgnoreBody = true
|
||||||
|
if err := AddFilter(conf, "line", parseOpts.Filter.Lines); err != nil {
|
||||||
|
errs.Add(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if parseOpts.Matcher.Size != "" {
|
||||||
|
if err := AddMatcher(conf, "size", parseOpts.Matcher.Size); err != nil {
|
||||||
|
errs.Add(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if parseOpts.Matcher.Regexp != "" {
|
||||||
|
if err := AddMatcher(conf, "regexp", parseOpts.Matcher.Regexp); err != nil {
|
||||||
|
errs.Add(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if parseOpts.Matcher.Words != "" {
|
||||||
|
if err := AddMatcher(conf, "word", parseOpts.Matcher.Words); err != nil {
|
||||||
|
errs.Add(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if parseOpts.Matcher.Lines != "" {
|
||||||
|
if err := AddMatcher(conf, "line", parseOpts.Matcher.Lines); err != nil {
|
||||||
|
errs.Add(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if conf.IgnoreBody && warningIgnoreBody {
|
||||||
|
fmt.Printf("*** Warning: possible undesired combination of -ignore-body and the response options: fl,fs,fw,ml,ms and mw.\n")
|
||||||
|
}
|
||||||
|
return errs.ErrorOrNil()
|
||||||
|
}
|
||||||
|
|||||||
@ -13,17 +13,27 @@ type MainInputProvider struct {
|
|||||||
msbIterator int
|
msbIterator int
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewInputProvider(conf *ffuf.Config) (ffuf.InputProvider, error) {
|
func NewInputProvider(conf *ffuf.Config) (ffuf.InputProvider, ffuf.Multierror) {
|
||||||
validmode := false
|
validmode := false
|
||||||
|
errs := ffuf.NewMultierror()
|
||||||
for _, mode := range []string{"clusterbomb", "pitchfork"} {
|
for _, mode := range []string{"clusterbomb", "pitchfork"} {
|
||||||
if conf.InputMode == mode {
|
if conf.InputMode == mode {
|
||||||
validmode = true
|
validmode = true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if !validmode {
|
if !validmode {
|
||||||
return &MainInputProvider{}, fmt.Errorf("Input mode (-mode) %s not recognized", conf.InputMode)
|
errs.Add(fmt.Errorf("Input mode (-mode) %s not recognized", conf.InputMode))
|
||||||
|
return &MainInputProvider{}, errs
|
||||||
}
|
}
|
||||||
return &MainInputProvider{Config: conf, msbIterator: 0}, nil
|
mainip := MainInputProvider{Config: conf, msbIterator: 0}
|
||||||
|
// Initialize the correct inputprovider
|
||||||
|
for _, v := range conf.InputProviders {
|
||||||
|
err := mainip.AddProvider(v)
|
||||||
|
if err != nil {
|
||||||
|
errs.Add(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return &mainip, errs
|
||||||
}
|
}
|
||||||
|
|
||||||
func (i *MainInputProvider) AddProvider(provider ffuf.InputProviderConfig) error {
|
func (i *MainInputProvider) AddProvider(provider ffuf.InputProviderConfig) error {
|
||||||
|
|||||||
Loading…
Reference in New Issue
Block a user