pkg: handle gosimple linter findings (#322)

This change is an attempt to handle gosimple linter findings in order to
make the code easier to follow. It includes the following changes:

- use strings.Contains instead of strings.Index != -1
- use time.Since which is the standard library helper. See https://github.com/golang/go/blob/go1.15.2/src/time/time.go#L866-L867
- remove unneeded return statements at the end of methods
- preallocate maps when their capacity is known
- avoid underscoring values when they can be omitted
- avoid fmt.Sprintf() calls when the only argument is already a string

Signed-off-by: Miguel Ángel Jimeno <miguelangel4b@gmail.com>
This commit is contained in:
M. Ángel Jimeno 2020-10-03 09:45:07 +02:00 committed by GitHub
parent 7099b61f2d
commit 19937c4929
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
13 changed files with 29 additions and 48 deletions

View File

@ -123,10 +123,7 @@ func (j *Job) Start() {
} }
func (j *Job) jobsInQueue() bool { func (j *Job) jobsInQueue() bool {
if j.queuepos < len(j.queuejobs) { return j.queuepos < len(j.queuejobs)
return true
}
return false
} }
func (j *Job) prepareQueueJob() { func (j *Job) prepareQueueJob() {
@ -192,14 +189,13 @@ func (j *Job) startExecution() {
} }
wg.Wait() wg.Wait()
j.updateProgress() j.updateProgress()
return
} }
func (j *Job) interruptMonitor() { func (j *Job) interruptMonitor() {
sigChan := make(chan os.Signal, 2) sigChan := make(chan os.Signal, 2)
signal.Notify(sigChan, os.Interrupt, syscall.SIGTERM) signal.Notify(sigChan, os.Interrupt, syscall.SIGTERM)
go func() { go func() {
for _ = range sigChan { for range sigChan {
j.Error = "Caught keyboard interrupt (Ctrl-C)\n" j.Error = "Caught keyboard interrupt (Ctrl-C)\n"
j.Stop() j.Stop()
} }
@ -321,7 +317,6 @@ func (j *Job) runTask(input map[string][]byte, position int, retried bool) {
if j.Config.Recursion && len(resp.GetRedirectLocation(false)) > 0 { if j.Config.Recursion && len(resp.GetRedirectLocation(false)) > 0 {
j.handleRecursionJob(resp) j.handleRecursionJob(resp)
} }
return
} }
//handleRecursionJob adds a new recursion job to the job queue if a new directory is found //handleRecursionJob adds a new recursion job to the job queue if a new directory is found
@ -356,7 +351,7 @@ func (j *Job) CalibrateResponses() ([]Response, error) {
results := make([]Response, 0) results := make([]Response, 0)
for _, input := range cInputs { for _, input := range cInputs {
inputs := make(map[string][]byte, 0) inputs := make(map[string][]byte, len(j.Config.InputProviders))
for _, v := range j.Config.InputProviders { for _, v := range j.Config.InputProviders {
inputs[v.Keyword] = []byte(input) inputs[v.Keyword] = []byte(input)
} }
@ -409,7 +404,7 @@ func (j *Job) CheckStop() {
// Check for runtime of entire process // Check for runtime of entire process
if j.Config.MaxTime > 0 { if j.Config.MaxTime > 0 {
dur := time.Now().Sub(j.startTime) dur := time.Since(j.startTime)
runningSecs := int(dur / time.Second) runningSecs := int(dur / time.Second)
if runningSecs >= j.Config.MaxTime { if runningSecs >= j.Config.MaxTime {
j.Error = "Maximum running time for entire process reached, exiting." j.Error = "Maximum running time for entire process reached, exiting."
@ -419,7 +414,7 @@ func (j *Job) CheckStop() {
// Check for runtime of current job // Check for runtime of current job
if j.Config.MaxTimeJob > 0 { if j.Config.MaxTimeJob > 0 {
dur := time.Now().Sub(j.startTimeJob) dur := time.Since(j.startTimeJob)
runningSecs := int(dur / time.Second) runningSecs := int(dur / time.Second)
if runningSecs >= j.Config.MaxTimeJob { if runningSecs >= j.Config.MaxTimeJob {
j.Error = "Maximum running time for this job reached, continuing with next job if one exists." j.Error = "Maximum running time for this job reached, continuing with next job if one exists."
@ -433,11 +428,9 @@ func (j *Job) CheckStop() {
func (j *Job) Stop() { func (j *Job) Stop() {
j.Running = false j.Running = false
j.Config.Cancel() j.Config.Cancel()
return
} }
//Stop current, resume to next //Stop current, resume to next
func (j *Job) Next() { func (j *Job) Next() {
j.RunningJob = false j.RunningJob = false
return
} }

View File

@ -387,7 +387,7 @@ func ConfigFromOptions(parseOpts *ConfigOptions, ctx context.Context, cancel con
// Do checks for recursion mode // Do checks for recursion mode
if parseOpts.HTTP.Recursion { if parseOpts.HTTP.Recursion {
if !strings.HasSuffix(conf.Url, "FUZZ") { if !strings.HasSuffix(conf.Url, "FUZZ") {
errmsg := fmt.Sprintf("When using -recursion the URL (-u) must end with FUZZ keyword.") errmsg := "When using -recursion the URL (-u) must end with FUZZ keyword."
errs.Add(fmt.Errorf(errmsg)) errs.Add(fmt.Errorf(errmsg))
} }
} }
@ -460,20 +460,20 @@ func parseRawRequest(parseOpts *ConfigOptions, conf *Config) error {
func keywordPresent(keyword string, conf *Config) bool { func keywordPresent(keyword string, conf *Config) bool {
//Search for keyword from HTTP method, URL and POST data too //Search for keyword from HTTP method, URL and POST data too
if strings.Index(conf.Method, keyword) != -1 { if strings.Contains(conf.Method, keyword) {
return true return true
} }
if strings.Index(conf.Url, keyword) != -1 { if strings.Contains(conf.Url, keyword) {
return true return true
} }
if strings.Index(conf.Data, keyword) != -1 { if strings.Contains(conf.Data, keyword) {
return true return true
} }
for k, v := range conf.Headers { for k, v := range conf.Headers {
if strings.Index(k, keyword) != -1 { if strings.Contains(k, keyword) {
return true return true
} }
if strings.Index(v, keyword) != -1 { if strings.Contains(v, keyword) {
return true return true
} }
} }

View File

@ -25,7 +25,7 @@ func UniqStringSlice(inslice []string) []string {
found[v] = true found[v] = true
} }
ret := []string{} ret := []string{}
for k, _ := range found { for k := range found {
ret = append(ret, k) ret = append(ret, k)
} }
return ret return ret

View File

@ -10,7 +10,7 @@ import (
func TestNewLineFilter(t *testing.T) { func TestNewLineFilter(t *testing.T) {
f, _ := NewLineFilter("200,301,400-410,500") f, _ := NewLineFilter("200,301,400-410,500")
linesRepr := f.Repr() linesRepr := f.Repr()
if strings.Index(linesRepr, "200,301,400-410,500") == -1 { if !strings.Contains(linesRepr, "200,301,400-410,500") {
t.Errorf("Word filter was expected to have 4 values") t.Errorf("Word filter was expected to have 4 values")
} }
} }

View File

@ -10,7 +10,7 @@ import (
func TestNewRegexpFilter(t *testing.T) { func TestNewRegexpFilter(t *testing.T) {
f, _ := NewRegexpFilter("s([a-z]+)arch") f, _ := NewRegexpFilter("s([a-z]+)arch")
statusRepr := f.Repr() statusRepr := f.Repr()
if strings.Index(statusRepr, "s([a-z]+)arch") == -1 { if !strings.Contains(statusRepr, "s([a-z]+)arch") {
t.Errorf("Status filter was expected to have a regexp value") t.Errorf("Status filter was expected to have a regexp value")
} }
} }

View File

@ -10,7 +10,7 @@ import (
func TestNewSizeFilter(t *testing.T) { func TestNewSizeFilter(t *testing.T) {
f, _ := NewSizeFilter("1,2,3,444,5-90") f, _ := NewSizeFilter("1,2,3,444,5-90")
sizeRepr := f.Repr() sizeRepr := f.Repr()
if strings.Index(sizeRepr, "1,2,3,444,5-90") == -1 { if !strings.Contains(sizeRepr, "1,2,3,444,5-90") {
t.Errorf("Size filter was expected to have 5 values") t.Errorf("Size filter was expected to have 5 values")
} }
} }

View File

@ -10,7 +10,7 @@ import (
func TestNewStatusFilter(t *testing.T) { func TestNewStatusFilter(t *testing.T) {
f, _ := NewStatusFilter("200,301,400-410,500") f, _ := NewStatusFilter("200,301,400-410,500")
statusRepr := f.Repr() statusRepr := f.Repr()
if strings.Index(statusRepr, "200,301,400-410,500") == -1 { if !strings.Contains(statusRepr, "200,301,400-410,500") {
t.Errorf("Status filter was expected to have 4 values") t.Errorf("Status filter was expected to have 4 values")
} }
} }

View File

@ -10,7 +10,7 @@ import (
func TestNewWordFilter(t *testing.T) { func TestNewWordFilter(t *testing.T) {
f, _ := NewWordFilter("200,301,400-410,500") f, _ := NewWordFilter("200,301,400-410,500")
wordsRepr := f.Repr() wordsRepr := f.Repr()
if strings.Index(wordsRepr, "200,301,400-410,500") == -1 { if !strings.Contains(wordsRepr, "200,301,400-410,500") {
t.Errorf("Word filter was expected to have 4 values") t.Errorf("Word filter was expected to have 4 values")
} }
} }

View File

@ -47,10 +47,7 @@ func (c *CommandInput) IncrementPosition() {
//Next will increment the cursor position, and return a boolean telling if there's iterations left //Next will increment the cursor position, and return a boolean telling if there's iterations left
func (c *CommandInput) Next() bool { func (c *CommandInput) Next() bool {
if c.count >= c.config.InputNum { return c.count < c.config.InputNum
return false
}
return true
} }
//Value returns the input from command stdoutput //Value returns the input from command stdoutput

View File

@ -57,10 +57,7 @@ func (w *WordlistInput) Keyword() string {
//Next will increment the cursor position, and return a boolean telling if there's words left in the list //Next will increment the cursor position, and return a boolean telling if there's words left in the list
func (w *WordlistInput) Next() bool { func (w *WordlistInput) Next() bool {
if w.position >= len(w.data) { return w.position < len(w.data)
return false
}
return true
} }
//IncrementPosition will increment the current position in the inputprovider data slice //IncrementPosition will increment the current position in the inputprovider data slice

View File

@ -25,17 +25,14 @@ func writeCSV(config *ffuf.Config, res []Result, encode bool) error {
for _, inputprovider := range config.InputProviders { for _, inputprovider := range config.InputProviders {
header = append(header, inputprovider.Keyword) header = append(header, inputprovider.Keyword)
} }
header = append(header, staticheaders...)
for _, item := range staticheaders {
header = append(header, item)
}
if err := w.Write(header); err != nil { if err := w.Write(header); err != nil {
return err return err
} }
for _, r := range res { for _, r := range res {
if encode { if encode {
inputs := make(map[string][]byte, 0) inputs := make(map[string][]byte, len(r.Input))
for k, v := range r.Input { for k, v := range r.Input {
inputs[k] = []byte(base64encode(v)) inputs[k] = []byte(base64encode(v))
} }

View File

@ -107,12 +107,10 @@ func (s *Stdoutput) Banner() error {
// Proxies // Proxies
if len(s.config.ProxyURL) > 0 { if len(s.config.ProxyURL) > 0 {
proxy := fmt.Sprintf("%s", s.config.ProxyURL) printOption([]byte("Proxy"), []byte(s.config.ProxyURL))
printOption([]byte("Proxy"), []byte(proxy))
} }
if len(s.config.ReplayProxyURL) > 0 { if len(s.config.ReplayProxyURL) > 0 {
replayproxy := fmt.Sprintf("%s", s.config.ReplayProxyURL) printOption([]byte("ReplayProxy"), []byte(s.config.ReplayProxyURL))
printOption([]byte("ReplayProxy"), []byte(replayproxy))
} }
// Timeout // Timeout
@ -152,7 +150,7 @@ func (s *Stdoutput) Progress(status ffuf.Progress) {
return return
} }
dur := time.Now().Sub(status.StartedAt) dur := time.Since(status.StartedAt)
runningSecs := int(dur / time.Second) runningSecs := int(dur / time.Second)
var reqRate int64 var reqRate int64
if runningSecs > 0 { if runningSecs > 0 {
@ -289,7 +287,7 @@ func (s *Stdoutput) Result(resp ffuf.Response) {
// Check if we need the data later // Check if we need the data later
if s.config.OutputFile != "" { if s.config.OutputFile != "" {
// No need to store results if we're not going to use them later // No need to store results if we're not going to use them later
inputs := make(map[string][]byte, 0) inputs := make(map[string][]byte, len(resp.Request.Input))
for k, v := range resp.Request.Input { for k, v := range resp.Request.Input {
inputs[k] = v inputs[k] = v
} }
@ -404,14 +402,13 @@ func (s *Stdoutput) resultMultiline(resp ffuf.Response) {
} }
func (s *Stdoutput) resultNormal(resp ffuf.Response) { func (s *Stdoutput) resultNormal(resp ffuf.Response) {
var res_str string res := fmt.Sprintf("%s%-23s [Status: %s, Size: %d, Words: %d, Lines: %d]", TERMINAL_CLEAR_LINE, s.prepareInputsOneLine(resp), s.colorize(fmt.Sprintf("%d", resp.StatusCode), resp.StatusCode), resp.ContentLength, resp.ContentWords, resp.ContentLines)
res_str = fmt.Sprintf("%s%-23s [Status: %s, Size: %d, Words: %d, Lines: %d]", TERMINAL_CLEAR_LINE, s.prepareInputsOneLine(resp), s.colorize(fmt.Sprintf("%d", resp.StatusCode), resp.StatusCode), resp.ContentLength, resp.ContentWords, resp.ContentLines) fmt.Println(res)
fmt.Println(res_str)
} }
func (s *Stdoutput) colorize(input string, status int64) string { func (s *Stdoutput) colorize(input string, status int64) string {
if !s.config.Colors { if !s.config.Colors {
return fmt.Sprintf("%s", input) return input
} }
colorCode := ANSI_CLEAR colorCode := ANSI_CLEAR
if status >= 200 && status < 300 { if status >= 200 && status < 300 {

View File

@ -78,7 +78,7 @@ func (r *SimpleRunner) Prepare(input map[string][]byte) (ffuf.Request, error) {
for keyword, inputitem := range input { for keyword, inputitem := range input {
req.Method = strings.ReplaceAll(req.Method, keyword, string(inputitem)) req.Method = strings.ReplaceAll(req.Method, keyword, string(inputitem))
headers := make(map[string]string, 0) headers := make(map[string]string, len(req.Headers))
for h, v := range req.Headers { for h, v := range req.Headers {
var CanonicalHeader string = textproto.CanonicalMIMEHeaderKey(strings.ReplaceAll(h, keyword, string(inputitem))) var CanonicalHeader string = textproto.CanonicalMIMEHeaderKey(strings.ReplaceAll(h, keyword, string(inputitem)))
headers[CanonicalHeader] = strings.ReplaceAll(v, keyword, string(inputitem)) headers[CanonicalHeader] = strings.ReplaceAll(v, keyword, string(inputitem))