fix negative stats (#1367)
* fix negative stats

* lint

---------

Co-authored-by: Mzack9999 <[email protected]>
dogancanbakir and Mzack9999 authored Sep 18, 2024
1 parent 7592ac1 commit 6537b32
Showing 10 changed files with 45 additions and 8 deletions.
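Reviewer note: read together, the hunks below apply one bookkeeping rule across the sources — every Result a source sends on its channel is paired with exactly one counter increment (s.results++ for subdomains, s.errors++ on error paths) — and the gitlab source gains the stats fields, timing, and Statistics() method it lacked. A minimal runnable sketch of that rule, using hypothetical mirror types (not the subscraping package itself):

	package main

	import "fmt"

	type resultKind int

	const (
		kindSubdomain resultKind = iota
		kindError
	)

	type result struct {
		kind  resultKind
		value string
	}

	// source mirrors the per-source counters the commit adds and uses.
	type source struct {
		results, errors int
	}

	// emit pairs each channel send with exactly one counter bump, so the
	// totals printed later cannot drift from what was actually sent.
	func (s *source) emit(ch chan<- result, r result) {
		ch <- r
		if r.kind == kindError {
			s.errors++
		} else {
			s.results++
		}
	}

	func main() {
		s := &source{}
		ch := make(chan result, 2)
		s.emit(ch, result{kind: kindSubdomain, value: "a.example.com"})
		s.emit(ch, result{kind: kindError})
		fmt.Println(s.results, s.errors) // 1 1
	}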
6 changes: 4 additions & 2 deletions v2/pkg/runner/stats.go
@@ -30,12 +30,14 @@ func printStatistics(stats map[string]subscraping.Statistics) {
 
 	if len(lines) > 0 {
 		gologger.Print().Msgf("\n Source Duration Results Errors\n%s\n", strings.Repeat("─", 56))
-		gologger.Print().Msgf("%s\n", strings.Join(lines, "\n"))
+		gologger.Print().Msg(strings.Join(lines, "\n"))
+		gologger.Print().Msgf("\n")
 	}
 
 	if len(skipped) > 0 {
 		gologger.Print().Msgf("\n The following sources were included but skipped...\n\n")
-		gologger.Print().Msgf("%s\n\n", strings.Join(skipped, "\n"))
+		gologger.Print().Msg(strings.Join(skipped, "\n"))
+		gologger.Print().Msgf("\n\n")
 	}
 }
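Aside (context, not part of the commit): gologger's Msgf treats its first argument as a printf-style format, while Msg prints its argument verbatim. Old and new forms print the same bytes here — the joined text was already passed as data to a constant "%s\n" format — but routing pre-built text through Msg keeps it out of format position entirely, where a stray '%' would be misparsed. Illustrated with fmt, whose semantics Msg/Msgf presumably mirror:

	package main

	import "fmt"

	func main() {
		line := "progress: 100%"
		fmt.Printf("%s\n", line) // safe: line is an argument to a constant format
		fmt.Print(line, "\n")    // safe: printed verbatim
		fmt.Printf(line)         // line parsed as a format: "progress: 100%!(NOVERB)"
	}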
3 changes: 3 additions & 0 deletions v2/pkg/subscraping/sources/bevigil/bevigil.go
@@ -48,6 +48,7 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 		})
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+			s.errors++
 			session.DiscardHTTPResponse(resp)
 			return
 		}
@@ -57,6 +58,7 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 		err = jsoniter.NewDecoder(resp.Body).Decode(&response)
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+			s.errors++
 			resp.Body.Close()
 			return
 		}
@@ -69,6 +71,7 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 
 		for _, subdomain := range subdomains {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
+			s.results++
 		}
 
 	}()
3 changes: 2 additions & 1 deletion v2/pkg/subscraping/sources/binaryedge/binaryedge.go
@@ -3,6 +3,7 @@ package binaryedge
 
 import (
 	"context"
+	"errors"
 	"fmt"
 	"math"
 	"net/url"
@@ -119,7 +120,7 @@ func (s *Source) enumerate(ctx context.Context, session *subscraping.Session, ba
 
 		// Check error messages
 		if response.Message != "" && response.Status != nil {
-			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: fmt.Errorf("%s", response.Message)}
+			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: errors.New(response.Message)}
 			s.errors++
 			return
 		}
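The errors.New swap (with the matching import above) looks like the "lint" half of this PR: with nothing to interpolate, fmt.Errorf("%s", msg) spends a formatting pass to build what errors.New(msg) returns directly, and linters such as perfsprint flag exactly this pattern. The two are interchangeable here:

	package main

	import (
		"errors"
		"fmt"
	)

	func main() {
		msg := "unauthorized" // stand-in for response.Message
		err1 := fmt.Errorf("%s", msg)
		err2 := errors.New(msg)
		fmt.Println(err1.Error() == err2.Error()) // true
	}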
2 changes: 1 addition & 1 deletion v2/pkg/subscraping/sources/bufferover/bufferover.go
@@ -97,8 +97,8 @@ func (s *Source) getData(ctx context.Context, sourceURL string, apiKey string, s
 		for _, subdomain := range subdomains {
 			for _, value := range session.Extractor.Extract(subdomain) {
 				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: value}
+				s.results++
 			}
-			s.results++
 		}
 	}
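This hunk, like the crtsh and facebook ones below, is the heart of the miscount: the increment must sit beside the send, firing once per value actually emitted rather than once per outer-loop iteration, since Extract can yield zero or several values per input. Schematically (a condensed view of the hunk above, with explanatory comments added):

	for _, subdomain := range subdomains {
		for _, value := range session.Extractor.Extract(subdomain) { // yields 0..n values
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: value}
			s.results++ // counts exactly what was sent
		}
		// the old s.results++ here counted inputs, not emitted results
	}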
2 changes: 2 additions & 0 deletions v2/pkg/subscraping/sources/commoncrawl/commoncrawl.go
@@ -131,6 +131,7 @@ func (s *Source) getSubdomains(ctx context.Context, searchURL, domain string, se
 	resp, err := session.Get(ctx, fmt.Sprintf("%s?url=*.%s", searchURL, domain), "", headers)
 	if err != nil {
 		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+		s.errors++
 		session.DiscardHTTPResponse(resp)
 		return false
 	}
@@ -150,6 +151,7 @@ func (s *Source) getSubdomains(ctx context.Context, searchURL, domain string, se
 			subdomain = strings.TrimPrefix(subdomain, "2f")
 
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
+			s.results++
 		}
 	}
 }
2 changes: 1 addition & 1 deletion v2/pkg/subscraping/sources/crtsh/crtsh.go
@@ -156,8 +156,8 @@ func (s *Source) getSubdomainsFromHTTP(ctx context.Context, domain string, sessi
 		for _, value := range session.Extractor.Extract(sub) {
 			if value != "" {
 				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: value}
+				s.results++
 			}
-			s.results++
 		}
 	}
 }
2 changes: 1 addition & 1 deletion v2/pkg/subscraping/sources/facebook/ctlogs.go
@@ -122,8 +122,8 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 		}
 		for _, v := range response.Data {
 			for _, domain := range v.Domains {
-				s.results++
 				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: domain}
+				s.results++
 			}
 		}
 		if response.Paging.Next == "" {
29 changes: 27 additions & 2 deletions v2/pkg/subscraping/sources/gitlab/gitlab.go
@@ -9,6 +9,7 @@ import (
 	"regexp"
 	"strings"
 	"sync"
+	"time"
 
 	jsoniter "github.com/json-iterator/go"
 	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
@@ -17,7 +18,11 @@ import (
 
 // Source is the passive scraping agent
 type Source struct {
-	apiKeys []string
+	apiKeys   []string
+	timeTaken time.Duration
+	errors    int
+	results   int
+	skipped   bool
 }
 
 type item struct {
@@ -30,9 +35,14 @@ type item struct {
 // Run function returns all subdomains found with the service
 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
 	results := make(chan subscraping.Result)
+	s.errors = 0
+	s.results = 0
 
 	go func() {
-		defer close(results)
+		defer func(startTime time.Time) {
+			s.timeTaken = time.Since(startTime)
+			close(results)
+		}(time.Now())
 
 		randomApiKey := subscraping.PickRandom(s.apiKeys, s.Name())
 		if randomApiKey == "" {
@@ -59,6 +69,7 @@ func (s *Source) enumerate(ctx context.Context, searchURL string, domainRegexp *
 	resp, err := session.Get(ctx, searchURL, "", headers)
 	if err != nil && resp == nil {
 		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+		s.errors++
 		session.DiscardHTTPResponse(resp)
 		return
 	}
@@ -69,6 +80,7 @@ func (s *Source) enumerate(ctx context.Context, searchURL string, domainRegexp *
 	err = jsoniter.NewDecoder(resp.Body).Decode(&items)
 	if err != nil {
 		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+		s.errors++
 		return
 	}
 
@@ -85,6 +97,7 @@ func (s *Source) enumerate(ctx context.Context, searchURL string, domainRegexp *
 			session.DiscardHTTPResponse(resp)
 
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+			s.errors++
 			return
 		}
 	}
@@ -98,6 +111,7 @@ func (s *Source) enumerate(ctx context.Context, searchURL string, domainRegexp *
 		}
 		for _, subdomain := range domainRegexp.FindAllString(line, -1) {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
+			s.results++
 		}
 	}
 	resp.Body.Close()
@@ -114,6 +128,7 @@ func (s *Source) enumerate(ctx context.Context, searchURL string, domainRegexp *
 		nextURL, err := url.QueryUnescape(link.URL)
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+			s.errors++
 			return
 		}
 
@@ -149,3 +164,13 @@ func (s *Source) NeedsKey() bool {
 func (s *Source) AddApiKeys(keys []string) {
 	s.apiKeys = keys
 }
+
+// Statistics returns the statistics for the source
+func (s *Source) Statistics() subscraping.Statistics {
+	return subscraping.Statistics{
+		Errors:    s.errors,
+		Results:   s.results,
+		TimeTaken: s.timeTaken,
+		Skipped:   s.skipped,
+	}
+}
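gitlab is the largest change because this source previously had no stats plumbing at all. The rewritten defer in Run is the one subtle line: arguments to a deferred closure are evaluated when the defer statement executes, so time.Now() is captured as the goroutine starts, while time.Since runs when the closure fires at exit. A standalone sketch of the idiom:

	package main

	import (
		"fmt"
		"time"
	)

	func main() {
		var taken time.Duration
		done := make(chan struct{})
		go func() {
			defer func(start time.Time) { // start is bound here, at defer time
				taken = time.Since(start) // runs when the goroutine returns
				close(done)
			}(time.Now())
			time.Sleep(50 * time.Millisecond) // stand-in for the enumeration work
		}()
		<-done
		fmt.Println(taken) // ≈50ms
	}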
3 changes: 3 additions & 0 deletions v2/pkg/subscraping/sources/hunter/hunter.go
@@ -72,6 +72,7 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 		err = jsoniter.NewDecoder(resp.Body).Decode(&response)
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+			s.errors++
 			resp.Body.Close()
 			return
 		}
@@ -81,13 +82,15 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 			results <- subscraping.Result{
 				Source: s.Name(), Type: subscraping.Error, Error: fmt.Errorf("%s", response.Message),
 			}
+			s.errors++
 			return
 		}
 
 		if response.Data.Total > 0 {
 			for _, hunterInfo := range response.Data.InfoArr {
 				subdomain := hunterInfo.Domain
 				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
+				s.results++
 			}
 		}
 		pages = int(response.Data.Total/1000) + 1
1 change: 1 addition & 0 deletions v2/pkg/subscraping/sources/threatbook/threatbook.go
@@ -88,6 +88,7 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 		if total > 0 {
 			for _, subdomain := range response.Data.SubDomains.Data {
 				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
+				s.results++
 			}
 		}
 	}()
