RedHunt Labs Attack Surface Recon API Integration #978

Merged (11 commits, Sep 12, 2023)
5 changes: 4 additions & 1 deletion README.md
@@ -121,7 +121,7 @@ go install -v github.com/projectdiscovery/subfinder/v2/cmd/subfinder@latest

`subfinder` can be used right after installation; however, the following services require configured API keys to work:

[BeVigil](https://bevigil.com/osint-api), [BinaryEdge](https://binaryedge.io), [BufferOver](https://tls.bufferover.run), [C99](https://api.c99.nl/), [Censys](https://censys.io), [CertSpotter](https://sslmate.com/certspotter/api/), [Chaos](https://chaos.projectdiscovery.io), [Chinaz](http://my.chinaz.com/ChinazAPI/DataCenter/MyDataApi), [DnsDB](https://api.dnsdb.info), [Fofa](https://fofa.info/static_pages/api_help), [FullHunt](https://fullhunt.io), [GitHub](https://github.com), [Intelx](https://intelx.io), [PassiveTotal](http://passivetotal.org), [quake](https://quake.360.cn), [Robtex](https://www.robtex.com/api/), [SecurityTrails](http://securitytrails.com), [Shodan](https://shodan.io), [ThreatBook](https://x.threatbook.cn/en), [VirusTotal](https://www.virustotal.com), [WhoisXML API](https://whoisxmlapi.com/), [ZoomEye](https://www.zoomeye.org), [ZoomEye API](https://api.zoomeye.org), [dnsrepo](https://dnsrepo.noc.org), [Hunter](https://hunter.qianxin.com/), [Facebook](https://developers.facebook.com)
[BeVigil](https://bevigil.com/osint-api), [BinaryEdge](https://binaryedge.io), [BufferOver](https://tls.bufferover.run), [C99](https://api.c99.nl/), [Censys](https://censys.io), [CertSpotter](https://sslmate.com/certspotter/api/), [Chaos](https://chaos.projectdiscovery.io), [Chinaz](http://my.chinaz.com/ChinazAPI/DataCenter/MyDataApi), [DnsDB](https://api.dnsdb.info), [Fofa](https://fofa.info/static_pages/api_help), [FullHunt](https://fullhunt.io), [GitHub](https://github.com), [Intelx](https://intelx.io), [PassiveTotal](http://passivetotal.org), [quake](https://quake.360.cn), [RedHunt Labs](https://devportal.redhuntlabs.com/), [Robtex](https://www.robtex.com/api/), [SecurityTrails](http://securitytrails.com), [Shodan](https://shodan.io), [ThreatBook](https://x.threatbook.cn/en), [VirusTotal](https://www.virustotal.com), [WhoisXML API](https://whoisxmlapi.com/), [ZoomEye](https://www.zoomeye.org), [ZoomEye API](https://api.zoomeye.org), [dnsrepo](https://dnsrepo.noc.org), [Hunter](https://hunter.qianxin.com/), [Facebook](https://developers.facebook.com)

You can also use the `subfinder -ls` command to display all the available sources.

@@ -141,6 +141,9 @@ censys:
certspotter: []
passivetotal:
- [email protected]:sample_password
redhuntlabs:
- ENDPOINT:API_TOKEN
- https://reconapi.redhuntlabs.com/community/v1/domains/subdomains:joEPzJJp2AuOCw7teAj63HYrPGnsxuPQ
securitytrails: []
shodan:
- AAAAClP1bJJSRMEYJazgwhJKrggRwKA
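Before the code changes below, a quick note on the `redhuntlabs` entry format: it packs the API endpoint and the token into a single `ENDPOINT:API_TOKEN` string. A minimal sketch of how that value is split, using the placeholder endpoint and token from the config snippet above (not real credentials): the part before the token becomes the request URL, and the token is sent as the `X-BLOBR-KEY` header.

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// Sample ENDPOINT:API_TOKEN value, copied from the provider config above (placeholder token).
	cred := "https://reconapi.redhuntlabs.com/community/v1/domains/subdomains:joEPzJJp2AuOCw7teAj63HYrPGnsxuPQ"

	// The https endpoint contributes one ":" of its own, so a valid value splits into three parts.
	parts := strings.Split(cred, ":")
	if len(parts) != 3 {
		fmt.Println("expected ENDPOINT:API_TOKEN")
		return
	}

	endpoint := parts[0] + ":" + parts[1] // request URL, e.g. https://reconapi.redhuntlabs.com/...
	token := parts[2]                     // sent as the X-BLOBR-KEY header

	fmt.Println(endpoint)
	fmt.Println(token)
}
```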
2 changes: 2 additions & 0 deletions v2/pkg/passive/sources.go
@@ -36,6 +36,7 @@ import (
"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/passivetotal"
"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/quake"
"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/rapiddns"
"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/redhuntlabs"
"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/riddler"
"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/robtex"
"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/securitytrails"
@@ -77,6 +78,7 @@ var AllSources = [...]subscraping.Source{
&passivetotal.Source{},
&quake.Source{},
&rapiddns.Source{},
&redhuntlabs.Source{},
&riddler.Source{},
&robtex.Source{},
&securitytrails.Source{},
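The new source is registered in the `AllSources` list above, next to the other passive sources. As a hedged usage sketch (assuming the exported `passive.AllSources` variable shown in this diff and the methods that every source, including the new `redhuntlabs` one, implements), the registry can be enumerated roughly like this, similar in spirit to `subfinder -ls`:

```go
package main

import (
	"fmt"

	"github.com/projectdiscovery/subfinder/v2/pkg/passive"
)

func main() {
	// Walk every registered passive source and print its capabilities.
	// The method names mirror what the redhuntlabs source implements below.
	for _, source := range passive.AllSources {
		fmt.Printf("%-20s default=%v needs-key=%v recursive=%v\n",
			source.Name(), source.IsDefault(), source.NeedsKey(), source.HasRecursiveSupport())
	}
}
```

Each entry satisfies the `subscraping.Source` interface, which is why adding a provider only needs the new package plus this one-line registration.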
2 changes: 2 additions & 0 deletions v2/pkg/passive/sources_test.go
@@ -36,6 +36,7 @@ var (
"passivetotal",
"quake",
"rapiddns",
"redhuntlabs",
"riddler",
"robtex",
"securitytrails",
@@ -74,6 +75,7 @@ var (
"intelx",
"passivetotal",
"quake",
"redhuntlabs",
"robtex",
"riddler",
"securitytrails",
143 changes: 143 additions & 0 deletions v2/pkg/subscraping/sources/redhuntlabs/redhuntlabs.go
@@ -0,0 +1,143 @@
// Package redhuntlabs logic
package redhuntlabs

import (
"context"
"errors"
"strconv"
"strings"
"time"

jsoniter "github.com/json-iterator/go"

"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
)

type Response struct {
Subdomains []string `json:"subdomains"`
Metadata ResponseMetadata `json:"metadata"`
}

type ResponseMetadata struct {
ResultCount int `json:"result_count"`
PageSize int `json:"page_size"`
PageNumber int `json:"page_number"`
}

type Source struct {
apiKeys []string
timeTaken time.Duration
errors int
results int
skipped bool
}

func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
results := make(chan subscraping.Result)
s.errors = 0
s.results = 0
pageSize := 1000
go func() {
defer func(startTime time.Time) {
s.timeTaken = time.Since(startTime)
close(results)
}(time.Now())

randomCred := subscraping.PickRandom(s.apiKeys, s.Name())
if randomCred == "" || !strings.Contains(randomCred, ":") {
s.skipped = true
return
}

creds := strings.Split(randomCred, ":")
if len(creds) != 3 {
// the configured value must look like ENDPOINT:API_TOKEN, where ENDPOINT is the full https URL
s.skipped = true
return
}
baseUrl := creds[0] + ":" + creds[1]
getUrl := baseUrl + "?domain=" + domain + "&page=1&page_size=" + strconv.Itoa(pageSize)
resp, err := session.Get(ctx, getUrl, "", map[string]string{
"X-BLOBR-KEY": creds[2], "User-Agent": "subfinder",
})
if err != nil {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: errors.New("if you get a 'limit has been reached' error, head over to https://devportal.redhuntlabs.com")}
s.errors++
session.DiscardHTTPResponse(resp)
return
}
var response Response
err = jsoniter.NewDecoder(resp.Body).Decode(&response)
if err != nil {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
s.errors++
resp.Body.Close()
return
}

resp.Body.Close()
if response.Metadata.ResultCount > pageSize {
totalPages := (response.Metadata.ResultCount + pageSize - 1) / pageSize
for page := 1; page <= totalPages; page++ {
getUrl = baseUrl + "?domain=" + domain + "&page=" + strconv.Itoa(page) + "&page_size=" + strconv.Itoa(pageSize)
resp, err := session.Get(ctx, getUrl, "", map[string]string{
"X-BLOBR-KEY": creds[2], "User-Agent": "subfinder",
})
if err != nil {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: errors.New("if you get a 'limit has been reached' error, head over to https://devportal.redhuntlabs.com/ to upgrade your subscription")}
s.errors++
session.DiscardHTTPResponse(resp)
continue
}

var subdomains []string
err = jsoniter.NewDecoder(resp.Body).Decode(&response)
if err != nil {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
s.errors++
resp.Body.Close()
continue
}

resp.Body.Close()
if len(response.Subdomains) > 0 {
subdomains = response.Subdomains
}

for _, subdomain := range subdomains {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
s.results++
}
}
} else {
if len(response.Subdomains) > 0 {
for _, subdomain := range response.Subdomains {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
s.results++
}
}
}

}()
return results
}

func (s *Source) Name() string {
return "redhuntlabs"
}

func (s *Source) IsDefault() bool {
return true
}

func (s *Source) HasRecursiveSupport() bool {
return false
}

func (s *Source) NeedsKey() bool {
return true
}

func (s *Source) AddApiKeys(keys []string) {
s.apiKeys = keys
}

func (s *Source) Statistics() subscraping.Statistics {
return subscraping.Statistics{
Errors: s.errors,
Results: s.results,
TimeTaken: s.timeTaken,
Skipped: s.skipped,
}
}
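
One detail of `Run` worth spelling out: when `result_count` exceeds the page size, the total number of pages comes from ceiling division. A small illustration with made-up counts (page size 1000 matches the hard-coded value in the source):

```go
package main

import "fmt"

func main() {
	// Hypothetical counts; the source always requests page_size=1000.
	pageSize := 1000
	resultCount := 2345

	// Ceiling division, as in Run: 2345 results -> 3 pages (1-1000, 1001-2000, 2001-2345).
	totalPages := (resultCount + pageSize - 1) / pageSize
	fmt.Println(totalPages) // 3
}
```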