Update dnsdb to v2 and add dnsdb offset use
kelvinatorr committed Oct 5, 2023
1 parent 9986743 commit 56230c6
Showing 3 changed files with 135 additions and 26 deletions.
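At a high level, the commit moves the source from DNSDB's v1 lookup API to the v2 API with Streaming API Framing (SAF), and paginates via the `offset` query parameter when a query hits the result limit. The endpoint change, with example.com as a stand-in domain:

```
# v1 (before this commit)
https://api.dnsdb.info/lookup/rrset/name/*.example.com?limit=1000000000000

# v2 (after this commit)
https://api.dnsdb.info/dnsdb/v2/lookup/rrset/name/*.example.com?limit=0&swclient=subfinder
```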
README.md: 2 changes (1 addition & 1 deletion)
@@ -121,7 +121,7 @@ go install -v github.com/projectdiscovery/subfinder/v2/cmd/subfinder@latest

`subfinder` can be used right after the installation, however the following services require configuring API keys to work:

[BeVigil](https://bevigil.com/osint-api), [BinaryEdge](https://binaryedge.io), [BufferOver](https://tls.bufferover.run), [C99](https://api.c99.nl/), [Censys](https://censys.io), [CertSpotter](https://sslmate.com/certspotter/api/), [Chaos](https://chaos.projectdiscovery.io), [Chinaz](http://my.chinaz.com/ChinazAPI/DataCenter/MyDataApi), [DnsDB](https://api.dnsdb.info), [Fofa](https://fofa.info/static_pages/api_help), [FullHunt](https://fullhunt.io), [GitHub](https://github.com), [Intelx](https://intelx.io), [PassiveTotal](http://passivetotal.org), [quake](https://quake.360.cn), [Robtex](https://www.robtex.com/api/), [SecurityTrails](http://securitytrails.com), [Shodan](https://shodan.io), [ThreatBook](https://x.threatbook.cn/en), [VirusTotal](https://www.virustotal.com), [WhoisXML API](https://whoisxmlapi.com/), [ZoomEye](https://www.zoomeye.org), [ZoomEye API](https://api.zoomeye.org), [dnsrepo](https://dnsrepo.noc.org), [Hunter](https://hunter.qianxin.com/), [Facebook](https://developers.facebook.com), [BuiltWith](https://api.builtwith.com/domain-api)
[BeVigil](https://bevigil.com/osint-api), [BinaryEdge](https://binaryedge.io), [BufferOver](https://tls.bufferover.run), [C99](https://api.c99.nl/), [Censys](https://censys.io), [CertSpotter](https://sslmate.com/certspotter/api/), [Chaos](https://chaos.projectdiscovery.io), [Chinaz](http://my.chinaz.com/ChinazAPI/DataCenter/MyDataApi), [DNSDB](https://api.dnsdb.info), [Fofa](https://fofa.info/static_pages/api_help), [FullHunt](https://fullhunt.io), [GitHub](https://github.com), [Intelx](https://intelx.io), [PassiveTotal](http://passivetotal.org), [quake](https://quake.360.cn), [Robtex](https://www.robtex.com/api/), [SecurityTrails](http://securitytrails.com), [Shodan](https://shodan.io), [ThreatBook](https://x.threatbook.cn/en), [VirusTotal](https://www.virustotal.com), [WhoisXML API](https://whoisxmlapi.com/), [ZoomEye](https://www.zoomeye.org), [ZoomEye API](https://api.zoomeye.org), [dnsrepo](https://dnsrepo.noc.org), [Hunter](https://hunter.qianxin.com/), [Facebook](https://developers.facebook.com), [BuiltWith](https://api.builtwith.com/domain-api)

You can also use the `subfinder -ls` command to display all the available sources.

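As the README notes, DNSDB requires an API key; keys live in subfinder's provider configuration file, by default `$HOME/.config/subfinder/provider-config.yaml`. A minimal sketch, with a placeholder key:

```yaml
dnsdb:
  - DNSDB_API_KEY_HERE
```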
v2/pkg/passive/sources_test.go: 1 change (1 addition & 0 deletions)
@@ -97,6 +97,7 @@ var (
"certspotter",
"crtsh",
"dnsdumpster",
"dnsdb",
"digitorus",
"hackertarget",
"passivetotal",
v2/pkg/subscraping/sources/dnsdb/dnsdb.go: 158 changes (133 additions & 25 deletions)
@@ -3,9 +3,12 @@ package dnsdb

import (
"bufio"
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"net/url"
"strconv"
"strings"
"time"

@@ -14,7 +17,23 @@ import (
"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
)

type dnsdbResponse struct {
const urlBase string = "https://api.dnsdb.info/dnsdb/v2"

type rateResponse struct {
Rate rate
}

type rate struct {
OffsetMax json.Number `json:"offset_max"`
}

type safResponse struct {
Condition string `json:"cond"`
Obj dnsdbObj `json:"obj"`
Msg string `json:"msg"`
}

type dnsdbObj struct {
Name string `json:"rrname"`
}

@@ -23,7 +42,7 @@ type Source struct {
apiKeys []string
timeTaken time.Duration
errors int
results int
results uint64
skipped bool
}

@@ -39,44 +58,105 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
close(results)
}(time.Now())

randomApiKey := subscraping.PickRandom(s.apiKeys, s.Name())
sourceName := s.Name()

randomApiKey := subscraping.PickRandom(s.apiKeys, sourceName)
if randomApiKey == "" {
return
}

headers := map[string]string{
"X-API-KEY": randomApiKey,
"Accept": "application/json",
"Content-Type": "application/json",
"X-API-KEY": randomApiKey,
"Accept": "application/x-ndjson",
}

resp, err := session.Get(ctx, fmt.Sprintf("https://api.dnsdb.info/lookup/rrset/name/*.%s?limit=1000000000000", domain), "", headers)
offsetMax, err := getMaxOffset(ctx, session, headers)
if err != nil {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
results <- subscraping.Result{Source: sourceName, Type: subscraping.Error, Error: err}
s.errors++
session.DiscardHTTPResponse(resp)
return
}

scanner := bufio.NewScanner(resp.Body)
for scanner.Scan() {
line := scanner.Text()
if line == "" {
continue
}
var response dnsdbResponse
err = jsoniter.NewDecoder(bytes.NewBufferString(line)).Decode(&response)
path := fmt.Sprintf("lookup/rrset/name/*.%s", domain)
urlTemplate := fmt.Sprintf("%s/%s?", urlBase, path)
queryParams := url.Values{}
// ?limit=0 means DNSDB will return the maximum number of results allowed.
queryParams.Add("limit", "0")
queryParams.Add("swclient", "subfinder")

for {
url := urlTemplate + queryParams.Encode()

resp, err := session.Get(ctx, url, "", headers)
if err != nil {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
results <- subscraping.Result{Source: sourceName, Type: subscraping.Error, Error: err}
s.errors++
session.DiscardHTTPResponse(resp)
return
}
results <- subscraping.Result{
Source: s.Name(), Type: subscraping.Subdomain, Value: strings.TrimSuffix(response.Name, "."),

var respCond string
reader := bufio.NewReader(resp.Body)
for {
n, err := reader.ReadBytes('\n')
if err == io.EOF {
break
} else if err != nil {
results <- subscraping.Result{Source: sourceName, Type: subscraping.Error, Error: err}
s.errors++
resp.Body.Close()
return
}

var response safResponse
err = jsoniter.Unmarshal(n, &response)
if err != nil {
results <- subscraping.Result{Source: sourceName, Type: subscraping.Error, Error: err}
s.errors++
resp.Body.Close()
return
}

// Condition is a scalar enum of string values: {"begin", "ongoing", "succeeded", "limited", "failed"}.
// "begin" will be the initiating Condition; it can be safely ignored. The data of interest will be in
// objects with Condition "" or "ongoing". Conditions "succeeded", "limited", and "failed" are terminating conditions.
// See https://www.domaintools.com/resources/user-guides/farsight-streaming-api-framing-protocol-documentation/
// for more details.
respCond = response.Condition
if respCond == "" || respCond == "ongoing" {
if response.Obj.Name != "" {
results <- subscraping.Result{
Source: sourceName, Type: subscraping.Subdomain, Value: strings.TrimSuffix(response.Obj.Name, "."),
}
s.results++
}
} else if respCond != "begin" {
// if the respCond is not "", "ongoing", or "begin", then it is a terminating condition, so break out of the loop
break
}
}
s.results++

// Check the terminating jsonl object's condition. There are 3 possible scenarios:
// 1. "limited" - There are more results available, make another query with an offset
// 2. "succeeded" - The query completed successfully and all results were sent.
// 3. anything else - This is an error and should be reported to the user. The user can then decide to use the results up to this
// point or discard and retry.
if respCond == "limited" {
if offsetMax != 0 && s.results <= offsetMax {
// Request the next page by setting the offset query parameter to s.results, the number of results received so far
queryParams.Set("offset", strconv.FormatUint(s.results, 10))
continue
}
} else if respCond != "succeeded" {
// DNSDB's terminating jsonl object's cond is not "limited" or "succeeded" (#3), so this is an error; notify the user.
err = fmt.Errorf("%s terminated with condition: %s", sourceName, respCond)
results <- subscraping.Result{Source: sourceName, Type: subscraping.Error, Error: err}
s.errors++
}

resp.Body.Close()
break
}
resp.Body.Close()
}()

return results
@@ -92,7 +172,7 @@ func (s *Source) IsDefault() bool {
}

func (s *Source) HasRecursiveSupport() bool {
return false
return true
}

func (s *Source) NeedsKey() bool {
@@ -106,8 +186,36 @@ func (s *Source) AddApiKeys(keys []string) {
func (s *Source) Statistics() subscraping.Statistics {
return subscraping.Statistics{
Errors: s.errors,
Results: s.results,
Results: int(s.results),
TimeTaken: s.timeTaken,
Skipped: s.skipped,
}
}

func getMaxOffset(ctx context.Context, session *subscraping.Session, headers map[string]string) (uint64, error) {
var offsetMax uint64
url := fmt.Sprintf("%s/rate_limit", urlBase)
resp, err := session.Get(ctx, url, "", headers)
defer session.DiscardHTTPResponse(resp)
if err != nil {
return offsetMax, err
}
data, err := io.ReadAll(resp.Body)
if err != nil {
return offsetMax, err
}
var rateResp rateResponse
err = jsoniter.Unmarshal(data, &rateResp)
if err != nil {
return offsetMax, err
}
// if the OffsetMax is "n/a" then the ?offset= query parameter is not allowed
if rateResp.Rate.OffsetMax.String() != "n/a" {
offsetMax, err = strconv.ParseUint(rateResp.Rate.OffsetMax.String(), 10, 64)
if err != nil {
return offsetMax, err
}
}

return offsetMax, nil
}
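To illustrate what the new Run loop consumes: the v2 API streams newline-delimited JSON objects whose `cond` field maps onto the `safResponse` struct above, and `getMaxOffset` reads `offset_max` from the `rate_limit` endpoint. A sketch of both exchanges, with invented values and unrelated fields omitted (`offset_max` can also be the string `"n/a"`, which disables offset paging):

```
GET https://api.dnsdb.info/dnsdb/v2/rate_limit
{"rate": {"offset_max": 3000000}}

GET https://api.dnsdb.info/dnsdb/v2/lookup/rrset/name/*.example.com?limit=0&swclient=subfinder
{"cond": "begin"}
{"obj": {"rrname": "www.example.com."}}
{"obj": {"rrname": "mail.example.com."}}
{"cond": "limited", "msg": "Result limit reached"}
```

On seeing `"limited"`, the loop sets `offset` to the number of results received so far and re-issues the query, as long as that count is still at or below `offset_max`.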
