
Merge pull request #73 from lc/lc/add-timeout
feat(gau): add timeout flag for http client
lc authored Apr 14, 2022
2 parents d102582 + 8627404 commit d1c5e72
Showing 8 changed files with 18 additions and 12 deletions.
1 change: 1 addition & 0 deletions README.md
@@ -39,6 +39,7 @@ $ gau -h
|`--providers`| list of providers to use (wayback,commoncrawl,otx,urlscan) | gau --providers wayback|
|`--proxy`| http proxy to use (socks5:// or http://) | gau --proxy http://proxy.example.com:8080 |
|`--retries`| retries for HTTP client | gau --retries 10 |
+|`--timeout`| timeout (in seconds) for HTTP client | gau --timeout 60 |
|`--subs`| include subdomains of target domain | gau example.com --subs |
|`--threads`| number of workers to spawn | gau example.com --threads |
|`--to`| fetch urls to date (format: YYYYMM) | gau example.com --to 202101 |
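The row added above documents this commit's new flag: for example, `gau --timeout 60 example.com` caps each HTTP request at 60 seconds instead of the previously hard-coded 45 (see the `client.go` change below).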
6 changes: 3 additions & 3 deletions pkg/httpclient/client.go
@@ -15,12 +15,12 @@ type Header struct {
    Value string
}

-func MakeRequest(c *fasthttp.Client, url string, maxRetries int, headers ...Header) ([]byte, error) {
+func MakeRequest(c *fasthttp.Client, url string, maxRetries uint, timeout uint, headers ...Header) ([]byte, error) {
    var (
        req  *fasthttp.Request
        resp *fasthttp.Response
    )
-   retries := maxRetries
+   retries := int(maxRetries)
    for i := retries; i >= 0; i-- {
        req = fasthttp.AcquireRequest()
        defer fasthttp.ReleaseRequest(req)
@@ -35,7 +35,7 @@ func MakeRequest(c *fasthttp.Client, url string, maxRetries uint, timeout uint, headers ...Header) ([]byte, error) {
        resp = fasthttp.AcquireResponse()
        defer fasthttp.ReleaseResponse(resp)

-       if err := c.DoTimeout(req, resp, time.Second*45); err != nil {
+       if err := c.DoTimeout(req, resp, time.Second*time.Duration(timeout)); err != nil {
            fasthttp.ReleaseRequest(req)
            if retries == 0 {
                return nil, err
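For reference, fasthttp's `Client.DoTimeout` takes a `time.Duration`, which is why the flag's `uint` seconds value is converted via `time.Second*time.Duration(timeout)`. A minimal, self-contained sketch of the same call pattern outside gau (the URL and the 45-second value here are illustrative only):

```go
package main

import (
	"fmt"
	"time"

	"github.com/valyala/fasthttp"
)

func main() {
	client := &fasthttp.Client{}

	req := fasthttp.AcquireRequest()
	defer fasthttp.ReleaseRequest(req)
	req.SetRequestURI("http://example.com") // illustrative URL, not from the diff

	resp := fasthttp.AcquireResponse()
	defer fasthttp.ReleaseResponse(resp)

	// In gau this value would come from the --timeout flag; 45 mirrors the new default.
	var timeout uint = 45
	if err := client.DoTimeout(req, resp, time.Second*time.Duration(timeout)); err != nil {
		fmt.Println("request failed:", err) // fasthttp returns ErrTimeout when the deadline passes
		return
	}
	fmt.Println("status:", resp.StatusCode())
}
```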
6 changes: 3 additions & 3 deletions pkg/providers/commoncrawl/commoncrawl.go
@@ -31,7 +31,7 @@ type Client struct {
func New(c *providers.Config, filters providers.Filters) (*Client, error) {
    client := &Client{config: c, filters: filters}
    // Fetch the list of available CommonCrawl API URLs.
-   resp, err := httpclient.MakeRequest(c.Client, "http://index.commoncrawl.org/collinfo.json", int(c.MaxRetries))
+   resp, err := httpclient.MakeRequest(c.Client, "http://index.commoncrawl.org/collinfo.json", c.MaxRetries, c.Timeout)
    if err != nil {
        return nil, err
    }
@@ -78,7 +78,7 @@ paginate:
            logrus.WithFields(logrus.Fields{"provider": Name, "page": page}).Infof("fetching %s", domain)
        }
        apiURL := c.formatURL(domain, page)
-       resp, err := httpclient.MakeRequest(c.config.Client, apiURL, int(c.config.MaxRetries))
+       resp, err := httpclient.MakeRequest(c.config.Client, apiURL, c.config.MaxRetries, c.config.Timeout)
        if err != nil {
            return fmt.Errorf("failed to fetch commoncrawl(%d): %s", page, err)
        }
@@ -114,7 +114,7 @@ func (c *Client) formatURL(domain string, page uint) string {
func (c *Client) getPagination(domain string) (paginationResult, error) {
    url := fmt.Sprintf("%s&showNumPages=true", c.formatURL(domain, 0))

-   resp, err := httpclient.MakeRequest(c.config.Client, url, int(c.config.MaxRetries))
+   resp, err := httpclient.MakeRequest(c.config.Client, url, c.config.MaxRetries, c.config.Timeout)
    if err != nil {
        return paginationResult{}, err
    }
2 changes: 1 addition & 1 deletion pkg/providers/otx/otx.go
@@ -56,7 +56,7 @@ paginate:
            logrus.WithFields(logrus.Fields{"provider": Name, "page": page - 1}).Infof("fetching %s", domain)
        }
        apiURL := c.formatURL(domain, page)
-       resp, err := httpclient.MakeRequest(c.config.Client, apiURL, int(c.config.MaxRetries))
+       resp, err := httpclient.MakeRequest(c.config.Client, apiURL, c.config.MaxRetries, c.config.Timeout)
        if err != nil {
            return fmt.Errorf("failed to fetch alienvault(%d): %s", page, err)
        }
3 changes: 2 additions & 1 deletion pkg/providers/providers.go
@@ -5,7 +5,7 @@ import (
"github.com/valyala/fasthttp"
)

const Version = `2.0.9`
const Version = `2.1.0`

// Provider is a generic interface for all archive fetchers
type Provider interface {
@@ -20,6 +20,7 @@ type URLScan struct {

type Config struct {
    Threads           uint
+   Timeout           uint
    Verbose           bool
    MaxRetries        uint
    IncludeSubdomains bool
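With `Timeout` living on `providers.Config` alongside `MaxRetries`, every provider can pass both values straight through to `httpclient.MakeRequest` without conversion. A hedged sketch of such a call site, assuming the `github.com/lc/gau/v2` module path and that `Config` exposes a `Client *fasthttp.Client` field (as the `c.Client` usage in the CommonCrawl diff suggests):

```go
package main

import (
	"fmt"

	"github.com/lc/gau/v2/pkg/httpclient"
	"github.com/lc/gau/v2/pkg/providers"
	"github.com/valyala/fasthttp"
)

func main() {
	// Field names follow the providers.go diff; Client is inferred from commoncrawl.go.
	cfg := &providers.Config{
		Client:     &fasthttp.Client{},
		MaxRetries: 5,
		Timeout:    60, // seconds, as supplied by --timeout
	}

	// Retries and timeout now travel together through the shared config.
	body, err := httpclient.MakeRequest(cfg.Client, "http://index.commoncrawl.org/collinfo.json", cfg.MaxRetries, cfg.Timeout)
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	fmt.Printf("fetched %d bytes\n", len(body))
}
```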
2 changes: 1 addition & 1 deletion pkg/providers/urlscan/urlscan.go
@@ -51,7 +51,7 @@ paginate:
            logrus.WithFields(logrus.Fields{"provider": Name, "page": page}).Infof("fetching %s", domain)
        }
        apiURL := c.formatURL(domain, searchAfter)
-       resp, err := httpclient.MakeRequest(c.config.Client, apiURL, int(c.config.MaxRetries), header)
+       resp, err := httpclient.MakeRequest(c.config.Client, apiURL, c.config.MaxRetries, c.config.Timeout, header)
        if err != nil {
            return fmt.Errorf("failed to fetch urlscan: %s", err)
        }
5 changes: 3 additions & 2 deletions pkg/providers/wayback/wayback.go
@@ -53,7 +53,7 @@ func (c *Client) Fetch(ctx context.Context, domain string, results chan string)
        }
        apiURL := c.formatURL(domain, page)
        // make HTTP request
-       resp, err := httpclient.MakeRequest(c.config.Client, apiURL, int(c.config.MaxRetries))
+       resp, err := httpclient.MakeRequest(c.config.Client, apiURL, c.config.MaxRetries, c.config.Timeout)
        if err != nil {
            return fmt.Errorf("failed to fetch wayback results page %d: %s", page, err)
        }
@@ -96,7 +96,8 @@ func (c *Client) formatURL(domain string, page uint) string {
// getPagination returns the number of pages for Wayback
func (c *Client) getPagination(domain string) (uint, error) {
    url := fmt.Sprintf("%s&showNumPages=true", c.formatURL(domain, 0))
-   resp, err := httpclient.MakeRequest(c.config.Client, url, int(c.config.MaxRetries))
+   resp, err := httpclient.MakeRequest(c.config.Client, url, c.config.MaxRetries, c.config.Timeout)
+
    if err != nil {
        return 0, err
    }
5 changes: 4 additions & 1 deletion runner/flags/flags.go
@@ -26,6 +26,7 @@ type Config struct {
    Filters           providers.Filters `mapstructure:"filters"`
    Proxy             string            `mapstructure:"proxy"`
    Threads           uint              `mapstructure:"threads"`
+   Timeout           uint              `mapstructure:"timeout"`
    Verbose           bool              `mapstructure:"verbose"`
    MaxRetries        uint              `mapstructure:"retries"`
    IncludeSubdomains bool              `mapstructure:"subdomains"`
@@ -58,6 +59,7 @@ func (c *Config) ProviderConfig() (*providers.Config, error) {

    pc := &providers.Config{
        Threads:           c.Threads,
+       Timeout:           c.Timeout,
        Verbose:           c.Verbose,
        MaxRetries:        c.MaxRetries,
        IncludeSubdomains: c.IncludeSubdomains,
@@ -94,7 +96,8 @@ func New() *Options {
    v := viper.New()

    pflag.String("o", "", "filename to write results to")
-   pflag.Uint("threads", 1, "number of workers to spawn, default: 1")
+   pflag.Uint("threads", 1, "number of workers to spawn")
+   pflag.Uint("timeout", 45, "timeout (in seconds) for HTTP client")
    pflag.Uint("retries", 0, "retries for HTTP client")
    pflag.String("proxy", "", "http proxy to use")
    pflag.StringSlice("blacklist", []string{}, "list of extensions to skip")
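The `mapstructure` tags on `Config` are what let viper unmarshal bound pflag values into the struct, so `--timeout` flows from the command line into `Config.Timeout` and on to `providers.Config` with no extra glue. A trimmed-down sketch of that wiring; the `BindPFlags`/`Unmarshal` steps are the standard pflag-to-viper pattern and an assumption about how the surrounding (unshown) code connects them:

```go
package main

import (
	"fmt"

	"github.com/spf13/pflag"
	"github.com/spf13/viper"
)

// Config mirrors two fields from runner/flags/flags.go; the full struct has more.
type Config struct {
	Threads uint `mapstructure:"threads"`
	Timeout uint `mapstructure:"timeout"`
}

func main() {
	v := viper.New()

	pflag.Uint("threads", 1, "number of workers to spawn")
	pflag.Uint("timeout", 45, "timeout (in seconds) for HTTP client")
	pflag.Parse()

	// Bind every defined flag, then let the mapstructure tags route values into Config.
	if err := v.BindPFlags(pflag.CommandLine); err != nil {
		panic(err)
	}
	var c Config
	if err := v.Unmarshal(&c); err != nil {
		panic(err)
	}
	fmt.Printf("threads=%d timeout=%ds\n", c.Threads, c.Timeout)
}
```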
