
Commit 725332c

Merge pull request #15 from projectdiscovery/14-bugfix-adding-options-support
Adding support for auth options + stricter tests
2 parents a34ff2f + 95da81e commit 725332c

File tree: 4 files changed, +102 -44 lines: cdncheck.go, options.go, ranges.go, ranges_test.go

cdncheck.go (+52 -20)
```diff
@@ -12,19 +12,26 @@ import (
 // Client checks for CDN based IPs which should be excluded
 // during scans since they belong to third party firewalls.
 type Client struct {
-	Data   map[string]struct{}
-	ranger cidranger.Ranger
+	Options Options
+	Data    map[string]struct{}
+	ranger  cidranger.Ranger
 }
 
 var defaultScrapers = map[string]scraperFunc{
-	"akamai":     scrapeAkamai,
+	// "akamai":     scrapeAkamai,
 	"azure":      scrapeAzure,
 	"cloudflare": scrapeCloudflare,
 	"cloudfront": scrapeCloudFront,
 	"fastly":     scrapeFastly,
 	"incapsula":  scrapeIncapsula,
-	"sucuri":     scrapeSucuri,
-	"leaseweb":   scrapeLeaseweb,
+	// "sucuri":     scrapeSucuri,
+	// "leaseweb":   scrapeLeaseweb,
+}
+
+var defaultScrapersWithOptions = map[string]scraperWithOptionsFunc{
+	"akamai":   scrapeAkamai,
+	"sucuri":   scrapeSucuri,
+	"leaseweb": scrapeLeaseweb,
 }
 
 var cachedScrapers = map[string]scraperFunc{
@@ -33,15 +40,20 @@ var cachedScrapers = map[string]scraperFunc{
 
 // New creates a new firewall IP checking client.
 func New() (*Client, error) {
-	return new(false)
+	return new(&Options{})
 }
 
 // NewWithCache creates a new firewall IP with cached data from project discovery (faster)
 func NewWithCache() (*Client, error) {
-	return new(true)
+	return new(&Options{Cache: true})
+}
+
+// NewWithOptions creates a new instance with options
+func NewWithOptions(Options *Options) (*Client, error) {
+	return new(Options)
 }
 
-func new(cache bool) (*Client, error) {
+func new(options *Options) (*Client, error) {
 	httpClient := &http.Client{
 		Transport: &http.Transport{
 			MaxIdleConns: 100,
@@ -55,21 +67,31 @@ func new(cache bool) (*Client, error) {
 	}
 	client := &Client{}
 
-	var scrapers map[string]scraperFunc
-	if cache {
-		scrapers = cachedScrapers
+	if options.Cache {
+		for _, scraper := range cachedScrapers {
+			cidrs, err := scraper(httpClient)
+			if err != nil {
+				return nil, err
+			}
+			client.parseCidrs(cidrs)
+		}
 	} else {
-		scrapers = defaultScrapers
+		for _, scraper := range defaultScrapers {
+			cidrs, err := scraper(httpClient)
+			if err != nil {
+				return nil, err
+			}
+			client.parseCidrs(cidrs)
+		}
 	}
 
-	client.Data = make(map[string]struct{})
-	for _, scraper := range scrapers {
-		cidrs, err := scraper(httpClient)
-		if err != nil {
-			return nil, err
-		}
-		for _, cidr := range cidrs {
-			client.Data[cidr] = struct{}{}
+	if options.HasAuthInfo() {
+		for _, scraper := range defaultScrapersWithOptions {
+			cidrs, err := scraper(httpClient, options)
+			if err != nil {
+				return nil, err
+			}
+			client.parseCidrs(cidrs)
 		}
 	}
 
@@ -86,6 +108,16 @@ func new(cache bool) (*Client, error) {
 	return client, nil
 }
 
+// parseCidrs inserts the scraped cidrs to the internal structure
+func (c *Client) parseCidrs(cidrs []string) {
+	if c.Data == nil {
+		c.Data = make(map[string]struct{})
+	}
+	for _, cidr := range cidrs {
+		c.Data[cidr] = struct{}{}
+	}
+}
+
 // Check checks if an IP is contained in the blacklist
 func (c *Client) Check(ip net.IP) (bool, error) {
 	return c.ranger.Contains(ip)
```
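From a caller's perspective, the new constructor looks like the sketch below. This is not part of the commit: the module path `github.com/projectdiscovery/cdncheck` matches the repository, but `IPINFO_TOKEN` is an illustrative environment variable name, not one the library reads itself.

```go
package main

import (
	"fmt"
	"net"
	"os"

	"github.com/projectdiscovery/cdncheck"
)

func main() {
	// With an empty token, HasAuthInfo() is false and only the default
	// scrapers run; a non-empty token additionally enables the
	// ipinfo-backed akamai, sucuri and leaseweb scrapers.
	client, err := cdncheck.NewWithOptions(&cdncheck.Options{
		IPInfoToken: os.Getenv("IPINFO_TOKEN"), // illustrative env var name
	})
	if err != nil {
		panic(err)
	}

	// Check an IP against the scraped CIDR ranges.
	found, err := client.Check(net.ParseIP("173.245.48.12"))
	if err != nil {
		panic(err)
	}
	fmt.Printf("cdn: %v\n", found)
}
```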

options.go (+10 -0)
```diff
@@ -0,0 +1,10 @@
+package cdncheck
+
+type Options struct {
+	Cache       bool
+	IPInfoToken string
+}
+
+func (options *Options) HasAuthInfo() bool {
+	return options.IPInfoToken != ""
+}
```
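The `HasAuthInfo` contract is small enough to pin down in a test. A minimal sketch, not part of the commit, in the package's own testify-free form:

```go
package cdncheck

import "testing"

// TestHasAuthInfo documents the gating behavior: only a non-empty
// IPInfoToken marks the options as carrying auth info. Note that Cache
// and IPInfoToken are independent, so cached data and authenticated
// scrapers can be combined in one client.
func TestHasAuthInfo(t *testing.T) {
	if (&Options{}).HasAuthInfo() {
		t.Fatal("empty token should not report auth info")
	}
	if !(&Options{Cache: true, IPInfoToken: "token"}).HasAuthInfo() {
		t.Fatal("non-empty token should report auth info")
	}
}
```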

ranges.go (+28 -6)
```diff
@@ -10,6 +10,7 @@ import (
 var cidrRegex = regexp.MustCompile(`[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\/[0-9]{1,3}`)
 
 type scraperFunc func(httpClient *http.Client) ([]string, error)
+type scraperWithOptionsFunc func(httpClient *http.Client, options *Options) ([]string, error)
 
 // scrapeAzure scrapes Microsoft Azure firewall's CIDR ranges from their datacenter
 func scrapeAzure(httpClient *http.Client) ([]string, error) {
@@ -90,8 +91,12 @@ func scrapeIncapsula(httpClient *http.Client) ([]string, error) {
 }
 
 // scrapeAkamai scrapes akamai firewall's CIDR ranges from ipinfo
-func scrapeAkamai(httpClient *http.Client) ([]string, error) {
-	resp, err := httpClient.Get("https://ipinfo.io/AS12222")
+func scrapeAkamai(httpClient *http.Client, options *Options) ([]string, error) {
+	req, err := makeReqWithAuth(http.MethodGet, "https://ipinfo.io/AS12222", "Authorization", "Bearer "+options.IPInfoToken)
+	if err != nil {
+		return nil, err
+	}
+	resp, err := httpClient.Do(req)
 	if err != nil {
 		return nil, err
 	}
@@ -108,8 +113,12 @@ func scrapeAkamai(httpClient *http.Client) ([]string, error) {
 }
 
 // scrapeSucuri scrapes sucuri firewall's CIDR ranges from ipinfo
-func scrapeSucuri(httpClient *http.Client) ([]string, error) {
-	resp, err := httpClient.Get("https://ipinfo.io/AS30148")
+func scrapeSucuri(httpClient *http.Client, options *Options) ([]string, error) {
+	req, err := makeReqWithAuth(http.MethodGet, "https://ipinfo.io/AS30148", "Authorization", "Bearer "+options.IPInfoToken)
+	if err != nil {
+		return nil, err
+	}
+	resp, err := httpClient.Do(req)
 	if err != nil {
 		return nil, err
 	}
@@ -143,8 +152,12 @@ func scrapeFastly(httpClient *http.Client) ([]string, error) {
 }
 
 // scrapeLeaseweb scrapes leaseweb firewall's CIDR ranges from ipinfo
-func scrapeLeaseweb(httpClient *http.Client) ([]string, error) {
-	resp, err := httpClient.Get("https://ipinfo.io/AS60626")
+func scrapeLeaseweb(httpClient *http.Client, options *Options) ([]string, error) {
+	req, err := makeReqWithAuth(http.MethodGet, "https://ipinfo.io/AS60626", "Authorization", "Bearer "+options.IPInfoToken)
+	if err != nil {
+		return nil, err
+	}
+	resp, err := httpClient.Do(req)
 	if err != nil {
 		return nil, err
 	}
@@ -176,3 +189,12 @@ func scrapeProjectDiscovery(httpClient *http.Client) ([]string, error) {
 	cidrs := cidrRegex.FindAllString(body, -1)
 	return cidrs, nil
 }
+
+// makeReqWithAuth builds a request with the given header set; callers pass
+// the full header value (e.g. "Bearer <token>").
+func makeReqWithAuth(method, URL, headerName, bearerValue string) (*http.Request, error) {
+	req, err := http.NewRequest(method, URL, nil)
+	if err != nil {
+		return nil, err
+	}
+	req.Header.Add(headerName, bearerValue)
+	return req, nil
+}
```
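One quick way to exercise `makeReqWithAuth` without hitting ipinfo.io is a local `httptest` server. A sketch, not part of the commit; it assumes the helper sets the header value exactly as passed by the caller:

```go
package cdncheck

import (
	"net/http"
	"net/http/httptest"
	"testing"

	"github.com/stretchr/testify/require"
)

// TestMakeReqWithAuth verifies the Authorization header offline by
// capturing it on a throwaway local server.
func TestMakeReqWithAuth(t *testing.T) {
	var got string
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		got = r.Header.Get("Authorization")
	}))
	defer server.Close()

	req, err := makeReqWithAuth(http.MethodGet, server.URL, "Authorization", "Bearer test-token")
	require.Nil(t, err, "Could not build request")

	resp, err := http.DefaultClient.Do(req)
	require.Nil(t, err, "Could not send request")
	resp.Body.Close()
	require.Equal(t, "Bearer test-token", got, "Unexpected Authorization header")
}
```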

ranges_test.go (+12 -18)
```diff
@@ -23,39 +23,33 @@ func TestScrapeRanges(t *testing.T) {
 	}
 
 	t.Run("azure", func(t *testing.T) {
-		_, err := scrapeAzure(httpClient)
+		ips, err := scrapeAzure(httpClient)
 		require.Nil(t, err, "Could not scrape azure")
+		require.Positive(t, len(ips), "Empty ip list")
 	})
 	t.Run("cloudfront", func(t *testing.T) {
-		_, err := scrapeCloudFront(httpClient)
+		ips, err := scrapeCloudFront(httpClient)
 		require.Nil(t, err, "Could not scrape cloudfront")
+		require.Positive(t, len(ips), "Empty ip list")
 	})
 	t.Run("cloudflare", func(t *testing.T) {
-		_, err := scrapeCloudflare(httpClient)
+		ips, err := scrapeCloudflare(httpClient)
 		require.Nil(t, err, "Could not scrape cloudflare")
+		require.Positive(t, len(ips), "Empty ip list")
 	})
 	t.Run("incapsula", func(t *testing.T) {
-		_, err := scrapeIncapsula(httpClient)
+		ips, err := scrapeIncapsula(httpClient)
 		require.Nil(t, err, "Could not scrape incapsula")
-	})
-	t.Run("akamai", func(t *testing.T) {
-		_, err := scrapeAkamai(httpClient)
-		require.Nil(t, err, "Could not scrape akamai")
-	})
-	t.Run("sucuri", func(t *testing.T) {
-		_, err := scrapeSucuri(httpClient)
-		require.Nil(t, err, "Could not scrape sucuri")
-	})
-	t.Run("leaseweb", func(t *testing.T) {
-		_, err := scrapeLeaseweb(httpClient)
-		require.Nil(t, err, "Could not scrape leaseweb")
+		require.Positive(t, len(ips), "Empty ip list")
 	})
 	t.Run("fastly", func(t *testing.T) {
-		_, err := scrapeFastly(httpClient)
+		ips, err := scrapeFastly(httpClient)
 		require.Nil(t, err, "Could not scrape fastly")
+		require.Positive(t, len(ips), "Empty ip list")
 	})
 	t.Run("projectdiscovery", func(t *testing.T) {
-		_, err := scrapeProjectDiscovery(httpClient)
+		ips, err := scrapeProjectDiscovery(httpClient)
 		require.Nil(t, err, "Could not scrape projectdiscovery")
+		require.Positive(t, len(ips), "Empty ip list")
 	})
 }
```
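The akamai, sucuri and leaseweb subtests are dropped here because those scrapers now require a token. One way they could return is behind an env-var guard; a sketch, not part of the commit, with `IPINFO_TOKEN` as an illustrative variable name:

```go
package cdncheck

import (
	"net/http"
	"os"
	"testing"
	"time"

	"github.com/stretchr/testify/require"
)

// TestScrapeRangesWithAuth runs the authenticated scrapers only when an
// ipinfo.io token is available, so CI without credentials still passes.
func TestScrapeRangesWithAuth(t *testing.T) {
	token := os.Getenv("IPINFO_TOKEN") // illustrative env var name
	if token == "" {
		t.Skip("no ipinfo.io token set")
	}
	httpClient := &http.Client{Timeout: 10 * time.Second}
	options := &Options{IPInfoToken: token}

	t.Run("akamai", func(t *testing.T) {
		ips, err := scrapeAkamai(httpClient, options)
		require.Nil(t, err, "Could not scrape akamai")
		require.Positive(t, len(ips), "Empty ip list")
	})
	t.Run("sucuri", func(t *testing.T) {
		ips, err := scrapeSucuri(httpClient, options)
		require.Nil(t, err, "Could not scrape sucuri")
		require.Positive(t, len(ips), "Empty ip list")
	})
	t.Run("leaseweb", func(t *testing.T) {
		ips, err := scrapeLeaseweb(httpClient, options)
		require.Nil(t, err, "Could not scrape leaseweb")
		require.Positive(t, len(ips), "Empty ip list")
	})
}
```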
