Skip to content

Commit 04a4f79

Browse files
committed
update subdomain scanner tutorial
1 parent 9caa8d9 commit 04a4f79

File tree

3 files changed

+34
-4
lines changed

3 files changed

+34
-4
lines changed

ethical-hacking/subdomain-scanner/README.md

Lines changed: 11 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,9 @@ To run this:
77
```
88
**Output:**
99
```
10-
usage: fast_subdomain_scanner.py [-h] [-l WORDLIST] [-t NUM_THREADS] domain
10+
usage: fast_subdomain_scanner.py [-h] [-l WORDLIST] [-t NUM_THREADS]
11+
[-o OUTPUT_FILE]
12+
domain
1113
1214
Faster Subdomain Scanner using Threads
1315
@@ -23,6 +25,9 @@ To run this:
2325
-t NUM_THREADS, --num-threads NUM_THREADS
2426
Number of threads to use to scan the domain. Default
2527
is 10
28+
-o OUTPUT_FILE, --output-file OUTPUT_FILE
29+
Specify the output text file to write discovered
30+
subdomains
2631
```
2732
- If you want to scan hackthissite.org for subdomains using only 10 threads with a word list of 100 subdomains (`subdomains.txt`):
2833
```
@@ -37,4 +42,9 @@ To run this:
3742
[+] Discovered subdomain: http://stats.hackthissite.org
3843
[+] Discovered subdomain: http://forums.hackthissite.org
3944
```
45+
If you want to write the discovered subdomains to an output file:
46+
```
47+
python fast_subdomain_scanner.py hackthissite.org -l subdomains.txt -t 10 -o discovered_urls.txt
48+
```
49+
This will create a new file `discovered_urls.txt` containing the discovered subdomains, one per line.
4050
- For bigger subdomain wordlists, check [this repository](https://github.com/rbsec/dnscan).

ethical-hacking/subdomain-scanner/fast_subdomain_scanner.py

Lines changed: 13 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,10 @@
11
import requests
2-
from threading import Thread
2+
from threading import Thread, Lock
33
from queue import Queue
44

55
q = Queue()
6+
list_lock = Lock()
7+
discovered_domains = []
68

79
def scan_subdomains(domain):
810
global q
@@ -17,6 +19,9 @@ def scan_subdomains(domain):
1719
pass
1820
else:
1921
print("[+] Discovered subdomain:", url)
22+
# add the subdomain to the global list
23+
with list_lock:
24+
discovered_domains.append(url)
2025

2126
# we're done with scanning that subdomain
2227
q.task_done()
@@ -44,12 +49,19 @@ def main(domain, n_threads, subdomains):
4449
parser.add_argument("-l", "--wordlist", help="File that contains all subdomains to scan, line by line. Default is subdomains.txt",
4550
default="subdomains.txt")
4651
parser.add_argument("-t", "--num-threads", help="Number of threads to use to scan the domain. Default is 10", default=10, type=int)
52+
parser.add_argument("-o", "--output-file", help="Specify the output text file to write discovered subdomains")
4753

4854
args = parser.parse_args()
4955
domain = args.domain
5056
wordlist = args.wordlist
5157
num_threads = args.num_threads
58+
output_file = args.output_file
5259

5360
main(domain=domain, n_threads=num_threads, subdomains=open(wordlist).read().splitlines())
5461
q.join()
62+
63+
# save the file
64+
with open(output_file, "w") as f:
65+
for url in discovered_domains:
66+
print(url, file=f)
5567

ethical-hacking/subdomain-scanner/subdomain_scanner.py

Lines changed: 10 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,8 @@
99
content = file.read()
1010
# split by new lines
1111
subdomains = content.splitlines()
12-
12+
# a list of discovered subdomains
13+
discovered_subdomains = []
1314
for subdomain in subdomains:
1415
# construct the url
1516
url = f"http://{subdomain}.{domain}"
@@ -20,4 +21,11 @@
2021
# if the subdomain does not exist, just pass, print nothing
2122
pass
2223
else:
23-
print("[+] Discovered subdomain:", url)
24+
print("[+] Discovered subdomain:", url)
25+
# append the discovered subdomain to our list
26+
discovered_subdomains.append(url)
27+
28+
# save the discovered subdomains into a file
29+
with open("discovered_subdomains.txt", "w") as f:
30+
for subdomain in discovered_subdomains:
31+
print(subdomain, file=f)

0 commit comments

Comments
 (0)