
Commit cb538e4

add username search tool tutorial
1 parent bb6f92d commit cb538e4

4 files changed, +111 −0 lines changed

README.md

Lines changed: 1 addition & 0 deletions
@@ -60,6 +60,7 @@ This is a repository of all the tutorials of [The Python Code](https://www.thepy
 - [Bluetooth Device Scanning in Python](https://thepythoncode.com/article/build-a-bluetooth-scanner-in-python). ([code](ethical-hacking/bluetooth-scanner))
 - [How to Create A Fork Bomb in Python](https://thepythoncode.com/article/make-a-fork-bomb-in-python). ([code](ethical-hacking/fork-bomb))
 - [How to Implement 2FA in Python](https://thepythoncode.com/article/implement-2fa-in-python). ([code](ethical-hacking/implement-2fa))
+- [How to Build a Username Search Tool in Python](https://thepythoncode.com/code/social-media-username-finder-in-python). ([code](ethical-hacking/username-finder))
 
 - ### [Machine Learning](https://www.thepythoncode.com/topic/machine-learning)
 - ### [Natural Language Processing](https://www.thepythoncode.com/topic/nlp)

Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
# [How to Build a Username Search Tool in Python](https://thepythoncode.com/code/social-media-username-finder-in-python)

Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
colorama
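
Note: the requirements file above pins only colorama, while the script in the next file also imports requests. Assuming requests is not already available in the environment, a plausible install step is:

pip install colorama requests
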
Lines changed: 108 additions & 0 deletions
@@ -0,0 +1,108 @@
# Import necessary libraries
import requests  # For making HTTP requests
import argparse  # For parsing command line arguments
import concurrent.futures  # For concurrent execution
from collections import OrderedDict  # For maintaining order of websites
from colorama import init, Fore  # For colored terminal output
import time  # For handling time-related tasks
import random  # For generating random numbers

# Initialize colorama for colored output.
init()

# Ordered dictionary of websites to check for a given username.
WEBSITES = OrderedDict([
    ("Instagram", "https://www.instagram.com/{}"),
    ("Facebook", "https://www.facebook.com/{}"),
    ("YouTube", "https://www.youtube.com/user/{}"),
    ("Reddit", "https://www.reddit.com/user/{}"),
    ("GitHub", "https://github.com/{}"),
    ("Twitch", "https://www.twitch.tv/{}"),
    ("Pinterest", "https://www.pinterest.com/{}/"),
    ("TikTok", "https://www.tiktok.com/@{}"),
    ("Flickr", "https://www.flickr.com/photos/{}")
])

REQUEST_DELAY = 2  # Delay in seconds between requests to the same website
MAX_RETRIES = 3  # Maximum number of retries for a failed request
last_request_times = {}  # Dictionary to track the last request time for each website

def check_username(website, username):
    """
    Check if the username exists on the given website.
    Returns the full URL if the username exists, False otherwise.
    """
    url = website.format(username)  # Format the URL with the given username
    retries = 0  # Initialize retry counter

    # Retry loop
    while retries < MAX_RETRIES:
        try:
            # Implement rate limiting.
            current_time = time.time()
            if website in last_request_times and current_time - last_request_times[website] < REQUEST_DELAY:
                delay = REQUEST_DELAY - (current_time - last_request_times[website])
                time.sleep(delay)  # Sleep to maintain the request delay.

            response = requests.get(url)  # Make the HTTP request
            last_request_times[website] = time.time()  # Update the last request time.

            if response.status_code == 200:  # Check if the request was successful.
                return url
            else:
                return False
        except requests.exceptions.RequestException:
            retries += 1  # Increment retry counter on exception.
            delay = random.uniform(1, 3)  # Random delay between retries.
            time.sleep(delay)  # Sleep for the delay period.

    return False  # Return False if all retries failed.

def main():
    # Parse command line arguments.
    parser = argparse.ArgumentParser(description="Check if a username exists on various websites.")
    parser.add_argument("username", help="The username to check.")
    parser.add_argument("-o", "--output", help="Path to save the results to a file.")
    args = parser.parse_args()

    username = args.username  # Username to check.
    output_file = args.output  # Output file path.

    print(f"Checking for username: {username}")

    results = OrderedDict()  # Dictionary to store results.

    # Use ThreadPoolExecutor for concurrent execution.
    with concurrent.futures.ThreadPoolExecutor() as executor:
        # Submit tasks to the executor.
        futures = {executor.submit(check_username, website, username): website_name for website_name, website in WEBSITES.items()}
        for future in concurrent.futures.as_completed(futures):
            website_name = futures[future]  # Get the website name.
            try:
                result = future.result()  # Get the result.
            except Exception as exc:
                print(f"{website_name} generated an exception: {exc}")
                result = False
            finally:
                results[website_name] = result  # Store the result.

    # Print the results.
    print("\nResults:")
    for website, result in results.items():
        if result:
            print(f"{Fore.GREEN}{website}: Found ({result})")
        else:
            print(f"{Fore.RED}{website}: Not Found")

    # Save results to a file if specified.
    if output_file:
        with open(output_file, "w") as f:
            for website, result in results.items():
                if result:
                    f.write(f"{website}: Found ({result})\n")
                else:
                    f.write(f"{website}: Not Found\n")
        print(f"{Fore.GREEN}\nResults saved to {output_file}")

# Call the main function
main()
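
A minimal usage sketch, assuming the script above is saved as username_finder.py (the actual file name is not visible in this view) and johndoe is a placeholder username:

python username_finder.py johndoe
python username_finder.py johndoe -o results.txt

Sites whose profile URL returns HTTP 200 are printed in green with the matching link; everything else is reported as Not Found, and the same summary is written to the optional output file.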
