Update cmd_wdir.py

m5rcel { Marcel }
2025-09-09 17:20:00 +02:00
committed by GitHub
parent 89272e368e
commit 036ebf99ea

@@ -1,6 +1,7 @@
 import requests
 from bs4 import BeautifulSoup
-from colorama import Fore
+from urllib.parse import urljoin, urlparse
+from colorama import Fore, Style
 
 class WdirCommand:
     def __init__(self, url):
@@ -8,45 +9,50 @@ class WdirCommand:
     def run(self):
         if not self.url:
-            print(Fore.RED + "Usage: wdir <url>")
+            print(Fore.RED + "Usage: wdir <url>" + Style.RESET_ALL)
             return
 
+        # Ensure URL has scheme
         if not self.url.startswith("http://") and not self.url.startswith("https://"):
             self.url = "http://" + self.url
 
         try:
-            response = requests.get(self.url, timeout=5)
-            if response.status_code != 200:
-                print(Fore.RED + f"Failed to fetch URL (status {response.status_code})")
-                return
-
-            soup = BeautifulSoup(response.text, "html.parser")
-            links = soup.find_all("a")
-            files = []
-            dirs = []
-
-            for link in links:
-                href = link.get("href")
-                if not href or href.startswith("?") or href.startswith("#"):
-                    continue
-                if href in ("../", "/"):
-                    continue  # skip parent link
-                if href.endswith("/"):
-                    dirs.append(href)
-                else:
-                    files.append(href)
-
-            if not files and not dirs:
-                print(Fore.YELLOW + "No directory listing found (site may not expose files).")
-                return
-
-            print(Fore.CYAN + f"Directory listing for {self.url}:\n")
-
-            for d in dirs:
-                print(Fore.BLUE + f"[DIR] {d}")
-            for f in files:
-                print(Fore.GREEN + f"[FILE] {f}")
-        except requests.exceptions.RequestException as e:
-            print(Fore.RED + f"Error fetching URL: {e}")
+            print(Fore.CYAN + f"[FETCH] Scanning directory at {self.url}..." + Style.RESET_ALL)
+            resp = requests.get(self.url, timeout=5)
+            resp.raise_for_status()
+        except Exception as e:
+            print(Fore.RED + f"[ERR] Failed to fetch {self.url}: {e}" + Style.RESET_ALL)
+            return
+
+        soup = BeautifulSoup(resp.text, "html.parser")
+        links = soup.find_all("a")
+        files = []
+
+        for link in links:
+            href = link.get("href")
+            if not href:
+                continue
+            # Skip parent directory and in-page anchors
+            if href.startswith("?") or href.startswith("#") or href == "../":
+                continue
+
+            full_url = urljoin(self.url, href)
+            # Strip a trailing slash so directory entries keep their name after the split
+            filename = href.rstrip("/").split("/")[-1]
+
+            # Determine file type from the extension; extensionless names are treated as directories
+            if "." in filename:
+                ext = filename.split(".")[-1].lower()
+                ftype = f".{ext} file"
+            else:
+                ftype = "Directory"
+
+            files.append((filename, ftype, full_url))
+
+        if not files:
+            print(Fore.YELLOW + "No files or directories found (maybe directory listing is disabled)." + Style.RESET_ALL)
+            return
+
+        print(Fore.GREEN + "\nFiles found:" + Style.RESET_ALL)
+        for fname, ftype, furl in files:
+            print(f"  {Fore.CYAN}{fname:<30}{Style.RESET_ALL} {Fore.WHITE}{ftype}{Style.RESET_ALL}")