mirror of
https://github.com/m4rcel-lol/m5rcode.git
synced 2025-12-06 19:13:57 +05:30
Update cmd_wdir.py
This commit is contained in:
committed by
GitHub
parent
89272e368e
commit
036ebf99ea
"""wdir command: fetch a URL and list files/directories exposed by an HTML index page.

NOTE(review): this module was reconstructed from a corrupted diff dump in which
pre- and post-commit lines were interleaved; the body below is the post-commit
("+") side of the diff. Verify against the upstream repository.
"""

import requests
from bs4 import BeautifulSoup
from urllib.parse import urljoin, urlparse
from colorama import Fore, Style


class WdirCommand:
    """Scan a URL's HTML directory listing and print the discovered entries."""

    def __init__(self, url):
        # NOTE(review): the original __init__ body was outside the visible diff
        # hunk; run() only ever reads self.url, so storing the argument is the
        # assumed behavior — confirm against the full file.
        self.url = url

    def run(self):
        """Fetch self.url, parse its anchor tags, and print a colorized listing.

        Prints a usage message when no URL was given, an error message when the
        fetch fails, and a notice when no listable entries are found.
        """
        if not self.url:
            print(Fore.RED + "Usage: wdir <url>" + Style.RESET_ALL)
            return

        # Ensure URL has scheme
        if not self.url.startswith("http://") and not self.url.startswith("https://"):
            self.url = "http://" + self.url

        try:
            print(Fore.CYAN + f"[FETCH] Scanning directory at {self.url}..." + Style.RESET_ALL)
            resp = requests.get(self.url, timeout=5)
            # Raise for non-2xx status so every fetch failure funnels into one handler.
            resp.raise_for_status()
        except Exception as e:
            print(Fore.RED + f"[ERR] Failed to fetch {self.url}: {e}" + Style.RESET_ALL)
            return

        soup = BeautifulSoup(resp.text, "html.parser")
        links = soup.find_all("a")

        files = []

        for link in links:
            href = link.get("href")
            if not href:
                continue
            # Skip parent directory and in-page anchors
            if href.startswith("?") or href.startswith("#") or href == "../":
                continue

            full_url = urljoin(self.url, href)
            filename = href.split("/")[-1]

            # Determine file type
            if "." in filename:
                ext = filename.split(".")[-1].lower()
                ftype = f".{ext} file"
            else:
                ftype = "Directory"

            files.append((filename, ftype, full_url))

        if not files:
            print(Fore.YELLOW + "No files or directories found (maybe directory listing is disabled)." + Style.RESET_ALL)
            return

        print(Fore.GREEN + "\nFiles found:" + Style.RESET_ALL)
        for fname, ftype, furl in files:
            print(f" {Fore.CYAN}{fname:<30}{Style.RESET_ALL} {Fore.WHITE}→ {ftype}{Style.RESET_ALL}")
|||||||
Reference in New Issue
Block a user