import requests
from bs4 import BeautifulSoup

def get_web_page(url):
    try:
        response = requests.get(url, timeout=10)
        response.raise_for_status()  # raise an HTTPError for 4xx/5xx responses
        return response.text
    except requests.exceptions.RequestException as e:
        print(f"Error: {e}")
        return None

def extract_links(html):
    soup = BeautifulSoup(html, 'html.parser')
    links = [a['href'] for a in soup.find_all('a', href=True)]
    return links

url = "http://example.com"
html_content = get_web_page(url)
if html_content:
    links = extract_links(html_content)
    print("Found links:", links)
import nmap

def scan_host(target):
    nm = nmap.PortScanner()
    # SYN scan of all 65535 ports (-sS requires root; -p- can take a long time)
    nm.scan(target, arguments='-sS -T4 -p-')
    for host in nm.all_hosts():
        print(f"Host: {host} ({nm[host].hostname()})")
        for proto in nm[host].all_protocols():
            print(f"Protocol: {proto}")
            for port in nm[host][proto].keys():
                print(f"Port: {port} State: {nm[host][proto][port]['state']}")

scan_host('192.168.1.1')
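A full -sS -p- sweep is slow and needs root privileges. A lighter sketch, assuming a TCP connect scan of the 100 most common ports is acceptable (quick_scan is an illustrative name):

def quick_scan(target):
    nm = nmap.PortScanner()
    # -sT (connect scan) works without root; --top-ports limits the sweep
    nm.scan(target, arguments='-sT -T4 --top-ports 100')
    for host in nm.all_hosts():
        for proto in nm[host].all_protocols():
            for port, info in nm[host][proto].items():
                print(f"{host} {proto}/{port} {info['state']}")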
import whois

def get_whois_info(domain):
    try:
        w = whois.whois(domain)
        return w
    except Exception as e:
        print(f"Error: {e}")
        return None

domain = "example.com"
whois_info = get_whois_info(domain)
if whois_info:
    print(whois_info)
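Rather than dumping the whole record, python-whois also exposes parsed fields as attributes. A short sketch (field availability varies by registry, and some values come back as lists or None):

if whois_info:
    print("Registrar:", whois_info.registrar)
    print("Created:", whois_info.creation_date)
    print("Expires:", whois_info.expiration_date)
    print("Name servers:", whois_info.name_servers)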
import shodan

def search_shodan(api_key, query):
    api = shodan.Shodan(api_key)
    try:
        results = api.search(query)
        for result in results['matches']:
            print(f"IP: {result['ip_str']}")
            print(f"Port: {result['port']}")
            print(f"Data: {result['data']}")
    except shodan.APIError as e:
        print(f"Error: {e}")

api_key = "YOUR_SHODAN_API_KEY"
query = "apache"
search_shodan(api_key, query)
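Shodan returns matches a page at a time (100 per page) and reports the full count in results['total']. A sketch that walks extra pages via search()'s page parameter (search_shodan_paged is an illustrative name):

def search_shodan_paged(api_key, query, pages=3):
    api = shodan.Shodan(api_key)
    try:
        for page in range(1, pages + 1):
            results = api.search(query, page=page)
            print(f"Page {page}: {len(results['matches'])} of {results['total']} matches")
    except shodan.APIError as e:
        print(f"Error: {e}")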
import sublist3r

def enumerate_subdomains(domain):
    subdomains = sublist3r.main(domain, 40, savefile=None, ports=None, silent=False,
                                verbose=True, enable_bruteforce=False, engines=None)
    return subdomains

domain = "example.com"
subdomains = enumerate_subdomains(domain)
print("Found subdomains:", subdomains)
import dns.resolver

def get_dns_records(domain):
    try:
        a_records = dns.resolver.resolve(domain, 'A')
        mx_records = dns.resolver.resolve(domain, 'MX')
        ns_records = dns.resolver.resolve(domain, 'NS')
        return a_records, mx_records, ns_records
    except (dns.resolver.NoAnswer, dns.resolver.NXDOMAIN):
        print("No DNS records found")
        return None, None, None

domain = "example.com"
a_records, mx_records, ns_records = get_dns_records(domain)
if a_records:
    print("A Records:", [r.to_text() for r in a_records])
if mx_records:
    print("MX Records:", [r.to_text() for r in mx_records])
if ns_records:
    print("NS Records:", [r.to_text() for r in ns_records])
import requests

def zap_scan(url):
    zap_url = "http://localhost:8080"
    apikey = "YOUR_ZAP_API_KEY"
    headers = {'X-ZAP-API-Key': apikey}
    # pass the target via params so requests URL-encodes it correctly
    response = requests.get(f"{zap_url}/JSON/ascan/action/scan/",
                            params={'url': url}, headers=headers)
    if response.status_code == 200:
        print("Scan started successfully")
    else:
        print("Failed to start scan")

url = "http://example.com"
zap_scan(url)
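The same call can also go through ZAP's official Python client instead of raw HTTP. A sketch, assuming the python-owasp-zap-v2.4 package and a ZAP proxy listening on localhost:8080 (zap_scan_client is an illustrative name):

from zapv2 import ZAPv2

def zap_scan_client(target, apikey="YOUR_ZAP_API_KEY"):
    zap = ZAPv2(apikey=apikey,
                proxies={'http': 'http://localhost:8080',
                         'https': 'http://localhost:8080'})
    scan_id = zap.ascan.scan(target)  # returns the active-scan id
    print("Scan id:", scan_id)
    print("Progress:", zap.ascan.status(scan_id), "%")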
import subprocess

def sqlmap_test(url):
    # pass arguments as a list so the URL is not interpreted by the shell
    # (shell=True with an interpolated URL invites command injection and
    # breaks on characters like & in query strings)
    subprocess.run(["sqlmap", "-u", url, "--batch"])

url = "http://example.com/vulnerable.php?id=1"
sqlmap_test(url)
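To post-process sqlmap's findings rather than stream them to the terminal, subprocess.run can capture stdout (capture_output and text are standard subprocess.run parameters; sqlmap_test_captured is an illustrative name):

def sqlmap_test_captured(url):
    result = subprocess.run(["sqlmap", "-u", url, "--batch"],
                            capture_output=True, text=True)
    return result.stdout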
import requests

def wappalyzer_analysis(url):
    # the hosted Wappalyzer lookup API expects an API key in the x-api-key
    # header and (per its docs) the target in a "urls" parameter
    headers = {'x-api-key': 'YOUR_WAPPALYZER_API_KEY'}
    response = requests.get("https://api.wappalyzer.com/v2/lookup/",
                            params={'urls': url}, headers=headers)
    if response.status_code == 200:
        print(response.json())
    else:
        print("Failed to get analysis")

url = "http://example.com"
wappalyzer_analysis(url)
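Fingerprinting can also run locally without the hosted API. A sketch assuming the python-Wappalyzer package (class and method names come from that package; local_wappalyzer is an illustrative name):

from Wappalyzer import Wappalyzer, WebPage

def local_wappalyzer(url):
    wappalyzer = Wappalyzer.latest()    # load the bundled fingerprint database
    webpage = WebPage.new_from_url(url)
    print(wappalyzer.analyze(webpage))  # set of detected technologies

local_wappalyzer("http://example.com")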
import subprocess

def theharvester(domain):
    # newer releases install the CLI as "theHarvester"; list arguments
    # avoid shell quoting issues
    subprocess.run(["theHarvester", "-d", domain, "-l", "500", "-b", "all"])

domain = "example.com"
theharvester(domain)
Originally published on the WeChat official account 船山信安: Original | Common Python Scripts for Information Gathering in Web Penetration Testing