
Commit 280edf2

Update threattracer.py
Updated the script; it was previously failing to run due to some errors.
1 parent 125e520 commit 280edf2

File tree

1 file changed: +87 -122 lines changed

threattracer.py

Lines changed: 87 additions & 122 deletions
@@ -5,7 +5,6 @@
 import json
 from pyExploitDb import PyExploitDb
 from bs4 import BeautifulSoup
-import subprocess

 art = """
  _______ _ _ _______
@@ -23,146 +22,117 @@

 def find_cpes(component, version):
     base_url = "https://nvd.nist.gov/products/cpe/search/results"
-    params = {
-        "namingFormat": "2.3",
-        "keyword": f"{component} {version}"
-    }
-
-    response = requests.get(base_url, params=params)
-    content = response.text
-
-    cpe_matches = re.findall(r'cpe:(.*?)<', content)
-    return cpe_matches
+    params = {"namingFormat": "2.3", "keyword": f"{component} {version}"}
+
+    try:
+        response = requests.get(base_url, params=params, verify=True)
+        response.raise_for_status()
+        content = response.text
+        cpe_matches = re.findall(r'cpe:(.*?)<', content)
+        return cpe_matches
+    except requests.RequestException as e:
+        print(colored(f"Error fetching CPEs: {e}", "red"))
+        return []

 def synk_db(cve_id):
-    res = requests.get(f"https://security.snyk.io/vuln/?search={cve_id}")
-    a_tag_pattern = r'data-snyk-test="vuln table title".*>([^"]+)<!----><!---->'
-    a_tag_matches = re.findall(a_tag_pattern, res.text)
-
-    if a_tag_matches:
-        snyk_short_name = a_tag_matches[0].strip()
-        return snyk_short_name
+    try:
+        res = requests.get(f"https://security.snyk.io/vuln/?search={cve_id}")
+        a_tag_pattern = r'data-snyk-test="vuln table title".*>([^"]+)<!----><!---->'
+        a_tag_matches = re.findall(a_tag_pattern, res.text)
+        if a_tag_matches:
+            return a_tag_matches[0].strip()
+    except requests.RequestException as e:
+        print(colored(f"Error fetching Snyk data: {e}", "red"))
+    return None

 def fetch_cve_details(cpe_string):
     base_url = "https://services.nvd.nist.gov/rest/json/cves/2.0"
+    all_cve_details = []

-    cves = []
-
-    for index, cpe_string in enumerate(cpe_strings[:2]):
+    for cpe_string in [cpe_string]:
         cve_query_string = ":".join(cpe_string.split(":")[1:5])
         url = f"{base_url}?cpeName=cpe:{cpe_string}"
         print(colored(f"Querying: {url}", "red"))

-        response = requests.get(url)
-
-        if response.status_code != 200:
-            print(colored(f"Error: Unable to retrieve CVE data for CPE: {cpe_string}. Status code: {response.status_code}", "red"))
-            return []
-
         try:
+            response = requests.get(url)
+            response.raise_for_status()
             data = response.json()
+            for cve_item in data.get("vulnerabilities", []):
+                cve_id = cve_item.get("cve", {}).get("id", "N/A")
+                description_text = cve_item.get("cve", {}).get("descriptions", [{}])[0].get("value", "No description")
+                link = f"https://nvd.nist.gov/vuln/detail/{cve_id}"
+                weaknesses = [desc.get("value", "No description") for problem_type in cve_item.get("cve", {}).get("weaknesses", []) for desc in problem_type.get("description", [])]
+
+                pEdb = PyExploitDb()
+                pEdb.debug = False
+                pEdb.openFile()
+                exploit_status = "Public Exploit Found over Exploit-DB" if pEdb.searchCve(cve_id) else "No Public Exploit Found over Exploit-DB"
+
+                snyk_short_name = synk_db(cve_id)
+
+                all_cve_details.append({
+                    "CVE ID": cve_id,
+                    "Short Name": snyk_short_name,
+                    "Description": description_text,
+                    "Weaknesses": ", ".join(weaknesses),
+                    "Link": link,
+                    "Exploit Status": exploit_status
+                })
+        except requests.RequestException as e:
+            print(colored(f"Request error: {e}", "red"))
         except json.JSONDecodeError:
             print(colored(f"Error decoding JSON for CPE: {cpe_string}. Skipping.", "red"))
-            return []
-
-        for cve_item in data["vulnerabilities"]:
-
-            all_cve_details = []
-
-            cve_id = cve_item["cve"]["id"]
-            description_text = cve_item["cve"]["descriptions"][0]["value"]
-            link = f"https://nvd.nist.gov/vuln/detail/{cve_id}"
-
-            weaknesses = []
-            for problem_type in cve_item["cve"]["weaknesses"]:
-                for description in problem_type["description"]:
-                    weaknesses.append(description["value"])
-
-            pEdb = PyExploitDb()
-            pEdb.debug = False
-            pEdb.openFile()
-            exploit_status = pEdb.searchCve(cve_id)
-            if exploit_status:
-                exploit_status = "Public Exploit Found over Exploit-DB"
-            else:
-                exploit_status = "No Public Exploit Found over Exploit-DB"
-
-            snyk_short_name = synk_db(cve_id)
-
-            all_cve_details.append({
-                "CVE ID": cve_id,
-                "Short Name": snyk_short_name,
-                "Description": description_text,
-                "Weaknesses": ", ".join(weaknesses),
-                "Link": link,
-                "Exploit Status": exploit_status
-            })

     return all_cve_details

 def fetch_github_urls(cve_id):
     api_url = f"https://poc-in-github.motikan2010.net/api/v1/?cve_id={cve_id}"
-    response = requests.get(api_url)
-
-    if response.status_code == 200:
-        data = response.json()
-        if "pocs" in data and data["pocs"]:
-            github_urls = [poc["html_url"] for poc in data["pocs"]]
-            return github_urls
+    try:
+        response = requests.get(api_url)
+        if response.status_code == 200:
+            data = response.json()
+            if "pocs" in data and data["pocs"]:
+                return [poc["html_url"] for poc in data["pocs"]]
+    except requests.RequestException as e:
+        print(colored(f"Error fetching GitHub URLs: {e}", "red"))
     return []

 def search_and_extract_download_links(product_name):
     search_url = f"https://packetstormsecurity.com/search/?q={product_name}"
-    response = requests.get(search_url)
-
-    download_links = []
-
-    if response.status_code == 200:
-        soup = BeautifulSoup(response.text, 'html.parser')
-        results = soup.find_all('a', href=True)
-
-        for result in results:
-            href = result['href']
-            if '/files/download/' in href and href.endswith('.txt'):
-                download_links.append(f"https://packetstormsecurity.com{href}")
-
-        if not download_links:
-            print(colored("No download links found on Packet Storm Security.", "green", attrs=["underline"]))
-            return None
-
-    return download_links
+    try:
+        response = requests.get(search_url)
+        if response.status_code == 200:
+            soup = BeautifulSoup(response.text, 'html.parser')
+            results = soup.find_all('a', href=True)
+            download_links = [f"https://packetstormsecurity.com{result['href']}" for result in results if '/files/download/' in result['href'] and result['href'].endswith('.txt')]
+            if not download_links:
+                print(colored("No download links found on Packet Storm Security.", "green", attrs=["underline"]))
+            return download_links
+    except requests.RequestException as e:
+        print(colored(f"Error fetching download links: {e}", "red"))
+    return []

 def search_marc_info(search_term):
-    # Make a GET request to the URL
     url = f"https://marc.info/?l=full-disclosure&s={search_term}"
-    response = requests.get(url)
-
-    # Check if the request was successful
-    if response.status_code == 200:
-        # Parse the HTML content of the page
-        soup = BeautifulSoup(response.text, 'html.parser')
-
-        # Check if the response contains "No hits found for"
-        if "No hits found for" in soup.get_text():
-            print(colored("No matching exploits found.", "red", attrs=["underline"]))
-        else:
-            # Find all <a> tags within <pre> tags, excluding those with "full-disc" in the text
-            post_links = soup.find('pre').find_all('a', string=lambda text: "full-disc" not in text)
-
-            # Print all names and links
-            if post_links:
-                results = []
-                for link in post_links:
-                    name = link.get_text(strip=True)
-                    link_url = "https://marc.info" + link['href']
-                    results.append({"Name": name, "Link": link_url})
-                return results
+    try:
+        response = requests.get(url)
+        if response.status_code == 200:
+            soup = BeautifulSoup(response.text, 'html.parser')
+            if "No hits found for" in soup.get_text():
+                print(colored("No matching exploits found.", "red", attrs=["underline"]))
             else:
-                print(colored("No matching exploits found.", "green"))
-    else:
-        print(colored("Failed to retrieve the web page.", "red"))
-        print(f"Status code: {response.status_code}")
-    return None
+                post_links = soup.find('pre').find_all('a', string=lambda text: "full-disc" not in text)
+                results = [{"Name": link.get_text(strip=True), "Link": "https://marc.info" + link['href']} for link in post_links]
+                if results:
+                    return results
+                else:
+                    print(colored("No matching exploits found.", "green"))
+        else:
+            print(colored(f"Failed to retrieve the web page. Status code: {response.status_code}", "red"))
+    except requests.RequestException as e:
+        print(colored(f"Error fetching Marc.Info data: {e}", "red"))
+    return None

 if __name__ == "__main__":
     print(colored("CVE and Exploit Finder Script", "green", attrs=["bold"]))
@@ -193,19 +163,15 @@ def search_marc_info(search_term):
             print(colored(f"Link: {result['Link']}", "blue"))
             github_urls = fetch_github_urls(cve_id)
             if github_urls:
-                print(colored("Public Exploit/ POC Over Github found:", "red"))
+                print(colored("Public Exploit/POC Over Github found:", "red"))
                 for url in github_urls:
                     print(colored(f" {url}", "blue"))
             else:
-                print(colored("Public Exploit/ POC Over Github not found, you might need to check manually", "green"))
-            if result["Exploit Status"] == "Public Exploit Found":
-                print(colored(f"Exploit Status: {result['Exploit Status']}", "red"))
-            else:
-                print(colored(f"Exploit Status: {result['Exploit Status']}", "green"))
+                print(colored("Public Exploit/POC Over Github not found, you might need to check manually", "green"))
+            print(colored(f"Exploit Status: {result['Exploit Status']}", "red" if result["Exploit Status"] == "Public Exploit Found over Exploit-DB" else "green"))
     else:
         print(colored("No CPEs found for the provided component and version.", "red"))

-    # Search for download links on Packet Storm Security even if no CPEs were found
     download_links = search_and_extract_download_links(component)

     if download_links:
@@ -215,7 +181,6 @@ def search_marc_info(search_term):
     else:
         print(colored("No download links found on Packet Storm Security.", "red", attrs=["underline"]))

-    # Search Marc.Info
     search_term_marc = f"{component} {version}"
     print(f"\nUsing keyword "+search_term_marc+" for lookup...")
     marc_results = search_marc_info(search_term_marc)

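Note: the recurring change in this commit is a single error-handling pattern applied to every network call: wrap requests.get in try/except, surface HTTP errors via raise_for_status(), and fall back to a safe default instead of crashing. Below is a minimal, self-contained sketch of that pattern; the fetch_text helper, the URL, and the timeout value are illustrative assumptions, not part of threattracer.py.

import requests

def fetch_text(url):
    # Hypothetical helper illustrating the commit's pattern; not in the script.
    try:
        response = requests.get(url, timeout=10)  # timeout is an assumed extra safeguard
        response.raise_for_status()  # turns 4xx/5xx responses into requests.HTTPError
        return response.text
    except requests.RequestException as e:  # base class covering connection, timeout, and HTTP errors
        print(f"Error fetching {url}: {e}")
        return ""  # safe default, mirroring the script's `return []` / `return None`

if __name__ == "__main__":
    print(len(fetch_text("https://example.com")))

Because requests.RequestException is the base class for the library's exceptions, a single except clause covers connection failures, timeouts, and the HTTPError raised by raise_for_status(), which is why each function in the diff needs only one handler.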