import json
from pyExploitDb import PyExploitDb
from bs4 import BeautifulSoup
-import subprocess

art = """
 _______ _ _ _______

def find_cpes(component, version):
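+    # Scrape NVD's CPE search page and regex CPE 2.3 identifiers out of the returned HTML.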
    base_url = "https://nvd.nist.gov/products/cpe/search/results"
-    params = {
-        "namingFormat": "2.3",
-        "keyword": f"{component} {version}"
-    }
-
-    response = requests.get(base_url, params=params)
-    content = response.text
-
-    cpe_matches = re.findall(r'cpe:(.*?)<', content)
-    return cpe_matches
+    params = {"namingFormat": "2.3", "keyword": f"{component} {version}"}
+
+    try:
+        response = requests.get(base_url, params=params, verify=True)
+        response.raise_for_status()
+        content = response.text
+        cpe_matches = re.findall(r'cpe:(.*?)<', content)
+        return cpe_matches
+    except requests.RequestException as e:
+        print(colored(f"Error fetching CPEs: {e}", "red"))
+        return []

def synk_db(cve_id):
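+    # Best-effort scrape of security.snyk.io for a short, human-readable vulnerability title.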
-    res = requests.get(f"https://security.snyk.io/vuln/?search={cve_id}")
-    a_tag_pattern = r'data-snyk-test="vuln table title".*>([^"]+)<!----><!---->'
-    a_tag_matches = re.findall(a_tag_pattern, res.text)
-
-    if a_tag_matches:
-        snyk_short_name = a_tag_matches[0].strip()
-        return snyk_short_name
+    try:
+        res = requests.get(f"https://security.snyk.io/vuln/?search={cve_id}")
+        a_tag_pattern = r'data-snyk-test="vuln table title".*>([^"]+)<!----><!---->'
+        a_tag_matches = re.findall(a_tag_pattern, res.text)
+        if a_tag_matches:
+            return a_tag_matches[0].strip()
+    except requests.RequestException as e:
+        print(colored(f"Error fetching Snyk data: {e}", "red"))
+    return None

def fetch_cve_details(cpe_string):
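+    # Query the NVD CVE 2.0 API for the given CPE, then enrich each CVE with Exploit-DB and Snyk data.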
    base_url = "https://services.nvd.nist.gov/rest/json/cves/2.0"
+    all_cve_details = []

-    cves = []
-
-    for index, cpe_string in enumerate(cpe_strings[:2]):
+    for cpe_string in [cpe_string]:
        cve_query_string = ":".join(cpe_string.split(":")[1:5])
        url = f"{base_url}?cpeName=cpe:{cpe_string}"
        print(colored(f"Querying: {url}", "red"))

-        response = requests.get(url)
-
-        if response.status_code != 200:
-            print(colored(f"Error: Unable to retrieve CVE data for CPE: {cpe_string}. Status code: {response.status_code}", "red"))
-            return []
-
        try:
+            response = requests.get(url)
+            response.raise_for_status()
            data = response.json()
+            for cve_item in data.get("vulnerabilities", []):
+                cve_id = cve_item.get("cve", {}).get("id", "N/A")
+                description_text = cve_item.get("cve", {}).get("descriptions", [{}])[0].get("value", "No description")
+                link = f"https://nvd.nist.gov/vuln/detail/{cve_id}"
+                weaknesses = [desc.get("value", "No description") for problem_type in cve_item.get("cve", {}).get("weaknesses", []) for desc in problem_type.get("description", [])]
+
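+                # Flag whether a public exploit exists in the local Exploit-DB index.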
+                pEdb = PyExploitDb()
+                pEdb.debug = False
+                pEdb.openFile()
+                exploit_status = "Public Exploit Found over Exploit-DB" if pEdb.searchCve(cve_id) else "No Public Exploit Found over Exploit-DB"
+
+                snyk_short_name = synk_db(cve_id)
+
+                all_cve_details.append({
+                    "CVE ID": cve_id,
+                    "Short Name": snyk_short_name,
+                    "Description": description_text,
+                    "Weaknesses": ", ".join(weaknesses),
+                    "Link": link,
+                    "Exploit Status": exploit_status
+                })
+        except requests.RequestException as e:
+            print(colored(f"Request error: {e}", "red"))
        except json.JSONDecodeError:
            print(colored(f"Error decoding JSON for CPE: {cpe_string}. Skipping.", "red"))
-            return []
-
-        for cve_item in data["vulnerabilities"]:
-
-            all_cve_details = []
-
-            cve_id = cve_item["cve"]["id"]
-            description_text = cve_item["cve"]["descriptions"][0]["value"]
-            link = f"https://nvd.nist.gov/vuln/detail/{cve_id}"
-
-            weaknesses = []
-            for problem_type in cve_item["cve"]["weaknesses"]:
-                for description in problem_type["description"]:
-                    weaknesses.append(description["value"])
-
-            pEdb = PyExploitDb()
-            pEdb.debug = False
-            pEdb.openFile()
-            exploit_status = pEdb.searchCve(cve_id)
-            if exploit_status:
-                exploit_status = "Public Exploit Found over Exploit-DB"
-            else:
-                exploit_status = "No Public Exploit Found over Exploit-DB"
-
-            snyk_short_name = synk_db(cve_id)
-
-            all_cve_details.append({
-                "CVE ID": cve_id,
-                "Short Name": snyk_short_name,
-                "Description": description_text,
-                "Weaknesses": ", ".join(weaknesses),
-                "Link": link,
-                "Exploit Status": exploit_status
-            })

    return all_cve_details

def fetch_github_urls(cve_id):
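+    # Query the PoC-in-GitHub API for public proof-of-concept repositories.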
    api_url = f"https://poc-in-github.motikan2010.net/api/v1/?cve_id={cve_id}"
-    response = requests.get(api_url)
-
-    if response.status_code == 200:
-        data = response.json()
-        if "pocs" in data and data["pocs"]:
-            github_urls = [poc["html_url"] for poc in data["pocs"]]
-            return github_urls
+    try:
+        response = requests.get(api_url)
+        if response.status_code == 200:
+            data = response.json()
+            if "pocs" in data and data["pocs"]:
+                return [poc["html_url"] for poc in data["pocs"]]
+    except requests.RequestException as e:
+        print(colored(f"Error fetching GitHub URLs: {e}", "red"))
    return []

def search_and_extract_download_links(product_name):
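+    # Search Packet Storm Security and keep only .txt download links from the results.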
    search_url = f"https://packetstormsecurity.com/search/?q={product_name}"
-    response = requests.get(search_url)
-
-    download_links = []
-
-    if response.status_code == 200:
-        soup = BeautifulSoup(response.text, 'html.parser')
-        results = soup.find_all('a', href=True)
-
-        for result in results:
-            href = result['href']
-            if '/files/download/' in href and href.endswith('.txt'):
-                download_links.append(f"https://packetstormsecurity.com{href}")
-
-    if not download_links:
-        print(colored("No download links found on Packet Storm Security.", "green", attrs=["underline"]))
-        return None
-
-    return download_links
+    try:
+        response = requests.get(search_url)
+        if response.status_code == 200:
+            soup = BeautifulSoup(response.text, 'html.parser')
+            results = soup.find_all('a', href=True)
+            download_links = [f"https://packetstormsecurity.com{result['href']}" for result in results if '/files/download/' in result['href'] and result['href'].endswith('.txt')]
+            if not download_links:
+                print(colored("No download links found on Packet Storm Security.", "green", attrs=["underline"]))
+            return download_links
+    except requests.RequestException as e:
+        print(colored(f"Error fetching download links: {e}", "red"))
+    return []

def search_marc_info(search_term):
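+    # Search the full-disclosure list archive on marc.info for posts matching the term.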
-    # Make a GET request to the URL
    url = f"https://marc.info/?l=full-disclosure&s={search_term}"
-    response = requests.get(url)
-
-    # Check if the request was successful
-    if response.status_code == 200:
-        # Parse the HTML content of the page
-        soup = BeautifulSoup(response.text, 'html.parser')
-
-        # Check if the response contains "No hits found for"
-        if "No hits found for" in soup.get_text():
-            print(colored("No matching exploits found.", "red", attrs=["underline"]))
-        else:
-            # Find all <a> tags within <pre> tags, excluding those with "full-disc" in the text
-            post_links = soup.find('pre').find_all('a', string=lambda text: "full-disc" not in text)
-
-            # Print all names and links
-            if post_links:
-                results = []
-                for link in post_links:
-                    name = link.get_text(strip=True)
-                    link_url = "https://marc.info" + link['href']
-                    results.append({"Name": name, "Link": link_url})
-                return results
+    try:
+        response = requests.get(url)
+        if response.status_code == 200:
+            soup = BeautifulSoup(response.text, 'html.parser')
+            if "No hits found for" in soup.get_text():
+                print(colored("No matching exploits found.", "red", attrs=["underline"]))
            else:
-                print(colored("No matching exploits found.", "green"))
-    else:
-        print(colored("Failed to retrieve the web page.", "red"))
-        print(f"Status code: {response.status_code}")
-        return None
+                post_links = soup.find('pre').find_all('a', string=lambda text: "full-disc" not in text)
+                results = [{"Name": link.get_text(strip=True), "Link": "https://marc.info" + link['href']} for link in post_links]
+                if results:
+                    return results
+                else:
+                    print(colored("No matching exploits found.", "green"))
+        else:
+            print(colored(f"Failed to retrieve the web page. Status code: {response.status_code}", "red"))
+    except requests.RequestException as e:
+        print(colored(f"Error fetching Marc.Info data: {e}", "red"))
+    return None

if __name__ == "__main__":
    print(colored("CVE and Exploit Finder Script", "green", attrs=["bold"]))
@@ -193,19 +163,15 @@ def search_marc_info(search_term):
                print(colored(f"Link: {result['Link']}", "blue"))
                github_urls = fetch_github_urls(cve_id)
                if github_urls:
-                    print(colored("Public Exploit/ POC Over Github found:", "red"))
+                    print(colored("Public Exploit/POC Over Github found:", "red"))
                    for url in github_urls:
                        print(colored(f"  {url}", "blue"))
                else:
-                    print(colored("Public Exploit/ POC Over Github not found, you might need to check manually", "green"))
-                if result["Exploit Status"] == "Public Exploit Found":
-                    print(colored(f"Exploit Status: {result['Exploit Status']}", "red"))
-                else:
-                    print(colored(f"Exploit Status: {result['Exploit Status']}", "green"))
+                    print(colored("Public Exploit/POC Over Github not found, you might need to check manually", "green"))
+                print(colored(f"Exploit Status: {result['Exploit Status']}", "red" if result["Exploit Status"] == "Public Exploit Found over Exploit-DB" else "green"))
    else:
        print(colored("No CPEs found for the provided component and version.", "red"))

-    # Search for download links on Packet Storm Security even if no CPEs were found
    download_links = search_and_extract_download_links(component)

    if download_links:
@@ -215,7 +181,6 @@ def search_marc_info(search_term):
    else:
        print(colored("No download links found on Packet Storm Security.", "red", attrs=["underline"]))

-    # Search Marc.Info
    search_term_marc = f"{component} {version}"
    print("\nUsing keyword " + search_term_marc + " for lookup...")
    marc_results = search_marc_info(search_term_marc)
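
For quick reference, a minimal sketch of how these functions chain together after this commit. Assumptions: the script's own imports (requests, re, json, termcolor.colored) are in scope, and the component/version values below are purely illustrative:

    component, version = "apache", "2.4.49"   # hypothetical inputs
    # find_cpes() returns CPE strings without the leading "cpe:" prefix,
    # which fetch_cve_details() adds back when querying NVD.
    for cpe_string in find_cpes(component, version)[:1]:
        for cve in fetch_cve_details(cpe_string):
            print(cve["CVE ID"], "-", cve["Exploit Status"])
            for poc_url in fetch_github_urls(cve["CVE ID"]):
                print("  PoC:", poc_url)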