Skip to content

Commit

Permalink
Merge pull request #18 from krystianbajno/feature/rapid7
Browse files Browse the repository at this point in the history
Added Rapid7
  • Loading branch information
krystianbajno authored Nov 23, 2024
2 parents f4cc1ab + cc1b450 commit af2584f
Show file tree
Hide file tree
Showing 7 changed files with 178 additions and 8 deletions.
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ python3 cveseeker.py cve-2024 --critical --high --medium --low # include critica
- [www.packetstormsecurity.com](https://packetstormsecurity.com) (IMPLEMENTED)
- [vulners.com](https://vulners.com/search) (IMPLEMENTED)
- [www.cisa.gov - KEV](https://www.cisa.gov/known-exploited-vulnerabilities-catalog) (IMPLEMENTED)
- [www.rapid7.com](https://www.rapid7.com) (WIP)
- [www.rapid7.com](https://www.rapid7.com) (IMPLEMENTED)
- [cve.mitre.org](https://cve.mitre.org/cve/search_cve_list.html) (WIP)
- [github.com PoC](https://github.com/nomi-sec/PoC-in-GitHub) (IMPLEMENTED)
- [github.com advisories](https://github.com/advisories) (IMPLEMENTED)
Expand Down
1 change: 1 addition & 0 deletions config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ providers:
GitHubAdvisoryAPI: true
VulnersAPI: false
CISAKEVAPI: true
RAPID7: true

enrichment:
sources:
Expand Down
4 changes: 3 additions & 1 deletion providers/search_provider.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
from services.api.sources.nist import NistAPI
from services.api.sources.opencve import OpenCVEAPI
from services.api.sources.packetstormsecurity import PacketStormSecurityAPI
from services.api.sources.rapid7 import RAPID7
from services.api.sources.vulners import VulnersAPI

from typing import Dict
Expand All @@ -26,7 +27,8 @@ def __init__(self, playwright_enabled=False, config_file='config.yaml'):
'ExploitDBAPI': ExploitDBAPI,
'GitHubAdvisoryAPI': GitHubAdvisoryAPI,
'VulnersAPI': VulnersAPI,
"CISAKEVAPI": CISAKEVAPI
"CISAKEVAPI": CISAKEVAPI,
"RAPID7": RAPID7
}

def make_service_api(self) -> SearchManager:
Expand Down
1 change: 0 additions & 1 deletion services/api/sources/github.py

This file was deleted.

156 changes: 155 additions & 1 deletion services/api/sources/rapid7.py
Original file line number Diff line number Diff line change
@@ -1 +1,155 @@
# https://www.rapid7.com/db/
import httpx
from bs4 import BeautifulSoup
from typing import List
from dateutil import parser as dateutil_parser
import re
from concurrent.futures import ThreadPoolExecutor, as_completed

from models.vulnerability import Vulnerability
from services.api.source import Source
from services.vulnerabilities.factories.vulnerability_factory import VulnerabilityFactory, DEFAULT_VALUES


class RAPID7(Source):
    """Scrape vulnerability data from the Rapid7 vulnerability database.

    Paginates through https://www.rapid7.com/db/ search results and fetches
    each result's detail page concurrently to extract the description,
    affected components, and reference URLs.
    """

    def __init__(self):
        self.base_url = "https://www.rapid7.com"
        self.search_url = f"{self.base_url}/db/"
        # Shared HTTP client; re-created by search() if a prior call closed it.
        self.session = httpx.Client()

    def search(self, keywords: List[str], max_results=100) -> List[Vulnerability]:
        """Search Rapid7's DB for *keywords*, returning up to *max_results* hits.

        A falsy max_results (None/0) falls back to 100. HTTP errors, missing
        result markup, or a missing next-page link end pagination early and
        return whatever was collected so far.
        """
        vulnerabilities = []
        if not max_results:
            max_results = 100

        # Bug fix: the original closed the client at the end of search(), so a
        # second search() on the same instance raised on a closed client.
        if self.session.is_closed:
            self.session = httpx.Client()

        search_query = "+".join(keywords)
        page = 1
        results_count = 0

        try:
            while results_count < max_results:
                url = f"{self.search_url}?q={search_query}&type=nexpose&page={page}"
                response = self.session.get(url)

                if response.status_code != 200:
                    break

                soup = BeautifulSoup(response.text, 'html.parser')
                results_section = soup.find('section', class_='vulndb__results')
                if not results_section:
                    break

                result_links = results_section.find_all('a', class_='vulndb__result resultblock')
                if not result_links:
                    break

                # Only submit as many detail fetches as we can still use; the
                # original submitted every link on the page regardless.
                remaining = max_results - results_count
                with ThreadPoolExecutor(max_workers=10) as executor:
                    futures = [
                        executor.submit(self.process_vulnerability_link, link)
                        for link in result_links[:remaining]
                    ]
                    # Workers return a Vulnerability or None (on fetch/parse failure).
                    for future in as_completed(futures):
                        result = future.result()
                        if result:
                            vulnerabilities.append(result)
                            results_count += 1
                            if results_count >= max_results:
                                break

                # Advance only if the pagination widget links to the next page
                # number ('string=' is the modern name for BS4's 'text=' kwarg).
                pagination = soup.find('ul', class_='pagination')
                next_page = pagination.find('a', string=str(page + 1)) if pagination else None
                if next_page:
                    page += 1
                else:
                    break
        finally:
            # Always release the connection pool, even on unexpected errors.
            self.session.close()

        return vulnerabilities

    def process_vulnerability_link(self, result_link):
        """Build a Vulnerability from one search-result anchor.

        Fetches the detail page for the result and returns a populated
        Vulnerability, or None when the title has no CVE id or any
        fetch/parse step fails (best-effort scraping).
        """
        try:
            title = result_link.find('div', class_='resultblock__info-title').text.strip()
            href = result_link['href']
            detail_url = f"{self.base_url}{href}"

            # Results without a CVE id (e.g. module entries) are skipped.
            cve_id = self.extract_cve_id_from_title(title)
            if not cve_id:
                return None

            # Meta line looks like "Published: <date> | Severity: <score>".
            meta_info = result_link.find('div', class_='resultblock__info-meta').text.strip()
            published_date = DEFAULT_VALUES['date']
            base_score = DEFAULT_VALUES['base_score']

            if "Published:" in meta_info:
                date_part = meta_info.split("Published:")[1].split("|")[0].strip()
                published_date = dateutil_parser.parse(date_part).strftime('%Y-%m-%d')

            if "Severity:" in meta_info:
                score_part = meta_info.split("Severity:")[1].strip()
                try:
                    base_score = float(score_part)
                except ValueError:
                    pass  # keep the default score when the site shows junk

            base_severity = self.calculate_severity_from_score(base_score)

            detail_response = self.session.get(detail_url)
            if detail_response.status_code != 200:
                return None

            detail_soup = BeautifulSoup(detail_response.text, 'html.parser')

            description_div = detail_soup.find('div', class_='vulndb__detail-content bottom-border')
            description = ""
            if description_div:
                description_paragraphs = description_div.find_all('p')
                description = " ".join(p.text.strip() for p in description_paragraphs if p.text.strip())

            components_section = detail_soup.find('section', class_='vulndb__references bottom-border')
            vulnerable_components = []
            if components_section:
                components_list = components_section.find_all('li')
                vulnerable_components = [li.text.strip() for li in components_list]

            # De-duplicate reference links via a set before materializing.
            references_div = detail_soup.find('div', class_='vulndb__related-content')
            reference_urls = set()
            if references_div:
                reference_links = references_div.find_all('a', href=True)
                reference_urls = {link['href'] for link in reference_links}

            vulnerability = VulnerabilityFactory.make(
                id=cve_id,
                source=self.__class__.__name__,
                url=detail_url,
                date=published_date,
                title=title,
                reference_urls=list(reference_urls),
                base_score=str(base_score),
                base_severity=base_severity,
                description=description,
                vulnerable_components=vulnerable_components,
                weaknesses=[],
            )

            return vulnerability
        except Exception:
            # Best-effort scraper: any malformed markup or network error for a
            # single result is dropped rather than aborting the whole search.
            return None

    @staticmethod
    def extract_cve_id_from_title(title: str) -> "str | None":
        """Return the first CVE id (e.g. 'CVE-2024-1234') in *title*, else None."""
        match = re.search(r'CVE-\d{4}-\d{4,7}', title, re.IGNORECASE)
        return match.group(0) if match else None

    @staticmethod
    def calculate_severity_from_score(score: float) -> str:
        """Map a CVSS base score to its severity label (CVSS v3 bands).

        Anything below 5.0 — including the default score when the site shows
        none — is reported as "Low".
        """
        if score >= 9.0:
            return "Critical"
        elif score >= 7.0:
            return "High"
        elif score >= 5.0:
            return "Medium"
        else:
            return "Low"
3 changes: 1 addition & 2 deletions services/search/engine/filtering.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
def filter_by_severity(vulnerabilities, severities):
    """Keep only vulnerabilities whose severity labels overlap *severities*.

    *severities* is a collection of lowercase severity names; a vulnerability
    matches when any of its entries' 'severity' values (case-insensitive)
    appears in it.
    """
    def matches(vuln):
        labels = {entry['severity'].lower() for entry in vuln.severities}
        return bool(labels.intersection(severities))

    return [vuln for vuln in vulnerabilities if matches(vuln)]
Original file line number Diff line number Diff line change
Expand Up @@ -13,8 +13,9 @@ def process(vulnerabilities: List[Vulnerability], search_terms: List[str]) -> Li
]

unique_vulnerabilities = VulnerabilityIntelligenceProcessor._remove_duplicates(validated_vulnerabilities)

sorted_vulnerabilities = VulnerabilityIntelligenceProcessor._sort_by_id(unique_vulnerabilities)
remapped_moderate_to_medium = VulnerabilityIntelligenceProcessor._remapped_moderate_to_medium(unique_vulnerabilities)

sorted_vulnerabilities = VulnerabilityIntelligenceProcessor._sort_by_id(remapped_moderate_to_medium)

vulnerability_intelligence_list = VulnerabilityIntelligenceFactory.make(sorted_vulnerabilities)

Expand All @@ -38,6 +39,20 @@ def _remove_duplicates(vulnerabilities: List[Vulnerability]) -> List[Vulnerabili
unique_vulnerabilities.append(vuln)

return unique_vulnerabilities


@staticmethod
def _remapped_moderate_to_medium(vulnerabilities: List[Vulnerability]) -> List[Vulnerability]:
    """Normalize the vendor-specific 'Moderate' severity label to 'Medium'.

    Mutates matching Vulnerability objects in place and returns a new list
    containing every input vulnerability in the original order.
    """
    for vuln in vulnerabilities:
        # Some sources report "Moderate" where the rest of the pipeline
        # expects the CVSS label "Medium".
        if vuln.base_severity.lower() == "moderate":
            vuln.base_severity = "Medium"
    # Original built the list via an if/else where both branches appended the
    # same object; every item is kept, so a plain copy is equivalent.
    return list(vulnerabilities)


@staticmethod
def _sort_by_id(vulnerabilities: List[Vulnerability]) -> List[Vulnerability]:
Expand Down

0 comments on commit af2584f

Please sign in to comment.