Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 14 additions & 6 deletions bbot/core/helpers/diff.py
Original file line number Diff line number Diff line change
Expand Up @@ -233,9 +233,21 @@ async def compare(
if item in subject_response.text:
reflection = True
break
diff_reasons = await self.parent_helper.run_in_executor_cpu(
self._compare_sync,
subject_response,
subject,
)

if not diff_reasons:
return (True, [], reflection, subject_response)
else:
return (False, diff_reasons, reflection, subject_response)

def _compare_sync(self, subject_response, subject):
"""CPU-bound comparison work offloaded from the event loop."""
try:
subject_json = xmltodict.parse(subject_response.text)

except ExpatError:
log.debug(f"Can't HTML parse for {subject.split('?')[0]}. Switching to text parsing as a backup")
subject_json = subject_response.text.split("\n")
Expand All @@ -255,13 +267,9 @@ async def compare(

if self.compare_body(self.baseline_json, subject_json) is False:
log.debug("difference in HTML body, no match")

diff_reasons.append("body")

if not diff_reasons:
return (True, [], reflection, subject_response)
else:
return (False, diff_reasons, reflection, subject_response)
return diff_reasons

async def canary_check(self, url, mode, rounds=3):
"""
Expand Down
60 changes: 36 additions & 24 deletions bbot/core/helpers/web/web.py
Original file line number Diff line number Diff line change
Expand Up @@ -328,43 +328,55 @@ async def request_batch(self, urls, threads=10, **kwargs):
>>> async for url, response, tracker in self.helpers.request_batch(reqs):
>>> ...
"""
tasks = {}
semaphore = asyncio.Semaphore(threads)
entries = []
has_tracker = False

async def _do_request(_url, _kwargs, _tracker):
async with semaphore:
return _url, await self.request(_url, **_kwargs), _tracker

for entry in urls:
if isinstance(entry, str):
url, req_kwargs, tracker = entry, kwargs, None
entries.append((entry, kwargs, None))
elif isinstance(entry, tuple):
url = entry[0]
req_kwargs = entry[1] if len(entry) > 1 and isinstance(entry[1], dict) else kwargs
tracker = entry[2] if len(entry) > 2 else None
if tracker is not None:
has_tracker = True
entries.append((url, req_kwargs, tracker))
else:
url, req_kwargs, tracker = str(entry), kwargs, None
task = asyncio.create_task(_do_request(url, req_kwargs, tracker))
tasks[task] = True
entries.append((str(entry), kwargs, None))

total = len(entries)
if total == 0:
return
work_queue = asyncio.Queue()
yield_queue = asyncio.Queue()

async def _worker():
while True:
url, req_kwargs, tracker = await work_queue.get()
try:
response = await self.request(url, **req_kwargs)
yield_queue.put_nowait((url, response, tracker))
except (RuntimeError, OSError, ConnectionError):
yield_queue.put_nowait((url, None, tracker))
finally:
work_queue.task_done()

workers = [asyncio.create_task(_worker()) for _ in range(min(threads, total))]
try:
while tasks:
finished, _ = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED)
for task in finished:
del tasks[task]
url, response, tracker = task.result()
if has_tracker:
yield url, response, tracker
else:
yield url, response
for url, req_kwargs, tracker in entries:
await work_queue.put((url, req_kwargs, tracker))

completed = 0
while completed < total:
url, response, tracker = await yield_queue.get()
completed += 1
if has_tracker:
yield url, response, tracker
else:
yield url, response
finally:
for task in tasks:
task.cancel()
if tasks:
await asyncio.gather(*tasks, return_exceptions=True)
for w in workers:
w.cancel()
await asyncio.gather(*workers, return_exceptions=True)

async def download(self, url, **kwargs):
"""
Expand Down
2 changes: 1 addition & 1 deletion bbot/modules/badsecrets.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ async def setup(self):

@property
def _module_threads(self):
return max(1, multiprocessing.cpu_count() - 1)
return min(4, max(1, multiprocessing.cpu_count() - 1))

async def handle_event(self, event):
resp_body = event.data.get("body", None)
Expand Down
11 changes: 5 additions & 6 deletions bbot/modules/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -1857,16 +1857,15 @@ async def _worker(self):
continue

acceptable = True
async with self._task_counter.count(f"event_precheck({event})"):
precheck_pass, reason = self._event_precheck(event)
precheck_pass, reason = self._event_precheck(event)
if not precheck_pass:
self.debug(f"Not intercepting {event} because precheck failed ({reason})")
acceptable = False
async with self._task_counter.count(f"event_postcheck({event})"):
else:
postcheck_pass, reason = await self._event_postcheck(event)
if not postcheck_pass:
self.debug(f"Not intercepting {event} because postcheck failed ({reason})")
acceptable = False
if not postcheck_pass:
self.debug(f"Not intercepting {event} because postcheck failed ({reason})")
acceptable = False

# whether to pass the event on to the rest of the scan
# defaults to true, unless handle_event returns False
Expand Down
2 changes: 1 addition & 1 deletion bbot/modules/iis_shortnames.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ class iis_shortnames(BaseModule):
}
in_scope_only = True

_module_threads = 8
_module_threads = 4

async def detect(self, target):
technique = None
Expand Down
2 changes: 1 addition & 1 deletion bbot/modules/internal/excavate.py
Original file line number Diff line number Diff line change
Expand Up @@ -348,7 +348,7 @@ class excavateTestRule(ExcavateRule):
scope_distance_modifier = None
accept_dupes = False

_module_threads = 8
_module_threads = 6

yara_rule_name_regex = re.compile(r"rule\s(\w+)\s{")
yara_rule_regex = re.compile(r"(?s)((?:rule\s+\w+\s*{[^{}]*(?:{[^{}]*}[^{}]*)*[^{}]*(?:/\S*?}[^/]*?/)*)*})")
Expand Down
15 changes: 6 additions & 9 deletions bbot/modules/paramminer_cookies.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ class paramminer_cookies(paramminer_headers):
options_desc = {"wordlist": "Define the wordlist to be used to derive cookies"}
scanned_hosts = []
boring_words = set()
_module_threads = 12
_module_threads = 4
in_scope_only = True
compare_mode = "cookie"
default_wordlist = "paramminer_parameters.txt"
Expand All @@ -36,11 +36,8 @@ async def check_batch(self, compare_helper, url, cookie_list):
cookies = {p: self.rand_string(14) for p in cookie_list}
return await compare_helper.compare(url, cookies=cookies, check_reflection=(len(cookie_list) == 1))

def gen_count_args(self, url):
cookie_count = 40
while 1:
if cookie_count < 0:
break
fake_cookies = {self.rand_string(14): self.rand_string(14) for _ in range(0, cookie_count)}
yield cookie_count, (url,), {"cookies": fake_cookies}
cookie_count -= 5
max_count = 40

def build_count_test_request(self, url, count):
    """Build a probe request carrying `count` randomly-named cookies.

    Returns an (args, kwargs) pair suitable for helpers.request(): the
    positional args are just the target URL, and the kwargs carry the
    fake cookie jar.
    """
    fake_cookies = {}
    for _ in range(count):
        # random name -> random value; 14 chars keeps collisions unlikely
        fake_cookies[self.rand_string(14)] = self.rand_string(14)
    return (url,), {"cookies": fake_cookies}
13 changes: 5 additions & 8 deletions bbot/modules/paramminer_getparams.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,11 +36,8 @@ async def check_batch(self, compare_helper, url, getparam_list):
self.helpers.add_get_params(url, test_getparams).geturl(), check_reflection=(len(getparam_list) == 1)
)

def gen_count_args(self, url):
getparam_count = 40
while 1:
if getparam_count < 0:
break
fake_getparams = {self.rand_string(14): self.rand_string(14) for _ in range(0, getparam_count)}
yield getparam_count, (self.helpers.add_get_params(url, fake_getparams).geturl(),), {}
getparam_count -= 5
max_count = 40

def build_count_test_request(self, url, count):
    """Build a probe request carrying `count` randomly-named GET parameters.

    Returns an (args, kwargs) pair suitable for helpers.request(): the
    single positional arg is the target URL with the fake parameters
    appended; no extra kwargs are needed.
    """
    fake_getparams = {}
    for _ in range(count):
        # random name -> random value; 14 chars keeps collisions unlikely
        fake_getparams[self.rand_string(14)] = self.rand_string(14)
    probe_url = self.helpers.add_get_params(url, fake_getparams).geturl()
    return (probe_url,), {}
34 changes: 21 additions & 13 deletions bbot/modules/paramminer_headers.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@ class paramminer_headers(BaseModule):
"zx-request-id",
"zx-timer",
}
_module_threads = 12
_module_threads = 4
in_scope_only = True
compare_mode = "header"
default_wordlist = "paramminer_headers.txt"
Expand Down Expand Up @@ -199,27 +199,35 @@ async def handle_event(self, event):
self.debug(f"Encountered HttpCompareError: [{e}] for URL [{event.url}]")
await self.process_results(event, results)

max_count = 95

async def count_test(self, url):
baseline = await self.helpers.request(url)
if baseline is None:
return
if str(baseline.status_code)[0] in {"4", "5"}:
return
for count, args, kwargs in self.gen_count_args(url):

# Binary search for the maximum count the server accepts
lo, hi = 0, self.max_count
result = None
while lo <= hi:
mid = (lo + hi) // 2
if mid == 0:
break
args, kwargs = self.build_count_test_request(url, mid)
r = await self.helpers.request(*args, **kwargs)
if r is not None and str(r.status_code)[0] not in {"4", "5"}:
return count
result = mid
lo = mid + 1
else:
hi = mid - 1
return result

def gen_count_args(self, url):
header_count = 95
while 1:
if header_count < 0:
break
fake_headers = {}
for i in range(0, header_count):
fake_headers[self.rand_string(14)] = self.rand_string(14)
yield header_count, (url,), {"headers": fake_headers}
header_count -= 5
def build_count_test_request(self, url, count):
    """Build a test request with `count` fake parameters. Returns (args, kwargs) for helpers.request()."""
    fake_headers = {}
    for _ in range(count):
        # random name -> random value; 14 chars keeps collisions unlikely
        fake_headers[self.rand_string(14)] = self.rand_string(14)
    return (url,), {"headers": fake_headers}

async def binary_search(self, compare_helper, url, group, reasons=None, reflection=False):
if reasons is None:
Expand Down
50 changes: 25 additions & 25 deletions bbot/modules/web_brute.py
Original file line number Diff line number Diff line change
Expand Up @@ -347,33 +347,33 @@ async def execute_fuzz(
self.debug("Found canary in results, all hits are likely false positives — aborting")
return

# Mid-scan validation for each hit
for hit in hits:
if not baseline and ext_filter:
canary_word = "".join(random.choice(string.ascii_lowercase) for _ in range(4))
canary_url = f"{url}{prefix}{canary_word}{suffix}{ext}"
canary_configs = [
blasthttp.BatchConfig(
canary_url,
headers=headers,
timeout=self.scan.http_timeout,
retries=0,
verify_certs=False,
follow_redirects=False,
proxy=proxy,
# Mid-scan validation: one canary check per extension
if hits and not baseline and ext_filter:
canary_word = "".join(random.choice(string.ascii_lowercase) for _ in range(4))
canary_url = f"{url}{prefix}{canary_word}{suffix}{ext}"
canary_configs = [
blasthttp.BatchConfig(
canary_url,
headers=headers,
timeout=self.scan.http_timeout,
retries=0,
verify_certs=False,
follow_redirects=False,
proxy=proxy,
)
]
canary_batch = await self.blast_client.request_batch(canary_configs, 1, rate_limit=self.rate)
if canary_batch and canary_batch[0].success:
canary_metrics = self._batch_response_metrics(canary_batch[0].response)
if not self._is_baseline_match(canary_metrics, ext_filter):
self.verbose(
f"Would have reported {len(hits)} hit(s), but mid-scan baseline check failed. "
"This could be due to a WAF turning on mid-scan."
)
]
canary_batch = await self.blast_client.request_batch(canary_configs, 1, rate_limit=self.rate)
if canary_batch and canary_batch[0].success:
canary_metrics = self._batch_response_metrics(canary_batch[0].response)
if not self._is_baseline_match(canary_metrics, ext_filter):
self.verbose(
f"Would have reported [{hit['url']}], but mid-scan baseline check failed. "
"This could be due to a WAF turning on mid-scan."
)
self.verbose(f"Aborting the current run against [{url}]")
return
self.verbose(f"Aborting the current run against [{url}]")
return

for hit in hits:
yield hit

def generate_templist(self, prefix=None):
Expand Down
Loading
Loading