Here are the examples of the python api wapitiCore.net.web.Request taken from open source projects. By voting up you can indicate which examples are most useful and appropriate.
180 Examples
3
Source : test_mod_backup.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def test_false_positive():
    """ModuleBackup must decline to attack when the server answers 200 to everything."""
    respx.get("http://perdu.com/config.php").mock(return_value=httpx.Response(200, text="Hello there"))
    respx.get(url__startswith="http://perdu.com/").mock(return_value=httpx.Response(200, text="Default webpage"))

    fake_persister = AsyncMock()

    target = Request("http://perdu.com/config.php")
    target.path_id = 1
    target.set_headers({"content-type": "text/html"})

    crawler = AsyncCrawler(Request("http://perdu.com/"), timeout=1)
    backup_module = ModuleBackup(crawler, fake_persister, {"timeout": 10, "level": 2}, Event())
    backup_module.do_get = True

    # Every probe returns 200, so backup-file detection would be meaningless here
    assert not await backup_module.must_attack(target)
    await crawler.close()
3
Source : test_mod_file.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def test_warning_false_positive():
    """ModuleFile should report exactly one finding for the warn-mode inclusion page."""
    fake_persister = AsyncMock()

    target = Request("http://127.0.0.1:65085/inclusion.php?yolo=warn&f=toto")
    target.path_id = 42

    crawler = AsyncCrawler(Request("http://127.0.0.1:65085/"))
    file_module = ModuleFile(crawler, fake_persister, {"timeout": 10, "level": 2}, Event())
    file_module.do_post = False

    await file_module.attack(target)

    assert fake_persister.add_payload.call_count == 1
    reported = fake_persister.add_payload.call_args_list[0][1]["request"]
    # The successful injection must target the "f" parameter with /etc/services
    assert ["f", "/etc/services"] in reported.get_params
    await crawler.close()
@pytest.mark.asyncio
3
Source : test_mod_htp.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def test_must_attack():
    """ModuleHtp attacks GET requests but skips POST ones."""
    fake_persister = AsyncMock()

    # The module reads its hash database from the wapiti config directory
    home_dir = os.getenv("HOME") or os.getenv("USERPROFILE")
    base_dir = os.path.join(home_dir, ".wapiti")
    fake_persister.CONFIG_DIR = os.path.join(base_dir, "config")

    target = Request("http://perdu.com/")
    target.path_id = 1

    crawler = AsyncCrawler(Request("http://perdu.com/"))
    htp_module = ModuleHtp(crawler, fake_persister, {"timeout": 10, "level": 2}, Event())

    assert await htp_module.must_attack(Request("http://perdu.com", method="POST")) is False
    assert await htp_module.must_attack(Request("http://perdu.com", method="GET")) is True
@pytest.mark.asyncio
3
Source : test_mod_redirect.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def test_redirect_detection():
    """ModuleRedirect must flag the vulnerable "url" parameter and keep the others intact."""
    fake_persister = AsyncMock()

    target = Request("http://127.0.0.1:65080/open_redirect.php?yolo=nawak&url=toto")
    crawler = AsyncCrawler(Request("http://127.0.0.1:65080/"))
    redirect_module = ModuleRedirect(crawler, fake_persister, {"timeout": 10, "level": 2}, Event())

    await redirect_module.attack(target)

    first_call_kwargs = fake_persister.add_payload.call_args_list[0][1]
    assert first_call_kwargs["module"] == "redirect"
    assert first_call_kwargs["category"] == _("Open Redirect")
    # Only "url" gets the payload; "yolo" keeps its original value
    assert first_call_kwargs["request"].get_params == [
        ['yolo', 'nawak'],
        ['url', 'https://openbugbounty.org/']
    ]
    await crawler.close()
@pytest.mark.asyncio
3
Source : test_mod_timesql.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def test_timesql_false_positive():
    """A naturally slow endpoint must not be reported as time-based SQL injection."""
    fake_persister = AsyncMock()

    target = Request("http://127.0.0.1:65082/blind_sql.php?vuln2=hello%20there")
    target.path_id = 42

    # Short timeouts on purpose: the module must still tell latency from injection
    crawler = AsyncCrawler(Request("http://127.0.0.1:65082/"), timeout=1)
    timesql_module = ModuleTimesql(crawler, fake_persister, {"timeout": 1, "level": 1}, Event())
    timesql_module.do_post = False

    await timesql_module.attack(target)

    assert not fake_persister.add_payload.call_count
    await crawler.close()
@pytest.mark.asyncio
3
Source : test_mod_xss_advanced.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def test_title_false_positive():
    """No XSS must be reported when the title tag cannot actually be escaped."""
    # We should fail at escaping the title tag and we should be aware of it
    fake_persister = AsyncMock()

    target = Request("http://127.0.0.1:65081/title_false_positive.php?title=yolo&fixed=yes")
    target.path_id = 42

    crawler = AsyncCrawler(Request("http://127.0.0.1:65081/"))
    xss_module = ModuleXss(crawler, fake_persister, {"timeout": 10, "level": 2}, Event())
    xss_module.do_post = False

    await xss_module.attack(target)

    assert not fake_persister.add_payload.call_count
    await crawler.close()
@pytest.mark.asyncio
3
Source : test_mod_xss_advanced.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def test_script_filter_bypass():
    """The module should bypass a filter on the "<script" keyword using another tag."""
    # We should succeed at bypass the < script filter
    persister = AsyncMock()
    request = Request("http://127.0.0.1:65081/script_tag_filter.php?name=kenobi")
    request.path_id = 42
    crawler = AsyncCrawler(Request("http://127.0.0.1:65081/"))
    options = {"timeout": 10, "level": 2}
    module = ModuleXss(crawler, persister, options, Event())
    module.do_post = False
    await module.attack(request)
    assert persister.add_payload.call_count
    assert persister.add_payload.call_args_list[0][1]["parameter"] == "name"
    # Fix: the expected prefix was mangled to " < svg" by HTML-escaping during scraping;
    # the successful payload starts with an "<svg" tag (lowercased for comparison).
    assert persister.add_payload.call_args_list[0][1]["request"].get_params[0][1].lower().startswith("<svg")
    await crawler.close()
@pytest.mark.asyncio
3
Source : test_mod_xss_advanced.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def test_attr_quote_escape():
    """Payload must close a single-quoted attribute value and the opening tag."""
    # We should succeed at closing the attribute value and the opening tag
    persister = AsyncMock()
    request = Request("http://127.0.0.1:65081/attr_quote_escape.php?class=custom")
    request.path_id = 42
    crawler = AsyncCrawler(Request("http://127.0.0.1:65081/"))
    options = {"timeout": 10, "level": 2}
    module = ModuleXss(crawler, persister, options, Event())
    module.do_post = False
    await module.attack(request)
    assert persister.add_payload.call_count
    assert persister.add_payload.call_args_list[0][1]["parameter"] == "class"
    # Fix: expected prefix was mangled to "'> < /pre>" by scraping; the payload
    # closes the quote/tag then the enclosing pre element: '></pre>
    assert persister.add_payload.call_args_list[0][1]["request"].get_params[0][1].lower().startswith("'></pre>")
    await crawler.close()
@pytest.mark.asyncio
3
Source : test_mod_xss_advanced.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def test_attr_double_quote_escape():
    """Payload must close a double-quoted attribute value and the opening tag."""
    # We should succeed at closing the attribute value and the opening tag
    persister = AsyncMock()
    request = Request("http://127.0.0.1:65081/attr_double_quote_escape.php?class=custom")
    request.path_id = 42
    crawler = AsyncCrawler(Request("http://127.0.0.1:65081/"))
    options = {"timeout": 10, "level": 2}
    module = ModuleXss(crawler, persister, options, Event())
    module.do_post = False
    await module.attack(request)
    assert persister.add_payload.call_count
    assert persister.add_payload.call_args_list[0][1]["parameter"] == "class"
    # Fix: expected prefix was mangled to "\"> < /pre>" by scraping; the payload
    # closes the double quote, the tag, then the enclosing pre element: "></pre>
    assert persister.add_payload.call_args_list[0][1]["request"].get_params[0][1].lower().startswith("\"></pre>")
    await crawler.close()
@pytest.mark.asyncio
3
Source : test_mod_xss_advanced.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def test_attr_escape():
    """Payload must close an unquoted attribute context and inject a script tag."""
    # We should succeed at closing the attribute value and the opening tag
    persister = AsyncMock()
    request = Request("http://127.0.0.1:65081/attr_escape.php?state=checked")
    request.path_id = 42
    crawler = AsyncCrawler(Request("http://127.0.0.1:65081/"))
    options = {"timeout": 10, "level": 2}
    module = ModuleXss(crawler, persister, options, Event())
    module.do_post = False
    await module.attack(request)
    assert persister.add_payload.call_count
    assert persister.add_payload.call_args_list[0][1]["parameter"] == "state"
    # Fix: expected prefix was mangled to "> < script>" by scraping; the payload
    # closes the tag then opens a script element: ><script>
    assert persister.add_payload.call_args_list[0][1]["request"].get_params[0][1].lower().startswith("><script>")
    await crawler.close()
@pytest.mark.asyncio
3
Source : test_mod_xss_advanced.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def test_tag_name_escape():
    """An injection point inside a tag name should yield a payload completing a script tag."""
    # We should succeed at closing the attribute value and the opening tag
    fake_persister = AsyncMock()

    target = Request("http://127.0.0.1:65081/tag_name_escape.php?tag=textarea")
    target.path_id = 42

    crawler = AsyncCrawler(Request("http://127.0.0.1:65081/"))
    xss_module = ModuleXss(crawler, fake_persister, {"timeout": 10, "level": 2}, Event())
    xss_module.do_post = False

    await xss_module.attack(target)

    assert fake_persister.add_payload.call_count
    first_call_kwargs = fake_persister.add_payload.call_args_list[0][1]
    assert first_call_kwargs["parameter"] == "tag"
    assert first_call_kwargs["request"].get_params[0][1].lower().startswith("script>")
    await crawler.close()
@pytest.mark.asyncio
3
Source : test_mod_xss_advanced.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def test_partial_tag_name_escape():
    """Payload must self-close the partially controlled tag and inject a script tag."""
    # We should succeed at closing the attribute value and the opening tag
    persister = AsyncMock()
    request = Request("http://127.0.0.1:65081/partial_tag_name_escape.php?importance=2")
    request.path_id = 42
    crawler = AsyncCrawler(Request("http://127.0.0.1:65081/"))
    options = {"timeout": 10, "level": 2}
    module = ModuleXss(crawler, persister, options, Event())
    module.do_post = False
    await module.attack(request)
    assert persister.add_payload.call_count
    assert persister.add_payload.call_args_list[0][1]["parameter"] == "importance"
    # Fix: expected prefix was mangled to "/> < script>" by scraping; the payload
    # self-closes the current tag then opens a script element: /><script>
    assert persister.add_payload.call_args_list[0][1]["request"].get_params[0][1].lower().startswith("/><script>")
    await crawler.close()
@pytest.mark.asyncio
3
Source : test_mod_xss_advanced.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def test_xss_inside_tag_input():
    """When tags are stripped, the payload must stay inside the input tag (no < or >)."""
    persister = AsyncMock()
    request = Request("http://127.0.0.1:65081/input_text_strip_tags.php?uid=5")
    request.path_id = 42
    crawler = AsyncCrawler(Request("http://127.0.0.1:65081/"))
    options = {"timeout": 10, "level": 2}
    module = ModuleXss(crawler, persister, options, Event())
    module.do_post = False
    await module.attack(request)
    assert persister.add_payload.call_count
    assert persister.add_payload.call_args_list[0][1]["parameter"] == "uid"
    used_payload = persister.add_payload.call_args_list[0][1]["request"].get_params[0][1].lower()
    # Fix: the membership test was mangled to " < " by scraping; the payload must
    # contain no angle brackets at all, only an event-handler attribute.
    assert "<" not in used_payload and ">" not in used_payload and "autofocus/onfocus" in used_payload
    await crawler.close()
@pytest.mark.asyncio
3
Source : test_mod_xss_advanced.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def test_xss_inside_tag_link():
    """When tags are stripped, the payload must stay inside the link tag (no < or >)."""
    persister = AsyncMock()
    request = Request("http://127.0.0.1:65081/link_href_strip_tags.php?url=http://perdu.com/")
    request.path_id = 42
    crawler = AsyncCrawler(Request("http://127.0.0.1:65081/"))
    options = {"timeout": 10, "level": 2}
    module = ModuleXss(crawler, persister, options, Event())
    module.do_post = False
    await module.attack(request)
    assert persister.add_payload.call_count
    assert persister.add_payload.call_args_list[0][1]["parameter"] == "url"
    used_payload = persister.add_payload.call_args_list[0][1]["request"].get_params[0][1].lower()
    # Fix: the membership test was mangled to " < " by scraping; the payload must
    # contain no angle brackets, only href/event-handler attributes.
    assert "<" not in used_payload and ">" not in used_payload and "autofocus href onfocus" in used_payload
    await crawler.close()
@pytest.mark.asyncio
3
Source : test_mod_xss_advanced.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def test_xss_uppercase_no_script():
    """A script-keyword filter should be bypassed with an svg/onload payload."""
    persister = AsyncMock()
    request = Request("http://127.0.0.1:65081/uppercase_no_script.php?name=obiwan")
    request.path_id = 42
    crawler = AsyncCrawler(Request("http://127.0.0.1:65081/"))
    options = {"timeout": 10, "level": 2}
    module = ModuleXss(crawler, persister, options, Event())
    module.do_post = False
    await module.attack(request)
    assert persister.add_payload.call_count
    assert persister.add_payload.call_args_list[0][1]["parameter"] == "name"
    used_payload = persister.add_payload.call_args_list[0][1]["request"].get_params[0][1].lower()
    # Fix: expected prefix was mangled to " < svg onload=&" by scraping; the payload
    # starts with an svg tag whose onload handler uses an HTML entity.
    assert used_payload.startswith("<svg onload=&")
    await crawler.close()
@pytest.mark.asyncio
3
Source : test_mod_xss_advanced.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def test_frame_src_escape():
    """Escaping a frame src attribute should inject a new frame with a javascript: URL."""
    persister = AsyncMock()
    request = Request("http://127.0.0.1:65081/frame_src_escape.php?url=https://wapiti-scanner.github.io/")
    request.path_id = 42
    crawler = AsyncCrawler(Request("http://127.0.0.1:65081/"))
    options = {"timeout": 10, "level": 2}
    module = ModuleXss(crawler, persister, options, Event())
    module.do_post = False
    await module.attack(request)
    assert persister.add_payload.call_count
    assert persister.add_payload.call_args_list[0][1]["parameter"] == "url"
    used_payload = persister.add_payload.call_args_list[0][1]["request"].get_params[0][1].lower()
    # Fix: expected prefix was mangled to '"> < frame ...' by scraping; the payload
    # closes the attribute/tag then opens a new frame element.
    assert used_payload.startswith('"><frame src="javascript:alert(/w')
    await crawler.close()
@pytest.mark.asyncio
3
Source : test_mod_xss_advanced.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def test_frame_src_no_escape():
    """When the frame src value is reflected unescaped, a javascript: URL should be used."""
    fake_persister = AsyncMock()

    target = Request("http://127.0.0.1:65081/frame_src_no_escape.php?url=https://wapiti-scanner.github.io/")
    target.path_id = 42

    crawler = AsyncCrawler(Request("http://127.0.0.1:65081/"))
    xss_module = ModuleXss(crawler, fake_persister, {"timeout": 10, "level": 2}, Event())
    xss_module.do_post = False

    await xss_module.attack(target)

    assert fake_persister.add_payload.call_count
    first_call_kwargs = fake_persister.add_payload.call_args_list[0][1]
    assert first_call_kwargs["parameter"] == "url"
    injected_value = first_call_kwargs["request"].get_params[0][1].lower()
    assert injected_value.startswith("javascript:alert(/w")
    await crawler.close()
@pytest.mark.asyncio
3
Source : test_mod_xss_advanced.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def test_bad_separator_used():
    """The payload should begin by closing a double-quoted context even with confusing separators."""
    fake_persister = AsyncMock()

    target = Request("http://127.0.0.1:65081/confuse_separator.php?number=42")
    target.path_id = 42

    crawler = AsyncCrawler(Request("http://127.0.0.1:65081/"))
    xss_module = ModuleXss(crawler, fake_persister, {"timeout": 10, "level": 2}, Event())
    xss_module.do_post = False

    await xss_module.attack(target)

    assert fake_persister.add_payload.call_count
    injected_value = fake_persister.add_payload.call_args_list[0][1]["request"].get_params[0][1].lower()
    assert injected_value.startswith("\">")
    await crawler.close()
@pytest.mark.asyncio
3
Source : test_mod_xss_advanced.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def test_escape_with_style():
    """An injection inside a style block must first close the style element."""
    persister = AsyncMock()
    request = Request("http://127.0.0.1:65081/escape_with_style.php?color=green")
    request.path_id = 42
    crawler = AsyncCrawler(Request("http://127.0.0.1:65081/"))
    options = {"timeout": 10, "level": 2}
    module = ModuleXss(crawler, persister, options, Event())
    module.do_post = False
    await module.attack(request)
    assert persister.add_payload.call_count
    used_payload = persister.add_payload.call_args_list[0][1]["request"].get_params[0][1].lower()
    # Fix: expected prefix was mangled to " < /style>" by scraping; the payload
    # must begin with the closing style tag: </style>
    assert used_payload.startswith("</style>")
    await crawler.close()
@pytest.mark.asyncio
3
Source : test_mod_xss_advanced.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def test_rare_tag_and_event():
    """With common keywords filtered, an uncommon tag/event combination should be used."""
    persister = AsyncMock()
    request = Request("http://127.0.0.1:65081/filter_common_keywords.php?msg=test")
    request.path_id = 42
    crawler = AsyncCrawler(Request("http://127.0.0.1:65081/"))
    options = {"timeout": 10, "level": 2}
    module = ModuleXss(crawler, persister, options, Event())
    module.do_post = False
    await module.attack(request)
    assert persister.add_payload.call_count
    used_payload = persister.add_payload.call_args_list[0][1]["request"].get_params[0][1].lower()
    # Fix: expected prefix was mangled to " < custom..." by scraping; the payload
    # opens a custom tag (newline-separated attributes) with a rare pointer event.
    assert used_payload.startswith("<custom\nchecked\nonpointerenter=")
    await crawler.close()
@pytest.mark.asyncio
3
Source : test_mod_xss_advanced.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def test_xss_with_strong_csp():
    """A finding on a page with a strong CSP must carry the CSP warning in its info text."""
    fake_persister = AsyncMock()

    target = Request("http://127.0.0.1:65081/strong_csp.php?content=Hello%20there")
    target.path_id = 42

    crawler = AsyncCrawler(Request("http://127.0.0.1:65081/"))
    xss_module = ModuleXss(crawler, fake_persister, {"timeout": 10, "level": 2}, Event())
    xss_module.do_post = False

    await xss_module.attack(target)

    assert fake_persister.add_payload.call_count
    report_info = fake_persister.add_payload.call_args_list[0][1]["info"]
    assert _("Warning: Content-Security-Policy is present!") in report_info
    await crawler.close()
@pytest.mark.asyncio
3
Source : test_mod_xss_advanced.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def test_xss_with_weak_csp():
    """A finding on a page with a weak CSP must NOT carry the CSP warning."""
    fake_persister = AsyncMock()

    target = Request("http://127.0.0.1:65081/weak_csp.php?content=Hello%20there")
    target.path_id = 42

    crawler = AsyncCrawler(Request("http://127.0.0.1:65081/"))
    xss_module = ModuleXss(crawler, fake_persister, {"timeout": 10, "level": 2}, Event())
    xss_module.do_post = False

    await xss_module.attack(target)

    assert fake_persister.add_payload.call_count
    report_info = fake_persister.add_payload.call_args_list[0][1]["info"]
    assert _("Warning: Content-Security-Policy is present!") not in report_info
    await crawler.close()
3
Source : test_mod_xxe.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def test_direct_param():
    """ModuleXxe must report the vulnerable parameter and not the safe one."""
    # check for false positives too
    fake_persister = AsyncMock()

    target = Request("http://127.0.0.1:65084/xxe/direct/param.php?foo=bar&vuln=yolo")
    target.path_id = 42

    crawler = AsyncCrawler(Request("http://127.0.0.1:65084/"))
    xxe_module = ModuleXxe(crawler, fake_persister, {"timeout": 10, "level": 1}, Event())
    xxe_module.do_post = False

    await xxe_module.attack(target)

    assert fake_persister.add_payload.call_count
    assert fake_persister.add_payload.call_args_list[0][1]["parameter"] == "vuln"
    await crawler.close()
@pytest.mark.asyncio
3
Source : test_mod_xxe.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def test_direct_query_string():
    """ModuleXxe must report an injection via the raw query string as QUERY_STRING."""
    fake_persister = AsyncMock()

    target = Request("http://127.0.0.1:65084/xxe/direct/qs.php")
    target.path_id = 42

    crawler = AsyncCrawler(Request("http://127.0.0.1:65084/"))
    xxe_module = ModuleXxe(crawler, fake_persister, {"timeout": 10, "level": 2}, Event())
    xxe_module.do_post = False

    await xxe_module.attack(target)

    assert fake_persister.add_payload.call_count
    assert fake_persister.add_payload.call_args_list[0][1]["parameter"] == "QUERY_STRING"
    await crawler.close()
@pytest.mark.asyncio
3
Source : test_mutator.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
def test_missing_value():
    """A [FILE_NAME] payload must yield no mutation for a URL without a filename."""
    directory_request = Request(
        "http://perdu.com/directory/?high=tone",
    )
    # Filename of the target URL should be injected but it is missing here, we should not raise a mutation
    mutator = Mutator(payloads=[("[FILE_NAME]::$DATA", Flags())])
    mutations = list(mutator.mutate(directory_request))
    assert len(mutations) == 0
3
Source : test_explorer.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def test_explorer_filtering():
    """Crawl the filters page and collect every discovered resource URL."""
    crawler = AsyncCrawler(Request("http://127.0.0.1:65080/"))
    explorer = Explorer(crawler, Event())

    start_urls = deque(["http://127.0.0.1:65080/filters.html"])
    excluded_urls = []

    # NOTE(review): the scraped snippet ends here; assertions on `results`
    # presumably follow in the original test file.
    results = {resource.url async for resource in explorer.async_explore(start_urls, excluded_urls)}
3
Source : test_network_issues.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def test_chunked_timeout():
    """A chunked response that stalls must raise ReadTimeout with a 1s crawler timeout."""
    slow_url = "http://127.0.0.1:65080/chunked_timeout.php"
    slow_request = Request(slow_url)
    crawler = AsyncCrawler(slow_request, timeout=1)

    with pytest.raises(ReadTimeout):
        await crawler.async_send(slow_request)

    await crawler.close()
@pytest.mark.asyncio
3
Source : test_network_issues.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def test_timeout():
    """A response slower than the 1s crawler timeout must raise ReadTimeout."""
    slow_url = "http://127.0.0.1:65080/timeout.php"
    slow_request = Request(slow_url)
    crawler = AsyncCrawler(slow_request, timeout=1)

    with pytest.raises(ReadTimeout):
        await crawler.async_send(slow_request)

    await crawler.close()
3
Source : test_request.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def test_redirect():
    """Redirects are only followed when follow_redirects=True, and history is recorded."""
    slyfx = "http://www.slyfx.com/"
    disney = "http://www.disney.com/"

    respx.get(slyfx).mock(return_value=httpx.Response(301, headers={"Location": disney}, text="Back to disneyland"))
    respx.get(disney).mock(return_value=httpx.Response(200, text="Hello there"))

    crawler = AsyncCrawler(Request(slyfx))

    # Without follow_redirects we must stay on the 301 page with empty history
    first_page = await crawler.async_send(Request(slyfx))
    assert first_page.url == slyfx
    assert not first_page.history

    # With follow_redirects the final URL is the target and history keeps the origin
    second_page = await crawler.async_send(Request(slyfx), follow_redirects=True)
    assert second_page.url == disney
    assert second_page.history[0].url == slyfx

    await crawler.close()
3
Source : mod_backup.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def is_false_positive(self, request: Request):
    """Return True if the target directory answers success to improbable filenames.

    Probes the directory of *request* with a random ``.zip`` filename; a success
    response means any backup-file hit in that directory would be a false positive.
    Results are cached per directory in ``self.false_positive_directories``.
    """
    # Check for false positives by asking an improbable file inside the same folder
    # Use a dict to cache state for each directory
    if request.dir_name not in self.false_positive_directories:
        # Rebind request to the probe; its dir_name equals the original dir_name,
        # so it remains a valid cache key below.
        request = Request(urljoin(request.dir_name, random_string() + ".zip"))
        try:
            response = await self.crawler.async_send(request)
        except RequestError:
            self.network_errors += 1
            # Do not put anything in false_positive_directories, another luck for next time
            return False
        self.false_positive_directories[request.dir_name] = (response and response.is_success)
    return self.false_positive_directories[request.dir_name]
async def must_attack(self, request: Request):
3
Source : mod_drupal_enum.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def get_url_hash(self, root_url: str, path: str) -> Tuple[str, str]:
    """Fetch root_url+path and return (sha256 hex digest of the body, path).

    Returns ("", "") when the server answers with an error status.
    """
    probe = Request(f"{root_url}{path}")
    page = await self.crawler.async_get(probe, follow_redirects=True)

    if page.is_error:
        return "", ""

    digest = hashlib.sha256(page.content.encode()).hexdigest()
    return digest, path
async def detect_version(self, root_url):
3
Source : mod_drupal_enum.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def check_drupal(self, url):
    """Return True if any well-known Drupal path exists under *url*, else False.

    Network errors are counted but non-fatal; unexpected exceptions are logged
    and the probe continues with the next candidate path.
    """
    check_list = ['sites/', 'core/misc/drupal.js', 'misc/drupal.js', 'misc/test/error/404/ispresent.html']
    for item in check_list:
        request = Request(f'{url}{item}')
        try:
            response = await self.crawler.async_get(request, follow_redirects=True)
        except RequestError:
            # Best-effort probe: tally the failure and try the next path
            self.network_errors += 1
        except Exception as exception:
            logging.exception(exception)
        else:
            if response.is_success:
                return True
    return False
async def must_attack(self, request: Request):
3
Source : mod_htp.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def attack(self, request: Request):
    """Analyze the request's resource against the hash-the-planet database.

    When *request* targets the site root, also enumerates and analyzes the
    module's list of well-known static file paths.
    """
    await self._init_db()
    root_url = await self.persister.get_root_url()
    if request.url == root_url:
        files = self._get_static_files()
        for file_path in files:
            await self._analyze_file(Request(root_url + file_path, method="GET"))
    # NOTE(review): source indentation was lost in extraction; this final call is
    # assumed to run for every request, not only the root — confirm against upstream.
    await self._analyze_file(request)
async def _init_db(self):
3
Source : mod_htp.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def _download_htp_database(self, htp_dabatabse_url: str, htp_database_path: str):
    """Download the hash-the-planet database and write its raw bytes to disk.

    NOTE(review): the parameter name "htp_dabatabse_url" is a typo kept as-is
    so keyword-argument callers stay compatible.
    """
    download_request = Request(htp_dabatabse_url)
    download_response: Page = await self.crawler.async_send(download_request, follow_redirects=True)

    with open(htp_database_path, 'wb') as database_file:
        database_file.write(download_response.bytes)
async def _verify_htp_database(self, htp_database_path: str):
3
Source : mod_log4shell.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def attack_apache_solr_url(self, request_url: str):
    """Send a Log4Shell probe through Apache Solr's CREATE action "name" parameter.

    Braces in the JNDI payload are percent-encoded so they survive URL handling,
    then the callback is checked via _verify_param_vulnerability.
    """
    payload_unique_id = uuid.uuid4()
    # %7B / %7D keep the ${...} JNDI syntax intact inside the URL
    payload = self._generate_payload(payload_unique_id).replace("{", "%7B").replace("}", "%7D")
    query = f"action=CREATE&name={payload}&wt=json"
    malicious_request = Request(
        path=request_url + "?" + query,
        method="GET",
        get_params=None,
    )
    try:
        await self.crawler.async_send(malicious_request, follow_redirects=True)
    except RequestError:
        # Unreachable target: count it and skip verification entirely
        self.network_errors += 1
        return
    await self._verify_param_vulnerability(malicious_request, payload_unique_id, "name")
async def _attack_apache_struts(self, request_url: str):
3
Source : mod_log4shell.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def _attack_apache_struts(self, request_url: str):
    """Send a Log4Shell probe in the URL path, shaped for Apache Struts.

    The payload's "//" is rewritten so Struts' path normalization does not
    collapse it, then the callback is checked via _verify_url_vulnerability.
    """
    payload_unique_id = uuid.uuid4()
    # Here we need to replace "//" by "$%7B::-/%7D/" because Apache Struts will replace "//" by "/"
    # and with these special characters it will keep "//"
    payload = self._generate_payload(payload_unique_id).replace("//", "$%7B::-/%7D/")
    modified_request = Request(request_url + ("" if request_url.endswith("/") else "/") + payload + "/")
    try:
        await self.crawler.async_send(modified_request, follow_redirects=True)
    except RequestError:
        # Unreachable target: count it and skip verification
        self.network_errors += 1
        return
    await self._verify_url_vulnerability(modified_request, payload_unique_id)
async def _attack_apache_druid_url(self, request_url: str):
3
Source : mod_log4shell.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def _attack_apache_druid_url(self, request_url: str):
    """Send a Log4Shell probe against an Apache Druid endpoint via DELETE.

    Braces and slashes in the JNDI payload are percent-encoded so the payload
    survives as a single path segment; callback checked via _verify_url_vulnerability.
    """
    payload_unique_id = uuid.uuid4()
    # %7B/%7D keep ${...} intact; %2f keeps the payload in one path segment
    payload = self._generate_payload(payload_unique_id).replace("{", "%7B").replace("}", "%7D").replace("/", "%2f")
    malicious_request = Request(
        path=request_url + payload,
        method="DELETE",
    )
    try:
        await self.crawler.async_send(malicious_request, follow_redirects=True)
    except RequestError:
        # Unreachable target: count it and skip verification
        self.network_errors += 1
        return
    await self._verify_url_vulnerability(malicious_request, payload_unique_id)
async def _attack_specific_cases(self, request: Request):
3
Source : mod_log4shell.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def _attack_specific_cases(self, request: Request):
    """Run product-specific Log4Shell probes (vSphere, Struts, Druid, Solr).

    Root-only probes are triggered once, when *request* targets the site root.
    """
    root_url = await self.persister.get_root_url()
    if request.url == root_url:
        current_url = request.url + ("" if request.url.endswith("/") else "/")
        vsphere_request = Request(
            path=current_url + self.VSPHERE_URL,
            method=request.method,
            referer=request.referer,
            link_depth=request.link_depth
        )
        await self._attack_vsphere_url(vsphere_request)
        await self._attack_apache_struts(request.url)
        await self._attack_apache_druid_url(current_url + self.DRUID_URL)
        await self.attack_apache_solr_url(current_url + self.SOLR_URL)
    # NOTE(review): source indentation was lost in extraction; this call is
    # assumed to apply to every request (not only the root) — confirm upstream.
    await self._attack_vsphere_url(request)
async def attack(self, request: Request):
3
Source : mod_wapp.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def _dump_url_content_to_file(self, url: str, file_path: str):
    """Fetch *url* and serialize its JSON body to *file_path* (UTF-8)."""
    fetch_request = Request(url)
    fetch_response = await self.crawler.async_send(fetch_request)

    with open(file_path, 'w', encoding='utf-8') as output_file:
        json.dump(fetch_response.json, output_file)
async def _load_wapp_database(self, categories_url: str, technologies_base_url: str, groups_url: str):
3
Source : mod_wp_enum.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def attack(self, request: Request):
    """Detect WordPress on the target and enumerate its version, plugins and themes.

    Marks the module finished immediately: this attack runs once per target.
    """
    self.finished = True
    request_to_root = Request(request.url)
    response = await self.crawler.async_send(request_to_root, follow_redirects=True)
    if self.check_wordpress(response):
        await self.detect_version(request_to_root.url)
        log_blue("----")
        log_blue(_("Enumeration of WordPress Plugins :"))
        await self.detect_plugin(request_to_root.url)
        log_blue("----")
        log_blue(_("Enumeration of WordPress Themes :"))
        await self.detect_theme(request_to_root.url)
    else:
        log_blue(MSG_NO_WP)
3
Source : crawler.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def _async_try_login_basic_digest_ntlm(self, auth_url: str) -> Tuple[bool, dict, List[str]]:
    """Probe *auth_url* and report whether HTTP-auth login appears to have worked.

    Returns (success, form_credentials, disconnect_urls); the last two are
    always empty for basic/digest/NTLM authentication.
    """
    response = await self.async_get(web.Request(auth_url))
    # 401/403/404 all mean the credentials were not accepted for this URL
    login_succeeded = response.status not in (401, 403, 404)
    return login_succeeded, {}, []
def _extract_disconnect_urls(self, page: Page) -> List[str]:
0
Source : test_mod_backup.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def test_whole_stuff():
    """ModuleBackup must find the .bak copy of config.php and report it once."""
    # Test attacking all kind of parameter without crashing
    respx.get("http://perdu.com/config.php.bak").mock(return_value=httpx.Response(200, text="password = 123456"))
    respx.get("http://perdu.com/config.php").mock(return_value=httpx.Response(200, text="Hello there"))
    respx.get(url__startswith="http://perdu.com/").mock(return_value=httpx.Response(404))

    fake_persister = AsyncMock()

    target = Request("http://perdu.com/config.php")
    target.path_id = 1
    target.set_headers({"content-type": "text/html"})

    crawler = AsyncCrawler(Request("http://perdu.com/"), timeout=1)
    backup_module = ModuleBackup(crawler, fake_persister, {"timeout": 10, "level": 2}, Event())
    backup_module.do_get = True

    await backup_module.attack(target)

    first_call_kwargs = fake_persister.add_payload.call_args_list[0][1]
    assert first_call_kwargs["module"] == "backup"
    assert first_call_kwargs["payload_type"] == "vulnerability"
    assert first_call_kwargs["request"].url == "http://perdu.com/config.php.bak"
    await crawler.close()
@pytest.mark.asyncio
0
Source : test_mod_buster.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def test_whole_stuff():
    """ModuleBuster must discover the mocked directories and pages, and nothing else."""
    # Test attacking all kind of parameter without crashing
    respx.get("http://perdu.com/").mock(return_value=httpx.Response(200, text="Default page"))
    respx.get("http://perdu.com/admin").mock(
        return_value=httpx.Response(301, text="Hello there", headers={"Location": "/admin/"})
    )
    respx.get("http://perdu.com/admin/").mock(return_value=httpx.Response(200, text="Hello there"))
    respx.get("http://perdu.com/config.inc").mock(return_value=httpx.Response(200, text="pass = 123456"))
    respx.get("http://perdu.com/admin/authconfig.php").mock(return_value=httpx.Response(200, text="Hello there"))
    respx.get(url__regex=r"http://perdu\.com/.*").mock(return_value=httpx.Response(404))

    fake_persister = Mock()

    target = Request("http://perdu.com/")
    target.path_id = 1
    target.set_headers({"content-type": "text/html"})

    # Buster module will get requests from the persister
    fake_persister.get_links.return_value = AsyncIterator([target])

    crawler = AsyncCrawler(Request("http://perdu.com/"), timeout=1)
    options = {"timeout": 10, "level": 2, "tasks": 20}

    wordlist = [("nawak", Flags()), ("admin", Flags()), ("config.inc", Flags()), ("authconfig.php", Flags())]
    with patch("wapitiCore.attack.mod_buster.ModuleBuster.payloads", wordlist):
        buster_module = ModuleBuster(crawler, fake_persister, options, Event())
        buster_module.do_get = True
        await buster_module.attack(target)

        assert buster_module.known_dirs == ["http://perdu.com/", "http://perdu.com/admin/"]
        assert buster_module.known_pages == ["http://perdu.com/config.inc", "http://perdu.com/admin/authconfig.php"]

    await crawler.close()
0
Source : test_mod_cookieflags.py
with GNU General Public License v2.0
from wapiti-scanner
with GNU General Public License v2.0
from wapiti-scanner
async def test_cookieflags():
    # Three cookies: one missing HttpOnly, one fully flagged, one missing both flags.
    respx.get("https://github.com/").mock(
        return_value=httpx.Response(
            200,
            headers=[
                ("set-cookie", "_octo=31337; Path=/; Domain=github.com; Secure; SameSite=Lax"),
                ("set-cookie", "logged_in=no; Path=/; Domain=github.com; HttpOnly; Secure; SameSite=Lax"),
                ("set-cookie", "foo=bar; Path=/; Domain=github.com;")
            ]
        )
    )
    fake_persister = AsyncMock()
    target = Request("https://github.com/")
    target.path_id = 1
    crawler = AsyncCrawler(Request("https://github.com/"), timeout=1)
    # Initial request only serves to fill the crawler's cookie jar
    await crawler.async_send(target)
    cookie_module = ModuleCookieflags(crawler, fake_persister, {"timeout": 10, "level": 2}, asyncio.Event())
    await cookie_module.attack(target)

    assert fake_persister.add_payload.call_count == 3
    assert fake_persister.add_payload.call_args_list[0][1]["module"] == "cookieflags"
    reported_flags = []
    for call in fake_persister.add_payload.call_args_list:
        description, cookie_name = call[1]["info"].split(":")
        reported_flags.append((cookie_name.strip(), re.search(r"(HttpOnly|Secure)", description).group()))
    assert reported_flags == [
        ('_octo', 'HttpOnly'),
        ('foo', 'HttpOnly'),
        ('foo', 'Secure')
    ]
    await crawler.close()
0 votes
Source: test_mod_crlf.py
License: GNU General Public License v2.0, from the wapiti-scanner project
async def test_whole_stuff():
    # Attack every kind of parameter without crashing; only injections through
    # "foo" echo a "wapiti" response header, so only that parameter is reported.
    respx.get(url__regex=r"http://perdu\.com/\?a=.*&foo=bar").mock(return_value=httpx.Response(200, text="Hello there"))
    respx.get(url__regex=r"http://perdu.com/\?a=b*&foo=.*wapiti.*").mock(
        return_value=httpx.Response(200, text="Hello there", headers={"wapiti": "whatever version"})
    )
    fake_persister = AsyncMock()
    target = Request("http://perdu.com/?a=b&foo=bar")
    target.path_id = 1
    crawler = AsyncCrawler(Request("http://perdu.com/"), timeout=1)
    crlf_module = ModuleCrlf(crawler, fake_persister, {"timeout": 10, "level": 2}, Event())
    crlf_module.do_get = True
    await crlf_module.attack(target)

    assert fake_persister.add_payload.call_count == 1
    first_call_kwargs = fake_persister.add_payload.call_args_list[0][1]
    assert first_call_kwargs["module"] == "crlf"
    assert first_call_kwargs["category"] == _("CRLF Injection")
    assert first_call_kwargs["parameter"] == "foo"
    await crawler.close()
0 votes
Source: test_mod_csrf.py
License: GNU General Public License v2.0, from the wapiti-scanner project
async def test_csrf_cases():
    # Four scenarios: a plain GET (never attacked), a POST with a weakly
    # checked token, a POST with a predictable token, and a POST with no token.
    fake_persister = AsyncMock()

    get_request = Request("http://127.0.0.1:65086/")
    weak_check_post = Request(
        "http://127.0.0.1:65086/",
        method="POST",
        post_params=[["email", "[email protected]"], ["xsrf_token", "weak"]],
    )
    predictable_token_post = Request(
        "http://127.0.0.1:65086/?check=true",
        method="POST",
        post_params=[["email", "[email protected]"], ["xsrf_token", "weak"]],
    )
    tokenless_post = Request(
        "http://127.0.0.1:65086/?check=true",
        method="POST",
        post_params=[["name", "Obiwan"]],
    )
    all_requests = [get_request, weak_check_post, predictable_token_post, tokenless_post]
    for request_id, request in enumerate(all_requests, start=1):
        request.path_id = request_id

    crawler = AsyncCrawler(Request("http://127.0.0.1:65086/"), timeout=1)
    csrf_module = ModuleCsrf(crawler, fake_persister, {"timeout": 10, "level": 1}, Event())
    csrf_module.do_post = True

    for request in all_requests:
        if await csrf_module.must_attack(request):
            await csrf_module.attack(request)
        else:
            # Only the plain GET request may be skipped
            assert request.path_id == 1

    reported = {
        (call[1]["request_id"], call[1]["info"])
        for call in fake_persister.add_payload.call_args_list
    }
    assert reported == {
        (2, _("CSRF token '{}' is not properly checked in backend").format("xsrf_token")),
        (3, _("CSRF token '{}' might be easy to predict").format("xsrf_token")),
        (4, _("Lack of anti CSRF token"))
    }
    await crawler.close()
0 votes
Source: test_mod_drupal_enum.py
License: GNU General Public License v2.0, from the wapiti-scanner project
async def test_no_drupal():
    # A site with no Drupal markers must not produce any finding.
    respx.get("http://perdu.com/").mock(
        return_value=httpx.Response(
            200,
            text=" < html> < head> < title>Vous Etes Perdu ? < /title> < /head> < body> < h1>Perdu sur l'Internet ? < /h1> \
< h2>Pas de panique, on va vous aider < /h2> \
< strong> < pre> * < ----- vous êtes ici < /pre> < /strong> < /body> < /html>"
        )
    )
    respx.get(url__regex=r"http://perdu.com/.*?").mock(return_value=httpx.Response(404))

    fake_persister = AsyncMock()
    target = Request("http://perdu.com/")
    target.path_id = 1
    crawler = AsyncCrawler(Request("http://perdu.com/"))
    drupal_module = ModuleDrupalEnum(crawler, fake_persister, {"timeout": 10, "level": 2, "tasks": 20}, Event())
    await drupal_module.attack(target)

    assert not fake_persister.add_payload.call_count
    await crawler.close()
@pytest.mark.asyncio
0 votes
Source: test_mod_drupal_enum.py
License: GNU General Public License v2.0, from the wapiti-scanner project
async def test_version_detected():
    # Serving the genuine CHANGELOG.txt must let the module pin version 7.67.
    wapiti_dir = os.path.dirname(sys.modules["wapitiCore"].__file__)
    drupal_data_dir = os.path.join(wapiti_dir, "..", "tests/data/drupal/")
    with open(path_join(drupal_data_dir, "CHANGELOG.txt"), errors="ignore") as changelog:
        changelog_content = changelog.read()

    # A 200 on /sites/ marks the target as Drupal
    respx.get("http://perdu.com/sites/").mock(return_value=httpx.Response(200))
    respx.get("http://perdu.com/CHANGELOG.txt").mock(return_value=httpx.Response(200, text=changelog_content))
    respx.get(url__regex=r"http://perdu.com/.*?").mock(return_value=httpx.Response(404))

    fake_persister = AsyncMock()
    target = Request("http://perdu.com/")
    target.path_id = 1
    crawler = AsyncCrawler(Request("http://perdu.com/"))
    drupal_module = ModuleDrupalEnum(crawler, fake_persister, {"timeout": 10, "level": 2, "tasks": 20}, Event())
    await drupal_module.attack(target)

    assert fake_persister.add_payload.call_count == 1
    assert fake_persister.add_payload.call_args_list[0][1]["module"] == "drupal_enum"
    assert fake_persister.add_payload.call_args_list[0][1]["info"] == (
        '{"name": "Drupal", "versions": ["7.67"], "categories": ["CMS Drupal"]}'
    )
    await crawler.close()
@pytest.mark.asyncio
0 votes
Source: test_mod_drupal_enum.py
License: GNU General Public License v2.0, from the wapiti-scanner project
async def test_multi_versions_detected():
    # When MAINTAINERS.txt is ambiguous, every matching version is reported.
    wapiti_dir = os.path.dirname(sys.modules["wapitiCore"].__file__)
    drupal_data_dir = os.path.join(wapiti_dir, "..", "tests/data/drupal/")
    with open(path_join(drupal_data_dir, "MAINTAINERS.txt"), errors="ignore") as maintainers:
        maintainers_content = maintainers.read()

    # A 200 on /sites/ marks the target as Drupal
    respx.get("http://perdu.com/sites/").mock(return_value=httpx.Response(200))
    respx.get("http://perdu.com/core/MAINTAINERS.txt").mock(return_value=httpx.Response(200, text=maintainers_content))
    respx.get(url__regex=r"http://perdu.com/.*?").mock(return_value=httpx.Response(404))

    fake_persister = AsyncMock()
    target = Request("http://perdu.com/")
    target.path_id = 1
    crawler = AsyncCrawler(Request("http://perdu.com/"))
    drupal_module = ModuleDrupalEnum(crawler, fake_persister, {"timeout": 10, "level": 2, "tasks": 20}, Event())
    await drupal_module.attack(target)

    assert fake_persister.add_payload.call_count == 1
    assert fake_persister.add_payload.call_args_list[0][1]["info"] == (
        '{"name": "Drupal", "versions": ["8.0.0-beta4", "8.0.0-beta5", "8.0.0-beta6"], "categories": ["CMS Drupal"]}'
    )
    await crawler.close()
@pytest.mark.asyncio
0 votes
Source: test_mod_drupal_enum.py
License: GNU General Public License v2.0, from the wapiti-scanner project
async def test_version_not_detected():
    # An edited changelog still identifies Drupal but yields no version string.
    wapiti_dir = os.path.dirname(sys.modules["wapitiCore"].__file__)
    drupal_data_dir = os.path.join(wapiti_dir, "..", "tests/data/drupal/")
    with open(path_join(drupal_data_dir, "CHANGELOG_EDITED.txt"), errors="ignore") as changelog:
        edited_content = changelog.read()

    # A 200 on /sites/ marks the target as Drupal
    respx.get("http://perdu.com/sites/").mock(return_value=httpx.Response(200))
    respx.get("http://perdu.com/CHANGELOG.txt").mock(return_value=httpx.Response(200, text=edited_content))
    respx.get(url__regex=r"http://perdu.com/.*?").mock(return_value=httpx.Response(404))

    fake_persister = AsyncMock()
    target = Request("http://perdu.com/")
    target.path_id = 1
    crawler = AsyncCrawler(Request("http://perdu.com/"))
    drupal_module = ModuleDrupalEnum(crawler, fake_persister, {"timeout": 10, "level": 2, "tasks": 20}, Event())
    await drupal_module.attack(target)

    assert fake_persister.add_payload.call_count == 1
    assert fake_persister.add_payload.call_args_list[0][1]["info"] == (
        '{"name": "Drupal", "versions": [""], "categories": ["CMS Drupal"]}'
    )
    await crawler.close()
See more examples.