Compare commits

...

337 Commits

Author SHA1 Message Date
M09Ic
dfa3b2da56
Merge pull request #111 from chainreactors/dev
merge v1.2.2
2025-06-06 13:30:01 +08:00
M09Ic
57c67fe36a fix: cicd releaser 2025-06-05 13:59:13 +08:00
M09Ic
20b70d0dcd feat: support unique when format result, https://github.com/chainreactors/spray/issues/104 2025-06-05 13:52:30 +08:00
M09Ic
0a833b0326 feat: 支持通配符状态码, https://github.com/chainreactors/spray/issues/38 2025-06-05 13:45:48 +08:00
M09Ic
fd7b603e02 feat: recover fuzzuli generate algorithm 2025-06-05 12:55:36 +08:00
M09Ic
10540f942c fix: csv format , https://github.com/chainreactors/spray/issues/97 2025-06-05 12:46:53 +08:00
M09Ic
ff06fd1902 fix: addition word not safe join path, https://github.com/chainreactors/spray/issues/106 2025-06-05 12:34:06 +08:00
M09Ic
104d41380e fix: null word not continue, https://github.com/chainreactors/spray/issues/108 2025-06-05 12:29:17 +08:00
M09Ic
d21dd493f6 fix: -e not expect , https://github.com/chainreactors/spray/issues/108 2025-06-05 12:29:02 +08:00
M09Ic
cd58c410d8 fix: fix log and option 2025-06-05 11:41:11 +08:00
M09Ic
5c2e377d0d chore: add black,white,unique short flag 2025-05-20 00:23:25 +08:00
M09Ic
31f48d4b06 chore: append-rule add short flag -R 2025-05-12 23:03:17 +08:00
M09Ic
f7c1034310 fix: crawl and url not print 2025-04-18 21:20:00 +08:00
M09Ic
fb63ed010c
Update README.md 2025-04-10 19:30:21 +08:00
M09Ic
0d700f8ea0
Update README.md 2025-03-04 15:10:37 +08:00
M09Ic
c82f0564f5
Merge pull request #95 from chainreactors/dev
merge v1.2.1
2025-03-04 15:06:24 +08:00
M09Ic
08ce95b43d fix: check pool net set headers 2025-03-04 14:58:28 +08:00
M09Ic
c746c26ff9 fix: brute init not set header, https://github.com/chainreactors/spray/issues/94 2025-03-04 14:56:48 +08:00
M09Ic
b13903ea98 fix: map panic, https://github.com/chainreactors/spray/issues/93 2025-03-04 14:45:31 +08:00
M09Ic
e951b68e75
Merge pull request #92 from chainreactors/dev
fix: adapt template yaml
2025-02-23 00:08:33 +08:00
M09Ic
0e9d094dd1 fix: adapt template yaml 2025-02-23 00:02:20 +08:00
M09Ic
72720a942d
Merge pull request #91 from chainreactors/dev
merge v1.2.0
2025-02-22 21:14:47 +08:00
M09Ic
6c5811f1d2 ci: fix golang version go1.20 2025-02-22 21:05:39 +08:00
M09Ic
ef69d46b2a ci: fix golang version go1.20 2025-02-22 21:01:00 +08:00
M09Ic
ff1e596380 feat: support proxyclient for http and fasthttp 2025-02-22 20:58:24 +08:00
M09Ic
f1b9400e19 refactor: remove internal pkg, use engine replace
fix: chunk mod not read
fix: nil bar panic
enhance: add default accept and user-agent
2025-02-22 20:31:32 +08:00
M09Ic
c07c2305af ci: update gorelease go version to 1.20 2025-02-22 14:41:37 +08:00
M09Ic
3087ec32d1 chore: improve format output 2025-02-22 14:38:24 +08:00
M09Ic
286710f5ec fix: init failed bar not close and total bar not wait 2025-02-22 14:01:10 +08:00
M09Ic
5f8f5c7795 chore: improve format output 2025-02-22 02:50:50 +08:00
M09Ic
0f1e6b8333 fix: try fix deadlock, thanks https://github.com/chainreactors/spray/pull/89 2025-02-22 02:49:50 +08:00
M09Ic
7621514bd9
Merge pull request #79 from chainreactors/dev
merge v1.1.6
2024-11-01 13:54:13 +08:00
M09Ic
de12d568ce enhance: add hard exit, https://github.com/chainreactors/spray/issues/78 2024-11-01 12:30:55 +08:00
M09Ic
02162cffd6 revert: not same redirect banned 2024-11-01 12:27:31 +08:00
M09Ic
9e74a17096 fix: path join not expect 2024-11-01 12:25:53 +08:00
M09Ic
0ca5c02de7 enhance: skip not same host redirect 2024-10-30 16:11:05 +08:00
M09Ic
5cb9aa119d fix: not same domain filtered 2024-10-30 15:57:32 +08:00
M09Ic
6bbc6141ac enhance: add 404 default fuzzystatus, 429 waf status 2024-10-16 14:47:29 +08:00
M09Ic
af82ae43b9 enhance probe output 2024-10-14 02:20:39 +08:00
M09Ic
e483bb4439 baseline add from and parent prop 2024-10-14 02:20:16 +08:00
M09Ic
344e560471 add --append-depth limit append recu depth 2024-10-14 01:54:57 +08:00
M09Ic
2a68d0b49b
Merge pull request #76 from chainreactors/dev
merge v1.1.3
2024-09-28 10:58:07 +08:00
M09Ic
f1684ffeb4 fix brutepool baseurl bug 2024-09-23 16:47:25 +08:00
M09Ic
a4b9e77029 fix no-stat not work 2024-09-23 16:25:06 +08:00
M09Ic
24eade89d0 clean fallback print and fix multi print 2024-09-23 16:19:41 +08:00
M09Ic
ed3e95f21d -q work for config print 2024-09-23 16:10:00 +08:00
M09Ic
fcce861ae3 fix stat Found bug 2024-09-23 16:08:43 +08:00
M09Ic
7693b4d38f fix checkpool time not work 2024-09-23 15:02:44 +08:00
M09Ic
2f28b0ec3c
Merge pull request #74 from chainreactors/dev
merge v1.1.2
2024-09-10 18:03:59 +08:00
M09Ic
a942bac337 add config panel 2024-09-10 17:59:38 +08:00
M09Ic
2de8822b01 fix %EXT% not work in plugin , https://github.com/chainreactors/spray/issues/63 2024-09-10 16:47:49 +08:00
M09Ic
29db702744 fix init timeout not work, https://github.com/chainreactors/spray/issues/58 2024-09-10 15:41:48 +08:00
M09Ic
5cf02cbbcb fix init panic when request failed
https://github.com/chainreactors/spray/issues/73
https://github.com/chainreactors/spray/issues/72
https://github.com/chainreactors/spray/issues/71
2024-09-10 14:14:01 +08:00
M09Ic
2e8a923bac
Merge pull request #67 from chainreactors/dev
merge v1.1.1
2024-08-29 14:38:18 +08:00
M09Ic
4a0c8f86eb support csv and fix fuzzy output 2024-08-29 14:04:56 +08:00
M09Ic
b4c6a77a98 fix host mod not work, https://github.com/chainreactors/spray/issues/63 2024-08-29 01:43:16 +08:00
M09Ic
d6e7e58b18 fix fuzzy output when not --fuzzy flag 2024-08-29 01:43:16 +08:00
M09Ic
15110ab895 fix fasthttp socket timeout, https://github.com/chainreactors/spray/issues/58 2024-08-29 01:43:15 +08:00
M09Ic
f6037d7a1e
Merge pull request #65 from chainreactors/dev
merge v1.1.0
2024-08-27 14:30:40 +08:00
M09Ic
da71cbc575 fix -a not load recon bug 2024-08-26 02:25:02 +08:00
M09Ic
106f007693 union load appendwords and dict 2024-08-26 02:22:35 +08:00
M09Ic
77a5e58a2a add --print print all preset config 2024-08-26 01:47:39 +08:00
M09Ic
06bd9820e7 adapt spray_dict load 2024-08-26 01:32:11 +08:00
M09Ic
105c426396 refactor plugin 2024-08-26 01:20:03 +08:00
M09Ic
491b8c16a5 enhance crawl and append 2024-08-26 00:33:01 +08:00
M09Ic
1c28898631 refactor output and format 2024-08-26 00:04:44 +08:00
M09Ic
de168e0be9 enhance basepool and brutepool structure 2024-08-25 23:06:10 +08:00
M09Ic
678a6a44e4 fix break error print limit 2024-08-21 16:52:20 +08:00
M09Ic
57eab148ac enhance http tls performance 2024-08-21 15:57:41 +08:00
M09Ic
bf6d1c5f0b
Merge pull request #57 from chainreactors/dev
merge v1.0.2
2024-08-20 16:53:55 +08:00
M09Ic
937855c075 fix brute mod not work 2024-08-20 16:42:32 +08:00
M09Ic
32f558f9c5 fix doUpgrade deadlock 2024-08-16 00:32:53 +08:00
M09Ic
411f24d94d fix the bug of thread pool hanging 2024-08-12 15:12:43 +08:00
M09Ic
8bf4b374ac
Merge pull request #54 from chainreactors/dev
merge v1.0.1
2024-08-06 16:53:00 +08:00
M09Ic
28aacea18c update quickstart note 2024-08-06 16:31:22 +08:00
M09Ic
b1aa68f20c misc update 2024-08-06 16:29:33 +08:00
M09Ic
38bc2d33f2 add --active flag and fix plugin not work bug https://github.com/chainreactors/spray/issues/51 2024-08-06 04:02:15 +08:00
M09Ic
021e84ae81 enhance runner structure 2024-08-06 03:58:41 +08:00
M09Ic
ebc74c1987 fix multi http parse bug 2024-08-06 03:46:30 +08:00
M09Ic
dc8829ecca fix word not work https://github.com/chainreactors/spray/issues/53 2024-08-06 02:58:52 +08:00
M09Ic
3791b765ea
Merge pull request #52 from chainreactors/dev
merge v1.0.0
2024-07-29 17:08:42 +08:00
M09Ic
18e8594da1 update version 2024-07-24 13:51:27 +08:00
M09Ic
31384dc575 update templates 2024-07-24 13:41:58 +08:00
M09Ic
ec5102fd8f fix stat and error print bug 2024-07-24 13:41:03 +08:00
M09Ic
d5286eace5 fix ctrl+c cannot exit 2024-07-24 04:21:43 +08:00
M09Ic
44e88e0aa7 fix filename error 2024-07-24 04:08:59 +08:00
M09Ic
2b7be8d449 finger update use exe path 2024-07-22 16:33:15 +08:00
M09Ic
7af848a6fd refactor option.go 2024-07-22 16:27:07 +08:00
M09Ic
31f4dc661e enhance finger update logic 2024-07-17 02:40:41 +08:00
M09Ic
da7a3d221d support multi fingers features 2024-07-17 01:40:30 +08:00
M09Ic
1c9898ebf5 enhance cidr parse 2024-07-15 16:09:50 +08:00
M09Ic
3e419a52a5 clean code 2024-07-14 04:28:08 +08:00
M09Ic
f942c9c1b3 support --no-stat flag 2024-07-14 04:16:12 +08:00
M09Ic
3f4094d89e refactor cmd ui. 2024-07-14 04:08:50 +08:00
M09Ic
40a1f90601 support preset port config 2024-07-14 03:27:20 +08:00
M09Ic
fab69d977a remove checkpool fuzzy output 2024-07-14 03:19:08 +08:00
M09Ic
bb98110292
Merge pull request #45 from chainreactors/dev
rm same status with random baseline filter
2024-07-04 15:40:23 +08:00
M09Ic
eb328a4b52 clean output code
enhance output format and color
support jsonline output in stdout
2024-07-04 15:18:46 +08:00
M09Ic
237102124e update fingers, fix exceed body match bug 2024-07-04 14:34:11 +08:00
M09Ic
04944cf829 adapt fingers 2024-07-01 19:51:16 +08:00
M09Ic
ca3637466e fix spray output format 2024-06-29 04:02:28 +08:00
M09Ic
3e63812ab0 adapt all finger engine 2024-06-29 03:56:44 +08:00
M09Ic
7ac545e0ae support cpe output 2024-06-29 03:49:30 +08:00
M09Ic
cec35ec667 adapt new fingers engine and refactor output format 2024-06-29 03:49:29 +08:00
M09Ic
6808a9a5f8 rm same status with random baseline filter 2024-06-24 14:51:05 +08:00
M09Ic
0d4a3652ce
Merge pull request #41 from chainreactors/dev
merge v0.9.8
2024-06-24 14:40:36 +08:00
M09Ic
784776edb3 fix prefix,suffix, ext, bak not work bug 2024-06-23 16:59:44 +08:00
M09Ic
0d24501a8a adapt goreleaser2.0 2024-06-23 16:47:11 +08:00
M09Ic
229ef1f424 fix socks auth not work bug 2024-06-06 18:21:26 +08:00
M09Ic
cbf60343bb fix expr not work bug 2024-06-06 18:11:45 +08:00
M09Ic
b84535a06b fix NewBaseline panic 2024-06-04 16:24:36 +08:00
M09Ic
a965412c1a fix wappalyzer panic 2024-06-03 15:30:09 +08:00
M09Ic
cd34c7b2dd fix checkpool bar thread safe bug 2024-05-30 23:33:57 +08:00
M09Ic
4c8c00416b 修复多个解析输入值的bug 2024-05-30 18:55:02 +08:00
M09ic
cbb8e2f295 misc update, bar and finger panic 2024-05-21 18:05:27 +08:00
M09ic
9bb4ae7341 update words 2024-05-17 17:54:52 +08:00
M09ic
ea090aa6bd fix proxy config bug when use http proxy 2024-05-17 17:47:42 +08:00
M09ic
f755fc3816 support -x/--method custom http method
support --raw parser input from raw http
2024-05-17 17:47:11 +08:00
M09ic
75680c21f4 update dependency 2024-05-16 18:35:39 +08:00
M09ic
b2d85a7698 fix abs path parse 2024-04-26 18:57:02 +08:00
M09Ic
13530eee5d update README.md 2024-03-07 15:40:25 +08:00
M09Ic
006e1af2db fix --check-only panic 2024-03-07 15:26:30 +08:00
M09Ic
3da923b2a8 update README.md 2024-03-07 15:02:20 +08:00
M09Ic
ec3ee45b89 add config load log 2024-03-07 04:24:00 +08:00
M09Ic
4e78e55b6e fix config load priority, now config < cmd
enhance bar print
fix config.yaml default
2024-03-07 04:15:28 +08:00
M09Ic
b1e42e763d optimize config.
1. add flag --init to init config.yaml
2. default load config.yaml if this file exist
2024-03-07 02:55:51 +08:00
M09Ic
9e82bb1ab3 use mpb replace uiprogress 2024-03-07 00:24:30 +08:00
M09Ic
16f149dadf enhance crawl print 2024-03-06 18:26:59 +08:00
M09Ic
ab830a5e98 support --no-dict flag 2024-03-06 18:14:07 +08:00
M09Ic
c70d26fd84 fix read body bug when MaxBodyLength == 0 2024-03-06 17:55:18 +08:00
M09Ic
c4d4efe6b7
Merge pull request #27 from chainreactors/dev
merge v0.9.5
2024-03-04 20:05:57 +08:00
M09Ic
f24f8899a9 update ver 2024-03-04 20:04:34 +08:00
M09Ic
d1d6982282 refactor fingers 2024-03-04 20:03:06 +08:00
M09Ic
b487e3da15 fix standard client null proxy bug 2024-03-03 01:59:56 +08:00
M09Ic
9eb4a13e20 进一步优化日志输出, 减少非必要输出 2024-02-20 21:28:07 +08:00
M09Ic
88dc9fe7cd fix fingerprinthub and fingers match bug 2024-02-20 21:09:00 +08:00
M09Ic
b9e970eb91 fix fingerpinthub match bug 2024-02-20 19:34:35 +08:00
M09Ic
4e28fb59b4
Merge pull request #26 from chainreactors/dev
merge v0.9.4
2024-02-20 19:17:56 +08:00
M09Ic
e58af07439 update README.md 2024-02-20 19:17:28 +08:00
M09Ic
376e8cdc38 issues19, add custom extract config 2024-02-20 19:12:05 +08:00
M09Ic
a40655287a fix statistor 2024-02-20 18:52:43 +08:00
M09Ic
20993e5500 support fingerprinthub detect 2024-02-20 18:25:43 +08:00
M09Ic
be19895446
Merge pull request #24 from chainreactors/dev
merge v0.9.3
2024-02-12 17:06:42 +08:00
M09Ic
a49239b78a update go version to 1.21 2024-02-12 16:55:16 +08:00
M09Ic
04c449a297 support config.yaml 2024-02-12 16:49:44 +08:00
M09Ic
61167054ee refactor pool 2024-02-12 02:54:07 +08:00
M09Ic
c1309fe7da 修复functions重复生成目标的bug
优化自动识别能力, 404,200,403现在都会自动进行unique
append-file 现在生效于uniquestatus与whitestatus
2024-02-10 12:48:22 +08:00
M09Ic
0bf3c0433a 403,200,502页面将会启用append-file 2024-02-08 17:41:54 +08:00
M09Ic
9b74f7c9a3 将active修改为finger
修复append-file拼接时的bug
2024-02-08 16:46:34 +08:00
M09Ic
e37201eb75 support append-file 2024-02-08 16:28:27 +08:00
M09Ic
09c2a86a18 refactor package dependency 2024-02-08 15:26:01 +08:00
M09Ic
007ff96478 进一步简化日志, 提供-v参数开启详细日志 2024-02-08 14:57:33 +08:00
M09Ic
cea66e69e0 clean check log 2024-02-07 03:01:07 +08:00
M09Ic
051c270df8 add dicc.txt as default dict 2024-02-07 02:59:55 +08:00
M09Ic
1f493fe104 兼容dirsearch的字典与规则, -e 将会作用于字典内的%EXT% 2024-02-07 02:34:18 +08:00
M09Ic
3272210e07 enable upx release 2024-02-07 01:55:42 +08:00
M09Ic
2222b49632 修复代理为空时的报错bug 2024-02-07 01:54:31 +08:00
M09Ic
f35a07050e support http/socks5 proxy 2024-02-07 01:29:05 +08:00
M09Ic
ea5218b7f3 修复fasthttp client中, timeout不生效的bug 2024-02-07 00:46:11 +08:00
M09Ic
414b85a4f0 enable upx release 2024-01-03 14:59:10 +08:00
M09Ic
408004223c Merge branch 'issue8' into dev 2023-12-28 14:58:23 +08:00
M09Ic
4cbb7b05ba bump dependency 2023-12-28 14:34:19 +08:00
M09Ic
00c880f16e 优化爬虫的url处理 2023-06-30 17:43:47 +08:00
M09Ic
a36e06e96b 修复--timeout实际没生效的bug 2023-06-30 17:29:49 +08:00
M09Ic
db29ce3e39 当爬虫启动时, --read-all将会自动启用 2023-06-30 12:20:17 +08:00
M09Ic
82f0630324 optimized. 添加没有输出目标时的错误信息 2023-06-12 11:19:23 +08:00
M09Ic
949843ce17 optimized. 优化retry的逻辑, 默认为0, 如果为0则不会在命令行输出配置日志 2023-06-12 11:19:22 +08:00
M09Ic
72853f2a06 optimized. 添加没有输出目标时的错误信息 2023-06-12 11:18:23 +08:00
M09Ic
5454c65fd6 optimized. 优化retry的逻辑, 默认为0, 如果为0则不会在命令行输出配置日志 2023-06-12 11:08:21 +08:00
M09Ic
63b39cead1
Merge pull request #9 from chainreactors/dev
merge v0.9.1
2023-06-12 10:46:24 +08:00
M09Ic
f196bd9be5 fix --remove-extension not work 2023-06-12 10:44:22 +08:00
M09Ic
c5bbe36289
Merge pull request #5 from chainreactors/issue4
fix. url list file parse bug
2023-06-06 20:34:51 +08:00
M09Ic
12f2ee4ad8 fix gorelease.yml 2023-06-06 20:27:16 +08:00
M09Ic
6d8227da46 add urlfounder link 2023-06-06 18:12:38 +08:00
M09Ic
1eddc5fcd3 fix. url list file parse bug 2023-06-03 22:33:38 +08:00
M09Ic
73f724b92a feat. 新增--index --random 支持自定义index与random目录 2023-06-03 22:24:52 +08:00
M09Ic
fc3f476fe2 修复bytes类型转换错误的bug 2023-06-03 21:09:01 +08:00
M09Ic
976ce55bdc 修复url解析错误的bug 2023-06-03 16:39:35 +08:00
M09Ic
2f131ccd02 删除无用参数, 修复示例引号错误 2023-06-02 16:57:08 +08:00
M09Ic
bef3e74b44 update README.md 2023-06-01 12:35:33 +08:00
M09Ic
16a7b78070 update README.md 2023-06-01 00:52:48 +08:00
M09Ic
06c6b36b9f 删除action中的upx相关部分 2023-05-04 13:08:10 +08:00
M09Ic
2421c4178d 添加自动化编译 2023-05-04 12:23:24 +08:00
M09Ic
1a656b26a1 新增--retry插件 2023-05-04 12:04:59 +08:00
M09Ic
4b78503d64 添加url错误是的报错信息 2023-04-25 17:33:07 +08:00
M09Ic
0167f376db 修复派生的url可能导致部分插件出错的bug. 现在一旦url出现错误, 判定该请求无效 2023-04-25 17:28:08 +08:00
M09Ic
3698d01903 添加--scope与--no-scope参数, 用来指定爬虫的作用范围
修复recursive中的多个bug
2023-04-14 20:05:21 +08:00
M09Ic
1bcf2f297d 修复url解析错误的bug
修复redirect的wg计算错误的bug
2023-04-06 21:20:24 +08:00
M09Ic
ccc9ca3719 修复task count算错误的bug 2023-04-06 20:41:35 +08:00
M09Ic
77b92b44ea 关闭check-only的kepp-alive.
新增checkonly的--match指定过滤器
2023-04-04 01:21:31 +08:00
M09Ic
8ec00abe10 修复多个可能导致报错的bug.
checkonly支持upgrade与redirect
2023-04-04 00:47:33 +08:00
M09Ic
9eb55ebd66 适配gogo v2.11.0 2023-04-04 00:47:33 +08:00
M09Ic
f44a01e975 新增-c cidr与 -p port-range的支持 2023-04-04 00:47:32 +08:00
M09Ic
30506b1f5b 修复部分情况下过滤不生效的bug 2023-03-24 15:39:40 +08:00
M09Ic
4a12286beb 实装random useragent 2023-03-24 14:20:31 +08:00
M09Ic
bdc793c75b 修复-m host 场景下字典与redirect生成错误的bug 2023-03-24 13:41:00 +08:00
M09Ic
7879d55856 优化log输出 2023-03-13 13:04:40 +08:00
M09Ic
58913caba7 修复部分情况下url不合法导致出现报错的bug 2023-02-21 17:58:16 +08:00
M09Ic
d62bbdf5df 调整指纹收集的逻辑 2023-02-19 22:27:16 +08:00
M09Ic
296bea96a0 修复check-only失效的bug 2023-02-19 16:14:51 +08:00
M09Ic
680d18a573 跳过二进制的返回数据读取 2023-02-08 19:18:33 +08:00
M09Ic
25188b24e8 -F新增从stdin中读, 并添加颜色参数 2023-02-08 19:17:42 +08:00
M09Ic
5ace37824a 调整extract格式 2023-02-08 15:30:40 +08:00
M09Ic
b3589db853 新增一种特殊的过滤模式--unique 或指定状态码的--unique-status 200 2023-02-08 12:58:56 +08:00
M09Ic
6287a8e468 --fuzzy-status添加特殊情况 "all", 所有类型的状态码都会启用对应的模糊匹配 2023-02-07 18:42:20 +08:00
M09Ic
c263c11926 优化无效重定向页面的过滤 2023-02-07 18:37:47 +08:00
M09Ic
ae746b9e67 优化location的处理, 某些应用可能会使用小写 2023-02-07 18:37:19 +08:00
M09Ic
f185dd80e9 添加一个常见的waf状态码1020 2023-02-06 23:29:26 +08:00
M09Ic
fea6ac7c39 去重策略不再作用于word, 防止内存中维护一个过大的map 2023-02-06 23:29:11 +08:00
M09Ic
645487ea1a 修复默认status配置错误的bug 2023-02-06 15:15:37 +08:00
M09Ic
c3993fc8c2 多处性能与代码优化, 能更好的支持不同的fuzz了 2023-02-04 19:44:37 +08:00
M09Ic
9f35b4d1e9 update words 2023-02-03 19:55:56 +08:00
M09Ic
02f601051c spray输出相关的代码整合到spray中 2023-02-01 18:31:50 +08:00
M09Ic
940c5b9e99 修复在优化stat时一个线程安全问题导致程序阻塞的bug 2023-01-29 18:23:55 +08:00
M09Ic
8152ae1b1d update README.md 2023-01-29 15:59:30 +08:00
M09Ic
5aaf062f22 适配新版本gogo 2023-01-28 15:56:21 +08:00
M09Ic
9213c29c3d 新增--recon命令, 打开自动信息提取, 用来提取各种敏感信息 2023-01-28 15:43:43 +08:00
M09Ic
480b7591d4 update mod 2023-01-28 14:53:02 +08:00
M09Ic
3943943405 完成对extract相关功能的重构 2023-01-28 13:15:49 +08:00
M09Ic
8756b7503e 使用了不太优雅的办法解决并发调度与可能的goroutine泄露问题 2023-01-16 17:30:54 +08:00
M09Ic
3ff46e5e1f 优化了输出中的number字段, 现在能正确判断了, 并且addition中的number会是当前的wordoffset, 可以用来判断错误发生的位置. 2023-01-12 19:21:35 +08:00
M09Ic
e30bab194a 优化统计模块, 现在check失败的任务也会在stat中保留了 2023-01-12 18:17:53 +08:00
M09Ic
758a274684 新增--rate-limit, 用来限制单个pool请求速率 2023-01-12 17:41:44 +08:00
M09Ic
78ee22b044 优化status的处理, 运行通过+!在原有的基础上修改 2023-01-12 17:12:30 +08:00
M09Ic
02195a8005 添加了多处错误与日志描述 2023-01-12 16:35:34 +08:00
M09Ic
427f64f57a 回调redirect逻辑, 只有check不会被重定向 2023-01-11 12:07:07 +08:00
M09Ic
16d90d9b30 调整多条正则 2023-01-11 12:04:03 +08:00
M09Ic
69471df137 优化stat输出配色 2023-01-11 12:03:54 +08:00
M09Ic
68bef7bc3c 优化redirect, init不需要重定向 2023-01-11 12:03:40 +08:00
M09Ic
009ca464bd 调整多条爬虫的正则 2023-01-11 11:40:38 +08:00
M09Ic
a4d912ed4d 调整全局的去重列表以及日志输出 2023-01-11 11:26:03 +08:00
M09Ic
8233dcefd3 调整爬虫的正则 2023-01-11 11:12:40 +08:00
M09Ic
4a774718c9 调整url去重的逻辑, 现在将统一进行去重判断 2023-01-11 11:12:00 +08:00
M09Ic
75236c7708 优化index的init 2023-01-10 23:55:03 +08:00
M09Ic
880b11e860 优化stat, 添加了source的相关统计 2023-01-10 23:54:46 +08:00
M09Ic
b120d703b8 重写了目录拼接的所有逻辑, 发现做到安全的目录拼接有些困难, 只能做到尽可能安全 2023-01-10 23:44:03 +08:00
M09Ic
0b8fed7e80 优化body读取的逻辑, 0byte的body将会自动跳过读取 2023-01-10 11:59:43 +08:00
M09Ic
33da70c457 优化body为0bytes情况下EOF报错显示 2023-01-10 02:06:02 +08:00
M09Ic
f197d18621 重写协议升级的逻辑 2023-01-10 02:04:12 +08:00
M09Ic
b5da1eb45f 优化crawl的正则表达式 2023-01-10 01:30:05 +08:00
M09Ic
f87acdf657 实装--crawl-depth 2023-01-10 01:09:00 +08:00
M09Ic
1fca8c146f 修复host输出不适当的bug 2023-01-10 01:08:42 +08:00
M09Ic
599118284c 进一步优化crawl的正则与特殊情况处理 2023-01-10 00:58:16 +08:00
M09Ic
033f3acdd7 实装-c参数, 可以自定义client 2023-01-10 00:57:55 +08:00
M09Ic
171786c51e 进一步优化crawl的正则与特殊情况 2023-01-09 22:41:05 +08:00
M09Ic
a23643ebf0 优化正则, 适配go的正则规则 2023-01-09 22:23:51 +08:00
M09Ic
2081e25f93 bl中的exceed显示适配read-all 2023-01-09 22:23:34 +08:00
M09Ic
b019324383 新增--read-all参数, 用来取消body max read限制 2023-01-09 21:47:06 +08:00
M09Ic
f24c7b3bc6 对爬虫进行大量优化, 包括优化性能, 去重, 优化目录拼接, 适配"./"相对目录
fuzzy会在开启debug时自动启用.
2023-01-09 21:33:05 +08:00
M09Ic
f8b84c733b 优化命令行参数, 当-w与-d都为空的时候自动添加"/"防止报错 2023-01-09 15:45:17 +08:00
M09Ic
a94f9e3dc7 优化相似度判断, 并添加了distance/sim字段用来获取.
优化fuzzybaseline的逻辑, 移动到处理线程中.
优化expr的性能
修复--fuzzy没启用也会生效的bug
2023-01-09 14:47:58 +08:00
M09Ic
797ac74af3 适配了favicon的指纹识别
添加了基于contenttype的一些优化
index现在发送的是空数据, 而非添加了"/"之后的
2023-01-09 13:43:00 +08:00
M09Ic
c84440a662 修复程序并发调度失败的bug 2023-01-09 11:55:27 +08:00
M09Ic
e3fc74e78d 优化baseline的性能与信息获取 2023-01-06 15:06:40 +08:00
M09Ic
4a1cb28bdd 新增--common, 探测web常见的通用文件.
优化--bak, bak现在还会带上常见的备份文件名了
2023-01-06 13:07:59 +08:00
M09Ic
6c2f5919d2 优化pool的init, 使用互斥锁提高并发速度 2023-01-06 11:30:17 +08:00
M09Ic
a3082d3f1a 通过fasthttp复用buf导致的数据引用错误的bug
优化代码结构
2023-01-06 04:18:21 +08:00
M09Ic
806f6355d0 misc update 2023-01-06 03:31:28 +08:00
M09Ic
010447c8f2 更加安全的目录拼接, 在不改变/数量的情况下, 实现安全且正确的拼接path 2023-01-06 01:28:09 +08:00
M09Ic
bb92c994cd 实现--bak, 自动生成字典爆破备份文件 2023-01-06 00:48:13 +08:00
M09Ic
26cc384de0 新增--file-bak参数, 开启有效结果的备份文件爆破, 可以使用-a同时开启三个 2023-01-05 23:26:14 +08:00
M09Ic
9750f819cd 新增--append-rule, 用来对valid的结果进行二次基于规则的爆破.
添加了规则表: filebak.txt
2023-01-05 22:42:07 +08:00
M09Ic
436fb2f3f5 update README.md 2023-01-05 15:54:48 +08:00
M09Ic
7e10db2391 添加help的补充说明, 优化README文档 2023-01-05 15:53:10 +08:00
M09Ic
da714a1fcc 调整client中并发池的大小为-t的1.5倍, 优化性能 2023-01-05 15:01:08 +08:00
M09Ic
679b0a129d 调整client中并发池的大小为-t的1.5倍, 优化性能 2023-01-05 14:58:14 +08:00
M09Ic
8014f06bf5 优化check-pool 2023-01-05 14:56:23 +08:00
M09Ic
f9837c4e29 优化help, 新增multi标记 2023-01-05 14:41:41 +08:00
M09Ic
c64467a588 -u 允许添加多个
调整stat文件名生成方式
2023-01-05 14:40:20 +08:00
M09Ic
38c73d350a update README.md 2023-01-04 14:19:32 +08:00
M09Ic
09a8f732e7 update README.md 2023-01-04 14:18:03 +08:00
M09Ic
e6815fedde 修复多处拼写错误 2023-01-04 13:52:12 +08:00
M09Ic
45f325e02b probe中添加source. 2023-01-04 13:52:03 +08:00
M09Ic
30a82a7657 初步实现-F
给--probe添加缩写-o
2023-01-04 12:06:13 +08:00
M09Ic
5dc8f7107f 优化content-length输出, 现在超过max的结果也会正确的获取, 但添加了(exceed)标记 2023-01-04 11:31:31 +08:00
M09Ic
f30712bb7e 优化输出, 有价值(200,30x)的index也会输出到文件中 2023-01-04 11:26:25 +08:00
M09Ic
4ab500c049 统一crawl与extract的输出 2023-01-04 11:18:03 +08:00
M09Ic
c83838e1ba 统一extract与crawl的输出 2023-01-04 11:07:18 +08:00
M09Ic
ffd387a28e 优化options的细节, 递归深度的选择 2023-01-04 00:15:34 +08:00
M09Ic
5142012ceb 实装--cookie --user-agent, --header参数 2023-01-03 18:43:12 +08:00
M09Ic
e6aabe44c5 baseline添加source字段 2023-01-03 18:27:06 +08:00
M09Ic
835d4663dd 调整爬虫的逻辑, 优化输出颜色 2023-01-03 18:22:13 +08:00
M09Ic
900dca32cb 新增--max-length参数, 允许手动调整最大读取的body 2023-01-03 17:30:14 +08:00
M09Ic
3d9e994c78 添加-a参数, 自动开启--crawl与--active 2023-01-03 17:20:07 +08:00
M09Ic
77d4e25c9e 初步实现主动指纹识别 2023-01-03 17:16:55 +08:00
M09Ic
9e9b0de039 初步实现简易爬虫 2023-01-03 17:09:32 +08:00
M09Ic
494ce9414a update README.md 2022-12-16 17:33:18 +08:00
M09Ic
a55397eaea 优化resume在因为错误退出时的逻辑 2022-12-16 11:56:27 +08:00
M09Ic
2f24baf27c update README.md 2022-12-15 00:41:39 +08:00
M09Ic
073cf2a095 给全局的输出添加配色, 可以使用--no-color或-q关闭 2022-12-15 00:19:06 +08:00
M09Ic
34e544deaf update README.md 2022-12-14 23:15:38 +08:00
M09Ic
7b4686caca update README.md 2022-12-14 23:09:19 +08:00
M09Ic
0ef16ab595 update README.md 2022-12-14 20:24:37 +08:00
M09Ic
da1984f224 修复recu没有正确编译的bug.
更新words到最新版
2022-12-14 20:24:25 +08:00
M09Ic
6c256af5f6 优化性能, 使用启用了表达式才会生成对应的变量.
优化words的mask内存性能,完全使用生成器代替保存在内存中.
2022-12-14 14:20:45 +08:00
M09Ic
14e4512efd 修复cmd输入url时stat文件名不正确的bug 2022-12-13 00:21:04 +08:00
M09Ic
7b515b849f misc update 2022-12-13 00:19:51 +08:00
M09Ic
22fcda7239 update README.md 2022-12-12 18:14:43 +08:00
M09Ic
c9f7620aed 展示删掉根据waf指纹判断waf, 因为可能会有大量误报 2022-12-12 18:01:14 +08:00
M09Ic
15e2cbbca8 update README.md 2022-12-12 17:28:55 +08:00
M09Ic
b3f297c847 适配gogo2.10.1以及最新的templates 2022-12-12 17:26:07 +08:00
M09Ic
f535627c75 update README.md 2022-12-12 17:07:07 +08:00
M09Ic
0d81491d5a 大量优化, 实装断点续传, 添加ctrl+c的后续处理 2022-12-12 17:05:44 +08:00
M09Ic
75c33e1135 新增--dump与--dump-file, 用来保存全部数据 2022-12-12 00:13:47 +08:00
M09Ic
db58771f49 更加的严格waf判断的逻辑, 防止一些误报导致提前退出程序 2022-12-11 04:37:56 +08:00
M09Ic
6fa21cb6b4 update README.md 2022-12-11 04:28:28 +08:00
M09Ic
faf0812858 优化statistor, 现在是线程安全的 2022-12-11 04:21:42 +08:00
M09Ic
71393bfeb4 修复一个严重的闭包的线程安全问题 2022-12-11 03:52:06 +08:00
M09Ic
6d03910049 修复hash计算时的传入值为body导致多处判断出错的bug 2022-12-11 01:21:05 +08:00
M09Ic
155d0d981b 调整默认check-period为200, 减少check发包.
优化check报错的输出
优化title输出, 转义换行符
2022-12-11 00:50:03 +08:00
M09Ic
af687701a7 初步实现了递归 2022-12-11 00:24:28 +08:00
M09Ic
8a71c1d35e 给help添加说明 2022-12-10 15:18:12 +08:00
M09Ic
eea66e919b update README.md 2022-12-09 19:34:50 +08:00
M09Ic
c2f8f69800 update README.md 2022-12-09 19:32:38 +08:00
M09Ic
5548987e66 add README.md 2022-12-09 19:30:42 +08:00
M09Ic
10e55bdb9a 实装rule-base的filter rule 2022-12-09 19:30:12 +08:00
M09Ic
7f4c75bf10 修复无效数据重复被送入结果管道的bug 2022-12-06 21:53:09 +08:00
M09Ic
b94a4c3137 实装rulebase 字典生成器 2022-12-06 21:45:14 +08:00
M09Ic
f9c5a71258 实现断点续传 2022-12-02 19:59:15 +08:00
M09Ic
023e316518 实装耗时功能, 现在可以看到每个任务与每个请求的耗时 2022-12-02 18:29:26 +08:00
M09Ic
a780f002bf 修复闭包的线程安全bug 2022-12-02 18:05:33 +08:00
M09Ic
38e654913d 实装extractor
多处小优化
2022-12-02 15:21:17 +08:00
M09Ic
288d3e12c2 给map添加lock 2022-12-01 11:49:40 +08:00
M09Ic
5f6481b5bf 给map 添加写锁 2022-12-01 11:40:59 +08:00
M09Ic
3e9bb1d5bf 适配新版words库 2022-11-30 01:31:15 +08:00
M09Ic
f05fd498be 优化client, 关闭path与header的normalizing 2022-11-29 22:00:40 +08:00
M09Ic
fed6fbb3ae 支持跟随重定向 2022-11-29 21:55:27 +08:00
M09Ic
0233c3017b 自动判断协议升级 2022-11-29 20:50:00 +08:00
M09Ic
35fbb1a3c0 优化文件名的输出 2022-11-29 20:24:03 +08:00
M09Ic
d01736c46f -q 时将关闭所有非结果的输出 2022-11-29 15:39:16 +08:00
M09Ic
f0362b08b0 优化过滤规则,现在可以自定义white, black,fuzzy的状态码 2022-11-29 15:16:33 +08:00
M09Ic
779c98487c 实现文件写入 2022-11-29 15:08:10 +08:00
M09Ic
922ed78611 去掉不必要的锁 2022-11-23 10:59:15 +08:00
M09Ic
cc512829ec 新增statistor, 用作统计请求概览与断点续传 2022-11-21 23:56:27 +08:00
M09Ic
cd0ede7e1c 新增--filter与--match参数, 可以自定义过滤与匹配函数 2022-11-21 20:44:02 +08:00
M09Ic
04d8a5d0e7 修复空行的bug 2022-11-21 11:52:14 +08:00
M09Ic
e48390a4be 优化初始化信息输出 2022-11-21 11:45:48 +08:00
43 changed files with 6357 additions and 1860 deletions

40
.github/workflows/gorelease.yml vendored Normal file
View File

@ -0,0 +1,40 @@
# GitHub Actions workflow: build and publish releases with GoReleaser.
# Runs on semver tags (v*.*.*) or can be started manually.
# NOTE(review): indentation was lost in the pasted source; structure
# reconstructed from the visible key order — confirm against the repo file.
name: goreleaser
on:
  push:
    tags:
      - 'v*.*.*'
  workflow_dispatch:
jobs:
  goreleaser:
    runs-on: ubuntu-22.04
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          fetch-depth: 0                 # full history so GoReleaser can generate the changelog
          token: ${{ secrets.GITHUB_TOKEN }}
          submodules: recursive          # pulls the "templates" submodule (see .gitmodules)
      - name: Install upx
        run: sudo apt install upx -y
        continue-on-error: true          # upx packing is optional; never fail the release on it
      - name: Set up Go
        uses: actions/setup-go@v3
        with:
          go-version: "1.20"
      - name: Run GoReleaser
        uses: goreleaser/goreleaser-action@v4
        with:
          distribution: goreleaser
          version: latest
          args: release
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          GOPATH: "/home/runner/go"      # referenced by .goreleaser.yml trimpath flags

3
.gitmodules vendored Normal file
View File

@ -0,0 +1,3 @@
# Submodule holding fingerprint/templates data; cloned recursively by CI.
[submodule "templates"]
path = templates
url = https://github.com/chainreactors/gogo-templates

68
.goreleaser.yml Normal file
View File

@ -0,0 +1,68 @@
# GoReleaser configuration for spray.
# NOTE(review): indentation was lost in the pasted source; structure
# reconstructed from the visible key order — confirm against the repo file.
project_name: spray
before:
  hooks:
    - go mod tidy
    - go generate
builds:
  - main: .
    binary: "{{ .ProjectName }}_{{ .Os }}_{{ .Arch }}"
    goos:
      - windows
      - linux
      - darwin
    goarch:
      - amd64
      - "386"
      - arm64
    ignore:
      - goos: windows
        goarch: arm64
      - goos: darwin
        goarch: "386"
    # FIX: original had "cmd.ver==" (doubled '='). The linker -X flag takes a
    # single key=value pair, so the doubled '=' injected the literal value
    # "={{ .Tag }}" into ver instead of the tag itself.
    ldflags: "-s -w -X 'github.com/chainreactors/spray/cmd.ver={{ .Tag }}'"
    flags:
      - -trimpath
    asmflags:
      - all=-trimpath={{.Env.GOPATH}}
    gcflags:
      - all=-trimpath={{.Env.GOPATH}}
    no_unique_dist_dir: true
    env:
      - CGO_ENABLED=0
    tags:
      - forceposix
      - osusergo
      - netgo
upx:
  - enabled: true
    goos: [linux, windows]   # upx does not reliably pack darwin binaries
    goarch:
      - amd64
      - "386"
archives:
  - name_template: "{{ .ProjectName }}_{{ .Os }}_{{ .Arch }}"
    format: binary
checksum:
  name_template: "{{ .ProjectName }}_checksums.txt"
changelog:
  sort: desc
  filters:
    exclude:
      # FIX: changelog filters are case-sensitive Go regexps; the original
      # '^MERGE' never matched commits like "Merge pull request ...".
      - '^Merge'
      - "{{ .Tag }}"
      - "^docs"
release:
  github:
    owner: chainreactors
    name: spray
  draft: true

135
README.md
View File

@ -1,8 +1,135 @@
# SPRAY
blog posts:
- https://chainreactors.github.io/wiki/blog/2024/07/24/fingers-introduce/
- https://chainreactors.github.io/wiki/blog/2024/08/25/spray-best-practices/
![](https://socialify.git.ci/chainreactors/spray/image?description=1&font=Inter&forks=1&issues=1&language=1&name=1&owner=1&pattern=Circuit%20Board&pulls=1&stargazers=1&theme=Light)
<p align="center">
<a href="#features">Features</a>
<a href="#quickstart">QuickStart</a>
<a href="#make">Make</a>
<a href="https://chainreactors.github.io/wiki/spray/">Wiki</a>
</p>
## Features
**最好用最智能最可控的目录爆破工具**
* 超强的性能, 在本地测试极限性能的场景下, 能超过ffuf与feroxbuster的性能50%以上. 实际情况受到网络的影响, 感受没有这么明确. 但在多目标下可以感受到明显的区别.
* 基于掩码的字典生成
* 基于规则的字典生成
* 动态智能过滤, 自定义过滤策略
* 全量[gogo](https://github.com/chainreactors/gogo)的指纹识别, 全量的[fingerprinthub](https://github.com/0x727/FingerprintHub),[wappalyzer](https://github.com/projectdiscovery/wappalyzergo)指纹
* 自定义信息提取, 内置敏感信息提取规则
* 自定义输出格式与内容
* *nix的命令行设计, 轻松与其他工具联动
* 多角度的自动被ban,被waf判断
* 断点续传
## QuickStart
[**Document**](https://chainreactors.github.io/wiki/spray/start)
### 基本使用
**从字典中读取目录进行爆破**
`spray -u http://example.com -d wordlist1.txt -d wordlist2.txt`
**通过掩码生成字典进行爆破**
`spray -u http://example.com -w "/aaa/bbb{?l#4}/ccc"`
**通过规则生成字典爆破**
规则文件格式参考hashcat的字典生成规则
`spray -u http://example.com -r rule.txt -d 1.txt`
**批量爆破多个目标**
`spray -l url.txt -r rule.txt -d 1.txt`
**断点续传**
`spray --resume stat.json`
### 高级用法
**check-only 模式**
类似ehole/httpx这类对单页面信息收集的模式. 会有针对性的性能优化. 默认使用[templates](https://github.com/chainreactors/templates/tree/master/fingers)指纹库. 可以使用`--finger`打开第三方指纹库的匹配
`spray -l url.txt --check-only`
**启用拓展指纹识别**
会进行主动探测常见的指纹目录, 并额外启用fingerprinthub与wappalyzer拓展指纹库
`spray -u http://example.com --finger `
**启用爬虫**
`spray -u http://example.com --crawl`
**扫描备份文件与常见通用文件**
`spray -u http://example.com --bak --common`
**启用所有插件**
`spray -u http://example.com -a`
**被动url收集**
参见: https://github.com/chainreactors/urlfounder
## Wiki
详细用法请见[wiki](https://chainreactors.github.io/wiki/spray/)
https://chainreactors.github.io/wiki/spray/
## Make
```
git clone --recurse-submodules https://github.com/chainreactors/spray
cd spray
go mod tidy
go generate
go build .
```
## Similar or related works
* [ffuf](https://github.com/ffuf/ffuf) 一款优秀的http fuzz工具, 与spray的功能有一定重合但并不完全相同
* [feroxbuster](https://github.com/epi052/feroxbuster) 在编写spray之前我最常使用的目录爆破工具, 但因为批量扫描与过滤配置不便的原因选择自行编写
* [dirsearch](https://github.com/maurosoria/dirsearch) 较早的目录爆破工具, 参考了部分字典生成与配色
* [httpx](https://github.com/projectdiscovery/httpx) http信息收集功能, 参考了通过脚本语言编写任意过滤条件的功能
* [gobuster](https://github.com/OJ/gobuster) 一款同样是go编写的爆破工具, 但不仅限于目录爆破
## TODO
1. [ ] fuzzyequal
2. [ ] http2
3. [ ] auto-tune, 自动调整并发数量
4. [ ] 递归
1. [x] 模糊对比
2. [x] 断点续传
3. [x] 简易爬虫
4. [x] 支持http2
5. [ ] auto-tune, 自动调整并发数量
6. [x] 可自定义的递归配置
7. [x] 参考[feroxbuster](https://github.com/epi052/feroxbuster)的`--collect-backups`, 自动爆破有效目录的备份
8. [x] 支持socks/http代理, 不建议使用, 优先级较低. 代理的keep-alive会带来严重的性能下降
9. [ ] 云函数化, chainreactors工具链的通用分布式解决方案.
## Thanks
* [fuzzuli](https://github.com/musana/fuzzuli) 提供了一个备份文件字典生成思路
* [fingerprinthub](https://github.com/0x727/FingerprintHub) 作为指纹库的补充
* [wappalyzer](https://github.com/projectdiscovery/wappalyzergo) 作为指纹库补充
* [dirsearch](https://github.com/maurosoria/dirsearch) 提供了默认字典

View File

@ -3,15 +3,70 @@ package cmd
import (
"context"
"fmt"
"github.com/chainreactors/files"
"github.com/chainreactors/logs"
"github.com/chainreactors/spray/internal"
"github.com/chainreactors/spray/core"
"github.com/chainreactors/spray/core/ihttp"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/utils/iutils"
"github.com/jessevdk/go-flags"
"os"
"os/signal"
"syscall"
"time"
)
var ver = "dev"
var DefaultConfig = "config.yaml"
func init() {
logs.Log.SetColorMap(map[logs.Level]func(string) string{
logs.Info: logs.PurpleBold,
logs.Important: logs.GreenBold,
pkg.LogVerbose: logs.Green,
})
}
func Spray() {
var option internal.Option
var option core.Option
if files.IsExist(DefaultConfig) {
logs.Log.Debug("config.yaml exist, loading")
err := core.LoadConfig(DefaultConfig, &option)
if err != nil {
logs.Log.Error(err.Error())
return
}
}
parser := flags.NewParser(&option, flags.Default)
parser.Usage = `
WIKI: https://chainreactors.github.io/wiki/spray
QUICKSTART:
basic:
spray -u http://example.com
basic cidr and port:
spray -i example -p top2,top3
simple brute:
spray -u http://example.com -d wordlist1.txt -d wordlist2.txt
mask-base brute with wordlist:
spray -u http://example.com -w "/aaa/bbb{?l#4}/ccc"
rule-base brute with wordlist:
spray -u http://example.com -r rule.txt -d 1.txt
list input spray:
spray -l url.txt -r rule.txt -d 1.txt
resume:
spray --resume stat.json
`
_, err := parser.Parse()
if err != nil {
if err.(*flags.Error).Type != flags.ErrHelp {
@ -20,13 +75,110 @@ func Spray() {
return
}
runner, err := option.PrepareRunner()
// logs
logs.AddLevel(pkg.LogVerbose, "verbose", "[=] %s {{suffix}}\n")
if option.Debug {
logs.Log.SetLevel(logs.Debug)
} else if len(option.Verbose) > 0 {
logs.Log.SetLevel(pkg.LogVerbose)
}
if option.InitConfig {
configStr := core.InitDefaultConfig(&option, 0)
err := os.WriteFile(DefaultConfig, []byte(configStr), 0o744)
if err != nil {
logs.Log.Warn("cannot create config: config.yaml, " + err.Error())
return
}
if files.IsExist(DefaultConfig) {
logs.Log.Warn("override default config: ./config.yaml")
}
logs.Log.Info("init default config: ./config.yaml")
return
}
defer time.Sleep(time.Second)
if option.Config != "" {
err := core.LoadConfig(option.Config, &option)
if err != nil {
logs.Log.Error(err.Error())
return
}
if files.IsExist(DefaultConfig) {
logs.Log.Warnf("custom config %s, override default config", option.Config)
} else {
logs.Log.Important("load config: " + option.Config)
}
}
if option.Version {
fmt.Println(ver)
return
}
if option.PrintPreset {
err = pkg.Load()
if err != nil {
iutils.Fatal(err.Error())
}
err = pkg.LoadFingers()
if err != nil {
iutils.Fatal(err.Error())
}
core.PrintPreset()
return
}
if option.Format != "" {
core.Format(option)
return
}
err = option.Prepare()
if err != nil {
logs.Log.Errorf(err.Error())
return
}
ctx, _ := context.WithTimeout(context.Background(), time.Duration(runner.Deadline)*time.Second)
runner, err := option.NewRunner()
if err != nil {
logs.Log.Errorf(err.Error())
return
}
if option.ReadAll || runner.CrawlPlugin {
ihttp.DefaultMaxBodySize = -1
}
ctx, canceler := context.WithTimeout(context.Background(), time.Duration(runner.Deadline)*time.Second)
go func() {
select {
case <-ctx.Done():
time.Sleep(10 * time.Second)
logs.Log.Errorf("deadline and timeout not work, hard exit!!!")
os.Exit(0)
}
}()
go func() {
exitChan := make(chan os.Signal, 2)
signal.Notify(exitChan, os.Interrupt, syscall.SIGTERM)
go func() {
sigCount := 0
for {
<-exitChan
sigCount++
if sigCount == 1 {
logs.Log.Infof("Exit signal received, saving task and exiting...")
canceler()
} else if sigCount == 2 {
logs.Log.Infof("forcing exit...")
os.Exit(1)
}
}
}()
}()
err = runner.Prepare(ctx)
if err != nil {
@ -34,9 +186,4 @@ func Spray() {
return
}
if runner.CheckOnly {
runner.RunWithCheck(ctx)
} else {
runner.Run(ctx)
}
}

156
config.yaml Normal file
View File

@ -0,0 +1,156 @@
input:
# Files, Multi,dict files, e.g.: -d 1.txt -d 2.txt
dictionaries: []
# Bool, no dictionary
no-dict: false
# String, word generate dsl, e.g.: -w test{?ld#4}
word: ""
# Files, rule files, e.g.: -r rule1.txt -r rule2.txt
rules: []
# Files, when a valid path is found, use the append rules to generate new words from the current path
append-rules: []
# String, filter rule, e.g.: --rule-filter '>8 <4'
filter-rule: ""
# Files, when a valid path is found, append new words from these files to the current path
append-files: []
functions:
# String, add extensions (separated by commas), e.g.: -e jsp,jspx
extension: ""
# Bool, force add extensions
force-extension: false
# String, exclude extensions (separated by commas), e.g.: --exclude-extension jsp,jspx
exclude-extension: ""
# String, remove extensions (separated by commas), e.g.: --remove-extension jsp,jspx
remove-extension: ""
# Bool, upper wordlist, e.g.: --uppercase
upper: false
# Bool, lower wordlist, e.g.: --lowercase
lower: false
# Strings, add prefix, e.g.: --prefix aaa --prefix bbb
prefix: []
# Strings, add suffix, e.g.: --suffix aaa --suffix bbb
suffix: []
# Strings, replace string, e.g.: --replace aaa:bbb --replace ccc:ddd
replace: {}
# String, skip word when generate. rule, e.g.: --skip aaa
skip: []
output:
# String, custom match function, e.g.: --match 'current.Status != 200'
match: ""
# String, custom filter function, e.g.: --filter 'current.Body contains "hello"'
filter: ""
# Bool, enable fuzzy output
fuzzy: false
# String, output filename
output-file: ""
# String, fuzzy output filename
fuzzy-file: ""
# String, dump all request, and write to filename
dump-file: ""
# Bool, dump all request
dump: false
# Bool, auto generator output and fuzzy filename
auto-file: false
# String, output format, e.g.: --format 1.json
format: ""
# String, output probe fields (comma separated)
output_probe: ""
# Bool, Quiet
quiet: false
# Bool, no color
no-color: false
# Bool, No progress bar
no-bar: false
# Bool, No stat
no-stat: true
plugins:
# Bool, enable all plugin
all: false
# Strings, extract response, e.g.: --extract js --extract ip --extract version:(.*?)
extract: []
# String, extract config filename
extract-config: ""
# Bool, enable recon
recon: false
# Bool, enable active finger detect
finger: false
# Bool, enable bak found
bak: false
# Bool, enable valid result bak found, equal --append-rule rule/filebak.txt
file-bak: false
# Bool, enable common file found
common: false
# Bool, enable crawl
crawl: false
# Int, crawl depth
crawl-depth: 3
request:
# Strings, custom headers, e.g.: --headers 'Auth: example_auth'
headers: []
# String, custom user-agent, e.g.: --user-agent Custom
useragent: ""
# Bool, use random with default user-agent
random-useragent: false
# Strings, custom cookie
cookies: []
# Bool, read all response body
read-all: false
# Int, max response body length (kb), -1 read-all, 0 not read body, default 100k, e.g. --max-length 1000
max-length: 100
mode:
# Int, request rate limit (rate/s), e.g.: --rate-limit 100
rate-limit: 0
# Bool, skip error break
force: false
# Bool, check only
default: false
# Bool, no scope
no-scope: false
# String, custom scope, e.g.: --scope *.example.com
scope: []
# String,custom recursive rule, e.g.: --recursive current.IsDir()
recursive: current.IsDir()
# Int, recursive depth
depth: 0
# String, custom index path
index: /
# String, custom random path
random: ""
# Int, check period when request
check-period: 200
# Int, check period when error
error-period: 10
# Int, break when the error exceeds the threshold
error-threshold: 20
# Strings (comma split),custom black status
black-status: 400,410
# Strings (comma split), custom white status
white-status: 200
# Strings (comma split), custom fuzzy status
fuzzy-status: 500,501,502,503
# Strings (comma split), custom unique status
unique-status: 403,200,404
# Bool, unique response
unique: false
# Int, retry count
retry: 0
sim-distance: 5
misc:
# String, path/host spray
mod: path
# String, Client type
client: auto
# Int, deadline (seconds)
deadline: 999999
# Int, timeout with request (seconds)
timeout: 5
# Int, Pool size
pool: 5
# Int, number of threads per pool
thread: 20
# Bool, output debug info
debug: false
# Bool, log verbose level ,default 0, level1: -v level2 -vv
verbose: []
# String, proxy address, e.g.: --proxy socks5://127.0.0.1:1080
proxy: ""

264
core/baseline/baseline.go Normal file
View File

@ -0,0 +1,264 @@
package baseline
import (
"bytes"
"github.com/chainreactors/fingers/common"
"github.com/chainreactors/parsers"
"github.com/chainreactors/spray/core/ihttp"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/utils/encode"
"github.com/chainreactors/utils/iutils"
"net/http"
"net/url"
"strconv"
"strings"
)
// NewBaseline builds a Baseline result from a raw HTTP response. It copies
// the header and body out of resp (whose buffers may be pooled/reused),
// parses the raw bytes back into an *http.Response, resolves the redirect
// location and URL fields, and computes the uniqueness hash. On parse or
// URL errors the baseline is returned with IsValid=false and Reason set.
func NewBaseline(u, host string, resp *ihttp.Response) *Baseline {
	var err error
	bl := &Baseline{
		SprayResult: &parsers.SprayResult{
			UrlString:  u,
			Status:     resp.StatusCode(),
			IsValid:    true,
			Frameworks: make(common.Frameworks),
		},
	}

	if t, ok := pkg.ContentTypeMap[resp.ContentType()]; ok {
		bl.ContentType = t
		bl.Title = t + " data"
	} else {
		bl.ContentType = "other"
	}

	// copy header bytes: resp's buffer may be recycled after this call
	header := resp.Header()
	bl.Header = make([]byte, len(header))
	copy(bl.Header, header)
	bl.HeaderLength = len(bl.Header)

	if i := resp.ContentLength(); ihttp.CheckBodySize(i) {
		// copy body bytes for the same reason as the header
		if body := resp.Body(); body != nil {
			bl.Body = make([]byte, len(body))
			copy(bl.Body, body)
		}
		if i == -1 {
			// chunked transfer: length unknown up front, use what was read
			bl.Chunked = true
			bl.BodyLength = len(bl.Body)
		} else {
			bl.BodyLength = int(i)
		}
	}

	bl.Raw = append(bl.Header, bl.Body...)
	bl.Response, err = pkg.ParseRawResponse(bl.Raw)
	if err != nil {
		bl.IsValid = false
		bl.Reason = pkg.ErrResponseError.Error()
		bl.ErrString = err.Error()
		return bl
	}
	// second lookup is redundant for net/http (Get canonicalizes keys),
	// kept as a defensive fallback
	if r := bl.Response.Header.Get("Location"); r != "" {
		bl.RedirectURL = r
	} else {
		bl.RedirectURL = bl.Response.Header.Get("location")
	}

	bl.Dir = bl.IsDir()
	uu, err := url.Parse(u)
	if err == nil {
		bl.Path = uu.Path
		bl.Url = uu
		if uu.Host != host {
			bl.Host = host
		}
	} else {
		bl.IsValid = false
		bl.Reason = pkg.ErrUrlError.Error()
		bl.ErrString = err.Error()
	}

	bl.Unique = UniqueHash(bl)
	return bl
}
// NewInvalidBaseline records a response that was filtered out, keeping only
// the cheap metadata (status, length, redirect target) needed for logging.
func NewInvalidBaseline(u, host string, resp *ihttp.Response, reason string) *Baseline {
	bl := &Baseline{
		SprayResult: &parsers.SprayResult{
			UrlString: u,
			Status:    resp.StatusCode(),
			IsValid:   false,
			Reason:    reason,
		},
	}

	// Drain the body even for invalid results, otherwise keep-alive
	// connections cannot be reused.
	resp.Body()

	bl.BodyLength = int(resp.ContentLength())
	bl.RedirectURL = string(resp.GetHeader("Location"))
	bl.Dir = bl.IsDir()

	parsed, err := url.Parse(u)
	if err != nil {
		return bl
	}
	bl.Path = parsed.Path
	bl.Url = parsed
	if parsed.Host != host {
		bl.Host = host
	}
	return bl
}
// Baseline wraps a parsed spray result together with the raw response data
// and crawl/recursion bookkeeping that is never serialized to output.
type Baseline struct {
	*parsers.SprayResult
	Url                *url.URL       `json:"-"` // parsed form of UrlString
	Dir                bool           `json:"-"` // true when Path ends with "/"
	Chunked            bool           `json:"-"` // set when content length was -1 (chunked)
	Body               pkg.BS         `json:"-"` // copied response body
	Header             pkg.BS         `json:"-"` // copied raw header bytes
	Raw                pkg.BS         `json:"-"` // Header followed by Body
	Response           *http.Response `json:"-"` // re-parsed from Raw
	Recu               bool           `json:"-"` // marked for recursive scanning
	RecuDepth          int            `json:"-"`
	URLs               []string       `json:"-"` // URLs collected by CollectURL
	Collected          bool           `json:"-"` // Collect() already ran
	Retry              int            `json:"-"`
	SameRedirectDomain bool           `json:"-"`
	IsBaseline         bool           `json:"-"`
}
// IsDir reports whether the baseline's path refers to a directory,
// i.e. it ends with a trailing slash.
func (bl *Baseline) IsDir() bool {
	return strings.HasSuffix(bl.Path, "/")
}
// Collect performs deep, one-time enrichment of the baseline: framework
// fingerprinting, title/favicon extraction, hashes, extractor matches and
// the uniqueness hash. Subsequent calls are no-ops.
func (bl *Baseline) Collect() {
	if bl.Collected { // guard against collecting twice
		return
	} else {
		bl.Collected = true
	}

	if bl.ContentType == "html" || bl.ContentType == "json" || bl.ContentType == "txt" {
		// The fingerprint databases were not designed for js/css content;
		// skipping it reduces false positives and improves performance.
		//fmt.Println(bl.Source, bl.Url.String()+bl.Path, bl.RedirectURL, "call fingersengine")
		if pkg.EnableAllFingerEngine {
			bl.Frameworks = pkg.EngineDetect(bl.Raw)
		} else {
			bl.Frameworks = pkg.FingersDetect(bl.Raw)
		}
	}

	if len(bl.Body) > 0 {
		if bl.ContentType == "html" {
			bl.Title = iutils.AsciiEncode(parsers.MatchTitle(bl.Body))
		} else if bl.ContentType == "ico" {
			// favicon bodies are matched against the favicon fingerprint set
			if frame := pkg.FingerEngine.Favicon().Match(bl.Body); frame != nil {
				bl.Frameworks.Merge(frame)
			}
		}
	}

	bl.Hashes = parsers.NewHashes(bl.Raw)
	bl.Extracteds.Merge(pkg.Extractors.Extract(string(bl.Raw), true))
	bl.Unique = UniqueHash(bl)
}
// CollectURL extracts candidate URLs from the response body using the
// built-in "js" and "url" extractor regexps, de-duplicates them and stores
// them on the result under the extracted name "crawl".
func (bl *Baseline) CollectURL() {
	if len(bl.Body) == 0 {
		return
	}

	body := string(bl.Body)
	extractors := []struct {
		name   string
		filter func(string) bool
	}{
		{"js", pkg.FilterJs},
		{"url", pkg.FilterUrl},
	}
	for _, e := range extractors {
		for _, reg := range pkg.ExtractRegexps[e.name][0].CompiledRegexps {
			for _, match := range reg.FindAllStringSubmatch(body, -1) {
				cleaned := pkg.CleanURL(match[1])
				if cleaned != "" && !e.filter(cleaned) {
					bl.URLs = append(bl.URLs, cleaned)
				}
			}
		}
	}

	bl.URLs = iutils.StringsUnique(bl.URLs)
	if len(bl.URLs) != 0 {
		bl.Extracteds = append(bl.Extracteds, &parsers.Extracted{
			Name:          "crawl",
			ExtractResult: bl.URLs,
		})
	}
}
// Compare reports how similar bl is to other:
//
//	 1 — certainly the same page
//	 0 — possibly the same page (defer to fuzzy comparison)
//	-1 — different pages
func (bl *Baseline) Compare(other *Baseline) int {
	if other.RedirectURL != "" && bl.RedirectURL == other.RedirectURL {
		// identical non-empty redirect targets: treat as the same page
		return 1
	}

	if bl.BodyLength == other.BodyLength {
		if bytes.Equal(bl.Body, other.Body) {
			// same length and same content: definitely equal
			return 1
		}
		// same length but different content: likely a random token
		// (e.g. CSRF) embedded in an otherwise identical page
		return 0
	}

	if i := bl.BodyLength - other.BodyLength; (i < 16 && i > 0) || (i > -16 && i < 0) {
		// lengths differ by fewer than 16 bytes: plausibly the same page
		// with a random token, needs fuzzy comparison
		return 0
	}

	if strings.Contains(string(other.Body), other.Path) {
		// the body echoes the requested path, which can inflate the
		// length difference; treat as uncertain
		return 0
	}
	// large length difference and no path echo: different pages
	// (the original trailing `return -1` after this if/else chain was
	// unreachable and has been removed)
	return -1
}
// ProbeOutput renders the fields named in format as one tab-separated
// line, with surrounding whitespace trimmed.
func (bl *Baseline) ProbeOutput(format []string) string {
	values := make([]string, len(format))
	for i, f := range format {
		values[i] = bl.Get(f)
	}
	// leading "\t" reproduces the original per-field prefix; TrimSpace
	// then strips it along with any trailing whitespace
	return strings.TrimSpace("\t" + strings.Join(values, "\t"))
}
var Distance uint8 = 5 // 数字越小越相似, 数字为0则为完全一致.
// FuzzyCompare reports whether other's simhash is within the global
// Distance threshold of bl's, recording the computed distance on other as
// a side effect. The raw simhash is used because very short bodies make
// body-simhash distances unreliable.
func (bl *Baseline) FuzzyCompare(other *Baseline) bool {
	other.Distance = encode.SimhashCompare(other.RawSimhash, bl.RawSimhash)
	return other.Distance < Distance
}
// UniqueHash derives a CRC16 fingerprint from host, status code, redirect
// URL, content type, title and the body length rounded down to the nearest
// ten (so tiny dynamic variations hash the same). Body length can still
// cause occasional false positives; no better scheme is known yet.
func UniqueHash(bl *Baseline) uint16 {
	key := bl.Host +
		strconv.Itoa(bl.Status) +
		bl.RedirectURL +
		bl.ContentType +
		bl.Title +
		strconv.Itoa(bl.BodyLength/10*10)
	return pkg.CRC16Hash([]byte(key))
}

202
core/config.go Normal file
View File

@ -0,0 +1,202 @@
package core
import (
"fmt"
"github.com/gookit/config/v2"
"reflect"
"strconv"
"strings"
)
//var (
// defaultConfigPath = ".config/spray/"
// defaultConfigFile = "config.yaml"
//)
//
//func LoadDefault(v interface{}) {
// dir, err := os.UserHomeDir()
// if err != nil {
// logs.Log.Error(err.Error())
// return
// }
// if !files.IsExist(filepath.Join(dir, defaultConfigPath, defaultConfigFile)) {
// err := os.MkdirAll(filepath.Join(dir, defaultConfigPath), 0o700)
// if err != nil {
// logs.Log.Error(err.Error())
// return
// }
// f, err := os.Create(filepath.Join(dir, defaultConfigPath, defaultConfigFile))
// if err != nil {
// logs.Log.Error(err.Error())
// return
// }
// err = LoadConfig(filepath.Join(dir, defaultConfigPath, defaultConfigFile), v)
// if err != nil {
// logs.Log.Error(err.Error())
// return
// }
// var buf bytes.Buffer
// _, err = config.DumpTo(&buf, config.Yaml)
// if err != nil {
// logs.Log.Error(err.Error())
// return
// }
// fmt.Println(buf.String())
// f.Sync()
// }
//}
// LoadConfig reads the given config file and decodes its contents into v.
func LoadConfig(filename string, v interface{}) error {
	if err := config.LoadFiles(filename); err != nil {
		return err
	}
	return config.Decode(v)
}
// convertToFieldType parses defaultVal into a value matching the kind of
// fieldType: bool, int64, float64 or string. It returns nil when the kind
// is unsupported or the value cannot be parsed.
func convertToFieldType(fieldType reflect.StructField, defaultVal string) interface{} {
	kind := fieldType.Type.Kind()
	switch {
	case kind == reflect.Bool:
		if v, err := strconv.ParseBool(defaultVal); err == nil {
			return v
		}
	case kind >= reflect.Int && kind <= reflect.Int64:
		// reflect.Int..Int64 are contiguous Kind constants
		if v, err := strconv.ParseInt(defaultVal, 10, 64); err == nil {
			return v
		}
	case kind == reflect.Float32 || kind == reflect.Float64:
		if v, err := strconv.ParseFloat(defaultVal, 64); err == nil {
			return v
		}
	case kind == reflect.String:
		return defaultVal
	}
	// unsupported kind or unparsable value
	return nil
}
// setFieldValue returns the empty placeholder for a field's kind, used
// when no `default` tag is present. Unsupported kinds yield nil.
func setFieldValue(field reflect.Value) interface{} {
	var zero interface{}
	switch field.Kind() {
	case reflect.Bool:
		zero = false
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		zero = 0
	case reflect.Float32, reflect.Float64:
		zero = 0.0
	case reflect.Slice, reflect.Array:
		zero = []interface{}{} // empty list
	case reflect.String:
		zero = ""
	case reflect.Struct:
		zero = make(map[string]interface{}) // empty section
	default:
		zero = nil
	}
	return zero
}
// extractConfigAndDefaults walks the struct value v and fills result with
// a nested map keyed by each field's `config` tag — the value comes from
// the `default` tag when present, otherwise a kind-appropriate empty value
// — and fills comments with each field's `description` tag keyed by its
// dotted config path. Fields without a `config` tag are skipped.
func extractConfigAndDefaults(v reflect.Value, result map[string]interface{}, comments map[string]string) {
	t := v.Type()
	for i := 0; i < v.NumField(); i++ {
		field := v.Field(i)
		fieldType := t.Field(i)
		configTag := fieldType.Tag.Get("config")
		defaultTag := fieldType.Tag.Get("default")
		descriptionTag := fieldType.Tag.Get("description") // read the description tag
		if configTag != "" {
			var value interface{}
			if defaultTag != "" {
				value = convertToFieldType(fieldType, defaultTag)
			} else {
				value = setFieldValue(field)
			}
			fullPath := configTag // with deeper recursion this may need to become the full dotted path
			if field.Kind() == reflect.Struct {
				// nested struct: recurse, then namespace its comments under this section
				nestedResult := make(map[string]interface{})
				nestedComments := make(map[string]string)
				extractConfigAndDefaults(field, nestedResult, nestedComments)
				result[configTag] = nestedResult
				for k, v := range nestedComments {
					comments[fullPath+"."+k] = v // keep the nested comment path
				}
			} else {
				result[configTag] = value
				if descriptionTag != "" {
					comments[fullPath] = descriptionTag
				}
			}
		}
	}
}
// InitDefaultConfig renders cfg as a commented YAML document: each field's
// `description` tag becomes a preceding comment line, its `config` tag the
// key, and its `default` tag (or a kind-appropriate empty value) the value.
// indentLevel sets the indentation depth (two spaces per level) and is
// incremented when recursing into nested structs.
func InitDefaultConfig(cfg interface{}, indentLevel int) string {
	var yamlStr strings.Builder
	v := reflect.ValueOf(cfg)
	if v.Kind() == reflect.Ptr {
		v = v.Elem() // dereference the pointer
	}
	t := v.Type()
	for i := 0; i < v.NumField(); i++ {
		field := v.Field(i)
		fieldType := t.Field(i)
		configTag := fieldType.Tag.Get("config")
		if configTag == "" {
			continue // skip fields without a config tag
		}
		defaultTag := fieldType.Tag.Get("default")
		descriptionTag := fieldType.Tag.Get("description")

		// emit the description as a comment line
		if descriptionTag != "" {
			yamlStr.WriteString(fmt.Sprintf("%s# %s\n", strings.Repeat(" ", indentLevel*2), descriptionTag))
		}

		// prepare the rendered value string
		valueStr := prepareValue(fieldType.Type.Kind(), defaultTag)

		// emit either a nested section or a plain key/value pair
		switch field.Kind() {
		case reflect.Struct:
			// nested struct: recurse one level deeper
			yamlStr.WriteString(fmt.Sprintf("%s%s:\n%s", strings.Repeat(" ", indentLevel*2), configTag, InitDefaultConfig(field.Interface(), indentLevel+1)))
		default:
			// plain "key: value" line
			yamlStr.WriteString(fmt.Sprintf("%s%s: %s\n", strings.Repeat(" ", indentLevel*2), configTag, valueStr))
		}
	}
	return yamlStr.String()
}
// prepareValue returns the YAML literal for a field: the `default` tag
// value when present, otherwise an empty value appropriate for the kind.
func prepareValue(kind reflect.Kind, defaultVal string) string {
	if defaultVal != "" {
		return defaultVal
	}
	// empty-value literals per kind; anything unlisted falls back to ""
	empties := map[reflect.Kind]string{
		reflect.Bool:    "false",
		reflect.Int:     "0",
		reflect.Int8:    "0",
		reflect.Int16:   "0",
		reflect.Int32:   "0",
		reflect.Int64:   "0",
		reflect.Float32: "0.0",
		reflect.Float64: "0.0",
		reflect.Slice:   "[]",
		reflect.Array:   "[]",
		reflect.String:  `""`,
		reflect.Struct:  "{}",
		reflect.Map:     "{}",
	}
	if v, ok := empties[kind]; ok {
		return v
	}
	return `""`
}

181
core/finger.go Normal file
View File

@ -0,0 +1,181 @@
package core
import (
"fmt"
"github.com/chainreactors/files"
"github.com/chainreactors/fingers"
"github.com/chainreactors/fingers/resources"
"github.com/chainreactors/logs"
"github.com/chainreactors/utils/encode"
"github.com/chainreactors/utils/iutils"
"io"
"net/http"
"os"
"path/filepath"
"strings"
)
var (
DefaultFingerPath = "fingers"
DefaultFingerTemplate = "fingers/templates"
FingerConfigs = map[string]string{
fingers.FingersEngine: "fingers_http.json.gz",
fingers.FingerPrintEngine: "fingerprinthub_v3.json.gz",
fingers.WappalyzerEngine: "wappalyzer.json.gz",
fingers.EHoleEngine: "ehole.json.gz",
fingers.GobyEngine: "goby.json.gz",
}
baseURL = "https://raw.githubusercontent.com/chainreactors/fingers/master/resources/"
)
// FingerOptions controls active fingerprinting: enabling detection,
// updating the local fingerprint databases, and selecting which engines
// are used.
type FingerOptions struct {
	Finger       bool   `long:"finger" description:"Bool, enable active finger detect" config:"finger"`
	FingerUpdate bool   `long:"update" description:"Bool, update finger database" config:"update"`
	FingerPath   string `long:"finger-path" default:"fingers" description:"String, 3rd finger config path" config:"finger-path"`
	//FingersTemplatesPath string `long:"finger-template" default:"fingers/templates" description:"Bool, use finger templates path" config:"finger-template"`
	FingerEngines string `long:"finger-engine" default:"all" description:"String, custom finger engine, e.g. --finger-engine ehole,goby" config:"finger-engine"`
}
// Validate checks the finger options: when an update is requested it
// ensures the finger config directory exists (creating it if necessary),
// and it verifies that every engine named in --finger-engine is known.
func (opt *FingerOptions) Validate() error {
	var err error
	if opt.FingerUpdate {
		if opt.FingerPath != DefaultFingerPath && !files.IsExist(opt.FingerPath) {
			// custom path that does not exist yet
			err = os.MkdirAll(opt.FingerPath, 0755)
			if err != nil {
				return err
			}
		} else if !files.IsExist(DefaultFingerPath) {
			// fall back to the default path and create it
			opt.FingerPath = DefaultFingerPath
			err = os.MkdirAll(DefaultFingerPath, 0755)
			if err != nil {
				return err
			}
		}
	}

	if opt.FingerEngines != "all" {
		for _, name := range strings.Split(opt.FingerEngines, ",") {
			if !iutils.StringsContains(fingers.AllEngines, name) {
				// report the full set of valid engines (the previous
				// message printed a single engine name by mistake)
				return fmt.Errorf("invalid finger engine: %s, please input one of %v", name, fingers.AllEngines)
			}
		}
	}
	return nil
}
// LoadLocalFingerConfig overrides the embedded fingerprint databases with
// local files listed in FingerConfigs whenever a local file's MD5 differs
// from the embedded checksum. Missing local files are silently skipped;
// an unknown engine name is an error.
func (opt *FingerOptions) LoadLocalFingerConfig() error {
	for name, fingerPath := range FingerConfigs {
		if content, err := os.ReadFile(fingerPath); err == nil {
			if encode.Md5Hash(content) != resources.CheckSum[name] {
				// local file diverges from the embedded copy: use it
				logs.Log.Importantf("found %s difference, use %s replace embed", name, fingerPath)
				switch name {
				case fingers.FingersEngine:
					resources.FingersHTTPData = content
				case fingers.FingerPrintEngine:
					resources.Fingerprinthubdata = content
				case fingers.EHoleEngine:
					resources.EholeData = content
				case fingers.GobyEngine:
					resources.GobyData = content
				case fingers.WappalyzerEngine:
					resources.WappalyzerData = content
				default:
					return fmt.Errorf("unknown engine name")
				}
			} else {
				logs.Log.Infof("%s config is up to date", name)
			}
		}
	}
	return nil
}
// UpdateFinger downloads every known fingerprint database and logs when
// all of them were already up to date.
func (opt *FingerOptions) UpdateFinger() error {
	modified := false
	for name := range FingerConfigs {
		updated, err := opt.downloadConfig(name)
		if err != nil {
			return err
		}
		if updated {
			modified = true
		}
	}
	if !modified {
		logs.Log.Importantf("everything is up to date")
	}
	return nil
}
// downloadConfig fetches the latest copy of the named fingerprint database
// and stores it under opt.FingerPath. It returns true when the on-disk
// file was created or changed.
//
// Fixes over the previous version: the io.ReadAll error is checked before
// the content is used; the redundant os.Create before os.WriteFile is
// gone; and a fresh first-time download now reports true so UpdateFinger
// no longer claims "everything is up to date" after downloading new files.
func (opt *FingerOptions) downloadConfig(name string) (bool, error) {
	fingerFile, ok := FingerConfigs[name]
	if !ok {
		return false, fmt.Errorf("unknown engine name")
	}
	url := baseURL + fingerFile
	resp, err := http.Get(url)
	if err != nil {
		return false, err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return false, fmt.Errorf("bad status: %s", resp.Status)
	}
	content, err := io.ReadAll(resp.Body)
	if err != nil {
		return false, err
	}

	filePath := filepath.Join(files.GetExcPath(), opt.FingerPath, fingerFile)
	if !files.IsExist(filePath) {
		// first download: write the file and report a change
		logs.Log.Importantf("download %s config from %s save to %s", name, url, fingerFile)
		if err := os.WriteFile(filePath, content, 0644); err != nil {
			return false, err
		}
		return true, nil
	}

	origin, err := os.ReadFile(filePath)
	if err != nil {
		return false, err
	}
	if resources.CheckSum[name] != encode.Md5Hash(origin) {
		// local copy already diverges from the embedded checksum:
		// refresh it with the downloaded content
		logs.Log.Importantf("update %s config from %s save to %s", name, url, fingerFile)
		if err := os.WriteFile(filePath, content, 0644); err != nil {
			return false, err
		}
		return true, nil
	}
	if encode.Md5Hash(content) != encode.Md5Hash(origin) {
		// local copy matches the embedded version but upstream changed
		logs.Log.Infof("download %s config from %s save to %s", name, url, fingerFile)
		if err := os.WriteFile(filePath, content, 0644); err != nil {
			return false, err
		}
		return true, nil
	}
	return false, nil
}

92
core/format.go Normal file
View File

@ -0,0 +1,92 @@
package core
import (
"bytes"
"encoding/json"
"github.com/chainreactors/logs"
"github.com/chainreactors/spray/core/baseline"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/words/mask"
"io"
"net/url"
"os"
"strings"
)
// Format re-renders a previously saved result file (or stdin when
// opts.Format is "stdin"): results are grouped by host and de-duplicated
// by path, fuzzy entries are dropped unless --fuzzy is set, and each
// result is printed colorized, plain, or as probe fields when
// --output_probe is given.
//
// Fix: a read failure is now logged instead of silently returning.
func Format(opts Option) {
	var content []byte
	var err error
	if opts.Format == "stdin" {
		content, err = io.ReadAll(os.Stdin)
	} else {
		content, err = os.ReadFile(opts.Format)
	}
	if err != nil {
		logs.Log.Error(err.Error())
		return
	}

	// group results by host, de-duplicating by path (last one wins)
	group := make(map[string]map[string]*baseline.Baseline)
	for _, line := range bytes.Split(bytes.TrimSpace(content), []byte("\n")) {
		var result baseline.Baseline
		err := json.Unmarshal(line, &result)
		if err != nil {
			logs.Log.Error(err.Error())
			return
		}
		result.Url, err = url.Parse(result.UrlString)
		if err != nil {
			continue
		}
		if _, exists := group[result.Url.Host]; !exists {
			group[result.Url.Host] = make(map[string]*baseline.Baseline)
		}
		group[result.Url.Host][result.Path] = &result
	}

	for _, results := range group {
		for _, result := range results {
			if !opts.Fuzzy && result.IsFuzzy {
				continue
			}
			if opts.OutputProbe == "" {
				if !opts.NoColor {
					logs.Log.Console(result.ColorString() + "\n")
				} else {
					logs.Log.Console(result.String() + "\n")
				}
			} else {
				probes := strings.Split(opts.OutputProbe, ",")
				logs.Log.Console(result.ProbeOutput(probes) + "\n")
			}
		}
	}
}
// PrintPreset dumps every built-in resource to the console: rules, dicts,
// mask keywords, extractors, fingerprint engines and active paths.
func PrintPreset() {
	logs.Log.Console("internal rules:\n")
	for name, rule := range pkg.Rules {
		logs.Log.Consolef("\t%s\t%d rules\n", name, len(strings.Split(rule, "\n")))
	}

	logs.Log.Console("\ninternal dicts:\n")
	for name, dict := range pkg.Dicts {
		logs.Log.Consolef("\t%s\t%d items\n", name, len(dict))
	}

	logs.Log.Console("\ninternal words keyword:\n")
	for name, words := range mask.SpecialWords {
		logs.Log.Consolef("\t%s\t%d words\n", name, len(words))
	}

	logs.Log.Console("\ninternal extractor:\n")
	for name := range pkg.ExtractRegexps {
		logs.Log.Consolef("\t%s\n", name)
	}

	logs.Log.Console("\ninternal fingers:\n")
	for name, engine := range pkg.FingerEngine.EnginesImpl {
		logs.Log.Consolef("\t%s\t%d fingerprints \n", name, engine.Len())
	}

	logs.Log.Consolef("\nload %d active path\n", len(pkg.ActivePath))
}

136
core/ihttp/client.go Normal file
View File

@ -0,0 +1,136 @@
package ihttp
import (
"context"
"crypto/tls"
"fmt"
"github.com/chainreactors/proxyclient"
"github.com/valyala/fasthttp"
"net"
"net/http"
"time"
)
var (
DefaultMaxBodySize int64 = 1024 * 100 // 100k
)
// CheckBodySize reports whether a body of the given size should be read,
// honoring the global DefaultMaxBodySize: -1 means unlimited, 0 means
// never read a body.
func CheckBodySize(size int64) bool {
	switch DefaultMaxBodySize {
	case -1:
		return true
	case 0:
		return false
	default:
		return size < DefaultMaxBodySize
	}
}
const (
Auto = iota
FAST
STANDARD
)
// NewClient builds either a fasthttp- or net/http-backed client from the
// given config. The standard client never follows redirects so responses
// can be inspected as-is; both clients skip TLS verification.
func NewClient(config *ClientConfig) *Client {
	if config.Type == FAST {
		fast := &fasthttp.Client{
			TLSConfig: &tls.Config{
				Renegotiation:      tls.RenegotiateOnceAsClient,
				InsecureSkipVerify: true,
			},
			Dial:                customDialFunc(config.ProxyClient, config.Timeout),
			MaxConnsPerHost:     config.Thread * 3 / 2,
			MaxIdleConnDuration: config.Timeout,
			//MaxConnWaitTimeout: time.Duration(timeout) * time.Second,
			ReadTimeout:                   config.Timeout,
			WriteTimeout:                  config.Timeout,
			ReadBufferSize:                16384, // 16k
			MaxResponseBodySize:           int(DefaultMaxBodySize),
			NoDefaultUserAgentHeader:      true,
			DisablePathNormalizing:        true,
			DisableHeaderNamesNormalizing: true,
		}
		return &Client{fastClient: fast, ClientConfig: config}
	}

	standard := &http.Client{
		Transport: &http.Transport{
			DialContext: config.ProxyClient,
			TLSClientConfig: &tls.Config{
				Renegotiation:      tls.RenegotiateNever,
				InsecureSkipVerify: true,
			},
			TLSHandshakeTimeout: config.Timeout,
			MaxConnsPerHost:     config.Thread * 3 / 2,
			IdleConnTimeout:     config.Timeout,
			ReadBufferSize:      16384, // 16k
		},
		Timeout: config.Timeout,
		// never follow redirects automatically; callers read Location
		CheckRedirect: func(req *http.Request, via []*http.Request) error {
			return http.ErrUseLastResponse
		},
	}
	return &Client{standardClient: standard, ClientConfig: config}
}
// ClientConfig carries the shared settings used to build either client
// implementation.
type ClientConfig struct {
	Type        int              // Auto, FAST or STANDARD
	Timeout     time.Duration    // request and idle-connection timeout
	Thread      int              // concurrency hint; caps connections per host
	ProxyClient proxyclient.Dial // optional proxy dialer; nil means direct dialing
}

// Client wraps exactly one of a fasthttp or a net/http client, chosen by
// ClientConfig.Type in NewClient.
type Client struct {
	fastClient     *fasthttp.Client
	standardClient *http.Client
	*ClientConfig
}
// TransToCheck disables keep-alive on the underlying client so that
// check-mode requests use fresh connections.
func (c *Client) TransToCheck() {
	switch {
	case c.fastClient != nil:
		c.fastClient.MaxConnsPerHost = -1 // disable keepalive
	case c.standardClient != nil:
		c.standardClient.Transport.(*http.Transport).DisableKeepAlives = true // disable keepalive
	}
}
// FastDo executes req through the fasthttp client with the configured
// timeout. The response is acquired from fasthttp's response pool.
func (c *Client) FastDo(req *fasthttp.Request) (*fasthttp.Response, error) {
	resp := fasthttp.AcquireResponse()
	err := c.fastClient.DoTimeout(req, resp, c.Timeout)
	return resp, err
}
// StandardDo executes req through the underlying net/http client.
func (c *Client) StandardDo(req *http.Request) (*http.Response, error) {
	return c.standardClient.Do(req)
}
// Do dispatches the request to whichever underlying client is configured
// and wraps the result in a Response tagged with the client type.
func (c *Client) Do(req *Request) (*Response, error) {
	switch {
	case c.fastClient != nil:
		resp, err := c.FastDo(req.FastRequest)
		return &Response{FastResponse: resp, ClientType: FAST}, err
	case c.standardClient != nil:
		resp, err := c.StandardDo(req.StandardRequest)
		return &Response{StandardResponse: resp, ClientType: STANDARD}, err
	default:
		return nil, fmt.Errorf("not found client")
	}
}
// customDialFunc adapts an optional proxy dialer to fasthttp's DialFunc.
// When dialer is nil, plain fasthttp dialing with the timeout is used.
func customDialFunc(dialer proxyclient.Dial, timeout time.Duration) fasthttp.DialFunc {
	if dialer == nil {
		return func(addr string) (net.Conn, error) {
			return fasthttp.DialTimeout(addr, timeout)
		}
	}
	return func(addr string) (net.Conn, error) {
		// The previous version discarded the cancel func, leaking the
		// timeout's timer/resources (vet: lostcancel). Per net.Dialer
		// semantics, canceling the context after DialContext returns
		// does not affect the established connection — assumed to hold
		// for proxyclient dialers as well.
		ctx, cancel := context.WithTimeout(context.Background(), timeout)
		defer cancel()
		return dialer.DialContext(ctx, "tcp", addr)
	}
}

View File

@ -1,30 +1,26 @@
package ihttp
import (
"context"
"github.com/chainreactors/spray/pkg"
"github.com/valyala/fasthttp"
"net/http"
)
func BuildPathRequest(clientType int, base, path string) (*Request, error) {
func BuildRequest(ctx context.Context, clientType int, base, path, host, method string) (*Request, error) {
if clientType == FAST {
req := fasthttp.AcquireRequest()
req.Header.SetMethod(method)
req.SetRequestURI(base + path)
return &Request{FastRequest: req, ClientType: FAST}, nil
} else {
req, err := http.NewRequest("GET", base+path, nil)
return &Request{StandardRequest: req, ClientType: STANDARD}, err
}
}
func BuildHostRequest(clientType int, base, host string) (*Request, error) {
if clientType == FAST {
req := fasthttp.AcquireRequest()
req.SetRequestURI(base)
if host != "" {
req.SetHost(host)
}
return &Request{FastRequest: req, ClientType: FAST}, nil
} else {
req, err := http.NewRequest("GET", base, nil)
req, err := http.NewRequestWithContext(ctx, method, base+path, nil)
if host != "" {
req.Host = host
}
return &Request{StandardRequest: req, ClientType: STANDARD}, err
}
}
@ -35,17 +31,29 @@ type Request struct {
ClientType int
}
func (r *Request) SetHeader(header map[string]string) {
if r.StandardRequest != nil {
for k, v := range header {
r.StandardRequest.Header.Set(k, v)
func (r *Request) SetHeaders(header http.Header, RandomUA bool) {
if RandomUA {
r.SetHeader("User-Agent", pkg.RandomUA())
}
if r.StandardRequest != nil {
r.StandardRequest.Header = header
} else if r.FastRequest != nil {
for k, v := range header {
r.FastRequest.Header.Set(k, v)
for _, i := range v {
r.FastRequest.Header.Set(k, i)
}
}
}
}
func (r *Request) SetHeader(key, value string) {
if r.StandardRequest != nil {
r.StandardRequest.Header.Set(key, value)
} else if r.FastRequest != nil {
r.FastRequest.Header.Set(key, value)
}
}
func (r *Request) URI() string {
if r.FastRequest != nil {

View File

@ -1,11 +1,12 @@
package ihttp
import (
"bytes"
"github.com/chainreactors/logs"
"github.com/chainreactors/utils/httputils"
"github.com/valyala/fasthttp"
"io"
"net/http"
"strings"
)
type Response struct {
@ -28,16 +29,30 @@ func (r *Response) Body() []byte {
if r.FastResponse != nil {
return r.FastResponse.Body()
} else if r.StandardResponse != nil {
body := make([]byte, 20480)
if r.StandardResponse.ContentLength > 0 {
if r.StandardResponse.ContentLength == -1 {
body, err := io.ReadAll(r.StandardResponse.Body)
if err != nil {
return nil
}
return body
} else {
var body []byte
if r.StandardResponse.ContentLength > 0 && CheckBodySize(r.StandardResponse.ContentLength) {
body = make([]byte, r.StandardResponse.ContentLength)
} else {
return nil
}
n, err := io.ReadFull(r.StandardResponse.Body, body)
_ = r.StandardResponse.Body.Close()
if err == nil {
return body
} else if err == io.ErrUnexpectedEOF {
return body[:n]
} else if err == io.EOF {
return nil
} else {
logs.Log.Error("readfull failed" + err.Error())
logs.Log.Error("readfull failed, " + err.Error())
return nil
}
}
@ -47,27 +62,38 @@ func (r *Response) Body() []byte {
}
}
func (r *Response) ContentLength() int {
func (r *Response) ContentLength() int64 {
if r.FastResponse != nil {
return r.FastResponse.Header.ContentLength()
return int64(r.FastResponse.Header.ContentLength())
} else if r.StandardResponse != nil {
return int(r.StandardResponse.ContentLength)
return r.StandardResponse.ContentLength
} else {
return 0
}
}
func (r *Response) ContentType() string {
var t string
if r.FastResponse != nil {
t = string(r.FastResponse.Header.ContentType())
} else if r.StandardResponse != nil {
t = r.StandardResponse.Header.Get("Content-Type")
} else {
return ""
}
if i := strings.Index(t, ";"); i > 0 {
return t[:i]
} else {
return t
}
}
func (r *Response) Header() []byte {
if r.FastResponse != nil {
return r.FastResponse.Header.Header()
} else if r.StandardResponse != nil {
var header bytes.Buffer
for k, v := range r.StandardResponse.Header {
for _, i := range v {
header.WriteString(k + ": " + i + "\r\n")
}
}
return header.Bytes()
return append(httputils.ReadRawHeader(r.StandardResponse), []byte("\r\n")...)
} else {
return nil
}

924
core/option.go Normal file
View File

@ -0,0 +1,924 @@
package core
import (
"bufio"
"errors"
"fmt"
"github.com/chainreactors/files"
"github.com/chainreactors/logs"
"github.com/chainreactors/parsers"
"github.com/chainreactors/proxyclient"
"github.com/chainreactors/spray/core/baseline"
"github.com/chainreactors/spray/core/ihttp"
"github.com/chainreactors/spray/core/pool"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/utils"
"github.com/chainreactors/utils/iutils"
"github.com/chainreactors/words/mask"
"github.com/chainreactors/words/rule"
"github.com/charmbracelet/lipgloss"
"github.com/expr-lang/expr"
"github.com/vbauerster/mpb/v8"
"io/ioutil"
"net/http"
"net/url"
"os"
"path/filepath"
"regexp"
"strconv"
"strings"
"sync"
"time"
)
var (
	// DefaultThreads is the default per-pool thread count. NewRunner only
	// raises the thread count to 1000 in brute mode when the user left the
	// flag at exactly this value.
	DefaultThreads = 20
)
// Option aggregates every CLI option group supported by spray; the
// embedded structs map one-to-one onto the flag groups shown in --help.
type Option struct {
	InputOptions    `group:"Input Options" config:"input" `
	FunctionOptions `group:"Function Options" config:"functions" `
	OutputOptions   `group:"Output Options" config:"output"`
	PluginOptions   `group:"Plugin Options" config:"plugins"`
	FingerOptions   `group:"Finger Options" config:"finger"`
	RequestOptions  `group:"Request Options" config:"request"`
	ModeOptions     `group:"Modify Options" config:"mode"`
	MiscOptions     `group:"Miscellaneous Options" config:"misc"`
}
// InputOptions selects the targets (urls, files, cidrs, raw request,
// resume file) and the wordlist sources (dicts, dsl word, rules).
// Per-field semantics are documented in the flag tags.
type InputOptions struct {
	ResumeFrom   string   `long:"resume" description:"File, resume filename" `
	Config       string   `short:"c" long:"config" description:"File, config filename"`
	URL          []string `short:"u" long:"url" description:"Strings, input baseurl, e.g.: http://google.com"`
	URLFile      string   `short:"l" long:"list" description:"File, input filename"`
	PortRange    string   `short:"p" long:"port" description:"String, input port range, e.g.: 80,8080-8090,db"`
	CIDRs        []string `short:"i" long:"cidr" description:"String, input cidr, e.g.: 1.1.1.1/24 "`
	RawFile      string   `long:"raw" description:"File, input raw request filename"`
	Dictionaries []string `short:"d" long:"dict" description:"Files, Multi,dict files, e.g.: -d 1.txt -d 2.txt" config:"dictionaries"`
	DefaultDict  bool     `short:"D" long:"default" description:"Bool, use default dictionary" config:"default"`
	Word         string   `short:"w" long:"word" description:"String, word generate dsl, e.g.: -w test{?ld#4}" config:"word"`
	Rules        []string `short:"r" long:"rules" description:"Files, rule files, e.g.: -r rule1.txt -r rule2.txt" config:"rules"`
	AppendRule   []string `short:"R" long:"append-rule" description:"Files, when found valid path , use append rule generator new word with current path" config:"append-rules"`
	FilterRule   string   `long:"filter-rule" description:"String, filter rule, e.g.: --rule-filter '>8 <4'" config:"filter-rule"`
	AppendFile   []string `long:"append" description:"Files, when found valid path , use append file new word with current path" config:"append-files"`
	Offset       int      `long:"offset" description:"Int, wordlist offset"`
	Limit        int      `long:"limit" description:"Int, wordlist limit, start with offset. e.g.: --offset 1000 --limit 100"`
}
// FunctionOptions are word-transformation switches applied while the
// wordlist is generated (extensions, case folding, prefix/suffix,
// replace and skip filters). See BuildWords for how they are wired in.
type FunctionOptions struct {
	Extensions        string            `short:"e" long:"extension" description:"String, add extensions (separated by commas), e.g.: -e jsp,jspx" config:"extension"`
	ForceExtension    bool              `long:"force-extension" description:"Bool, force add extensions" config:"force-extension"`
	ExcludeExtensions string            `long:"exclude-extension" description:"String, exclude extensions (separated by commas), e.g.: --exclude-extension jsp,jspx" config:"exclude-extension"`
	RemoveExtensions  string            `long:"remove-extension" description:"String, remove extensions (separated by commas), e.g.: --remove-extension jsp,jspx" config:"remove-extension"`
	Uppercase         bool              `short:"U" long:"uppercase" description:"Bool, upper wordlist, e.g.: --uppercase" config:"upper"`
	Lowercase         bool              `short:"L" long:"lowercase" description:"Bool, lower wordlist, e.g.: --lowercase" config:"lower"`
	Prefixes          []string          `long:"prefix" description:"Strings, add prefix, e.g.: --prefix aaa --prefix bbb" config:"prefix"`
	Suffixes          []string          `long:"suffix" description:"Strings, add suffix, e.g.: --suffix aaa --suffix bbb" config:"suffix"`
	Replaces          map[string]string `long:"replace" description:"Strings, replace string, e.g.: --replace aaa:bbb --replace ccc:ddd" config:"replace"`
	Skips             []string          `long:"skip" description:"String, skip word when generate. rule, e.g.: --skip aaa" config:"skip"`
	//SkipEval string `long:"skip-eval" description:"String, skip word when generate. rule, e.g.: --skip-eval 'current.Length < 4'"`
}
// OutputOptions controls result matching/filtering expressions and all
// output sinks (console formatting, result file, dump file, stat file).
type OutputOptions struct {
	Match       string `long:"match" description:"String, custom match function, e.g.: --match 'current.Status != 200''" config:"match" `
	Filter      string `long:"filter" description:"String, custom filter function, e.g.: --filter 'current.Body contains \"hello\"'" config:"filter"`
	Fuzzy       bool   `long:"fuzzy" description:"String, open fuzzy output" config:"fuzzy"`
	OutputFile  string `short:"f" long:"file" description:"String, output filename" json:"output_file,omitempty" config:"output-file"`
	DumpFile    string `long:"dump-file" description:"String, dump all request, and write to filename" config:"dump-file"`
	Dump        bool   `long:"dump" description:"Bool, dump all request" config:"dump"`
	AutoFile    bool   `long:"auto-file" description:"Bool, auto generator output and fuzzy filename" config:"auto-file"`
	Format      string `short:"F" long:"format" description:"String, output format, e.g.: --format 1.json" config:"format"`
	Json        bool   `short:"j" long:"json" description:"Bool, output json" config:"json"`
	FileOutput  string `short:"O" long:"file-output" default:"json" description:"Bool, file output format" config:"file_output"`
	OutputProbe string `short:"o" long:"probe" description:"String, output format" config:"output"`
	Quiet       bool   `short:"q" long:"quiet" description:"Bool, Quiet" config:"quiet"`
	NoColor     bool   `long:"no-color" description:"Bool, no color" config:"no-color"`
	NoBar       bool   `long:"no-bar" description:"Bool, No progress bar" config:"no-bar"`
	NoStat      bool   `long:"no-stat" description:"Bool, No stat file output" config:"no-stat"`
}
// RequestOptions shapes each outgoing HTTP request: method, headers,
// cookies, user agent, and how much of the response body is read.
type RequestOptions struct {
	Method          string   `short:"X" long:"method" default:"GET" description:"String, request method, e.g.: --method POST" config:"method"`
	Headers         []string `short:"H" long:"header" description:"Strings, custom headers, e.g.: --header 'Auth: example_auth'" config:"headers"`
	UserAgent       string   `long:"user-agent" description:"String, custom user-agent, e.g.: --user-agent Custom" config:"useragent"`
	RandomUserAgent bool     `long:"random-agent" description:"Bool, use random with default user-agent" config:"random-useragent"`
	Cookie          []string `long:"cookie" description:"Strings, custom cookie" config:"cookies"`
	ReadAll         bool     `long:"read-all" description:"Bool, read all response body" config:"read-all"`
	MaxBodyLength   int64    `long:"max-length" default:"100" description:"Int, max response body length (kb), -1 read-all, 0 not read body, default 100k, e.g. --max-length 1000" config:"max-length"`
}
// PluginOptions toggles the optional discovery plugins; -a/--advance
// turns on all of them at once (see BuildPlugin).
type PluginOptions struct {
	Advance       bool     `short:"a" long:"advance" description:"Bool, enable all plugin" config:"all" `
	Extracts      []string `long:"extract" description:"Strings, extract response, e.g.: --extract js --extract ip --extract version:(.*?)" config:"extract"`
	ExtractConfig string   `long:"extract-config" description:"String, extract config filename" config:"extract-config"`
	ActivePlugin  bool     `long:"active" description:"Bool, enable active finger path"`
	ReconPlugin   bool     `long:"recon" description:"Bool, enable recon" config:"recon"`
	BakPlugin     bool     `long:"bak" description:"Bool, enable bak found" config:"bak"`
	FuzzuliPlugin bool     `long:"fuzzuli" description:"Bool, enable fuzzuli plugin" config:"fuzzuli"`
	CommonPlugin  bool     `long:"common" description:"Bool, enable common file found" config:"common"`
	CrawlPlugin   bool     `long:"crawl" description:"Bool, enable crawl" config:"crawl"`
	CrawlDepth    int      `long:"crawl-depth" default:"3" description:"Int, crawl depth" config:"crawl-depth"`
	AppendDepth   int      `long:"append-depth" default:"2" description:"Int, append depth" config:"append-depth"`
}
// ModeOptions tunes the spraying strategy: rate limiting, error
// break/check thresholds, recursion, and the status-code classification
// sets (black/white/fuzzy/unique).
type ModeOptions struct {
	RateLimit int  `long:"rate-limit" default:"0" description:"Int, request rate limit (rate/s), e.g.: --rate-limit 100" config:"rate-limit"`
	Force     bool `long:"force" description:"Bool, skip error break" config:"force"`
	NoScope   bool `long:"no-scope" description:"Bool, no scope" config:"no-scope"`
	Scope     []string `long:"scope" description:"String, custom scope, e.g.: --scope *.example.com" config:"scope"`
	Recursive string   `long:"recursive" default:"current.IsDir()" description:"String,custom recursive rule, e.g.: --recursive current.IsDir()" config:"recursive"`
	Depth     int      `long:"depth" default:"0" description:"Int, recursive depth" config:"depth"`
	Index     string   `long:"index" default:"/" description:"String, custom index path" config:"index"`
	Random    string   `long:"random" default:"" description:"String, custom random path" config:"random"`
	CheckPeriod     int    `long:"check-period" default:"200" description:"Int, check period when request" config:"check-period"`
	ErrPeriod       int    `long:"error-period" default:"10" description:"Int, check period when error" config:"error-period"`
	BreakThreshold  int    `long:"error-threshold" default:"20" description:"Int, break when the error exceeds the threshold" config:"error-threshold"`
	BlackStatus     string `short:"B" long:"black-status" default:"400,410" description:"Strings (comma split),custom black status" config:"black-status"`
	WhiteStatus     string `short:"W" long:"white-status" default:"200" description:"Strings (comma split), custom white status" config:"white-status"`
	FuzzyStatus     string `long:"fuzzy-status" default:"500,501,502,503,301,302,404" description:"Strings (comma split), custom fuzzy status" config:"fuzzy-status"`
	UniqueStatus    string `long:"unique-status" default:"403,200,404" description:"Strings (comma split), custom unique status" config:"unique-status"`
	Unique          bool   `long:"unique" description:"Bool, unique response" config:"unique"`
	RetryCount      int    `long:"retry" default:"0" description:"Int, retry count" config:"retry"`
	SimhashDistance int    `long:"sim-distance" default:"8" config:"sim-distance"`
}
// MiscOptions holds everything else: spray mode, client selection,
// timeouts, concurrency sizing, logging verbosity, and proxies.
type MiscOptions struct {
	Mod      string `short:"m" long:"mod" default:"path" choice:"path" choice:"host" description:"String, path/host spray" config:"mod"`
	Client   string `short:"C" long:"client" default:"auto" choice:"fast" choice:"standard" choice:"auto" description:"String, Client type" config:"client"`
	Deadline int    `long:"deadline" default:"999999" description:"Int, deadline (seconds)" config:"deadline"` // TODO: overall run deadline, intended to fit cloud-function time limits
	Timeout  int    `short:"T" long:"timeout" default:"5" description:"Int, timeout with request (seconds)" config:"timeout"`
	PoolSize int    `short:"P" long:"pool" default:"5" description:"Int, Pool size" config:"pool"`
	Threads  int    `short:"t" long:"thread" default:"20" description:"Int, number of threads per pool" config:"thread"`
	Debug    bool   `long:"debug" description:"Bool, output debug info" config:"debug"`
	Version  bool   `long:"version" description:"Bool, show version"`
	Verbose  []bool `short:"v" description:"Bool, log verbose level ,default 0, level1: -v level2 -vv " config:"verbose"`
	Proxies  []string `long:"proxy" description:"String, proxy address, e.g.: --proxy socks5://127.0.0.1:1080" config:"proxies"`
	InitConfig  bool `long:"init" description:"Bool, init config file"`
	PrintPreset bool `long:"print" description:"Bool, print preset all preset config "`
}
// Validate rejects mutually exclusive option combinations and ensures
// at least one target source was supplied. It returns a descriptive
// error on the first violation found, nil otherwise.
func (opt *Option) Validate() error {
	if opt.Uppercase && opt.Lowercase {
		return errors.New("cannot set -U and -L at the same time")
	}
	if (opt.Offset != 0 || opt.Limit != 0) && opt.Depth > 0 {
		// offset/limit combined with recursion would be ambiguous
		return errors.New("--offset and --limit cannot be used with --depth at the same time")
	}
	if opt.Depth > 0 && opt.ResumeFrom != "" {
		// resume restores word/rule state outside the command line, which
		// conflicts with recursion
		return errors.New("--resume and --depth cannot be used at the same time")
	}
	if opt.ResumeFrom == "" && len(opt.URL) == 0 && opt.URLFile == "" && len(opt.CIDRs) == 0 && opt.RawFile == "" {
		// fix: list the actual target flags; -c is the config flag, cidr
		// input is -i and raw request input is --raw
		return fmt.Errorf("without any target, please use -u/-l/-i/--raw/--resume to set targets")
	}
	return nil
}
// Prepare performs all pre-run initialization in a fixed order: finger
// engine setup and optional update, option validation, fingerprint and
// preset loading, extractor registration, and the global tuning knobs
// (simhash distance, max body size, status classification sets).
func (opt *Option) Prepare() error {
	var err error
	logs.Log.SetColor(true)
	if err = opt.FingerOptions.Validate(); err != nil {
		return err
	}
	if opt.FingerUpdate {
		err = opt.UpdateFinger()
		if err != nil {
			return err
		}
	}
	err = opt.LoadLocalFingerConfig()
	if err != nil {
		return err
	}
	err = opt.Validate()
	if err != nil {
		return err
	}
	err = pkg.LoadFingers()
	if err != nil {
		return err
	}
	err = pkg.Load()
	if err != nil {
		return err
	}
	if opt.Extracts != nil {
		for _, e := range opt.Extracts {
			if reg, ok := pkg.ExtractRegexps[e]; ok {
				// known preset extractor name
				pkg.Extractors[e] = reg
			} else {
				// otherwise treat the argument itself as a regexp
				pkg.Extractors[e] = []*parsers.Extractor{
					&parsers.Extractor{
						Name:            e,
						CompiledRegexps: []*regexp.Regexp{regexp.MustCompile(e)},
					},
				}
			}
		}
	}
	if opt.ExtractConfig != "" {
		extracts, err := pkg.LoadExtractorConfig(opt.ExtractConfig)
		if err != nil {
			return err
		}
		pkg.Extractors[opt.ExtractConfig] = extracts
	}
	// initialize global variables
	baseline.Distance = uint8(opt.SimhashDistance)
	if opt.MaxBodyLength == -1 {
		// -1 means read the whole body
		ihttp.DefaultMaxBodySize = -1
	} else {
		// the flag is expressed in KB
		ihttp.DefaultMaxBodySize = opt.MaxBodyLength * 1024
	}
	pkg.BlackStatus = pkg.ParseStatus(pkg.DefaultBlackStatus, opt.BlackStatus)
	pkg.WhiteStatus = pkg.ParseStatus(pkg.DefaultWhiteStatus, opt.WhiteStatus)
	if opt.FuzzyStatus == "all" {
		pool.EnableAllFuzzy = true
	} else {
		pkg.FuzzyStatus = pkg.ParseStatus(pkg.DefaultFuzzyStatus, opt.FuzzyStatus)
	}
	if opt.Unique {
		pool.EnableAllUnique = true
	} else {
		pkg.UniqueStatus = pkg.ParseStatus(pkg.DefaultUniqueStatus, opt.UniqueStatus)
	}
	logs.Log.Logf(pkg.LogVerbose, "Black Status: %v, WhiteStatus: %v, WAFStatus: %v", pkg.BlackStatus, pkg.WhiteStatus, pkg.WAFStatus)
	logs.Log.Logf(pkg.LogVerbose, "Fuzzy Status: %v, Unique Status: %v", pkg.FuzzyStatus, pkg.UniqueStatus)
	return nil
}
// NewRunner assembles a Runner from the parsed options: logging and
// progress-bar setup, client selection, proxy chain, plugin/wordlist
// construction, task generation, match/filter/recursive expression
// compilation, request headers, and output/dump/stat files.
func (opt *Option) NewRunner() (*Runner, error) {
	var err error
	r := &Runner{
		Option:   opt,
		taskCh:   make(chan *Task),
		outputCh: make(chan *baseline.Baseline, 256),
		poolwg:   &sync.WaitGroup{},
		outwg:    &sync.WaitGroup{},
		fuzzyCh:  make(chan *baseline.Baseline, 256),
		Headers:  make(map[string]string),
		Total:    opt.Limit,
		Color:    true,
	}
	// log and bar
	if opt.NoColor {
		logs.Log.SetColor(false)
		r.Color = false
	}
	if opt.Quiet {
		logs.Log.SetQuiet(true)
		logs.Log.SetColor(false)
		r.Color = false
	}
	if !(opt.Quiet || opt.NoBar) {
		r.Progress = mpb.New(mpb.WithRefreshRate(100 * time.Millisecond))
		logs.Log.SetOutput(r.Progress)
	}
	// configuration
	if opt.Force {
		// force mode disables the periodic check and the error-accumulation
		// auto-exit machinery
		r.BreakThreshold = MAX
		r.CheckPeriod = MAX
		r.ErrPeriod = MAX
	}
	// select the HTTP client implementation
	if opt.Client == "auto" {
		r.ClientType = ihttp.Auto
	} else if opt.Client == "fast" {
		r.ClientType = ihttp.FAST
	} else if opt.Client == "standard" || opt.Client == "base" || opt.Client == "http" {
		r.ClientType = ihttp.STANDARD
	}
	if len(opt.Proxies) > 0 {
		urls, err := proxyclient.ParseProxyURLs(opt.Proxies)
		if err != nil {
			return nil, err
		}
		r.ProxyClient, err = proxyclient.NewClientChain(urls)
		if err != nil {
			return nil, err
		}
	}
	err = opt.BuildPlugin(r)
	if err != nil {
		return nil, err
	}
	err = opt.BuildWords(r)
	if err != nil {
		return nil, err
	}
	if opt.Threads == DefaultThreads && r.bruteMod {
		// brute plugins want much more concurrency; only override when the
		// user left the thread flag at its default
		r.Threads = 1000
	}
	pkg.DefaultStatistor = pkg.Statistor{
		Word:         opt.Word,
		WordCount:    len(r.Wordlist),
		Dictionaries: opt.Dictionaries,
		Offset:       opt.Offset,
		RuleFiles:    opt.Rules,
		RuleFilter:   opt.FilterRule,
		Total:        r.Total,
	}
	r.Tasks, err = opt.BuildTasks(r)
	if err != nil {
		return nil, err
	}
	if opt.Match != "" {
		exp, err := expr.Compile(opt.Match)
		if err != nil {
			return nil, err
		}
		r.MatchExpr = exp
	}
	if opt.Filter != "" {
		exp, err := expr.Compile(opt.Filter)
		if err != nil {
			return nil, err
		}
		r.FilterExpr = exp
	}
	// initialize recursion
	var express string
	if opt.Recursive != "current.IsDir()" && opt.Depth != 0 {
		// recursion stays off by default unless a non-default expression
		// was supplied
		// NOTE(review): this branch requires Depth != 0 and then forces
		// Depth = 1, overriding the user's depth — confirm intent
		opt.Depth = 1
		express = opt.Recursive
	}
	if opt.Depth != 0 {
		// a manually supplied depth takes precedence over the default
		express = opt.Recursive
	}
	if express != "" {
		exp, err := expr.Compile(express)
		if err != nil {
			return nil, err
		}
		r.RecursiveExpr = exp
	}
	// prepare headers
	for _, h := range opt.Headers {
		i := strings.Index(h, ":")
		if i == -1 {
			logs.Log.Warn("invalid header")
		} else {
			// NOTE(review): h[i+2:] assumes exactly one character (a space)
			// after the colon; "Key:Value" would lose the first value byte
			r.Headers[h[:i]] = h[i+2:]
		}
	}
	if opt.UserAgent != "" {
		r.Headers["User-Agent"] = opt.UserAgent
	}
	if opt.Cookie != nil {
		r.Headers["Cookie"] = strings.Join(opt.Cookie, "; ")
	}
	if opt.OutputProbe != "" {
		r.Probes = strings.Split(opt.OutputProbe, ",")
	}
	if !opt.Quiet {
		fmt.Println(opt.PrintConfig(r))
	}
	// init output file
	if opt.OutputFile != "" {
		r.OutputFile, err = files.NewFile(opt.OutputFile, false, false, true)
		if err != nil {
			return nil, err
		}
	} else if opt.AutoFile {
		r.OutputFile, err = files.NewFile("result.json", false, false, true)
		if err != nil {
			return nil, err
		}
	}
	if opt.DumpFile != "" {
		r.DumpFile, err = files.NewFile(opt.DumpFile, false, false, true)
		if err != nil {
			return nil, err
		}
	} else if opt.Dump {
		r.DumpFile, err = files.NewFile("dump.json", false, false, true)
		if err != nil {
			return nil, err
		}
	}
	if opt.ResumeFrom != "" {
		r.StatFile, err = files.NewFile(opt.ResumeFrom, false, true, true)
	}
	if err != nil {
		return nil, err
	}
	if !opt.NoStat {
		// NOTE(review): err from NewFile is not checked before r.StatFile.Mod
		// is set on the next line; a NewFile failure would nil-deref here —
		// confirm and fix upstream
		r.StatFile, err = files.NewFile(pkg.SafeFilename(r.Tasks.Name)+".stat", false, true, true)
		r.StatFile.Mod = os.O_WRONLY | os.O_CREATE
		err = r.StatFile.Init()
		if err != nil {
			return nil, err
		}
	}
	return r, nil
}
// PrintConfig renders a colorized, framed summary panel of the effective
// configuration (input source, wordlist, plugins, status sets, misc
// knobs) for interactive runs.
func (opt *Option) PrintConfig(r *Runner) string {
	// color styles
	keyStyle := lipgloss.NewStyle().Bold(true).Foreground(lipgloss.Color("#FFFFFF")).Width(20) // bold keys with a fixed width
	stringValueStyle := lipgloss.NewStyle().Foreground(lipgloss.Color("#FFA07A"))              // string values
	arrayValueStyle := lipgloss.NewStyle().Foreground(lipgloss.Color("#98FB98"))               // slice values
	numberValueStyle := lipgloss.NewStyle().Foreground(lipgloss.Color("#ADD8E6"))              // numeric values
	panelWidth := 60 // keep the panel narrow so content sits toward the left
	padding := 2     // small padding, also to keep the layout left-leaning
	// divider style sized to the panel width
	divider := strings.Repeat("─", panelWidth) // "─" yields a continuous divider line
	// render a value according to its dynamic type
	formatValue := func(value interface{}) string {
		switch v := value.(type) {
		case string:
			return stringValueStyle.Render(v)
		case []string:
			return arrayValueStyle.Render(fmt.Sprintf("%v", v))
		case int, int64, float64:
			return numberValueStyle.Render(fmt.Sprintf("%v", v))
		default:
			return stringValueStyle.Render(fmt.Sprintf("%v", v)) // fall back to string styling
		}
	}
	// the input sources are mutually exclusive; show whichever one is set
	inputSource := ""
	if opt.ResumeFrom != "" {
		inputSource = lipgloss.JoinHorizontal(lipgloss.Left, "🌀 ", keyStyle.Render("ResumeFrom: "), formatValue(opt.ResumeFrom))
	} else if len(opt.URL) > 0 {
		inputSource = lipgloss.JoinHorizontal(lipgloss.Left, "🌐 ", keyStyle.Render("URL: "), formatValue(opt.URL))
	} else if opt.URLFile != "" {
		inputSource = lipgloss.JoinHorizontal(lipgloss.Left, "📂 ", keyStyle.Render("URLFile: "), formatValue(opt.URLFile))
	} else if len(opt.CIDRs) > 0 {
		inputSource = lipgloss.JoinHorizontal(lipgloss.Left, "📡 ", keyStyle.Render("CIDRs: "), formatValue(opt.CIDRs))
	} else if opt.RawFile != "" {
		inputSource = lipgloss.JoinHorizontal(lipgloss.Left, "📄 ", keyStyle.Render("RawFile: "), formatValue(opt.RawFile))
	}
	// Input Options
	inputOptions := lipgloss.JoinVertical(lipgloss.Left,
		inputSource, // the mutually exclusive source resolved above
		// PortRange row
		lipgloss.JoinHorizontal(lipgloss.Left, "🔢 ", keyStyle.Render("PortRange: "), formatValue(opt.PortRange)),
		// Dictionaries row
		lipgloss.JoinHorizontal(lipgloss.Left, "📚 ", keyStyle.Render("Dictionaries: "), formatValue(opt.Dictionaries)),
		// Word, Rules and FilterRule each get their own row
		lipgloss.JoinVertical(lipgloss.Left,
			lipgloss.JoinHorizontal(lipgloss.Left, "💡 ", keyStyle.Render("Word: "), formatValue(r.Word)),
			lipgloss.JoinHorizontal(lipgloss.Left, "📜 ", keyStyle.Render("Rules: "), formatValue(opt.Rules)),
			lipgloss.JoinHorizontal(lipgloss.Left, "🔍 ", keyStyle.Render("FilterRule: "), formatValue(opt.FilterRule)),
		),
		// AppendRule and AppendWords each get their own row
		lipgloss.JoinVertical(lipgloss.Left,
			lipgloss.JoinHorizontal(lipgloss.Left, "🔧 ", keyStyle.Render("AppendRule: "), formatValue(r.AppendRule)),
			lipgloss.JoinHorizontal(lipgloss.Left, "🧩 ", keyStyle.Render("AppendWords: "), formatValue(len(r.AppendWords))),
		),
	)
	// Output Options
	outputOptions := lipgloss.JoinVertical(lipgloss.Left,
		lipgloss.JoinHorizontal(lipgloss.Left, "📊 ", keyStyle.Render("Match: "), formatValue(opt.Match)),
		lipgloss.JoinHorizontal(lipgloss.Left, "⚙️ ", keyStyle.Render("Filter: "), formatValue(opt.Filter)),
	)
	// Plugin Options: collect the names of every enabled plugin
	pluginValues := []string{}
	if opt.ActivePlugin {
		pluginValues = append(pluginValues, "active")
	}
	if opt.ReconPlugin {
		pluginValues = append(pluginValues, "recon")
	}
	if opt.BakPlugin {
		pluginValues = append(pluginValues, "bak")
	}
	if opt.FuzzuliPlugin {
		pluginValues = append(pluginValues, "fuzzuli")
	}
	if opt.CommonPlugin {
		pluginValues = append(pluginValues, "common")
	}
	if opt.CrawlPlugin {
		pluginValues = append(pluginValues, "crawl")
	}
	pluginOptions := lipgloss.JoinVertical(lipgloss.Left,
		lipgloss.JoinHorizontal(lipgloss.Left, "🔎 ", keyStyle.Render("Extracts: "), formatValue(opt.Extracts)),
		lipgloss.JoinHorizontal(lipgloss.Left, "🔌 ", keyStyle.Render("Plugins: "), formatValue(strings.Join(pluginValues, ", "))),
	)
	// Mode Options (effective status sets after Prepare)
	modeOptions := lipgloss.JoinVertical(lipgloss.Left,
		lipgloss.JoinHorizontal(lipgloss.Left, "🛑 ", keyStyle.Render("BlackStatus: "), formatValue(pkg.BlackStatus)),
		lipgloss.JoinHorizontal(lipgloss.Left, "✅ ", keyStyle.Render("WhiteStatus: "), formatValue(pkg.WhiteStatus)),
		lipgloss.JoinHorizontal(lipgloss.Left, "🔄 ", keyStyle.Render("FuzzyStatus: "), formatValue(pkg.FuzzyStatus)),
		lipgloss.JoinHorizontal(lipgloss.Left, "🔒 ", keyStyle.Render("UniqueStatus: "), formatValue(pkg.UniqueStatus)),
		lipgloss.JoinHorizontal(lipgloss.Left, "🔑 ", keyStyle.Render("Unique: "), formatValue(opt.Unique)),
	)
	// Misc Options
	miscOptions := lipgloss.JoinVertical(lipgloss.Left,
		lipgloss.JoinHorizontal(lipgloss.Left, "⏱ ", keyStyle.Render("Timeout: "), formatValue(opt.Timeout)),
		lipgloss.JoinHorizontal(lipgloss.Left, "📈 ", keyStyle.Render("PoolSize: "), formatValue(opt.PoolSize)),
		lipgloss.JoinHorizontal(lipgloss.Left, "🧵 ", keyStyle.Render("Threads: "), formatValue(opt.Threads)),
		lipgloss.JoinHorizontal(lipgloss.Left, "🌍 ", keyStyle.Render("Proxies: "), formatValue(opt.Proxies)),
	)
	// stitch every section together
	content := lipgloss.JoinVertical(lipgloss.Left,
		inputOptions,
		outputOptions,
		pluginOptions,
		modeOptions,
		miscOptions,
	)
	// apply left padding, then center the padded content
	contentWithPadding := lipgloss.NewStyle().PaddingLeft(padding).Render(content)
	// Place centers the whole panel between the dividers
	return lipgloss.Place(panelWidth+padding*2, 0, lipgloss.Center, lipgloss.Center,
		lipgloss.JoinVertical(lipgloss.Center,
			divider, // top divider
			contentWithPadding,
			divider, // bottom divider
		),
	)
}
// BuildPlugin applies the selected plugin switches to the runner: it
// registers extra extractors, appends each plugin's word lists, marks
// brute mode for brute-style plugins, and widens the scope when
// --no-scope is set.
func (opt *Option) BuildPlugin(r *Runner) error {
	// -a/--advance switches every plugin on in one shot
	if opt.Advance {
		for _, flag := range []*bool{
			&opt.CrawlPlugin, &opt.Finger, &opt.BakPlugin, &opt.FuzzuliPlugin,
			&opt.CommonPlugin, &opt.ActivePlugin, &opt.ReconPlugin,
		} {
			*flag = true
		}
	}
	if opt.ReconPlugin {
		pkg.Extractors["recon"] = pkg.ExtractRegexps["pentest"]
	}
	if opt.Finger {
		pkg.EnableAllFingerEngine = true
	}
	if opt.BakPlugin {
		r.bruteMod = true
		opt.AppendRule = append(opt.AppendRule, "filebak")
		r.AppendWords = append(r.AppendWords, pkg.GetPresetWordList([]string{"bak_file"})...)
	}
	if opt.CommonPlugin {
		r.bruteMod = true
		for _, name := range []string{"common", "log"} {
			r.AppendWords = append(r.AppendWords, pkg.Dicts[name]...)
		}
	}
	if opt.ActivePlugin {
		r.bruteMod = true
		r.AppendWords = append(r.AppendWords, pkg.ActivePath...)
	}
	if opt.CrawlPlugin {
		r.bruteMod = true
	}
	if r.bruteMod {
		logs.Log.Important("enabling brute mod, because of enabled brute plugin")
	}
	if opt.NoScope {
		r.Scope = []string{"*"}
	}
	return nil
}
// BuildWords assembles the runner's wordlist: it loads dictionaries,
// expands the word DSL (with prefix/suffix/extension decorations),
// compiles rule programs, and registers the word-transformation
// functions (extension handling, case folding, replace, skip).
func (opt *Option) BuildWords(r *Runner) error {
	var dicts [][]string
	var err error
	if opt.DefaultDict {
		dicts = append(dicts, pkg.Dicts["default"])
		logs.Log.Info("use default dictionary: https://github.com/maurosoria/dirsearch/blob/master/db/dicc.txt")
	}
	for _, f := range opt.Dictionaries {
		dict, err := pkg.LoadFileToSlice(f)
		if err != nil {
			return err
		}
		dicts = append(dicts, dict)
		if opt.ResumeFrom != "" {
			// fix: register the dict just loaded; indexing dicts by the loop
			// index was off by one whenever --default prepended the default
			// dictionary at index 0
			pkg.Dicts[f] = dict
		}
		logs.Log.Logf(pkg.LogVerbose, "Loaded %d word from %s", len(dict), f)
	}
	if len(dicts) == 0 && opt.Word == "" && len(opt.Rules) == 0 && len(opt.AppendRule) == 0 {
		// nothing to spray with: fall back to check-only mode
		r.IsCheck = true
	}
	if opt.Word == "" {
		// synthesize a DSL that references every loaded dict by index
		opt.Word = "{?"
		for i := range dicts {
			opt.Word += strconv.Itoa(i)
		}
		opt.Word += "}"
	}
	if len(opt.Suffixes) != 0 {
		mask.SpecialWords["suffix"] = opt.Suffixes
		opt.Word += "{?@suffix}"
	}
	if len(opt.Prefixes) != 0 {
		mask.SpecialWords["prefix"] = opt.Prefixes
		opt.Word = "{?@prefix}" + opt.Word
	}
	if opt.ForceExtension && opt.Extensions != "" {
		exts := strings.Split(opt.Extensions, ",")
		for i, e := range exts {
			if !strings.HasPrefix(e, ".") {
				exts[i] = "." + e
			}
		}
		mask.SpecialWords["ext"] = exts
		opt.Word += "{?@ext}"
	}
	r.Wordlist, err = mask.Run(opt.Word, dicts, nil)
	if err != nil {
		return fmt.Errorf("%s %w", opt.Word, err)
	}
	if len(r.Wordlist) > 0 {
		logs.Log.Logf(pkg.LogVerbose, "Parsed %d words by %s", len(r.Wordlist), opt.Word)
	}
	if len(opt.Rules) != 0 {
		rules, err := pkg.LoadRuleAndCombine(opt.Rules)
		if err != nil {
			return err
		}
		r.Rules = rule.Compile(rules, opt.FilterRule)
	} else if opt.FilterRule != "" {
		// if filter rule is not empty, set rules to ":", force to open filter mode
		r.Rules = rule.Compile(":", opt.FilterRule)
	} else {
		r.Rules = new(rule.Program)
	}
	if len(r.Rules.Expressions) > 0 {
		r.Total = len(r.Wordlist) * len(r.Rules.Expressions)
	} else {
		r.Total = len(r.Wordlist)
	}
	if len(opt.AppendRule) != 0 {
		content, err := pkg.LoadRuleAndCombine(opt.AppendRule)
		if err != nil {
			return err
		}
		r.AppendRules = rule.Compile(string(content), "")
	}
	if len(opt.AppendFile) != 0 {
		var lines []string
		for _, f := range opt.AppendFile {
			dict, err := pkg.LoadFileToSlice(f)
			if err != nil {
				return err
			}
			lines = append(lines, dict...)
		}
		r.AppendWords = append(r.AppendWords, lines...)
	}
	// dirsearch-style %EXT% placeholder handling
	if opt.Extensions != "" {
		r.AppendFunction(pkg.ParseEXTPlaceholderFunc(strings.Split(opt.Extensions, ",")))
	} else {
		// no extensions configured: drop words that still contain the placeholder
		r.AppendFunction(func(s string) []string {
			if strings.Contains(s, pkg.EXTChar) {
				return nil
			}
			return []string{s}
		})
	}
	if opt.Uppercase {
		r.AppendFunction(pkg.WrapWordsFunc(strings.ToUpper))
	}
	if opt.Lowercase {
		r.AppendFunction(pkg.WrapWordsFunc(strings.ToLower))
	}
	if opt.RemoveExtensions != "" {
		// fix: split RemoveExtensions here; this previously read
		// ExcludeExtensions, so --remove-extension matched the wrong list
		rexts := strings.Split(opt.RemoveExtensions, ",")
		r.AppendFunction(func(s string) []string {
			if ext := pkg.ParseExtension(s); iutils.StringsContains(rexts, ext) {
				return []string{strings.TrimSuffix(s, "."+ext)}
			}
			return []string{s}
		})
	}
	if opt.ExcludeExtensions != "" {
		exexts := strings.Split(opt.ExcludeExtensions, ",")
		r.AppendFunction(func(s string) []string {
			if ext := pkg.ParseExtension(s); iutils.StringsContains(exexts, ext) {
				return nil
			}
			return []string{s}
		})
	}
	if len(opt.Replaces) > 0 {
		r.AppendFunction(func(s string) []string {
			for k, v := range opt.Replaces {
				s = strings.Replace(s, k, v, -1)
			}
			return []string{s}
		})
	}
	if len(opt.Skips) > 0 {
		r.AppendFunction(func(s string) []string {
			for _, skip := range opt.Skips {
				if strings.Contains(s, skip) {
					return nil
				}
			}
			return []string{s}
		})
	}
	return nil
}
// BuildTasks constructs the task generator from whichever target source
// was supplied: resume file, -u urls, a raw HTTP request, CIDRs, a url
// list file, or stdin. Feeding the generator happens asynchronously;
// r.Count is populated up front so progress totals are correct.
func (opt *Option) BuildTasks(r *Runner) (*TaskGenerator, error) {
	// prepare tasks
	var err error
	gen := NewTaskGenerator(opt.PortRange)
	if opt.ResumeFrom != "" {
		stats, err := pkg.ReadStatistors(opt.ResumeFrom)
		if err != nil {
			logs.Log.Error(err.Error())
		}
		r.Count = len(stats)
		gen.Name = "resume " + opt.ResumeFrom
		go func() {
			for _, stat := range stats {
				gen.In <- &Task{baseUrl: stat.BaseUrl, origin: NewOrigin(stat)}
			}
			close(gen.In)
		}()
	} else {
		var file *os.File
		// generate tasks according to the input source type
		if len(opt.URL) == 1 {
			gen.Name = opt.URL[0]
			go func() {
				gen.Run(opt.URL[0])
				close(gen.In)
			}()
			r.Count = 1
		} else if len(opt.URL) > 1 {
			go func() {
				for _, u := range opt.URL {
					gen.Run(u)
				}
				close(gen.In)
			}()
			gen.Name = "cmd"
			r.Count = len(opt.URL)
		} else if opt.RawFile != "" {
			raw, err := os.Open(opt.RawFile)
			if err != nil {
				return nil, err
			}
			// fix: close the raw request file; it was previously leaked
			defer raw.Close()
			req, err := http.ReadRequest(bufio.NewReader(raw))
			if err != nil {
				return nil, err
			}
			go func() {
				gen.Run(fmt.Sprintf("http://%s%s", req.Host, req.URL.String()))
				close(gen.In)
			}()
			r.Method = req.Method
			for k := range req.Header {
				r.Headers[k] = req.Header.Get(k)
			}
			r.Count = 1
		} else if len(opt.CIDRs) != 0 {
			cidrs := utils.ParseCIDRs(opt.CIDRs)
			if len(gen.ports) == 0 {
				// sensible default ports for raw CIDR input
				gen.ports = []string{"80", "443"}
			}
			gen.Name = "cidr"
			r.Count = cidrs.Count()
			go func() {
				for _, cidr := range cidrs {
					if cidr == nil {
						// fix: the old code dereferenced the nil cidr to build
						// the error message and then fell through into
						// cidr.Range(); log and skip instead
						logs.Log.Error("cidr format error")
						continue
					}
					for ip := range cidr.Range() {
						gen.Run(ip.String())
					}
				}
				close(gen.In)
			}()
		} else if opt.URLFile != "" {
			file, err = os.Open(opt.URLFile)
			if err != nil {
				return nil, err
			}
			gen.Name = filepath.Base(opt.URLFile)
		} else if files.HasStdin() {
			file = os.Stdin
			gen.Name = "stdin"
		}
		if file != nil {
			content, err := ioutil.ReadAll(file)
			if err != nil {
				return nil, err
			}
			urls := strings.Split(strings.TrimSpace(string(content)), "\n")
			// first pass: count targets so progress totals are correct
			for _, u := range urls {
				u = strings.TrimSpace(u)
				if _, err := url.Parse(u); err == nil {
					r.Count++
				} else if ip := utils.ParseIP(u); ip != nil {
					r.Count++
				} else if cidr := utils.ParseCIDR(u); cidr != nil {
					r.Count += cidr.Count()
				}
			}
			// second pass: feed the generator asynchronously
			go func() {
				for _, u := range urls {
					u = strings.TrimSpace(u)
					if _, err := url.Parse(u); err == nil {
						gen.Run(u)
					} else if ip := utils.ParseIP(u); ip != nil {
						gen.Run(u)
					} else if cidr := utils.ParseCIDR(u); cidr != nil {
						for ip := range cidr.Range() {
							gen.Run(ip.String())
						}
					}
				}
				close(gen.In)
			}()
		}
	}
	if len(gen.ports) > 0 {
		// each target is fanned out across every requested port
		r.Count = r.Count * len(gen.ports)
	}
	return gen, nil
}

902
core/pool/brutepool.go Normal file
View File

@ -0,0 +1,902 @@
package pool
import (
"context"
"errors"
"fmt"
"github.com/chainreactors/logs"
"github.com/chainreactors/parsers"
"github.com/chainreactors/spray/core/baseline"
"github.com/chainreactors/spray/core/ihttp"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/utils/iutils"
"github.com/chainreactors/words/rule"
"github.com/panjf2000/ants/v2"
"github.com/valyala/fasthttp"
"golang.org/x/time/rate"
"math/rand"
"net/url"
"path"
"strings"
"sync"
"sync/atomic"
"time"
)
var (
	// EnableAllFuzzy is set when --fuzzy-status is "all": every status code
	// is then treated as a fuzzy candidate instead of the parsed status set.
	EnableAllFuzzy = false
	// EnableAllUnique is set by --unique: uniqueness filtering then applies
	// to all responses instead of only the configured unique-status codes.
	EnableAllUnique = false
	//AllowHostModSource = []parsers.SpraySource{parsers.WordSource, parsers.CheckSource, parsers.InitIndexSource, parsers.InitRandomSource}
)
// NewBrutePool builds a BrutePool for a single base URL: it parses the
// URL, wires up the shared BasePool (HTTP client, channels, cancelable
// context), normalizes the working directory, creates the request and
// scope worker pools, and starts the asynchronous result handler.
func NewBrutePool(ctx context.Context, config *Config) (*BrutePool, error) {
	var u *url.URL
	var err error
	if u, err = url.Parse(config.BaseURL); err != nil {
		return nil, err
	}
	pctx, cancel := context.WithCancel(ctx)
	pool := &BrutePool{
		Baselines: NewBaselines(),
		BasePool: &BasePool{
			Config: config,
			ctx:    pctx,
			Cancel: cancel,
			client: ihttp.NewClient(&ihttp.ClientConfig{
				Thread:      config.Thread,
				Type:        config.ClientType,
				Timeout:     config.Timeout,
				ProxyClient: config.ProxyClient,
			}),
			additionCh: make(chan *Unit, config.Thread),
			closeCh:    make(chan struct{}),
			processCh:  make(chan *baseline.Baseline, config.Thread),
			wg:         &sync.WaitGroup{},
		},
		base:        u.Scheme + "://" + u.Host,
		isDir:       strings.HasSuffix(u.Path, "/"),
		url:         u,
		scopeurls:   make(map[string]struct{}),
		uniques:     make(map[uint16]struct{}),
		checkCh:     make(chan struct{}, config.Thread),
		initwg:      sync.WaitGroup{},
		limiter:     rate.NewLimiter(rate.Limit(config.RateLimit), 1),
		failedCount: 1,
	}
	rand.Seed(time.Now().UnixNano())
	// normalize dir so it always contains at least one "/"
	if strings.HasSuffix(config.BaseURL, "/") {
		pool.dir = pool.url.Path
	} else if pool.url.Path == "" {
		pool.dir = "/"
	} else {
		pool.dir = pkg.Dir(pool.url.Path)
	}
	pool.reqPool, _ = ants.NewPoolWithFunc(config.Thread, pool.Invoke)
	pool.scopePool, _ = ants.NewPoolWithFunc(config.Thread, pool.NoScopeInvoke)
	// run result handling in its own goroutine so it does not block the
	// main request concurrency
	go pool.Handler()
	return pool, nil
}
// BrutePool drives the brute-force spraying of a single base URL. It
// combines the shared baselines and BasePool plumbing with per-target
// state: worker pools, scope/unique tracking, and rate limiting.
type BrutePool struct {
	*Baselines
	*BasePool
	base        string // root of the url (scheme + host); used when joining paths during crawl or redirect
	isDir       bool
	url         *url.URL
	reqPool     *ants.PoolWithFunc
	scopePool   *ants.PoolWithFunc
	checkCh     chan struct{} // dedicated check channel, kept separate to avoid conflicts with redirect/crawl
	closed      bool
	wordOffset  int
	failedCount int32
	IsFailed    bool
	urls        sync.Map
	scopeurls   map[string]struct{}
	uniques     map[uint16]struct{}
	analyzeDone bool
	limiter     *rate.Limiter
	locker      sync.Mutex
	scopeLocker sync.Mutex
	initwg      sync.WaitGroup // used during init; TODO: replace with a lock afterwards
}
// Init sends the two probe requests (index page and a random path) that
// establish the comparison baselines, waits for both, validates the
// results and upgrades http->https when the target redirects to https.
//
// BUGFIX: the random-baseline error branch previously logged and returned
// pool.index (copy-paste from the branch above), hiding the real error;
// it now reports pool.random. Errors are also wrapped with an explicit
// "%s" verb so user-controlled strings cannot act as format directives.
func (pool *BrutePool) Init() error {
	pool.initwg.Add(2)
	if pool.Index != "/" {
		logs.Log.Logf(pkg.LogVerbose, "custom index url: %s", pkg.BaseURL(pool.url)+pkg.FormatURL(pkg.BaseURL(pool.url), pool.Index))
		pool.reqPool.Invoke(&Unit{path: pool.Index, source: parsers.InitIndexSource})
	} else {
		pool.reqPool.Invoke(&Unit{path: pool.url.Path, source: parsers.InitIndexSource})
	}
	if pool.Random != "" {
		logs.Log.Logf(pkg.LogVerbose, "custom random url: %s", pkg.BaseURL(pool.url)+pkg.FormatURL(pkg.BaseURL(pool.url), pool.Random))
		if pool.Mod == PathSpray {
			pool.reqPool.Invoke(&Unit{path: pool.Random, source: parsers.InitRandomSource})
		} else {
			pool.reqPool.Invoke(&Unit{host: pool.Random, source: parsers.InitRandomSource})
		}
	} else {
		if pool.Mod == PathSpray {
			pool.reqPool.Invoke(&Unit{path: pool.safePath(pkg.RandPath()), source: parsers.InitRandomSource})
		} else {
			pool.reqPool.Invoke(&Unit{host: pkg.RandHost(), source: parsers.InitRandomSource})
		}
	}
	pool.initwg.Wait()
	if pool.index.ErrString != "" {
		logs.Log.Error(pool.index.String())
		return fmt.Errorf("%s", pool.index.ErrString)
	}
	if pool.index.Chunked && pool.ClientType == ihttp.FAST {
		logs.Log.Warn("chunk encoding! but current client FASTHTTP not support chunk decode")
	}
	logs.Log.Logf(pkg.LogVerbose, "[baseline.index] "+pool.index.Format([]string{"status", "length", "spend", "title", "frame", "redirect"}))
	// verify basic reachability via the random baseline
	if pool.random.ErrString != "" {
		logs.Log.Error(pool.random.String())
		return fmt.Errorf("%s", pool.random.ErrString)
	}
	logs.Log.Logf(pkg.LogVerbose, "[baseline.random] "+pool.random.Format([]string{"status", "length", "spend", "title", "frame", "redirect"}))
	// some sites redirect http to https; if either baseline shows that,
	// upgrade the base url to https and re-init
	if pool.url.Scheme == "http" {
		if pool.index.RedirectURL != "" {
			if err := pool.Upgrade(pool.index); err != nil {
				return err
			}
		} else if pool.random.RedirectURL != "" {
			if err := pool.Upgrade(pool.random); err != nil {
				return err
			}
		}
	}
	return nil
}
// Run consumes the wordlist and dispatches one request unit per word until
// the worder is exhausted, limit is reached, or the pool is cancelled. It
// also launches the enabled plugin producers (active fingerprinting,
// backup/fuzzuli guessing, common files) and services the check and
// addition channels.
func (pool *BrutePool) Run(offset, limit int) {
	pool.Worder.Run()
	if pool.Active {
		pool.wg.Add(1)
		go pool.doActive()
	}
	if pool.Bak {
		pool.wg.Add(1)
		go pool.doBak()
	}
	if pool.Fuzzuli {
		pool.wg.Add(1)
		go pool.doFuzzuli()
	}
	if pool.Common {
		pool.wg.Add(1)
		go pool.doCommonFile()
	}
	var done bool
	// monitor goroutine: polls done every 100ms; once the wordlist is
	// drained it waits for in-flight work and closes closeCh so the select
	// loop below can break out
	go func() {
		for {
			if done {
				pool.wg.Wait()
				close(pool.closeCh)
				return
			}
			time.Sleep(100 * time.Millisecond)
		}
	}()
Loop:
	for {
		select {
		case w, ok := <-pool.Worder.Output:
			if !ok {
				done = true
				continue
			}
			pool.Statistor.End++
			pool.wordOffset++
			if pool.wordOffset < offset {
				// below the resume offset: count it but do not request
				continue
			}
			if pool.Statistor.End > limit {
				done = true
				continue
			}
			if w == "" {
				pool.Statistor.Skipped++
				pool.Bar.Done()
				continue
			}
			pool.wg.Add(1)
			if pool.Mod == HostSpray {
				pool.reqPool.Invoke(&Unit{host: w, source: parsers.WordSource, number: pool.wordOffset})
			} else {
				// join the path verbatim (slash count preserved) for
				// middlewares where "/" is semantically significant
				pool.reqPool.Invoke(&Unit{path: pool.safePath(w), source: parsers.WordSource, number: pool.wordOffset})
			}
		case <-pool.checkCh:
			pool.Statistor.CheckNumber++
			if pool.Mod == HostSpray {
				pool.reqPool.Invoke(&Unit{host: pkg.RandHost(), source: parsers.CheckSource, number: pool.wordOffset})
			} else if pool.Mod == PathSpray {
				pool.reqPool.Invoke(&Unit{path: pool.safePath(pkg.RandPath()), source: parsers.CheckSource, number: pool.wordOffset})
			}
		case unit, ok := <-pool.additionCh:
			if !ok || pool.closed {
				continue
			}
			if _, ok := pool.urls.Load(unit.path); ok {
				logs.Log.Debugf("[%s] duplicate path: %s, skipped", unit.source.Name(), pool.base+unit.path)
				pool.wg.Done()
			} else {
				pool.urls.Store(unit.path, nil)
				unit.path = pool.safePath(unit.path)
				unit.number = pool.wordOffset
				pool.reqPool.Invoke(unit)
			}
		case <-pool.closeCh:
			break Loop
		case <-pool.ctx.Done():
			break Loop
		}
	}
	pool.closed = true
	pool.Close()
}
// Invoke is the ants worker body for every request unit: it rate-limits,
// builds and sends the request, classifies the response into a Baseline
// (with PreCompare triage for ordinary words), follows redirects, records
// failures, and finally routes the baseline according to its source.
func (pool *BrutePool) Invoke(v interface{}) {
	if pool.RateLimit != 0 {
		pool.limiter.Wait(pool.ctx)
	}
	atomic.AddInt32(&pool.Statistor.ReqTotal, 1)
	unit := v.(*Unit)
	var req *ihttp.Request
	var err error
	req, err = ihttp.BuildRequest(pool.ctx, pool.ClientType, pool.base, unit.path, unit.host, pool.Method)
	if err != nil {
		logs.Log.Error(err.Error())
		return
	}
	req.SetHeaders(pool.Headers, pool.RandomUserAgent)
	start := time.Now()
	resp, reqerr := pool.client.Do(req)
	if pool.ClientType == ihttp.FAST {
		// fasthttp request/response objects are pooled and must be released
		// NOTE(review): assumes client.Do always returns a non-nil resp even
		// on error — confirm, otherwise this would panic
		defer fasthttp.ReleaseResponse(resp.FastResponse)
		defer fasthttp.ReleaseRequest(req.FastRequest)
	}
	// comparison and error handling
	var bl *baseline.Baseline
	if reqerr != nil && !errors.Is(reqerr, fasthttp.ErrBodyTooLarge) {
		atomic.AddInt32(&pool.failedCount, 1)
		atomic.AddInt32(&pool.Statistor.FailedNumber, 1)
		bl = &baseline.Baseline{
			SprayResult: &parsers.SprayResult{
				UrlString: pool.base + unit.path,
				ErrString: reqerr.Error(),
				Reason:    pkg.ErrRequestFailed.Error(),
			},
		}
		pool.FailedBaselines = append(pool.FailedBaselines, bl)
		// automatically replay failed requests
		pool.doRetry(bl)
	} else { // per-source fast paths
		if unit.source <= 3 || unit.source == parsers.CrawlSource || unit.source == parsers.CommonFileSource {
			// high-priority sources skip PreCompare entirely
			bl = baseline.NewBaseline(req.URI(), req.Host(), resp)
		} else if pool.MatchExpr != nil {
			// with a custom match expression, every response goes downstream
			bl = baseline.NewBaseline(req.URI(), req.Host(), resp)
		} else if err = pool.PreCompare(resp); err == nil {
			// PreCompare cheaply filters obviously useless responses
			bl = baseline.NewBaseline(req.URI(), req.Host(), resp)
		} else {
			bl = baseline.NewInvalidBaseline(req.URI(), req.Host(), resp, err.Error())
		}
	}
	// manual redirect handling
	if bl.IsValid && unit.source != parsers.CheckSource && bl.RedirectURL != "" {
		bl.SameRedirectDomain = pool.checkHost(bl.RedirectURL)
		pool.doRedirect(bl, unit.depth)
	}
	if !ihttp.CheckBodySize(int64(bl.BodyLength)) {
		bl.ExceedLength = true
	}
	unit.Update(bl)
	bl.Spended = time.Since(start).Milliseconds()
	switch unit.source {
	case parsers.InitRandomSource:
		defer pool.initwg.Done()
		pool.locker.Lock()
		pool.random = bl
		pool.locker.Unlock()
		if !bl.IsValid {
			return
		}
		bl.Collect()
		pool.addFuzzyBaseline(bl)
	case parsers.InitIndexSource:
		defer pool.initwg.Done()
		pool.locker.Lock()
		pool.index = bl
		pool.locker.Unlock()
		if !bl.IsValid {
			return
		}
		bl.Collect()
		pool.doCrawl(bl)
		pool.doAppend(bl)
		pool.putToOutput(bl)
	case parsers.CheckSource:
		if bl.ErrString != "" {
			logs.Log.Warnf("[check.error] %s maybe ip had banned, break (%d/%d), error: %s", pool.BaseURL, pool.failedCount, pool.BreakThreshold, bl.ErrString)
		} else if i := pool.random.Compare(bl); i < 1 {
			if i == 0 {
				// fuzzy-equal to the random baseline: possible risk control
				if pool.Fuzzy {
					logs.Log.Debug("[check.fuzzy] maybe trigger risk control, " + bl.String())
				}
			} else {
				atomic.AddInt32(&pool.failedCount, 1)
				logs.Log.Debug("[check.failed] maybe trigger risk control, " + bl.String())
				pool.FailedBaselines = append(pool.FailedBaselines, bl)
			}
		} else {
			pool.resetFailed() // healthy again: reset the failure counter
			logs.Log.Debug("[check.pass] " + bl.String())
		}
	case parsers.WordSource:
		// the expensive deep comparison happens asynchronously in Handler
		pool.processCh <- bl
		if int(pool.Statistor.ReqTotal)%pool.CheckPeriod == 0 {
			// periodic WAF-check probe
			pool.doCheck()
		} else if pool.failedCount%pool.ErrPeriod == 0 {
			// probe on accumulated errors; doCheck aborts past the threshold
			atomic.AddInt32(&pool.failedCount, 1)
			pool.doCheck()
		}
		pool.Bar.Done()
	case parsers.RedirectSource:
		bl.FrontURL = unit.frontUrl
		pool.processCh <- bl
	default:
		pool.processCh <- bl
	}
}
// NoScopeInvoke handles crawl targets outside the current base url: it
// fetches the full url directly and, only on HTTP 200, collects the
// result, continues scope crawling from it and emits it.
func (pool *BrutePool) NoScopeInvoke(v interface{}) {
	defer pool.wg.Done()
	unit := v.(*Unit)
	req, err := ihttp.BuildRequest(pool.ctx, pool.ClientType, unit.path, "", "", "GET")
	if err != nil {
		logs.Log.Error(err.Error())
		return
	}
	req.SetHeaders(pool.Headers, pool.RandomUserAgent)
	resp, reqerr := pool.client.Do(req)
	if pool.ClientType == ihttp.FAST {
		// pooled fasthttp objects must be released
		defer fasthttp.ReleaseResponse(resp.FastResponse)
		defer fasthttp.ReleaseRequest(req.FastRequest)
	}
	if reqerr != nil {
		logs.Log.Error(reqerr.Error())
		return
	}
	if resp.StatusCode() == 200 {
		bl := baseline.NewBaseline(req.URI(), req.Host(), resp)
		bl.Source = unit.source
		bl.ReqDepth = unit.depth
		bl.Collect()
		bl.CollectURL()
		pool.wg.Add(1) // slot consumed by doScopeCrawl
		pool.doScopeCrawl(bl)
		pool.putToOutput(bl)
	}
}
// Handler is the async analysis loop: it consumes baselines from
// processCh, updates statistics, applies match/filter/unique/recursion
// logic and emits surviving results. It runs until processCh is drained,
// then flags analyzeDone.
//
// Cleanup: the status/source counters now use Go's map zero-value
// semantics (m[k]++ on a missing key starts from 0), replacing the
// redundant exists-check/else-set-1 pattern; behavior is unchanged.
func (pool *BrutePool) Handler() {
	for bl := range pool.processCh {
		if bl.IsValid {
			pool.addFuzzyBaseline(bl)
		}
		// missing keys read as 0, so a plain increment is equivalent to the
		// old "if exists ++ else = 1" dance
		pool.Statistor.Counts[bl.Status]++
		pool.Statistor.Sources[bl.Source]++
		// the expression params are only materialized when some expression
		// is configured
		var params map[string]interface{}
		if pool.MatchExpr != nil || pool.FilterExpr != nil || pool.RecuExpr != nil {
			params = map[string]interface{}{
				"index":   pool.index,
				"random":  pool.random,
				"current": bl,
			}
		}
		var ok bool
		if pool.MatchExpr != nil {
			// a custom match expression replaces the baseline comparison
			if pkg.CompareWithExpr(pool.MatchExpr, params) {
				ok = true
			}
		} else {
			ok = pool.BaseCompare(bl)
		}
		if ok {
			// uniqueness check: demote duplicates of an already-seen body
			if EnableAllUnique || iutils.IntsContains(pkg.UniqueStatus, bl.Status) {
				if _, ok := pool.uniques[bl.Unique]; ok {
					bl.IsValid = false
					bl.IsFuzzy = true
					bl.Reason = pkg.ErrFuzzyNotUnique.Error()
				} else {
					pool.uniques[bl.Unique] = struct{}{}
				}
			}
			// re-filter data that passed every comparison
			if bl.IsValid && pool.FilterExpr != nil && pkg.CompareWithExpr(pool.FilterExpr, params) {
				pool.Statistor.FilteredNumber++
				bl.Reason = pkg.ErrCustomFilter.Error()
				bl.IsValid = false
			}
		} else {
			bl.IsValid = false
		}
		if bl.IsValid || (bl.IsFuzzy && pool.Fuzzy) {
			pool.doCrawl(bl)
			pool.doAppend(bl)
		}
		// recursion requires: valid result and depth below the maximum
		if bl.IsValid {
			pool.Statistor.FoundNumber++
			if bl.RecuDepth < pool.MaxRecursionDepth {
				if pkg.CompareWithExpr(pool.RecuExpr, params) {
					bl.Recu = true
				}
			}
		}
		if !pool.closed {
			// once the task is cancelled, unprocessed results are dropped
			pool.putToOutput(bl)
		}
		pool.wg.Done()
	}
	pool.analyzeDone = true
}
// checkRedirect reports whether a redirect target is interesting: anything
// is kept when the random baseline did not redirect, otherwise only
// Locations that differ from the baseline's redirect survive.
func (pool *BrutePool) checkRedirect(redirectURL string) bool {
	if pool.random.RedirectURL == "" {
		// no baseline redirect to compare against: keep everything
		return true
	}
	// identical to the baseline redirect => noise; different => valid
	return redirectURL != pool.random.RedirectURL
}
// Upgrade switches the pool from http to https when bl's redirect points
// at the same host over https, then re-runs Init against the new scheme.
// Returns any error from the re-initialization.
func (pool *BrutePool) Upgrade(bl *baseline.Baseline) error {
	rurl, err := url.Parse(bl.RedirectURL)
	if err == nil && rurl.Hostname() == bl.Url.Hostname() && bl.Url.Scheme == "http" && rurl.Scheme == "https" {
		logs.Log.Infof("baseurl %s upgrade http to https, reinit", pool.BaseURL)
		pool.base = strings.Replace(pool.BaseURL, "http", "https", 1)
		pool.url.Scheme = "https"
		// re-initialize the baselines over https
		err = pool.Init()
		if err != nil {
			return err
		}
	}
	return nil
}
// PreCompare is a cheap status-code triage run before the full baseline
// comparison: whitelisted codes always pass, blacklisted and WAF codes
// are rejected, and redirects matching the random baseline's Location are
// filtered out. A nil return means the response deserves deep comparison.
func (pool *BrutePool) PreCompare(resp *ihttp.Response) error {
	status := resp.StatusCode()
	switch {
	case pkg.StatusContain(pkg.WhiteStatus, status):
		// whitelisted status codes short-circuit every other check
		return nil
	case pkg.StatusContain(pkg.BlackStatus, status):
		return pkg.ErrBadStatus
	case pkg.StatusContain(pkg.WAFStatus, status):
		return pkg.ErrWaf
	case !pool.checkRedirect(resp.GetHeader("Location")):
		return pkg.ErrRedirect
	default:
		return nil
	}
}
// checkHost reports whether u points at the same host as the pool target.
// Relative URLs (empty host) and unparseable URLs are treated as same-host.
func (pool *BrutePool) checkHost(u string) bool {
	v, err := url.Parse(u)
	if err != nil {
		return true
	}
	return v.Host == "" || v.Host == pool.url.Host
}
// BaseCompare performs the deep comparison of bl against the most relevant
// baseline (per-status fuzzy baseline, then random, then index). It returns
// false for invalid, redirect-noise, compare-failed or fuzzy-duplicate
// results, tagging bl.Reason accordingly; fuzzy duplicates are also routed
// to the fuzzy channel.
func (pool *BrutePool) BaseCompare(bl *baseline.Baseline) bool {
	if !bl.IsValid {
		return false
	}
	var status = -1
	// special handling for 30x responses
	if bl.RedirectURL != "" {
		if bl.SameRedirectDomain && strings.HasSuffix(bl.RedirectURL, bl.Url.Path+"/") {
			// same-domain redirect that just appends "/" => directory noise
			bl.Reason = pkg.ErrFuzzyRedirect.Error()
			return false
		}
	}
	// pick the baseline with the matching status code (only present when
	// that code was pre-configured via fuzzy-status)
	base, ok := pool.baselines[bl.Status]
	if bl.IsBaseline {
		// never compare a baseline against itself
		ok = false
	}
	if !ok {
		if pool.random.Status == bl.Status {
			// fall back to the random baseline on a status match
			ok = true
			base = pool.random
		} else if pool.index.Status == bl.Status {
			// otherwise fall back to the index baseline on a status match
			ok = true
			base = pool.index
		}
	}
	if ok {
		if status = base.Compare(bl); status == 1 {
			bl.Reason = pkg.ErrCompareFailed.Error()
			return false
		}
	}
	bl.Hashes = parsers.NewHashes(bl.Raw)
	// a Compare result of 0 means "uncertain": run the fuzzy comparison
	if ok && status == 0 && base.FuzzyCompare(bl) {
		pool.Statistor.FuzzyNumber++
		bl.Reason = pkg.ErrFuzzyCompareFailed.Error()
		pool.putToFuzzy(bl)
		return false
	}
	return true
}
// addFuzzyBaseline promotes bl to a per-status baseline the first time its
// status code is seen (when that code is fuzzy-enabled), so later
// responses with the same status can be compared against it. Even invalid
// pages get crawled here, since they may still expose useful urls.
func (pool *BrutePool) addFuzzyBaseline(bl *baseline.Baseline) {
	if _, ok := pool.baselines[bl.Status]; !ok && (EnableAllFuzzy || iutils.IntsContains(pkg.FuzzyStatus, bl.Status)) {
		bl.IsBaseline = true
		bl.Collect()
		pool.doCrawl(bl) // even non-valid pages may contain crawlable urls
		pool.baselines[bl.Status] = bl
		logs.Log.Logf(pkg.LogVerbose, "[baseline.%dinit] %s", bl.Status, bl.Format([]string{"status", "length", "spend", "title", "frame", "redirect"}))
	}
}
// fallback logs the abort banner plus at most the first six failed
// baselines, recording the breakpoint offset for resume.
func (pool *BrutePool) fallback() {
	logs.Log.Errorf("%s ,failed request exceeds the threshold , task will exit. Breakpoint %d", pool.BaseURL, pool.wordOffset)
	failed := pool.FailedBaselines
	if len(failed) > 6 {
		failed = failed[:6]
	}
	for i, bl := range failed {
		logs.Log.Errorf("[failed.%d] %s", i, bl.String())
	}
}
// Close tears the pool down after Run's loop exits: it waits for the async
// Handler backlog, closes the addition channel, stamps the end time and
// releases both ants pools.
// NOTE(review): `for pool.analyzeDone` spins only while analyzeDone is
// already true, i.e. it does NOT wait for Handler to finish — the intent
// suggested by the comment would need `!pool.analyzeDone`; confirm the
// intended semantics before changing (processCh is never closed here, so a
// naive flip could spin forever).
func (pool *BrutePool) Close() {
	for pool.analyzeDone {
		// wait for the buffered pending results to be processed
		time.Sleep(time.Duration(100) * time.Millisecond)
	}
	close(pool.additionCh) // close the addition channel
	//close(pool.checkCh)
	pool.Statistor.EndTime = time.Now().Unix()
	pool.reqPool.Release()
	pool.scopePool.Release()
}
// safePath joins u onto the pool's working directory without producing
// duplicated slashes; used for generated paths (init, check, common, ...).
func (pool *BrutePool) safePath(u string) string {
	base := pool.url.Path + "/"
	if pool.isDir {
		base = pool.dir
	}
	return pkg.SafePath(base, u)
}
// resetFailed clears the failure window after a check probe passes.
// failedCount restarts at 1, matching its initial value in NewBrutePool.
// NOTE(review): plain assignment while other writers use atomic.AddInt32 —
// racy by inspection; confirm this is acceptable here.
func (pool *BrutePool) resetFailed() {
	pool.failedCount = 1
	pool.FailedBaselines = nil
}
// doCheck aborts the task once the failure counter crosses BreakThreshold
// (only the first caller triggers the fallback, via isFallback); otherwise
// it schedules one check probe on checkCh for the active spray mode.
func (pool *BrutePool) doCheck() {
	if pool.failedCount > pool.BreakThreshold {
		// too many failures: dump the failure report once and cancel
		if pool.isFallback.Load() {
			return
		}
		pool.isFallback.Store(true)
		pool.fallback()
		pool.IsFailed = true
		pool.Cancel()
		return
	}
	// both spray modes queue the same probe signal
	if pool.Mod == HostSpray || pool.Mod == PathSpray {
		pool.checkCh <- struct{}{}
	}
}
// doRedirect follows bl's Location (up to MaxRedirect hops) by queueing a
// RedirectSource unit; the redirect target is resolved relative to the
// current path. The wg slot is held only for the asynchronous hand-off.
func (pool *BrutePool) doRedirect(bl *baseline.Baseline, depth int) {
	if depth >= pool.MaxRedirect {
		return
	}
	reURL := pkg.FormatURL(bl.Url.Path, bl.RedirectURL)
	pool.wg.Add(1)
	go func() {
		defer pool.wg.Done()
		pool.addAddition(&Unit{
			path:     reURL,
			parent:   bl.Number,
			host:     bl.Host,
			source:   parsers.RedirectSource,
			from:     bl.Source,
			frontUrl: bl.UrlString,
			depth:    depth + 1,
		})
	}()
}
// doCrawl extracts urls from bl and queues them as CrawlSource units, up
// to MaxCrawlDepth. It adds two wg slots: one consumed by doScopeCrawl
// (for absolute/scope urls) and one by the local goroutine (for relative
// paths resolved against the current page).
func (pool *BrutePool) doCrawl(bl *baseline.Baseline) {
	if !pool.Crawl || bl.ReqDepth >= pool.MaxCrawlDepth {
		return
	}
	bl.CollectURL()
	if bl.URLs == nil {
		return
	}
	pool.wg.Add(2)
	pool.doScopeCrawl(bl)
	go func() {
		defer pool.wg.Done()
		for _, u := range bl.URLs {
			// resolve the extracted url against the current path; skip
			// anything that does not normalize
			if u = pkg.FormatURL(bl.Url.Path, u); u == "" {
				continue
			}
			pool.addAddition(&Unit{
				path:   u,
				parent: bl.Number,
				host:   bl.Host,
				source: parsers.CrawlSource,
				from:   bl.Source,
				depth:  bl.ReqDepth + 1,
			})
		}
	}()
}
// doScopeCrawl crawls absolute urls found on the page that match the
// --scope globs, dispatching them to the dedicated scopePool. Dedup is via
// the scopeurls set guarded by scopeLocker. The caller has already done
// pool.wg.Add(1) for this call.
//
// BUGFIX: the dedup check consulted pool.scopeurls but nothing ever
// inserted into it (the url was stored in pool.urls instead), so the same
// url could be re-crawled on every page that links it; the url is now
// recorded in scopeurls as well.
func (pool *BrutePool) doScopeCrawl(bl *baseline.Baseline) {
	if bl.ReqDepth >= pool.MaxCrawlDepth {
		pool.wg.Done()
		return
	}
	go func() {
		defer pool.wg.Done()
		for _, u := range bl.URLs {
			if !strings.HasPrefix(u, "http") {
				// only absolute urls participate in scope crawling
				continue
			}
			if v, _ := url.Parse(u); v == nil || !pkg.MatchWithGlobs(v.Host, pool.Scope) {
				continue
			}
			pool.scopeLocker.Lock()
			if _, ok := pool.scopeurls[u]; !ok {
				pool.scopeurls[u] = struct{}{} // record so the dedup check above can fire
				pool.urls.Store(u, nil)
				pool.wg.Add(1)
				pool.scopePool.Invoke(&Unit{
					path:   u,
					parent: bl.Number,
					source: parsers.CrawlSource,
					from:   bl.Source,
					depth:  bl.ReqDepth + 1,
				})
			}
			pool.scopeLocker.Unlock()
		}
	}()
}
// doFuzzuli generates backup-archive candidate names from the target host
// (fuzzuli algorithm) and queues them as BakSource units. Path mode only.
func (pool *BrutePool) doFuzzuli() {
	defer pool.wg.Done()
	if pool.Mod == HostSpray {
		return
	}
	worder := NewBruteDSL(pool.Config, "{?0}.{?@bak_ext}", [][]string{pkg.BakGenerator(pool.url.Host)})
	for w := range worder.Output {
		pool.addAddition(&Unit{
			source: parsers.BakSource,
			path:   pool.dir + w,
		})
	}
}
// doBak queues generic backup-file guesses (built-in name/extension DSL)
// as BakSource units. Path mode only.
func (pool *BrutePool) doBak() {
	defer pool.wg.Done()
	if pool.Mod == HostSpray {
		return
	}
	worder := NewBruteDSL(pool.Config, "{?@bak_name}.{?@bak_ext}", nil)
	for w := range worder.Output {
		pool.addAddition(&Unit{
			source: parsers.BakSource,
			path:   pool.dir + w,
		})
	}
}
// doAppend kicks off both append strategies for a valid directory; the two
// wg slots added here are consumed by doAppendWords and doAppendRule
// respectively (each Done()s even when it bails out early).
func (pool *BrutePool) doAppend(bl *baseline.Baseline) {
	pool.wg.Add(2)
	pool.doAppendWords(bl)
	pool.doAppendRule(bl)
}
// doAppendRule expands the --append-rule program against bl's filename and
// queues the generated names as AppendRuleSource units. Skips its own
// output (to avoid self-recursion) and respects MaxAppendDepth. The caller
// has already wg.Add(1)'d for this call.
func (pool *BrutePool) doAppendRule(bl *baseline.Baseline) {
	if pool.AppendRule == nil || bl.Source == parsers.AppendRuleSource || bl.ReqDepth >= pool.MaxAppendDepth {
		pool.wg.Done()
		return
	}
	go func() {
		defer pool.wg.Done()
		for u := range rule.RunAsStream(pool.AppendRule.Expressions, path.Base(bl.Path)) {
			pool.addAddition(&Unit{
				path:   pkg.Dir(bl.Url.Path) + u,
				parent: bl.Number,
				host:   bl.Host,
				source: parsers.AppendRuleSource,
				from:   bl.Source,
				depth:  bl.ReqDepth + 1,
			})
		}
	}()
}
// doAppendWords appends the --append wordlist under a valid directory and
// queues the results as AppendSource units. Skips append/rule-generated
// sources to prevent self-recursion; respects MaxAppendDepth. The caller
// has already wg.Add(1)'d for this call.
// NOTE(review): the guard checks bl.ReqDepth but the new unit's depth is
// derived from bl.RecuDepth — inconsistent with doAppendRule (which uses
// ReqDepth+1); confirm which depth the append chain is meant to track.
func (pool *BrutePool) doAppendWords(bl *baseline.Baseline) {
	if pool.AppendWords == nil || bl.Source == parsers.AppendSource || bl.Source == parsers.RuleSource || bl.ReqDepth >= pool.MaxAppendDepth {
		// guard against recursing on our own output
		pool.wg.Done()
		return
	}
	go func() {
		defer pool.wg.Done()
		for u := range NewBruteWords(pool.Config, pool.AppendWords).Output {
			pool.addAddition(&Unit{
				path:   pkg.SafePath(bl.Path, u),
				parent: bl.Number,
				host:   bl.Host,
				source: parsers.AppendSource,
				from:   bl.Source,
				depth:  bl.RecuDepth + 1,
			})
		}
	}()
}
// doActive queues the built-in active-fingerprint probe paths as
// FingerSource units. Path mode only.
func (pool *BrutePool) doActive() {
	defer pool.wg.Done()
	if pool.Mod == HostSpray {
		return
	}
	for _, u := range pkg.ActivePath {
		pool.addAddition(&Unit{
			// u[1:] drops the leading character — assumes every ActivePath
			// entry starts with "/" (TODO confirm against pkg.ActivePath)
			path:   pool.dir + u[1:],
			source: parsers.FingerSource,
		})
	}
}
// doCommonFile queues the built-in "common" and "log" dictionaries as
// CommonFileSource units. Path mode only.
func (pool *BrutePool) doCommonFile() {
	defer pool.wg.Done()
	if pool.Mod == HostSpray {
		return
	}
	dict := append(pkg.Dicts["common"], pkg.Dicts["log"]...)
	for u := range NewBruteWords(pool.Config, dict).Output {
		pool.addAddition(&Unit{
			source: parsers.CommonFileSource,
			path:   pool.dir + u,
		})
	}
}

233
core/pool/checkpool.go Normal file
View File

@ -0,0 +1,233 @@
package pool
import (
"context"
"github.com/chainreactors/logs"
"github.com/chainreactors/parsers"
"github.com/chainreactors/spray/core/baseline"
"github.com/chainreactors/spray/core/ihttp"
"github.com/chainreactors/spray/pkg"
"github.com/panjf2000/ants/v2"
"net/url"
"strings"
"sync"
"time"
)
// NewCheckPool builds a CheckPool: an httpx-like stateless probe mode with
// no scope handling and no per-target concurrency pool. It forces the
// standard net/http client and "Connection: close" so every target gets a
// fresh connection.
func NewCheckPool(ctx context.Context, config *Config) (*CheckPool, error) {
	pctx, cancel := context.WithCancel(ctx)
	config.ClientType = ihttp.STANDARD
	pool := &CheckPool{
		BasePool: &BasePool{
			Config:    config,
			Statistor: pkg.NewStatistor(""),
			ctx:       pctx,
			Cancel:    cancel,
			client: ihttp.NewClient(&ihttp.ClientConfig{
				Thread:      config.Thread,
				Type:        config.ClientType,
				Timeout:     config.Timeout,
				ProxyClient: config.ProxyClient,
			}),
			wg:         &sync.WaitGroup{},
			additionCh: make(chan *Unit, 1024),
			closeCh:    make(chan struct{}),
			processCh:  make(chan *baseline.Baseline, config.Thread),
		},
	}
	pool.Headers.Set("Connection", "close")
	p, _ := ants.NewPoolWithFunc(config.Thread, pool.Invoke)
	pool.Pool = p
	// detach the async result handler
	go pool.Handler()
	return pool, nil
}
// CheckPool probes a list of urls (one wordlist entry = one target),
// reusing BasePool's plumbing plus a single ants worker pool.
type CheckPool struct {
	*BasePool
	Pool *ants.PoolWithFunc // request workers (Invoke)
}
// Run drives the check-mode loop: each wordlist entry is one target url;
// offset/limit implement resume and capping via reqCount. Redirect and
// protocol-upgrade follow-ups arrive over additionCh.
func (pool *CheckPool) Run(ctx context.Context, offset, limit int) {
	pool.Worder.Run()
	var done bool
	// monitor goroutine: polls done every 100ms, then waits for in-flight
	// requests and closes closeCh so the select loop below can break out
	go func() {
		for {
			if done {
				pool.wg.Wait()
				close(pool.closeCh)
				return
			}
			time.Sleep(100 * time.Millisecond)
		}
	}()
Loop:
	for {
		select {
		case u, ok := <-pool.Worder.Output:
			if !ok {
				done = true
				continue
			}
			if pool.reqCount < offset {
				// below the resume offset: count but skip
				pool.reqCount++
				continue
			}
			if pool.reqCount > limit {
				continue
			}
			pool.wg.Add(1)
			_ = pool.Pool.Invoke(newUnit(u, parsers.CheckSource))
		case u, ok := <-pool.additionCh:
			if !ok {
				continue
			}
			// wg slot was added by the producer (doRedirect/doUpgrade)
			_ = pool.Pool.Invoke(u)
		case <-pool.closeCh:
			break Loop
		case <-ctx.Done():
			break Loop
		case <-pool.ctx.Done():
			break Loop
		}
	}
	pool.Close()
}
// Close shuts the progress bar and releases the worker pool.
func (pool *CheckPool) Close() {
	pool.Bar.Close()
	pool.Pool.Release()
}
// Invoke is the worker body for one target url: it builds and sends the
// request, wraps the outcome in a Baseline (url errors and request
// failures become invalid baselines) and forwards it to Handler. Failed
// requests and HTTP 400 responses trigger a one-shot scheme flip via
// doUpgrade (http<->https).
func (pool *CheckPool) Invoke(v interface{}) {
	defer func() {
		pool.reqCount++
		pool.wg.Done()
	}()
	unit := v.(*Unit)
	req, err := ihttp.BuildRequest(pool.ctx, pool.ClientType, unit.path, "", "", "GET")
	if err != nil {
		logs.Log.Debug(err.Error())
		// malformed url: emit an invalid baseline so it is still reported
		bl := &baseline.Baseline{
			SprayResult: &parsers.SprayResult{
				UrlString: unit.path,
				IsValid:   false,
				ErrString: err.Error(),
				Reason:    pkg.ErrUrlError.Error(),
				ReqDepth:  unit.depth,
			},
		}
		pool.processCh <- bl
		return
	}
	req.SetHeaders(pool.Headers, pool.RandomUserAgent)
	start := time.Now()
	var bl *baseline.Baseline
	resp, reqerr := pool.client.Do(req)
	if reqerr != nil {
		pool.failedCount++
		bl = &baseline.Baseline{
			SprayResult: &parsers.SprayResult{
				UrlString: unit.path,
				IsValid:   false,
				ErrString: reqerr.Error(),
				Reason:    pkg.ErrRequestFailed.Error(),
				ReqDepth:  unit.depth,
			},
		}
		logs.Log.Debugf("%s, %s", unit.path, reqerr.Error())
		// connection-level failure: try the opposite scheme once
		pool.doUpgrade(bl)
	} else {
		bl = baseline.NewBaseline(req.URI(), req.Host(), resp)
		bl.ReqDepth = unit.depth
		bl.Collect()
		if bl.Status == 400 {
			// 400 often means plain HTTP sent to an HTTPS port: flip scheme
			pool.doUpgrade(bl)
		}
	}
	// ReqDepth is set again here to cover the error-path baselines too
	bl.ReqDepth = unit.depth
	bl.Source = unit.source
	bl.Spended = time.Since(start).Milliseconds()
	pool.processCh <- bl
}
// Handler consumes processed baselines: valid redirects are followed (and
// emitted), a custom match expression may re-validate the others, and
// every baseline is pushed to the output channel; check-sourced entries
// also advance the progress bar.
// NOTE(review): a valid baseline with a RedirectURL reaches putToOutput
// twice (inside the redirect branch and again at the bottom of the loop) —
// confirm whether the duplicate emission is intended.
func (pool *CheckPool) Handler() {
	for bl := range pool.processCh {
		if bl.IsValid {
			if bl.RedirectURL != "" {
				pool.doRedirect(bl, bl.ReqDepth)
				pool.putToOutput(bl)
			} else {
				params := map[string]interface{}{
					"current": bl,
				}
				if pool.MatchExpr != nil && pkg.CompareWithExpr(pool.MatchExpr, params) {
					bl.IsValid = true
				}
			}
		}
		if bl.Source == parsers.CheckSource {
			pool.Bar.Done()
		}
		pool.putToOutput(bl)
	}
}
// doRedirect follows bl's Location for up to MaxRedirect hops: absolute
// http(s) urls are used as-is (after a parse sanity check), relative ones
// are joined against the source url. The unit is handed off asynchronously
// via additionCh; the wg slot added here is released by Invoke.
func (pool *CheckPool) doRedirect(bl *baseline.Baseline, depth int) {
	if depth >= pool.MaxRedirect {
		return
	}
	var reURL string
	if strings.HasPrefix(bl.RedirectURL, "http") {
		// absolute target: just validate that it parses
		_, err := url.Parse(bl.RedirectURL)
		if err != nil {
			return
		}
		reURL = bl.RedirectURL
	} else {
		// relative target: resolve against the source url's base
		reURL = pkg.BaseURL(bl.Url) + pkg.FormatURL(pkg.BaseURL(bl.Url), bl.RedirectURL)
	}
	pool.wg.Add(1)
	go func() {
		pool.additionCh <- &Unit{
			path:     reURL,
			parent:   bl.Number,
			source:   parsers.RedirectSource,
			frontUrl: bl.UrlString,
			depth:    depth + 1,
			from:     bl.Source,
		}
	}()
}
// doUpgrade retries the target once with the opposite scheme: https
// failures fall back to http and vice versa, covering raw-TCP errors and
// "plain HTTP sent to HTTPS port" 400 responses. Limited to a single hop
// (ReqDepth must still be 0). The wg slot added here is released when the
// queued unit is processed by Invoke.
func (pool *CheckPool) doUpgrade(bl *baseline.Baseline) {
	if bl.ReqDepth >= 1 {
		return
	}
	pool.wg.Add(1)
	target := bl.UrlString
	var reurl string
	if strings.HasPrefix(target, "https") {
		reurl = strings.Replace(target, "https", "http", 1)
	} else {
		reurl = strings.Replace(target, "http", "https", 1)
	}
	go func() {
		pool.additionCh <- &Unit{
			path:   reurl,
			parent: bl.Number,
			source: parsers.UpgradeSource,
			depth:  bl.ReqDepth + 1,
			from:   bl.Source,
		}
	}()
}

72
core/pool/config.go Normal file
View File

@ -0,0 +1,72 @@
package pool
import (
"github.com/chainreactors/logs"
"github.com/chainreactors/proxyclient"
"github.com/chainreactors/spray/core/baseline"
"github.com/chainreactors/words"
"github.com/chainreactors/words/rule"
"github.com/expr-lang/expr/vm"
"net/http"
"sync"
"time"
)
// Config carries every per-task option shared by BrutePool and CheckPool;
// it is produced by Runner.PrepareConfig.
type Config struct {
	BaseURL     string
	ProxyClient proxyclient.Dial
	Thread      int
	Wordlist    []string
	Timeout     time.Duration
	// result plumbing shared with the Runner
	ProcessCh chan *baseline.Baseline
	OutputCh  chan *baseline.Baseline
	FuzzyCh   chan *baseline.Baseline
	Outwg     *sync.WaitGroup // tracks results still in the output pipeline
	RateLimit int
	// health-check tuning
	CheckPeriod    int   // insert a check probe every N requests
	ErrPeriod      int32 // insert a check probe every N failures
	BreakThreshold int32 // abort the task after this many failures
	Method         string
	Mod            SprayMod
	Headers        http.Header
	ClientType     int
	// compiled expressions (expr-lang)
	MatchExpr   *vm.Program
	FilterExpr  *vm.Program
	RecuExpr    *vm.Program
	AppendRule  *rule.Program
	Fns         []words.WordFunc
	AppendWords []string
	Fuzzy       bool
	IgnoreWaf   bool
	// plugin toggles
	Crawl           bool
	Scope           []string
	Active          bool
	Bak             bool
	Fuzzuli         bool
	Common          bool
	RetryLimit      int
	RandomUserAgent bool
	Random          string // custom random-baseline path/host
	Index           string // custom index-baseline path
	// depth limits for the various follow-up generators
	MaxRedirect       int
	MaxCrawlDepth     int
	MaxRecursionDepth int
	MaxAppendDepth    int
}
// NewBruteWords wraps a plain wordlist in a started Worder with the
// configured word-transform functions applied.
func NewBruteWords(config *Config, list []string) *words.Worder {
	w := words.NewWorderWithList(list)
	w.Fns = config.Fns
	w.Run()
	return w
}
// NewBruteDSL builds a started Worder from a words DSL expression (e.g.
// "{?@bak_name}.{?@bak_ext}") with optional positional params.
// NOTE(review): when the DSL fails to parse, the error is only logged and
// word is nil, so the following field assignment panics — confirm whether
// the DSL inputs are guaranteed valid or an early return is needed.
func NewBruteDSL(config *Config, dsl string, params [][]string) *words.Worder {
	word, err := words.NewWorderWithDsl(dsl, params, nil)
	if err != nil {
		logs.Log.Error(err.Error())
	}
	word.Fns = config.Fns
	word.Run()
	return word
}

72
core/pool/pool.go Normal file
View File

@ -0,0 +1,72 @@
package pool
import (
"context"
"github.com/chainreactors/parsers"
"github.com/chainreactors/spray/core/baseline"
"github.com/chainreactors/spray/core/ihttp"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/words"
"sync"
"sync/atomic"
)
// BasePool holds the state shared by BrutePool and CheckPool: the HTTP
// client, lifecycle context, statistics and the channels connecting the
// producers, workers and the output handler.
type BasePool struct {
	*Config
	Statistor   *pkg.Statistor
	Bar         *pkg.Bar
	Worder      *words.Worder
	Cancel      context.CancelFunc
	client      *ihttp.Client
	ctx         context.Context
	processCh   chan *baseline.Baseline // baselines awaiting analysis
	dir         string                  // normalized working directory (ends with "/")
	reqCount    int
	failedCount int
	additionCh  chan *Unit // plugin-generated follow-up units
	closeCh     chan struct{}
	wg          *sync.WaitGroup
	isFallback  atomic.Bool // ensures the failure-threshold abort fires once
}
// doRetry re-queues a failed request as a RetrySource unit until
// RetryLimit is reached; the wg slot covers the asynchronous hand-off.
func (pool *BasePool) doRetry(bl *baseline.Baseline) {
	if bl.Retry >= pool.RetryLimit {
		return
	}
	pool.wg.Add(1)
	go func() {
		defer pool.wg.Done()
		pool.addAddition(&Unit{
			path:   bl.Path,
			parent: bl.Number,
			host:   bl.Host,
			source: parsers.RetrySource,
			from:   bl.Source,
			retry:  bl.Retry + 1,
		})
	}()
}
// addAddition feeds a follow-up unit into the addition channel shared by
// the plugin producers (crawl, bak, append, retry, ...). A late producer
// may fire after the channel is closed, so the send is guarded by recover
// instead of crashing.
//
// BUGFIX: the recovered panic path previously leaked the wg.Add(1) done
// for the unit (nothing would ever consume it and call Done); the slot is
// now released inside the recover so Add/Done stays balanced.
func (pool *BasePool) addAddition(u *Unit) {
	pool.wg.Add(1)
	defer func() {
		if err := recover(); err != nil {
			// send on closed channel: the unit will never be consumed,
			// so give back its waitgroup slot
			pool.wg.Done()
		}
	}()
	pool.additionCh <- u
}
// putToOutput emits bl on the global output channel; valid or fuzzy
// results have their fingerprints collected first. Outwg is incremented
// per emit and released by the output consumer.
func (pool *BasePool) putToOutput(bl *baseline.Baseline) {
	if bl.IsValid || bl.IsFuzzy {
		bl.Collect()
	}
	pool.Outwg.Add(1)
	pool.OutputCh <- bl
}
// putToFuzzy marks bl as a fuzzy result and hands it to the fuzzy output
// channel; Outwg is released by the output consumer.
func (pool *BasePool) putToFuzzy(bl *baseline.Baseline) {
	bl.IsFuzzy = true
	pool.Outwg.Add(1)
	pool.FuzzyCh <- bl
}

57
core/pool/type.go Normal file
View File

@ -0,0 +1,57 @@
package pool
import (
"github.com/chainreactors/parsers"
"github.com/chainreactors/spray/core/baseline"
)
// newUnit builds a bare request unit carrying only a path and its source.
func newUnit(path string, source parsers.SpraySource) *Unit {
	return &Unit{
		path:   path,
		source: source,
	}
}
// Unit is one queued request: a path (or host, in host-spray mode) plus
// the provenance metadata used to rebuild the result chain.
type Unit struct {
	number   int // wordlist offset that produced this unit
	parent   int // number of the baseline this unit derives from
	host     string
	path     string
	from     parsers.SpraySource // source of the parent baseline
	source   parsers.SpraySource // how this unit was generated
	retry    int                 // retry attempts so far
	frontUrl string              // original url for redirect-follow units
	depth    int                 // crawl/redirect/append recursion depth
}
// Update copies the unit's provenance fields onto the resulting baseline.
func (u *Unit) Update(bl *baseline.Baseline) {
	bl.Number = u.number
	bl.Parent = u.parent
	bl.Host = u.host
	bl.Path = u.path
	bl.Source = u.source
}
// NewBaselines allocates an empty per-status baseline registry.
func NewBaselines() *Baselines {
	return &Baselines{
		baselines: make(map[int]*baseline.Baseline),
	}
}
// Baselines stores the reference responses a pool compares against: the
// index page, a random (not-found) path, per-status fuzzy baselines and
// the requests that failed outright.
type Baselines struct {
	FailedBaselines []*baseline.Baseline
	random          *baseline.Baseline
	index           *baseline.Baseline
	baselines       map[int]*baseline.Baseline // keyed by HTTP status code
}
// SprayMod selects which part of the request the wordlist is substituted
// into.
type SprayMod int

const (
	PathSpray   SprayMod = iota + 1 // words become url paths
	HostSpray                       // words become Host headers / vhosts
	ParamSpray                      // declared but not mapped in ModMap
	CustomSpray                     // declared but not mapped in ModMap
)
// ModMap maps the --mod command-line value to its SprayMod.
var ModMap = map[string]SprayMod{
	"path": PathSpray,
	"host": HostSpray,
}

460
core/runner.go Normal file
View File

@ -0,0 +1,460 @@
package core
import (
"context"
"github.com/chainreactors/files"
"github.com/chainreactors/logs"
"github.com/chainreactors/proxyclient"
"github.com/chainreactors/spray/core/baseline"
"github.com/chainreactors/spray/core/ihttp"
"github.com/chainreactors/spray/core/pool"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/words"
"github.com/chainreactors/words/rule"
"github.com/expr-lang/expr/vm"
"github.com/panjf2000/ants/v2"
"github.com/vbauerster/mpb/v8"
"github.com/vbauerster/mpb/v8/decor"
"net/http"
"strings"
"sync"
"time"
)
var (
	// MAX is the largest int32 value (2^31-1), used as an
	// effectively-unbounded sentinel — presumably for task limits; confirm
	// at the call sites.
	MAX = 2147483647
)
// Runner owns one spray invocation: it expands the parsed options into
// tasks, fans them out to per-target pools and funnels results to the
// output handlers.
type Runner struct {
	*Option
	taskCh        chan *Task
	poolwg        *sync.WaitGroup // running pools
	outwg         *sync.WaitGroup // results still being written out
	outputCh      chan *baseline.Baseline
	fuzzyCh       chan *baseline.Baseline
	bar           *mpb.Bar
	bruteMod      bool
	ProxyClient   proxyclient.Dial
	IsCheck       bool // check-only mode (httpx-like)
	Pools         *ants.PoolWithFunc
	PoolName      map[string]bool // already-scheduled pool urls
	Tasks         *TaskGenerator
	Rules         *rule.Program
	AppendRules   *rule.Program
	Headers       map[string]string
	FilterExpr    *vm.Program
	MatchExpr     *vm.Program
	RecursiveExpr *vm.Program
	OutputFile    *files.File
	//FuzzyFile *files.File
	DumpFile    *files.File
	StatFile    *files.File
	Progress    *mpb.Progress
	Fns         []words.WordFunc
	Count       int // tasks total number
	Wordlist    []string
	AppendWords []string
	ClientType  int
	Probes      []string
	Total       int // wordlist total number
	Color       bool
	Jsonify     bool
}
// PrepareConfig translates the Runner's resolved options into the pool
// Config shared by brute and check pools, auto-selecting the HTTP client
// type per spray mode and filling default headers.
func (r *Runner) PrepareConfig() *pool.Config {
	config := &pool.Config{
		Thread:         r.Threads,
		Timeout:        time.Duration(r.Timeout) * time.Second,
		RateLimit:      r.RateLimit,
		Headers:        make(http.Header),
		Method:         r.Method,
		Mod:            pool.ModMap[r.Mod],
		OutputCh:       r.outputCh,
		FuzzyCh:        r.fuzzyCh,
		Outwg:          r.outwg,
		Fuzzy:          r.Fuzzy,
		CheckPeriod:    r.CheckPeriod,
		ErrPeriod:      int32(r.ErrPeriod),
		BreakThreshold: int32(r.BreakThreshold),
		MatchExpr:      r.MatchExpr,
		FilterExpr:     r.FilterExpr,
		RecuExpr:       r.RecursiveExpr,
		AppendRule:     r.AppendRules, // rule-generated names appended under valid dirs
		AppendWords:    r.AppendWords, // extra wordlist appended under valid dirs
		Fns:            r.Fns,
		//IgnoreWaf: r.IgnoreWaf,
		Crawl:             r.CrawlPlugin,
		Scope:             r.Scope,
		Active:            r.Finger,
		Bak:               r.BakPlugin,
		Fuzzuli:           r.FuzzuliPlugin,
		Common:            r.CommonPlugin,
		RetryLimit:        r.RetryCount,
		ClientType:        r.ClientType,
		RandomUserAgent:   r.RandomUserAgent,
		Random:            r.Random,
		Index:             r.Index,
		MaxRecursionDepth: r.Depth,
		MaxRedirect:       3,
		MaxAppendDepth:    r.AppendDepth,
		MaxCrawlDepth:     r.CrawlDepth,
		ProxyClient:       r.ProxyClient,
	}
	// auto client selection: fasthttp for path spray, net/http for host spray
	if config.ClientType == ihttp.Auto {
		if config.Mod == pool.PathSpray {
			config.ClientType = ihttp.FAST
		} else if config.Mod == pool.HostSpray {
			config.ClientType = ihttp.STANDARD
		}
	}
	for k, v := range r.Headers {
		config.Headers.Set(k, v)
	}
	// sensible defaults when the user did not supply these headers
	if config.Headers.Get("User-Agent") == "" {
		config.Headers.Set("User-Agent", pkg.DefaultUserAgent)
	}
	if config.Headers.Get("Accept") == "" {
		config.Headers.Set("Accept", "*/*")
	}
	return config
}
// AppendFunction registers an extra word-transformation function applied
// to every generated word.
func (r *Runner) AppendFunction(fn func(string) []string) {
	r.Fns = append(r.Fns, fn)
}
// Prepare wires the task execution machinery and runs it to completion:
// in check mode a single CheckPool consumes all targets as one wordlist;
// otherwise a BrutePool is spun up per task (supporting resume from a
// saved Statistor), then the matching run loop is entered.
func (r *Runner) Prepare(ctx context.Context) error {
	if r.bruteMod {
		r.IsCheck = false
	}
	r.OutputHandler()
	var err error
	if r.IsCheck {
		// check-only mode, similar to httpx: one pool for all targets
		r.Pools, err = ants.NewPoolWithFunc(1, func(i interface{}) {
			config := r.PrepareConfig()
			checkPool, err := pool.NewCheckPool(ctx, config)
			if err != nil {
				logs.Log.Error(err.Error())
				checkPool.Cancel()
				r.poolwg.Done()
				return
			}
			// bridge the task generator into a word channel
			ch := make(chan string)
			go func() {
				for t := range r.Tasks.tasks {
					ch <- t.baseUrl
				}
				close(ch)
			}()
			checkPool.Worder = words.NewWorderWithChan(ch)
			checkPool.Worder.Fns = r.Fns
			checkPool.Bar = pkg.NewBar("check", r.Count-r.Offset, checkPool.Statistor, r.Progress)
			checkPool.Run(ctx, r.Offset, r.Count)
			r.poolwg.Done()
		})
		r.RunWithCheck(ctx)
	} else {
		// full brute-force mode: forward generated tasks to the task channel
		go func() {
			for t := range r.Tasks.tasks {
				r.taskCh <- t
			}
			close(r.taskCh)
		}()
		if r.Count > 0 {
			r.newBar(r.Count)
		}
		r.Pools, err = ants.NewPoolWithFunc(r.PoolSize, func(i interface{}) {
			t := i.(*Task)
			if t.origin != nil && t.origin.End == t.origin.Total {
				// resumed task that already finished: just persist its stat
				r.saveStat(t.origin.Json())
				r.Done()
				return
			}
			config := r.PrepareConfig()
			config.BaseURL = t.baseUrl
			brutePool, err := pool.NewBrutePool(ctx, config)
			if err != nil {
				logs.Log.Error(err.Error())
				brutePool.Cancel()
				r.Done()
				return
			}
			if t.origin != nil && len(r.Wordlist) == 0 {
				// resuming from a checkpoint: restore word/dict/rule state,
				// but command-line arguments take precedence
				brutePool.Statistor = pkg.NewStatistorFromStat(t.origin.Statistor)
				brutePool.Worder, err = t.origin.InitWorder(r.Fns)
				if err != nil {
					logs.Log.Error(err.Error())
					r.Done()
					return
				}
				brutePool.Statistor.Total = t.origin.sum
			} else {
				brutePool.Statistor = pkg.NewStatistor(t.baseUrl)
				brutePool.Worder = words.NewWorderWithList(r.Wordlist)
				brutePool.Worder.Fns = r.Fns
				brutePool.Worder.Rules = r.Rules.Expressions
			}
			// cap the word budget at --limit when it is set and smaller
			var limit int
			if brutePool.Statistor.Total > r.Limit && r.Limit != 0 {
				limit = r.Limit
			} else {
				limit = brutePool.Statistor.Total
			}
			brutePool.Bar = pkg.NewBar(config.BaseURL, limit-brutePool.Statistor.Offset, brutePool.Statistor, r.Progress)
			logs.Log.Importantf("[pool] task: %s, total %d words, %d threads, proxy: %v",
				brutePool.BaseURL, limit-brutePool.Statistor.Offset, brutePool.Thread, r.Proxies)
			err = brutePool.Init()
			if err != nil {
				brutePool.Statistor.Error = err.Error()
				if !r.Force {
					// without --force a failed init closes the pool
					brutePool.Bar.Close()
					brutePool.Close()
					r.PrintStat(brutePool)
					r.Done()
					return
				}
			}
			brutePool.Run(brutePool.Statistor.Offset, limit)
			if brutePool.IsFailed && len(brutePool.FailedBaselines) > 0 {
				// aborted on accumulated errors: point End at the first
				// failure so resume does not skip a large range of targets
				brutePool.Statistor.End = brutePool.FailedBaselines[0].Number
			}
			r.PrintStat(brutePool)
			r.Done()
		})
		r.Run(ctx)
	}
	if err != nil {
		return err
	}
	return nil
}
// Run dispatches queued tasks to the pool until the task channel drains or
// the deadline hits; tasks not yet started at the deadline are persisted
// to the stat file so they can be resumed.
func (r *Runner) Run(ctx context.Context) {
Loop:
	for {
		select {
		case <-ctx.Done():
			// past the deadline: record every not-yet-started task
			if len(r.taskCh) > 0 {
				for t := range r.taskCh {
					stat := pkg.NewStatistor(t.baseUrl)
					r.saveStat(stat.Json())
				}
			}
			if r.StatFile != nil {
				logs.Log.Importantf("already save all stat to %s", r.StatFile.Filename)
			}
			break Loop
		case t, ok := <-r.taskCh:
			if !ok {
				break Loop
			}
			r.AddPool(t)
		}
	}
	if r.bar != nil {
		r.bar.Wait()
	}
	r.poolwg.Wait()
	r.outwg.Wait()
}
// RunWithCheck drives the single check-only pool: it submits one unit of
// work, then blocks until either the pool finishes or ctx expires, and
// finally waits for pending output.
func (r *Runner) RunWithCheck(ctx context.Context) {
	stopCh := make(chan struct{})
	r.poolwg.Add(1)
	if err := r.Pools.Invoke(struct{}{}); err != nil {
		return
	}
	// signal stopCh once the pool's work is fully done
	go func() {
		r.poolwg.Wait()
		stopCh <- struct{}{}
	}()
	select {
	case <-ctx.Done():
		logs.Log.Error("cancel with deadline")
	case <-stopCh:
	}
	r.outwg.Wait()
}
// AddRecursive schedules a recursive child scan rooted at the baseline's
// URL, carrying its depth and a fresh resume origin.
func (r *Runner) AddRecursive(bl *baseline.Baseline) {
	r.AddPool(&Task{
		baseUrl: bl.UrlString,
		depth:   bl.RecuDepth + 1,
		origin:  NewOrigin(pkg.NewStatistor(bl.UrlString)),
	})
}
// AddPool submits a task to the worker pool, deduplicating by base URL so
// the same target is never sprayed twice.
func (r *Runner) AddPool(task *Task) {
	if _, dup := r.PoolName[task.baseUrl]; dup {
		logs.Log.Importantf("already added pool, skip %s", task.baseUrl)
		return
	}
	task.depth++
	r.poolwg.Add(1)
	r.Pools.Invoke(task)
}
// newBar attaches the overall progress bar (total tasks) to r.Progress.
// No-op when progress display is disabled.
func (r *Runner) newBar(total int) {
	if r.Progress == nil {
		return
	}
	prompt := "total progressive:"
	// left side: label plus a counter that flips to "done!" on completion
	prependDecorators := mpb.PrependDecorators(
		decor.Name(prompt, decor.WC{W: len(prompt) + 1, C: decor.DindentRight}),
		decor.OnComplete(decor.Counters(0, "% d/% d"), " done!"),
	)
	// right side: elapsed wall time
	appendDecorators := mpb.AppendDecorators(
		decor.Elapsed(decor.ET_STYLE_GO, decor.WC{W: 4}),
	)
	r.bar = r.Progress.AddBar(int64(total),
		mpb.BarFillerClearOnComplete(), // clear the filler once complete
		prependDecorators,
		appendDecorators,
	)
}
// Done marks one task finished: ticks the global bar (if any) and releases
// the pool wait group.
func (r *Runner) Done() {
	defer r.poolwg.Done()
	if bar := r.bar; bar != nil {
		bar.Increment()
	}
}
// PrintStat logs a pool's final statistics (colored or plain), with verbose
// count/source breakdowns only for error-free runs, and persists the stat.
func (r *Runner) PrintStat(pool *pool.BrutePool) {
	stat := pool.Statistor
	if r.Color {
		logs.Log.Important(stat.ColorString())
		if stat.Error == "" {
			logs.Log.Log(pkg.LogVerbose, stat.ColorCountString())
			logs.Log.Log(pkg.LogVerbose, stat.ColorSourceString())
		}
	} else {
		logs.Log.Important(stat.String())
		if stat.Error == "" {
			logs.Log.Log(pkg.LogVerbose, stat.CountString())
			logs.Log.Log(pkg.LogVerbose, stat.SourceString())
		}
	}
	r.saveStat(stat.Json())
}
// saveStat appends one JSON stat record to the stat file and syncs it.
// Silently does nothing when no stat file is configured.
func (r *Runner) saveStat(content string) {
	if r.StatFile == nil {
		return
	}
	r.StatFile.SafeWrite(content)
	r.StatFile.SafeSync()
}
// Output renders a baseline to the console (valid results always, fuzzy
// results only with -fuzzy) and to the output file in the configured format
// (json / csv / full / probe list).
func (r *Runner) Output(bl *baseline.Baseline) {
	var rendered string
	switch {
	case r.Option.Json:
		rendered = bl.ToJson()
	case len(r.Probes) > 0:
		rendered = bl.ProbeOutput(r.Probes)
	case r.Color:
		rendered = bl.ColorString()
	default:
		rendered = bl.String()
	}

	switch {
	case bl.IsValid:
		logs.Log.Console(rendered + "\n")
	case r.Fuzzy && bl.IsFuzzy:
		logs.Log.Console("[fuzzy] " + rendered + "\n")
	}

	if r.OutputFile == nil {
		return
	}
	switch r.FileOutput {
	case "json":
		r.OutputFile.SafeWrite(bl.ToJson() + "\n")
	case "csv":
		r.OutputFile.SafeWrite(bl.ToCSV())
	case "full":
		r.OutputFile.SafeWrite(bl.String() + "\n")
	default:
		// anything else is treated as a comma-separated probe list
		r.OutputFile.SafeWrite(bl.ProbeOutput(strings.Split(r.FileOutput, ",")) + "\n")
	}
	r.OutputFile.SafeSync()
}
// OutputHandler starts the two consumer goroutines: one draining outputCh
// (dump file, console/file output, recursion) and one draining fuzzyCh.
// Both exit when their channel is closed.
func (r *Runner) OutputHandler() {
	go func() {
		for bl := range r.outputCh {
			// dump every baseline, valid or not, when a dump file is set
			if r.DumpFile != nil {
				r.DumpFile.SafeWrite(bl.ToJson() + "\n")
				r.DumpFile.SafeSync()
			}
			if bl.IsValid {
				r.Output(bl)
				if bl.Recu {
					// valid hit flagged for recursion spawns a child task
					r.AddRecursive(bl)
				}
			} else if r.Color {
				logs.Log.Debug(bl.ColorString())
			} else {
				logs.Log.Debug(bl.String())
			}
			r.outwg.Done()
		}
	}()
	go func() {
		for bl := range r.fuzzyCh {
			r.Output(bl)
			r.outwg.Done()
		}
	}()
}

73
core/task.go Normal file
View File

@ -0,0 +1,73 @@
package core
import (
"fmt"
"github.com/chainreactors/logs"
"github.com/chainreactors/utils"
"github.com/chainreactors/words/rule"
"net/url"
)
// Task is a single spray target: a base URL plus its recursion depth and an
// optional Origin carrying statistics to resume an interrupted run.
type Task struct {
	baseUrl string            // target base URL for this task
	depth   int               // recursion depth (incremented by AddPool)
	rule    []rule.Expression // per-task rule expressions; NOTE(review): never assigned in this view
	origin  *Origin           // resume data from a previous run; nil for fresh tasks
}
// NewTaskGenerator builds a TaskGenerator for the given port specification
// (parsed by utils.ParsePortsString). A forwarding goroutine copies tasks
// from the public In channel to the internal tasks channel and closes
// tasks once In is closed.
func NewTaskGenerator(port string) *TaskGenerator {
	gen := &TaskGenerator{
		ports: utils.ParsePortsString(port),
		tasks: make(chan *Task),
		In:    make(chan *Task),
	}
	go func() {
		for task := range gen.In {
			gen.tasks <- task
		}
		// In closed: propagate the close downstream
		close(gen.tasks)
	}()
	return gen
}
// TaskGenerator expands input URLs (optionally across a port list) into
// Tasks. Producers write to In; consumers read from tasks.
type TaskGenerator struct {
	Name  string
	ports []string   // ports to fan each target out over; empty = use URL as-is
	tasks chan *Task // consumer side; closed after In is closed
	In    chan *Task // producer side
}
// Run parses one input URL, defaults a scheme when missing (https for
// port 443, else http), and emits either the URL itself (no port list) or
// one Task per configured port.
//
// Fix: the per-port format string "%s://%s:%s/%s" previously re-appended
// parsed.Path verbatim; for absolute paths ("/admin") that produced a
// double slash ("http://host:80//admin"). The leading slash is now trimmed
// before joining. Scheme-less inputs ("example.com") parse with an empty
// Host and the authority in Path, which the first branch handles.
func (gen *TaskGenerator) Run(baseurl string) {
	parsed, err := url.Parse(baseurl)
	if err != nil {
		logs.Log.Warnf("parse %s, %s ", baseurl, err.Error())
		return
	}
	if parsed.Scheme == "" {
		if parsed.Port() == "443" {
			parsed.Scheme = "https"
		} else {
			parsed.Scheme = "http"
		}
	}
	if len(gen.ports) == 0 {
		gen.In <- &Task{baseUrl: parsed.String()}
		return
	}
	// normalize the path once; avoids "//" when joined below
	path := parsed.Path
	if len(path) > 0 && path[0] == '/' {
		path = path[1:]
	}
	for _, p := range gen.ports {
		if parsed.Host == "" {
			// scheme-less input: the host text landed in Path
			gen.In <- &Task{baseUrl: fmt.Sprintf("%s://%s:%s", parsed.Scheme, path, p)}
		} else {
			gen.In <- &Task{baseUrl: fmt.Sprintf("%s://%s:%s/%s", parsed.Scheme, parsed.Host, p, path)}
		}
	}
}
// Close shuts the generator down from the producer side.
//
// Fix: this previously called close(gen.tasks) directly, racing the
// forwarding goroutine started in NewTaskGenerator, which both sends on
// gen.tasks and closes it itself when In is closed — risking a send on a
// closed channel or a double close (both panic). Closing In instead lets
// that goroutine drain any queued tasks and close gen.tasks exactly once.
func (gen *TaskGenerator) Close() {
	close(gen.In)
}

37
core/types.go Normal file
View File

@ -0,0 +1,37 @@
package core
import (
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/words"
)
// NewOrigin wraps a Statistor into an Origin used to resume a task.
func NewOrigin(stat *pkg.Statistor) *Origin {
	origin := &Origin{
		Statistor: stat,
	}
	return origin
}
// Origin carries the statistics of a previous (interrupted) run plus the
// recomputed total word count, used to resume a task from a breakpoint.
type Origin struct {
	*pkg.Statistor
	sum int // total candidate count: len(words) * max(1, len(rules)), set by InitWorder
}
// InitWorder rebuilds the word generator recorded in the origin's
// statistics: reloads the wordlist and rule files, applies the given
// mutation functions, and recomputes o.sum (words x rules, or just words
// when no rules are configured).
func (o *Origin) InitWorder(fns []words.WordFunc) (*words.Worder, error) {
	wl, err := pkg.LoadWordlist(o.Word, o.Dictionaries)
	if err != nil {
		return nil, err
	}
	rules, err := pkg.LoadRuleWithFiles(o.RuleFiles, o.RuleFilter)
	if err != nil {
		return nil, err
	}
	worder := words.NewWorderWithList(wl)
	worder.Fns = fns
	worder.Rules = rules
	if n := len(rules); n > 0 {
		o.sum = n * len(wl)
	} else {
		o.sum = len(wl)
	}
	return worder, nil
}

75
go.mod
View File

@ -1,34 +1,67 @@
module github.com/chainreactors/spray
go 1.17
go 1.20
require (
github.com/chainreactors/files v0.2.4
github.com/chainreactors/go-metrics v0.0.0-20220926021830-24787b7a10f8
github.com/chainreactors/gogo/v2 v2.9.5-0.20221110124606-bb8c89742d4d
github.com/chainreactors/logs v0.6.2
github.com/chainreactors/parsers v0.2.7
github.com/chainreactors/words v0.1.1
)
require (
github.com/chainreactors/ipcs v0.0.13
github.com/go-dedup/simhash v0.0.0-20170904020510-9ecaca7b509c
github.com/gosuri/uiprogress v0.0.1
github.com/chainreactors/files v0.0.0-20240716182835-7884ee1e77f0
github.com/chainreactors/fingers v0.0.0-20240716172449-2fc3147b9c2a
github.com/chainreactors/logs v0.0.0-20241115105204-6132e39f5261
github.com/chainreactors/parsers v0.0.0-20250605044448-6bc270f12c0e
github.com/chainreactors/proxyclient v1.0.3-0.20250219180226-a25a0c9e6ac8
github.com/chainreactors/utils v0.0.0-20240805193040-ff3b97aa3c3f
github.com/chainreactors/words v0.0.0-20240910083848-19a289e8984b
github.com/charmbracelet/lipgloss v0.13.0
github.com/expr-lang/expr v1.16.9
github.com/gookit/config/v2 v2.2.5
github.com/jessevdk/go-flags v1.5.0
github.com/panjf2000/ants/v2 v2.6.0
github.com/valyala/fasthttp v1.40.0
github.com/panjf2000/ants/v2 v2.9.1
github.com/valyala/fasthttp v1.53.0
github.com/vbauerster/mpb/v8 v8.7.3
golang.org/x/time v0.5.0
sigs.k8s.io/yaml v1.4.0
)
require (
github.com/andybalholm/brotli v1.0.4 // indirect
dario.cat/mergo v1.0.0 // indirect
github.com/VividCortex/ewma v1.2.0 // indirect
github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d // indirect
github.com/andybalholm/brotli v1.1.0 // indirect
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
github.com/charmbracelet/x/ansi v0.1.4 // indirect
github.com/facebookincubator/nvdtools v0.1.5 // indirect
github.com/fatih/color v1.17.0 // indirect
github.com/go-dedup/megophone v0.0.0-20170830025436-f01be21026f5 // indirect
github.com/go-dedup/simhash v0.0.0-20170904020510-9ecaca7b509c // indirect
github.com/go-dedup/text v0.0.0-20170907015346-8bb1b95e3cb7 // indirect
github.com/gosuri/uilive v0.0.4 // indirect
github.com/klauspost/compress v1.15.10 // indirect
github.com/mattn/go-isatty v0.0.16 // indirect
github.com/twmb/murmur3 v1.1.6 // indirect
github.com/go-playground/validator/v10 v10.20.0 // indirect
github.com/goccy/go-yaml v1.11.3 // indirect
github.com/gookit/color v1.5.4 // indirect
github.com/gookit/goutil v0.6.15 // indirect
github.com/klauspost/compress v1.17.8 // indirect
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mattn/go-runewidth v0.0.15 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/muesli/termenv v0.15.2 // indirect
github.com/pkg/errors v0.9.1 // indirect
github.com/riobard/go-bloom v0.0.0-20200614022211-cdc8013cb5b3 // indirect
github.com/rivo/uniseg v0.4.7 // indirect
github.com/rogpeppe/go-internal v1.12.0 // indirect
github.com/shadowsocks/go-shadowsocks2 v0.1.5 // indirect
github.com/twmb/murmur3 v1.1.8 // indirect
github.com/valyala/bytebufferpool v1.0.0 // indirect
golang.org/x/sys v0.2.0 // indirect
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
golang.org/x/crypto v0.33.0 // indirect
golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842 // indirect
golang.org/x/net v0.25.0 // indirect
golang.org/x/sync v0.11.0 // indirect
golang.org/x/sys v0.30.0 // indirect
golang.org/x/term v0.29.0 // indirect
golang.org/x/text v0.22.0 // indirect
golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 // indirect
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)
replace github.com/chainreactors/proxyclient => github.com/chainreactors/proxyclient v1.0.3

1006
go.sum

File diff suppressed because it is too large Load Diff

View File

@ -1,122 +0,0 @@
package internal
import (
"context"
"github.com/chainreactors/logs"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/spray/pkg/ihttp"
"github.com/chainreactors/words"
"github.com/panjf2000/ants/v2"
"github.com/valyala/fasthttp"
"sync"
)
// NewCheckPool builds a pool for check-only mode: every "word" is itself a
// target URL and is requested exactly once, with no baseline comparison.
// Results (valid or failed) are pushed to config.OutputCh.
func NewCheckPool(ctx context.Context, config *pkg.Config) (*CheckPool, error) {
	pctx, cancel := context.WithCancel(ctx)
	pool := &CheckPool{
		Config:      config,
		ctx:         pctx,
		cancel:      cancel,
		client:      ihttp.NewClient(config.Thread, 2, config.ClientType),
		worder:      words.NewWorder(config.Wordlist),
		wg:          sync.WaitGroup{},
		reqCount:    1, // counters start at 1, matching Pool's convention
		failedCount: 1,
	}
	// request builder depends on spray mode: path-based or Host-header-based
	switch config.Mod {
	case pkg.PathSpray:
		pool.genReq = func(s string) (*ihttp.Request, error) {
			return ihttp.BuildPathRequest(pool.ClientType, s, "")
		}
	case pkg.HostSpray:
		pool.genReq = func(s string) (*ihttp.Request, error) {
			return ihttp.BuildHostRequest(pool.ClientType, s, "")
		}
	}
	p, _ := ants.NewPoolWithFunc(config.Thread, func(i interface{}) {
		unit := i.(*Unit)
		req, err := pool.genReq(unit.path)
		if err != nil {
			// NOTE(review): on error req may be nil yet is still used below — confirm
			logs.Log.Error(err.Error())
		}
		var bl *pkg.Baseline
		resp, reqerr := pool.client.Do(pctx, req)
		if pool.ClientType == ihttp.FAST {
			// fasthttp objects are pooled; must be released after use
			defer fasthttp.ReleaseResponse(resp.FastResponse)
			defer fasthttp.ReleaseRequest(req.FastRequest)
		}
		if reqerr != nil && reqerr != fasthttp.ErrBodyTooLarge {
			pool.failedCount++
			bl = &pkg.Baseline{Url: pool.BaseURL + unit.path, IsValid: false, Err: reqerr.Error(), Reason: ErrRequestFailed.Error()}
		} else {
			bl = pkg.NewBaseline(req.URI(), req.Host(), resp)
			bl.Collect()
		}
		pool.OutputCh <- bl
		pool.reqCount++
		pool.wg.Done()
		pool.bar.Done()
	})
	pool.pool = p
	return pool, nil
}
// CheckPool is the check-only variant of Pool: it probes each target once
// without baselines or comparison state.
type CheckPool struct {
	*pkg.Config
	client      *ihttp.Client
	pool        *ants.PoolWithFunc
	bar         *pkg.Bar
	ctx         context.Context
	cancel      context.CancelFunc
	reqCount    int // requests issued (starts at 1)
	failedCount int // failed requests (starts at 1)
	genReq      func(s string) (*ihttp.Request, error)
	worder      *words.Worder
	wg          sync.WaitGroup
}
// Close finalizes the pool's progress bar; workers are expected to have
// drained already (Run waits on p.wg before calling this).
func (p *CheckPool) Close() {
	p.bar.Close()
}
// Run feeds words from the worder into the ants pool, honoring the
// offset/limit window and applying the mutation functions, until the word
// source is exhausted or either context is cancelled.
func (p *CheckPool) Run(ctx context.Context, offset, limit int) {
Loop:
	for {
		select {
		case u, ok := <-p.worder.C:
			if !ok {
				break Loop
			}
			// skip words below the offset; stop past the limit
			if p.reqCount < offset {
				p.reqCount++
				continue
			}
			if p.reqCount > limit {
				break Loop
			}
			for _, fn := range p.Fns {
				u = fn(u)
			}
			// mutation functions may blank out a word to drop it
			if u == "" {
				continue
			}
			p.wg.Add(1)
			_ = p.pool.Invoke(newUnit(u, WordSource))
		case <-ctx.Done():
			break Loop
		case <-p.ctx.Done():
			break Loop
		}
	}
	p.wg.Wait()
	p.Close()
}

View File

@ -1,326 +0,0 @@
package internal
import (
"fmt"
"github.com/chainreactors/logs"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/words/mask"
"github.com/gosuri/uiprogress"
"io/ioutil"
"os"
"strconv"
"strings"
)
// Option aggregates every command-line option group parsed by go-flags.
type Option struct {
	InputOptions
	OutputOptions
	RequestOptions
	ModeOptions
	MiscOptions
}
// InputOptions selects targets and controls wordlist generation/mutation.
type InputOptions struct {
	URL               string            `short:"u" long:"url" description:"String, input baseurl (separated by commas), e.g.: http://google.com, http://baidu.com"`
	URLFile           string            `short:"l" long:"list" description:"File, input filename"`
	Offset            int               `long:"offset" description:"Int, wordlist offset"`
	Limit             int               `long:"limit" description:"Int, wordlist limit, start with offset. e.g.: --offset 1000 --limit 100"`
	Dictionaries      []string          `short:"d" long:"dict" description:"Files, dict files, e.g.: -d 1.txt -d 2.txt"`
	Word              string            `short:"w" long:"word" description:"String, word generate dsl, e.g.: -w test{?ld#4}"`
	Extensions        string            `short:"e" long:"extension" description:"String, add extensions (separated by commas), e.g.: -e jsp,jspx"`
	ExcludeExtensions string            `long:"exclude-extension" description:"String, exclude extensions (separated by commas), e.g.: --exclude-extension jsp,jspx"`
	RemoveExtensions  string            `long:"remove-extension" description:"String, remove extensions (separated by commas), e.g.: --remove-extension jsp,jspx"`
	Uppercase         bool              `short:"U" long:"uppercase" description:"Bool, upper wordlist, e.g.: --uppercase"`
	Lowercase         bool              `short:"L" long:"lowercase" description:"Bool, lower wordlist, e.g.: --lowercase"`
	Prefixes          []string          `long:"prefix" description:"Strings, add prefix, e.g.: --prefix aaa --prefix bbb"`
	Suffixes          []string          `long:"suffix" description:"Strings, add suffix, e.g.: --suffix aaa --suffix bbb"`
	Replaces          map[string]string `long:"replace" description:"Strings, replace string, e.g.: --replace aaa:bbb --replace ccc:ddd"`
}
// OutputOptions controls result matching/filtering and output destinations.
type OutputOptions struct {
	Matches     map[string]string `long:"match" description:"String, "`
	Filters     map[string]string `long:"filter" description:"String, "`
	Extracts    []string          `long:"extract" description:"String, "`
	OutputFile  string            `short:"f" description:"String, output filename"`
	FuzzyFile   string            `long:"fuzzy-file" description:"String, fuzzy output filename"`
	Fuzzy       bool              `long:"fuzzy" description:"String, open fuzzy output"`
	OutputProbe string            `long:"probe" description:"String, output format"`
}
// RequestOptions tunes the HTTP requests sent to targets.
type RequestOptions struct {
	Headers         []string `long:"header"`
	Method          string   `long:"method"`
	Cookie          string   `long:"cookie"`
	SimhashDistance int      `long:"distance" default:"5"` // simhash threshold for fuzzy page comparison
}
// ModeOptions governs check/error handling behavior and status-code lists.
type ModeOptions struct {
	Force          bool   `long:"force"`      // disable auto-abort on accumulated errors
	CheckOnly      bool   `long:"check-only"` // probe each target once instead of spraying
	CheckPeriod    int    `long:"check-period" default:"100"` // issue a health check every N requests
	ErrPeriod      int    `long:"error-period" default:"10"`  // re-check every N failures
	BreakThreshold int    `long:"error-threshold" default:"20"` // abort task after this many failures
	BlackStatus    string `long:"black-status" default:"default"`
	WhiteStatus    string `long:"white-status" `
}
// MiscOptions holds timing, concurrency, logging and client selection.
type MiscOptions struct {
	Deadline int    `long:"deadline" default:"999999" description:"Int, deadline (seconds)"` // todo overall timeout, to fit serverless deadlines
	Timeout  int    `long:"timeout" default:"2" description:"Int, timeout with request (seconds)"`
	PoolSize int    `short:"p" long:"pool" default:"5" description:"Int, Pool size"`
	Threads  int    `short:"t" long:"thread" default:"20" description:"Int, number of threads per pool (seconds)"`
	Debug    bool   `long:"debug" description:"Bool, output debug info"`
	Quiet    bool   `short:"q" long:"quiet" description:"Bool, Quiet"`
	Mod      string `short:"m" long:"mod" default:"path" choice:"path" choice:"host" description:"String, path/host spray"`
	Client   string `short:"c" long:"client" default:"auto" choice:"fast" choice:"standard" choice:"auto" description:"String, Client type"`
}
// PrepareRunner validates options and assembles a Runner: initializes global
// state (log level, status lists, simhash distance), loads target URLs
// (flag, file, or stdin), builds the wordlist via the mask DSL, registers
// word mutation functions, and parses request headers.
//
// Fixes:
//   - --remove-extension previously split opt.ExcludeExtensions (copy-paste
//     bug), so the removal list silently came from the wrong flag.
//   - header parsing used h[i+2:], which panics on "Key:" and drops the
//     first value byte of "Key:value"; it now trims whitespace after ":".
func (opt *Option) PrepareRunner() (*Runner, error) {
	ok := opt.Validate()
	if !ok {
		return nil, fmt.Errorf("validate failed")
	}
	var err error
	r := &Runner{
		Progress:       uiprogress.New(),
		Threads:        opt.Threads,
		PoolSize:       opt.PoolSize,
		Mod:            opt.Mod,
		Timeout:        opt.Timeout,
		Deadline:       opt.Deadline,
		Offset:         opt.Offset,
		Limit:          opt.Limit,
		urlCh:          make(chan string),
		OutputCh:       make(chan *pkg.Baseline, 100),
		FuzzyCh:        make(chan *pkg.Baseline, 100),
		Fuzzy:          opt.Fuzzy,
		Force:          opt.Force,
		CheckOnly:      opt.CheckOnly,
		CheckPeriod:    opt.CheckPeriod,
		ErrPeriod:      opt.ErrPeriod,
		BreakThreshold: opt.BreakThreshold,
	}
	err = pkg.LoadTemplates()
	if err != nil {
		return nil, err
	}
	// global state initialization
	if opt.Debug {
		logs.Log.Level = logs.Debug
	}
	if !opt.Quiet {
		r.Progress.Start()
		logs.Log.Writer = r.Progress.Bypass()
	}
	if opt.SimhashDistance != 0 {
		pkg.Distance = uint8(opt.SimhashDistance)
	}
	if opt.Force {
		// force mode disables health checks and the auto-exit on accumulated errors
		r.BreakThreshold = max
		r.CheckPeriod = max
		r.ErrPeriod = max
	}
	if opt.BlackStatus != "default" {
		for _, s := range strings.Split(opt.BlackStatus, ",") {
			si, err := strconv.Atoi(s)
			if err != nil {
				return nil, err
			}
			BlackStatus = append(BlackStatus, si)
		}
	} else {
		BlackStatus = []int{400, 404, 410}
	}
	if opt.WhiteStatus != "" {
		for _, s := range strings.Split(opt.WhiteStatus, ",") {
			si, err := strconv.Atoi(s)
			if err != nil {
				return nil, err
			}
			WhiteStatus = append(WhiteStatus, si)
		}
	}
	// prepare urls: -u flag wins, then -l file, then stdin
	var urls []string
	var file *os.File
	urlfrom := opt.URLFile
	if opt.URL != "" {
		urls = append(urls, opt.URL)
		urlfrom = "cmd"
	} else if opt.URLFile != "" {
		file, err = os.Open(opt.URLFile)
		if err != nil {
			return nil, err
		}
	} else if pkg.HasStdin() {
		file = os.Stdin
		urlfrom = "stdin"
	}
	if file != nil {
		content, err := ioutil.ReadAll(file)
		if err != nil {
			return nil, err
		}
		urls = strings.Split(string(content), "\n")
	}
	r.URLList = urls
	logs.Log.Importantf("load %d urls from %s", len(urls), urlfrom)
	// prepare wordlist: load dict files, then build the mask DSL expression
	dicts := make([][]string, len(opt.Dictionaries))
	for i, f := range opt.Dictionaries {
		dicts[i], err = loadFileToSlice(f)
		if err != nil {
			return nil, err
		}
		logs.Log.Importantf("load %d word from %s", len(dicts[i]), f)
	}
	if opt.Word == "" {
		// default word expression references every loaded dict
		opt.Word = "{?"
		for i := range dicts {
			opt.Word += strconv.Itoa(i)
		}
		opt.Word += "}"
	}
	if opt.Suffixes != nil {
		dicts = append(dicts, opt.Suffixes)
		opt.Word += fmt.Sprintf("{?%d}", len(dicts)-1)
	}
	if opt.Prefixes != nil {
		dicts = append(dicts, opt.Prefixes)
		opt.Word = fmt.Sprintf("{?%d}", len(dicts)-1) + opt.Word
	}
	if opt.Extensions != "" {
		dicts = append(dicts, strings.Split(opt.Extensions, ","))
		opt.Word += fmt.Sprintf("{?%d}", len(dicts)-1)
	}
	mask.CustomWords = dicts
	r.Wordlist, err = mask.Run(opt.Word)
	if err != nil {
		return nil, err
	}
	if r.Limit == 0 {
		if r.CheckOnly {
			r.Limit = len(r.URLList)
		} else {
			r.Limit = len(r.Wordlist)
		}
	} else {
		r.Limit = r.Offset + opt.Limit
	}
	// word mutation functions, applied to each word in order
	if opt.Uppercase {
		r.Fns = append(r.Fns, strings.ToUpper)
	}
	if opt.Lowercase {
		r.Fns = append(r.Fns, strings.ToLower)
	}
	if opt.RemoveExtensions != "" {
		rexts := strings.Split(opt.RemoveExtensions, ",")
		r.Fns = append(r.Fns, func(s string) string {
			if ext := parseExtension(s); StringsContains(rexts, ext) {
				return strings.TrimSuffix(s, "."+ext)
			}
			return s
		})
	}
	if opt.ExcludeExtensions != "" {
		exexts := strings.Split(opt.ExcludeExtensions, ",")
		r.Fns = append(r.Fns, func(s string) string {
			if ext := parseExtension(s); StringsContains(exexts, ext) {
				// returning "" drops the word entirely
				return ""
			}
			return s
		})
	}
	if len(opt.Replaces) > 0 {
		r.Fns = append(r.Fns, func(s string) string {
			for k, v := range opt.Replaces {
				s = strings.Replace(s, k, v, -1)
			}
			return s
		})
	}
	// prepare headers ("Key: value" pairs)
	for _, h := range opt.Headers {
		i := strings.Index(h, ":")
		if i == -1 {
			logs.Log.Warn("invalid header")
		} else {
			r.Headers.Add(h[:i], strings.TrimSpace(h[i+1:]))
		}
	}
	if opt.OutputProbe != "" {
		r.Probes = strings.Split(opt.OutputProbe, ",")
	}
	return r, nil
}
// Validate rejects mutually exclusive option combinations; returns false
// (after logging) when -U and -L are both set.
func (opt *Option) Validate() bool {
	if opt.Uppercase && opt.Lowercase {
		logs.Log.Error("Cannot set -U and -L at the same time")
		return false
	}
	return true
}
// loadFileToSlice reads filename and returns its lines with surrounding
// whitespace stripped, normalizing Windows CRLF vs Unix LF endings.
func loadFileToSlice(filename string) ([]string, error) {
	content, err := ioutil.ReadFile(filename)
	if err != nil {
		return nil, err
	}
	lines := strings.Split(string(content), "\n")
	for i := range lines {
		lines[i] = strings.TrimSpace(lines[i])
	}
	return lines, nil
}
// parseExtension returns everything after the FIRST dot in s (so
// "a.tar.gz" yields "tar.gz"), or "" when s has no dot.
func parseExtension(s string) string {
	idx := strings.Index(s, ".")
	if idx < 0 {
		return ""
	}
	return s[idx+1:]
}
// StringsContains reports whether e occurs in s.
func StringsContains(s []string, e string) bool {
	for i := range s {
		if s[i] == e {
			return true
		}
	}
	return false
}
// IntsContains reports whether e occurs in s.
func IntsContains(s []int, e int) bool {
	for i := range s {
		if s[i] == e {
			return true
		}
	}
	return false
}

View File

@ -1,361 +0,0 @@
package internal
import (
"context"
"fmt"
"github.com/chainreactors/logs"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/spray/pkg/ihttp"
"github.com/chainreactors/words"
"github.com/panjf2000/ants/v2"
"github.com/valyala/fasthttp"
"sync"
"time"
)
var (
	// CheckRedirect, when set by Pool.Init, decides whether a 3xx Location
	// header marks a response as valid (different from the baseline redirect).
	CheckRedirect func(string) bool
)

// max is math.MaxInt32, used as an "effectively never" period/threshold in
// force mode. NOTE(review): shadows the Go 1.21 builtin max in this package.
var max = 2147483647
// NewPool builds a spray Pool: wires a mode-specific request generator and
// health-check callback, an ants worker function that issues each request
// and routes the resulting Baseline by its source (init / check / word),
// and a background goroutine that performs the expensive deep comparison
// asynchronously off tempCh.
func NewPool(ctx context.Context, config *pkg.Config) (*Pool, error) {
	pctx, cancel := context.WithCancel(ctx)
	pool := &Pool{
		Config:      config,
		ctx:         pctx,
		cancel:      cancel,
		client:      ihttp.NewClient(config.Thread, 2, config.ClientType),
		worder:      words.NewWorder(config.Wordlist),
		baselines:   make(map[int]*pkg.Baseline),
		tempCh:      make(chan *pkg.Baseline, config.Thread),
		wg:          sync.WaitGroup{},
		initwg:      sync.WaitGroup{},
		reqCount:    1,
		failedCount: 1,
	}
	switch config.Mod {
	case pkg.PathSpray:
		pool.genReq = func(s string) (*ihttp.Request, error) {
			return ihttp.BuildPathRequest(pool.ClientType, pool.BaseURL, s)
		}
		pool.check = func() {
			_ = pool.pool.Invoke(newUnit(pkg.RandPath(), CheckSource))
			if pool.failedCount > pool.BreakThreshold {
				// failure count exceeded the threshold: dump context and end the task
				pool.recover()
				pool.cancel()
			}
		}
	case pkg.HostSpray:
		pool.genReq = func(s string) (*ihttp.Request, error) {
			return ihttp.BuildHostRequest(pool.ClientType, pool.BaseURL, s)
		}
		pool.check = func() {
			_ = pool.pool.Invoke(newUnit(pkg.RandHost(), CheckSource))
			if pool.failedCount > pool.BreakThreshold {
				// failure count exceeded the threshold: dump context and end the task
				pool.recover()
				pool.cancel()
			}
		}
	}
	p, _ := ants.NewPoolWithFunc(config.Thread, func(i interface{}) {
		unit := i.(*Unit)
		req, err := pool.genReq(unit.path)
		if err != nil {
			// NOTE(review): req may be nil here yet is used below — confirm
			logs.Log.Error(err.Error())
		}
		var bl *pkg.Baseline
		resp, reqerr := pool.client.Do(pctx, req)
		if pool.ClientType == ihttp.FAST {
			// fasthttp request/response objects are pooled; release them
			defer fasthttp.ReleaseResponse(resp.FastResponse)
			defer fasthttp.ReleaseRequest(req.FastRequest)
		}
		if reqerr != nil && reqerr != fasthttp.ErrBodyTooLarge {
			pool.failedCount++
			bl = &pkg.Baseline{Url: pool.BaseURL + unit.path, IsValid: false, Err: reqerr.Error(), Reason: ErrRequestFailed.Error()}
			pool.failedBaselines = append(pool.failedBaselines, bl)
		} else {
			if err = pool.PreCompare(resp); unit.source != WordSource || err == nil {
				// pre-compare cheaply filters useless responses to save work
				bl = pkg.NewBaseline(req.URI(), req.Host(), resp)
				pool.addFuzzyBaseline(bl)
			} else {
				bl = pkg.NewInvalidBaseline(req.URI(), req.Host(), resp, err.Error())
			}
		}
		switch unit.source {
		case InitRandomSource:
			pool.base = bl
			pool.addFuzzyBaseline(bl)
			pool.initwg.Done()
			return
		case InitIndexSource:
			pool.index = bl
			pool.initwg.Done()
			return
		case CheckSource:
			if bl.Err != "" {
				logs.Log.Warnf("[check.error] maybe ip had banned by waf, break (%d/%d), error: %s", pool.failedCount, pool.BreakThreshold, bl.Err)
				pool.failedBaselines = append(pool.failedBaselines, bl)
			} else if i := pool.base.Compare(bl); i < 1 {
				if i == 0 {
					logs.Log.Debug("[check.fuzzy] maybe trigger risk control, " + bl.String())
				} else {
					logs.Log.Warn("[check.failed] maybe trigger risk control, " + bl.String())
				}
				pool.failedBaselines = append(pool.failedBaselines, bl)
			} else {
				pool.resetFailed() // later responses look healthy again: reset the failure count
				logs.Log.Debug("[check.pass] " + bl.String())
			}
		case WordSource:
			// deep comparison is expensive; hand it to the async worker
			pool.tempCh <- bl
			pool.reqCount++
			if pool.reqCount%pool.CheckPeriod == 0 {
				pool.reqCount++
				go pool.check()
			} else if pool.failedCount%pool.ErrPeriod == 0 {
				pool.failedCount++
				go pool.check()
			}
			pool.bar.Done()
		}
	})
	pool.pool = p
	go func() {
		// async comparer: custom hook wins over the default BaseCompare
		for bl := range pool.tempCh {
			if pool.customCompare != nil {
				if pool.customCompare(bl) {
					pool.OutputCh <- bl
				}
			} else {
				pool.BaseCompare(bl)
				pool.wg.Done()
			}
		}
		pool.analyzeDone = true
	}()
	return pool, nil
}
// Pool sprays one target: it owns the HTTP client, the worker pool, the
// reference baselines used for comparison, and the failure-tracking state
// that drives the periodic health checks.
type Pool struct {
	*pkg.Config
	client          *ihttp.Client
	pool            *ants.PoolWithFunc
	bar             *pkg.Bar
	ctx             context.Context
	cancel          context.CancelFunc
	tempCh          chan *pkg.Baseline // baselines awaiting deep comparison
	reqCount        int                // requests issued (starts at 1)
	failedCount     int                // failures since last healthy check (starts at 1)
	failedBaselines []*pkg.Baseline    // evidence kept for the abort report
	base            *pkg.Baseline      // baseline from a random path
	index           *pkg.Baseline      // baseline from "/"
	baselines       map[int]*pkg.Baseline // per-status-code fuzzy baselines
	analyzeDone     bool               // set when the tempCh consumer exits
	genReq          func(s string) (*ihttp.Request, error)
	check           func()
	customCompare   func(*pkg.Baseline) bool
	worder          *words.Worder
	wg              sync.WaitGroup
	initwg          sync.WaitGroup // used only for init; to be replaced with a lock
}
// Init establishes the two reference baselines (a random path and "/"),
// verifies the target is reachable, and installs the redirect filter when
// the random baseline itself redirects.
func (p *Pool) Init() error {
	p.initwg.Add(2)
	p.pool.Invoke(newUnit(pkg.RandPath(), InitRandomSource))
	p.pool.Invoke(newUnit("/", InitIndexSource))
	p.initwg.Wait()
	// todo analyze baselines further
	// basic reachability check: either baseline erroring aborts init
	if p.base.Err != "" {
		return fmt.Errorf(p.base.String())
	}
	if p.index.Err != "" {
		return fmt.Errorf(p.index.String())
	}
	p.base.Collect()
	p.index.Collect()
	logs.Log.Important("[baseline.random] " + p.base.String())
	logs.Log.Important("[baseline.index] " + p.index.String())
	if p.base.RedirectURL != "" {
		CheckRedirect = func(redirectURL string) bool {
			if redirectURL == p.base.RedirectURL {
				// the same RedirectURL as the baseline is considered invalid
				return false
			} else {
				// a 3xx whose target differs from the baseline's is valid data
				return true
			}
		}
	}
	return nil
}
// Run feeds words into the worker pool within [offset, limit], applying
// the mutation functions, until the word source is exhausted or either
// context is cancelled; it then waits for in-flight work and closes up.
func (p *Pool) Run(ctx context.Context, offset, limit int) {
Loop:
	for {
		select {
		case u, ok := <-p.worder.C:
			if !ok {
				break Loop
			}
			// honor the offset/limit window
			if p.reqCount < offset {
				p.reqCount++
				continue
			}
			if p.reqCount > limit {
				break Loop
			}
			for _, fn := range p.Fns {
				u = fn(u)
			}
			// a mutation function may blank the word to drop it
			if u == "" {
				continue
			}
			p.wg.Add(1)
			_ = p.pool.Invoke(newUnit(u, WordSource))
		case <-ctx.Done():
			break Loop
		case <-p.ctx.Done():
			break Loop
		}
	}
	p.wg.Wait()
	p.Close()
}
// PreCompare cheaply rejects a response by status code and redirect target
// before any expensive parsing. nil means "worth a full comparison".
func (p *Pool) PreCompare(resp *ihttp.Response) error {
	status := resp.StatusCode()
	if IntsContains(WhiteStatus, status) {
		// whitelisted status codes pass unconditionally
		return nil
	}
	if p.base != nil && p.base.Status != 200 && p.base.Status == status {
		// same non-200 status as the baseline: almost certainly noise
		return ErrSameStatus
	}
	if IntsContains(BlackStatus, status) {
		return ErrBadStatus
	}
	if IntsContains(WAFStatus, status) {
		return ErrWaf
	}
	if CheckRedirect != nil && !CheckRedirect(string(resp.GetHeader("Location"))) {
		return ErrRedirect
	}
	return nil
}
// BaseCompare performs the deep comparison of a baseline against the
// matching reference (per-status, base, or index), collects fingerprints,
// drops WAF/CDN hits, and routes the result to the output or fuzzy channel.
func (p *Pool) BaseCompare(bl *pkg.Baseline) {
	if !bl.IsValid {
		// already ruled invalid by pre-compare: forward straight to output
		p.OutputCh <- bl
		return
	}
	var status = -1
	base, ok := p.baselines[bl.Status] // pick the baseline matching this status code
	if !ok {
		if p.base.Status == bl.Status {
			// same status as the random baseline: compare against base
			ok = true
			base = p.base
		} else if p.index.Status == bl.Status {
			// same status as the index baseline: compare against index
			ok = true
			base = p.index
		}
	}
	if ok {
		if status = base.Compare(bl); status == 1 {
			// exact match with the baseline: not a real finding
			p.PutToInvalid(bl, ErrCompareFailed.Error())
			return
		}
	}
	bl.Collect()
	for _, f := range bl.Frameworks {
		if f.Tag == "waf/cdn" {
			p.PutToInvalid(bl, ErrWaf.Error())
			return
		}
	}
	if ok && status == 0 && base.FuzzyCompare(bl) {
		// near-identical to the baseline: record as fuzzy, not valid
		p.PutToInvalid(bl, ErrFuzzyCompareFailed.Error())
		p.PutToFuzzy(bl)
		return
	}
	p.OutputCh <- bl
}
// addFuzzyBaseline registers bl as the reference for its status code, but
// only for configured fuzzy statuses and only the first time that code is
// seen.
func (p *Pool) addFuzzyBaseline(bl *pkg.Baseline) {
	if _, ok := p.baselines[bl.Status]; !ok && IntsContains(FuzzyStatus, bl.Status) {
		bl.Collect()
		p.baselines[bl.Status] = bl
		logs.Log.Importantf("[baseline.%dinit] %s", bl.Status, bl.String())
	}
}
// PutToInvalid marks bl invalid with the given reason and forwards it to
// the output channel (invalid results are still reported downstream).
func (p *Pool) PutToInvalid(bl *pkg.Baseline, reason string) {
	bl.IsValid = false
	bl.Reason = reason
	p.OutputCh <- bl
}
// PutToFuzzy flags bl as a fuzzy (near-baseline) result and sends it to
// the dedicated fuzzy channel.
func (p *Pool) PutToFuzzy(bl *pkg.Baseline) {
	bl.IsFuzzy = true
	p.FuzzyCh <- bl
}
// resetFailed clears the failure streak after a passing health check
// (counter restarts at 1, matching the constructor's convention).
func (p *Pool) resetFailed() {
	p.failedCount = 1
	p.failedBaselines = nil
}
// recover reports the abort: logs the breakpoint request count and every
// failed baseline accumulated so far.
// NOTE(review): the name shadows the builtin recover() inside this method's
// scope; it does not handle panics.
func (p *Pool) recover() {
	logs.Log.Errorf("failed request exceeds the threshold , task will exit. Breakpoint %d", p.reqCount)
	logs.Log.Error("collecting failed check")
	for i, bl := range p.failedBaselines {
		logs.Log.Errorf("[failed.%d] %s", i, bl.String())
	}
}
// Close shuts the pool down: it closes tempCh so the async comparer drains
// and exits, waits for it to finish, then closes the progress bar.
//
// Fix: the original looped `for p.analyzeDone` BEFORE closing tempCh —
// since analyzeDone starts false and can only become true after tempCh is
// closed, the loop never waited at all (inverted condition, wrong order).
// Now tempCh is closed first and we wait until the consumer goroutine in
// NewPool sets analyzeDone.
func (p *Pool) Close() {
	close(p.tempCh)
	for !p.analyzeDone {
		time.Sleep(time.Duration(100) * time.Millisecond)
	}
	p.bar.Close()
}

View File

@ -1,266 +0,0 @@
package internal
import (
"context"
"fmt"
"github.com/chainreactors/files"
"github.com/chainreactors/logs"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/spray/pkg/ihttp"
"github.com/gosuri/uiprogress"
"github.com/panjf2000/ants/v2"
"net/http"
"strings"
"sync"
"time"
)
var (
	// WhiteStatus: codes always accepted; populated from --white-status.
	WhiteStatus []int
	// BlackStatus: codes always rejected; defaults to 400/404/410.
	BlackStatus []int
	// FuzzyStatus: codes that get a per-status fuzzy baseline.
	FuzzyStatus = []int{403, 500, 501, 502, 503}
	// WAFStatus: codes treated as WAF interference.
	WAFStatus = []int{493, 418}
)
// Runner coordinates a whole scan: target list, generated wordlist,
// per-target pools, output channels and global limits.
type Runner struct {
	urlCh    chan string    // feed of target URLs to the pool workers
	poolwg   sync.WaitGroup // one count per active pool
	bar      *uiprogress.Bar
	finished int // completed targets, shown on the global bar
	URLList  []string
	Wordlist []string
	Headers  http.Header
	Fns      []func(string) string // word mutation pipeline
	Threads  int
	PoolSize int
	Pools    *ants.PoolWithFunc
	Timeout  int
	Mod      string
	Probes   []string
	OutputCh chan *pkg.Baseline
	FuzzyCh  chan *pkg.Baseline
	Fuzzy    bool
	OutputFile *files.File
	FuzzyFile  *files.File
	Force      bool
	Progress   *uiprogress.Progress
	Offset     int
	Limit      int
	Deadline   int
	CheckPeriod    int
	ErrPeriod      int
	BreakThreshold int
	CheckOnly      bool
}
// PrepareConfig snapshots the runner's settings into a per-pool Config and
// selects the HTTP client type: fasthttp for path spray, net/http for host
// spray.
func (r *Runner) PrepareConfig() *pkg.Config {
	config := &pkg.Config{
		Thread:         r.Threads,
		Timeout:        r.Timeout,
		Headers:        r.Headers,
		Mod:            pkg.ModMap[r.Mod],
		Fns:            r.Fns,
		OutputCh:       r.OutputCh,
		FuzzyCh:        r.FuzzyCh,
		CheckPeriod:    r.CheckPeriod,
		ErrPeriod:      r.ErrPeriod,
		BreakThreshold: r.BreakThreshold,
	}
	if config.Mod == pkg.PathSpray {
		config.ClientType = ihttp.FAST
	} else if config.Mod == pkg.HostSpray {
		config.ClientType = ihttp.STANDARD
	}
	return config
}
// Prepare builds the worker pool for the selected mode: a single check-only
// pool over the URL list, or (default) one brute pool per URL fed from
// urlCh, plus the global progress bar and the output goroutines.
func (r *Runner) Prepare(ctx context.Context) error {
	var err error
	if r.CheckOnly {
		// check-only: the whole URL list is the "wordlist" of one pool
		r.Pools, err = ants.NewPoolWithFunc(1, func(i interface{}) {
			config := r.PrepareConfig()
			config.Wordlist = r.URLList
			pool, err := NewCheckPool(ctx, config)
			if err != nil {
				logs.Log.Error(err.Error())
				pool.cancel()
				r.poolwg.Done()
				return
			}
			pool.bar = pkg.NewBar("check", r.Limit-r.Offset, r.Progress)
			pool.Run(ctx, r.Offset, r.Limit)
			r.poolwg.Done()
		})
	} else {
		// feed targets asynchronously; close urlCh when exhausted
		go func() {
			for _, u := range r.URLList {
				r.urlCh <- strings.TrimSpace(u)
			}
			close(r.urlCh)
		}()
		if len(r.URLList) > 0 {
			r.bar = r.Progress.AddBar(len(r.URLList))
			r.bar.PrependCompleted()
			r.bar.PrependFunc(func(b *uiprogress.Bar) string {
				return fmt.Sprintf("total progressive: %d/%d ", r.finished, len(r.URLList))
			})
			r.bar.AppendElapsed()
		}
		r.Pools, err = ants.NewPoolWithFunc(r.PoolSize, func(i interface{}) {
			u := i.(string)
			config := r.PrepareConfig()
			config.BaseURL = u
			config.Wordlist = r.Wordlist
			pool, err := NewPool(ctx, config)
			if err != nil {
				logs.Log.Error(err.Error())
				pool.cancel()
				r.Done()
				return
			}
			pool.bar = pkg.NewBar(u, r.Limit-r.Offset, r.Progress)
			err = pool.Init()
			if err != nil {
				logs.Log.Error(err.Error())
				if !r.Force {
					// without force, a failed init shuts the pool down
					pool.cancel()
					r.Done()
					return
				}
			}
			pool.Run(ctx, r.Offset, r.Limit)
			r.Done()
		})
	}
	if err != nil {
		return err
	}
	r.Outputting()
	return nil
}
// Run dispatches one pool per URL from urlCh until the channel closes or
// ctx expires, waits for all pools, then drains and closes the output
// channels with a short grace period.
//
// Fix: the two trailing time.Sleep(100) calls slept 100 *nanoseconds* —
// a bare integer is a time.Duration in ns — while the accompanying comment
// promised a 100 ms grace period. They now sleep 100 * time.Millisecond.
func (r *Runner) Run(ctx context.Context) {
Loop:
	for {
		select {
		case <-ctx.Done():
			logs.Log.Error("cancel with deadline")
			break Loop
		case u, ok := <-r.urlCh:
			if !ok {
				break Loop
			}
			r.poolwg.Add(1)
			r.Pools.Invoke(u)
		}
	}
	r.poolwg.Wait()
	// 100 ms grace so in-flight results reach the channels before draining
	time.Sleep(100 * time.Millisecond)
	for {
		if len(r.OutputCh) == 0 {
			close(r.OutputCh)
			break
		}
	}
	for {
		if len(r.FuzzyCh) == 0 {
			close(r.FuzzyCh)
			break
		}
	}
	// final grace so consumers finish processing the drained results
	time.Sleep(100 * time.Millisecond)
}
// RunWithCheck drives the single check-only pool until it finishes or ctx
// expires, then drains and closes OutputCh.
//
// Fix: the trailing time.Sleep(100) slept 100 *nanoseconds* (bare integer
// durations are in ns), not the 100 ms the comment intended; it is now
// 100 * time.Millisecond.
func (r *Runner) RunWithCheck(ctx context.Context) {
	stopCh := make(chan struct{})
	r.poolwg.Add(1)
	err := r.Pools.Invoke(struct{}{})
	if err != nil {
		return
	}
	go func() {
		r.poolwg.Wait()
		stopCh <- struct{}{}
	}()
Loop:
	for {
		select {
		case <-ctx.Done():
			logs.Log.Error("cancel with deadline")
			break Loop
		case <-stopCh:
			break Loop
		}
	}
	for {
		if len(r.OutputCh) == 0 {
			close(r.OutputCh)
			break
		}
	}
	// 100 ms grace so consumers finish processing the drained results
	time.Sleep(100 * time.Millisecond)
}
// Done marks one target finished: ticks the global bar and counter, and
// releases the pool wait group.
func (r *Runner) Done() {
	r.bar.Incr()
	r.finished++
	r.poolwg.Done()
}
// Outputting starts the two consumer goroutines: one printing valid results
// from OutputCh (probe format when configured) and one printing fuzzy
// results when --fuzzy is enabled. Both exit when their channel closes.
func (r *Runner) Outputting() {
	go func() {
		// choose the formatter once, outside the loop
		var outFunc func(*pkg.Baseline)
		if len(r.Probes) > 0 {
			outFunc = func(bl *pkg.Baseline) {
				logs.Log.Console("[+] " + bl.Format(r.Probes) + "\n")
			}
		} else {
			outFunc = func(bl *pkg.Baseline) {
				logs.Log.Console("[+] " + bl.String() + "\n")
			}
		}
		for {
			select {
			case bl, ok := <-r.OutputCh:
				if !ok {
					return
				}
				if bl.IsValid {
					outFunc(bl)
				} else {
					logs.Log.Debug(bl.String())
				}
			}
		}
	}()
	go func() {
		for {
			select {
			case bl, ok := <-r.FuzzyCh:
				if !ok {
					return
				}
				if r.Fuzzy {
					logs.Log.Console("[baseline.fuzzy] " + bl.String() + "\n")
				}
			}
		}
	}()
}

View File

@ -1,53 +0,0 @@
package internal
// ErrorType enumerates the reasons a response is rejected during spraying.
type ErrorType uint

const (
	ErrBadStatus ErrorType = iota
	ErrSameStatus
	ErrRequestFailed
	ErrWaf
	ErrRedirect
	ErrCompareFailed
	ErrFuzzyCompareFailed
)

// errorTypeNames maps each ErrorType to its human-readable description.
var errorTypeNames = map[ErrorType]string{
	ErrBadStatus:          "bad status",
	ErrSameStatus:         "same status",
	ErrRequestFailed:      "request failed",
	ErrWaf:                "maybe banned by waf",
	ErrRedirect:           "duplicate redirect url",
	ErrCompareFailed:      "compare failed",
	ErrFuzzyCompareFailed: "fuzzy compare failed",
}

// Error returns the description for e, or "unknown error" for values
// outside the enumeration.
func (e ErrorType) Error() string {
	if name, ok := errorTypeNames[e]; ok {
		return name
	}
	return "unknown error"
}
// sourceType tags where a candidate request path came from.
type sourceType int

const (
    CheckSource sourceType = iota + 1
    InitRandomSource
    InitIndexSource
    WordSource
    WafSource
)

// Unit couples a candidate path with the source that produced it.
type Unit struct {
    path   string
    source sourceType
}

// newUnit builds a Unit for the given path and source.
func newUnit(path string, source sourceType) *Unit {
    return &Unit{
        path:   path,
        source: source,
    }
}

View File

@ -2,44 +2,60 @@ package pkg
import (
"fmt"
"github.com/chainreactors/go-metrics"
"github.com/gosuri/uiprogress"
"github.com/vbauerster/mpb/v8"
"github.com/vbauerster/mpb/v8/decor"
"time"
)
func NewBar(u string, total int, progress *uiprogress.Progress) *Bar {
bar := &Bar{
Bar: progress.AddBar(total),
func NewBar(u string, total int, stat *Statistor, p *mpb.Progress) *Bar {
if p == nil {
return &Bar{
url: u,
m: metrics.NewMeter(),
}
}
bar := p.AddBar(int64(total),
mpb.BarFillerClearOnComplete(),
mpb.BarRemoveOnComplete(),
mpb.PrependDecorators(
decor.Name(u, decor.WC{W: len(u) + 1, C: decor.DindentRight}), // 这里调整了装饰器的参数
decor.NewAverageSpeed(0, "% .0f/s ", time.Now()),
decor.Counters(0, "%d/%d"),
decor.Any(func(s decor.Statistics) string {
return fmt.Sprintf(" found: %d", stat.FoundNumber)
}),
),
mpb.AppendDecorators(
decor.Percentage(),
decor.Elapsed(decor.ET_STYLE_GO, decor.WC{W: 4}),
),
)
metrics.Register(bar.url, bar.m)
bar.PrependCompleted()
bar.PrependFunc(func(b *uiprogress.Bar) string {
return fmt.Sprintf("%f/s %d/%d", bar.m.Rate1(), bar.m.Count(), bar.Bar.Total)
})
bar.PrependFunc(func(b *uiprogress.Bar) string {
return u
})
bar.AppendElapsed()
return bar
return &Bar{
url: u,
bar: bar,
//m: m,
}
}
type Bar struct {
url string
total int
close bool
*uiprogress.Bar
m metrics.Meter
bar *mpb.Bar
//m metrics.Meter
}
func (bar *Bar) Done() {
bar.m.Mark(1)
bar.Incr()
//bar.m.Mark(1)
if bar.bar == nil {
return
}
bar.bar.Increment()
}
func (bar *Bar) Close() {
metrics.Unregister(bar.url)
bar.close = true
//metrics.Unregister(bar.url)
// 标记进度条为完成状态
if bar.bar == nil {
return
}
bar.bar.Abort(true)
}

View File

@ -1,253 +0,0 @@
package pkg
import (
"encoding/json"
"github.com/chainreactors/parsers"
"github.com/chainreactors/spray/pkg/ihttp"
"net/url"
"strconv"
"strings"
)
// NewBaseline builds a valid Baseline snapshot from a response: status, body,
// headers, redirect target and the raw header+body bytes used for hashing.
func NewBaseline(u, host string, resp *ihttp.Response) *Baseline {
    bl := &Baseline{
        Url:     u,
        Status:  resp.StatusCode(),
        IsValid: true,
    }
    if parsed, err := url.Parse(u); err == nil {
        bl.Path = parsed.Path
    }
    if resp.ClientType == ihttp.STANDARD {
        // host is only recorded for the standard client
        bl.Host = host
    }
    bl.Body = resp.Body()
    bl.BodyLength = len(bl.Body)
    bl.Header = resp.Header()
    bl.HeaderLength = len(bl.Header)
    bl.RedirectURL = resp.GetHeader("Location")
    bl.Raw = append(bl.Header, bl.Body...)
    return bl
}
// NewInvalidBaseline builds a rejected Baseline carrying the rejection reason.
// It records less than NewBaseline: no headers and no raw bytes, since
// invalid baselines are only logged, not hashed.
func NewInvalidBaseline(u, host string, resp *ihttp.Response, reason string) *Baseline {
    bl := &Baseline{
        Url:     u,
        Status:  resp.StatusCode(),
        IsValid: false,
        Reason:  reason,
    }
    if parsed, err := url.Parse(u); err == nil {
        bl.Path = parsed.Path
    }
    if resp.ClientType == ihttp.STANDARD {
        bl.Host = host
    }
    bl.Body = resp.Body()
    bl.BodyLength = len(bl.Body)
    bl.RedirectURL = string(resp.GetHeader("Location"))
    return bl
}
// Baseline is the recorded outcome of a single sprayed request, used both for
// comparison against random baselines and for console/file output.
type Baseline struct {
    Url          string `json:"url"`
    Path         string `json:"path"`
    Host         string `json:"host"`
    Body         []byte `json:"-"`
    BodyLength   int    `json:"body_length"`
    Header       []byte `json:"-"`
    Raw          []byte `json:"-"` // header bytes followed by body bytes
    HeaderLength int    `json:"header_length"`
    RedirectURL  string `json:"redirect_url"`
    Status       int    `json:"status"`
    IsDynamicUrl bool   `json:"is_dynamic_url"` // whether the url looks dynamic
    Spended      int    `json:"spended"`        // elapsed time, milliseconds
    Title        string `json:"title"`
    Frameworks   Frameworks `json:"frameworks"`
    Extracteds   Extracteds `json:"extracts"`
    Err          string `json:"error"`
    Reason       string `json:"reason"` // why the baseline was rejected/flagged
    IsValid      bool   `json:"valid"`
    IsFuzzy      bool   `json:"fuzzy"`
    *parsers.Hashes    // body/raw hashes (md5, mmh3, simhash); nil until Collect
}
// Collect performs deep collection on the response: extracts the HTML title,
// computes the hash set over the raw bytes, runs the extractors, and matches
// fingerprints. Call once, after the baseline is deemed interesting.
func (bl *Baseline) Collect() {
    if len(bl.Body) > 0 {
        bl.Title = parsers.MatchTitle(string(bl.Body))
    }
    bl.Hashes = parsers.NewHashes(bl.Raw)
    // todo extract
    bl.Extracteds = Extractors.Extract(string(bl.Raw))
    bl.Frameworks = FingerDetect(string(bl.Raw))
}
// Compare reports how similar bl is to other:
//
//	 1 — definitely the same page
//	 0 — possibly the same (defer to fuzzy comparison)
//	-1 — different pages
func (bl *Baseline) Compare(other *Baseline) int {
    if other.RedirectURL != "" && bl.RedirectURL == other.RedirectURL {
        // identical redirect target -> same page
        return 1
    }

    // BUG FIX: the original condition `i < 16 || i > -16` is true for every
    // integer, so the length gate never rejected anything. The surrounding
    // comments make the intent clear: "length difference within 16 bytes".
    if diff := bl.BodyLength - other.BodyLength; -16 < diff && diff < 16 {
        if bl.BodyMd5 == parsers.Md5Hash(other.Body) {
            // near-equal length and identical md5 -> identical page
            return 1
        }
        // similar length but different md5: likely a random token
        // (csrf and the like) — leave it to the fuzzy comparison
        return 0
    }

    if strings.Contains(string(other.Body), other.Path) {
        // the body echoes the requested path, which can perturb the length
        return 0
    }
    return -1
}
// Distance is the maximum simhash hamming distance at which two bodies are
// still considered the same page.
var Distance uint8 = 5

// FuzzyCompare reports whether other's body simhash is within Distance of
// bl's (idiom fix: `if cond { return true }; return false` -> `return cond`).
func (bl *Baseline) FuzzyCompare(other *Baseline) bool {
    return parsers.SimhashCompare(other.BodySimhash, bl.BodySimhash) < Distance
}
// Get returns the string form of a named baseline attribute; unknown keys
// yield "". Hash keys return "" until Collect has populated bl.Hashes.
func (bl *Baseline) Get(key string) string {
    switch key {
    case "url":
        return bl.Url
    case "host":
        return bl.Host
    case "title":
        return bl.Title
    case "redirect":
        return bl.RedirectURL
    case "md5":
        if bl.Hashes != nil {
            return bl.Hashes.BodyMd5
        }
        return ""
    case "simhash":
        if bl.Hashes != nil {
            return bl.Hashes.BodySimhash
        }
        return ""
    case "mmh3":
        // BUG FIX: this case previously returned BodySimhash — a copy-paste
        // slip. Assumes parsers.Hashes exposes BodyMmh3 — confirm.
        if bl.Hashes != nil {
            return bl.Hashes.BodyMmh3
        }
        return ""
    case "stat", "status":
        return strconv.Itoa(bl.Status)
    case "spend":
        return strconv.Itoa(bl.Spended)
    case "frame", "framework":
        return bl.Frameworks.ToString()
    default:
        return ""
    }
}
// Additional wraps a non-empty attribute value in " [...] " for console
// output; empty attributes collapse to a single space so columns stay aligned.
func (bl *Baseline) Additional(key string) string {
    v := bl.Get(key)
    if v == "" {
        return " "
    }
    return " [" + v + "] "
}
// Format renders the baseline for probe-style console output: URL, optional
// host, reason, then one Additional() column per requested probe key.
// A recorded error short-circuits the probe columns.
func (bl *Baseline) Format(probes []string) string {
    var line strings.Builder
    line.WriteString(bl.Url)
    if bl.Host != "" {
        line.WriteString(" (" + bl.Host + ")")
    }
    if bl.Reason != "" {
        line.WriteString(" ,")
        line.WriteString(bl.Reason)
    }
    if bl.Err != "" {
        line.WriteString(" ,err: ")
        line.WriteString(bl.Err)
        return line.String()
    }
    for _, p := range probes {
        line.WriteString(" ")
        line.WriteString(bl.Additional(p))
    }
    return line.String()
}
// String renders the default one-line console form:
// "url (host) [reason: ...] - status - length -> redirect [title] frameworks".
// A recorded error short-circuits everything after the error segment.
func (bl *Baseline) String() string {
    var line strings.Builder
    //line.WriteString("[+] ")
    line.WriteString(bl.Url)
    if bl.Host != "" {
        line.WriteString(" (" + bl.Host + ")")
    }
    if bl.Reason != "" {
        line.WriteString(" [reason: ")
        line.WriteString(bl.Reason)
        line.WriteString("]")
    }
    if bl.Err != "" {
        line.WriteString(" [err: ")
        line.WriteString(bl.Err)
        line.WriteString("]")
        return line.String()
    }
    line.WriteString(" - ")
    line.WriteString(strconv.Itoa(bl.Status))
    line.WriteString(" - ")
    line.WriteString(strconv.Itoa(bl.BodyLength))
    if bl.RedirectURL != "" {
        line.WriteString(" -> ")
        line.WriteString(bl.RedirectURL)
        line.WriteString(" ")
    }
    line.WriteString(bl.Additional("title"))
    line.WriteString(bl.Frameworks.ToString())
    return line.String()
}
// Jsonify serializes the baseline to JSON; marshal failure yields "".
func (bl *Baseline) Jsonify() string {
    bs, err := json.Marshal(bl)
    if err == nil {
        return string(bs)
    }
    return ""
}
// ToMap exposes the baseline as a generic map. Only the status code is
// included at present — presumably for expression-based filtering; confirm
// against callers before extending.
func (bl *Baseline) ToMap() map[string]interface{} {
    return map[string]interface{}{
        "status": bl.Status,
    }
}

View File

@ -1,36 +0,0 @@
package pkg
import (
"net/http"
)
// SprayMod selects which request component the wordlist is sprayed into.
type SprayMod int

const (
    PathSpray SprayMod = iota + 1
    HostSpray
    ParamSpray
    CustomSpray
)

// ModMap resolves the CLI mod name to its SprayMod value. Note only "path"
// and "host" are exposed here; ParamSpray/CustomSpray have no CLI alias.
var ModMap = map[string]SprayMod{
    "path": PathSpray,
    "host": HostSpray,
}
// Config carries the per-target pool configuration.
type Config struct {
    BaseURL        string   // target base URL being sprayed
    Wordlist       []string // candidate words for this run
    Thread         int      // worker concurrency
    Timeout        int      // per-request timeout, seconds
    CheckPeriod    int      // how often to re-check the random baseline
    ErrPeriod      int      // how often to tolerate errors before re-check
    BreakThreshold int      // consecutive failures before aborting the pool
    Method         string   // HTTP method
    Mod            SprayMod // spray strategy (path/host/...)
    Headers        http.Header
    ClientType     int // FAST (fasthttp) or STANDARD (net/http)
    Fns            []func(string) string // word transformation pipeline
    OutputCh       chan *Baseline        // valid results sink
    FuzzyCh        chan *Baseline        // fuzzy results sink
}

41
pkg/errors.go Normal file
View File

@ -0,0 +1,41 @@
package pkg
// ErrorType enumerates the reasons a response can be rejected or flagged
// during spraying. NoErr (zero value) means no rejection.
type ErrorType uint

const (
    NoErr ErrorType = iota
    ErrBadStatus
    ErrSameStatus
    ErrRequestFailed
    ErrWaf
    ErrRedirect
    ErrCompareFailed
    ErrCustomCompareFailed
    ErrCustomFilter
    ErrFuzzyCompareFailed
    ErrFuzzyRedirect
    ErrFuzzyNotUnique
    ErrUrlError
    ErrResponseError
)

// ErrMap maps each ErrorType to its human-readable description.
var ErrMap = map[ErrorType]string{
    NoErr:                  "",
    ErrBadStatus:           "blacklist status",
    ErrSameStatus:          "same status with random baseline",
    ErrRequestFailed:       "request failed",
    ErrWaf:                 "maybe banned by waf",
    ErrRedirect:            "duplicate redirect url",
    ErrCompareFailed:       "compare failed",
    ErrCustomCompareFailed: "custom compare failed",
    ErrCustomFilter:        "custom filtered",
    ErrFuzzyCompareFailed:  "fuzzy compare failed",
    ErrFuzzyRedirect:       "fuzzy redirect",
    ErrFuzzyNotUnique:      "not unique",
    ErrUrlError:            "url parse error",
    ErrResponseError:       "response parse error",
}

// Error implements the error interface; values missing from ErrMap yield ""
// (the map's zero value), matching the original direct lookup.
func (t ErrorType) Error() string {
    msg, ok := ErrMap[t]
    if !ok {
        return ""
    }
    return msg
}

17
pkg/fingers.go Normal file
View File

@ -0,0 +1,17 @@
package pkg
import (
"bytes"
"github.com/chainreactors/fingers/common"
)
// gogo fingers engine
// FingersDetect matches content against the fingers HTTP fingerprint set.
// The body is lowercased before matching; match errors are deliberately
// ignored (best-effort detection).
func FingersDetect(content []byte) common.Frameworks {
    frames, _ := FingerEngine.Fingers().HTTPMatch(bytes.ToLower(content), "")
    return frames
}
// EngineDetect runs the full multi-engine fingerprint detection over content.
// Heavier than FingersDetect; errors are ignored (best-effort).
func EngineDetect(content []byte) common.Frameworks {
    frames, _ := FingerEngine.DetectContent(content)
    return frames
}

View File

@ -1,105 +0,0 @@
package ihttp
import (
"context"
"crypto/tls"
"fmt"
"github.com/valyala/fasthttp"
"net/http"
"time"
)
var (
DefaultMaxBodySize = 1024 * 100 // 100k
)
const (
FAST = iota
STANDARD
)
// NewClient builds either a fasthttp-backed (FAST) or net/http-backed
// (STANDARD) client. Both skip TLS verification and derive every timeout
// from the same per-request timeout in seconds.
func NewClient(thread int, timeout int, clientType int) *Client {
    d := time.Duration(timeout) * time.Second
    tlsConf := &tls.Config{
        Renegotiation:      tls.RenegotiateOnceAsClient,
        InsecureSkipVerify: true,
    }

    if clientType == FAST {
        return &Client{
            fastClient: &fasthttp.Client{
                TLSConfig:           tlsConf,
                MaxConnsPerHost:     thread * 2,
                MaxIdleConnDuration: d,
                MaxConnWaitTimeout:  d,
                ReadTimeout:         d,
                WriteTimeout:        d,
                MaxResponseBodySize: DefaultMaxBodySize,
            },
            timeout:    d,
            clientType: clientType,
        }
    }

    return &Client{
        standardClient: &http.Client{
            Transport: &http.Transport{
                TLSClientConfig: tlsConf,
                MaxConnsPerHost: thread,
                IdleConnTimeout: d,
            },
            Timeout:       d,
            CheckRedirect: checkRedirect,
        },
        timeout:    d,
        clientType: clientType,
    }
}
// Client wraps exactly one of a fasthttp or net/http client; which field is
// non-nil decides the code path taken in Do.
type Client struct {
    fastClient     *fasthttp.Client
    standardClient *http.Client
    clientType     int           // FAST or STANDARD
    timeout        time.Duration // per-request timeout
}
// TransToCheck reconfigures the client for check mode by serializing all
// requests onto a single connection (fasthttp only).
// NOTE(review): no equivalent tuning exists for the standard client yet;
// the original carried an empty else-if branch here, removed as dead code.
func (c *Client) TransToCheck() {
    if c.fastClient != nil {
        c.fastClient.MaxConnsPerHost = 1
    }
}
// FastDo issues req on the fasthttp client into a freshly acquired response.
// The ctx argument is currently unused — fasthttp's plain Do has no context
// support; TODO confirm whether DoDeadline should be used instead.
// Caller is responsible for releasing the acquired response.
func (c *Client) FastDo(ctx context.Context, req *fasthttp.Request) (*fasthttp.Response, error) {
    resp := fasthttp.AcquireResponse()
    return resp, c.fastClient.Do(req, resp)
}
// StandardDo issues req on the net/http client, honoring ctx for
// cancellation/deadline. (Fix: the original accepted ctx but ignored it,
// so context cancellation never propagated to in-flight requests.)
func (c *Client) StandardDo(ctx context.Context, req *http.Request) (*http.Response, error) {
    return c.standardClient.Do(req.WithContext(ctx))
}
// Do dispatches the request to whichever underlying client is configured,
// wrapping the result in the unified Response type.
func (c *Client) Do(ctx context.Context, req *Request) (*Response, error) {
    switch {
    case c.fastClient != nil:
        resp, err := c.FastDo(ctx, req.FastRequest)
        return &Response{FastResponse: resp, ClientType: FAST}, err
    case c.standardClient != nil:
        resp, err := c.StandardDo(ctx, req.StandardRequest)
        return &Response{StandardResponse: resp, ClientType: STANDARD}, err
    default:
        return nil, fmt.Errorf("not found client")
    }
}
var MaxRedirects = 0
var checkRedirect = func(req *http.Request, via []*http.Request) error {
if len(via) > MaxRedirects {
return http.ErrUseLastResponse
}
return nil
}

136
pkg/load.go Normal file
View File

@ -0,0 +1,136 @@
package pkg
import (
"fmt"
"github.com/chainreactors/fingers"
"github.com/chainreactors/parsers"
"github.com/chainreactors/utils"
"github.com/chainreactors/utils/iutils"
"github.com/chainreactors/words/mask"
"os"
yaml "sigs.k8s.io/yaml/goyaml.v3"
"strings"
)
// LoadPorts parses the embedded "port" config and installs it as the global
// port preset used by the utils package.
func LoadPorts() error {
    var err error
    var ports []*utils.PortConfig
    err = yaml.Unmarshal(LoadConfig("port"), &ports)
    if err != nil {
        return err
    }
    utils.PrePort = utils.NewPortPreset(ports)
    return nil
}
// LoadFingers initializes the global fingerprint engine and collects every
// active-probe path into ActivePath: rule send-data strings from the fingers
// HTTP set plus non-root paths from FingerPrintHub.
func LoadFingers() error {
    var err error
    FingerEngine, err = fingers.NewEngine()
    if err != nil {
        return err
    }
    for _, f := range FingerEngine.Fingers().HTTPFingers {
        for _, rule := range f.Rules {
            if rule.SendDataStr != "" {
                ActivePath = append(ActivePath, rule.SendDataStr)
            }
        }
    }
    for _, f := range FingerEngine.FingerPrintHub().FingerPrints {
        if f.Path != "/" {
            ActivePath = append(ActivePath, f.Path)
        }
    }
    return nil
}
// LoadTemplates loads the embedded rule, wordlist, mask-keyword and extractor
// configs and populates the package-level registries (Rules, Dicts,
// mask.SpecialWords, ExtractRegexps).
func LoadTemplates() error {
    var err error
    // load rule
    err = yaml.Unmarshal(LoadConfig("spray_rule"), &Rules)
    if err != nil {
        return err
    }
    // load default words; dict names are stored without their ".txt" suffix
    var dicts map[string]string
    err = yaml.Unmarshal(LoadConfig("spray_dict"), &dicts)
    if err != nil {
        return err
    }
    for name, wordlist := range dicts {
        dict := strings.Split(strings.TrimSpace(wordlist), "\n")
        for i, d := range dict {
            dict[i] = strings.TrimSpace(d)
        }
        Dicts[strings.TrimSuffix(name, ".txt")] = dict
    }
    // load mask keywords
    // NOTE(review): the type assertions below panic if a keyword's value is
    // not a list — assumed guaranteed by the embedded config; confirm.
    var keywords map[string]interface{}
    err = yaml.Unmarshal(LoadConfig("spray_common"), &keywords)
    if err != nil {
        return err
    }
    for k, v := range keywords {
        t := make([]string, len(v.([]interface{})))
        for i, vv := range v.([]interface{}) {
            t[i] = iutils.ToString(vv)
        }
        mask.SpecialWords[k] = t
    }
    var extracts []*parsers.Extractor
    err = yaml.Unmarshal(LoadConfig("extract"), &extracts)
    if err != nil {
        return err
    }
    for _, extract := range extracts {
        extract.Compile()
        // register under the extractor's own name and under each of its tags
        ExtractRegexps[extract.Name] = []*parsers.Extractor{extract}
        for _, tag := range extract.Tags {
            if _, ok := ExtractRegexps[tag]; !ok {
                ExtractRegexps[tag] = []*parsers.Extractor{extract}
            } else {
                ExtractRegexps[tag] = append(ExtractRegexps[tag], extract)
            }
        }
    }
    return nil
}
// LoadExtractorConfig reads a user-supplied YAML file of extractors,
// compiles each one, and returns the ready-to-use slice.
func LoadExtractorConfig(filename string) ([]*parsers.Extractor, error) {
    content, err := os.ReadFile(filename)
    if err != nil {
        return nil, err
    }
    var extractors []*parsers.Extractor
    if err = yaml.Unmarshal(content, &extractors); err != nil {
        return nil, err
    }
    for _, e := range extractors {
        e.Compile()
    }
    return extractors, nil
}
// Load initializes all embedded configuration: port presets first, then the
// rule/dict/mask/extract templates. Each failure is wrapped with its stage.
func Load() error {
    if err := LoadPorts(); err != nil {
        return fmt.Errorf("load ports, %w", err)
    }
    if err := LoadTemplates(); err != nil {
        return fmt.Errorf("load templates, %w", err)
    }
    return nil
}

26
pkg/parse.go Normal file
View File

@ -0,0 +1,26 @@
package pkg
import "strings"
var (
    // SkipChar marks a word that should be dropped from the wordlist.
    SkipChar = "%SKIP%"
    // EXTChar is the extension placeholder expanded by ParseEXTPlaceholderFunc.
    EXTChar = "%EXT%"
)

// ParseEXTPlaceholderFunc returns an expander: for a word containing %EXT%
// it yields one word per extension with the placeholder substituted; words
// without the placeholder (or an empty extension list) pass through as-is.
// (Improvement: the Contains check was evaluated inside the loop although
// its operand never changes; it is now hoisted out.)
func ParseEXTPlaceholderFunc(exts []string) func(string) []string {
    return func(s string) []string {
        // no extensions or no placeholder: keep the word unchanged,
        // matching the original's n == 0 path
        if len(exts) == 0 || !strings.Contains(s, EXTChar) {
            return []string{s}
        }
        ss := make([]string, len(exts))
        for i, e := range exts {
            ss[i] = strings.ReplaceAll(s, EXTChar, e)
        }
        return ss
    }
}

196
pkg/statistor.go Normal file
View File

@ -0,0 +1,196 @@
package pkg
import (
    "bytes"
    "encoding/json"
    "fmt"
    "io/ioutil"
    "os"
    "strconv"
    "strings"
    "time"

    "github.com/chainreactors/logs"
    "github.com/chainreactors/parsers"
)
// DefaultStatistor is the template copied for every per-target statistor;
// CLI option parsing fills in the shared fields before any pool starts.
var DefaultStatistor Statistor

// NewStatistor clones DefaultStatistor for one target, stamping the start
// time and giving the clone its own counter maps.
func NewStatistor(url string) *Statistor {
    stat := DefaultStatistor
    stat.BaseUrl = url
    stat.StartTime = time.Now().Unix()
    stat.Counts = make(map[int]int)
    stat.Sources = make(map[parsers.SpraySource]int)
    return &stat
}
// NewStatistorFromStat derives a fresh Statistor that resumes a previous run:
// the new Offset continues where the old run ended, while all counters and
// the start time are reset.
func NewStatistorFromStat(origin *Statistor) *Statistor {
    next := &Statistor{
        BaseUrl:      origin.BaseUrl,
        Word:         origin.Word,
        Dictionaries: origin.Dictionaries,
        RuleFiles:    origin.RuleFiles,
        RuleFilter:   origin.RuleFilter,
        Offset:       origin.End,
        Counts:       make(map[int]int),
        Sources:      make(map[parsers.SpraySource]int),
        StartTime:    time.Now().Unix(),
    }
    return next
}
// Statistor accumulates per-target run statistics and is serialized to the
// stat file (one JSON object per line) for later resume/reporting.
type Statistor struct {
    BaseUrl        string                      `json:"url"`
    Error          string                      `json:"error"`
    Counts         map[int]int                 `json:"counts"`  // hits per status code
    Sources        map[parsers.SpraySource]int `json:"sources"` // hits per word source
    FailedNumber   int32                       `json:"failed"`
    ReqTotal       int32                       `json:"req_total"`
    CheckNumber    int                         `json:"check"`
    FoundNumber    int                         `json:"found"`
    FilteredNumber int                         `json:"filtered"`
    FuzzyNumber    int                         `json:"fuzzy"`
    WafedNumber    int                         `json:"wafed"`
    End            int                         `json:"end"`    // last processed wordlist index
    Skipped        int                         `json:"skipped"`
    Offset         int                         `json:"offset"` // resume point within the wordlist
    Total          int                         `json:"total"`
    StartTime      int64                       `json:"start_time"` // unix seconds
    EndTime        int64                       `json:"end_time"`   // unix seconds
    WordCount      int                         `json:"word_count"`
    Word           string                      `json:"word"`
    Dictionaries   []string                    `json:"dictionaries"`
    RuleFiles      []string                    `json:"rule_files"`
    RuleFilter     string                      `json:"rule_filter"`
}
// ColorString renders the final run summary with ANSI colors for terminal
// output. It mirrors String(); keep the two in sync.
func (stat *Statistor) ColorString() string {
    var s strings.Builder
    s.WriteString(fmt.Sprintf("[stat] %s took %d s, request total: %s, finish: %s/%s(%s skipped), found: %s, check: %s, failed: %s",
        logs.GreenLine(stat.BaseUrl),
        stat.EndTime-stat.StartTime,
        logs.YellowBold(strconv.Itoa(int(stat.ReqTotal))),
        logs.YellowBold(strconv.Itoa(stat.End)),
        logs.YellowBold(strconv.Itoa(stat.Total)),
        logs.YellowLine(strconv.Itoa(stat.Skipped)),
        logs.YellowBold(strconv.Itoa(stat.FoundNumber)),
        logs.YellowBold(strconv.Itoa(stat.CheckNumber)),
        logs.YellowBold(strconv.Itoa(int(stat.FailedNumber)))))
    // optional segments are only appended when non-zero
    if stat.FuzzyNumber != 0 {
        s.WriteString(", fuzzy: " + logs.Yellow(strconv.Itoa(stat.FuzzyNumber)))
    }
    if stat.FilteredNumber != 0 {
        s.WriteString(", filtered: " + logs.Yellow(strconv.Itoa(stat.FilteredNumber)))
    }
    if stat.WafedNumber != 0 {
        s.WriteString(", wafed: " + logs.Yellow(strconv.Itoa(stat.WafedNumber)))
    }
    return s.String()
}
// String renders the final run summary without colors (file/log output).
// It mirrors ColorString(); keep the two in sync.
func (stat *Statistor) String() string {
    var s strings.Builder
    s.WriteString(fmt.Sprintf("[stat] %s took %d s, request total: %d, finish: %d/%d(%d skipped), found: %d, check: %d, failed: %d",
        stat.BaseUrl,
        stat.EndTime-stat.StartTime,
        stat.ReqTotal,
        stat.End,
        stat.Total,
        stat.Skipped,
        stat.FoundNumber,
        stat.CheckNumber,
        stat.FailedNumber))
    // optional segments are only appended when non-zero
    if stat.FuzzyNumber != 0 {
        s.WriteString(", fuzzy: " + strconv.Itoa(stat.FuzzyNumber))
    }
    if stat.FilteredNumber != 0 {
        s.WriteString(", filtered: " + strconv.Itoa(stat.FilteredNumber))
    }
    if stat.WafedNumber != 0 {
        s.WriteString(", wafed: " + strconv.Itoa(stat.WafedNumber))
    }
    return s.String()
}
// CountString renders the per-status-code hit counts, skipping the synthetic
// code 0 bucket; empty when nothing was counted. Iteration order follows Go
// map order and is therefore unstable, as in the original.
func (stat *Statistor) CountString() string {
    if len(stat.Counts) == 0 {
        return ""
    }
    var b strings.Builder
    b.WriteString("[stat] ")
    b.WriteString(stat.BaseUrl)
    for code, n := range stat.Counts {
        if code == 0 {
            continue
        }
        b.WriteString(fmt.Sprintf(" %d: %d,", code, n))
    }
    return b.String()
}
// SourceString renders per-word-source hit counts; empty when none recorded.
// Map iteration order is unstable.
func (stat *Statistor) SourceString() string {
    if len(stat.Sources) == 0 {
        return ""
    }
    var s strings.Builder
    s.WriteString("[stat] ")
    s.WriteString(stat.BaseUrl)
    for k, v := range stat.Sources {
        s.WriteString(fmt.Sprintf(" %s: %d,", k.Name(), v))
    }
    return s.String()
}
// ColorCountString is the ANSI-colored variant of CountString.
// Keep in sync with CountString.
func (stat *Statistor) ColorCountString() string {
    if len(stat.Counts) == 0 {
        return ""
    }
    var s strings.Builder
    s.WriteString(fmt.Sprintf("[stat] %s ", stat.BaseUrl))
    for k, v := range stat.Counts {
        if k == 0 {
            // code 0 is a synthetic bucket, never printed
            continue
        }
        s.WriteString(fmt.Sprintf(" %s: %s,", logs.Cyan(strconv.Itoa(k)), logs.YellowBold(strconv.Itoa(v))))
    }
    return s.String()
}
// ColorSourceString is the ANSI-colored variant of SourceString.
// Keep in sync with SourceString.
func (stat *Statistor) ColorSourceString() string {
    if len(stat.Sources) == 0 {
        return ""
    }
    var s strings.Builder
    s.WriteString(fmt.Sprintf("[stat] %s ", stat.BaseUrl))
    for k, v := range stat.Sources {
        s.WriteString(fmt.Sprintf(" %s: %s,", logs.Cyan(k.Name()), logs.YellowBold(strconv.Itoa(v))))
    }
    return s.String()
}
// Json serializes the statistics as one newline-terminated JSON line, the
// format consumed by ReadStatistors. On marshal failure the error text is
// returned instead (no trailing newline).
func (stat *Statistor) Json() string {
    bs, err := json.Marshal(stat)
    if err == nil {
        return string(bs) + "\n"
    }
    return err.Error()
}
// ReadStatistors loads a stat file written by Json: newline-delimited JSON,
// one Statistor per line. Any unparsable line aborts the whole read.
// (Modernized: ioutil.ReadFile is deprecated; the rest of this package
// already uses os.ReadFile.)
func ReadStatistors(filename string) (Statistors, error) {
    content, err := os.ReadFile(filename)
    if err != nil {
        return nil, err
    }
    var stats Statistors
    for _, line := range bytes.Split(bytes.TrimSpace(content), []byte("\n")) {
        var stat Statistor
        if err := json.Unmarshal(line, &stat); err != nil {
            return nil, err
        }
        stats = append(stats, &stat)
    }
    return stats, nil
}

// Statistors is a collection of per-target statistics.
type Statistors []*Statistor

View File

@ -1,116 +0,0 @@
//go:build ignore
// +build ignore
package main
import (
"encoding/json"
"fmt"
"github.com/chainreactors/files"
"github.com/chainreactors/parsers"
"io"
"os"
"path/filepath"
"sigs.k8s.io/yaml"
)
// Encode flate-compresses input and base64-encodes the result — the string
// form embedded into the generated templates.go.
func Encode(input []byte) string {
    return parsers.Base64Encode(files.Flate(input))
}
// loadYamlFile2JsonString reads templates/<filename>, converts the YAML to
// JSON, and returns it Encode-d. Panics on any failure — acceptable because
// this runs only at code-generation time (//go:build ignore).
func loadYamlFile2JsonString(filename string) string {
    var err error
    file, err := os.Open("templates/" + filename)
    if err != nil {
        panic(err.Error())
    }
    // read error deliberately ignored in this generator
    bs, _ := io.ReadAll(file)
    jsonstr, err := yaml.YAMLToJSON(bs)
    if err != nil {
        panic(filename + err.Error())
    }
    return Encode(jsonstr)
}
func visit(files *[]string) filepath.WalkFunc {
return func(path string, info os.FileInfo, err error) error {
if err != nil {
panic(err)
}
if !info.IsDir() {
*files = append(*files, path)
}
return nil
}
}
// recuLoadYamlFiles2JsonString walks templates/<dir>, parses every file as
// YAML, concatenates the documents into one JSON array, and returns it
// Encode-d. With single=true each file contributes one element; otherwise
// each file is itself expected to be a list and is flattened. Panics on any
// failure (generator-time code).
func recuLoadYamlFiles2JsonString(dir string, single bool) string {
    var files []string
    err := filepath.Walk("templates/"+dir, visit(&files))
    if err != nil {
        panic(err)
    }
    var pocs []interface{}
    for _, file := range files {
        var tmp interface{}
        bs, err := os.ReadFile(file)
        if err != nil {
            panic(err)
        }
        err = yaml.Unmarshal(bs, &tmp)
        if err != nil {
            print(file)
            panic(err)
        }
        if tmp == nil {
            // empty document: skip silently
            continue
        }
        if single {
            pocs = append(pocs, tmp)
        } else {
            // NOTE(review): panics if a file is not a YAML list — assumed
            // guaranteed by the template layout; confirm.
            pocs = append(pocs, tmp.([]interface{})...)
        }
    }
    jsonstr, err := json.Marshal(pocs)
    if err != nil {
        panic(err)
    }
    return Encode(jsonstr)
}
// main regenerates pkg/templates.go, embedding the compressed fingerprint
// data into a LoadConfig function. Run manually (file is //go:build ignore).
func main() {
    // the generated file's source; %s receives the encoded fingerprint blob
    template := `package pkg
import (
"github.com/chainreactors/files"
"github.com/chainreactors/parsers"
)
func LoadConfig(typ string) []byte {
if typ == "http" {
return files.UnFlate(parsers.Base64Decode("%s"))
}
return []byte{}
}
`
    template = fmt.Sprintf(template,
        recuLoadYamlFiles2JsonString("fingers/http", false),
    )
    f, err := os.OpenFile("pkg/templates.go", os.O_WRONLY|os.O_TRUNC|os.O_CREATE, 0644)
    if err != nil {
        panic(err)
    }
    // write errors ignored: generator-time best effort
    f.WriteString(template)
    f.Sync()
    f.Close()
    println("generate templates.go successfully")
}

View File

@ -1,21 +0,0 @@
package pkg
import (
"github.com/chainreactors/gogo/v2/pkg/fingers"
"github.com/chainreactors/parsers"
"strings"
)
// Frameworks is a list of detected framework fingerprints.
type Frameworks []*parsers.Framework

// ToString renders every framework as "[name]" joined by single spaces.
func (fs Frameworks) ToString() string {
    parts := make([]string, len(fs))
    for i, framework := range fs {
        parts[i] = "[" + framework.ToString() + "]"
    }
    return strings.Join(parts, " ")
}

// Extracteds holds extraction results produced by the fingers engine.
type Extracteds []*fingers.Extracted

// Extractors is the shared registry of content extractors.
var Extractors = make(fingers.Extractors)

View File

@ -1,32 +1,104 @@
package pkg
import (
"fmt"
"github.com/chainreactors/gogo/v2/pkg/fingers"
"github.com/chainreactors/gogo/v2/pkg/utils"
"github.com/chainreactors/ipcs"
"github.com/go-dedup/simhash"
"bufio"
"bytes"
"github.com/chainreactors/files"
"github.com/chainreactors/fingers"
"github.com/chainreactors/logs"
"github.com/chainreactors/parsers"
"github.com/chainreactors/utils/iutils"
"github.com/chainreactors/words/mask"
"github.com/chainreactors/words/rule"
"github.com/expr-lang/expr"
"github.com/expr-lang/expr/vm"
"io/ioutil"
"math/rand"
"os"
"net/http"
"net/url"
"path"
"path/filepath"
"strconv"
"strings"
"time"
"unsafe"
)
func HasStdin() bool {
stat, err := os.Stdin.Stat()
if err != nil {
return false
var (
LogVerbose = logs.Warn - 2
LogFuzz = logs.Warn - 1
DefaultWhiteStatus = []int{200} // cmd input
DefaultBlackStatus = []int{400, 410} // cmd input
DefaultFuzzyStatus = []int{500, 501, 502, 503, 301, 302, 404} // cmd input
DefaultUniqueStatus = []int{403, 200, 404} // 相同unique的403表示命中了同一条acl, 相同unique的200表示default页面
WhiteStatus = []int{} // cmd input, 200
BlackStatus = []int{} // cmd input, 400,410
FuzzyStatus = []int{} // cmd input, 500,501,502,503
WAFStatus = []int{493, 418, 1020, 406, 429, 406, 412}
UniqueStatus = []int{} // 相同unique的403表示命中了同一条acl, 相同unique的200表示default页面
// plugins
EnableAllFingerEngine = false
)
var (
Rules map[string]string = make(map[string]string)
Dicts map[string][]string = make(map[string][]string)
wordlistCache = make(map[string][]string)
ruleCache = make(map[string][]rule.Expression)
BadExt = []string{".js", ".css", ".scss", ".,", ".jpeg", ".jpg", ".png", ".gif", ".svg", ".vue", ".ts", ".swf", ".pdf", ".mp4", ".zip", ".rar"}
BadURL = []string{";", "}", "\\n", "webpack://", "{", "www.w3.org", ".src", ".url", ".att", ".href", "location.href", "javascript:", "location:", ".createObject", ":location", ".path"}
ExtractRegexps = make(parsers.Extractors)
Extractors = make(parsers.Extractors)
FingerEngine *fingers.Engine
ActivePath []string
ContentTypeMap = map[string]string{
"application/javascript": "js",
"application/json": "json",
"application/xml": "xml",
"application/octet-stream": "bin",
"application/atom+xml": "atom",
"application/msword": "doc",
"application/pdf": "pdf",
"image/gif": "gif",
"image/jpeg": "jpg",
"image/png": "png",
"image/svg+xml": "svg",
"text/css": "css",
"text/plain": "txt",
"text/html": "html",
"audio/mpeg": "mp3",
"video/mp4": "mp4",
"video/ogg": "ogg",
"video/webm": "webm",
"video/x-ms-wmv": "wmv",
"video/avi": "avi",
"image/x-icon": "ico",
}
isPipedFromChrDev := (stat.Mode() & os.ModeCharDevice) == 0
isPipedFromFIFO := (stat.Mode() & os.ModeNamedPipe) != 0
return isPipedFromChrDev || isPipedFromFIFO
// from feroxbuster
randomUserAgent = []string{
"Mozilla/5.0 (Linux; Android 8.0.0; SM-G960F Build/R16NW) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.84 Mobile Safari/537.36",
"Mozilla/5.0 (iPhone; CPU iPhone OS 12_0 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/12.0 Mobile/15E148 Safari/604.1",
"Mozilla/5.0 (Windows Phone 10.0; Android 6.0.1; Microsoft; RM-1152) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Mobile Safari/537.36 Edge/15.15254",
"Mozilla/5.0 (Linux; Android 7.0; Pixel C Build/NRD90M; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/52.0.2743.98 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36 Edge/12.246",
"Mozilla/5.0 (X11; CrOS x86_64 8172.45.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.64 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_2) AppleWebKit/601.3.9 (KHTML, like Gecko) Version/9.0.2 Safari/601.3.9",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.111 Safari/537.36",
"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:15.0) Gecko/20100101 Firefox/15.0.1",
"Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)",
"Mozilla/5.0 (compatible; bingbot/2.0; +http://www.bing.com/bingbot.htm)",
"Mozilla/5.0 (compatible; Yahoo! Slurp; http://help.yahoo.com/help/us/ysearch/slurp)",
}
uacount = len(randomUserAgent)
DefaultUserAgent = randomUserAgent[rand.Intn(uacount)]
)
func Simhash(raw []byte) string {
sh := simhash.NewSimhash()
return fmt.Sprintf("%x", sh.GetSimhash(sh.NewWordFeatureSet(raw)))
// BS is a byte slice with a convenience String method (avoids repeated
// string(...) conversions at call sites).
type BS []byte

// String converts the byte slice to a string (copies the bytes).
func (b BS) String() string {
    return string(b)
}
const letters = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
@ -44,9 +116,8 @@ const (
func RandPath() string {
n := 16
b := make([]byte, n)
b[0] = byte(0x2f)
// A rand.Int63() generates 63 random bits, enough for letterIdMax letters!
for i, cache, remain := n-1, src.Int63(), letterIdMax; i >= 1; {
for i, cache, remain := n-1, src.Int63(), letterIdMax; i >= 0; {
if remain == 0 {
cache, remain = src.Int63(), letterIdMax
}
@ -80,50 +151,466 @@ func RandHost() string {
return *(*string)(unsafe.Pointer(&b))
}
var (
Md5Fingers map[string]string = make(map[string]string)
Mmh3Fingers map[string]string = make(map[string]string)
Fingers fingers.Fingers
)
func FilterJs(u string) bool {
if commonFilter(u) {
return true
}
func LoadTemplates() error {
var err error
Fingers, err = fingers.LoadFingers(LoadConfig("http"))
return false
}
func FilterUrl(u string) bool {
if commonFilter(u) {
return true
}
parsed, err := url.Parse(u)
if err != nil {
utils.Fatal(err.Error())
return true
} else {
ext := path.Ext(parsed.Path)
for _, e := range BadExt {
if strings.EqualFold(e, ext) {
return true
}
}
}
return false
}
for _, finger := range Fingers {
err := finger.Compile(ipcs.ParsePorts)
// CleanURL normalizes a crawled URL fragment: strips wrapping quotes,
// decodes the common percent/unicode encodings of "/", drops a trailing
// backslash, and truncates at the query or fragment. The original note:
// removing params/frag both saves url.Parse work and avoids accidental
// side effects from parameterized requests.
func CleanURL(u string) string {
    u = strings.Trim(u, `"`)
    u = strings.Trim(u, `'`)
    // the cheap "2f"/"2F" probe gates the six ReplaceAll passes
    if strings.Contains(u, "2f") || strings.Contains(u, "2F") {
        u = strings.ReplaceAll(u, "\\u002F", "/")
        u = strings.ReplaceAll(u, "\\u002f", "/")
        u = strings.ReplaceAll(u, "%252F", "/")
        u = strings.ReplaceAll(u, "%252f", "/")
        u = strings.ReplaceAll(u, "%2f", "/")
        u = strings.ReplaceAll(u, "%2F", "/")
    }
    u = strings.TrimRight(u, "\\")
    // query is checked before fragment, matching the original precedence
    if before, _, found := strings.Cut(u, "?"); found {
        return before
    }
    if before, _, found := strings.Cut(u, "#"); found {
        return before
    }
    return u
}
// commonFilter reports whether a crawled URL is junk: either an implausibly
// short absolute URL, or one containing any known-bad substring from BadURL.
func commonFilter(u string) bool {
    if strings.HasPrefix(u, "http") && len(u) < 15 {
        return true
    }
    for _, junk := range BadURL {
        if strings.Contains(u, junk) {
            return true
        }
    }
    return false
}
// BakGenerator produces every unique contiguous substring of domain (in
// start-then-length order) as candidate backup-file basenames.
func BakGenerator(domain string) []string {
    var out []string
    for start := range domain {
        for length := range domain[start:] {
            candidate := domain[start : start+length+1]
            if !iutils.StringsContains(out, candidate) {
                out = append(out, candidate)
            }
        }
    }
    return out
}
// MbTable is the 256-entry lookup table for the reflected CRC-16 polynomial
// 0xA001 (CRC-16/IBM family; combined with the 0xFFFF seed below this is the
// CRC-16/MODBUS variant).
var MbTable = []uint16{
    0x0000, 0xC0C1, 0xC181, 0x0140, 0xC301, 0x03C0, 0x0280, 0xC241,
    0xC601, 0x06C0, 0x0780, 0xC741, 0x0500, 0xC5C1, 0xC481, 0x0440,
    0xCC01, 0x0CC0, 0x0D80, 0xCD41, 0x0F00, 0xCFC1, 0xCE81, 0x0E40,
    0x0A00, 0xCAC1, 0xCB81, 0x0B40, 0xC901, 0x09C0, 0x0880, 0xC841,
    0xD801, 0x18C0, 0x1980, 0xD941, 0x1B00, 0xDBC1, 0xDA81, 0x1A40,
    0x1E00, 0xDEC1, 0xDF81, 0x1F40, 0xDD01, 0x1DC0, 0x1C80, 0xDC41,
    0x1400, 0xD4C1, 0xD581, 0x1540, 0xD701, 0x17C0, 0x1680, 0xD641,
    0xD201, 0x12C0, 0x1380, 0xD341, 0x1100, 0xD1C1, 0xD081, 0x1040,
    0xF001, 0x30C0, 0x3180, 0xF141, 0x3300, 0xF3C1, 0xF281, 0x3240,
    0x3600, 0xF6C1, 0xF781, 0x3740, 0xF501, 0x35C0, 0x3480, 0xF441,
    0x3C00, 0xFCC1, 0xFD81, 0x3D40, 0xFF01, 0x3FC0, 0x3E80, 0xFE41,
    0xFA01, 0x3AC0, 0x3B80, 0xFB41, 0x3900, 0xF9C1, 0xF881, 0x3840,
    0x2800, 0xE8C1, 0xE981, 0x2940, 0xEB01, 0x2BC0, 0x2A80, 0xEA41,
    0xEE01, 0x2EC0, 0x2F80, 0xEF41, 0x2D00, 0xEDC1, 0xEC81, 0x2C40,
    0xE401, 0x24C0, 0x2580, 0xE541, 0x2700, 0xE7C1, 0xE681, 0x2640,
    0x2200, 0xE2C1, 0xE381, 0x2340, 0xE101, 0x21C0, 0x2080, 0xE041,
    0xA001, 0x60C0, 0x6180, 0xA141, 0x6300, 0xA3C1, 0xA281, 0x6240,
    0x6600, 0xA6C1, 0xA781, 0x6740, 0xA501, 0x65C0, 0x6480, 0xA441,
    0x6C00, 0xACC1, 0xAD81, 0x6D40, 0xAF01, 0x6FC0, 0x6E80, 0xAE41,
    0xAA01, 0x6AC0, 0x6B80, 0xAB41, 0x6900, 0xA9C1, 0xA881, 0x6840,
    0x7800, 0xB8C1, 0xB981, 0x7940, 0xBB01, 0x7BC0, 0x7A80, 0xBA41,
    0xBE01, 0x7EC0, 0x7F80, 0xBF41, 0x7D00, 0xBDC1, 0xBC81, 0x7C40,
    0xB401, 0x74C0, 0x7580, 0xB541, 0x7700, 0xB7C1, 0xB681, 0x7640,
    0x7200, 0xB2C1, 0xB381, 0x7340, 0xB101, 0x71C0, 0x7080, 0xB041,
    0x5000, 0x90C1, 0x9181, 0x5140, 0x9301, 0x53C0, 0x5280, 0x9241,
    0x9601, 0x56C0, 0x5780, 0x9741, 0x5500, 0x95C1, 0x9481, 0x5440,
    0x9C01, 0x5CC0, 0x5D80, 0x9D41, 0x5F00, 0x9FC1, 0x9E81, 0x5E40,
    0x5A00, 0x9AC1, 0x9B81, 0x5B40, 0x9901, 0x59C0, 0x5880, 0x9841,
    0x8801, 0x48C0, 0x4980, 0x8941, 0x4B00, 0x8BC1, 0x8A81, 0x4A40,
    0x4E00, 0x8EC1, 0x8F81, 0x4F40, 0x8D01, 0x4DC0, 0x4C80, 0x8C41,
    0x4400, 0x84C1, 0x8581, 0x4540, 0x8701, 0x47C0, 0x4680, 0x8641,
    0x8201, 0x42C0, 0x4380, 0x8341, 0x4100, 0x81C1, 0x8081, 0x4040}

// CRC16Hash computes the table-driven CRC-16 of data with seed 0xFFFF.
func CRC16Hash(data []byte) uint16 {
    crc := uint16(0xffff)
    for _, v := range data {
        // fold the next byte: low byte of crc XOR input indexes the table
        crc = (crc >> 8) ^ MbTable[byte(crc)^v]
    }
    return crc
}
// SafePath concatenates dir and u, collapsing at most one leading slash on u
// so "dir/" + "/u" does not produce a double slash. It never inserts a
// separator: dir is assumed to already end with one when needed.
func SafePath(dir, u string) string {
    if strings.HasPrefix(u, "/") {
        return dir + u[1:]
    }
    return dir + u
}
// RelaPath joins a relative path onto base. path.Join is deliberately not
// used: runs like "////" may be meaningful routes and must be preserved.
// Behavior table (actual, verified against the implementation):
//
//	""   + "a"  -> "/a"
//	""   + "/a" -> "a"      (leading slash stripped when base is empty)
//	"/a/" + "b"  -> "/a/b"
//	"/a/" + "/b" -> "/a/b"
//	"/a"  + "b"  -> "/b"    (non-dir base is reduced to its Dir)
//	"/a"  + "/b" -> "/b"
func RelaPath(base, u string) string {
    if u == "" {
        return base
    }
    uSlash := strings.HasPrefix(u, "/")
    switch {
    case base == "":
        if uSlash {
            return u[1:]
        }
        return "/" + u
    case strings.HasSuffix(base, "/"):
        if uSlash {
            return base + u[1:]
        }
        return base + u
    default:
        prefix := Dir(base)
        if uSlash {
            return prefix + u[1:]
        }
        return prefix + u
    }
}

// Dir returns the directory part of u without normalizing repeated slashes;
// it is not an "up one level" helper. Examples:
//
//	"/a"  -> "/"
//	"/a/" -> "/a/"
//	"a/"  -> "a/"
//	"aaa" -> "/"
func Dir(u string) string {
    if strings.HasSuffix(u, "/") {
        return u
    }
    i := strings.LastIndex(u, "/")
    if i == -1 {
        return "/"
    }
    return u[:i+1]
}
func FormatURL(base, u string) string {
if strings.HasPrefix(u, "http") {
parsed, err := url.Parse(u)
if err != nil {
return err
return ""
}
return parsed.Path
} else if strings.HasPrefix(u, "//") {
parsed, err := url.Parse(u)
if err != nil {
return ""
}
return parsed.Path
} else if strings.HasPrefix(u, "/") {
// 绝对目录拼接
// 不需要进行处理, 用来跳过下面的判断
return u
} else if strings.HasPrefix(u, "./") {
// "./"相对目录拼接
return RelaPath(base, u[2:])
} else if strings.HasPrefix(u, "../") {
return path.Join(Dir(base), u)
} else {
// 相对目录拼接
return RelaPath(base, u)
}
}
for _, f := range Fingers {
for _, rule := range f.Rules {
if rule.Favicon != nil {
for _, mmh3 := range rule.Favicon.Mmh3 {
Mmh3Fingers[mmh3] = f.Name
// BaseURL reassembles only the scheme and host of u,
// e.g. "https://example.com:8080".
func BaseURL(u *url.URL) string {
	var b strings.Builder
	b.WriteString(u.Scheme)
	b.WriteString("://")
	b.WriteString(u.Host)
	return b.String()
}
for _, md5 := range rule.Favicon.Md5 {
Md5Fingers[md5] = f.Name
func RandomUA() string {
return randomUserAgent[rand.Intn(uacount)]
}
// CompareWithExpr evaluates a pre-compiled expr program against params and
// reports whether it evaluated to boolean true. Evaluation errors are
// logged as warnings and treated as false (best-effort, never fatal).
func CompareWithExpr(exp *vm.Program, params map[string]interface{}) bool {
	res, err := expr.Run(exp, params)
	if err != nil {
		logs.Log.Warn(err.Error())
	}
	// res is interface{}: this comparison is true only for a bool true, so
	// nil (error path) or any non-bool result yields false — same behavior
	// as the former `if res == true { return true } else { return false }`.
	return res == true
}
return nil
// MatchWithGlobs reports whether u matches at least one of the shell-style
// patterns in globs. Malformed patterns are skipped rather than reported.
func MatchWithGlobs(u string, globs []string) bool {
	for _, pattern := range globs {
		if matched, err := filepath.Match(pattern, u); err == nil && matched {
			return true
		}
	}
	return false
}
func FingerDetect(content string) Frameworks {
var frames Frameworks
//content := string(body)
for _, finger := range Fingers {
frame, _, ok := fingers.FingerMatcher(finger, content, 0, nil)
if ok {
frames = append(frames, frame)
// ParseRawResponse parses a raw HTTP response byte dump into an
// *http.Response. The caller owns resp.Body and must close it after use.
func ParseRawResponse(raw []byte) (*http.Response, error) {
	reader := bufio.NewReader(bytes.NewReader(raw))
	resp, err := http.ReadResponse(reader, nil)
	if err != nil {
		return nil, err
	}
	// Fix: do not `defer resp.Body.Close()` here — it fired before the
	// caller could read the body, so every read hit
	// http.ErrBodyReadAfterClose. Closing is now the caller's job.
	return resp, nil
}
// GetPresetWordList expands the given preset keys into one combined word
// list via mask.SpecialWords; unknown keys are silently skipped.
func GetPresetWordList(key []string) []string {
	var wordlist []string
	for _, k := range key {
		if v, ok := mask.SpecialWords[k]; ok {
			wordlist = append(wordlist, v...)
		}
	}
	// Fix: removed a stray `return frames` line (leftover scrambled in
	// from FingerDetect) that referenced an undefined variable and made
	// the real return unreachable.
	return wordlist
}
// ParseExtension returns everything after the FIRST dot in s
// ("a.tar.gz" -> "tar.gz"), or "" when s contains no dot.
func ParseExtension(s string) string {
	idx := strings.IndexByte(s, '.')
	if idx < 0 {
		return ""
	}
	return s[idx+1:]
}
// ParseStatus applies the status-filter expression in changed to preset
// and returns the deduplicated result.
//
// Syntax of changed:
//   "+a,b"  append a and b to preset
//   "!a,b"  remove a and b from preset
//   "a,b"   replace preset entirely
// A token may end in "*" (e.g. "40*") to declare a wildcard status; the
// "*" is stripped and the numeric prefix kept. Unparsable tokens become 0
// and are ignored.
func ParseStatus(preset []int, changed string) []int {
	if changed == "" {
		return preset
	}
	// parseToken returns (value, isPrefix); value 0 marks a bad token.
	parseToken := func(tok string) (int, bool) {
		tok = strings.TrimSpace(tok)
		if strings.HasSuffix(tok, "*") {
			if v, err := strconv.Atoi(tok[:len(tok)-1]); err == nil {
				return v, true
			}
			return 0, false
		}
		if v, err := strconv.Atoi(tok); err == nil {
			return v, false
		}
		return 0, false
	}
	switch {
	case strings.HasPrefix(changed, "+"):
		for _, tok := range strings.Split(changed[1:], ",") {
			if v, _ := parseToken(tok); v != 0 {
				preset = append(preset, v)
			}
		}
	case strings.HasPrefix(changed, "!"):
		for _, tok := range strings.Split(changed[1:], ",") {
			if v, _ := parseToken(tok); v != 0 {
				// In-place filter reusing preset's backing array.
				kept := preset[:0]
				for _, existing := range preset {
					if existing != v {
						kept = append(kept, existing)
					}
				}
				preset = kept
			}
		}
	default:
		preset = nil
		for _, tok := range strings.Split(changed, ",") {
			if v, _ := parseToken(tok); v != 0 {
				preset = append(preset, v)
			}
		}
	}
	return UniqueInts(preset)
}
// UniqueInts returns input with duplicates removed, preserving the order
// of first occurrence.
func UniqueInts(input []int) []int {
	out := make([]int, 0, len(input))
	seen := make(map[int]struct{}, len(input))
	for _, v := range input {
		if _, dup := seen[v]; dup {
			continue
		}
		seen[v] = struct{}{}
		out = append(out, v)
	}
	return out
}
// StatusContain reports whether status matches any filter in preset.
// An empty preset matches everything. Filters below 100 act as prefixes:
// 5 matches 5xx, 51 matches 51x; values >= 100 must match exactly.
func StatusContain(preset []int, status int) bool {
	if len(preset) == 0 {
		return true
	}
	for _, filter := range preset {
		switch {
		case filter < 10:
			if status/100 == filter {
				return true
			}
		case filter < 100:
			if status/10 == filter {
				return true
			}
		default:
			if status == filter {
				return true
			}
		}
	}
	return false
}
// LoadFileToSlice reads filename into a slice of trimmed lines. When the
// name matches a built-in preset dictionary (Dicts), the preset wins and
// a warning is logged if an on-disk file with the same name also exists.
func LoadFileToSlice(filename string) ([]string, error) {
	if preset, ok := Dicts[filename]; ok {
		if files.IsExist(filename) {
			logs.Log.Warnf("load and overwrite %s from preset", filename)
		}
		return preset, nil
	}
	content, err := ioutil.ReadFile(filename)
	if err != nil {
		return nil, err
	}
	lines := strings.Split(strings.TrimSpace(string(content)), "\n")
	// Trim each line to normalize Windows CRLF vs Unix LF endings.
	for i := range lines {
		lines[i] = strings.TrimSpace(lines[i])
	}
	return lines, nil
}
// LoadRuleAndCombine concatenates the named rule sources into one blob,
// one trimmed block per line. Names present in the built-in Rules map take
// precedence; anything else is treated as a file path.
func LoadRuleAndCombine(filename []string) (string, error) {
	var combined bytes.Buffer
	for _, name := range filename {
		if preset, ok := Rules[name]; ok {
			combined.WriteString(strings.TrimSpace(preset))
		} else {
			raw, err := ioutil.ReadFile(name)
			if err != nil {
				return "", err
			}
			combined.Write(bytes.TrimSpace(raw))
		}
		combined.WriteString("\n")
	}
	return combined.String(), nil
}
// loadFileWithCache returns the cached word list for filename, loading it
// via LoadFileToSlice and caching in Dicts on first use.
func loadFileWithCache(filename string) ([]string, error) {
	if cached, ok := Dicts[filename]; ok {
		return cached, nil
	}
	loaded, err := LoadFileToSlice(filename)
	if err != nil {
		return nil, err
	}
	Dicts[filename] = loaded
	return loaded, nil
}
// loadDictionaries loads every named dictionary (through the cache) and
// returns them in the same order as filenames.
func loadDictionaries(filenames []string) ([][]string, error) {
	result := make([][]string, 0, len(filenames))
	for _, name := range filenames {
		words, err := loadFileWithCache(name)
		if err != nil {
			return nil, err
		}
		result = append(result, words)
	}
	return result, nil
}
// LoadWordlist expands the mask expression word against the named
// dictionaries and caches the generated list.
func LoadWordlist(word string, dictNames []string) ([]string, error) {
	// The cache key must combine the expression and the dictionary set:
	// the same mask over different dictionaries yields different lists.
	key := word + strings.Join(dictNames, ",")
	if wl, ok := wordlistCache[key]; ok {
		return wl, nil
	}
	dicts, err := loadDictionaries(dictNames)
	if err != nil {
		return nil, err
	}
	wl, err := mask.Run(word, dicts, nil)
	if err != nil {
		return nil, err
	}
	// Fix: the result was previously stored under `word` alone while the
	// lookup used the composite key, so the cache never hit and entries
	// for different dictionary sets could collide.
	wordlistCache[key] = wl
	return wl, nil
}
// LoadRuleWithFiles reads and concatenates the given rule files, compiles
// them with filter, and caches the compiled expressions per (files, filter)
// combination.
func LoadRuleWithFiles(ruleFiles []string, filter string) ([]rule.Expression, error) {
	// filter is part of the key: the same files compiled with a different
	// filter produce different expression sets.
	key := strings.Join(ruleFiles, ",") + "#" + filter
	if cached, ok := ruleCache[key]; ok {
		return cached, nil
	}
	var buf bytes.Buffer
	for _, filename := range ruleFiles {
		content, err := ioutil.ReadFile(filename)
		if err != nil {
			return nil, err
		}
		buf.Write(content)
		buf.WriteString("\n")
	}
	compiled := rule.Compile(buf.String(), filter).Expressions
	// Fix: the cache was checked but never written, so every call re-read
	// and recompiled the rule files from disk.
	ruleCache[key] = compiled
	return compiled, nil
}
// WrapWordsFunc lifts a string->string transform into a word-generator
// returning a single-element slice, matching the []string generator API.
func WrapWordsFunc(f func(string) string) func(string) []string {
	return func(word string) []string {
		result := make([]string, 1)
		result[0] = f(word)
		return result
	}
}
// SafeFilename converts a URL into a filesystem-safe name: the scheme
// prefix is stripped, then ":" and "/" are replaced with "_".
// Substitutions run sequentially, in order, to match the original pass.
func SafeFilename(filename string) string {
	for _, sub := range [][2]string{
		{"http://", ""},
		{"https://", ""},
		{":", "_"},
		{"/", "_"},
	} {
		filename = strings.ReplaceAll(filename, sub[0], sub[1])
	}
	return filename
}

View File

@ -1,7 +1,20 @@
//go:generate go run pkg/templates_gen.go
//go:generate go run templates/templates_gen.go -t templates -o pkg/templates.go -need spray
package main
import "github.com/chainreactors/spray/cmd"
import (
"github.com/chainreactors/spray/cmd"
"github.com/gookit/config/v2"
"github.com/gookit/config/v2/yaml"
//_ "net/http/pprof"
)
func init() {
config.WithOptions(func(opt *config.Options) {
opt.DecoderConfig.TagName = "config"
opt.ParseDefault = true
})
config.AddDriver(yaml.Driver)
}
func main() {
//f, _ := os.Create("cpu.txt")

1
templates Submodule

@ -0,0 +1 @@
Subproject commit fe95f1f22d18b6cf2046b004191f5bd745f1c578