Compare commits

..

82 Commits

Author SHA1 Message Date
M09Ic
dfa3b2da56
Merge pull request #111 from chainreactors/dev
merge v1.2.2
2025-06-06 13:30:01 +08:00
M09Ic
57c67fe36a fix: cicd releaser 2025-06-05 13:59:13 +08:00
M09Ic
20b70d0dcd feat: support unique when format result, https://github.com/chainreactors/spray/issues/104 2025-06-05 13:52:30 +08:00
M09Ic
0a833b0326 feat: support wildcard status codes, https://github.com/chainreactors/spray/issues/38 2025-06-05 13:45:48 +08:00
M09Ic
fd7b603e02 feat: recover fuzzuli generate algorithm 2025-06-05 12:55:36 +08:00
M09Ic
10540f942c fix: csv format , https://github.com/chainreactors/spray/issues/97 2025-06-05 12:46:53 +08:00
M09Ic
ff06fd1902 fix: addition word not safe join path, https://github.com/chainreactors/spray/issues/106 2025-06-05 12:34:06 +08:00
M09Ic
104d41380e fix: null word not continue, https://github.com/chainreactors/spray/issues/108 2025-06-05 12:29:17 +08:00
M09Ic
d21dd493f6 fix: -e not expect , https://github.com/chainreactors/spray/issues/108 2025-06-05 12:29:02 +08:00
M09Ic
cd58c410d8 fix: fix log and option 2025-06-05 11:41:11 +08:00
M09Ic
5c2e377d0d chore: add black,white,unique short flag 2025-05-20 00:23:25 +08:00
M09Ic
31f48d4b06 chore: append-rule add short flag -R 2025-05-12 23:03:17 +08:00
M09Ic
f7c1034310 fix: crawl and url not print 2025-04-18 21:20:00 +08:00
M09Ic
fb63ed010c
Update README.md 2025-04-10 19:30:21 +08:00
M09Ic
0d700f8ea0
Update README.md 2025-03-04 15:10:37 +08:00
M09Ic
c82f0564f5
Merge pull request #95 from chainreactors/dev
merge v1.2.1
2025-03-04 15:06:24 +08:00
M09Ic
08ce95b43d fix: check pool net set headers 2025-03-04 14:58:28 +08:00
M09Ic
c746c26ff9 fix: brute init not set header, https://github.com/chainreactors/spray/issues/94 2025-03-04 14:56:48 +08:00
M09Ic
b13903ea98 fix: map panic, https://github.com/chainreactors/spray/issues/93 2025-03-04 14:45:31 +08:00
M09Ic
e951b68e75
Merge pull request #92 from chainreactors/dev
fix: adapt template yaml
2025-02-23 00:08:33 +08:00
M09Ic
0e9d094dd1 fix: adapt template yaml 2025-02-23 00:02:20 +08:00
M09Ic
72720a942d
Merge pull request #91 from chainreactors/dev
merge v1.2.0
2025-02-22 21:14:47 +08:00
M09Ic
6c5811f1d2 ci: fix golang version go1.20 2025-02-22 21:05:39 +08:00
M09Ic
ef69d46b2a ci: fix golang version go1.20 2025-02-22 21:01:00 +08:00
M09Ic
ff1e596380 feat: support proxyclient for http and fasthttp 2025-02-22 20:58:24 +08:00
M09Ic
f1b9400e19 refactor: remove internal pkg, use engine replace
fix: chunk mod not read
fix: nil bar panic
enhance: add default accept and user-agent
2025-02-22 20:31:32 +08:00
M09Ic
c07c2305af ci: update gorelease go version to 1.20 2025-02-22 14:41:37 +08:00
M09Ic
3087ec32d1 chore: improve format output 2025-02-22 14:38:24 +08:00
M09Ic
286710f5ec fix: init failed bar not close and total bar not wait 2025-02-22 14:01:10 +08:00
M09Ic
5f8f5c7795 chore: improve format output 2025-02-22 02:50:50 +08:00
M09Ic
0f1e6b8333 fix: try fix deadlock, thanks https://github.com/chainreactors/spray/pull/89 2025-02-22 02:49:50 +08:00
M09Ic
7621514bd9
Merge pull request #79 from chainreactors/dev
merge v1.1.6
2024-11-01 13:54:13 +08:00
M09Ic
de12d568ce enhance: add hard exit, https://github.com/chainreactors/spray/issues/78 2024-11-01 12:30:55 +08:00
M09Ic
02162cffd6 revert: not same redirect banned 2024-11-01 12:27:31 +08:00
M09Ic
9e74a17096 fix: path join not expect 2024-11-01 12:25:53 +08:00
M09Ic
0ca5c02de7 enhance: skip not same host redirect 2024-10-30 16:11:05 +08:00
M09Ic
5cb9aa119d fix: not same domain filtered 2024-10-30 15:57:32 +08:00
M09Ic
6bbc6141ac enhance: add 404 default fuzzystatus, 429 waf status 2024-10-16 14:47:29 +08:00
M09Ic
af82ae43b9 enhance probe output 2024-10-14 02:20:39 +08:00
M09Ic
e483bb4439 baseline add from and parent prop 2024-10-14 02:20:16 +08:00
M09Ic
344e560471 add --append-depth limit append recu depth 2024-10-14 01:54:57 +08:00
M09Ic
2a68d0b49b
Merge pull request #76 from chainreactors/dev
merge v1.1.3
2024-09-28 10:58:07 +08:00
M09Ic
f1684ffeb4 fix brutepool baseurl bug 2024-09-23 16:47:25 +08:00
M09Ic
a4b9e77029 fix no-stat not work 2024-09-23 16:25:06 +08:00
M09Ic
24eade89d0 clean fallback print and fix multi print 2024-09-23 16:19:41 +08:00
M09Ic
ed3e95f21d -q work for config print 2024-09-23 16:10:00 +08:00
M09Ic
fcce861ae3 fix stat Found bug 2024-09-23 16:08:43 +08:00
M09Ic
7693b4d38f fix checkpool time not work 2024-09-23 15:02:44 +08:00
M09Ic
2f28b0ec3c
Merge pull request #74 from chainreactors/dev
merge v1.1.2
2024-09-10 18:03:59 +08:00
M09Ic
a942bac337 add config panel 2024-09-10 17:59:38 +08:00
M09Ic
2de8822b01 fix %EXT% not work in plugin , https://github.com/chainreactors/spray/issues/63 2024-09-10 16:47:49 +08:00
M09Ic
29db702744 fix init timeout not work, https://github.com/chainreactors/spray/issues/58 2024-09-10 15:41:48 +08:00
M09Ic
5cf02cbbcb fix init panic when request failed
https://github.com/chainreactors/spray/issues/73
https://github.com/chainreactors/spray/issues/72
https://github.com/chainreactors/spray/issues/71
2024-09-10 14:14:01 +08:00
M09Ic
2e8a923bac
Merge pull request #67 from chainreactors/dev
merge v1.1.1
2024-08-29 14:38:18 +08:00
M09Ic
4a0c8f86eb support csv and fix fuzzy output 2024-08-29 14:04:56 +08:00
M09Ic
b4c6a77a98 fix host mod not work, https://github.com/chainreactors/spray/issues/63 2024-08-29 01:43:16 +08:00
M09Ic
d6e7e58b18 fix fuzzy output when not --fuzzy flag 2024-08-29 01:43:16 +08:00
M09Ic
15110ab895 fix fasthttp socket timeout, https://github.com/chainreactors/spray/issues/58 2024-08-29 01:43:15 +08:00
M09Ic
f6037d7a1e
Merge pull request #65 from chainreactors/dev
merge v1.1.0
2024-08-27 14:30:40 +08:00
M09Ic
da71cbc575 fix -a not load recon bug 2024-08-26 02:25:02 +08:00
M09Ic
106f007693 union load appendwords and dict 2024-08-26 02:22:35 +08:00
M09Ic
77a5e58a2a add --print print all preset config 2024-08-26 01:47:39 +08:00
M09Ic
06bd9820e7 adapt spray_dict load 2024-08-26 01:32:11 +08:00
M09Ic
105c426396 refactor plugin 2024-08-26 01:20:03 +08:00
M09Ic
491b8c16a5 enhance crawl and append 2024-08-26 00:33:01 +08:00
M09Ic
1c28898631 refactor output and format 2024-08-26 00:04:44 +08:00
M09Ic
de168e0be9 enhance basepool and brutepool structure 2024-08-25 23:06:10 +08:00
M09Ic
678a6a44e4 fix break error print limit 2024-08-21 16:52:20 +08:00
M09Ic
57eab148ac enhance http tls performance 2024-08-21 15:57:41 +08:00
M09Ic
bf6d1c5f0b
Merge pull request #57 from chainreactors/dev
merge v1.0.2
2024-08-20 16:53:55 +08:00
M09Ic
937855c075 fix brute mod not work 2024-08-20 16:42:32 +08:00
M09Ic
32f558f9c5 fix doUpgrade deadlock 2024-08-16 00:32:53 +08:00
M09Ic
8bf4b374ac
Merge pull request #54 from chainreactors/dev
merge v1.0.1
2024-08-06 16:53:00 +08:00
M09Ic
3791b765ea
Merge pull request #52 from chainreactors/dev
merge v1.0.0
2024-07-29 17:08:42 +08:00
M09Ic
bb98110292
Merge pull request #45 from chainreactors/dev
rm same status with random baseline filter
2024-07-04 15:40:23 +08:00
M09Ic
0d4a3652ce
Merge pull request #41 from chainreactors/dev
merge v0.9.8
2024-06-24 14:40:36 +08:00
M09Ic
c4d4efe6b7
Merge pull request #27 from chainreactors/dev
merge v0.9.5
2024-03-04 20:05:57 +08:00
M09Ic
4e28fb59b4
Merge pull request #26 from chainreactors/dev
merge v0.9.4
2024-02-20 19:17:56 +08:00
M09Ic
be19895446
Merge pull request #24 from chainreactors/dev
merge v0.9.3
2024-02-12 17:06:42 +08:00
M09Ic
63b39cead1
Merge pull request #9 from chainreactors/dev
merge v0.9.1
2023-06-12 10:46:24 +08:00
M09Ic
c5bbe36289
Merge pull request #5 from chainreactors/issue4
fix. url list file parse bug
2023-06-06 20:34:51 +08:00
M09Ic
1eddc5fcd3 fix. url list file parse bug 2023-06-03 22:33:38 +08:00
32 changed files with 1620 additions and 1218 deletions

View File

@ -8,7 +8,7 @@ on:
jobs:
goreleaser:
runs-on: ubuntu-20.04
runs-on: ubuntu-22.04
steps:
-
name: Checkout
@ -26,7 +26,7 @@ jobs:
name: Set up Go
uses: actions/setup-go@v3
with:
go-version: 1.21
go-version: "1.20"
-
name: Run GoReleaser
uses: goreleaser/goreleaser-action@v4

View File

@ -25,12 +25,17 @@ builds:
ldflags: "-s -w -X 'github.com/chainreactors/spray/cmd.ver=={{ .Tag }}'"
flags:
- -trimpath
- -tags=forceposix
asmflags:
- all=-trimpath={{.Env.GOPATH}}
gcflags:
- all=-trimpath={{.Env.GOPATH}}
no_unique_dist_dir: true
env:
- CGO_ENABLED=0
tags:
- forceposix
- osusergo
- netgo
upx:
-

View File

@ -1,5 +1,10 @@
# SPRAY
blog posts:
- https://chainreactors.github.io/wiki/blog/2024/07/24/fingers-introduce/
- https://chainreactors.github.io/wiki/blog/2024/08/25/spray-best-practices/
![](https://socialify.git.ci/chainreactors/spray/image?description=1&font=Inter&forks=1&issues=1&language=1&name=1&owner=1&pattern=Circuit%20Board&pulls=1&stargazers=1&theme=Light)
<p align="center">
@ -11,6 +16,8 @@
## Features
**The most usable, intelligent, and controllable directory brute-forcing tool**
* Extremely high performance: in local stress tests it outperforms ffuf and feroxbuster by more than 50%. Real-world results are network-bound, so the gap feels less dramatic, but the difference is clear when spraying many targets.
* Mask-based dictionary generation
* Rule-based dictionary generation

View File

@ -5,9 +5,10 @@ import (
"fmt"
"github.com/chainreactors/files"
"github.com/chainreactors/logs"
"github.com/chainreactors/spray/internal"
"github.com/chainreactors/spray/internal/ihttp"
"github.com/chainreactors/spray/core"
"github.com/chainreactors/spray/core/ihttp"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/utils/iutils"
"github.com/jessevdk/go-flags"
"os"
"os/signal"
@ -15,7 +16,7 @@ import (
"time"
)
var ver = "v1.0.1"
var ver = "dev"
var DefaultConfig = "config.yaml"
func init() {
@ -27,11 +28,11 @@ func init() {
}
func Spray() {
var option internal.Option
var option core.Option
if files.IsExist(DefaultConfig) {
logs.Log.Debug("config.yaml exist, loading")
err := internal.LoadConfig(DefaultConfig, &option)
err := core.LoadConfig(DefaultConfig, &option)
if err != nil {
logs.Log.Error(err.Error())
return
@ -75,14 +76,14 @@ func Spray() {
}
// logs
logs.AddLevel(pkg.LogVerbose, "verbose", "[=] %s {{suffix}}")
logs.AddLevel(pkg.LogVerbose, "verbose", "[=] %s {{suffix}}\n")
if option.Debug {
logs.Log.SetLevel(logs.Debug)
} else if len(option.Verbose) > 0 {
logs.Log.SetLevel(pkg.LogVerbose)
}
if option.InitConfig {
configStr := internal.InitDefaultConfig(&option, 0)
configStr := core.InitDefaultConfig(&option, 0)
err := os.WriteFile(DefaultConfig, []byte(configStr), 0o744)
if err != nil {
logs.Log.Warn("cannot create config: config.yaml, " + err.Error())
@ -94,8 +95,10 @@ func Spray() {
logs.Log.Info("init default config: ./config.yaml")
return
}
defer time.Sleep(time.Second)
if option.Config != "" {
err := internal.LoadConfig(option.Config, &option)
err := core.LoadConfig(option.Config, &option)
if err != nil {
logs.Log.Error(err.Error())
return
@ -112,8 +115,23 @@ func Spray() {
return
}
if option.PrintPreset {
err = pkg.Load()
if err != nil {
iutils.Fatal(err.Error())
}
err = pkg.LoadFingers()
if err != nil {
iutils.Fatal(err.Error())
}
core.PrintPreset()
return
}
if option.Format != "" {
internal.Format(option.Format, !option.NoColor)
core.Format(option)
return
}
@ -128,11 +146,20 @@ func Spray() {
logs.Log.Errorf(err.Error())
return
}
if option.ReadAll || runner.Crawl {
if option.ReadAll || runner.CrawlPlugin {
ihttp.DefaultMaxBodySize = -1
}
ctx, canceler := context.WithTimeout(context.Background(), time.Duration(runner.Deadline)*time.Second)
go func() {
select {
case <-ctx.Done():
time.Sleep(10 * time.Second)
logs.Log.Errorf("deadline and timeout not work, hard exit!!!")
os.Exit(0)
}
}()
go func() {
exitChan := make(chan os.Signal, 2)
signal.Notify(exitChan, os.Interrupt, syscall.SIGTERM)
@ -159,5 +186,4 @@ func Spray() {
return
}
time.Sleep(1 * time.Second)
}
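The cmd.go hunk above adds a hard-exit watchdog around the --deadline context (commit "enhance: add hard exit", issue #78): once the deadline fires, the runner gets a 10-second grace period and the process is then terminated so a hung pool cannot keep it alive. A minimal standalone sketch of the same pattern, using only the standard library; the 30-second deadline and the run function are placeholders, not spray's API:

package main

import (
    "context"
    "log"
    "os"
    "time"
)

func main() {
    deadline := 30 * time.Second // stand-in for --deadline (seconds)
    ctx, cancel := context.WithTimeout(context.Background(), deadline)
    defer cancel()

    // watchdog: if the deadline fires and shutdown still has not finished
    // after a 10-second grace period, force the process to exit.
    go func() {
        <-ctx.Done()
        time.Sleep(10 * time.Second)
        log.Println("deadline reached and shutdown did not finish, hard exit")
        os.Exit(0)
    }()

    run(ctx) // placeholder for the real runner loop
}

// run simulates a task that respects the context but may take a while.
func run(ctx context.Context) {
    select {
    case <-ctx.Done():
    case <-time.After(5 * time.Second):
    }
}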

View File

@ -61,6 +61,8 @@ output:
no-color: false
# Bool, No progress bar
no-bar: false
# Bool, No stat
no-stat: true
plugins:
# Bool, enable all plugin
all: false

View File

@ -1,14 +1,16 @@
package pkg
package baseline
import (
"bytes"
"github.com/chainreactors/fingers/common"
"github.com/chainreactors/parsers"
"github.com/chainreactors/spray/internal/ihttp"
"github.com/chainreactors/spray/core/ihttp"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/utils/encode"
"github.com/chainreactors/utils/iutils"
"net/http"
"net/url"
"strconv"
"strings"
)
@ -23,7 +25,7 @@ func NewBaseline(u, host string, resp *ihttp.Response) *Baseline {
},
}
if t, ok := ContentTypeMap[resp.ContentType()]; ok {
if t, ok := pkg.ContentTypeMap[resp.ContentType()]; ok {
bl.ContentType = t
bl.Title = t + " data"
} else {
@ -34,7 +36,6 @@ func NewBaseline(u, host string, resp *ihttp.Response) *Baseline {
bl.Header = make([]byte, len(header))
copy(bl.Header, header)
bl.HeaderLength = len(bl.Header)
if i := resp.ContentLength(); ihttp.CheckBodySize(i) {
if body := resp.Body(); body != nil {
bl.Body = make([]byte, len(body))
@ -50,10 +51,10 @@ func NewBaseline(u, host string, resp *ihttp.Response) *Baseline {
}
bl.Raw = append(bl.Header, bl.Body...)
bl.Response, err = ParseRawResponse(bl.Raw)
bl.Response, err = pkg.ParseRawResponse(bl.Raw)
if err != nil {
bl.IsValid = false
bl.Reason = ErrResponseError.Error()
bl.Reason = pkg.ErrResponseError.Error()
bl.ErrString = err.Error()
return bl
}
@ -73,7 +74,7 @@ func NewBaseline(u, host string, resp *ihttp.Response) *Baseline {
}
} else {
bl.IsValid = false
bl.Reason = ErrUrlError.Error()
bl.Reason = pkg.ErrUrlError.Error()
bl.ErrString = err.Error()
}
bl.Unique = UniqueHash(bl)
@ -113,18 +114,20 @@ func NewInvalidBaseline(u, host string, resp *ihttp.Response, reason string) *Ba
type Baseline struct {
*parsers.SprayResult
Url *url.URL `json:"-"`
Dir bool `json:"-"`
Chunked bool `json:"-"`
Body BS `json:"-"`
Header BS `json:"-"`
Raw BS `json:"-"`
Response *http.Response `json:"-"`
Recu bool `json:"-"`
RecuDepth int `json:"-"`
URLs []string `json:"-"`
Collected bool `json:"-"`
Retry int `json:"-"`
Url *url.URL `json:"-"`
Dir bool `json:"-"`
Chunked bool `json:"-"`
Body pkg.BS `json:"-"`
Header pkg.BS `json:"-"`
Raw pkg.BS `json:"-"`
Response *http.Response `json:"-"`
Recu bool `json:"-"`
RecuDepth int `json:"-"`
URLs []string `json:"-"`
Collected bool `json:"-"`
Retry int `json:"-"`
SameRedirectDomain bool `json:"-"`
IsBaseline bool `json:"-"`
}
func (bl *Baseline) IsDir() bool {
@ -145,10 +148,10 @@ func (bl *Baseline) Collect() {
if bl.ContentType == "html" || bl.ContentType == "json" || bl.ContentType == "txt" {
// the fingerprint library was not designed for js/css fingerprints; skipping unnecessary fingerprint collection reduces false positives and improves performance
//fmt.Println(bl.Source, bl.Url.String()+bl.Path, bl.RedirectURL, "call fingersengine")
if EnableAllFingerEngine {
bl.Frameworks = EngineDetect(bl.Raw)
if pkg.EnableAllFingerEngine {
bl.Frameworks = pkg.EngineDetect(bl.Raw)
} else {
bl.Frameworks = FingersDetect(bl.Raw)
bl.Frameworks = pkg.FingersDetect(bl.Raw)
}
}
@ -156,14 +159,14 @@ func (bl *Baseline) Collect() {
if bl.ContentType == "html" {
bl.Title = iutils.AsciiEncode(parsers.MatchTitle(bl.Body))
} else if bl.ContentType == "ico" {
if frame := FingerEngine.Favicon().Match(bl.Body); frame != nil {
if frame := pkg.FingerEngine.Favicon().Match(bl.Body); frame != nil {
bl.Frameworks.Merge(frame)
}
}
}
bl.Hashes = parsers.NewHashes(bl.Raw)
bl.Extracteds = Extractors.Extract(string(bl.Raw))
bl.Extracteds.Merge(pkg.Extractors.Extract(string(bl.Raw), true))
bl.Unique = UniqueHash(bl)
}
@ -171,21 +174,21 @@ func (bl *Baseline) CollectURL() {
if len(bl.Body) == 0 {
return
}
for _, reg := range ExtractRegexps["js"][0].CompiledRegexps {
for _, reg := range pkg.ExtractRegexps["js"][0].CompiledRegexps {
urls := reg.FindAllStringSubmatch(string(bl.Body), -1)
for _, u := range urls {
u[1] = CleanURL(u[1])
if u[1] != "" && !FilterJs(u[1]) {
u[1] = pkg.CleanURL(u[1])
if u[1] != "" && !pkg.FilterJs(u[1]) {
bl.URLs = append(bl.URLs, u[1])
}
}
}
for _, reg := range ExtractRegexps["url"][0].CompiledRegexps {
for _, reg := range pkg.ExtractRegexps["url"][0].CompiledRegexps {
urls := reg.FindAllStringSubmatch(string(bl.Body), -1)
for _, u := range urls {
u[1] = CleanURL(u[1])
if u[1] != "" && !FilterUrl(u[1]) {
u[1] = pkg.CleanURL(u[1])
if u[1] != "" && !pkg.FilterUrl(u[1]) {
bl.URLs = append(bl.URLs, u[1])
}
}
@ -235,6 +238,15 @@ func (bl *Baseline) Compare(other *Baseline) int {
return -1
}
func (bl *Baseline) ProbeOutput(format []string) string {
var s strings.Builder
for _, f := range format {
s.WriteString("\t")
s.WriteString(bl.Get(f))
}
return strings.TrimSpace(s.String())
}
var Distance uint8 = 5 // the smaller the number, the more similar; 0 means identical.
func (bl *Baseline) FuzzyCompare(other *Baseline) bool {
@ -244,3 +256,9 @@ func (bl *Baseline) FuzzyCompare(other *Baseline) bool {
}
return false
}
func UniqueHash(bl *Baseline) uint16 {
// hash built from host + status code + redirect URL + content-type + title + body length truncated to a multiple of 10
// the body length may cause some false positives; there is no better solution for now
return pkg.CRC16Hash([]byte(bl.Host + strconv.Itoa(bl.Status) + bl.RedirectURL + bl.ContentType + bl.Title + strconv.Itoa(bl.BodyLength/10*10)))
}
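The UniqueHash helper above truncates the body length to a multiple of 10 before hashing, so responses whose lengths differ only in the last digit collapse into one bucket; this is the key behind the unique/unique-status de-duplication. A small sketch of that behavior, relying only on the pkg.CRC16Hash call visible in this diff; the host, title, and lengths are made-up values:

package main

import (
    "fmt"
    "strconv"

    "github.com/chainreactors/spray/pkg"
)

// key reproduces the fields UniqueHash feeds into pkg.CRC16Hash; the /10*10
// truncation drops the last digit of the body length.
func key(host string, status int, redirect, contentType, title string, bodyLength int) uint16 {
    return pkg.CRC16Hash([]byte(host + strconv.Itoa(status) + redirect + contentType + title + strconv.Itoa(bodyLength/10*10)))
}

func main() {
    a := key("example.com", 200, "", "html", "Welcome", 1234) // made-up response
    b := key("example.com", 200, "", "html", "Welcome", 1238) // length differs by 4
    fmt.Println(a == b)                                       // true: both truncate to 1230
}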

View File

@ -1,4 +1,4 @@
package internal
package core
import (
"fmt"

View File

@ -1,4 +1,4 @@
package internal
package core
import (
"fmt"

core/format.go (new file, 92 lines)
View File

@ -0,0 +1,92 @@
package core
import (
"bytes"
"encoding/json"
"github.com/chainreactors/logs"
"github.com/chainreactors/spray/core/baseline"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/words/mask"
"io"
"net/url"
"os"
"strings"
)
func Format(opts Option) {
var content []byte
var err error
if opts.Format == "stdin" {
content, err = io.ReadAll(os.Stdin)
} else {
content, err = os.ReadFile(opts.Format)
}
if err != nil {
return
}
group := make(map[string]map[string]*baseline.Baseline)
for _, line := range bytes.Split(bytes.TrimSpace(content), []byte("\n")) {
var result baseline.Baseline
err := json.Unmarshal(line, &result)
if err != nil {
logs.Log.Error(err.Error())
return
}
result.Url, err = url.Parse(result.UrlString)
if err != nil {
continue
}
if _, exists := group[result.Url.Host]; !exists {
group[result.Url.Host] = make(map[string]*baseline.Baseline)
}
group[result.Url.Host][result.Path] = &result
}
for _, results := range group {
for _, result := range results {
if !opts.Fuzzy && result.IsFuzzy {
continue
}
if opts.OutputProbe == "" {
if !opts.NoColor {
logs.Log.Console(result.ColorString() + "\n")
} else {
logs.Log.Console(result.String() + "\n")
}
} else {
probes := strings.Split(opts.OutputProbe, ",")
logs.Log.Console(result.ProbeOutput(probes) + "\n")
}
}
}
}
func PrintPreset() {
logs.Log.Console("internal rules:\n")
for name, rule := range pkg.Rules {
logs.Log.Consolef("\t%s\t%d rules\n", name, len(strings.Split(rule, "\n")))
}
logs.Log.Console("\ninternal dicts:\n")
for name, dict := range pkg.Dicts {
logs.Log.Consolef("\t%s\t%d items\n", name, len(dict))
}
logs.Log.Console("\ninternal words keyword:\n")
for name, words := range mask.SpecialWords {
logs.Log.Consolef("\t%s\t%d words\n", name, len(words))
}
logs.Log.Console("\ninternal extractor:\n")
for name, _ := range pkg.ExtractRegexps {
logs.Log.Consolef("\t%s\n", name)
}
logs.Log.Console("\ninternal fingers:\n")
for name, engine := range pkg.FingerEngine.EnginesImpl {
logs.Log.Consolef("\t%s\t%d fingerprints \n", name, engine.Len())
}
logs.Log.Consolef("\nload %d active path\n", len(pkg.ActivePath))
}

View File

@ -4,14 +4,10 @@ import (
"context"
"crypto/tls"
"fmt"
"github.com/chainreactors/logs"
"github.com/chainreactors/proxyclient"
"github.com/valyala/fasthttp"
"github.com/valyala/fasthttp/fasthttpproxy"
"golang.org/x/net/proxy"
"net"
"net/http"
"net/url"
"strings"
"time"
)
@ -37,6 +33,7 @@ const (
func NewClient(config *ClientConfig) *Client {
var client *Client
if config.Type == FAST {
client = &Client{
fastClient: &fasthttp.Client{
@ -44,28 +41,27 @@ func NewClient(config *ClientConfig) *Client {
Renegotiation: tls.RenegotiateOnceAsClient,
InsecureSkipVerify: true,
},
Dial: customDialFunc(config.ProxyAddr, config.Timeout),
Dial: customDialFunc(config.ProxyClient, config.Timeout),
MaxConnsPerHost: config.Thread * 3 / 2,
MaxIdleConnDuration: config.Timeout,
//MaxConnWaitTimeout: time.Duration(timeout) * time.Second,
//ReadTimeout: config.Timeout * time.Second,
//WriteTimeout: config.Timeout * time.Second,
ReadTimeout: config.Timeout,
WriteTimeout: config.Timeout,
ReadBufferSize: 16384, // 16k
MaxResponseBodySize: int(DefaultMaxBodySize),
NoDefaultUserAgentHeader: true,
DisablePathNormalizing: true,
DisableHeaderNamesNormalizing: true,
},
Config: config,
ClientConfig: config,
}
} else {
client = &Client{
standardClient: &http.Client{
Transport: &http.Transport{
//Proxy: Proxy,
//TLSHandshakeTimeout : delay * time.Second,
DialContext: config.ProxyClient,
TLSClientConfig: &tls.Config{
Renegotiation: tls.RenegotiateOnceAsClient,
Renegotiation: tls.RenegotiateNever,
InsecureSkipVerify: true,
},
TLSHandshakeTimeout: config.Timeout,
@ -78,28 +74,23 @@ func NewClient(config *ClientConfig) *Client {
return http.ErrUseLastResponse
},
},
Config: config,
}
if config.ProxyAddr != "" {
client.standardClient.Transport.(*http.Transport).Proxy = func(_ *http.Request) (*url.URL, error) {
return url.Parse(config.ProxyAddr)
}
ClientConfig: config,
}
}
return client
}
type ClientConfig struct {
Type int
Timeout time.Duration
Thread int
ProxyAddr string
Type int
Timeout time.Duration
Thread int
ProxyClient proxyclient.Dial
}
type Client struct {
fastClient *fasthttp.Client
standardClient *http.Client
Config *ClientConfig
*ClientConfig
}
func (c *Client) TransToCheck() {
@ -110,70 +101,36 @@ func (c *Client) TransToCheck() {
}
}
func (c *Client) FastDo(ctx context.Context, req *fasthttp.Request) (*fasthttp.Response, error) {
func (c *Client) FastDo(req *fasthttp.Request) (*fasthttp.Response, error) {
resp := fasthttp.AcquireResponse()
err := c.fastClient.Do(req, resp)
err := c.fastClient.DoTimeout(req, resp, c.Timeout)
return resp, err
}
func (c *Client) StandardDo(ctx context.Context, req *http.Request) (*http.Response, error) {
func (c *Client) StandardDo(req *http.Request) (*http.Response, error) {
return c.standardClient.Do(req)
}
func (c *Client) Do(ctx context.Context, req *Request) (*Response, error) {
func (c *Client) Do(req *Request) (*Response, error) {
if c.fastClient != nil {
resp, err := c.FastDo(ctx, req.FastRequest)
resp, err := c.FastDo(req.FastRequest)
return &Response{FastResponse: resp, ClientType: FAST}, err
} else if c.standardClient != nil {
resp, err := c.StandardDo(ctx, req.StandardRequest)
resp, err := c.StandardDo(req.StandardRequest)
return &Response{StandardResponse: resp, ClientType: STANDARD}, err
} else {
return nil, fmt.Errorf("not found client")
}
}
func customDialFunc(proxyAddr string, timeout time.Duration) fasthttp.DialFunc {
if proxyAddr == "" {
func customDialFunc(dialer proxyclient.Dial, timeout time.Duration) fasthttp.DialFunc {
if dialer == nil {
return func(addr string) (net.Conn, error) {
return fasthttp.DialTimeout(addr, timeout)
}
}
u, err := url.Parse(proxyAddr)
if err != nil {
logs.Log.Error(err.Error())
return nil
}
if strings.ToLower(u.Scheme) == "socks5" {
return func(addr string) (net.Conn, error) {
var auth *proxy.Auth
username := u.User.Username()
password, ok := u.User.Password()
if ok {
auth = &proxy.Auth{
User: username,
Password: password,
}
}
dialer, err := proxy.SOCKS5("tcp", u.Host, auth, proxy.Direct)
if err != nil {
return nil, err
}
// Set up a connection with a timeout
conn, err := dialer.Dial("tcp", addr)
if err != nil {
return nil, err
}
// Set deadlines for the connection
deadline := time.Now().Add(timeout)
if err := conn.SetDeadline(deadline); err != nil {
conn.Close()
return nil, err
}
return conn, nil
}
} else {
return fasthttpproxy.FasthttpHTTPDialerTimeout(u.Host, timeout)
return func(addr string) (net.Conn, error) {
ctx, _ := context.WithTimeout(context.Background(), timeout)
return dialer.DialContext(ctx, "tcp", addr)
}
}
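The rewritten customDialFunc above hands all dialing to a proxyclient.Dial, which is how the "support proxyclient for http and fasthttp" commit lets --proxy values be chained and shared by both client types. A hedged sketch of the wiring, relying only on the proxyclient calls visible in this diff (ParseProxyURLs, NewClientChain, DialContext); the proxy address is an example value:

package main

import (
    "context"
    "net"
    "time"

    "github.com/chainreactors/proxyclient"
    "github.com/valyala/fasthttp"
)

// newDialFunc mirrors customDialFunc from core/ihttp/client.go: when a proxy
// chain is configured, every fasthttp connection is dialed through it.
func newDialFunc(dialer proxyclient.Dial, timeout time.Duration) fasthttp.DialFunc {
    if dialer == nil {
        return func(addr string) (net.Conn, error) {
            return fasthttp.DialTimeout(addr, timeout)
        }
    }
    return func(addr string) (net.Conn, error) {
        ctx, cancel := context.WithTimeout(context.Background(), timeout)
        defer cancel() // release the deadline timer once the dial returns
        return dialer.DialContext(ctx, "tcp", addr)
    }
}

func main() {
    // --proxy may be repeated; the URLs are parsed and chained into one dialer (example address)
    urls, err := proxyclient.ParseProxyURLs([]string{"socks5://127.0.0.1:1080"})
    if err != nil {
        panic(err)
    }
    dial, err := proxyclient.NewClientChain(urls)
    if err != nil {
        panic(err)
    }
    client := &fasthttp.Client{Dial: newDialFunc(dial, 5*time.Second)}
    _ = client
}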

View File

@ -1,31 +1,26 @@
package ihttp
import (
"context"
"github.com/chainreactors/spray/pkg"
"github.com/valyala/fasthttp"
"net/http"
)
func BuildPathRequest(clientType int, base, path, method string) (*Request, error) {
func BuildRequest(ctx context.Context, clientType int, base, path, host, method string) (*Request, error) {
if clientType == FAST {
req := fasthttp.AcquireRequest()
req.Header.SetMethod(method)
req.SetRequestURI(base + path)
if host != "" {
req.SetHost(host)
}
return &Request{FastRequest: req, ClientType: FAST}, nil
} else {
req, err := http.NewRequest(method, base+path, nil)
return &Request{StandardRequest: req, ClientType: STANDARD}, err
}
}
func BuildHostRequest(clientType int, base, host string) (*Request, error) {
if clientType == FAST {
req := fasthttp.AcquireRequest()
req.SetRequestURI(base)
req.SetHost(host)
return &Request{FastRequest: req, ClientType: FAST}, nil
} else {
req, err := http.NewRequest("GET", base, nil)
req.Host = host
req, err := http.NewRequestWithContext(ctx, method, base+path, nil)
if host != "" {
req.Host = host
}
return &Request{StandardRequest: req, ClientType: STANDARD}, err
}
}
@ -36,14 +31,18 @@ type Request struct {
ClientType int
}
func (r *Request) SetHeaders(header map[string]string) {
func (r *Request) SetHeaders(header http.Header, RandomUA bool) {
if RandomUA {
r.SetHeader("User-Agent", pkg.RandomUA())
}
if r.StandardRequest != nil {
for k, v := range header {
r.StandardRequest.Header.Set(k, v)
}
r.StandardRequest.Header = header
} else if r.FastRequest != nil {
for k, v := range header {
r.FastRequest.Header.Set(k, v)
for _, i := range v {
r.FastRequest.Header.Set(k, i)
}
}
}
}
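The unified BuildRequest above replaces the old BuildPathRequest/BuildHostRequest pair: path spray passes a path and an empty host, host spray passes an empty path plus a Host override, and both now carry a context. A short usage sketch under that reading; the target URLs and Host value are placeholders, and ihttp.FAST/ihttp.STANDARD are assumed to be the exported client-type constants used elsewhere in this diff:

package main

import (
    "context"

    "github.com/chainreactors/spray/core/ihttp"
)

func main() {
    ctx := context.Background()

    // path spray: GET http://example.com/admin (placeholder target)
    pathReq, err := ihttp.BuildRequest(ctx, ihttp.FAST, "http://example.com", "/admin", "", "GET")
    if err != nil {
        panic(err)
    }

    // host spray: GET http://1.2.3.4/ with a Host header override (placeholder values)
    hostReq, err := ihttp.BuildRequest(ctx, ihttp.STANDARD, "http://1.2.3.4", "", "internal.example.com", "GET")
    if err != nil {
        panic(err)
    }

    _, _ = pathReq, hostReq
}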

View File

@ -29,7 +29,7 @@ func (r *Response) Body() []byte {
if r.FastResponse != nil {
return r.FastResponse.Body()
} else if r.StandardResponse != nil {
if DefaultMaxBodySize == -1 {
if r.StandardResponse.ContentLength == -1 {
body, err := io.ReadAll(r.StandardResponse.Body)
if err != nil {
return nil

View File

@ -1,20 +1,22 @@
package internal
package core
import (
"bufio"
"bytes"
"errors"
"fmt"
"github.com/chainreactors/files"
"github.com/chainreactors/logs"
"github.com/chainreactors/parsers"
"github.com/chainreactors/spray/internal/ihttp"
"github.com/chainreactors/spray/internal/pool"
"github.com/chainreactors/proxyclient"
"github.com/chainreactors/spray/core/baseline"
"github.com/chainreactors/spray/core/ihttp"
"github.com/chainreactors/spray/core/pool"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/utils"
"github.com/chainreactors/utils/iutils"
"github.com/chainreactors/words/mask"
"github.com/chainreactors/words/rule"
"github.com/charmbracelet/lipgloss"
"github.com/expr-lang/expr"
"github.com/vbauerster/mpb/v8"
"io/ioutil"
@ -31,7 +33,6 @@ import (
var (
DefaultThreads = 20
SkipChar = "%SKIP%"
)
type Option struct {
@ -54,15 +55,14 @@ type InputOptions struct {
CIDRs []string `short:"i" long:"cidr" description:"String, input cidr, e.g.: 1.1.1.1/24 "`
RawFile string `long:"raw" description:"File, input raw request filename"`
Dictionaries []string `short:"d" long:"dict" description:"Files, Multi,dict files, e.g.: -d 1.txt -d 2.txt" config:"dictionaries"`
//NoDict bool `long:"no-dict" description:"Bool, no dictionary" config:"no-dict"`
DefaultDict bool `short:"D" long:"default" description:"Bool, use default dictionary" config:"default"`
Word string `short:"w" long:"word" description:"String, word generate dsl, e.g.: -w test{?ld#4}" config:"word"`
Rules []string `short:"r" long:"rules" description:"Files, rule files, e.g.: -r rule1.txt -r rule2.txt" config:"rules"`
AppendRule []string `long:"append-rule" description:"Files, when found valid path , use append rule generator new word with current path" config:"append-rules"`
FilterRule string `long:"filter-rule" description:"String, filter rule, e.g.: --rule-filter '>8 <4'" config:"filter-rule"`
AppendFile []string `long:"append-file" description:"Files, when found valid path , use append file new word with current path" config:"append-files"`
Offset int `long:"offset" description:"Int, wordlist offset"`
Limit int `long:"limit" description:"Int, wordlist limit, start with offset. e.g.: --offset 1000 --limit 100"`
DefaultDict bool `short:"D" long:"default" description:"Bool, use default dictionary" config:"default"`
Word string `short:"w" long:"word" description:"String, word generate dsl, e.g.: -w test{?ld#4}" config:"word"`
Rules []string `short:"r" long:"rules" description:"Files, rule files, e.g.: -r rule1.txt -r rule2.txt" config:"rules"`
AppendRule []string `short:"R" long:"append-rule" description:"Files, when found valid path , use append rule generator new word with current path" config:"append-rules"`
FilterRule string `long:"filter-rule" description:"String, filter rule, e.g.: --rule-filter '>8 <4'" config:"filter-rule"`
AppendFile []string `long:"append" description:"Files, when found valid path , use append file new word with current path" config:"append-files"`
Offset int `long:"offset" description:"Int, wordlist offset"`
Limit int `long:"limit" description:"Int, wordlist limit, start with offset. e.g.: --offset 1000 --limit 100"`
}
type FunctionOptions struct {
@ -84,13 +84,13 @@ type OutputOptions struct {
Filter string `long:"filter" description:"String, custom filter function, e.g.: --filter 'current.Body contains \"hello\"'" config:"filter"`
Fuzzy bool `long:"fuzzy" description:"String, open fuzzy output" config:"fuzzy"`
OutputFile string `short:"f" long:"file" description:"String, output filename" json:"output_file,omitempty" config:"output-file"`
FuzzyFile string `long:"fuzzy-file" description:"String, fuzzy output filename" json:"fuzzy_file,omitempty" config:"fuzzy-file"`
DumpFile string `long:"dump-file" description:"String, dump all request, and write to filename" config:"dump-file"`
Dump bool `long:"dump" description:"Bool, dump all request" config:"dump"`
AutoFile bool `long:"auto-file" description:"Bool, auto generator output and fuzzy filename" config:"auto-file"`
Format string `short:"F" long:"format" description:"String, output format, e.g.: --format 1.json" config:"format"`
Json bool `short:"j" long:"json" description:"Bool, output json" config:"json"`
OutputProbe string `short:"o" long:"probe" description:"String, output format" config:"output_probe"`
FileOutput string `short:"O" long:"file-output" default:"json" description:"Bool, file output format" config:"file_output"`
OutputProbe string `short:"o" long:"probe" description:"String, output format" config:"output"`
Quiet bool `short:"q" long:"quiet" description:"Bool, Quiet" config:"quiet"`
NoColor bool `long:"no-color" description:"Bool, no color" config:"no-color"`
NoBar bool `long:"no-bar" description:"Bool, No progress bar" config:"no-bar"`
@ -98,8 +98,8 @@ type OutputOptions struct {
}
type RequestOptions struct {
Method string `short:"x" long:"method" default:"GET" description:"String, request method, e.g.: --method POST" config:"method"`
Headers []string `long:"header" description:"Strings, custom headers, e.g.: --headers 'Auth: example_auth'" config:"headers"`
Method string `short:"X" long:"method" default:"GET" description:"String, request method, e.g.: --method POST" config:"method"`
Headers []string `short:"H" long:"header" description:"Strings, custom headers, e.g.: --header 'Auth: example_auth'" config:"headers"`
UserAgent string `long:"user-agent" description:"String, custom user-agent, e.g.: --user-agent Custom" config:"useragent"`
RandomUserAgent bool `long:"random-agent" description:"Bool, use random with default user-agent" config:"random-useragent"`
Cookie []string `long:"cookie" description:"Strings, custom cookie" config:"cookies"`
@ -111,19 +111,19 @@ type PluginOptions struct {
Advance bool `short:"a" long:"advance" description:"Bool, enable all plugin" config:"all" `
Extracts []string `long:"extract" description:"Strings, extract response, e.g.: --extract js --extract ip --extract version:(.*?)" config:"extract"`
ExtractConfig string `long:"extract-config" description:"String, extract config filename" config:"extract-config"`
Active bool `long:"active" description:"Bool, enable active finger path"`
Recon bool `long:"recon" description:"Bool, enable recon" config:"recon"`
Bak bool `long:"bak" description:"Bool, enable bak found" config:"bak"`
FileBak bool `long:"file-bak" description:"Bool, enable valid result bak found, equal --append-rule rule/filebak.txt" config:"file-bak"`
Common bool `long:"common" description:"Bool, enable common file found" config:"common"`
Crawl bool `long:"crawl" description:"Bool, enable crawl" config:"crawl"`
ActivePlugin bool `long:"active" description:"Bool, enable active finger path"`
ReconPlugin bool `long:"recon" description:"Bool, enable recon" config:"recon"`
BakPlugin bool `long:"bak" description:"Bool, enable bak found" config:"bak"`
FuzzuliPlugin bool `long:"fuzzuli" description:"Bool, enable fuzzuli plugin" config:"fuzzuli"`
CommonPlugin bool `long:"common" description:"Bool, enable common file found" config:"common"`
CrawlPlugin bool `long:"crawl" description:"Bool, enable crawl" config:"crawl"`
CrawlDepth int `long:"crawl-depth" default:"3" description:"Int, crawl depth" config:"crawl-depth"`
AppendDepth int `long:"append-depth" default:"2" description:"Int, append depth" config:"append-depth"`
}
type ModeOptions struct {
RateLimit int `long:"rate-limit" default:"0" description:"Int, request rate limit (rate/s), e.g.: --rate-limit 100" config:"rate-limit"`
Force bool `long:"force" description:"Bool, skip error break" config:"force"`
//CheckOnly bool `long:"check-only" description:"Bool, check only" config:"check-only"`
RateLimit int `long:"rate-limit" default:"0" description:"Int, request rate limit (rate/s), e.g.: --rate-limit 100" config:"rate-limit"`
Force bool `long:"force" description:"Bool, skip error break" config:"force"`
NoScope bool `long:"no-scope" description:"Bool, no scope" config:"no-scope"`
Scope []string `long:"scope" description:"String, custom scope, e.g.: --scope *.example.com" config:"scope"`
Recursive string `long:"recursive" default:"current.IsDir()" description:"String,custom recursive rule, e.g.: --recursive current.IsDir()" config:"recursive"`
@ -133,27 +133,28 @@ type ModeOptions struct {
CheckPeriod int `long:"check-period" default:"200" description:"Int, check period when request" config:"check-period"`
ErrPeriod int `long:"error-period" default:"10" description:"Int, check period when error" config:"error-period"`
BreakThreshold int `long:"error-threshold" default:"20" description:"Int, break when the error exceeds the threshold" config:"error-threshold"`
BlackStatus string `long:"black-status" default:"400,410" description:"Strings (comma split),custom black status" config:"black-status"`
WhiteStatus string `long:"white-status" default:"200" description:"Strings (comma split), custom white status" config:"white-status"`
FuzzyStatus string `long:"fuzzy-status" default:"500,501,502,503" description:"Strings (comma split), custom fuzzy status" config:"fuzzy-status"`
BlackStatus string `short:"B" long:"black-status" default:"400,410" description:"Strings (comma split),custom black status" config:"black-status"`
WhiteStatus string `short:"W" long:"white-status" default:"200" description:"Strings (comma split), custom white status" config:"white-status"`
FuzzyStatus string `long:"fuzzy-status" default:"500,501,502,503,301,302,404" description:"Strings (comma split), custom fuzzy status" config:"fuzzy-status"`
UniqueStatus string `long:"unique-status" default:"403,200,404" description:"Strings (comma split), custom unique status" config:"unique-status"`
Unique bool `long:"unique" description:"Bool, unique response" config:"unique"`
RetryCount int `long:"retry" default:"0" description:"Int, retry count" config:"retry"`
SimhashDistance int `long:"sim-distance" default:"5" config:"sim-distance"`
SimhashDistance int `long:"sim-distance" default:"8" config:"sim-distance"`
}
type MiscOptions struct {
Mod string `short:"m" long:"mod" default:"path" choice:"path" choice:"host" description:"String, path/host spray" config:"mod"`
Client string `short:"C" long:"client" default:"auto" choice:"fast" choice:"standard" choice:"auto" description:"String, Client type" config:"client"`
Deadline int `long:"deadline" default:"999999" description:"Int, deadline (seconds)" config:"deadline"` // todo: overall timeout, to fit serverless (cloud function) deadlines
Timeout int `short:"T" long:"timeout" default:"5" description:"Int, timeout with request (seconds)" config:"timeout"`
PoolSize int `short:"P" long:"pool" default:"5" description:"Int, Pool size" config:"pool"`
Threads int `short:"t" long:"thread" default:"20" description:"Int, number of threads per pool" config:"thread"`
Debug bool `long:"debug" description:"Bool, output debug info" config:"debug"`
Version bool `long:"version" description:"Bool, show version"`
Verbose []bool `short:"v" description:"Bool, log verbose level ,default 0, level1: -v level2 -vv " config:"verbose"`
Proxy string `long:"proxy" description:"String, proxy address, e.g.: --proxy socks5://127.0.0.1:1080" config:"proxy"`
InitConfig bool `long:"init" description:"Bool, init config file"`
Mod string `short:"m" long:"mod" default:"path" choice:"path" choice:"host" description:"String, path/host spray" config:"mod"`
Client string `short:"C" long:"client" default:"auto" choice:"fast" choice:"standard" choice:"auto" description:"String, Client type" config:"client"`
Deadline int `long:"deadline" default:"999999" description:"Int, deadline (seconds)" config:"deadline"` // todo: overall timeout, to fit serverless (cloud function) deadlines
Timeout int `short:"T" long:"timeout" default:"5" description:"Int, timeout with request (seconds)" config:"timeout"`
PoolSize int `short:"P" long:"pool" default:"5" description:"Int, Pool size" config:"pool"`
Threads int `short:"t" long:"thread" default:"20" description:"Int, number of threads per pool" config:"thread"`
Debug bool `long:"debug" description:"Bool, output debug info" config:"debug"`
Version bool `long:"version" description:"Bool, show version"`
Verbose []bool `short:"v" description:"Bool, log verbose level ,default 0, level1: -v level2 -vv " config:"verbose"`
Proxies []string `long:"proxy" description:"String, proxy address, e.g.: --proxy socks5://127.0.0.1:1080" config:"proxies"`
InitConfig bool `long:"init" description:"Bool, init config file"`
PrintPreset bool `long:"print" description:"Bool, print preset all preset config "`
}
func (opt *Option) Validate() error {
@ -168,7 +169,6 @@ func (opt *Option) Validate() error {
if opt.Depth > 0 && opt.ResumeFrom != "" {
// recursion and resume would conflict: when resuming, word and rule are not taken from the command line
return errors.New("--resume and --depth cannot be used at the same time")
}
@ -206,6 +206,11 @@ func (opt *Option) Prepare() error {
return err
}
err = pkg.Load()
if err != nil {
return err
}
if opt.Extracts != nil {
for _, e := range opt.Extracts {
if reg, ok := pkg.ExtractRegexps[e]; ok {
@ -228,33 +233,30 @@ func (opt *Option) Prepare() error {
pkg.Extractors[opt.ExtractConfig] = extracts
}
err = pkg.Load()
if err != nil {
iutils.Fatal(err.Error())
}
// initialize global variables
pkg.Distance = uint8(opt.SimhashDistance)
baseline.Distance = uint8(opt.SimhashDistance)
if opt.MaxBodyLength == -1 {
ihttp.DefaultMaxBodySize = -1
} else {
ihttp.DefaultMaxBodySize = opt.MaxBodyLength * 1024
}
pkg.BlackStatus = parseStatus(pkg.BlackStatus, opt.BlackStatus)
pkg.WhiteStatus = parseStatus(pkg.WhiteStatus, opt.WhiteStatus)
pkg.BlackStatus = pkg.ParseStatus(pkg.DefaultBlackStatus, opt.BlackStatus)
pkg.WhiteStatus = pkg.ParseStatus(pkg.DefaultWhiteStatus, opt.WhiteStatus)
if opt.FuzzyStatus == "all" {
pool.EnableAllFuzzy = true
} else {
pkg.FuzzyStatus = parseStatus(pkg.FuzzyStatus, opt.FuzzyStatus)
pkg.FuzzyStatus = pkg.ParseStatus(pkg.DefaultFuzzyStatus, opt.FuzzyStatus)
}
if opt.Unique {
pool.EnableAllUnique = true
} else {
pkg.UniqueStatus = parseStatus(pkg.UniqueStatus, opt.UniqueStatus)
pkg.UniqueStatus = pkg.ParseStatus(pkg.DefaultUniqueStatus, opt.UniqueStatus)
}
pool.MaxCrawl = opt.CrawlDepth
logs.Log.Logf(pkg.LogVerbose, "Black Status: %v, WhiteStatus: %v, WAFStatus: %v", pkg.BlackStatus, pkg.WhiteStatus, pkg.WAFStatus)
logs.Log.Logf(pkg.LogVerbose, "Fuzzy Status: %v, Unique Status: %v", pkg.FuzzyStatus, pkg.UniqueStatus)
return nil
}
@ -264,9 +266,10 @@ func (opt *Option) NewRunner() (*Runner, error) {
r := &Runner{
Option: opt,
taskCh: make(chan *Task),
outputCh: make(chan *pkg.Baseline, 256),
outputCh: make(chan *baseline.Baseline, 256),
poolwg: &sync.WaitGroup{},
outwg: &sync.WaitGroup{},
fuzzyCh: make(chan *pkg.Baseline, 256),
fuzzyCh: make(chan *baseline.Baseline, 256),
Headers: make(map[string]string),
Total: opt.Limit,
Color: true,
@ -305,56 +308,19 @@ func (opt *Option) NewRunner() (*Runner, error) {
r.ClientType = ihttp.STANDARD
}
if opt.Threads == DefaultThreads && len(opt.Dictionaries) == 0 {
r.Threads = 1000
if len(opt.Proxies) > 0 {
urls, err := proxyclient.ParseProxyURLs(opt.Proxies)
if err != nil {
return nil, err
}
r.ProxyClient, err = proxyclient.NewClientChain(urls)
if err != nil {
return nil, err
}
}
if opt.Recon {
pkg.Extractors["recon"] = pkg.ExtractRegexps["pentest"]
}
if opt.Finger {
pkg.EnableAllFingerEngine = true
}
// brute only
if opt.Advance {
r.Crawl = true
r.Finger = true
r.Bak = true
r.Common = true
r.Active = true
pkg.EnableAllFingerEngine = true
pkg.Extractors["recon"] = pkg.ExtractRegexps["pentest"]
r.IsCheck = false
opt.AppendRule = append(opt.AppendRule, "filebak")
}
if opt.FileBak {
r.IsCheck = false
opt.AppendRule = append(opt.AppendRule, "filebak")
}
if opt.Common {
r.IsCheck = false
r.AppendWords = append(r.AppendWords, mask.SpecialWords["common_file"]...)
}
if opt.Active {
r.IsCheck = false
r.AppendWords = append(r.AppendWords, pkg.ActivePath...)
}
if opt.Crawl {
r.IsCheck = false
}
opt.PrintPlugin()
if r.IsCheck == true {
logs.Log.Important("enabling brute mod, because of enabled brute plugin")
}
if opt.NoScope {
r.Scope = []string{"*"}
err = opt.BuildPlugin(r)
if err != nil {
return nil, err
}
err = opt.BuildWords(r)
@ -362,6 +328,10 @@ func (opt *Option) NewRunner() (*Runner, error) {
return nil, err
}
if opt.Threads == DefaultThreads && r.bruteMod {
r.Threads = 1000
}
pkg.DefaultStatistor = pkg.Statistor{
Word: opt.Word,
WordCount: len(r.Wordlist),
@ -397,13 +367,12 @@ func (opt *Option) NewRunner() (*Runner, error) {
var express string
if opt.Recursive != "current.IsDir()" && opt.Depth != 0 {
// recursion is off by default unless a non-default recursive expression is specified
pool.MaxRecursion = 1
opt.Depth = 1
express = opt.Recursive
}
if opt.Depth != 0 {
// a manually set depth takes precedence over the default
pool.MaxRecursion = opt.Depth
express = opt.Recursive
}
@ -436,6 +405,10 @@ func (opt *Option) NewRunner() (*Runner, error) {
r.Probes = strings.Split(opt.OutputProbe, ",")
}
if !opt.Quiet {
fmt.Println(opt.PrintConfig(r))
}
// init output file
if opt.OutputFile != "" {
r.OutputFile, err = files.NewFile(opt.OutputFile, false, false, true)
@ -449,18 +422,6 @@ func (opt *Option) NewRunner() (*Runner, error) {
}
}
if opt.FuzzyFile != "" {
r.FuzzyFile, err = files.NewFile(opt.FuzzyFile, false, false, true)
if err != nil {
return nil, err
}
} else if opt.AutoFile {
r.FuzzyFile, err = files.NewFile("fuzzy.json", false, false, true)
if err != nil {
return nil, err
}
}
if opt.DumpFile != "" {
r.DumpFile, err = files.NewFile(opt.DumpFile, false, false, true)
if err != nil {
@ -474,14 +435,13 @@ func (opt *Option) NewRunner() (*Runner, error) {
}
if opt.ResumeFrom != "" {
r.StatFile, err = files.NewFile(opt.ResumeFrom, false, true, true)
} else {
r.StatFile, err = files.NewFile(safeFilename(r.Tasks.Name)+".stat", false, true, true)
}
if err != nil {
return nil, err
}
if !opt.NoStat {
r.StatFile, err = files.NewFile(pkg.SafeFilename(r.Tasks.Name)+".stat", false, true, true)
r.StatFile.Mod = os.O_WRONLY | os.O_CREATE
err = r.StatFile.Init()
if err != nil {
@ -491,57 +451,213 @@ func (opt *Option) NewRunner() (*Runner, error) {
return r, nil
}
func (opt *Option) PrintPlugin() {
var s strings.Builder
if opt.Crawl {
s.WriteString("crawl enable; ")
func (opt *Option) PrintConfig(r *Runner) string {
// define color styles
keyStyle := lipgloss.NewStyle().Bold(true).Foreground(lipgloss.Color("#FFFFFF")).Width(20) // bold key with a fixed width
stringValueStyle := lipgloss.NewStyle().Foreground(lipgloss.Color("#FFA07A")) // string style
arrayValueStyle := lipgloss.NewStyle().Foreground(lipgloss.Color("#98FB98")) // array style
numberValueStyle := lipgloss.NewStyle().Foreground(lipgloss.Color("#ADD8E6")) // number style
panelWidth := 60 // adjust panelWidth so the content sits slightly to the left
padding := 2 // reduce padding to shift the layout left
// divider style and terminal-width calculation
divider := strings.Repeat("─", panelWidth) // use "─" to draw a continuous divider
// handle values of different types
formatValue := func(value interface{}) string {
switch v := value.(type) {
case string:
return stringValueStyle.Render(v)
case []string:
return arrayValueStyle.Render(fmt.Sprintf("%v", v))
case int, int64, float64:
return numberValueStyle.Render(fmt.Sprintf("%v", v))
default:
return stringValueStyle.Render(fmt.Sprintf("%v", v)) // default to the string style
}
}
// handle mutually exclusive input options: print whichever one is set
inputSource := ""
if opt.ResumeFrom != "" {
inputSource = lipgloss.JoinHorizontal(lipgloss.Left, "🌀 ", keyStyle.Render("ResumeFrom: "), formatValue(opt.ResumeFrom))
} else if len(opt.URL) > 0 {
inputSource = lipgloss.JoinHorizontal(lipgloss.Left, "🌐 ", keyStyle.Render("URL: "), formatValue(opt.URL))
} else if opt.URLFile != "" {
inputSource = lipgloss.JoinHorizontal(lipgloss.Left, "📂 ", keyStyle.Render("URLFile: "), formatValue(opt.URLFile))
} else if len(opt.CIDRs) > 0 {
inputSource = lipgloss.JoinHorizontal(lipgloss.Left, "📡 ", keyStyle.Render("CIDRs: "), formatValue(opt.CIDRs))
} else if opt.RawFile != "" {
inputSource = lipgloss.JoinHorizontal(lipgloss.Left, "📄 ", keyStyle.Render("RawFile: "), formatValue(opt.RawFile))
}
// Input Options
inputOptions := lipgloss.JoinVertical(lipgloss.Left,
inputSource, // the mutually exclusive input source selected above
// PortRange display
lipgloss.JoinHorizontal(lipgloss.Left, "🔢 ", keyStyle.Render("PortRange: "), formatValue(opt.PortRange)),
// Dictionaries display
lipgloss.JoinHorizontal(lipgloss.Left, "📚 ", keyStyle.Render("Dictionaries: "), formatValue(opt.Dictionaries)),
// Word, Rules, FilterRule each expanded onto its own line
lipgloss.JoinVertical(lipgloss.Left,
lipgloss.JoinHorizontal(lipgloss.Left, "💡 ", keyStyle.Render("Word: "), formatValue(r.Word)),
lipgloss.JoinHorizontal(lipgloss.Left, "📜 ", keyStyle.Render("Rules: "), formatValue(opt.Rules)),
lipgloss.JoinHorizontal(lipgloss.Left, "🔍 ", keyStyle.Render("FilterRule: "), formatValue(opt.FilterRule)),
),
// AppendRule and AppendWords each expanded onto its own line
lipgloss.JoinVertical(lipgloss.Left,
lipgloss.JoinHorizontal(lipgloss.Left, "🔧 ", keyStyle.Render("AppendRule: "), formatValue(r.AppendRule)),
lipgloss.JoinHorizontal(lipgloss.Left, "🧩 ", keyStyle.Render("AppendWords: "), formatValue(len(r.AppendWords))),
),
)
// Output Options
outputOptions := lipgloss.JoinVertical(lipgloss.Left,
lipgloss.JoinHorizontal(lipgloss.Left, "📊 ", keyStyle.Render("Match: "), formatValue(opt.Match)),
lipgloss.JoinHorizontal(lipgloss.Left, "⚙️ ", keyStyle.Render("Filter: "), formatValue(opt.Filter)),
)
// Plugin Options
pluginValues := []string{}
if opt.ActivePlugin {
pluginValues = append(pluginValues, "active")
}
if opt.ReconPlugin {
pluginValues = append(pluginValues, "recon")
}
if opt.BakPlugin {
pluginValues = append(pluginValues, "bak")
}
if opt.FuzzuliPlugin {
pluginValues = append(pluginValues, "fuzzuli")
}
if opt.CommonPlugin {
pluginValues = append(pluginValues, "common")
}
if opt.CrawlPlugin {
pluginValues = append(pluginValues, "crawl")
}
pluginOptions := lipgloss.JoinVertical(lipgloss.Left,
lipgloss.JoinHorizontal(lipgloss.Left, "🔎 ", keyStyle.Render("Extracts: "), formatValue(opt.Extracts)),
lipgloss.JoinHorizontal(lipgloss.Left, "🔌 ", keyStyle.Render("Plugins: "), formatValue(strings.Join(pluginValues, ", "))),
)
// Mode Options
modeOptions := lipgloss.JoinVertical(lipgloss.Left,
lipgloss.JoinHorizontal(lipgloss.Left, "🛑 ", keyStyle.Render("BlackStatus: "), formatValue(pkg.BlackStatus)),
lipgloss.JoinHorizontal(lipgloss.Left, "✅ ", keyStyle.Render("WhiteStatus: "), formatValue(pkg.WhiteStatus)),
lipgloss.JoinHorizontal(lipgloss.Left, "🔄 ", keyStyle.Render("FuzzyStatus: "), formatValue(pkg.FuzzyStatus)),
lipgloss.JoinHorizontal(lipgloss.Left, "🔒 ", keyStyle.Render("UniqueStatus: "), formatValue(pkg.UniqueStatus)),
lipgloss.JoinHorizontal(lipgloss.Left, "🔑 ", keyStyle.Render("Unique: "), formatValue(opt.Unique)),
)
// Misc Options
miscOptions := lipgloss.JoinVertical(lipgloss.Left,
lipgloss.JoinHorizontal(lipgloss.Left, "⏱ ", keyStyle.Render("Timeout: "), formatValue(opt.Timeout)),
lipgloss.JoinHorizontal(lipgloss.Left, "📈 ", keyStyle.Render("PoolSize: "), formatValue(opt.PoolSize)),
lipgloss.JoinHorizontal(lipgloss.Left, "🧵 ", keyStyle.Render("Threads: "), formatValue(opt.Threads)),
lipgloss.JoinHorizontal(lipgloss.Left, "🌍 ", keyStyle.Render("Proxies: "), formatValue(opt.Proxies)),
)
// join all sections together
content := lipgloss.JoinVertical(lipgloss.Left,
inputOptions,
outputOptions,
pluginOptions,
modeOptions,
miscOptions,
)
// add padding properly and center the content
contentWithPadding := lipgloss.NewStyle().PaddingLeft(padding).Render(content)
// use Place to center the whole block
return lipgloss.Place(panelWidth+padding*2, 0, lipgloss.Center, lipgloss.Center,
lipgloss.JoinVertical(lipgloss.Center,
divider, // top divider
contentWithPadding,
divider, // bottom divider
),
)
}
func (opt *Option) BuildPlugin(r *Runner) error {
// brute only
if opt.Advance {
opt.CrawlPlugin = true
opt.Finger = true
opt.BakPlugin = true
opt.FuzzuliPlugin = true
opt.CommonPlugin = true
opt.ActivePlugin = true
opt.ReconPlugin = true
}
if opt.ReconPlugin {
pkg.Extractors["recon"] = pkg.ExtractRegexps["pentest"]
}
if opt.Finger {
s.WriteString("active fingerprint enable; ")
}
if opt.Bak {
s.WriteString("bak file enable; ")
}
if opt.Common {
s.WriteString("common file enable; ")
}
if opt.Recon {
s.WriteString("recon enable; ")
}
if opt.FileBak {
s.WriteString("file bak enable; ")
pkg.EnableAllFingerEngine = true
}
if opt.RetryCount > 0 {
s.WriteString("Retry Count: " + strconv.Itoa(opt.RetryCount))
if opt.BakPlugin {
r.bruteMod = true
opt.AppendRule = append(opt.AppendRule, "filebak")
r.AppendWords = append(r.AppendWords, pkg.GetPresetWordList([]string{"bak_file"})...)
}
if s.Len() > 0 {
logs.Log.Important(s.String())
if opt.CommonPlugin {
r.bruteMod = true
r.AppendWords = append(r.AppendWords, pkg.Dicts["common"]...)
r.AppendWords = append(r.AppendWords, pkg.Dicts["log"]...)
}
if opt.ActivePlugin {
r.bruteMod = true
r.AppendWords = append(r.AppendWords, pkg.ActivePath...)
}
if opt.CrawlPlugin {
r.bruteMod = true
}
if r.bruteMod {
logs.Log.Important("enabling brute mod, because of enabled brute plugin")
}
if opt.NoScope {
r.Scope = []string{"*"}
}
return nil
}
func (opt *Option) BuildWords(r *Runner) error {
var dicts [][]string
var err error
if opt.DefaultDict {
dicts = append(dicts, pkg.LoadDefaultDict())
dicts = append(dicts, pkg.Dicts["default"])
logs.Log.Info("use default dictionary: https://github.com/maurosoria/dirsearch/blob/master/db/dicc.txt")
}
for i, f := range opt.Dictionaries {
dict, err := loadFileToSlice(f)
dict, err := pkg.LoadFileToSlice(f)
if err != nil {
return err
}
dicts = append(dicts, dict)
if opt.ResumeFrom != "" {
dictCache[f] = dicts[i]
pkg.Dicts[f] = dicts[i]
}
logs.Log.Logf(pkg.LogVerbose, "Loaded %d word from %s", len(dicts[i]), f)
logs.Log.Logf(pkg.LogVerbose, "Loaded %d word from %s", len(dict), f)
}
if len(dicts) == 0 && opt.Word == "" {
if len(dicts) == 0 && opt.Word == "" && len(opt.Rules) == 0 && len(opt.AppendRule) == 0 {
r.IsCheck = true
}
@ -582,7 +698,7 @@ func (opt *Option) BuildWords(r *Runner) error {
}
if len(opt.Rules) != 0 {
rules, err := loadRuleAndCombine(opt.Rules)
rules, err := pkg.LoadRuleAndCombine(opt.Rules)
if err != nil {
return err
}
@ -601,7 +717,7 @@ func (opt *Option) BuildWords(r *Runner) error {
}
if len(opt.AppendRule) != 0 {
content, err := loadRuleAndCombine(opt.AppendRule)
content, err := pkg.LoadRuleAndCombine(opt.AppendRule)
if err != nil {
return err
}
@ -609,37 +725,23 @@ func (opt *Option) BuildWords(r *Runner) error {
}
if len(opt.AppendFile) != 0 {
var bs bytes.Buffer
var lines []string
for _, f := range opt.AppendFile {
content, err := ioutil.ReadFile(f)
dict, err := pkg.LoadFileToSlice(f)
if err != nil {
return err
}
bs.Write(bytes.TrimSpace(content))
bs.WriteString("\n")
}
lines := strings.Split(bs.String(), "\n")
for i, line := range lines {
lines[i] = strings.TrimSpace(line)
lines = append(lines, dict...)
}
r.AppendWords = append(r.AppendWords, lines...)
}
// similar to dirsearch's %EXT% handling
if opt.Extensions != "" {
r.AppendFunction(func(s string) []string {
exts := strings.Split(opt.Extensions, ",")
ss := make([]string, len(exts))
for i, e := range exts {
if strings.Contains(s, "%EXT%") {
ss[i] = strings.Replace(s, "%EXT%", e, -1)
}
}
return ss
})
r.AppendFunction(pkg.ParseEXTPlaceholderFunc(strings.Split(opt.Extensions, ",")))
} else {
r.AppendFunction(func(s string) []string {
if strings.Contains(s, "%EXT%") {
if strings.Contains(s, pkg.EXTChar) {
return nil
}
return []string{s}
@ -647,16 +749,16 @@ func (opt *Option) BuildWords(r *Runner) error {
}
if opt.Uppercase {
r.AppendFunction(wrapWordsFunc(strings.ToUpper))
r.AppendFunction(pkg.WrapWordsFunc(strings.ToUpper))
}
if opt.Lowercase {
r.AppendFunction(wrapWordsFunc(strings.ToLower))
r.AppendFunction(pkg.WrapWordsFunc(strings.ToLower))
}
if opt.RemoveExtensions != "" {
rexts := strings.Split(opt.ExcludeExtensions, ",")
r.AppendFunction(func(s string) []string {
if ext := parseExtension(s); iutils.StringsContains(rexts, ext) {
if ext := pkg.ParseExtension(s); iutils.StringsContains(rexts, ext) {
return []string{strings.TrimSuffix(s, "."+ext)}
}
return []string{s}
@ -666,7 +768,7 @@ func (opt *Option) BuildWords(r *Runner) error {
if opt.ExcludeExtensions != "" {
exexts := strings.Split(opt.ExcludeExtensions, ",")
r.AppendFunction(func(s string) []string {
if ext := parseExtension(s); iutils.StringsContains(exexts, ext) {
if ext := pkg.ParseExtension(s); iutils.StringsContains(exexts, ext) {
return nil
}
return []string{s}
@ -682,13 +784,6 @@ func (opt *Option) BuildWords(r *Runner) error {
})
}
// default skip function, skip %EXT%
r.AppendFunction(func(s string) []string {
if strings.Contains(s, "%EXT%") {
return nil
}
return []string{s}
})
if len(opt.Skips) > 0 {
r.AppendFunction(func(s string) []string {
for _, skip := range opt.Skips {
@ -700,7 +795,6 @@ func (opt *Option) BuildWords(r *Runner) error {
})
}
logs.Log.Logf(pkg.LogVerbose, "Loaded %d dictionaries and %d decorators", len(opt.Dictionaries), len(r.Fns))
return nil
}

View File

@ -6,15 +6,17 @@ import (
"fmt"
"github.com/chainreactors/logs"
"github.com/chainreactors/parsers"
"github.com/chainreactors/spray/internal/ihttp"
"github.com/chainreactors/spray/core/baseline"
"github.com/chainreactors/spray/core/ihttp"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/utils/iutils"
"github.com/chainreactors/words"
"github.com/chainreactors/words/rule"
"github.com/panjf2000/ants/v2"
"github.com/valyala/fasthttp"
"golang.org/x/time/rate"
"math/rand"
"net/url"
"path"
"strings"
"sync"
"sync/atomic"
@ -22,11 +24,9 @@ import (
)
var (
MaxRedirect = 3
MaxCrawl = 3
MaxRecursion = 0
EnableAllFuzzy = false
EnableAllUnique = false
//AllowHostModSource = []parsers.SpraySource{parsers.WordSource, parsers.CheckSource, parsers.InitIndexSource, parsers.InitRandomSource}
)
func NewBrutePool(ctx context.Context, config *Config) (*BrutePool, error) {
@ -43,14 +43,14 @@ func NewBrutePool(ctx context.Context, config *Config) (*BrutePool, error) {
ctx: pctx,
Cancel: cancel,
client: ihttp.NewClient(&ihttp.ClientConfig{
Thread: config.Thread,
Type: config.ClientType,
Timeout: time.Duration(config.Timeout) * time.Second,
ProxyAddr: config.ProxyAddr,
Thread: config.Thread,
Type: config.ClientType,
Timeout: config.Timeout,
ProxyClient: config.ProxyClient,
}),
additionCh: make(chan *Unit, config.Thread),
closeCh: make(chan struct{}),
processCh: make(chan *pkg.Baseline, config.Thread),
processCh: make(chan *baseline.Baseline, config.Thread),
wg: &sync.WaitGroup{},
},
base: u.Scheme + "://" + u.Host,
@ -106,46 +106,30 @@ type BrutePool struct {
initwg sync.WaitGroup // used for initialization; to be replaced with a lock later
}
func (pool *BrutePool) checkRedirect(redirectURL string) bool {
if pool.random.RedirectURL == "" {
// if the random baseline's RedirectURL is empty, this entry is considered valid
return true
}
if redirectURL == pool.random.RedirectURL {
// an identical RedirectURL is treated as invalid data
return false
} else {
// a 3xx path whose RedirectURL differs from the baseline's is valid data
return true
}
}
func (pool *BrutePool) genReq(mod SprayMod, s string) (*ihttp.Request, error) {
if mod == HostSpray {
return ihttp.BuildHostRequest(pool.ClientType, pool.BaseURL, s)
} else if mod == PathSpray {
return ihttp.BuildPathRequest(pool.ClientType, pool.base, s, pool.Method)
}
return nil, fmt.Errorf("unknown mod")
}
func (pool *BrutePool) Init() error {
pool.initwg.Add(2)
if pool.Index != "/" {
logs.Log.Logf(pkg.LogVerbose, "custom index url: %s", pkg.BaseURL(pool.url)+pkg.FormatURL(pkg.BaseURL(pool.url), pool.Index))
pool.reqPool.Invoke(newUnit(pool.Index, parsers.InitIndexSource))
pool.reqPool.Invoke(&Unit{path: pool.Index, source: parsers.InitIndexSource})
//pool.urls[dir(pool.Index)] = struct{}{}
} else {
pool.reqPool.Invoke(newUnit(pool.url.Path, parsers.InitIndexSource))
pool.reqPool.Invoke(&Unit{path: pool.url.Path, source: parsers.InitIndexSource})
//pool.urls[dir(pool.url.Path)] = struct{}{}
}
if pool.Random != "" {
logs.Log.Logf(pkg.LogVerbose, "custom random url: %s", pkg.BaseURL(pool.url)+pkg.FormatURL(pkg.BaseURL(pool.url), pool.Random))
pool.reqPool.Invoke(newUnit(pool.Random, parsers.InitRandomSource))
if pool.Mod == PathSpray {
pool.reqPool.Invoke(&Unit{path: pool.Random, source: parsers.InitRandomSource})
} else {
pool.reqPool.Invoke(&Unit{host: pool.Random, source: parsers.InitRandomSource})
}
} else {
pool.reqPool.Invoke(newUnit(pool.safePath(pkg.RandPath()), parsers.InitRandomSource))
if pool.Mod == PathSpray {
pool.reqPool.Invoke(&Unit{path: pool.safePath(pkg.RandPath()), source: parsers.InitRandomSource})
} else {
pool.reqPool.Invoke(&Unit{host: pkg.RandHost(), source: parsers.InitRandomSource})
}
}
pool.initwg.Wait()
@ -180,23 +164,7 @@ func (pool *BrutePool) Init() error {
return nil
}
func (pool *BrutePool) Upgrade(bl *pkg.Baseline) error {
rurl, err := url.Parse(bl.RedirectURL)
if err == nil && rurl.Hostname() == bl.Url.Hostname() && bl.Url.Scheme == "http" && rurl.Scheme == "https" {
logs.Log.Infof("baseurl %s upgrade http to https, reinit", pool.BaseURL)
pool.base = strings.Replace(pool.BaseURL, "http", "https", 1)
pool.url.Scheme = "https"
// re-initialize
err = pool.Init()
if err != nil {
return err
}
}
return nil
}
func (pool *BrutePool) Run(ctx context.Context, offset, limit int) {
func (pool *BrutePool) Run(offset, limit int) {
pool.Worder.Run()
if pool.Active {
pool.wg.Add(1)
@ -208,6 +176,11 @@ func (pool *BrutePool) Run(ctx context.Context, offset, limit int) {
go pool.doBak()
}
if pool.Fuzzuli {
pool.wg.Add(1)
go pool.doFuzzuli()
}
if pool.Common {
pool.wg.Add(1)
go pool.doCommonFile()
@ -229,16 +202,12 @@ func (pool *BrutePool) Run(ctx context.Context, offset, limit int) {
Loop:
for {
select {
case w, ok := <-pool.Worder.C:
case w, ok := <-pool.Worder.Output:
if !ok {
done = true
continue
}
pool.Statistor.End++
if w == "" {
pool.Statistor.Skipped++
pool.Bar.Done()
}
pool.wordOffset++
if pool.wordOffset < offset {
@ -250,20 +219,26 @@ Loop:
continue
}
if w == "" {
pool.Statistor.Skipped++
pool.Bar.Done()
continue
}
pool.wg.Add(1)
if pool.Mod == HostSpray {
pool.reqPool.Invoke(newUnitWithNumber(w, parsers.WordSource, pool.wordOffset))
pool.reqPool.Invoke(&Unit{host: w, source: parsers.WordSource, number: pool.wordOffset})
} else {
// join the directory verbatim: keep exactly as many "/" as the input contains, to support middleware where "/" is semantic
pool.reqPool.Invoke(newUnitWithNumber(pool.safePath(w), parsers.WordSource, pool.wordOffset))
pool.reqPool.Invoke(&Unit{path: pool.safePath(w), source: parsers.WordSource, number: pool.wordOffset})
}
case <-pool.checkCh:
pool.Statistor.CheckNumber++
if pool.Mod == HostSpray {
pool.reqPool.Invoke(newUnitWithNumber(pkg.RandHost(), parsers.CheckSource, pool.wordOffset))
pool.reqPool.Invoke(&Unit{host: pkg.RandHost(), source: parsers.CheckSource, number: pool.wordOffset})
} else if pool.Mod == PathSpray {
pool.reqPool.Invoke(newUnitWithNumber(pool.safePath(pkg.RandPath()), parsers.CheckSource, pool.wordOffset))
pool.reqPool.Invoke(&Unit{path: pool.safePath(pkg.RandPath()), source: parsers.CheckSource, number: pool.wordOffset})
}
case unit, ok := <-pool.additionCh:
if !ok || pool.closed {
@ -274,13 +249,12 @@ Loop:
pool.wg.Done()
} else {
pool.urls.Store(unit.path, nil)
unit.path = pool.safePath(unit.path)
unit.number = pool.wordOffset
pool.reqPool.Invoke(unit)
}
case <-pool.closeCh:
break Loop
case <-ctx.Done():
break Loop
case <-pool.ctx.Done():
break Loop
}
@ -299,35 +273,28 @@ func (pool *BrutePool) Invoke(v interface{}) {
var req *ihttp.Request
var err error
if unit.source == parsers.WordSource {
req, err = pool.genReq(pool.Mod, unit.path)
} else {
req, err = pool.genReq(PathSpray, unit.path)
}
req, err = ihttp.BuildRequest(pool.ctx, pool.ClientType, pool.base, unit.path, unit.host, pool.Method)
if err != nil {
logs.Log.Error(err.Error())
return
}
req.SetHeaders(pool.Headers)
if pool.RandomUserAgent {
req.SetHeader("User-Agent", pkg.RandomUA())
}
req.SetHeaders(pool.Headers, pool.RandomUserAgent)
start := time.Now()
resp, reqerr := pool.client.Do(pool.ctx, req)
resp, reqerr := pool.client.Do(req)
if pool.ClientType == ihttp.FAST {
defer fasthttp.ReleaseResponse(resp.FastResponse)
defer fasthttp.ReleaseRequest(req.FastRequest)
}
// comparison and error handling
var bl *pkg.Baseline
var bl *baseline.Baseline
if reqerr != nil && !errors.Is(reqerr, fasthttp.ErrBodyTooLarge) {
atomic.AddInt32(&pool.failedCount, 1)
atomic.AddInt32(&pool.Statistor.FailedNumber, 1)
bl = &pkg.Baseline{
bl = &baseline.Baseline{
SprayResult: &parsers.SprayResult{
UrlString: pool.base + unit.path,
ErrString: reqerr.Error(),
@ -340,51 +307,54 @@ func (pool *BrutePool) Invoke(v interface{}) {
} else { // optimizations for specific scenarios
if unit.source <= 3 || unit.source == parsers.CrawlSource || unit.source == parsers.CommonFileSource {
// some high-priority sources skip PreCompare
bl = pkg.NewBaseline(req.URI(), req.Host(), resp)
bl = baseline.NewBaseline(req.URI(), req.Host(), resp)
} else if pool.MatchExpr != nil {
// if a custom match expression is set, all data is sent to tempch
bl = pkg.NewBaseline(req.URI(), req.Host(), resp)
bl = baseline.NewBaseline(req.URI(), req.Host(), resp)
} else if err = pool.PreCompare(resp); err == nil {
// pre-compare skips useless data to reduce overhead
bl = pkg.NewBaseline(req.URI(), req.Host(), resp)
bl = baseline.NewBaseline(req.URI(), req.Host(), resp)
} else {
bl = pkg.NewInvalidBaseline(req.URI(), req.Host(), resp, err.Error())
bl = baseline.NewInvalidBaseline(req.URI(), req.Host(), resp, err.Error())
}
}
// handle redirects manually
if bl.IsValid && unit.source != parsers.CheckSource && bl.RedirectURL != "" {
//pool.wg.Add(1)
bl.SameRedirectDomain = pool.checkHost(bl.RedirectURL)
pool.doRedirect(bl, unit.depth)
}
if !ihttp.CheckBodySize(int64(bl.BodyLength)) {
bl.ExceedLength = true
}
bl.Source = unit.source
bl.ReqDepth = unit.depth
bl.Number = unit.number
unit.Update(bl)
bl.Spended = time.Since(start).Milliseconds()
switch unit.source {
case parsers.InitRandomSource:
bl.Collect()
defer pool.initwg.Done()
pool.locker.Lock()
pool.random = bl
pool.addFuzzyBaseline(bl)
pool.locker.Unlock()
pool.initwg.Done()
case parsers.InitIndexSource:
if !bl.IsValid {
return
}
bl.Collect()
pool.addFuzzyBaseline(bl)
case parsers.InitIndexSource:
defer pool.initwg.Done()
pool.locker.Lock()
pool.index = bl
pool.locker.Unlock()
if bl.Status == 200 || (bl.Status/100) == 3 {
// keep the index result in the output
pool.wg.Add(1)
pool.doCrawl(bl)
pool.putToOutput(bl)
if !bl.IsValid {
return
}
pool.initwg.Done()
bl.Collect()
pool.doCrawl(bl)
pool.doAppend(bl)
pool.putToOutput(bl)
case parsers.CheckSource:
if bl.ErrString != "" {
logs.Log.Warnf("[check.error] %s maybe ip had banned, break (%d/%d), error: %s", pool.BaseURL, pool.failedCount, pool.BreakThreshold, bl.ErrString)
@ -426,14 +396,13 @@ func (pool *BrutePool) Invoke(v interface{}) {
func (pool *BrutePool) NoScopeInvoke(v interface{}) {
defer pool.wg.Done()
unit := v.(*Unit)
req, err := ihttp.BuildPathRequest(pool.ClientType, unit.path, "", pool.Method)
req, err := ihttp.BuildRequest(pool.ctx, pool.ClientType, unit.path, "", "", "GET")
if err != nil {
logs.Log.Error(err.Error())
return
}
req.SetHeaders(pool.Headers)
req.SetHeader("User-Agent", pkg.RandomUA())
resp, reqerr := pool.client.Do(pool.ctx, req)
req.SetHeaders(pool.Headers, pool.RandomUserAgent)
resp, reqerr := pool.client.Do(req)
if pool.ClientType == ihttp.FAST {
defer fasthttp.ReleaseResponse(resp.FastResponse)
defer fasthttp.ReleaseRequest(req.FastRequest)
@ -443,7 +412,7 @@ func (pool *BrutePool) NoScopeInvoke(v interface{}) {
return
}
if resp.StatusCode() == 200 {
bl := pkg.NewBaseline(req.URI(), req.Host(), resp)
bl := baseline.NewBaseline(req.URI(), req.Host(), resp)
bl.Source = unit.source
bl.ReqDepth = unit.depth
bl.Collect()
@ -497,8 +466,6 @@ func (pool *BrutePool) Handler() {
}
if ok {
pool.Statistor.FoundNumber++
// uniqueness check
if EnableAllUnique || iutils.IntsContains(pkg.UniqueStatus, bl.Status) {
if _, ok := pool.uniques[bl.Unique]; ok {
@ -520,19 +487,15 @@ func (pool *BrutePool) Handler() {
bl.IsValid = false
}
if bl.IsValid || bl.IsFuzzy {
pool.wg.Add(2)
if bl.IsValid || (bl.IsFuzzy && pool.Fuzzy) {
pool.doCrawl(bl)
pool.doRule(bl)
if iutils.IntsContains(pkg.WhiteStatus, bl.Status) || iutils.IntsContains(pkg.UniqueStatus, bl.Status) {
pool.wg.Add(1)
pool.doAppendWords(bl)
}
pool.doAppend(bl)
}
// recursion requires a valid bl, path-spray mod, and a current depth below the max recursion depth
if bl.IsValid {
if bl.RecuDepth < MaxRecursion {
pool.Statistor.FoundNumber++
if bl.RecuDepth < pool.MaxRecursionDepth {
if pkg.CompareWithExpr(pool.RecuExpr, params) {
bl.Recu = true
}
@ -549,9 +512,40 @@ func (pool *BrutePool) Handler() {
pool.analyzeDone = true
}
func (pool *BrutePool) checkRedirect(redirectURL string) bool {
if pool.random.RedirectURL == "" {
// if the random baseline's RedirectURL is empty, ignore it
return true
}
if redirectURL == pool.random.RedirectURL {
// an identical RedirectURL is treated as invalid data
return false
} else {
// a 3xx response whose RedirectURL differs from the baseline's is treated as valid data
return true
}
}
func (pool *BrutePool) Upgrade(bl *baseline.Baseline) error {
rurl, err := url.Parse(bl.RedirectURL)
if err == nil && rurl.Hostname() == bl.Url.Hostname() && bl.Url.Scheme == "http" && rurl.Scheme == "https" {
logs.Log.Infof("baseurl %s upgrade http to https, reinit", pool.BaseURL)
pool.base = strings.Replace(pool.BaseURL, "http", "https", 1)
pool.url.Scheme = "https"
// re-initialize
err = pool.Init()
if err != nil {
return err
}
}
return nil
}
func (pool *BrutePool) PreCompare(resp *ihttp.Response) error {
status := resp.StatusCode()
if iutils.IntsContains(pkg.WhiteStatus, status) {
if pkg.StatusContain(pkg.WhiteStatus, status) {
// return immediately for whitelisted status codes
return nil
}
@ -559,11 +553,11 @@ func (pool *BrutePool) PreCompare(resp *ihttp.Response) error {
// return pkg.ErrSameStatus
//}
if iutils.IntsContains(pkg.BlackStatus, status) {
if pkg.StatusContain(pkg.BlackStatus, status) {
return pkg.ErrBadStatus
}
if iutils.IntsContains(pkg.WAFStatus, status) {
if pkg.StatusContain(pkg.WAFStatus, status) {
return pkg.ErrWaf
}
@ -574,19 +568,41 @@ func (pool *BrutePool) PreCompare(resp *ihttp.Response) error {
return nil
}
func (pool *BrutePool) BaseCompare(bl *pkg.Baseline) bool {
// same host return true
// diff host return false
func (pool *BrutePool) checkHost(u string) bool {
if v, err := url.Parse(u); err == nil {
if v.Host == "" {
return true
}
if v.Host == pool.url.Host {
return true
} else {
return false
}
}
return true
}
func (pool *BrutePool) BaseCompare(bl *baseline.Baseline) bool {
if !bl.IsValid {
return false
}
var status = -1
// special handling for 30x status codes
if bl.RedirectURL != "" && strings.HasSuffix(bl.RedirectURL, bl.Url.Path+"/") {
bl.Reason = pkg.ErrFuzzyRedirect.Error()
pool.putToFuzzy(bl)
return false
if bl.RedirectURL != "" {
if bl.SameRedirectDomain && strings.HasSuffix(bl.RedirectURL, bl.Url.Path+"/") {
bl.Reason = pkg.ErrFuzzyRedirect.Error()
return false
}
}
// use the baseline with the matching status code; it must be pre-configured via the fuzzy status list
base, ok := pool.baselines[bl.Status] // pick the baseline with the corresponding status code for comparison
if bl.IsBaseline {
ok = false
}
if !ok {
if pool.random.Status == bl.Status {
// when the other's status code matches the base, the base is used
@ -629,117 +645,22 @@ func (pool *BrutePool) BaseCompare(bl *pkg.Baseline) bool {
return true
}
func (pool *BrutePool) doCheck() {
if pool.failedCount > pool.BreakThreshold {
// when the failure count exceeds the threshold, end the task
pool.recover()
pool.Cancel()
pool.IsFailed = true
return
}
if pool.Mod == HostSpray {
pool.checkCh <- struct{}{}
} else if pool.Mod == PathSpray {
pool.checkCh <- struct{}{}
}
}
func (pool *BrutePool) doCrawl(bl *pkg.Baseline) {
if !pool.Crawl || bl.ReqDepth >= MaxCrawl {
pool.wg.Done()
return
}
bl.CollectURL()
if bl.URLs == nil {
pool.wg.Done()
return
}
pool.wg.Add(1)
pool.doScopeCrawl(bl)
go func() {
defer pool.wg.Done()
for _, u := range bl.URLs {
if u = pkg.FormatURL(bl.Url.Path, u); u == "" {
continue
}
pool.addAddition(&Unit{
path: u,
source: parsers.CrawlSource,
depth: bl.ReqDepth + 1,
})
}
}()
}
func (pool *BrutePool) doScopeCrawl(bl *pkg.Baseline) {
if bl.ReqDepth >= MaxCrawl {
pool.wg.Done()
return
}
go func() {
defer pool.wg.Done()
for _, u := range bl.URLs {
if strings.HasPrefix(u, "http") {
if v, _ := url.Parse(u); v == nil || !pkg.MatchWithGlobs(v.Host, pool.Scope) {
continue
}
pool.scopeLocker.Lock()
if _, ok := pool.scopeurls[u]; !ok {
pool.urls.Store(u, nil)
pool.wg.Add(1)
pool.scopePool.Invoke(&Unit{path: u, source: parsers.CrawlSource, depth: bl.ReqDepth + 1})
}
pool.scopeLocker.Unlock()
}
}
}()
}
func (pool *BrutePool) addFuzzyBaseline(bl *pkg.Baseline) {
func (pool *BrutePool) addFuzzyBaseline(bl *baseline.Baseline) {
if _, ok := pool.baselines[bl.Status]; !ok && (EnableAllFuzzy || iutils.IntsContains(pkg.FuzzyStatus, bl.Status)) {
bl.IsBaseline = true
bl.Collect()
pool.wg.Add(1)
pool.doCrawl(bl) // even invalid pages may contain special urls worth crawling
pool.baselines[bl.Status] = bl
logs.Log.Logf(pkg.LogVerbose, "[baseline.%dinit] %s", bl.Status, bl.Format([]string{"status", "length", "spend", "title", "frame", "redirect"}))
}
}
func (pool *BrutePool) doBak() {
defer pool.wg.Done()
worder, err := words.NewWorderWithDsl("{?0}.{?@bak_ext}", [][]string{pkg.BakGenerator(pool.url.Host)}, nil)
if err != nil {
return
}
worder.Run()
for w := range worder.C {
pool.addAddition(&Unit{
path: pool.dir + w,
source: parsers.BakSource,
})
}
worder, err = words.NewWorderWithDsl("{?@bak_name}.{?@bak_ext}", nil, nil)
if err != nil {
return
}
worder.Run()
for w := range worder.C {
pool.addAddition(&Unit{
path: pool.dir + w,
source: parsers.BakSource,
})
}
}
func (pool *BrutePool) recover() {
func (pool *BrutePool) fallback() {
logs.Log.Errorf("%s ,failed request exceeds the threshold , task will exit. Breakpoint %d", pool.BaseURL, pool.wordOffset)
for i, bl := range pool.FailedBaselines {
if i > 5 {
break
}
logs.Log.Errorf("[failed.%d] %s", i, bl.String())
}
}
@ -750,9 +671,10 @@ func (pool *BrutePool) Close() {
time.Sleep(time.Duration(100) * time.Millisecond)
}
close(pool.additionCh) // close the addition channel
close(pool.checkCh)    // close the check channel
//close(pool.checkCh)  // close the check channel
pool.Statistor.EndTime = time.Now().Unix()
pool.Bar.Close()
pool.reqPool.Release()
pool.scopePool.Release()
}
func (pool *BrutePool) safePath(u string) string {
@ -768,3 +690,213 @@ func (pool *BrutePool) resetFailed() {
pool.failedCount = 1
pool.FailedBaselines = nil
}
func (pool *BrutePool) doCheck() {
if pool.failedCount > pool.BreakThreshold {
// when the failure count exceeds the threshold, end the task
if pool.isFallback.Load() {
return
}
pool.isFallback.Store(true)
pool.fallback()
pool.IsFailed = true
pool.Cancel()
return
}
if pool.Mod == HostSpray {
pool.checkCh <- struct{}{}
} else if pool.Mod == PathSpray {
pool.checkCh <- struct{}{}
}
}
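// doRedirect re-enqueues the redirect target as a new RedirectSource unit until MaxRedirect is reached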
func (pool *BrutePool) doRedirect(bl *baseline.Baseline, depth int) {
if depth >= pool.MaxRedirect {
return
}
//if !bl.SameRedirectDomain {
// return // redirects to a different domain are not handled
//}
reURL := pkg.FormatURL(bl.Url.Path, bl.RedirectURL)
pool.wg.Add(1)
go func() {
defer pool.wg.Done()
pool.addAddition(&Unit{
path: reURL,
parent: bl.Number,
host: bl.Host,
source: parsers.RedirectSource,
from: bl.Source,
frontUrl: bl.UrlString,
depth: depth + 1,
})
}()
}
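// doCrawl collects URLs from the response, enqueues them as CrawlSource units, and feeds absolute URLs to doScopeCrawl, up to MaxCrawlDepth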
func (pool *BrutePool) doCrawl(bl *baseline.Baseline) {
if !pool.Crawl || bl.ReqDepth >= pool.MaxCrawlDepth {
return
}
bl.CollectURL()
if bl.URLs == nil {
return
}
pool.wg.Add(2)
pool.doScopeCrawl(bl)
go func() {
defer pool.wg.Done()
for _, u := range bl.URLs {
if u = pkg.FormatURL(bl.Url.Path, u); u == "" {
continue
}
pool.addAddition(&Unit{
path: u,
parent: bl.Number,
host: bl.Host,
source: parsers.CrawlSource,
from: bl.Source,
depth: bl.ReqDepth + 1,
})
}
}()
}
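// doScopeCrawl enqueues absolute URLs whose host matches the configured scope globs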
func (pool *BrutePool) doScopeCrawl(bl *baseline.Baseline) {
if bl.ReqDepth >= pool.MaxCrawlDepth {
pool.wg.Done()
return
}
go func() {
defer pool.wg.Done()
for _, u := range bl.URLs {
if strings.HasPrefix(u, "http") {
if v, _ := url.Parse(u); v == nil || !pkg.MatchWithGlobs(v.Host, pool.Scope) {
continue
}
pool.scopeLocker.Lock()
if _, ok := pool.scopeurls[u]; !ok {
pool.urls.Store(u, nil)
pool.wg.Add(1)
pool.scopePool.Invoke(&Unit{
path: u,
parent: bl.Number,
source: parsers.CrawlSource,
from: bl.Source,
depth: bl.ReqDepth + 1,
})
}
pool.scopeLocker.Unlock()
}
}
}()
}
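// doFuzzuli generates backup-style filenames derived from the target host and enqueues them as BakSource units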
func (pool *BrutePool) doFuzzuli() {
defer pool.wg.Done()
if pool.Mod == HostSpray {
return
}
for w := range NewBruteDSL(pool.Config, "{?0}.{?@bak_ext}", [][]string{pkg.BakGenerator(pool.url.Host)}).Output {
pool.addAddition(&Unit{
path: pool.dir + w,
source: parsers.BakSource,
})
}
}
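// doBak enqueues generic backup filename candidates ({?@bak_name}.{?@bak_ext}) as BakSource units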
func (pool *BrutePool) doBak() {
defer pool.wg.Done()
if pool.Mod == HostSpray {
return
}
for w := range NewBruteDSL(pool.Config, "{?@bak_name}.{?@bak_ext}", nil).Output {
pool.addAddition(&Unit{
path: pool.dir + w,
source: parsers.BakSource,
})
}
}
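// doAppend dispatches both the word-based and rule-based append tasks for a baseline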
func (pool *BrutePool) doAppend(bl *baseline.Baseline) {
pool.wg.Add(2)
pool.doAppendWords(bl)
pool.doAppendRule(bl)
}
func (pool *BrutePool) doAppendRule(bl *baseline.Baseline) {
if pool.AppendRule == nil || bl.Source == parsers.AppendRuleSource || bl.ReqDepth >= pool.MaxAppendDepth {
pool.wg.Done()
return
}
go func() {
defer pool.wg.Done()
for u := range rule.RunAsStream(pool.AppendRule.Expressions, path.Base(bl.Path)) {
pool.addAddition(&Unit{
path: pkg.Dir(bl.Url.Path) + u,
parent: bl.Number,
host: bl.Host,
source: parsers.AppendRuleSource,
from: bl.Source,
depth: bl.ReqDepth + 1,
})
}
}()
}
func (pool *BrutePool) doAppendWords(bl *baseline.Baseline) {
if pool.AppendWords == nil || bl.Source == parsers.AppendSource || bl.Source == parsers.RuleSource || bl.ReqDepth >= pool.MaxAppendDepth {
// prevent self-recursion
pool.wg.Done()
return
}
go func() {
defer pool.wg.Done()
for u := range NewBruteWords(pool.Config, pool.AppendWords).Output {
pool.addAddition(&Unit{
path: pkg.SafePath(bl.Path, u),
parent: bl.Number,
host: bl.Host,
source: parsers.AppendSource,
from: bl.Source,
depth: bl.RecuDepth + 1,
})
}
}()
}
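// doActive probes the built-in fingerprinting paths (pkg.ActivePath) under the current directory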
func (pool *BrutePool) doActive() {
defer pool.wg.Done()
if pool.Mod == HostSpray {
return
}
for _, u := range pkg.ActivePath {
pool.addAddition(&Unit{
path: pool.dir + u[1:],
source: parsers.FingerSource,
})
}
}
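// doCommonFile enqueues entries from the built-in "common" and "log" dictionaries under the current directory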
func (pool *BrutePool) doCommonFile() {
defer pool.wg.Done()
if pool.Mod == HostSpray {
return
}
for u := range NewBruteWords(pool.Config, append(pkg.Dicts["common"], pkg.Dicts["log"]...)).Output {
pool.addAddition(&Unit{
path: pool.dir + u,
source: parsers.CommonFileSource,
})
}
}

View File

@ -4,7 +4,8 @@ import (
"context"
"github.com/chainreactors/logs"
"github.com/chainreactors/parsers"
"github.com/chainreactors/spray/internal/ihttp"
"github.com/chainreactors/spray/core/baseline"
"github.com/chainreactors/spray/core/ihttp"
"github.com/chainreactors/spray/pkg"
"github.com/panjf2000/ants/v2"
"net/url"
@ -18,33 +19,34 @@ func NewCheckPool(ctx context.Context, config *Config) (*CheckPool, error) {
pctx, cancel := context.WithCancel(ctx)
config.ClientType = ihttp.STANDARD
pool := &CheckPool{
&BasePool{
BasePool: &BasePool{
Config: config,
Statistor: pkg.NewStatistor(""),
ctx: pctx,
Cancel: cancel,
client: ihttp.NewClient(&ihttp.ClientConfig{
Thread: config.Thread,
Type: config.ClientType,
Timeout: time.Duration(config.Timeout) * time.Second,
ProxyAddr: config.ProxyAddr,
Thread: config.Thread,
Type: config.ClientType,
Timeout: config.Timeout,
ProxyClient: config.ProxyClient,
}),
wg: &sync.WaitGroup{},
additionCh: make(chan *Unit, 1024),
closeCh: make(chan struct{}),
processCh: make(chan *pkg.Baseline, config.Thread),
processCh: make(chan *baseline.Baseline, config.Thread),
},
}
pool.Headers = map[string]string{"Connection": "close"}
pool.Headers.Set("Connection", "close")
p, _ := ants.NewPoolWithFunc(config.Thread, pool.Invoke)
pool.BasePool.Pool = p
pool.Pool = p
go pool.Handler()
return pool, nil
}
type CheckPool struct {
*BasePool
Pool *ants.PoolWithFunc
}
func (pool *CheckPool) Run(ctx context.Context, offset, limit int) {
@ -66,7 +68,7 @@ func (pool *CheckPool) Run(ctx context.Context, offset, limit int) {
Loop:
for {
select {
case u, ok := <-pool.Worder.C:
case u, ok := <-pool.Worder.Output:
if !ok {
done = true
continue
@ -82,12 +84,12 @@ Loop:
}
pool.wg.Add(1)
_ = pool.BasePool.Pool.Invoke(newUnit(u, parsers.CheckSource))
_ = pool.Pool.Invoke(newUnit(u, parsers.CheckSource))
case u, ok := <-pool.additionCh:
if !ok {
continue
}
_ = pool.BasePool.Pool.Invoke(u)
_ = pool.Pool.Invoke(u)
case <-pool.closeCh:
break Loop
case <-ctx.Done():
@ -99,6 +101,10 @@ Loop:
pool.Close()
}
func (pool *CheckPool) Close() {
pool.Bar.Close()
pool.Pool.Release()
}
func (pool *CheckPool) Invoke(v interface{}) {
defer func() {
@ -107,10 +113,10 @@ func (pool *CheckPool) Invoke(v interface{}) {
}()
unit := v.(*Unit)
req, err := pool.genReq(unit.path)
req, err := ihttp.BuildRequest(pool.ctx, pool.ClientType, unit.path, "", "", "GET")
if err != nil {
logs.Log.Debug(err.Error())
bl := &pkg.Baseline{
bl := &baseline.Baseline{
SprayResult: &parsers.SprayResult{
UrlString: unit.path,
IsValid: false,
@ -122,13 +128,13 @@ func (pool *CheckPool) Invoke(v interface{}) {
pool.processCh <- bl
return
}
req.SetHeaders(pool.Headers)
req.SetHeaders(pool.Headers, pool.RandomUserAgent)
start := time.Now()
var bl *pkg.Baseline
resp, reqerr := pool.client.Do(pool.ctx, req)
var bl *baseline.Baseline
resp, reqerr := pool.client.Do(req)
if reqerr != nil {
pool.failedCount++
bl = &pkg.Baseline{
bl = &baseline.Baseline{
SprayResult: &parsers.SprayResult{
UrlString: unit.path,
IsValid: false,
@ -140,7 +146,8 @@ func (pool *CheckPool) Invoke(v interface{}) {
logs.Log.Debugf("%s, %s", unit.path, reqerr.Error())
pool.doUpgrade(bl)
} else {
bl = pkg.NewBaseline(req.URI(), req.Host(), resp)
bl = baseline.NewBaseline(req.URI(), req.Host(), resp)
bl.ReqDepth = unit.depth
bl.Collect()
if bl.Status == 400 {
pool.doUpgrade(bl)
@ -174,8 +181,8 @@ func (pool *CheckPool) Handler() {
}
}
func (pool *CheckPool) doRedirect(bl *pkg.Baseline, depth int) {
if depth >= MaxRedirect {
func (pool *CheckPool) doRedirect(bl *baseline.Baseline, depth int) {
if depth >= pool.MaxRedirect {
return
}
var reURL string
@ -193,15 +200,17 @@ func (pool *CheckPool) doRedirect(bl *pkg.Baseline, depth int) {
go func() {
pool.additionCh <- &Unit{
path: reURL,
parent: bl.Number,
source: parsers.RedirectSource,
frontUrl: bl.UrlString,
depth: depth + 1,
from: bl.Source,
}
}()
}
// protocol switching for tcp errors and 400 responses
func (pool *CheckPool) doUpgrade(bl *pkg.Baseline) {
func (pool *CheckPool) doUpgrade(bl *baseline.Baseline) {
if bl.ReqDepth >= 1 {
return
}
@ -215,8 +224,10 @@ func (pool *CheckPool) doUpgrade(bl *pkg.Baseline) {
go func() {
pool.additionCh <- &Unit{
path: reurl,
parent: bl.Number,
source: parsers.UpgradeSource,
depth: bl.ReqDepth + 1,
from: bl.Source,
}
}()
}

72
core/pool/config.go Normal file
View File

@ -0,0 +1,72 @@
package pool
import (
"github.com/chainreactors/logs"
"github.com/chainreactors/proxyclient"
"github.com/chainreactors/spray/core/baseline"
"github.com/chainreactors/words"
"github.com/chainreactors/words/rule"
"github.com/expr-lang/expr/vm"
"net/http"
"sync"
"time"
)
type Config struct {
BaseURL string
ProxyClient proxyclient.Dial
Thread int
Wordlist []string
Timeout time.Duration
ProcessCh chan *baseline.Baseline
OutputCh chan *baseline.Baseline
FuzzyCh chan *baseline.Baseline
Outwg *sync.WaitGroup
RateLimit int
CheckPeriod int
ErrPeriod int32
BreakThreshold int32
Method string
Mod SprayMod
Headers http.Header
ClientType int
MatchExpr *vm.Program
FilterExpr *vm.Program
RecuExpr *vm.Program
AppendRule *rule.Program
Fns []words.WordFunc
AppendWords []string
Fuzzy bool
IgnoreWaf bool
Crawl bool
Scope []string
Active bool
Bak bool
Fuzzuli bool
Common bool
RetryLimit int
RandomUserAgent bool
Random string
Index string
MaxRedirect int
MaxCrawlDepth int
MaxRecursionDepth int
MaxAppendDepth int
}
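// NewBruteWords builds and starts a Worder from a static word list, applying the configured word functions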
func NewBruteWords(config *Config, list []string) *words.Worder {
word := words.NewWorderWithList(list)
word.Fns = config.Fns
word.Run()
return word
}
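// NewBruteDSL builds and starts a Worder from a mask/DSL expression with optional parameter lists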
func NewBruteDSL(config *Config, dsl string, params [][]string) *words.Worder {
word, err := words.NewWorderWithDsl(dsl, params, nil)
if err != nil {
logs.Log.Error(err.Error())
}
word.Fns = config.Fns
word.Run()
return word
}

72
core/pool/pool.go Normal file
View File

@ -0,0 +1,72 @@
package pool
import (
"context"
"github.com/chainreactors/parsers"
"github.com/chainreactors/spray/core/baseline"
"github.com/chainreactors/spray/core/ihttp"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/words"
"sync"
"sync/atomic"
)
type BasePool struct {
*Config
Statistor *pkg.Statistor
Bar *pkg.Bar
Worder *words.Worder
Cancel context.CancelFunc
client *ihttp.Client
ctx context.Context
processCh chan *baseline.Baseline // baselines awaiting processing
dir string
reqCount int
failedCount int
additionCh chan *Unit
closeCh chan struct{}
wg *sync.WaitGroup
isFallback atomic.Bool
}
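// doRetry re-enqueues a failed baseline as a RetrySource unit until RetryLimit is reached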
func (pool *BasePool) doRetry(bl *baseline.Baseline) {
if bl.Retry >= pool.RetryLimit {
return
}
pool.wg.Add(1)
go func() {
defer pool.wg.Done()
pool.addAddition(&Unit{
path: bl.Path,
parent: bl.Number,
host: bl.Host,
source: parsers.RetrySource,
from: bl.Source,
retry: bl.Retry + 1,
})
}()
}
func (pool *BasePool) addAddition(u *Unit) {
// forcibly suppress the panic to prevent goroutine leaks
pool.wg.Add(1)
defer func() {
if err := recover(); err != nil {
}
}()
pool.additionCh <- u
}
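// putToOutput runs Collect on valid or fuzzy baselines and forwards the result to the output channel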
func (pool *BasePool) putToOutput(bl *baseline.Baseline) {
if bl.IsValid || bl.IsFuzzy {
bl.Collect()
}
pool.Outwg.Add(1)
pool.OutputCh <- bl
}
func (pool *BasePool) putToFuzzy(bl *baseline.Baseline) {
pool.Outwg.Add(1)
bl.IsFuzzy = true
pool.FuzzyCh <- bl
}

57
core/pool/type.go Normal file
View File

@ -0,0 +1,57 @@
package pool
import (
"github.com/chainreactors/parsers"
"github.com/chainreactors/spray/core/baseline"
)
func newUnit(path string, source parsers.SpraySource) *Unit {
return &Unit{path: path, source: source}
}
type Unit struct {
number int
parent int
host string
path string
from parsers.SpraySource
source parsers.SpraySource
retry int
frontUrl string
depth int
}
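// Update copies the unit's bookkeeping fields (number, parent, host, path, source) onto the baseline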
func (u *Unit) Update(bl *baseline.Baseline) {
bl.Number = u.number
bl.Parent = u.parent
bl.Host = u.host
bl.Path = u.path
bl.Source = u.source
}
func NewBaselines() *Baselines {
return &Baselines{
baselines: map[int]*baseline.Baseline{},
}
}
type Baselines struct {
FailedBaselines []*baseline.Baseline
random *baseline.Baseline
index *baseline.Baseline
baselines map[int]*baseline.Baseline
}
type SprayMod int
const (
PathSpray SprayMod = iota + 1
HostSpray
ParamSpray
CustomSpray
)
var ModMap = map[string]SprayMod{
"path": PathSpray,
"host": HostSpray,
}

View File

@ -1,11 +1,13 @@
package internal
package core
import (
"context"
"github.com/chainreactors/files"
"github.com/chainreactors/logs"
"github.com/chainreactors/spray/internal/ihttp"
"github.com/chainreactors/spray/internal/pool"
"github.com/chainreactors/proxyclient"
"github.com/chainreactors/spray/core/baseline"
"github.com/chainreactors/spray/core/ihttp"
"github.com/chainreactors/spray/core/pool"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/words"
"github.com/chainreactors/words/rule"
@ -13,28 +15,28 @@ import (
"github.com/panjf2000/ants/v2"
"github.com/vbauerster/mpb/v8"
"github.com/vbauerster/mpb/v8/decor"
"net/http"
"strings"
"sync"
"time"
)
var (
MAX = 2147483647
)
var (
dictCache = make(map[string][]string)
wordlistCache = make(map[string][]string)
ruleCache = make(map[string][]rule.Expression)
)
type Runner struct {
*Option
taskCh chan *Task
poolwg sync.WaitGroup
outwg *sync.WaitGroup
outputCh chan *pkg.Baseline
fuzzyCh chan *pkg.Baseline
bar *mpb.Bar
taskCh chan *Task
poolwg *sync.WaitGroup
outwg *sync.WaitGroup
outputCh chan *baseline.Baseline
fuzzyCh chan *baseline.Baseline
bar *mpb.Bar
bruteMod bool
ProxyClient proxyclient.Dial
IsCheck bool
Pools *ants.PoolWithFunc
PoolName map[string]bool
@ -46,28 +48,27 @@ type Runner struct {
MatchExpr *vm.Program
RecursiveExpr *vm.Program
OutputFile *files.File
FuzzyFile *files.File
DumpFile *files.File
StatFile *files.File
Progress *mpb.Progress
Fns []func(string) []string
Count int // tasks total number
Wordlist []string
AppendWords []string
RecuDepth int
ClientType int
Probes []string
Total int // wordlist total number
Color bool
Jsonify bool
//FuzzyFile *files.File
DumpFile *files.File
StatFile *files.File
Progress *mpb.Progress
Fns []words.WordFunc
Count int // tasks total number
Wordlist []string
AppendWords []string
ClientType int
Probes []string
Total int // wordlist total number
Color bool
Jsonify bool
}
func (r *Runner) PrepareConfig() *pool.Config {
config := &pool.Config{
Thread: r.Threads,
Timeout: r.Timeout,
Timeout: time.Duration(r.Timeout) * time.Second,
RateLimit: r.RateLimit,
Headers: r.Headers,
Headers: make(http.Header),
Method: r.Method,
Mod: pool.ModMap[r.Mod],
OutputCh: r.outputCh,
@ -80,20 +81,26 @@ func (r *Runner) PrepareConfig() *pool.Config {
MatchExpr: r.MatchExpr,
FilterExpr: r.FilterExpr,
RecuExpr: r.RecursiveExpr,
AppendRule: r.AppendRules,
AppendWords: r.AppendWords,
AppendRule:  r.AppendRules, // append rules to valid directories, generated from rule
AppendWords: r.AppendWords, // append wordlist entries to valid directories
Fns: r.Fns,
//IgnoreWaf: r.IgnoreWaf,
Crawl: r.Crawl,
Scope: r.Scope,
Active: r.Finger,
Bak: r.Bak,
Common: r.Common,
Retry: r.RetryCount,
ClientType: r.ClientType,
RandomUserAgent: r.RandomUserAgent,
Random: r.Random,
Index: r.Index,
ProxyAddr: r.Proxy,
Crawl: r.CrawlPlugin,
Scope: r.Scope,
Active: r.Finger,
Bak: r.BakPlugin,
Fuzzuli: r.FuzzuliPlugin,
Common: r.CommonPlugin,
RetryLimit: r.RetryCount,
ClientType: r.ClientType,
RandomUserAgent: r.RandomUserAgent,
Random: r.Random,
Index: r.Index,
MaxRecursionDepth: r.Depth,
MaxRedirect: 3,
MaxAppendDepth: r.AppendDepth,
MaxCrawlDepth: r.CrawlDepth,
ProxyClient: r.ProxyClient,
}
if config.ClientType == ihttp.Auto {
@ -103,6 +110,19 @@ func (r *Runner) PrepareConfig() *pool.Config {
config.ClientType = ihttp.STANDARD
}
}
for k, v := range r.Headers {
config.Headers.Set(k, v)
}
if config.Headers.Get("User-Agent") == "" {
config.Headers.Set("User-Agent", pkg.DefaultUserAgent)
}
if config.Headers.Get("Accept") == "" {
config.Headers.Set("Accept", "*/*")
}
return config
}
@ -111,6 +131,9 @@ func (r *Runner) AppendFunction(fn func(string) []string) {
}
func (r *Runner) Prepare(ctx context.Context) error {
if r.bruteMod {
r.IsCheck = false
}
r.OutputHandler()
var err error
if r.IsCheck {
@ -182,7 +205,7 @@ func (r *Runner) Prepare(ctx context.Context) error {
brutePool.Statistor.Total = t.origin.sum
} else {
brutePool.Statistor = pkg.NewStatistor(t.baseUrl)
brutePool.Worder = words.NewWorder(r.Wordlist)
brutePool.Worder = words.NewWorderWithList(r.Wordlist)
brutePool.Worder.Fns = r.Fns
brutePool.Worder.Rules = r.Rules.Expressions
}
@ -194,12 +217,14 @@ func (r *Runner) Prepare(ctx context.Context) error {
limit = brutePool.Statistor.Total
}
brutePool.Bar = pkg.NewBar(config.BaseURL, limit-brutePool.Statistor.Offset, brutePool.Statistor, r.Progress)
logs.Log.Importantf("[pool] task: %s, total %d words, %d threads, proxy: %s", brutePool.BaseURL, limit-brutePool.Statistor.Offset, brutePool.Thread, brutePool.ProxyAddr)
logs.Log.Importantf("[pool] task: %s, total %d words, %d threads, proxy: %v",
brutePool.BaseURL, limit-brutePool.Statistor.Offset, brutePool.Thread, r.Proxies)
err = brutePool.Init()
if err != nil {
brutePool.Statistor.Error = err.Error()
if !r.Force {
// if force is not enabled, a failed init closes the pool
brutePool.Bar.Close()
brutePool.Close()
r.PrintStat(brutePool)
r.Done()
@ -207,7 +232,7 @@ func (r *Runner) Prepare(ctx context.Context) error {
}
}
brutePool.Run(ctx, brutePool.Statistor.Offset, limit)
brutePool.Run(brutePool.Statistor.Offset, limit)
if brutePool.IsFailed && len(brutePool.FailedBaselines) > 0 {
// if the pool exits due to accumulated errors, end points to when the first error occurred, preventing resume from skipping a large number of targets
@ -230,6 +255,7 @@ Loop:
for {
select {
case <-ctx.Done():
// once the deadline is exceeded, tasks that have not yet started are recorded in stat
if len(r.taskCh) > 0 {
for t := range r.taskCh {
stat := pkg.NewStatistor(t.baseUrl)
@ -248,6 +274,9 @@ Loop:
}
}
if r.bar != nil {
r.bar.Wait()
}
r.poolwg.Wait()
r.outwg.Wait()
}
@ -278,7 +307,7 @@ Loop:
r.outwg.Wait()
}
func (r *Runner) AddRecursive(bl *pkg.Baseline) {
func (r *Runner) AddRecursive(bl *baseline.Baseline) {
// new recursive task
task := &Task{
baseUrl: bl.UrlString,
@ -354,62 +383,40 @@ func (r *Runner) saveStat(content string) {
}
}
func (r *Runner) OutputHandler() {
debugPrint := func(bl *pkg.Baseline) {
if r.Color {
logs.Log.Debug(bl.ColorString())
} else {
logs.Log.Debug(bl.String())
}
func (r *Runner) Output(bl *baseline.Baseline) {
var out string
if r.Option.Json {
out = bl.ToJson()
} else if len(r.Probes) > 0 {
out = bl.ProbeOutput(r.Probes)
} else if r.Color {
out = bl.ColorString()
} else {
out = bl.String()
}
if bl.IsValid {
logs.Log.Console(out + "\n")
} else if r.Fuzzy && bl.IsFuzzy {
logs.Log.Console("[fuzzy] " + out + "\n")
}
var saveFunc func(string)
if r.OutputFile != nil {
saveFunc = func(line string) {
r.OutputFile.SafeWrite(line + "\n")
r.OutputFile.SafeSync()
}
} else {
saveFunc = func(line string) {
logs.Log.Console(line + "\n")
}
}
var fuzzySaveFunc func(string)
if r.FuzzyFile != nil {
fuzzySaveFunc = func(line string) {
r.FuzzyFile.SafeWrite(line + "\n")
r.FuzzyFile.SafeSync()
}
} else {
fuzzySaveFunc = func(line string) {
logs.Log.Console("[fuzzy] " + line + "\n")
}
}
outputPrint := func(bl *pkg.Baseline) {
var outFunc func(string)
if bl.IsFuzzy {
outFunc = fuzzySaveFunc
if r.FileOutput == "json" {
r.OutputFile.SafeWrite(bl.ToJson() + "\n")
} else if r.FileOutput == "csv" {
r.OutputFile.SafeWrite(bl.ToCSV())
} else if r.FileOutput == "full" {
r.OutputFile.SafeWrite(bl.String() + "\n")
} else {
outFunc = saveFunc
r.OutputFile.SafeWrite(bl.ProbeOutput(strings.Split(r.FileOutput, ",")) + "\n")
}
if r.Option.Json {
outFunc(bl.Jsonify())
} else if r.Color {
if len(r.Probes) > 0 {
outFunc(logs.GreenBold(bl.Format(r.Probes)))
} else {
outFunc(logs.GreenBold(bl.ColorString()))
}
} else {
if len(r.Probes) > 0 {
outFunc(bl.Format(r.Probes))
} else {
outFunc(bl.String())
}
}
}
r.OutputFile.SafeSync()
}
}
func (r *Runner) OutputHandler() {
go func() {
for {
select {
@ -418,16 +425,20 @@ func (r *Runner) OutputHandler() {
return
}
if r.DumpFile != nil {
r.DumpFile.SafeWrite(bl.Jsonify() + "\n")
r.DumpFile.SafeWrite(bl.ToJson() + "\n")
r.DumpFile.SafeSync()
}
if bl.IsValid {
outputPrint(bl)
r.Output(bl)
if bl.Recu {
r.AddRecursive(bl)
}
} else {
debugPrint(bl)
if r.Color {
logs.Log.Debug(bl.ColorString())
} else {
logs.Log.Debug(bl.String())
}
}
r.outwg.Done()
}
@ -441,9 +452,7 @@ func (r *Runner) OutputHandler() {
if !ok {
return
}
if r.Fuzzy {
outputPrint(bl)
}
r.Output(bl)
r.outwg.Done()
}
}

View File

@ -1,4 +1,4 @@
package internal
package core
import (
"fmt"

View File

@ -1,4 +1,4 @@
package internal
package core
import (
"github.com/chainreactors/spray/pkg"
@ -14,15 +14,15 @@ type Origin struct {
sum int
}
func (o *Origin) InitWorder(fns []func(string) []string) (*words.Worder, error) {
func (o *Origin) InitWorder(fns []words.WordFunc) (*words.Worder, error) {
var worder *words.Worder
wl, err := loadWordlist(o.Word, o.Dictionaries)
wl, err := pkg.LoadWordlist(o.Word, o.Dictionaries)
if err != nil {
return nil, err
}
worder = words.NewWorder(wl)
worder = words.NewWorderWithList(wl)
worder.Fns = fns
rules, err := loadRuleWithFiles(o.RuleFiles, o.RuleFilter)
rules, err := pkg.LoadRuleWithFiles(o.RuleFiles, o.RuleFilter)
if err != nil {
return nil, err
}

33
go.mod
View File

@ -1,23 +1,22 @@
module github.com/chainreactors/spray
go 1.22
toolchain go1.22.2
go 1.20
require (
github.com/chainreactors/files v0.0.0-20240716182835-7884ee1e77f0
github.com/chainreactors/fingers v0.0.0-20240716172449-2fc3147b9c2a
github.com/chainreactors/logs v0.0.0-20240207121836-c946f072f81f
github.com/chainreactors/parsers v0.0.0-20240708072709-07deeece7ce2
github.com/chainreactors/logs v0.0.0-20241115105204-6132e39f5261
github.com/chainreactors/parsers v0.0.0-20250605044448-6bc270f12c0e
github.com/chainreactors/proxyclient v1.0.3-0.20250219180226-a25a0c9e6ac8
github.com/chainreactors/utils v0.0.0-20240805193040-ff3b97aa3c3f
github.com/chainreactors/words v0.4.1-0.20240510105042-5ba5c2edc508
github.com/chainreactors/words v0.0.0-20240910083848-19a289e8984b
github.com/charmbracelet/lipgloss v0.13.0
github.com/expr-lang/expr v1.16.9
github.com/gookit/config/v2 v2.2.5
github.com/jessevdk/go-flags v1.5.0
github.com/panjf2000/ants/v2 v2.9.1
github.com/valyala/fasthttp v1.53.0
github.com/vbauerster/mpb/v8 v8.7.3
golang.org/x/net v0.25.0
golang.org/x/time v0.5.0
sigs.k8s.io/yaml v1.4.0
)
@ -27,6 +26,8 @@ require (
github.com/VividCortex/ewma v1.2.0 // indirect
github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d // indirect
github.com/andybalholm/brotli v1.1.0 // indirect
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
github.com/charmbracelet/x/ansi v0.1.4 // indirect
github.com/facebookincubator/nvdtools v0.1.5 // indirect
github.com/fatih/color v1.17.0 // indirect
github.com/go-dedup/megophone v0.0.0-20170830025436-f01be21026f5 // indirect
@ -34,27 +35,33 @@ require (
github.com/go-dedup/text v0.0.0-20170907015346-8bb1b95e3cb7 // indirect
github.com/go-playground/validator/v10 v10.20.0 // indirect
github.com/goccy/go-yaml v1.11.3 // indirect
github.com/google/go-cmp v0.6.0 // indirect
github.com/gookit/color v1.5.4 // indirect
github.com/gookit/goutil v0.6.15 // indirect
github.com/klauspost/compress v1.17.8 // indirect
github.com/kr/pretty v0.3.1 // indirect
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mattn/go-runewidth v0.0.15 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/muesli/termenv v0.15.2 // indirect
github.com/pkg/errors v0.9.1 // indirect
github.com/riobard/go-bloom v0.0.0-20200614022211-cdc8013cb5b3 // indirect
github.com/rivo/uniseg v0.4.7 // indirect
github.com/rogpeppe/go-internal v1.12.0 // indirect
github.com/shadowsocks/go-shadowsocks2 v0.1.5 // indirect
github.com/twmb/murmur3 v1.1.8 // indirect
github.com/valyala/bytebufferpool v1.0.0 // indirect
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
golang.org/x/crypto v0.33.0 // indirect
golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842 // indirect
golang.org/x/sync v0.7.0 // indirect
golang.org/x/sys v0.20.0 // indirect
golang.org/x/term v0.20.0 // indirect
golang.org/x/text v0.15.0 // indirect
golang.org/x/net v0.25.0 // indirect
golang.org/x/sync v0.11.0 // indirect
golang.org/x/sys v0.30.0 // indirect
golang.org/x/term v0.29.0 // indirect
golang.org/x/text v0.22.0 // indirect
golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 // indirect
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)
replace github.com/chainreactors/proxyclient => github.com/chainreactors/proxyclient v1.0.3

105
go.sum
View File

@ -50,6 +50,7 @@ dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/toml v0.4.1/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ=
github.com/BurntSushi/toml v1.3.2/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ=
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
@ -62,6 +63,7 @@ github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuy
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8=
github.com/andybalholm/brotli v1.0.6/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
github.com/andybalholm/brotli v1.1.0 h1:eLKJA0d02Lf0mVpIDgYnqXcUn0GqVmEFny3VuID1U3M=
github.com/andybalholm/brotli v1.1.0/go.mod h1:sms7XGricyQI9K10gOSf56VKKWS4oLer58Q+mhRPtnY=
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
@ -70,16 +72,18 @@ github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmV
github.com/armon/go-metrics v0.3.10/go.mod h1:4O98XIr/9W0sxpJ8UaYkvjk10Iff7SnFrb4QAOwNTFc=
github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k=
github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8=
github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
github.com/bwesterb/go-ristretto v1.2.3/go.mod h1:fUIoIZaG73pV5biE2Blr2xEzDoMj7NFEuV9ekS419A0=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/census-instrumentation/opencensus-proto v0.3.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/chainreactors/files v0.0.0-20230731174853-acee21c8c45a/go.mod h1:/Xa9YXhjBlaC33JTD6ZTJFig6pcplak2IDcovf42/6A=
github.com/chainreactors/files v0.0.0-20231102192550-a652458cee26/go.mod h1:/Xa9YXhjBlaC33JTD6ZTJFig6pcplak2IDcovf42/6A=
github.com/chainreactors/files v0.0.0-20231123083421-cea5b4ad18a8/go.mod h1:/Xa9YXhjBlaC33JTD6ZTJFig6pcplak2IDcovf42/6A=
github.com/chainreactors/files v0.0.0-20240716182835-7884ee1e77f0 h1:cU3sGEODXZsUZGBXfnz0nyxF6+37vA+ZGDx6L/FKN4o=
@ -87,27 +91,38 @@ github.com/chainreactors/files v0.0.0-20240716182835-7884ee1e77f0/go.mod h1:NSxG
github.com/chainreactors/fingers v0.0.0-20240702104653-a66e34aa41df/go.mod h1:l8AO6ZbIL8WQ8PkihCK/MD6Iww/O+LY/osAhRJjThs4=
github.com/chainreactors/fingers v0.0.0-20240716172449-2fc3147b9c2a h1:5l4i8TdHRlz088J5xZM30yvTUMLVcWJ6iXiO/VyD3ro=
github.com/chainreactors/fingers v0.0.0-20240716172449-2fc3147b9c2a/go.mod h1:R03soobTE/AnZWtFgfQVYNM5QLH52NZ946wZTJVBXh4=
github.com/chainreactors/logs v0.0.0-20231027080134-7a11bb413460/go.mod h1:VZFqkFDGmp7/JOMeraW+YI7kTGcgz9fgc/HArVFnrGQ=
github.com/chainreactors/logs v0.0.0-20240207121836-c946f072f81f h1:tcfp+CEdgiMvjyUzWab5edJtxUwRMSMEIkLybupIx0k=
github.com/chainreactors/logs v0.0.0-20240207121836-c946f072f81f/go.mod h1:6Mv6W70JrtL6VClulZhmMRZnoYpcTahcDTKLMNEjK0o=
github.com/chainreactors/logs v0.0.0-20241115105204-6132e39f5261 h1:gcRLCAF4ANvltkdh7cnLFCNrogwl0Qh8oNaYrKHMyz4=
github.com/chainreactors/logs v0.0.0-20241115105204-6132e39f5261/go.mod h1:6Mv6W70JrtL6VClulZhmMRZnoYpcTahcDTKLMNEjK0o=
github.com/chainreactors/parsers v0.0.0-20240702104902-1ce563b7ef76/go.mod h1:G/XLE5RAaUdqADkbhQ59mPrUAbsJLiQ2DN6CwtwNpBQ=
github.com/chainreactors/parsers v0.0.0-20240708072709-07deeece7ce2 h1:sE3SChgHLtPsEaqHo5tDSy8niDys1SO174C4eHlShSw=
github.com/chainreactors/parsers v0.0.0-20240708072709-07deeece7ce2/go.mod h1:7rXdYz6jrdjF0WUH1ICcAXKIKKjKmJo2PU8u43V7jkA=
github.com/chainreactors/parsers v0.0.0-20250222062812-66fe23cfde02 h1:zpBTjOampIeifWQKiyfpSwHvIO0aJ60N7FlO1Z5ePKc=
github.com/chainreactors/parsers v0.0.0-20250222062812-66fe23cfde02/go.mod h1:7rXdYz6jrdjF0WUH1ICcAXKIKKjKmJo2PU8u43V7jkA=
github.com/chainreactors/parsers v0.0.0-20250418131403-e9e233e9d804 h1:6YFXQEaJH/A7sFAVHJbH3iOf1L0gbD9IaBGTj0ETIHc=
github.com/chainreactors/parsers v0.0.0-20250418131403-e9e233e9d804/go.mod h1:7rXdYz6jrdjF0WUH1ICcAXKIKKjKmJo2PU8u43V7jkA=
github.com/chainreactors/parsers v0.0.0-20250605044448-6bc270f12c0e h1:8AgGNkG1JoO6CIGlMNOecUCaQCnB/Ko/WI3Y6VgVPrI=
github.com/chainreactors/parsers v0.0.0-20250605044448-6bc270f12c0e/go.mod h1:7rXdYz6jrdjF0WUH1ICcAXKIKKjKmJo2PU8u43V7jkA=
github.com/chainreactors/proxyclient v1.0.3 h1:afnymFICAEqzG4rVMMJhd4Tqrx6LfTssUbo+T8P52vs=
github.com/chainreactors/proxyclient v1.0.3/go.mod h1:kuB9olIK/GOW3lrpbYcJ2Uxb1aKsQPQmxewfCyIZ/0g=
github.com/chainreactors/utils v0.0.0-20240528085651-ba1b255482c1/go.mod h1:JA4eiQZm+7AsfjXBcIzIdVKBEhDCb16eNtWFCGTxlvs=
github.com/chainreactors/utils v0.0.0-20240704062557-662d623b74f4/go.mod h1:JA4eiQZm+7AsfjXBcIzIdVKBEhDCb16eNtWFCGTxlvs=
github.com/chainreactors/utils v0.0.0-20240715080349-d2d0484c95ed/go.mod h1:LajXuvESQwP+qCMAvlcoSXppQCjuLlBrnQpu9XQ1HtU=
github.com/chainreactors/utils v0.0.0-20240716182459-e85f2b01ee16 h1:TCOshCp7PrWqhP/HSAM5kT3VxoOe7EoJbRseyoSX3RM=
github.com/chainreactors/utils v0.0.0-20240716182459-e85f2b01ee16/go.mod h1:LajXuvESQwP+qCMAvlcoSXppQCjuLlBrnQpu9XQ1HtU=
github.com/chainreactors/utils v0.0.0-20240805193040-ff3b97aa3c3f h1:2NKmadFYP9vCwC0YrazgttFACleOhxScTPzg0i76YAY=
github.com/chainreactors/utils v0.0.0-20240805193040-ff3b97aa3c3f/go.mod h1:LajXuvESQwP+qCMAvlcoSXppQCjuLlBrnQpu9XQ1HtU=
github.com/chainreactors/words v0.4.1-0.20240510105042-5ba5c2edc508 h1:iT4HWkoZzUAfQYcQMRH8XyrMau9tCVE0zSuFQnkhrqw=
github.com/chainreactors/words v0.4.1-0.20240510105042-5ba5c2edc508/go.mod h1:DUDx7PdsMEm5PvVhzkFyppzpiUhQb8dOJaWjVc1SMVk=
github.com/chainreactors/words v0.0.0-20240910083848-19a289e8984b h1:OsZ1fyarW4NwK/Oi+Yf3nm/dTW0uX0UfxFjyky5Mb60=
github.com/chainreactors/words v0.0.0-20240910083848-19a289e8984b/go.mod h1:zfz367PUmyaX6oAqV9SktVqyRXKlEh0sel9Wsq9dd2c=
github.com/charmbracelet/lipgloss v0.13.0 h1:4X3PPeoWEDCMvzDvGmTajSyYPcZM4+y8sCA/SsA3cjw=
github.com/charmbracelet/lipgloss v0.13.0/go.mod h1:nw4zy0SBX/F/eAO1cWdcvy6qnkDUxr8Lw7dvFrAIbbY=
github.com/charmbracelet/x/ansi v0.1.4 h1:IEU3D6+dWwPSgZ6HBH+v6oUuZ/nVawMiWj5831KfiLM=
github.com/charmbracelet/x/ansi v0.1.4/go.mod h1:dk73KoMTT5AX5BsX0KrqhsTqAnhZZoCBjs7dGWp4Ktw=
github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag=
github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/cloudflare/circl v1.3.7/go.mod h1:sRTcRWXGLrKw6yIGJ+l7amYJFfAXbZG0kBSc8r4zxgA=
github.com/cloudflare/circl v1.3.8/go.mod h1:PDRU+oXvdD7KCtgKxW95M5Z8BpSCJXQORiZFnBQS5QU=
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
@ -121,6 +136,7 @@ github.com/cncf/xds/go v0.0.0-20211130200136-a8f946100490/go.mod h1:eXthEFrGJvWH
github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
@ -149,7 +165,6 @@ github.com/fatih/color v1.17.0 h1:GlRw1BRJxkpqUCBKzKOw098ed57fEsKeNjpTe3cSjK4=
github.com/fatih/color v1.17.0/go.mod h1:YZ7TlrGPkiz6ku9fK3TLD/pl3CpsiFyu8N92HLgmosI=
github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU=
github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0=
github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk=
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/go-dedup/megophone v0.0.0-20170830025436-f01be21026f5 h1:4U+x+EB1P66zwYgTjxWXSOT8vF+651Ksr1lojiCZnT8=
github.com/go-dedup/megophone v0.0.0-20170830025436-f01be21026f5/go.mod h1:poR/Cp00iqtqu9ltFwl6C00sKC0HY13u/Gh05ZBmP54=
@ -160,18 +175,18 @@ github.com/go-dedup/text v0.0.0-20170907015346-8bb1b95e3cb7/go.mod h1:wSsK4VOECO
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-gost/gosocks5 v0.3.0/go.mod h1:1G6I7HP7VFVxveGkoK8mnprnJqSqJjdcASKsdUn4Pp4=
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
github.com/go-playground/validator/v10 v10.20.0 h1:K9ISHbSaI0lyB2eWMPJo+kOS/FBExVwjEviJTixqxL8=
github.com/go-playground/validator/v10 v10.20.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8=
github.com/goccy/go-yaml v1.11.3 h1:B3W9IdWbvrUu2OYQGwvU1nZtvMQJPBKgBUuweJjLj6I=
github.com/goccy/go-yaml v1.11.3/go.mod h1:wKnAMd44+9JAAnGQpWVEgBzGt3YuTaQ4uXoHvE4m7WU=
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
@ -258,7 +273,6 @@ github.com/gookit/config/v2 v2.2.5/go.mod h1:NeX+yiNYn6Ei10eJvCQFXuHEPIE/IPS8bqa
github.com/gookit/goutil v0.6.15 h1:mMQ0ElojNZoyPD0eVROk5QXJPh2uKR4g06slgPDF5Jo=
github.com/gookit/goutil v0.6.15/go.mod h1:qdKdYEHQdEtyH+4fNdQNZfJHhI0jUZzHxQVAV3DaMDY=
github.com/gookit/ini/v2 v2.2.3 h1:nSbN+x9OfQPcMObTFP+XuHt8ev6ndv/fWWqxFhPMu2E=
github.com/gookit/ini/v2 v2.2.3/go.mod h1:Vu6p7P7xcfmb8KYu3L0ek8bqu/Im63N81q208SCCZY4=
github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw=
github.com/hashicorp/consul/api v1.11.0/go.mod h1:XjsvQN+RJGWI2TWy1/kqaE16HrR2J/FWgkYjdZQsX9M=
github.com/hashicorp/consul/sdk v0.8.0/go.mod h1:GBvyrGALthsZObzUGsfgHZQDXjg4lOjagTIwIR1vPms=
@ -303,8 +317,11 @@ github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHm
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
github.com/kataras/golog v0.1.8/go.mod h1:rGPAin4hYROfk1qT9wZP6VY2rsb4zzc37QpdPjdkqVw=
github.com/kataras/pio v0.0.11/go.mod h1:38hH6SWH6m4DKSYmRhlrCJ5WItwWgCVrTNU62XZyUvI=
github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/klauspost/compress v1.17.4/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM=
github.com/klauspost/compress v1.17.8 h1:YcnTYrq7MikUT7k0Yb5eceMmALQPYBW/Xltxn0NAMnU=
github.com/klauspost/compress v1.17.8/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
@ -320,7 +337,8 @@ github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
github.com/lyft/protoc-gen-star v0.5.3/go.mod h1:V0xaHgaf5oCCqmcxYcWiDfTiKsZsRc87/1qhoTACD8w=
github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60=
github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
@ -358,6 +376,8 @@ github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJ
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
github.com/muesli/termenv v0.15.2 h1:GohcuySI0QmI3wN8Ok9PtKGkgkFIk7y6Vpb5PvrY+Wo=
github.com/muesli/termenv v0.15.2/go.mod h1:Epx+iuz8sNs7mNKhxzH4fWXGNpZwUaJKRS1noLXviQ8=
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/panjf2000/ants/v2 v2.9.1 h1:Q5vh5xohbsZXGcD6hhszzGqB7jSSc2/CRr3QKIga8Kw=
github.com/panjf2000/ants/v2 v2.9.1/go.mod h1:7ZxyxsqE4vvW0M7LSD8aI3cKwgFhBHbxnlN8mDqHa1I=
@ -386,6 +406,9 @@ github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8b
github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A=
github.com/refraction-networking/utls v1.6.4/go.mod h1:2VL2xfiqgFAZtJKeUTlf+PSYFs3Eu7km0gCtXJ3m8zs=
github.com/riobard/go-bloom v0.0.0-20200614022211-cdc8013cb5b3 h1:f/FNXud6gA3MNr8meMVVGxhp+QBTqY91tM8HjEuMjGg=
github.com/riobard/go-bloom v0.0.0-20200614022211-cdc8013cb5b3/go.mod h1:HgjTstvQsPGkxUsCd2KWxErBblirPizecHcpD3ffK+s=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
@ -399,6 +422,8 @@ github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb
github.com/sagikazarmark/crypt v0.3.0/go.mod h1:uD/D+6UF4SrIR1uGEv7bBNkNqLGqUr43MRiaGWX1Nig=
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
github.com/sergi/go-diff v1.2.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
github.com/shadowsocks/go-shadowsocks2 v0.1.5 h1:PDSQv9y2S85Fl7VBeOMF9StzeXZyK1HakRm86CUbr28=
github.com/shadowsocks/go-shadowsocks2 v0.1.5/go.mod h1:AGGpIoek4HRno4xzyFiAtLHkOpcoznZEkAccaI/rplM=
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
@ -424,12 +449,15 @@ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw=
github.com/things-go/go-socks5 v0.0.5/go.mod h1:mtzInf8v5xmsBpHZVbIw2YQYhc4K0jRwzfsH64Uh0IQ=
github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM=
github.com/twmb/murmur3 v1.1.8 h1:8Yt9taO/WN3l08xErzjeschgZU2QSrwm1kclYq+0aRg=
github.com/twmb/murmur3 v1.1.8/go.mod h1:Qq/R7NUyOfr65zD+6Q5IHKsJLwP7exErjN6lyyq3OSQ=
github.com/urfave/cli/v2 v2.27.4/go.mod h1:m4QzxcD2qpra4z7WhzEGn74WZLViBnMpb1ToCAKdGRQ=
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
github.com/valyala/fasthttp v1.53.0 h1:lW/+SUkOxCx2vlIu0iaImv4JLrVRnbbkpCoaawvA4zc=
@ -438,12 +466,15 @@ github.com/vbauerster/mpb/v8 v8.7.3 h1:n/mKPBav4FFWp5fH4U0lPpXfiOmCEgl5Yx/NM3tKJ
github.com/vbauerster/mpb/v8 v8.7.3/go.mod h1:9nFlNpDGVoTmQ4QvNjSLtwLmAFjwmq0XaAF26toHGNM=
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no=
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM=
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1/go.mod h1:Ohn+xnUBiLI6FVj/9LpzZWtj1/D6lUovWYBkxHVV3aM=
github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
github.com/zema1/rawhttp v0.2.0/go.mod h1:EYBmBgSu01yb/kLh6lgjJWa6kDV+DrSO8nbgmEzuG6E=
github.com/zema1/suo5 v1.3.2-0.20250219115440-31983ee59a83/go.mod h1:MAuFXiTGFS3PLzZ6cTVsjdQqze4SWPfHvciBTPE6dkw=
go.etcd.io/etcd/api/v3 v3.5.1/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs=
go.etcd.io/etcd/client/pkg/v3 v3.5.1/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g=
go.etcd.io/etcd/client/v2 v2.305.1/go.mod h1:pMEacxZW7o8pg4CrFE7pquyCJJzZvkvdD2RibOCCCGs=
@ -467,11 +498,18 @@ golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8U
golang.org/x/crypto v0.0.0-20190923035154-9ee001bba392/go.mod h1:/lpIB1dKB+9EgE3H3cr1v9wB50oz8l4C4h62xy7jSTY=
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20210220033148-5ea612d1eb83/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I=
golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.11.1-0.20230711161743-2e82bdd1719d/go.mod h1:xgJhtzW8F9jGdVFWZESrid1U1bjeNy4zgy5cRr/CIio=
golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw=
golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4=
golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg=
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
golang.org/x/crypto v0.23.0 h1:dIJU/v2J8Mdglj/8rJ6UUOM3Zc9zLZxVZwwxMooUSAI=
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
golang.org/x/crypto v0.33.0 h1:IOBPskki6Lysi0lo9qQvbxiQ+FvsCC/YWOecCHAixus=
golang.org/x/crypto v0.33.0/go.mod h1:bVdXmD7IV/4GdElGPozy6U7lWdRXA4qyRVGJV57uQ5M=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@ -512,6 +550,9 @@ golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@ -556,6 +597,9 @@ golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d/go.mod h1:9nx3DQGgdP8bBQD5qx
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI=
golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY=
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/net v0.25.0 h1:d/OCCoBEUq33pjydKrGQhw7IlUPI2Oylr+8qLx49kac=
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
@ -590,8 +634,10 @@ golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M=
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.11.0 h1:GGz8+XQP4FvTTrjZPzNKTMFtSXH80RAzG+5ghFPgK9w=
golang.org/x/sync v0.11.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@ -660,19 +706,34 @@ golang.org/x/sys v0.0.0-20211205182925-97ca703d548d/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220919091848-fb04ddd9f9c8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.20.0 h1:Od9JTbYCk261bKm4M/mw7AklTlFYIa0bIp9BgSm1S8Y=
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc=
golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
golang.org/x/term v0.10.0/go.mod h1:lpqdcUyK/oCiQxvxVrppt5ggO2KCZ5QblwqPnfZ6d5o=
golang.org/x/term v0.11.0/go.mod h1:zC9APTIj3jG3FdV/Ons+XE1riIZXG4aZ4GTHiPZJPIU=
golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
golang.org/x/term v0.15.0/go.mod h1:BDl952bC7+uMoWR75FIrCDx79TPU9oHkTZ9yRbYOrX0=
golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY=
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
golang.org/x/term v0.20.0 h1:VnkxpohqXaOBYJtBmEppKUG6mXpi+4O6purfc2+sMhw=
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
golang.org/x/term v0.29.0 h1:L6pJp37ocefwRRtYPKSWOWzOtWSxVajvz2ldH/xi3iU=
golang.org/x/term v0.29.0/go.mod h1:6bl4lRlvVuDgSf3179VpIxBF0o10JUpXWOnI7nErv7s=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@ -684,9 +745,13 @@ golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.12.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.15.0 h1:h1V/4gjBv8v9cjcR6+AR5+/cIYK5N/WAgiv4xlsEtAk=
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM=
golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
@ -748,6 +813,8 @@ golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=

View File

@ -1,41 +0,0 @@
package internal
import (
"bytes"
"encoding/json"
"github.com/chainreactors/logs"
"github.com/chainreactors/spray/pkg"
"io"
"os"
)
func Format(filename string, color bool) {
var content []byte
var err error
if filename == "stdin" {
content, err = io.ReadAll(os.Stdin)
} else {
content, err = os.ReadFile(filename)
}
if err != nil {
return
}
var results []*pkg.Baseline
for _, line := range bytes.Split(bytes.TrimSpace(content), []byte("\n")) {
var result pkg.Baseline
err := json.Unmarshal(line, &result)
if err != nil {
logs.Log.Error(err.Error())
return
}
results = append(results, &result)
}
for _, result := range results {
if color {
logs.Log.Info(result.ColorString())
} else {
logs.Log.Info(result.String())
}
}
}
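The removed Format helper consumes newline-delimited JSON, one pkg.Baseline per line, and simply re-prints each record. A self-contained sketch of the same read, split and unmarshal pattern, using a stand-in Record struct whose fields are assumptions for this sketch rather than spray's actual Baseline schema:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"os"
)

// Record stands in for pkg.Baseline; the field names are illustrative only.
type Record struct {
	URL    string `json:"url"`
	Status int    `json:"status"`
}

func main() {
	content, err := io.ReadAll(os.Stdin)
	if err != nil {
		return
	}
	// one JSON document per line
	for _, line := range bytes.Split(bytes.TrimSpace(content), []byte("\n")) {
		var r Record
		if err := json.Unmarshal(line, &r); err != nil {
			fmt.Fprintln(os.Stderr, err)
			return
		}
		fmt.Printf("%d %s\n", r.Status, r.URL)
	}
}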

View File

@ -1,58 +0,0 @@
package pool
import (
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/words/rule"
"github.com/expr-lang/expr/vm"
"sync"
)
type SprayMod int
const (
PathSpray SprayMod = iota + 1
HostSpray
ParamSpray
CustomSpray
)
var ModMap = map[string]SprayMod{
"path": PathSpray,
"host": HostSpray,
}
type Config struct {
BaseURL string
ProxyAddr string
Thread int
Wordlist []string
Timeout int
ProcessCh chan *pkg.Baseline
OutputCh chan *pkg.Baseline
FuzzyCh chan *pkg.Baseline
Outwg *sync.WaitGroup
RateLimit int
CheckPeriod int
ErrPeriod int32
BreakThreshold int32
Method string
Mod SprayMod
Headers map[string]string
ClientType int
MatchExpr *vm.Program
FilterExpr *vm.Program
RecuExpr *vm.Program
AppendRule *rule.Program
AppendWords []string
Fuzzy bool
IgnoreWaf bool
Crawl bool
Scope []string
Active bool
Bak bool
Common bool
Retry int
RandomUserAgent bool
Random string
Index string
}

View File

@ -1,164 +0,0 @@
package pool
import (
"context"
"fmt"
"github.com/chainreactors/parsers"
"github.com/chainreactors/spray/internal/ihttp"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/words"
"github.com/chainreactors/words/mask"
"github.com/chainreactors/words/rule"
"github.com/panjf2000/ants/v2"
"path"
"sync"
)
type BasePool struct {
*Config
Statistor *pkg.Statistor
Pool *ants.PoolWithFunc
Bar *pkg.Bar
Worder *words.Worder
Cancel context.CancelFunc
client *ihttp.Client
ctx context.Context
processCh chan *pkg.Baseline // baselines awaiting processing
dir string
reqCount int
failedCount int
additionCh chan *Unit
closeCh chan struct{}
wg *sync.WaitGroup
}
func (pool *BasePool) doRedirect(bl *pkg.Baseline, depth int) {
if depth >= MaxRedirect {
return
}
reURL := pkg.FormatURL(bl.Url.Path, bl.RedirectURL)
pool.wg.Add(1)
go func() {
defer pool.wg.Done()
pool.addAddition(&Unit{
path: reURL,
source: parsers.RedirectSource,
frontUrl: bl.UrlString,
depth: depth + 1,
})
}()
}
func (pool *BasePool) doRule(bl *pkg.Baseline) {
if pool.AppendRule == nil {
pool.wg.Done()
return
}
if bl.Source == parsers.RuleSource {
pool.wg.Done()
return
}
go func() {
defer pool.wg.Done()
for u := range rule.RunAsStream(pool.AppendRule.Expressions, path.Base(bl.Path)) {
pool.addAddition(&Unit{
path: pkg.Dir(bl.Url.Path) + u,
source: parsers.RuleSource,
})
}
}()
}
func (pool *BasePool) doAppendWords(bl *pkg.Baseline) {
if pool.AppendWords == nil {
pool.wg.Done()
return
}
if bl.Source == parsers.AppendSource {
pool.wg.Done()
return
}
go func() {
defer pool.wg.Done()
for _, u := range pool.AppendWords {
pool.addAddition(&Unit{
path: pkg.SafePath(bl.Path, u),
source: parsers.AppendSource,
})
}
}()
}
func (pool *BasePool) doRetry(bl *pkg.Baseline) {
if bl.Retry >= pool.Retry {
return
}
pool.wg.Add(1)
go func() {
defer pool.wg.Done()
pool.addAddition(&Unit{
path: bl.Path,
source: parsers.RetrySource,
retry: bl.Retry + 1,
})
}()
}
func (pool *BasePool) doActive() {
defer pool.wg.Done()
for _, u := range pkg.ActivePath {
pool.addAddition(&Unit{
path: pool.dir + u[1:],
source: parsers.FingerSource,
})
}
}
func (pool *BasePool) doCommonFile() {
defer pool.wg.Done()
for _, u := range mask.SpecialWords["common_file"] {
pool.addAddition(&Unit{
path: pool.dir + u,
source: parsers.CommonFileSource,
})
}
}
func (pool *BasePool) addAddition(u *Unit) {
// forcibly suppress errors to prevent goroutine leaks
pool.wg.Add(1)
defer func() {
if err := recover(); err != nil {
}
}()
pool.additionCh <- u
}
func (pool *BasePool) Close() {
pool.Bar.Close()
}
func (pool *BasePool) genReq(s string) (*ihttp.Request, error) {
if pool.Mod == HostSpray {
return ihttp.BuildHostRequest(pool.ClientType, pool.BaseURL, s)
} else if pool.Mod == PathSpray {
return ihttp.BuildPathRequest(pool.ClientType, pool.BaseURL, s, pool.Method)
}
return nil, fmt.Errorf("unknown mod")
}
func (pool *BasePool) putToOutput(bl *pkg.Baseline) {
if bl.IsValid || bl.IsFuzzy {
bl.Collect()
}
pool.Outwg.Add(1)
pool.OutputCh <- bl
}
func (pool *BasePool) putToFuzzy(bl *pkg.Baseline) {
pool.Outwg.Add(1)
bl.IsFuzzy = true
pool.FuzzyCh <- bl
}
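addAddition above pairs every channel send with a wg.Add(1) and recovers the panic raised by sending on a channel that has already been closed, so worker goroutines neither leak nor crash the pool. A minimal standalone sketch of that send pattern; the names and the wg.Done in the recover branch are this sketch's own bookkeeping, not spray's exact code:

package main

import (
	"fmt"
	"sync"
)

// safeSend accounts for the unit first, then swallows the panic if the
// channel was closed underneath it, releasing the count it just took.
func safeSend(ch chan string, wg *sync.WaitGroup, v string) {
	wg.Add(1)
	defer func() {
		if err := recover(); err != nil {
			wg.Done() // the consumer will never see v
		}
	}()
	ch <- v
}

func main() {
	var wg sync.WaitGroup
	ch := make(chan string, 1)

	safeSend(ch, &wg, "/admin")
	close(ch)
	safeSend(ch, &wg, "/backup") // send on closed channel: recovered, not fatal

	fmt.Println(<-ch) // /admin
	wg.Done()         // the consumer acknowledges the unit it received
	wg.Wait()
}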

View File

@ -1,36 +0,0 @@
package pool
import (
"github.com/chainreactors/parsers"
"github.com/chainreactors/spray/pkg"
)
func newUnit(path string, source parsers.SpraySource) *Unit {
return &Unit{path: path, source: source}
}
func newUnitWithNumber(path string, source parsers.SpraySource, number int) *Unit {
return &Unit{path: path, source: source, number: number}
}
type Unit struct {
number int
path string
source parsers.SpraySource
retry int
frontUrl string
depth int // redirect depth
}
func NewBaselines() *Baselines {
return &Baselines{
baselines: map[int]*pkg.Baseline{},
}
}
type Baselines struct {
FailedBaselines []*pkg.Baseline
random *pkg.Baseline
index *pkg.Baseline
baselines map[int]*pkg.Baseline
}

View File

@ -1,174 +0,0 @@
package internal
import (
"bytes"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/words/mask"
"github.com/chainreactors/words/rule"
"io/ioutil"
"strconv"
"strings"
)
func parseExtension(s string) string {
if i := strings.Index(s, "."); i != -1 {
return s[i+1:]
}
return ""
}
func parseStatus(preset []int, changed string) []int {
if changed == "" {
return preset
}
if strings.HasPrefix(changed, "+") {
for _, s := range strings.Split(changed[1:], ",") {
if t, err := strconv.Atoi(s); err != nil {
continue
} else {
preset = append(preset, t)
}
}
} else if strings.HasPrefix(changed, "!") {
for _, s := range strings.Split(changed[1:], ",") {
for i, status := range preset {
if t, err := strconv.Atoi(s); err != nil {
break
} else if t == status {
preset = append(preset[:i], preset[i+1:]...)
break
}
}
}
} else {
preset = []int{}
for _, s := range strings.Split(changed, ",") {
if t, err := strconv.Atoi(s); err != nil {
continue
} else {
preset = append(preset, t)
}
}
}
return preset
}
func loadFileToSlice(filename string) ([]string, error) {
var ss []string
content, err := ioutil.ReadFile(filename)
if err != nil {
return nil, err
}
ss = strings.Split(strings.TrimSpace(string(content)), "\n")
// normalize the CRLF difference between Windows and Linux
for i, word := range ss {
ss[i] = strings.TrimSpace(word)
}
return ss, nil
}
func loadRuleAndCombine(filename []string) (string, error) {
var bs bytes.Buffer
for _, f := range filename {
if data, ok := pkg.Rules[f]; ok {
bs.WriteString(strings.TrimSpace(data))
bs.WriteString("\n")
} else {
content, err := ioutil.ReadFile(f)
if err != nil {
return "", err
}
bs.Write(bytes.TrimSpace(content))
bs.WriteString("\n")
}
}
return bs.String(), nil
}
func loadFileWithCache(filename string) ([]string, error) {
if dict, ok := dictCache[filename]; ok {
return dict, nil
}
dict, err := loadFileToSlice(filename)
if err != nil {
return nil, err
}
dictCache[filename] = dict
return dict, nil
}
func loadDictionaries(filenames []string) ([][]string, error) {
dicts := make([][]string, len(filenames))
for i, name := range filenames {
dict, err := loadFileWithCache(name)
if err != nil {
return nil, err
}
dicts[i] = dict
}
return dicts, nil
}
func loadWordlist(word string, dictNames []string) ([]string, error) {
if wl, ok := wordlistCache[word+strings.Join(dictNames, ",")]; ok {
return wl, nil
}
dicts, err := loadDictionaries(dictNames)
if err != nil {
return nil, err
}
wl, err := mask.Run(word, dicts, nil)
if err != nil {
return nil, err
}
wordlistCache[word] = wl
return wl, nil
}
func loadRuleWithFiles(ruleFiles []string, filter string) ([]rule.Expression, error) {
if rules, ok := ruleCache[strings.Join(ruleFiles, ",")]; ok {
return rules, nil
}
var rules bytes.Buffer
for _, filename := range ruleFiles {
content, err := ioutil.ReadFile(filename)
if err != nil {
return nil, err
}
rules.Write(content)
rules.WriteString("\n")
}
return rule.Compile(rules.String(), filter).Expressions, nil
}
//type bytesPatcher struct{}
//
//func (p *bytesPatcher) Visit(node *ast.Node) {
// switch (*node).(type) {
// case *ast.MemberNode:
// ast.Patch(node, &ast.CallNode{
// Callee: &ast.MemberNode{
// Node: *node,
// Name: "String",
// Property: &ast.StringNode{Value: "String"},
// },
// })
// }
//}
func wrapWordsFunc(f func(string) string) func(string) []string {
return func(s string) []string {
return []string{f(s)}
}
}
func safeFilename(filename string) string {
filename = strings.ReplaceAll(filename, "http://", "")
filename = strings.ReplaceAll(filename, "https://", "")
filename = strings.ReplaceAll(filename, ":", "_")
filename = strings.ReplaceAll(filename, "/", "_")
return filename
}

View File

@ -1,7 +1,7 @@
package pkg
import (
"encoding/json"
"fmt"
"github.com/chainreactors/fingers"
"github.com/chainreactors/parsers"
"github.com/chainreactors/utils"
@ -12,18 +12,10 @@ import (
"strings"
)
var (
ExtractRegexps = make(parsers.Extractors)
Extractors = make(parsers.Extractors)
FingerEngine *fingers.Engine
ActivePath []string
)
func LoadPorts() error {
var err error
var ports []*utils.PortConfig
err = json.Unmarshal(LoadConfig("port"), &ports)
err = yaml.Unmarshal(LoadConfig("port"), &ports)
if err != nil {
return err
}
@ -55,18 +47,29 @@ func LoadFingers() error {
func LoadTemplates() error {
var err error
// load rule
var data map[string]interface{}
err = json.Unmarshal(LoadConfig("spray_rule"), &data)
err = yaml.Unmarshal(LoadConfig("spray_rule"), &Rules)
if err != nil {
return err
}
for k, v := range data {
Rules[k] = v.(string)
// load default words
var dicts map[string]string
err = yaml.Unmarshal(LoadConfig("spray_dict"), &dicts)
if err != nil {
return err
}
for name, wordlist := range dicts {
dict := strings.Split(strings.TrimSpace(wordlist), "\n")
for i, d := range dict {
dict[i] = strings.TrimSpace(d)
}
Dicts[strings.TrimSuffix(name, ".txt")] = dict
}
// load mask
var keywords map[string]interface{}
err = json.Unmarshal(LoadConfig("spray_common"), &keywords)
err = yaml.Unmarshal(LoadConfig("spray_common"), &keywords)
if err != nil {
return err
}
@ -80,7 +83,7 @@ func LoadTemplates() error {
}
var extracts []*parsers.Extractor
err = json.Unmarshal(LoadConfig("extract"), &extracts)
err = yaml.Unmarshal(LoadConfig("extract"), &extracts)
if err != nil {
return err
}
@ -122,16 +125,12 @@ func LoadExtractorConfig(filename string) ([]*parsers.Extractor, error) {
func Load() error {
err := LoadPorts()
if err != nil {
return err
return fmt.Errorf("load ports, %w", err)
}
err = LoadTemplates()
if err != nil {
return err
return fmt.Errorf("load templates, %w", err)
}
return nil
}
func LoadDefaultDict() []string {
return strings.Split(strings.TrimSpace(string(LoadConfig("spray_default"))), "\n")
}
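LoadTemplates now decodes the embedded configs with yaml.Unmarshal instead of json.Unmarshal, and spray_dict arrives as a map of wordlist name to newline-separated words. A standalone sketch of that decoding step, using gopkg.in/yaml.v3 and an inline document purely for illustration; the real bytes come from LoadConfig("spray_dict") and the yaml package actually imported by load.go may differ:

package main

import (
	"fmt"
	"strings"

	"gopkg.in/yaml.v3"
)

func main() {
	// illustrative stand-in for the embedded spray_dict config
	raw := []byte("dir.txt: |\n  admin\n  backup\nfile.txt: |\n  index.php\n")

	var dicts map[string]string
	if err := yaml.Unmarshal(raw, &dicts); err != nil {
		panic(err)
	}

	loaded := make(map[string][]string)
	for name, wordlist := range dicts {
		words := strings.Split(strings.TrimSpace(wordlist), "\n")
		for i, w := range words {
			words[i] = strings.TrimSpace(w)
		}
		loaded[strings.TrimSuffix(name, ".txt")] = words
	}
	fmt.Println(loaded["dir"]) // [admin backup]
}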

pkg/parse.go Normal file
View File

@ -0,0 +1,26 @@
package pkg
import "strings"
var (
SkipChar = "%SKIP%"
EXTChar = "%EXT%"
)
func ParseEXTPlaceholderFunc(exts []string) func(string) []string {
return func(s string) []string {
ss := make([]string, len(exts))
var n int
for i, e := range exts {
if strings.Contains(s, EXTChar) {
n++
ss[i] = strings.Replace(s, EXTChar, e, -1)
}
}
if n == 0 {
return []string{s}
} else {
return ss
}
}
}
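ParseEXTPlaceholderFunc expands the %EXT% placeholder into one word per configured extension and passes words without the placeholder through unchanged. A short usage sketch:

package main

import (
	"fmt"

	"github.com/chainreactors/spray/pkg"
)

func main() {
	expand := pkg.ParseEXTPlaceholderFunc([]string{"php", "jsp", "aspx"})

	fmt.Println(expand("index.%EXT%")) // [index.php index.jsp index.aspx]
	fmt.Println(expand("robots.txt"))  // [robots.txt] (no placeholder, passed through)
}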

View File

@ -3,10 +3,16 @@ package pkg
import (
"bufio"
"bytes"
"github.com/chainreactors/files"
"github.com/chainreactors/fingers"
"github.com/chainreactors/logs"
"github.com/chainreactors/parsers"
"github.com/chainreactors/utils/iutils"
"github.com/chainreactors/words/mask"
"github.com/chainreactors/words/rule"
"github.com/expr-lang/expr"
"github.com/expr-lang/expr/vm"
"io/ioutil"
"math/rand"
"net/http"
"net/url"
@ -19,23 +25,33 @@ import (
)
var (
LogVerbose = logs.Warn - 2
LogFuzz = logs.Warn - 1
WhiteStatus = []int{} // cmd input, 200
BlackStatus = []int{} // cmd input, 400,410
FuzzyStatus = []int{} // cmd input, 500,501,502,503
WAFStatus = []int{493, 418, 1020, 406}
UniqueStatus = []int{} // the same unique hash on 403 means the same ACL was hit; on 200 it means the default page
LogVerbose = logs.Warn - 2
LogFuzz = logs.Warn - 1
DefaultWhiteStatus = []int{200} // cmd input
DefaultBlackStatus = []int{400, 410} // cmd input
DefaultFuzzyStatus = []int{500, 501, 502, 503, 301, 302, 404} // cmd input
DefaultUniqueStatus = []int{403, 200, 404} // the same unique hash on 403 means the same ACL was hit; on 200 it means the default page
WhiteStatus = []int{} // cmd input, 200
BlackStatus = []int{} // cmd input, 400,410
FuzzyStatus = []int{} // cmd input, 500,501,502,503
WAFStatus = []int{493, 418, 1020, 406, 429, 412}
UniqueStatus = []int{} // the same unique hash on 403 means the same ACL was hit; on 200 it means the default page
// plugins
EnableAllFingerEngine = false
)
var (
Rules map[string]string = make(map[string]string)
BadExt = []string{".js", ".css", ".scss", ".,", ".jpeg", ".jpg", ".png", ".gif", ".svg", ".vue", ".ts", ".swf", ".pdf", ".mp4", ".zip", ".rar"}
BadURL = []string{";", "}", "\\n", "webpack://", "{", "www.w3.org", ".src", ".url", ".att", ".href", "location.href", "javascript:", "location:", ".createObject", ":location", ".path"}
Rules map[string]string = make(map[string]string)
Dicts map[string][]string = make(map[string][]string)
wordlistCache = make(map[string][]string)
ruleCache = make(map[string][]rule.Expression)
BadExt = []string{".js", ".css", ".scss", ".,", ".jpeg", ".jpg", ".png", ".gif", ".svg", ".vue", ".ts", ".swf", ".pdf", ".mp4", ".zip", ".rar"}
BadURL = []string{";", "}", "\\n", "webpack://", "{", "www.w3.org", ".src", ".url", ".att", ".href", "location.href", "javascript:", "location:", ".createObject", ":location", ".path"}
ExtractRegexps = make(parsers.Extractors)
Extractors = make(parsers.Extractors)
FingerEngine *fingers.Engine
ActivePath []string
ContentTypeMap = map[string]string{
"application/javascript": "js",
"application/json": "json",
@ -75,7 +91,8 @@ var (
"Mozilla/5.0 (compatible; bingbot/2.0; +http://www.bing.com/bingbot.htm)",
"Mozilla/5.0 (compatible; Yahoo! Slurp; http://help.yahoo.com/help/us/ysearch/slurp)",
}
uacount = len(randomUserAgent)
uacount = len(randomUserAgent)
DefaultUserAgent = randomUserAgent[rand.Intn(uacount)]
)
type BS []byte
@ -258,9 +275,9 @@ func CRC16Hash(data []byte) uint16 {
func SafePath(dir, u string) string {
hasSlash := strings.HasPrefix(u, "/")
if hasSlash {
return path.Join(dir, u[1:])
return dir + u[1:]
} else {
return path.Join(dir, u)
return dir + u
}
}
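As this hunk shows, SafePath drops path.Join in favor of plain concatenation, so the appended word reaches the request byte-for-byte; path.Join would clean the joined path and collapse traversal-style payloads. A small comparison, with a word of the kind an append rule might generate:

package main

import (
	"fmt"
	"path"
)

func main() {
	dir, word := "/api/", "../admin"

	fmt.Println(path.Join(dir, word)) // /admin         (Join cleans away the traversal)
	fmt.Println(dir + word)           // /api/../admin  (raw word preserved)
}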
@ -315,12 +332,6 @@ func Dir(u string) string {
}
}
func UniqueHash(bl *Baseline) uint16 {
// hash built from host + status code + redirect URL + content-type + title + body length truncated to the tens
// body length may cause some false positives; there is no better solution for now
return CRC16Hash([]byte(bl.Host + strconv.Itoa(bl.Status) + bl.RedirectURL + bl.ContentType + bl.Title + strconv.Itoa(bl.BodyLength/10*10)))
}
func FormatURL(base, u string) string {
if strings.HasPrefix(u, "http") {
parsed, err := url.Parse(u)
@ -391,3 +402,215 @@ func ParseRawResponse(raw []byte) (*http.Response, error) {
defer resp.Body.Close()
return resp, nil
}
func GetPresetWordList(key []string) []string {
var wordlist []string
for _, k := range key {
if v, ok := mask.SpecialWords[k]; ok {
wordlist = append(wordlist, v...)
}
}
return wordlist
}
func ParseExtension(s string) string {
if i := strings.Index(s, "."); i != -1 {
return s[i+1:]
}
return ""
}
// ParseStatus parses the input string and updates the preset status filters.
func ParseStatus(preset []int, changed string) []int {
if changed == "" {
return preset
}
parseToken := func(s string) (int, bool) {
s = strings.TrimSpace(s)
if strings.HasSuffix(s, "*") {
prefix := s[:len(s)-1]
if t, err := strconv.Atoi(prefix); err == nil {
return t, true // isPrefix = true
}
} else if t, err := strconv.Atoi(s); err == nil {
return t, false // isPrefix = false
}
return 0, false
}
if strings.HasPrefix(changed, "+") {
for _, s := range strings.Split(changed[1:], ",") {
if t, _ := parseToken(s); t != 0 {
preset = append(preset, t)
}
}
} else if strings.HasPrefix(changed, "!") {
for _, s := range strings.Split(changed[1:], ",") {
if t, _ := parseToken(s); t != 0 {
newPreset := preset[:0]
for _, val := range preset {
if val != t {
newPreset = append(newPreset, val)
}
}
preset = newPreset
}
}
} else {
preset = []int{}
for _, s := range strings.Split(changed, ",") {
if t, _ := parseToken(s); t != 0 {
preset = append(preset, t)
}
}
}
return UniqueInts(preset)
}
func UniqueInts(input []int) []int {
seen := make(map[int]bool)
result := make([]int, 0, len(input))
for _, val := range input {
if !seen[val] {
seen[val] = true
result = append(result, val)
}
}
return result
}
// StatusContain checks if a status matches any of the preset filters.
// Preset values < 100 are treated as prefix filters (e.g. 5 = 5xx, 51 = 51x).
func StatusContain(preset []int, status int) bool {
if len(preset) == 0 {
return true
}
for _, s := range preset {
if s < 10 {
if status/100 == s {
return true
}
} else if s < 100 {
if status/10 == s {
return true
}
} else if s == status {
return true
}
}
return false
}
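ParseStatus and StatusContain together implement the wildcard status codes: a trailing * (or any stored value below 100) acts as a prefix, so 3 matches every 3xx and 40 matches 400 through 409. A usage sketch:

package main

import (
	"fmt"

	"github.com/chainreactors/spray/pkg"
)

func main() {
	// replace the defaults with "200 plus any 3xx or 40x"
	white := pkg.ParseStatus(pkg.DefaultWhiteStatus, "200,3*,40*")
	fmt.Println(white) // [200 3 40]

	fmt.Println(pkg.StatusContain(white, 302)) // true  (3 acts as a 3xx prefix)
	fmt.Println(pkg.StatusContain(white, 403)) // true  (40 acts as a 40x prefix)
	fmt.Println(pkg.StatusContain(white, 418)) // false
}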
func LoadFileToSlice(filename string) ([]string, error) {
var ss []string
if dicts, ok := Dicts[filename]; ok {
if files.IsExist(filename) {
logs.Log.Warnf("load and overwrite %s from preset", filename)
}
return dicts, nil
}
content, err := ioutil.ReadFile(filename)
if err != nil {
return nil, err
}
ss = strings.Split(strings.TrimSpace(string(content)), "\n")
// normalize the CRLF difference between Windows and Linux
for i, word := range ss {
ss[i] = strings.TrimSpace(word)
}
return ss, nil
}
func LoadRuleAndCombine(filename []string) (string, error) {
var bs bytes.Buffer
for _, f := range filename {
if data, ok := Rules[f]; ok {
bs.WriteString(strings.TrimSpace(data))
bs.WriteString("\n")
} else {
content, err := ioutil.ReadFile(f)
if err != nil {
return "", err
}
bs.Write(bytes.TrimSpace(content))
bs.WriteString("\n")
}
}
return bs.String(), nil
}
func loadFileWithCache(filename string) ([]string, error) {
if dict, ok := Dicts[filename]; ok {
return dict, nil
}
dict, err := LoadFileToSlice(filename)
if err != nil {
return nil, err
}
Dicts[filename] = dict
return dict, nil
}
func loadDictionaries(filenames []string) ([][]string, error) {
dicts := make([][]string, len(filenames))
for i, name := range filenames {
dict, err := loadFileWithCache(name)
if err != nil {
return nil, err
}
dicts[i] = dict
}
return dicts, nil
}
func LoadWordlist(word string, dictNames []string) ([]string, error) {
if wl, ok := wordlistCache[word+strings.Join(dictNames, ",")]; ok {
return wl, nil
}
dicts, err := loadDictionaries(dictNames)
if err != nil {
return nil, err
}
wl, err := mask.Run(word, dicts, nil)
if err != nil {
return nil, err
}
wordlistCache[word] = wl
return wl, nil
}
func LoadRuleWithFiles(ruleFiles []string, filter string) ([]rule.Expression, error) {
if rules, ok := ruleCache[strings.Join(ruleFiles, ",")]; ok {
return rules, nil
}
var rules bytes.Buffer
for _, filename := range ruleFiles {
content, err := ioutil.ReadFile(filename)
if err != nil {
return nil, err
}
rules.Write(content)
rules.WriteString("\n")
}
return rule.Compile(rules.String(), filter).Expressions, nil
}
func WrapWordsFunc(f func(string) string) func(string) []string {
return func(s string) []string {
return []string{f(s)}
}
}
func SafeFilename(filename string) string {
filename = strings.ReplaceAll(filename, "http://", "")
filename = strings.ReplaceAll(filename, "https://", "")
filename = strings.ReplaceAll(filename, ":", "_")
filename = strings.ReplaceAll(filename, "/", "_")
return filename
}
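SafeFilename reduces a target URL to a filesystem-friendly name by stripping the scheme and replacing colons and slashes with underscores, for example:

package main

import (
	"fmt"

	"github.com/chainreactors/spray/pkg"
)

func main() {
	fmt.Println(pkg.SafeFilename("https://example.com:8443/admin/login"))
	// example.com_8443_admin_login
}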

@ -1 +1 @@
Subproject commit f2980b8d312c8088f3947d914499e96cfc40d975
Subproject commit fe95f1f22d18b6cf2046b004191f5bd745f1c578