mirror of https://github.com/chainreactors/spray.git
synced 2025-09-15 19:50:18 +00:00

Compare commits

89 Commits
Commit SHA1s:

dfa3b2da56, 57c67fe36a, 20b70d0dcd, 0a833b0326, fd7b603e02, 10540f942c, ff06fd1902, 104d41380e,
d21dd493f6, cd58c410d8, 5c2e377d0d, 31f48d4b06, f7c1034310, fb63ed010c, 0d700f8ea0, c82f0564f5,
08ce95b43d, c746c26ff9, b13903ea98, e951b68e75, 0e9d094dd1, 72720a942d, 6c5811f1d2, ef69d46b2a,
ff1e596380, f1b9400e19, c07c2305af, 3087ec32d1, 286710f5ec, 5f8f5c7795, 0f1e6b8333, 7621514bd9,
de12d568ce, 02162cffd6, 9e74a17096, 0ca5c02de7, 5cb9aa119d, 6bbc6141ac, af82ae43b9, e483bb4439,
344e560471, 2a68d0b49b, f1684ffeb4, a4b9e77029, 24eade89d0, ed3e95f21d, fcce861ae3, 7693b4d38f,
2f28b0ec3c, a942bac337, 2de8822b01, 29db702744, 5cf02cbbcb, 2e8a923bac, 4a0c8f86eb, b4c6a77a98,
d6e7e58b18, 15110ab895, f6037d7a1e, da71cbc575, 106f007693, 77a5e58a2a, 06bd9820e7, 105c426396,
491b8c16a5, 1c28898631, de168e0be9, 678a6a44e4, 57eab148ac, bf6d1c5f0b, 937855c075, 32f558f9c5,
411f24d94d, 8bf4b374ac, 28aacea18c, b1aa68f20c, 38bc2d33f2, 021e84ae81, ebc74c1987, dc8829ecca,
3791b765ea, bb98110292, 0d4a3652ce, c4d4efe6b7, 4e28fb59b4, be19895446, 63b39cead1, c5bbe36289,
1eddc5fcd3
.github/workflows/gorelease.yml (vendored, 4 changes)
```diff
@@ -8,7 +8,7 @@ on:
 jobs:
   goreleaser:
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
     steps:
       -
         name: Checkout
@@ -26,7 +26,7 @@ jobs:
         name: Set up Go
         uses: actions/setup-go@v3
         with:
-          go-version: 1.21
+          go-version: "1.20"
       -
         name: Run GoReleaser
         uses: goreleaser/goreleaser-action@v4
```
```diff
@@ -25,12 +25,17 @@ builds:
     ldflags: "-s -w -X 'github.com/chainreactors/spray/cmd.ver=={{ .Tag }}'"
     flags:
       - -trimpath
-      - -tags=forceposix
     asmflags:
      - all=-trimpath={{.Env.GOPATH}}
     gcflags:
      - all=-trimpath={{.Env.GOPATH}}
     no_unique_dist_dir: true
+    env:
+      - CGO_ENABLED=0
+    tags:
+      - forceposix
+      - osusergo
+      - netgo
+
 upx:
   -
```
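The goreleaser hunk above moves `forceposix` from a `-tags=` build flag to the `tags` list, pins `CGO_ENABLED=0`, and keeps the `-X` ldflags entry that stamps the release tag into a package-level variable at link time (the same variable the cmd/cmd.go change below defaults to `"dev"`). A minimal, self-contained sketch of that `-X` injection mechanism; the package path and variable name here are illustrative, not spray's:

```go
// version.go: stand-in for a version variable populated by -X ldflags.
package main

import "fmt"

// version is overridden at link time, e.g.:
//   go build -ldflags "-s -w -X 'main.version=v1.2.3'" .
// Without the flag it keeps its default value.
var version = "dev"

func main() {
	fmt.Println("build version:", version)
}
```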
```diff
@@ -1,5 +1,10 @@
 # SPRAY
+
+blog posts:
+
+- https://chainreactors.github.io/wiki/blog/2024/07/24/fingers-introduce/
+- https://chainreactors.github.io/wiki/blog/2024/08/25/spray-best-practices/
 
 ![spray](docs/spray.png)
 
 <p align="center">
@@ -11,6 +16,8 @@
 
 ## Features
 
+**The best, smartest, and most controllable directory brute-forcing tool**
+
 * Extremely strong performance: in local stress tests it can beat ffuf and feroxbuster by more than 50%. Over real networks the gap is less obvious, but with many targets the difference is clearly noticeable.
 * Mask-based wordlist generation
 * Rule-based wordlist generation
@@ -125,4 +132,4 @@ go build .
 * [fuzzuli](https://github.com/musana/fuzzuli) provided an approach for generating backup-file wordlists
 * [fingerprinthub](https://github.com/0x727/FingerprintHub) supplements the fingerprint library
 * [wappalyzer](https://github.com/projectdiscovery/wappalyzergo) supplements the fingerprint library
 * [dirsearch](https://github.com/maurosoria/dirsearch) provides the default wordlist
```
cmd/cmd.go (74 changes)
```diff
@@ -5,9 +5,10 @@ import (
 	"fmt"
 	"github.com/chainreactors/files"
 	"github.com/chainreactors/logs"
-	"github.com/chainreactors/spray/internal"
-	"github.com/chainreactors/spray/internal/ihttp"
+	"github.com/chainreactors/spray/core"
+	"github.com/chainreactors/spray/core/ihttp"
 	"github.com/chainreactors/spray/pkg"
+	"github.com/chainreactors/utils/iutils"
 	"github.com/jessevdk/go-flags"
 	"os"
 	"os/signal"
@@ -15,7 +16,7 @@ import (
 	"time"
 )
 
-var ver = "v1.0.0"
+var ver = "dev"
 var DefaultConfig = "config.yaml"
 
 func init() {
@@ -27,11 +28,11 @@ func init() {
 }
 
 func Spray() {
-	var option internal.Option
+	var option core.Option
 
 	if files.IsExist(DefaultConfig) {
 		logs.Log.Debug("config.yaml exist, loading")
-		err := internal.LoadConfig(DefaultConfig, &option)
+		err := core.LoadConfig(DefaultConfig, &option)
 		if err != nil {
 			logs.Log.Error(err.Error())
 			return
@@ -44,13 +45,19 @@ func Spray() {
 WIKI: https://chainreactors.github.io/wiki/spray
 
 QUICKSTART:
-  simple example:
+  basic:
+    spray -u http://example.com
+
+  basic cidr and port:
+    spray -i example -p top2,top3
+
+  simple brute:
     spray -u http://example.com -d wordlist1.txt -d wordlist2.txt
 
-  mask-base wordlist:
+  mask-base brute with wordlist:
     spray -u http://example.com -w "/aaa/bbb{?l#4}/ccc"
 
-  rule-base wordlist:
+  rule-base brute with wordlist:
    spray -u http://example.com -r rule.txt -d 1.txt
 
   list input spray:
@@ -69,14 +76,14 @@ func Spray() {
 	}
 
 	// logs
-	logs.AddLevel(pkg.LogVerbose, "verbose", "[=] %s {{suffix}}")
+	logs.AddLevel(pkg.LogVerbose, "verbose", "[=] %s {{suffix}}\n")
 	if option.Debug {
 		logs.Log.SetLevel(logs.Debug)
 	} else if len(option.Verbose) > 0 {
 		logs.Log.SetLevel(pkg.LogVerbose)
 	}
 	if option.InitConfig {
-		configStr := internal.InitDefaultConfig(&option, 0)
+		configStr := core.InitDefaultConfig(&option, 0)
 		err := os.WriteFile(DefaultConfig, []byte(configStr), 0o744)
 		if err != nil {
 			logs.Log.Warn("cannot create config: config.yaml, " + err.Error())
@@ -88,8 +95,10 @@ func Spray() {
 		logs.Log.Info("init default config: ./config.yaml")
 		return
 	}
+
+	defer time.Sleep(time.Second)
 	if option.Config != "" {
-		err := internal.LoadConfig(option.Config, &option)
+		err := core.LoadConfig(option.Config, &option)
 		if err != nil {
 			logs.Log.Error(err.Error())
 			return
@@ -106,8 +115,23 @@ func Spray() {
 		return
 	}
 
+	if option.PrintPreset {
+		err = pkg.Load()
+		if err != nil {
+			iutils.Fatal(err.Error())
+		}
+
+		err = pkg.LoadFingers()
+		if err != nil {
+			iutils.Fatal(err.Error())
+		}
+		core.PrintPreset()
+
+		return
+	}
+
 	if option.Format != "" {
-		internal.Format(option.Format, !option.NoColor)
+		core.Format(option)
 		return
 	}
 
@@ -122,17 +146,19 @@ func Spray() {
 		logs.Log.Errorf(err.Error())
 		return
 	}
-	if option.ReadAll || runner.Crawl {
+	if option.ReadAll || runner.CrawlPlugin {
 		ihttp.DefaultMaxBodySize = -1
 	}
 
 	ctx, canceler := context.WithTimeout(context.Background(), time.Duration(runner.Deadline)*time.Second)
-	err = runner.Prepare(ctx)
-	if err != nil {
-		logs.Log.Errorf(err.Error())
-		return
-	}
+	go func() {
+		select {
+		case <-ctx.Done():
+			time.Sleep(10 * time.Second)
+			logs.Log.Errorf("deadline and timeout not work, hard exit!!!")
+			os.Exit(0)
+		}
+	}()
 
 	go func() {
 		exitChan := make(chan os.Signal, 2)
@@ -154,10 +180,10 @@ func Spray() {
 		}()
 	}()
 
-	if runner.IsCheck {
-		runner.RunWithCheck(ctx)
-	} else {
-		runner.Run(ctx)
+	err = runner.Prepare(ctx)
+	if err != nil {
+		logs.Log.Errorf(err.Error())
+		return
 	}
-	time.Sleep(1 * time.Second)
 }
```
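The new goroutine in cmd.go is a last-resort watchdog: once the deadline context fires it waits a short grace period and then force-exits, in case the normal shutdown path has stalled. A standalone sketch of the same pattern, with illustrative names and durations rather than spray's:

```go
// watchdog.go: sketch of a deadline watchdog that hard-exits if shutdown stalls.
package main

import (
	"context"
	"log"
	"os"
	"time"
)

func main() {
	// Overall run deadline, analogous to runner.Deadline in the diff above.
	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()

	// Watchdog: if the deadline passes and the process is still alive after a
	// grace period, assume graceful shutdown is stuck and exit hard.
	go func() {
		<-ctx.Done()
		time.Sleep(10 * time.Second)
		log.Println("deadline reached and shutdown stalled, forcing exit")
		os.Exit(1)
	}()

	doWork(ctx) // normally finishes (or is cancelled) well before the watchdog fires
}

func doWork(ctx context.Context) {
	select {
	case <-ctx.Done():
	case <-time.After(5 * time.Second):
	}
}
```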
```diff
@@ -61,6 +61,8 @@ output:
   no-color: false
   # Bool, No progress bar
   no-bar: false
+  # Bool, No stat
+  no-stat: true
 plugins:
   # Bool, enable all plugin
   all: false
```
```diff
@@ -1,14 +1,16 @@
-package pkg
+package baseline
 
 import (
 	"bytes"
 	"github.com/chainreactors/fingers/common"
 	"github.com/chainreactors/parsers"
-	"github.com/chainreactors/spray/internal/ihttp"
+	"github.com/chainreactors/spray/core/ihttp"
+	"github.com/chainreactors/spray/pkg"
 	"github.com/chainreactors/utils/encode"
 	"github.com/chainreactors/utils/iutils"
 	"net/http"
 	"net/url"
+	"strconv"
 	"strings"
 )
 
@@ -23,7 +25,7 @@ func NewBaseline(u, host string, resp *ihttp.Response) *Baseline {
 		},
 	}
 
-	if t, ok := ContentTypeMap[resp.ContentType()]; ok {
+	if t, ok := pkg.ContentTypeMap[resp.ContentType()]; ok {
 		bl.ContentType = t
 		bl.Title = t + " data"
 	} else {
@@ -34,11 +36,11 @@ func NewBaseline(u, host string, resp *ihttp.Response) *Baseline {
 	bl.Header = make([]byte, len(header))
 	copy(bl.Header, header)
 	bl.HeaderLength = len(bl.Header)
 
 	if i := resp.ContentLength(); ihttp.CheckBodySize(i) {
-		body := resp.Body()
+		if body := resp.Body(); body != nil {
 			bl.Body = make([]byte, len(body))
 			copy(bl.Body, body)
+		}
 
 		if i == -1 {
 			bl.Chunked = true
@@ -49,10 +51,10 @@ func NewBaseline(u, host string, resp *ihttp.Response) *Baseline {
 	}
 
 	bl.Raw = append(bl.Header, bl.Body...)
-	bl.Response, err = ParseRawResponse(bl.Raw)
+	bl.Response, err = pkg.ParseRawResponse(bl.Raw)
 	if err != nil {
 		bl.IsValid = false
-		bl.Reason = ErrResponseError.Error()
+		bl.Reason = pkg.ErrResponseError.Error()
 		bl.ErrString = err.Error()
 		return bl
 	}
@@ -72,7 +74,7 @@ func NewBaseline(u, host string, resp *ihttp.Response) *Baseline {
 		}
 	} else {
 		bl.IsValid = false
-		bl.Reason = ErrUrlError.Error()
+		bl.Reason = pkg.ErrUrlError.Error()
 		bl.ErrString = err.Error()
 	}
 	bl.Unique = UniqueHash(bl)
@@ -112,18 +114,20 @@ func NewInvalidBaseline(u, host string, resp *ihttp.Response, reason string) *Baseline {
 
 type Baseline struct {
 	*parsers.SprayResult
 	Url       *url.URL       `json:"-"`
 	Dir       bool           `json:"-"`
 	Chunked   bool           `json:"-"`
-	Body      BS             `json:"-"`
-	Header    BS             `json:"-"`
-	Raw       BS             `json:"-"`
+	Body      pkg.BS         `json:"-"`
+	Header    pkg.BS         `json:"-"`
+	Raw       pkg.BS         `json:"-"`
 	Response  *http.Response `json:"-"`
 	Recu      bool           `json:"-"`
 	RecuDepth int            `json:"-"`
 	URLs      []string       `json:"-"`
 	Collected bool           `json:"-"`
 	Retry     int            `json:"-"`
+	SameRedirectDomain bool  `json:"-"`
+	IsBaseline         bool  `json:"-"`
 }
 
 func (bl *Baseline) IsDir() bool {
@@ -144,10 +148,10 @@ func (bl *Baseline) Collect() {
 	if bl.ContentType == "html" || bl.ContentType == "json" || bl.ContentType == "txt" {
 		// the fingerprint library was not designed with js/css fingerprints in mind; skipping unnecessary fingerprint collection reduces false positives and improves performance
 		//fmt.Println(bl.Source, bl.Url.String()+bl.Path, bl.RedirectURL, "call fingersengine")
-		if EnableAllFingerEngine {
-			bl.Frameworks = EngineDetect(bl.Raw)
+		if pkg.EnableAllFingerEngine {
+			bl.Frameworks = pkg.EngineDetect(bl.Raw)
 		} else {
-			bl.Frameworks = FingersDetect(bl.Raw)
+			bl.Frameworks = pkg.FingersDetect(bl.Raw)
 		}
 	}
 
@@ -155,14 +159,14 @@ func (bl *Baseline) Collect() {
 	if bl.ContentType == "html" {
 		bl.Title = iutils.AsciiEncode(parsers.MatchTitle(bl.Body))
 	} else if bl.ContentType == "ico" {
-		if frame := FingerEngine.Favicon().Match(bl.Body); frame != nil {
+		if frame := pkg.FingerEngine.Favicon().Match(bl.Body); frame != nil {
 			bl.Frameworks.Merge(frame)
 		}
 	}
 	}
 
 	bl.Hashes = parsers.NewHashes(bl.Raw)
-	bl.Extracteds = Extractors.Extract(string(bl.Raw))
+	bl.Extracteds.Merge(pkg.Extractors.Extract(string(bl.Raw), true))
 	bl.Unique = UniqueHash(bl)
 }
@@ -170,21 +174,21 @@ func (bl *Baseline) CollectURL() {
 	if len(bl.Body) == 0 {
 		return
 	}
-	for _, reg := range ExtractRegexps["js"][0].CompiledRegexps {
+	for _, reg := range pkg.ExtractRegexps["js"][0].CompiledRegexps {
 		urls := reg.FindAllStringSubmatch(string(bl.Body), -1)
 		for _, u := range urls {
-			u[1] = CleanURL(u[1])
-			if u[1] != "" && !FilterJs(u[1]) {
+			u[1] = pkg.CleanURL(u[1])
+			if u[1] != "" && !pkg.FilterJs(u[1]) {
 				bl.URLs = append(bl.URLs, u[1])
 			}
 		}
 	}
 
-	for _, reg := range ExtractRegexps["url"][0].CompiledRegexps {
+	for _, reg := range pkg.ExtractRegexps["url"][0].CompiledRegexps {
 		urls := reg.FindAllStringSubmatch(string(bl.Body), -1)
 		for _, u := range urls {
-			u[1] = CleanURL(u[1])
-			if u[1] != "" && !FilterUrl(u[1]) {
+			u[1] = pkg.CleanURL(u[1])
+			if u[1] != "" && !pkg.FilterUrl(u[1]) {
 				bl.URLs = append(bl.URLs, u[1])
 			}
 		}
@@ -234,6 +238,15 @@ func (bl *Baseline) Compare(other *Baseline) int {
 	return -1
 }
 
+func (bl *Baseline) ProbeOutput(format []string) string {
+	var s strings.Builder
+	for _, f := range format {
+		s.WriteString("\t")
+		s.WriteString(bl.Get(f))
+	}
+	return strings.TrimSpace(s.String())
+}
+
 var Distance uint8 = 5 // the smaller the number, the more similar; 0 means identical
 
 func (bl *Baseline) FuzzyCompare(other *Baseline) bool {
@@ -243,3 +256,9 @@ func (bl *Baseline) FuzzyCompare(other *Baseline) bool {
 	}
 	return false
 }
+
+func UniqueHash(bl *Baseline) uint16 {
+	// hash composed of host + status code + redirect url + content-type + title + body length with the ones digit dropped
+	// body length may cause some false positives; there is no better solution for now
+	return pkg.CRC16Hash([]byte(bl.Host + strconv.Itoa(bl.Status) + bl.RedirectURL + bl.ContentType + bl.Title + strconv.Itoa(bl.BodyLength/10*10)))
+}
```
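The added `UniqueHash` builds a small deduplication key from host, status code, redirect URL, content type, title, and the body length rounded down to the nearest 10, so near-identical responses collapse into one bucket. A rough standalone sketch of the same idea; it uses CRC-32 from the standard library instead of spray's `pkg.CRC16Hash`, so the key width differs:

```go
// dedupkey.go: sketch of the response-deduplication key used by UniqueHash.
package main

import (
	"fmt"
	"hash/crc32"
	"strconv"
)

type result struct {
	Host        string
	Status      int
	RedirectURL string
	ContentType string
	Title       string
	BodyLength  int
}

// uniqueKey mirrors the composition in the diff: the body length is rounded
// down to the nearest 10 so small size jitter does not defeat deduplication.
func uniqueKey(r result) uint32 {
	s := r.Host + strconv.Itoa(r.Status) + r.RedirectURL + r.ContentType + r.Title +
		strconv.Itoa(r.BodyLength/10*10)
	return crc32.ChecksumIEEE([]byte(s))
}

func main() {
	a := result{Host: "example.com", Status: 200, ContentType: "html", Title: "Welcome", BodyLength: 1234}
	b := a
	b.BodyLength = 1238 // falls into the same 10-byte bucket
	fmt.Println(uniqueKey(a) == uniqueKey(b)) // true
}
```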
Two files change only their package declaration:

```diff
@@ -1,4 +1,4 @@
-package internal
+package core
 
 import (
 	"fmt"
```

```diff
@@ -1,4 +1,4 @@
-package internal
+package core
 
 import (
 	"fmt"
```
core/format.go (new file, 92 lines)
```go
package core

import (
	"bytes"
	"encoding/json"
	"github.com/chainreactors/logs"
	"github.com/chainreactors/spray/core/baseline"
	"github.com/chainreactors/spray/pkg"
	"github.com/chainreactors/words/mask"
	"io"
	"net/url"
	"os"
	"strings"
)

func Format(opts Option) {
	var content []byte
	var err error
	if opts.Format == "stdin" {
		content, err = io.ReadAll(os.Stdin)
	} else {
		content, err = os.ReadFile(opts.Format)
	}

	if err != nil {
		return
	}
	group := make(map[string]map[string]*baseline.Baseline)
	for _, line := range bytes.Split(bytes.TrimSpace(content), []byte("\n")) {
		var result baseline.Baseline
		err := json.Unmarshal(line, &result)
		if err != nil {
			logs.Log.Error(err.Error())
			return
		}
		result.Url, err = url.Parse(result.UrlString)
		if err != nil {
			continue
		}
		if _, exists := group[result.Url.Host]; !exists {
			group[result.Url.Host] = make(map[string]*baseline.Baseline)
		}
		group[result.Url.Host][result.Path] = &result
	}

	for _, results := range group {
		for _, result := range results {
			if !opts.Fuzzy && result.IsFuzzy {
				continue
			}
			if opts.OutputProbe == "" {
				if !opts.NoColor {
					logs.Log.Console(result.ColorString() + "\n")
				} else {
					logs.Log.Console(result.String() + "\n")
				}
			} else {
				probes := strings.Split(opts.OutputProbe, ",")
				logs.Log.Console(result.ProbeOutput(probes) + "\n")
			}
		}
	}
}

func PrintPreset() {
	logs.Log.Console("internal rules:\n")
	for name, rule := range pkg.Rules {
		logs.Log.Consolef("\t%s\t%d rules\n", name, len(strings.Split(rule, "\n")))
	}

	logs.Log.Console("\ninternal dicts:\n")
	for name, dict := range pkg.Dicts {
		logs.Log.Consolef("\t%s\t%d items\n", name, len(dict))
	}

	logs.Log.Console("\ninternal words keyword:\n")
	for name, words := range mask.SpecialWords {
		logs.Log.Consolef("\t%s\t%d words\n", name, len(words))
	}

	logs.Log.Console("\ninternal extractor:\n")
	for name, _ := range pkg.ExtractRegexps {
		logs.Log.Consolef("\t%s\n", name)
	}

	logs.Log.Console("\ninternal fingers:\n")
	for name, engine := range pkg.FingerEngine.EnginesImpl {
		logs.Log.Consolef("\t%s\t%d fingerprints \n", name, engine.Len())
	}

	logs.Log.Consolef("\nload %d active path\n", len(pkg.ActivePath))
}
```
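`Format` expects newline-delimited JSON: one result object per line (the shape produced by spray's JSON output), split on `\n` and unmarshalled line by line, with records whose URL fails to parse skipped. A small sketch of that parsing loop using a deliberately simplified record type; the real type is `baseline.Baseline`, and the JSON field names below are only illustrative:

```go
// ndjson.go: sketch of the line-per-record parsing that Format relies on.
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/url"
)

// record stands in for baseline.Baseline; the JSON field names are hypothetical.
type record struct {
	URL  string `json:"url"`
	Path string `json:"path"`
}

func main() {
	content := []byte(`{"url":"http://example.com","path":"/admin"}
{"url":"http://example.com","path":"/login"}`)

	group := make(map[string][]record) // host -> results, like Format's grouping
	for _, line := range bytes.Split(bytes.TrimSpace(content), []byte("\n")) {
		var r record
		if err := json.Unmarshal(line, &r); err != nil {
			continue
		}
		u, err := url.Parse(r.URL)
		if err != nil {
			continue // skip records with unparsable URLs, as Format does
		}
		group[u.Host] = append(group[u.Host], r)
	}
	fmt.Println(len(group["example.com"])) // 2
}
```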
```diff
@@ -4,14 +4,10 @@ import (
 	"context"
 	"crypto/tls"
 	"fmt"
-	"github.com/chainreactors/logs"
+	"github.com/chainreactors/proxyclient"
 	"github.com/valyala/fasthttp"
-	"github.com/valyala/fasthttp/fasthttpproxy"
-	"golang.org/x/net/proxy"
 	"net"
 	"net/http"
-	"net/url"
-	"strings"
 	"time"
 )
 
@@ -37,6 +33,7 @@ const (
 
 func NewClient(config *ClientConfig) *Client {
 	var client *Client
+
 	if config.Type == FAST {
 		client = &Client{
 			fastClient: &fasthttp.Client{
@@ -44,28 +41,27 @@ func NewClient(config *ClientConfig) *Client {
 					Renegotiation:      tls.RenegotiateOnceAsClient,
 					InsecureSkipVerify: true,
 				},
-				Dial:                customDialFunc(config.ProxyAddr, config.Timeout),
+				Dial:                customDialFunc(config.ProxyClient, config.Timeout),
 				MaxConnsPerHost:     config.Thread * 3 / 2,
 				MaxIdleConnDuration: config.Timeout,
 				//MaxConnWaitTimeout: time.Duration(timeout) * time.Second,
-				//ReadTimeout: config.Timeout * time.Second,
-				//WriteTimeout: config.Timeout * time.Second,
+				ReadTimeout:         config.Timeout,
+				WriteTimeout:        config.Timeout,
 				ReadBufferSize:      16384, // 16k
 				MaxResponseBodySize: int(DefaultMaxBodySize),
 				NoDefaultUserAgentHeader:      true,
 				DisablePathNormalizing:        true,
 				DisableHeaderNamesNormalizing: true,
 			},
-			Config: config,
+			ClientConfig: config,
 		}
 	} else {
 		client = &Client{
 			standardClient: &http.Client{
 				Transport: &http.Transport{
-					//Proxy: Proxy,
-					//TLSHandshakeTimeout : delay * time.Second,
+					DialContext: config.ProxyClient,
 					TLSClientConfig: &tls.Config{
-						Renegotiation:      tls.RenegotiateOnceAsClient,
+						Renegotiation:      tls.RenegotiateNever,
 						InsecureSkipVerify: true,
 					},
 					TLSHandshakeTimeout: config.Timeout,
@@ -78,28 +74,23 @@ func NewClient(config *ClientConfig) *Client {
 					return http.ErrUseLastResponse
 				},
 			},
-			Config: config,
-		}
-		if config.ProxyAddr != "" {
-			client.standardClient.Transport.(*http.Transport).Proxy = func(_ *http.Request) (*url.URL, error) {
-				return url.Parse(config.ProxyAddr)
-			}
+			ClientConfig: config,
 		}
 	}
 	return client
 }
 
 type ClientConfig struct {
 	Type    int
 	Timeout time.Duration
 	Thread  int
-	ProxyAddr   string
+	ProxyClient proxyclient.Dial
 }
 
 type Client struct {
 	fastClient     *fasthttp.Client
 	standardClient *http.Client
-	Config *ClientConfig
+	*ClientConfig
 }
 
 func (c *Client) TransToCheck() {
@@ -110,70 +101,36 @@ func (c *Client) TransToCheck() {
 	}
 }
 
-func (c *Client) FastDo(ctx context.Context, req *fasthttp.Request) (*fasthttp.Response, error) {
+func (c *Client) FastDo(req *fasthttp.Request) (*fasthttp.Response, error) {
 	resp := fasthttp.AcquireResponse()
-	err := c.fastClient.Do(req, resp)
+	err := c.fastClient.DoTimeout(req, resp, c.Timeout)
 	return resp, err
 }
 
-func (c *Client) StandardDo(ctx context.Context, req *http.Request) (*http.Response, error) {
+func (c *Client) StandardDo(req *http.Request) (*http.Response, error) {
 	return c.standardClient.Do(req)
 }
 
-func (c *Client) Do(ctx context.Context, req *Request) (*Response, error) {
+func (c *Client) Do(req *Request) (*Response, error) {
 	if c.fastClient != nil {
-		resp, err := c.FastDo(ctx, req.FastRequest)
+		resp, err := c.FastDo(req.FastRequest)
 		return &Response{FastResponse: resp, ClientType: FAST}, err
 	} else if c.standardClient != nil {
-		resp, err := c.StandardDo(ctx, req.StandardRequest)
+		resp, err := c.StandardDo(req.StandardRequest)
 		return &Response{StandardResponse: resp, ClientType: STANDARD}, err
 	} else {
 		return nil, fmt.Errorf("not found client")
 	}
 }
 
-func customDialFunc(proxyAddr string, timeout time.Duration) fasthttp.DialFunc {
-	if proxyAddr == "" {
+func customDialFunc(dialer proxyclient.Dial, timeout time.Duration) fasthttp.DialFunc {
+	if dialer == nil {
 		return func(addr string) (net.Conn, error) {
 			return fasthttp.DialTimeout(addr, timeout)
 		}
 	}
-	u, err := url.Parse(proxyAddr)
-	if err != nil {
-		logs.Log.Error(err.Error())
-		return nil
-	}
-	if strings.ToLower(u.Scheme) == "socks5" {
-		return func(addr string) (net.Conn, error) {
-			var auth *proxy.Auth
-			username := u.User.Username()
-			password, ok := u.User.Password()
-			if ok {
-				auth = &proxy.Auth{
-					User:     username,
-					Password: password,
-				}
-			}
-			dialer, err := proxy.SOCKS5("tcp", u.Host, auth, proxy.Direct)
-			if err != nil {
-				return nil, err
-			}
-
-			// Set up a connection with a timeout
-			conn, err := dialer.Dial("tcp", addr)
-			if err != nil {
-				return nil, err
-			}
-
-			// Set deadlines for the connection
-			deadline := time.Now().Add(timeout)
-			if err := conn.SetDeadline(deadline); err != nil {
-				conn.Close()
-				return nil, err
-			}
-			return conn, nil
-		}
-	} else {
-		return fasthttpproxy.FasthttpHTTPDialerTimeout(u.Host, timeout)
+	return func(addr string) (net.Conn, error) {
+		ctx, _ := context.WithTimeout(context.Background(), timeout)
+		return dialer.DialContext(ctx, "tcp", addr)
 	}
 }
```
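The rewritten `customDialFunc` drops the hand-rolled SOCKS5/HTTP proxy handling and simply adapts whatever context-aware dialer `proxyclient` supplies to fasthttp's `func(addr string) (net.Conn, error)` signature. A generic sketch of that adapter, using a plain `net.Dialer` in place of the proxy chain:

```go
// dialadapter.go: sketch of adapting a context-aware dialer to fasthttp.DialFunc.
package main

import (
	"context"
	"net"
	"time"

	"github.com/valyala/fasthttp"
)

// dialContextFunc matches the shape of the DialContext call used in the diff.
type dialContextFunc func(ctx context.Context, network, addr string) (net.Conn, error)

func asFasthttpDial(dial dialContextFunc, timeout time.Duration) fasthttp.DialFunc {
	if dial == nil {
		// No proxy configured: fall back to fasthttp's own dialer.
		return func(addr string) (net.Conn, error) {
			return fasthttp.DialTimeout(addr, timeout)
		}
	}
	return func(addr string) (net.Conn, error) {
		ctx, cancel := context.WithTimeout(context.Background(), timeout)
		defer cancel() // bounds the dial itself; an established net.Dialer conn is unaffected
		return dial(ctx, "tcp", addr)
	}
}

func main() {
	d := &net.Dialer{}
	client := &fasthttp.Client{Dial: asFasthttpDial(d.DialContext, 5*time.Second)}
	_ = client
}
```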
```diff
@@ -1,31 +1,26 @@
 package ihttp
 
 import (
+	"context"
+	"github.com/chainreactors/spray/pkg"
 	"github.com/valyala/fasthttp"
 	"net/http"
 )
 
-func BuildPathRequest(clientType int, base, path, method string) (*Request, error) {
+func BuildRequest(ctx context.Context, clientType int, base, path, host, method string) (*Request, error) {
 	if clientType == FAST {
 		req := fasthttp.AcquireRequest()
 		req.Header.SetMethod(method)
 		req.SetRequestURI(base + path)
+		if host != "" {
+			req.SetHost(host)
+		}
 		return &Request{FastRequest: req, ClientType: FAST}, nil
 	} else {
-		req, err := http.NewRequest(method, base+path, nil)
-		return &Request{StandardRequest: req, ClientType: STANDARD}, err
-	}
-}
-
-func BuildHostRequest(clientType int, base, host string) (*Request, error) {
-	if clientType == FAST {
-		req := fasthttp.AcquireRequest()
-		req.SetRequestURI(base)
-		req.SetHost(host)
-		return &Request{FastRequest: req, ClientType: FAST}, nil
-	} else {
-		req, err := http.NewRequest("GET", base, nil)
-		req.Host = host
+		req, err := http.NewRequestWithContext(ctx, method, base+path, nil)
+		if host != "" {
+			req.Host = host
+		}
 		return &Request{StandardRequest: req, ClientType: STANDARD}, err
 	}
 }
@@ -36,14 +31,18 @@ type Request struct {
 	ClientType int
 }
 
-func (r *Request) SetHeaders(header map[string]string) {
+func (r *Request) SetHeaders(header http.Header, RandomUA bool) {
+	if RandomUA {
+		r.SetHeader("User-Agent", pkg.RandomUA())
+	}
+
 	if r.StandardRequest != nil {
-		for k, v := range header {
-			r.StandardRequest.Header.Set(k, v)
-		}
+		r.StandardRequest.Header = header
 	} else if r.FastRequest != nil {
 		for k, v := range header {
-			r.FastRequest.Header.Set(k, v)
+			for _, i := range v {
+				r.FastRequest.Header.Set(k, i)
+			}
 		}
 	}
 }
```
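`BuildRequest` now takes a context (so standard-library requests can be cancelled) plus an optional host override: with net/http, setting `req.Host` changes the Host header sent on the wire while the URL still decides where the connection goes; fasthttp gets the same effect via `req.SetHost`. A small stdlib-only sketch of the override:

```go
// hostoverride.go: sketch of the Host-header override BuildRequest applies.
package main

import (
	"context"
	"fmt"
	"io"
	"net/http"
	"net/http/httptest"
)

func main() {
	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprint(w, r.Host) // echo back the Host header the client sent
	}))
	defer srv.Close()

	req, _ := http.NewRequestWithContext(context.Background(), http.MethodGet, srv.URL, nil)
	req.Host = "virtual.example.com" // the connection still goes to srv.URL

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	fmt.Println(string(body)) // "virtual.example.com", not 127.0.0.1:<port>
}
```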
```diff
@@ -1,8 +1,8 @@
 package ihttp
 
 import (
-	"bytes"
 	"github.com/chainreactors/logs"
+	"github.com/chainreactors/utils/httputils"
 	"github.com/valyala/fasthttp"
 	"io"
 	"net/http"
@@ -29,7 +29,7 @@ func (r *Response) Body() []byte {
 	if r.FastResponse != nil {
 		return r.FastResponse.Body()
 	} else if r.StandardResponse != nil {
-		if DefaultMaxBodySize == -1 {
+		if r.StandardResponse.ContentLength == -1 {
 			body, err := io.ReadAll(r.StandardResponse.Body)
 			if err != nil {
 				return nil
@@ -93,15 +93,7 @@ func (r *Response) Header() []byte {
 	if r.FastResponse != nil {
 		return r.FastResponse.Header.Header()
 	} else if r.StandardResponse != nil {
-		var header bytes.Buffer
-		header.WriteString(r.StandardResponse.Proto + " " + r.StandardResponse.Status)
-		for k, v := range r.StandardResponse.Header {
-			for _, i := range v {
-				header.WriteString(k + ": " + i + "\r\n")
-			}
-		}
-		header.WriteString("\r\n")
-		return header.Bytes()
+		return append(httputils.ReadRawHeader(r.StandardResponse), []byte("\r\n")...)
 	} else {
 		return nil
 	}
```
@ -1,20 +1,22 @@
|
|||||||
package internal
|
package core
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bufio"
|
"bufio"
|
||||||
"bytes"
|
|
||||||
"errors"
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
"github.com/chainreactors/files"
|
"github.com/chainreactors/files"
|
||||||
"github.com/chainreactors/logs"
|
"github.com/chainreactors/logs"
|
||||||
"github.com/chainreactors/parsers"
|
"github.com/chainreactors/parsers"
|
||||||
"github.com/chainreactors/spray/internal/ihttp"
|
"github.com/chainreactors/proxyclient"
|
||||||
"github.com/chainreactors/spray/internal/pool"
|
"github.com/chainreactors/spray/core/baseline"
|
||||||
|
"github.com/chainreactors/spray/core/ihttp"
|
||||||
|
"github.com/chainreactors/spray/core/pool"
|
||||||
"github.com/chainreactors/spray/pkg"
|
"github.com/chainreactors/spray/pkg"
|
||||||
"github.com/chainreactors/utils"
|
"github.com/chainreactors/utils"
|
||||||
"github.com/chainreactors/utils/iutils"
|
"github.com/chainreactors/utils/iutils"
|
||||||
"github.com/chainreactors/words/mask"
|
"github.com/chainreactors/words/mask"
|
||||||
"github.com/chainreactors/words/rule"
|
"github.com/chainreactors/words/rule"
|
||||||
|
"github.com/charmbracelet/lipgloss"
|
||||||
"github.com/expr-lang/expr"
|
"github.com/expr-lang/expr"
|
||||||
"github.com/vbauerster/mpb/v8"
|
"github.com/vbauerster/mpb/v8"
|
||||||
"io/ioutil"
|
"io/ioutil"
|
||||||
@ -31,7 +33,6 @@ import (
|
|||||||
|
|
||||||
var (
|
var (
|
||||||
DefaultThreads = 20
|
DefaultThreads = 20
|
||||||
SkipChar = "%SKIP%"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
type Option struct {
|
type Option struct {
|
||||||
@ -54,15 +55,14 @@ type InputOptions struct {
|
|||||||
CIDRs []string `short:"i" long:"cidr" description:"String, input cidr, e.g.: 1.1.1.1/24 "`
|
CIDRs []string `short:"i" long:"cidr" description:"String, input cidr, e.g.: 1.1.1.1/24 "`
|
||||||
RawFile string `long:"raw" description:"File, input raw request filename"`
|
RawFile string `long:"raw" description:"File, input raw request filename"`
|
||||||
Dictionaries []string `short:"d" long:"dict" description:"Files, Multi,dict files, e.g.: -d 1.txt -d 2.txt" config:"dictionaries"`
|
Dictionaries []string `short:"d" long:"dict" description:"Files, Multi,dict files, e.g.: -d 1.txt -d 2.txt" config:"dictionaries"`
|
||||||
//NoDict bool `long:"no-dict" description:"Bool, no dictionary" config:"no-dict"`
|
DefaultDict bool `short:"D" long:"default" description:"Bool, use default dictionary" config:"default"`
|
||||||
DefaultDict bool `short:"D" long:"default" description:"Bool, use default dictionary" config:"default"`
|
Word string `short:"w" long:"word" description:"String, word generate dsl, e.g.: -w test{?ld#4}" config:"word"`
|
||||||
Word string `short:"w" long:"word" description:"String, word generate dsl, e.g.: -w test{?ld#4}" config:"word"`
|
Rules []string `short:"r" long:"rules" description:"Files, rule files, e.g.: -r rule1.txt -r rule2.txt" config:"rules"`
|
||||||
Rules []string `short:"r" long:"rules" description:"Files, rule files, e.g.: -r rule1.txt -r rule2.txt" config:"rules"`
|
AppendRule []string `short:"R" long:"append-rule" description:"Files, when found valid path , use append rule generator new word with current path" config:"append-rules"`
|
||||||
AppendRule []string `long:"append-rule" description:"Files, when found valid path , use append rule generator new word with current path" config:"append-rules"`
|
FilterRule string `long:"filter-rule" description:"String, filter rule, e.g.: --rule-filter '>8 <4'" config:"filter-rule"`
|
||||||
FilterRule string `long:"filter-rule" description:"String, filter rule, e.g.: --rule-filter '>8 <4'" config:"filter-rule"`
|
AppendFile []string `long:"append" description:"Files, when found valid path , use append file new word with current path" config:"append-files"`
|
||||||
AppendFile []string `long:"append-file" description:"Files, when found valid path , use append file new word with current path" config:"append-files"`
|
Offset int `long:"offset" description:"Int, wordlist offset"`
|
||||||
Offset int `long:"offset" description:"Int, wordlist offset"`
|
Limit int `long:"limit" description:"Int, wordlist limit, start with offset. e.g.: --offset 1000 --limit 100"`
|
||||||
Limit int `long:"limit" description:"Int, wordlist limit, start with offset. e.g.: --offset 1000 --limit 100"`
|
|
||||||
}
|
}
|
||||||
|
|
||||||
type FunctionOptions struct {
|
type FunctionOptions struct {
|
||||||
@ -84,13 +84,13 @@ type OutputOptions struct {
|
|||||||
Filter string `long:"filter" description:"String, custom filter function, e.g.: --filter 'current.Body contains \"hello\"'" config:"filter"`
|
Filter string `long:"filter" description:"String, custom filter function, e.g.: --filter 'current.Body contains \"hello\"'" config:"filter"`
|
||||||
Fuzzy bool `long:"fuzzy" description:"String, open fuzzy output" config:"fuzzy"`
|
Fuzzy bool `long:"fuzzy" description:"String, open fuzzy output" config:"fuzzy"`
|
||||||
OutputFile string `short:"f" long:"file" description:"String, output filename" json:"output_file,omitempty" config:"output-file"`
|
OutputFile string `short:"f" long:"file" description:"String, output filename" json:"output_file,omitempty" config:"output-file"`
|
||||||
FuzzyFile string `long:"fuzzy-file" description:"String, fuzzy output filename" json:"fuzzy_file,omitempty" config:"fuzzy-file"`
|
|
||||||
DumpFile string `long:"dump-file" description:"String, dump all request, and write to filename" config:"dump-file"`
|
DumpFile string `long:"dump-file" description:"String, dump all request, and write to filename" config:"dump-file"`
|
||||||
Dump bool `long:"dump" description:"Bool, dump all request" config:"dump"`
|
Dump bool `long:"dump" description:"Bool, dump all request" config:"dump"`
|
||||||
AutoFile bool `long:"auto-file" description:"Bool, auto generator output and fuzzy filename" config:"auto-file"`
|
AutoFile bool `long:"auto-file" description:"Bool, auto generator output and fuzzy filename" config:"auto-file"`
|
||||||
Format string `short:"F" long:"format" description:"String, output format, e.g.: --format 1.json" config:"format"`
|
Format string `short:"F" long:"format" description:"String, output format, e.g.: --format 1.json" config:"format"`
|
||||||
Json bool `short:"j" long:"json" description:"Bool, output json" config:"json"`
|
Json bool `short:"j" long:"json" description:"Bool, output json" config:"json"`
|
||||||
OutputProbe string `short:"o" long:"probe" description:"String, output format" config:"output_probe"`
|
FileOutput string `short:"O" long:"file-output" default:"json" description:"Bool, file output format" config:"file_output"`
|
||||||
|
OutputProbe string `short:"o" long:"probe" description:"String, output format" config:"output"`
|
||||||
Quiet bool `short:"q" long:"quiet" description:"Bool, Quiet" config:"quiet"`
|
Quiet bool `short:"q" long:"quiet" description:"Bool, Quiet" config:"quiet"`
|
||||||
NoColor bool `long:"no-color" description:"Bool, no color" config:"no-color"`
|
NoColor bool `long:"no-color" description:"Bool, no color" config:"no-color"`
|
||||||
NoBar bool `long:"no-bar" description:"Bool, No progress bar" config:"no-bar"`
|
NoBar bool `long:"no-bar" description:"Bool, No progress bar" config:"no-bar"`
|
||||||
@ -98,8 +98,8 @@ type OutputOptions struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
type RequestOptions struct {
|
type RequestOptions struct {
|
||||||
Method string `short:"x" long:"method" default:"GET" description:"String, request method, e.g.: --method POST" config:"method"`
|
Method string `short:"X" long:"method" default:"GET" description:"String, request method, e.g.: --method POST" config:"method"`
|
||||||
Headers []string `long:"header" description:"Strings, custom headers, e.g.: --headers 'Auth: example_auth'" config:"headers"`
|
Headers []string `short:"H" long:"header" description:"Strings, custom headers, e.g.: --header 'Auth: example_auth'" config:"headers"`
|
||||||
UserAgent string `long:"user-agent" description:"String, custom user-agent, e.g.: --user-agent Custom" config:"useragent"`
|
UserAgent string `long:"user-agent" description:"String, custom user-agent, e.g.: --user-agent Custom" config:"useragent"`
|
||||||
RandomUserAgent bool `long:"random-agent" description:"Bool, use random with default user-agent" config:"random-useragent"`
|
RandomUserAgent bool `long:"random-agent" description:"Bool, use random with default user-agent" config:"random-useragent"`
|
||||||
Cookie []string `long:"cookie" description:"Strings, custom cookie" config:"cookies"`
|
Cookie []string `long:"cookie" description:"Strings, custom cookie" config:"cookies"`
|
||||||
@ -111,18 +111,19 @@ type PluginOptions struct {
|
|||||||
Advance bool `short:"a" long:"advance" description:"Bool, enable all plugin" config:"all" `
|
Advance bool `short:"a" long:"advance" description:"Bool, enable all plugin" config:"all" `
|
||||||
Extracts []string `long:"extract" description:"Strings, extract response, e.g.: --extract js --extract ip --extract version:(.*?)" config:"extract"`
|
Extracts []string `long:"extract" description:"Strings, extract response, e.g.: --extract js --extract ip --extract version:(.*?)" config:"extract"`
|
||||||
ExtractConfig string `long:"extract-config" description:"String, extract config filename" config:"extract-config"`
|
ExtractConfig string `long:"extract-config" description:"String, extract config filename" config:"extract-config"`
|
||||||
Recon bool `long:"recon" description:"Bool, enable recon" config:"recon"`
|
ActivePlugin bool `long:"active" description:"Bool, enable active finger path"`
|
||||||
Bak bool `long:"bak" description:"Bool, enable bak found" config:"bak"`
|
ReconPlugin bool `long:"recon" description:"Bool, enable recon" config:"recon"`
|
||||||
FileBak bool `long:"file-bak" description:"Bool, enable valid result bak found, equal --append-rule rule/filebak.txt" config:"file-bak"`
|
BakPlugin bool `long:"bak" description:"Bool, enable bak found" config:"bak"`
|
||||||
Common bool `long:"common" description:"Bool, enable common file found" config:"common"`
|
FuzzuliPlugin bool `long:"fuzzuli" description:"Bool, enable fuzzuli plugin" config:"fuzzuli"`
|
||||||
Crawl bool `long:"crawl" description:"Bool, enable crawl" config:"crawl"`
|
CommonPlugin bool `long:"common" description:"Bool, enable common file found" config:"common"`
|
||||||
|
CrawlPlugin bool `long:"crawl" description:"Bool, enable crawl" config:"crawl"`
|
||||||
CrawlDepth int `long:"crawl-depth" default:"3" description:"Int, crawl depth" config:"crawl-depth"`
|
CrawlDepth int `long:"crawl-depth" default:"3" description:"Int, crawl depth" config:"crawl-depth"`
|
||||||
|
AppendDepth int `long:"append-depth" default:"2" description:"Int, append depth" config:"append-depth"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type ModeOptions struct {
|
type ModeOptions struct {
|
||||||
RateLimit int `long:"rate-limit" default:"0" description:"Int, request rate limit (rate/s), e.g.: --rate-limit 100" config:"rate-limit"`
|
RateLimit int `long:"rate-limit" default:"0" description:"Int, request rate limit (rate/s), e.g.: --rate-limit 100" config:"rate-limit"`
|
||||||
Force bool `long:"force" description:"Bool, skip error break" config:"force"`
|
Force bool `long:"force" description:"Bool, skip error break" config:"force"`
|
||||||
//CheckOnly bool `long:"check-only" description:"Bool, check only" config:"check-only"`
|
|
||||||
NoScope bool `long:"no-scope" description:"Bool, no scope" config:"no-scope"`
|
NoScope bool `long:"no-scope" description:"Bool, no scope" config:"no-scope"`
|
||||||
Scope []string `long:"scope" description:"String, custom scope, e.g.: --scope *.example.com" config:"scope"`
|
Scope []string `long:"scope" description:"String, custom scope, e.g.: --scope *.example.com" config:"scope"`
|
||||||
Recursive string `long:"recursive" default:"current.IsDir()" description:"String,custom recursive rule, e.g.: --recursive current.IsDir()" config:"recursive"`
|
Recursive string `long:"recursive" default:"current.IsDir()" description:"String,custom recursive rule, e.g.: --recursive current.IsDir()" config:"recursive"`
|
||||||
@ -132,27 +133,28 @@ type ModeOptions struct {
|
|||||||
CheckPeriod int `long:"check-period" default:"200" description:"Int, check period when request" config:"check-period"`
|
CheckPeriod int `long:"check-period" default:"200" description:"Int, check period when request" config:"check-period"`
|
||||||
ErrPeriod int `long:"error-period" default:"10" description:"Int, check period when error" config:"error-period"`
|
ErrPeriod int `long:"error-period" default:"10" description:"Int, check period when error" config:"error-period"`
|
||||||
BreakThreshold int `long:"error-threshold" default:"20" description:"Int, break when the error exceeds the threshold" config:"error-threshold"`
|
BreakThreshold int `long:"error-threshold" default:"20" description:"Int, break when the error exceeds the threshold" config:"error-threshold"`
|
||||||
BlackStatus string `long:"black-status" default:"400,410" description:"Strings (comma split),custom black status" config:"black-status"`
|
BlackStatus string `short:"B" long:"black-status" default:"400,410" description:"Strings (comma split),custom black status" config:"black-status"`
|
||||||
WhiteStatus string `long:"white-status" default:"200" description:"Strings (comma split), custom white status" config:"white-status"`
|
WhiteStatus string `short:"W" long:"white-status" default:"200" description:"Strings (comma split), custom white status" config:"white-status"`
|
||||||
FuzzyStatus string `long:"fuzzy-status" default:"500,501,502,503" description:"Strings (comma split), custom fuzzy status" config:"fuzzy-status"`
|
FuzzyStatus string `long:"fuzzy-status" default:"500,501,502,503,301,302,404" description:"Strings (comma split), custom fuzzy status" config:"fuzzy-status"`
|
||||||
UniqueStatus string `long:"unique-status" default:"403,200,404" description:"Strings (comma split), custom unique status" config:"unique-status"`
|
UniqueStatus string `long:"unique-status" default:"403,200,404" description:"Strings (comma split), custom unique status" config:"unique-status"`
|
||||||
Unique bool `long:"unique" description:"Bool, unique response" config:"unique"`
|
Unique bool `long:"unique" description:"Bool, unique response" config:"unique"`
|
||||||
RetryCount int `long:"retry" default:"0" description:"Int, retry count" config:"retry"`
|
RetryCount int `long:"retry" default:"0" description:"Int, retry count" config:"retry"`
|
||||||
SimhashDistance int `long:"sim-distance" default:"5" config:"sim-distance"`
|
SimhashDistance int `long:"sim-distance" default:"8" config:"sim-distance"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type MiscOptions struct {
|
type MiscOptions struct {
|
||||||
Mod string `short:"m" long:"mod" default:"path" choice:"path" choice:"host" description:"String, path/host spray" config:"mod"`
|
Mod string `short:"m" long:"mod" default:"path" choice:"path" choice:"host" description:"String, path/host spray" config:"mod"`
|
||||||
Client string `short:"C" long:"client" default:"auto" choice:"fast" choice:"standard" choice:"auto" description:"String, Client type" config:"client"`
|
Client string `short:"C" long:"client" default:"auto" choice:"fast" choice:"standard" choice:"auto" description:"String, Client type" config:"client"`
|
||||||
Deadline int `long:"deadline" default:"999999" description:"Int, deadline (seconds)" config:"deadline"` // todo 总的超时时间,适配云函数的deadline
|
Deadline int `long:"deadline" default:"999999" description:"Int, deadline (seconds)" config:"deadline"` // todo 总的超时时间,适配云函数的deadline
|
||||||
Timeout int `short:"T" long:"timeout" default:"5" description:"Int, timeout with request (seconds)" config:"timeout"`
|
Timeout int `short:"T" long:"timeout" default:"5" description:"Int, timeout with request (seconds)" config:"timeout"`
|
||||||
PoolSize int `short:"P" long:"pool" default:"5" description:"Int, Pool size" config:"pool"`
|
PoolSize int `short:"P" long:"pool" default:"5" description:"Int, Pool size" config:"pool"`
|
||||||
Threads int `short:"t" long:"thread" default:"20" description:"Int, number of threads per pool" config:"thread"`
|
Threads int `short:"t" long:"thread" default:"20" description:"Int, number of threads per pool" config:"thread"`
|
||||||
Debug bool `long:"debug" description:"Bool, output debug info" config:"debug"`
|
Debug bool `long:"debug" description:"Bool, output debug info" config:"debug"`
|
||||||
Version bool `long:"version" description:"Bool, show version"`
|
Version bool `long:"version" description:"Bool, show version"`
|
||||||
Verbose []bool `short:"v" description:"Bool, log verbose level ,default 0, level1: -v level2 -vv " config:"verbose"`
|
Verbose []bool `short:"v" description:"Bool, log verbose level ,default 0, level1: -v level2 -vv " config:"verbose"`
|
||||||
Proxy string `long:"proxy" description:"String, proxy address, e.g.: --proxy socks5://127.0.0.1:1080" config:"proxy"`
|
Proxies []string `long:"proxy" description:"String, proxy address, e.g.: --proxy socks5://127.0.0.1:1080" config:"proxies"`
|
||||||
InitConfig bool `long:"init" description:"Bool, init config file"`
|
InitConfig bool `long:"init" description:"Bool, init config file"`
|
||||||
|
PrintPreset bool `long:"print" description:"Bool, print preset all preset config "`
|
||||||
}
|
}
|
||||||
|
|
||||||
func (opt *Option) Validate() error {
|
func (opt *Option) Validate() error {
|
||||||
@ -167,7 +169,6 @@ func (opt *Option) Validate() error {
|
|||||||
|
|
||||||
if opt.Depth > 0 && opt.ResumeFrom != "" {
|
if opt.Depth > 0 && opt.ResumeFrom != "" {
|
||||||
// 递归与断点续传会造成混淆, 断点续传的word与rule不是通过命令行获取的
|
// 递归与断点续传会造成混淆, 断点续传的word与rule不是通过命令行获取的
|
||||||
|
|
||||||
return errors.New("--resume and --depth cannot be used at the same time")
|
return errors.New("--resume and --depth cannot be used at the same time")
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -205,6 +206,11 @@ func (opt *Option) Prepare() error {
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
err = pkg.Load()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
if opt.Extracts != nil {
|
if opt.Extracts != nil {
|
||||||
for _, e := range opt.Extracts {
|
for _, e := range opt.Extracts {
|
||||||
if reg, ok := pkg.ExtractRegexps[e]; ok {
|
if reg, ok := pkg.ExtractRegexps[e]; ok {
|
||||||
@ -227,33 +233,30 @@ func (opt *Option) Prepare() error {
|
|||||||
pkg.Extractors[opt.ExtractConfig] = extracts
|
pkg.Extractors[opt.ExtractConfig] = extracts
|
||||||
}
|
}
|
||||||
|
|
||||||
err = pkg.Load()
|
|
||||||
if err != nil {
|
|
||||||
iutils.Fatal(err.Error())
|
|
||||||
}
|
|
||||||
|
|
||||||
// 初始化全局变量
|
// 初始化全局变量
|
||||||
pkg.Distance = uint8(opt.SimhashDistance)
|
baseline.Distance = uint8(opt.SimhashDistance)
|
||||||
if opt.MaxBodyLength == -1 {
|
if opt.MaxBodyLength == -1 {
|
||||||
ihttp.DefaultMaxBodySize = -1
|
ihttp.DefaultMaxBodySize = -1
|
||||||
} else {
|
} else {
|
||||||
ihttp.DefaultMaxBodySize = opt.MaxBodyLength * 1024
|
ihttp.DefaultMaxBodySize = opt.MaxBodyLength * 1024
|
||||||
}
|
}
|
||||||
|
|
||||||
pkg.BlackStatus = parseStatus(pkg.BlackStatus, opt.BlackStatus)
|
pkg.BlackStatus = pkg.ParseStatus(pkg.DefaultBlackStatus, opt.BlackStatus)
|
||||||
pkg.WhiteStatus = parseStatus(pkg.WhiteStatus, opt.WhiteStatus)
|
pkg.WhiteStatus = pkg.ParseStatus(pkg.DefaultWhiteStatus, opt.WhiteStatus)
|
||||||
if opt.FuzzyStatus == "all" {
|
if opt.FuzzyStatus == "all" {
|
||||||
pool.EnableAllFuzzy = true
|
pool.EnableAllFuzzy = true
|
||||||
} else {
|
} else {
|
||||||
pkg.FuzzyStatus = parseStatus(pkg.FuzzyStatus, opt.FuzzyStatus)
|
pkg.FuzzyStatus = pkg.ParseStatus(pkg.DefaultFuzzyStatus, opt.FuzzyStatus)
|
||||||
}
|
}
|
||||||
|
|
||||||
if opt.Unique {
|
if opt.Unique {
|
||||||
pool.EnableAllUnique = true
|
pool.EnableAllUnique = true
|
||||||
} else {
|
} else {
|
||||||
pkg.UniqueStatus = parseStatus(pkg.UniqueStatus, opt.UniqueStatus)
|
pkg.UniqueStatus = pkg.ParseStatus(pkg.DefaultUniqueStatus, opt.UniqueStatus)
|
||||||
}
|
}
|
||||||
pool.MaxCrawl = opt.CrawlDepth
|
|
||||||
|
logs.Log.Logf(pkg.LogVerbose, "Black Status: %v, WhiteStatus: %v, WAFStatus: %v", pkg.BlackStatus, pkg.WhiteStatus, pkg.WAFStatus)
|
||||||
|
logs.Log.Logf(pkg.LogVerbose, "Fuzzy Status: %v, Unique Status: %v", pkg.FuzzyStatus, pkg.UniqueStatus)
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
@ -263,9 +266,10 @@ func (opt *Option) NewRunner() (*Runner, error) {
|
|||||||
r := &Runner{
|
r := &Runner{
|
||||||
Option: opt,
|
Option: opt,
|
||||||
taskCh: make(chan *Task),
|
taskCh: make(chan *Task),
|
||||||
outputCh: make(chan *pkg.Baseline, 256),
|
outputCh: make(chan *baseline.Baseline, 256),
|
||||||
|
poolwg: &sync.WaitGroup{},
|
||||||
outwg: &sync.WaitGroup{},
|
outwg: &sync.WaitGroup{},
|
||||||
fuzzyCh: make(chan *pkg.Baseline, 256),
|
fuzzyCh: make(chan *baseline.Baseline, 256),
|
||||||
Headers: make(map[string]string),
|
Headers: make(map[string]string),
|
||||||
Total: opt.Limit,
|
Total: opt.Limit,
|
||||||
Color: true,
|
Color: true,
|
||||||
@@ -304,39 +308,19 @@ func (opt *Option) NewRunner() (*Runner, error) {
 		r.ClientType = ihttp.STANDARD
 	}

-	if opt.Threads == DefaultThreads && len(opt.Dictionaries) == 0 {
-		r.Threads = 1000
-	}
-
-	if opt.Recon {
-		pkg.Extractors["recon"] = pkg.ExtractRegexps["pentest"]
-	}
-
-	if opt.Advance {
-		r.Crawl = true
-		r.Finger = true
-		r.Bak = true
-		r.Common = true
-		pkg.EnableAllFingerEngine = true
-		pkg.Extractors["recon"] = pkg.ExtractRegexps["pentest"]
-		opt.AppendRule = append(opt.AppendRule, "filebak")
-	}
-
-	if opt.FileBak {
-		opt.AppendRule = append(opt.AppendRule, "filebak")
-	}
-	if opt.Common {
-		r.AppendWords = append(r.AppendWords, mask.SpecialWords["common_file"]...)
-	}
-	if opt.Finger {
-		r.AppendWords = append(r.AppendWords, pkg.ActivePath...)
-		pkg.EnableAllFingerEngine = true
-	}
-
-	opt.PrintPlugin()
-
-	if opt.NoScope {
-		r.Scope = []string{"*"}
-	}
-
+	if len(opt.Proxies) > 0 {
+		urls, err := proxyclient.ParseProxyURLs(opt.Proxies)
+		if err != nil {
+			return nil, err
+		}
+		r.ProxyClient, err = proxyclient.NewClientChain(urls)
+		if err != nil {
+			return nil, err
+		}
+	}
+
+	err = opt.BuildPlugin(r)
+	if err != nil {
+		return nil, err
+	}
 	err = opt.BuildWords(r)
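Editor's note: the new code above hands all outgoing traffic to chainreactors/proxyclient when proxy URLs are supplied, apparently building a chained client from them (ParseProxyURLs + NewClientChain). Purely as a point of reference, routing requests through a single upstream proxy with only the Go standard library looks like the sketch below; this is not the project's implementation, and the proxy address is a placeholder.

```go
package main

import (
	"fmt"
	"net/http"
	"net/url"
	"time"
)

func main() {
	// Hypothetical single proxy address; spray accepts several and chains them
	// via the proxyclient package instead.
	proxyURL, err := url.Parse("http://127.0.0.1:8080")
	if err != nil {
		panic(err)
	}

	client := &http.Client{
		Timeout: 5 * time.Second,
		Transport: &http.Transport{
			// Route every request through the configured proxy.
			Proxy: http.ProxyURL(proxyURL),
		},
	}

	resp, err := client.Get("http://example.com/")
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.StatusCode)
}
```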
@@ -344,6 +328,10 @@ func (opt *Option) NewRunner() (*Runner, error) {
 		return nil, err
 	}

+	if opt.Threads == DefaultThreads && r.bruteMod {
+		r.Threads = 1000
+	}
+
 	pkg.DefaultStatistor = pkg.Statistor{
 		Word:      opt.Word,
 		WordCount: len(r.Wordlist),
@@ -379,13 +367,12 @@ func (opt *Option) NewRunner() (*Runner, error) {
 	var express string
 	if opt.Recursive != "current.IsDir()" && opt.Depth != 0 {
 		// recursion stays off by default unless a non-default recursive expression is given
-		pool.MaxRecursion = 1
+		opt.Depth = 1
 		express = opt.Recursive
 	}

 	if opt.Depth != 0 {
 		// a manually set depth takes priority over the default
-		pool.MaxRecursion = opt.Depth
 		express = opt.Recursive
 	}

@@ -418,6 +405,10 @@ func (opt *Option) NewRunner() (*Runner, error) {
 		r.Probes = strings.Split(opt.OutputProbe, ",")
 	}

+	if !opt.Quiet {
+		fmt.Println(opt.PrintConfig(r))
+	}
+
 	// init output file
 	if opt.OutputFile != "" {
 		r.OutputFile, err = files.NewFile(opt.OutputFile, false, false, true)

@@ -431,18 +422,6 @@ func (opt *Option) NewRunner() (*Runner, error) {
 		}
 	}

-	if opt.FuzzyFile != "" {
-		r.FuzzyFile, err = files.NewFile(opt.FuzzyFile, false, false, true)
-		if err != nil {
-			return nil, err
-		}
-	} else if opt.AutoFile {
-		r.FuzzyFile, err = files.NewFile("fuzzy.json", false, false, true)
-		if err != nil {
-			return nil, err
-		}
-	}
-
 	if opt.DumpFile != "" {
 		r.DumpFile, err = files.NewFile(opt.DumpFile, false, false, true)
 		if err != nil {
@@ -456,14 +435,13 @@ func (opt *Option) NewRunner() (*Runner, error) {
 	}
 	if opt.ResumeFrom != "" {
 		r.StatFile, err = files.NewFile(opt.ResumeFrom, false, true, true)
-	} else {
-		r.StatFile, err = files.NewFile(safeFilename(r.Tasks.Name)+".stat", false, true, true)
 	}
 	if err != nil {
 		return nil, err
 	}

 	if !opt.NoStat {
+		r.StatFile, err = files.NewFile(pkg.SafeFilename(r.Tasks.Name)+".stat", false, true, true)
 		r.StatFile.Mod = os.O_WRONLY | os.O_CREATE
 		err = r.StatFile.Init()
 		if err != nil {
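Editor's note: the .stat file name is now derived via pkg.SafeFilename(r.Tasks.Name) and only written when opt.NoStat is unset. pkg.SafeFilename itself is not shown in this diff; the sketch below is only a guess at what such a helper normalizes, and both the name safeFilename and its replacement rules are assumptions.

```go
package main

import (
	"fmt"
	"strings"
)

// safeFilename is a hypothetical equivalent of pkg.SafeFilename: strip the
// scheme and replace path/port separators so a task name can be used as a
// file name on disk.
func safeFilename(name string) string {
	replacer := strings.NewReplacer(
		"http://", "",
		"https://", "",
		"/", "_",
		":", "_",
	)
	return replacer.Replace(name)
}

func main() {
	fmt.Println(safeFilename("https://example.com:8080/admin") + ".stat")
	// example.com_8080_admin.stat
}
```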
@ -473,55 +451,213 @@ func (opt *Option) NewRunner() (*Runner, error) {
|
|||||||
return r, nil
|
return r, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (opt *Option) PrintPlugin() {
|
func (opt *Option) PrintConfig(r *Runner) string {
|
||||||
var s strings.Builder
|
// 定义颜色样式
|
||||||
if opt.Crawl {
|
keyStyle := lipgloss.NewStyle().Bold(true).Foreground(lipgloss.Color("#FFFFFF")).Width(20) // Key 加粗并设定宽度
|
||||||
s.WriteString("crawl enable; ")
|
stringValueStyle := lipgloss.NewStyle().Foreground(lipgloss.Color("#FFA07A")) // 字符串样式
|
||||||
}
|
arrayValueStyle := lipgloss.NewStyle().Foreground(lipgloss.Color("#98FB98")) // 数组样式
|
||||||
if opt.Finger {
|
numberValueStyle := lipgloss.NewStyle().Foreground(lipgloss.Color("#ADD8E6")) // 数字样式
|
||||||
s.WriteString("active fingerprint enable; ")
|
panelWidth := 60 // 调整 panelWidth 使内容稍微靠左
|
||||||
}
|
padding := 2 // 减少 padding 以调整布局靠左
|
||||||
if opt.Bak {
|
|
||||||
s.WriteString("bak file enable; ")
|
// 分割线样式和终端宽度计算
|
||||||
}
|
divider := strings.Repeat("─", panelWidth) // 使用"─"符号生成更加连贯的分割线
|
||||||
if opt.Common {
|
|
||||||
s.WriteString("common file enable; ")
|
// 处理不同类型的值
|
||||||
}
|
formatValue := func(value interface{}) string {
|
||||||
if opt.Recon {
|
switch v := value.(type) {
|
||||||
s.WriteString("recon enable; ")
|
case string:
|
||||||
}
|
return stringValueStyle.Render(v)
|
||||||
if opt.FileBak {
|
case []string:
|
||||||
s.WriteString("file bak enable; ")
|
return arrayValueStyle.Render(fmt.Sprintf("%v", v))
|
||||||
|
case int, int64, float64:
|
||||||
|
return numberValueStyle.Render(fmt.Sprintf("%v", v))
|
||||||
|
default:
|
||||||
|
return stringValueStyle.Render(fmt.Sprintf("%v", v)) // 默认为字符串样式
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if opt.RetryCount > 0 {
|
// 处理互斥参数,选择输出有值的那一个
|
||||||
s.WriteString("Retry Count: " + strconv.Itoa(opt.RetryCount))
|
inputSource := ""
|
||||||
|
if opt.ResumeFrom != "" {
|
||||||
|
inputSource = lipgloss.JoinHorizontal(lipgloss.Left, "🌀 ", keyStyle.Render("ResumeFrom: "), formatValue(opt.ResumeFrom))
|
||||||
|
} else if len(opt.URL) > 0 {
|
||||||
|
inputSource = lipgloss.JoinHorizontal(lipgloss.Left, "🌐 ", keyStyle.Render("URL: "), formatValue(opt.URL))
|
||||||
|
} else if opt.URLFile != "" {
|
||||||
|
inputSource = lipgloss.JoinHorizontal(lipgloss.Left, "📂 ", keyStyle.Render("URLFile: "), formatValue(opt.URLFile))
|
||||||
|
} else if len(opt.CIDRs) > 0 {
|
||||||
|
inputSource = lipgloss.JoinHorizontal(lipgloss.Left, "📡 ", keyStyle.Render("CIDRs: "), formatValue(opt.CIDRs))
|
||||||
|
} else if opt.RawFile != "" {
|
||||||
|
inputSource = lipgloss.JoinHorizontal(lipgloss.Left, "📄 ", keyStyle.Render("RawFile: "), formatValue(opt.RawFile))
|
||||||
}
|
}
|
||||||
if s.Len() > 0 {
|
|
||||||
logs.Log.Important(s.String())
|
// Input Options
|
||||||
|
inputOptions := lipgloss.JoinVertical(lipgloss.Left,
|
||||||
|
inputSource, // 互斥量处理
|
||||||
|
|
||||||
|
// PortRange 展示
|
||||||
|
lipgloss.JoinHorizontal(lipgloss.Left, "🔢 ", keyStyle.Render("PortRange: "), formatValue(opt.PortRange)),
|
||||||
|
|
||||||
|
// Dictionaries 展示
|
||||||
|
lipgloss.JoinHorizontal(lipgloss.Left, "📚 ", keyStyle.Render("Dictionaries: "), formatValue(opt.Dictionaries)),
|
||||||
|
|
||||||
|
// Word, Rules, FilterRule 展开为单独的行
|
||||||
|
lipgloss.JoinVertical(lipgloss.Left,
|
||||||
|
lipgloss.JoinHorizontal(lipgloss.Left, "💡 ", keyStyle.Render("Word: "), formatValue(r.Word)),
|
||||||
|
lipgloss.JoinHorizontal(lipgloss.Left, "📜 ", keyStyle.Render("Rules: "), formatValue(opt.Rules)),
|
||||||
|
lipgloss.JoinHorizontal(lipgloss.Left, "🔍 ", keyStyle.Render("FilterRule: "), formatValue(opt.FilterRule)),
|
||||||
|
),
|
||||||
|
|
||||||
|
// AppendRule 和 AppendWords 展开为单独的行
|
||||||
|
lipgloss.JoinVertical(lipgloss.Left,
|
||||||
|
lipgloss.JoinHorizontal(lipgloss.Left, "🔧 ", keyStyle.Render("AppendRule: "), formatValue(r.AppendRule)),
|
||||||
|
lipgloss.JoinHorizontal(lipgloss.Left, "🧩 ", keyStyle.Render("AppendWords: "), formatValue(len(r.AppendWords))),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
// Output Options
|
||||||
|
outputOptions := lipgloss.JoinVertical(lipgloss.Left,
|
||||||
|
lipgloss.JoinHorizontal(lipgloss.Left, "📊 ", keyStyle.Render("Match: "), formatValue(opt.Match)),
|
||||||
|
lipgloss.JoinHorizontal(lipgloss.Left, "⚙️ ", keyStyle.Render("Filter: "), formatValue(opt.Filter)),
|
||||||
|
)
|
||||||
|
|
||||||
|
// Plugin Options
|
||||||
|
pluginValues := []string{}
|
||||||
|
if opt.ActivePlugin {
|
||||||
|
pluginValues = append(pluginValues, "active")
|
||||||
}
|
}
|
||||||
|
if opt.ReconPlugin {
|
||||||
|
pluginValues = append(pluginValues, "recon")
|
||||||
|
}
|
||||||
|
if opt.BakPlugin {
|
||||||
|
pluginValues = append(pluginValues, "bak")
|
||||||
|
}
|
||||||
|
if opt.FuzzuliPlugin {
|
||||||
|
pluginValues = append(pluginValues, "fuzzuli")
|
||||||
|
}
|
||||||
|
if opt.CommonPlugin {
|
||||||
|
pluginValues = append(pluginValues, "common")
|
||||||
|
}
|
||||||
|
if opt.CrawlPlugin {
|
||||||
|
pluginValues = append(pluginValues, "crawl")
|
||||||
|
}
|
||||||
|
|
||||||
|
pluginOptions := lipgloss.JoinVertical(lipgloss.Left,
|
||||||
|
lipgloss.JoinHorizontal(lipgloss.Left, "🔎 ", keyStyle.Render("Extracts: "), formatValue(opt.Extracts)),
|
||||||
|
lipgloss.JoinHorizontal(lipgloss.Left, "🔌 ", keyStyle.Render("Plugins: "), formatValue(strings.Join(pluginValues, ", "))),
|
||||||
|
)
|
||||||
|
|
||||||
|
// Mode Options
|
||||||
|
modeOptions := lipgloss.JoinVertical(lipgloss.Left,
|
||||||
|
lipgloss.JoinHorizontal(lipgloss.Left, "🛑 ", keyStyle.Render("BlackStatus: "), formatValue(pkg.BlackStatus)),
|
||||||
|
lipgloss.JoinHorizontal(lipgloss.Left, "✅ ", keyStyle.Render("WhiteStatus: "), formatValue(pkg.WhiteStatus)),
|
||||||
|
lipgloss.JoinHorizontal(lipgloss.Left, "🔄 ", keyStyle.Render("FuzzyStatus: "), formatValue(pkg.FuzzyStatus)),
|
||||||
|
lipgloss.JoinHorizontal(lipgloss.Left, "🔒 ", keyStyle.Render("UniqueStatus: "), formatValue(pkg.UniqueStatus)),
|
||||||
|
lipgloss.JoinHorizontal(lipgloss.Left, "🔑 ", keyStyle.Render("Unique: "), formatValue(opt.Unique)),
|
||||||
|
)
|
||||||
|
|
||||||
|
// Misc Options
|
||||||
|
miscOptions := lipgloss.JoinVertical(lipgloss.Left,
|
||||||
|
lipgloss.JoinHorizontal(lipgloss.Left, "⏱ ", keyStyle.Render("Timeout: "), formatValue(opt.Timeout)),
|
||||||
|
lipgloss.JoinHorizontal(lipgloss.Left, "📈 ", keyStyle.Render("PoolSize: "), formatValue(opt.PoolSize)),
|
||||||
|
lipgloss.JoinHorizontal(lipgloss.Left, "🧵 ", keyStyle.Render("Threads: "), formatValue(opt.Threads)),
|
||||||
|
lipgloss.JoinHorizontal(lipgloss.Left, "🌍 ", keyStyle.Render("Proxies: "), formatValue(opt.Proxies)),
|
||||||
|
)
|
||||||
|
|
||||||
|
// 将所有内容拼接在一起
|
||||||
|
content := lipgloss.JoinVertical(lipgloss.Left,
|
||||||
|
inputOptions,
|
||||||
|
outputOptions,
|
||||||
|
pluginOptions,
|
||||||
|
modeOptions,
|
||||||
|
miscOptions,
|
||||||
|
)
|
||||||
|
|
||||||
|
// 使用正确的方式添加 padding,并居中显示内容
|
||||||
|
contentWithPadding := lipgloss.NewStyle().PaddingLeft(padding).Render(content)
|
||||||
|
|
||||||
|
// 使用 Place 方法来将整个内容居中显示
|
||||||
|
return lipgloss.Place(panelWidth+padding*2, 0, lipgloss.Center, lipgloss.Center,
|
||||||
|
lipgloss.JoinVertical(lipgloss.Center,
|
||||||
|
divider, // 顶部分割线
|
||||||
|
contentWithPadding,
|
||||||
|
divider, // 底部分割线
|
||||||
|
),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (opt *Option) BuildPlugin(r *Runner) error {
|
||||||
|
// brute only
|
||||||
|
if opt.Advance {
|
||||||
|
opt.CrawlPlugin = true
|
||||||
|
opt.Finger = true
|
||||||
|
opt.BakPlugin = true
|
||||||
|
opt.FuzzuliPlugin = true
|
||||||
|
opt.CommonPlugin = true
|
||||||
|
opt.ActivePlugin = true
|
||||||
|
opt.ReconPlugin = true
|
||||||
|
}
|
||||||
|
|
||||||
|
if opt.ReconPlugin {
|
||||||
|
pkg.Extractors["recon"] = pkg.ExtractRegexps["pentest"]
|
||||||
|
}
|
||||||
|
|
||||||
|
if opt.Finger {
|
||||||
|
pkg.EnableAllFingerEngine = true
|
||||||
|
}
|
||||||
|
|
||||||
|
if opt.BakPlugin {
|
||||||
|
r.bruteMod = true
|
||||||
|
opt.AppendRule = append(opt.AppendRule, "filebak")
|
||||||
|
r.AppendWords = append(r.AppendWords, pkg.GetPresetWordList([]string{"bak_file"})...)
|
||||||
|
}
|
||||||
|
|
||||||
|
if opt.CommonPlugin {
|
||||||
|
r.bruteMod = true
|
||||||
|
r.AppendWords = append(r.AppendWords, pkg.Dicts["common"]...)
|
||||||
|
r.AppendWords = append(r.AppendWords, pkg.Dicts["log"]...)
|
||||||
|
}
|
||||||
|
|
||||||
|
if opt.ActivePlugin {
|
||||||
|
r.bruteMod = true
|
||||||
|
r.AppendWords = append(r.AppendWords, pkg.ActivePath...)
|
||||||
|
}
|
||||||
|
|
||||||
|
if opt.CrawlPlugin {
|
||||||
|
r.bruteMod = true
|
||||||
|
}
|
||||||
|
|
||||||
|
if r.bruteMod {
|
||||||
|
logs.Log.Important("enabling brute mod, because of enabled brute plugin")
|
||||||
|
}
|
||||||
|
|
||||||
|
if opt.NoScope {
|
||||||
|
r.Scope = []string{"*"}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
 func (opt *Option) BuildWords(r *Runner) error {
 	var dicts [][]string
 	var err error
 	if opt.DefaultDict {
-		dicts = append(dicts, pkg.LoadDefaultDict())
+		dicts = append(dicts, pkg.Dicts["default"])
 		logs.Log.Info("use default dictionary: https://github.com/maurosoria/dirsearch/blob/master/db/dicc.txt")
 	}
 	for i, f := range opt.Dictionaries {
-		dict, err := loadFileToSlice(f)
+		dict, err := pkg.LoadFileToSlice(f)
 		if err != nil {
 			return err
 		}
 		dicts = append(dicts, dict)
 		if opt.ResumeFrom != "" {
-			dictCache[f] = dicts[i]
+			pkg.Dicts[f] = dicts[i]
 		}

-		logs.Log.Logf(pkg.LogVerbose, "Loaded %d word from %s", len(dicts[i]), f)
+		logs.Log.Logf(pkg.LogVerbose, "Loaded %d word from %s", len(dict), f)
 	}
-	if len(dicts) == 0 {
+
+	if len(dicts) == 0 && opt.Word == "" && len(opt.Rules) == 0 && len(opt.AppendRule) == 0 {
 		r.IsCheck = true
 	}

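Editor's note: loadFileToSlice becomes the exported pkg.LoadFileToSlice and loaded dictionaries are cached in pkg.Dicts. The helper's body is not part of this diff; below is a minimal line-oriented loader under the usual assumptions (trim whitespace, skip empty lines). The function name and exact behavior are assumptions, not the project's code.

```go
package main

import (
	"bufio"
	"fmt"
	"os"
	"strings"
)

// loadFileToSlice is a hypothetical sketch of a pkg.LoadFileToSlice-style
// helper: read a wordlist file and return its non-empty, trimmed lines.
func loadFileToSlice(filename string) ([]string, error) {
	f, err := os.Open(filename)
	if err != nil {
		return nil, err
	}
	defer f.Close()

	var words []string
	scanner := bufio.NewScanner(f)
	for scanner.Scan() {
		line := strings.TrimSpace(scanner.Text())
		if line != "" {
			words = append(words, line)
		}
	}
	return words, scanner.Err()
}

func main() {
	words, err := loadFileToSlice("dict.txt") // placeholder path
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("loaded %d words\n", len(words))
}
```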
@ -562,7 +698,7 @@ func (opt *Option) BuildWords(r *Runner) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if len(opt.Rules) != 0 {
|
if len(opt.Rules) != 0 {
|
||||||
rules, err := loadRuleAndCombine(opt.Rules)
|
rules, err := pkg.LoadRuleAndCombine(opt.Rules)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@ -581,7 +717,7 @@ func (opt *Option) BuildWords(r *Runner) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if len(opt.AppendRule) != 0 {
|
if len(opt.AppendRule) != 0 {
|
||||||
content, err := loadRuleAndCombine(opt.AppendRule)
|
content, err := pkg.LoadRuleAndCombine(opt.AppendRule)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@@ -589,37 +725,23 @@ func (opt *Option) BuildWords(r *Runner) error {
 	}

 	if len(opt.AppendFile) != 0 {
-		var bs bytes.Buffer
+		var lines []string
 		for _, f := range opt.AppendFile {
-			content, err := ioutil.ReadFile(f)
+			dict, err := pkg.LoadFileToSlice(f)
 			if err != nil {
 				return err
 			}
-			bs.Write(bytes.TrimSpace(content))
-			bs.WriteString("\n")
-		}
-		lines := strings.Split(bs.String(), "\n")
-		for i, line := range lines {
-			lines[i] = strings.TrimSpace(line)
+			lines = append(lines, dict...)
 		}
 		r.AppendWords = append(r.AppendWords, lines...)
 	}

 	// %EXT% placeholder handling, similar to dirsearch
 	if opt.Extensions != "" {
-		r.AppendFunction(func(s string) []string {
-			exts := strings.Split(opt.Extensions, ",")
-			ss := make([]string, len(exts))
-			for i, e := range exts {
-				if strings.Contains(s, "%EXT%") {
-					ss[i] = strings.Replace(s, "%EXT%", e, -1)
-				}
-			}
-			return ss
-		})
+		r.AppendFunction(pkg.ParseEXTPlaceholderFunc(strings.Split(opt.Extensions, ",")))
 	} else {
 		r.AppendFunction(func(s string) []string {
-			if strings.Contains(s, "%EXT%") {
+			if strings.Contains(s, pkg.EXTChar) {
 				return nil
 			}
 			return []string{s}
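Editor's note: the inline %EXT% expansion above is replaced by pkg.ParseEXTPlaceholderFunc. Judging from the code it replaces, the decorator expands each word containing the %EXT% placeholder once per configured extension. The sketch below is loosely reconstructed from that removed inline version; passing non-placeholder words through unchanged is an assumption (the original relied on the else-branch decorator for that case), and the names here are illustrative only.

```go
package main

import (
	"fmt"
	"strings"
)

const extChar = "%EXT%" // the diff refers to this constant as pkg.EXTChar

// parseEXTPlaceholderFunc mirrors the removed inline decorator: a word that
// contains %EXT% is expanded once per extension; words without the
// placeholder are passed through unchanged (an assumption, see note above).
func parseEXTPlaceholderFunc(exts []string) func(string) []string {
	return func(s string) []string {
		if !strings.Contains(s, extChar) {
			return []string{s}
		}
		ss := make([]string, 0, len(exts))
		for _, e := range exts {
			ss = append(ss, strings.ReplaceAll(s, extChar, e))
		}
		return ss
	}
}

func main() {
	fn := parseEXTPlaceholderFunc([]string{"php", "aspx"})
	fmt.Println(fn("index.%EXT%")) // [index.php index.aspx]
	fmt.Println(fn("robots.txt"))  // [robots.txt]
}
```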
@ -627,16 +749,16 @@ func (opt *Option) BuildWords(r *Runner) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if opt.Uppercase {
|
if opt.Uppercase {
|
||||||
r.AppendFunction(wrapWordsFunc(strings.ToUpper))
|
r.AppendFunction(pkg.WrapWordsFunc(strings.ToUpper))
|
||||||
}
|
}
|
||||||
if opt.Lowercase {
|
if opt.Lowercase {
|
||||||
r.AppendFunction(wrapWordsFunc(strings.ToLower))
|
r.AppendFunction(pkg.WrapWordsFunc(strings.ToLower))
|
||||||
}
|
}
|
||||||
|
|
||||||
if opt.RemoveExtensions != "" {
|
if opt.RemoveExtensions != "" {
|
||||||
rexts := strings.Split(opt.ExcludeExtensions, ",")
|
rexts := strings.Split(opt.ExcludeExtensions, ",")
|
||||||
r.AppendFunction(func(s string) []string {
|
r.AppendFunction(func(s string) []string {
|
||||||
if ext := parseExtension(s); iutils.StringsContains(rexts, ext) {
|
if ext := pkg.ParseExtension(s); iutils.StringsContains(rexts, ext) {
|
||||||
return []string{strings.TrimSuffix(s, "."+ext)}
|
return []string{strings.TrimSuffix(s, "."+ext)}
|
||||||
}
|
}
|
||||||
return []string{s}
|
return []string{s}
|
||||||
@ -646,7 +768,7 @@ func (opt *Option) BuildWords(r *Runner) error {
|
|||||||
if opt.ExcludeExtensions != "" {
|
if opt.ExcludeExtensions != "" {
|
||||||
exexts := strings.Split(opt.ExcludeExtensions, ",")
|
exexts := strings.Split(opt.ExcludeExtensions, ",")
|
||||||
r.AppendFunction(func(s string) []string {
|
r.AppendFunction(func(s string) []string {
|
||||||
if ext := parseExtension(s); iutils.StringsContains(exexts, ext) {
|
if ext := pkg.ParseExtension(s); iutils.StringsContains(exexts, ext) {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
return []string{s}
|
return []string{s}
|
||||||
@ -662,13 +784,6 @@ func (opt *Option) BuildWords(r *Runner) error {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
// default skip function, skip %EXT%
|
|
||||||
r.AppendFunction(func(s string) []string {
|
|
||||||
if strings.Contains(s, "%EXT%") {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
return []string{s}
|
|
||||||
})
|
|
||||||
if len(opt.Skips) > 0 {
|
if len(opt.Skips) > 0 {
|
||||||
r.AppendFunction(func(s string) []string {
|
r.AppendFunction(func(s string) []string {
|
||||||
for _, skip := range opt.Skips {
|
for _, skip := range opt.Skips {
|
||||||
@ -680,7 +795,6 @@ func (opt *Option) BuildWords(r *Runner) error {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
logs.Log.Logf(pkg.LogVerbose, "Loaded %d dictionaries and %d decorators", len(opt.Dictionaries), len(r.Fns))
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
@ -6,15 +6,17 @@ import (
|
|||||||
"fmt"
|
"fmt"
|
||||||
"github.com/chainreactors/logs"
|
"github.com/chainreactors/logs"
|
||||||
"github.com/chainreactors/parsers"
|
"github.com/chainreactors/parsers"
|
||||||
"github.com/chainreactors/spray/internal/ihttp"
|
"github.com/chainreactors/spray/core/baseline"
|
||||||
|
"github.com/chainreactors/spray/core/ihttp"
|
||||||
"github.com/chainreactors/spray/pkg"
|
"github.com/chainreactors/spray/pkg"
|
||||||
"github.com/chainreactors/utils/iutils"
|
"github.com/chainreactors/utils/iutils"
|
||||||
"github.com/chainreactors/words"
|
"github.com/chainreactors/words/rule"
|
||||||
"github.com/panjf2000/ants/v2"
|
"github.com/panjf2000/ants/v2"
|
||||||
"github.com/valyala/fasthttp"
|
"github.com/valyala/fasthttp"
|
||||||
"golang.org/x/time/rate"
|
"golang.org/x/time/rate"
|
||||||
"math/rand"
|
"math/rand"
|
||||||
"net/url"
|
"net/url"
|
||||||
|
"path"
|
||||||
"strings"
|
"strings"
|
||||||
"sync"
|
"sync"
|
||||||
"sync/atomic"
|
"sync/atomic"
|
||||||
@@ -22,11 +24,9 @@ import (
 )

 var (
-	MaxRedirect     = 3
-	MaxCrawl        = 3
-	MaxRecursion    = 0
 	EnableAllFuzzy  = false
 	EnableAllUnique = false
+	//AllowHostModSource = []parsers.SpraySource{parsers.WordSource, parsers.CheckSource, parsers.InitIndexSource, parsers.InitRandomSource}
 )

 func NewBrutePool(ctx context.Context, config *Config) (*BrutePool, error) {
@@ -43,15 +43,15 @@ func NewBrutePool(ctx context.Context, config *Config) (*BrutePool, error) {
 			ctx:    pctx,
 			Cancel: cancel,
 			client: ihttp.NewClient(&ihttp.ClientConfig{
 				Thread:      config.Thread,
 				Type:        config.ClientType,
-				Timeout:     time.Duration(config.Timeout) * time.Second,
-				ProxyAddr:   config.ProxyAddr,
+				Timeout:     config.Timeout,
+				ProxyClient: config.ProxyClient,
 			}),
 			additionCh: make(chan *Unit, config.Thread),
 			closeCh:    make(chan struct{}),
-			processCh:  make(chan *pkg.Baseline, config.Thread),
-			wg:         sync.WaitGroup{},
+			processCh:  make(chan *baseline.Baseline, config.Thread),
+			wg:         &sync.WaitGroup{},
 		},
 		base:  u.Scheme + "://" + u.Host,
 		isDir: strings.HasSuffix(u.Path, "/"),
@ -106,46 +106,30 @@ type BrutePool struct {
|
|||||||
initwg sync.WaitGroup // 初始化用, 之后改成锁
|
initwg sync.WaitGroup // 初始化用, 之后改成锁
|
||||||
}
|
}
|
||||||
|
|
||||||
func (pool *BrutePool) checkRedirect(redirectURL string) bool {
|
|
||||||
if pool.random.RedirectURL == "" {
|
|
||||||
// 如果random的redirectURL为空, 此时该项
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
if redirectURL == pool.random.RedirectURL {
|
|
||||||
// 相同的RedirectURL将被认为是无效数据
|
|
||||||
return false
|
|
||||||
} else {
|
|
||||||
// path为3xx, 且与baseline中的RedirectURL不同时, 为有效数据
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (pool *BrutePool) genReq(mod SprayMod, s string) (*ihttp.Request, error) {
|
|
||||||
if mod == HostSpray {
|
|
||||||
return ihttp.BuildHostRequest(pool.ClientType, pool.BaseURL, s)
|
|
||||||
} else if mod == PathSpray {
|
|
||||||
return ihttp.BuildPathRequest(pool.ClientType, pool.base, s, pool.Method)
|
|
||||||
}
|
|
||||||
return nil, fmt.Errorf("unknown mod")
|
|
||||||
}
|
|
||||||
|
|
||||||
func (pool *BrutePool) Init() error {
|
func (pool *BrutePool) Init() error {
|
||||||
pool.initwg.Add(2)
|
pool.initwg.Add(2)
|
||||||
if pool.Index != "/" {
|
if pool.Index != "/" {
|
||||||
logs.Log.Logf(pkg.LogVerbose, "custom index url: %s", pkg.BaseURL(pool.url)+pkg.FormatURL(pkg.BaseURL(pool.url), pool.Index))
|
logs.Log.Logf(pkg.LogVerbose, "custom index url: %s", pkg.BaseURL(pool.url)+pkg.FormatURL(pkg.BaseURL(pool.url), pool.Index))
|
||||||
pool.reqPool.Invoke(newUnit(pool.Index, parsers.InitIndexSource))
|
pool.reqPool.Invoke(&Unit{path: pool.Index, source: parsers.InitIndexSource})
|
||||||
//pool.urls[dir(pool.Index)] = struct{}{}
|
//pool.urls[dir(pool.Index)] = struct{}{}
|
||||||
} else {
|
} else {
|
||||||
pool.reqPool.Invoke(newUnit(pool.url.Path, parsers.InitIndexSource))
|
pool.reqPool.Invoke(&Unit{path: pool.url.Path, source: parsers.InitIndexSource})
|
||||||
//pool.urls[dir(pool.url.Path)] = struct{}{}
|
//pool.urls[dir(pool.url.Path)] = struct{}{}
|
||||||
}
|
}
|
||||||
|
|
||||||
if pool.Random != "" {
|
if pool.Random != "" {
|
||||||
logs.Log.Logf(pkg.LogVerbose, "custom random url: %s", pkg.BaseURL(pool.url)+pkg.FormatURL(pkg.BaseURL(pool.url), pool.Random))
|
logs.Log.Logf(pkg.LogVerbose, "custom random url: %s", pkg.BaseURL(pool.url)+pkg.FormatURL(pkg.BaseURL(pool.url), pool.Random))
|
||||||
pool.reqPool.Invoke(newUnit(pool.Random, parsers.InitRandomSource))
|
if pool.Mod == PathSpray {
|
||||||
|
pool.reqPool.Invoke(&Unit{path: pool.Random, source: parsers.InitRandomSource})
|
||||||
|
} else {
|
||||||
|
pool.reqPool.Invoke(&Unit{host: pool.Random, source: parsers.InitRandomSource})
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
pool.reqPool.Invoke(newUnit(pool.safePath(pkg.RandPath()), parsers.InitRandomSource))
|
if pool.Mod == PathSpray {
|
||||||
|
pool.reqPool.Invoke(&Unit{path: pool.safePath(pkg.RandPath()), source: parsers.InitRandomSource})
|
||||||
|
} else {
|
||||||
|
pool.reqPool.Invoke(&Unit{host: pkg.RandHost(), source: parsers.InitRandomSource})
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pool.initwg.Wait()
|
pool.initwg.Wait()
|
||||||
@ -180,22 +164,6 @@ func (pool *BrutePool) Init() error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (pool *BrutePool) Upgrade(bl *pkg.Baseline) error {
|
|
||||||
rurl, err := url.Parse(bl.RedirectURL)
|
|
||||||
if err == nil && rurl.Hostname() == bl.Url.Hostname() && bl.Url.Scheme == "http" && rurl.Scheme == "https" {
|
|
||||||
logs.Log.Infof("baseurl %s upgrade http to https, reinit", pool.BaseURL)
|
|
||||||
pool.base = strings.Replace(pool.BaseURL, "http", "https", 1)
|
|
||||||
pool.url.Scheme = "https"
|
|
||||||
// 重新初始化
|
|
||||||
err = pool.Init()
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (pool *BrutePool) Run(offset, limit int) {
|
func (pool *BrutePool) Run(offset, limit int) {
|
||||||
pool.Worder.Run()
|
pool.Worder.Run()
|
||||||
if pool.Active {
|
if pool.Active {
|
||||||
@ -208,6 +176,11 @@ func (pool *BrutePool) Run(offset, limit int) {
|
|||||||
go pool.doBak()
|
go pool.doBak()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if pool.Fuzzuli {
|
||||||
|
pool.wg.Add(1)
|
||||||
|
go pool.doFuzzuli()
|
||||||
|
}
|
||||||
|
|
||||||
if pool.Common {
|
if pool.Common {
|
||||||
pool.wg.Add(1)
|
pool.wg.Add(1)
|
||||||
go pool.doCommonFile()
|
go pool.doCommonFile()
|
||||||
@ -229,16 +202,12 @@ func (pool *BrutePool) Run(offset, limit int) {
|
|||||||
Loop:
|
Loop:
|
||||||
for {
|
for {
|
||||||
select {
|
select {
|
||||||
case w, ok := <-pool.Worder.C:
|
case w, ok := <-pool.Worder.Output:
|
||||||
if !ok {
|
if !ok {
|
||||||
done = true
|
done = true
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
pool.Statistor.End++
|
pool.Statistor.End++
|
||||||
if w == "" {
|
|
||||||
pool.Statistor.Skipped++
|
|
||||||
pool.Bar.Done()
|
|
||||||
}
|
|
||||||
|
|
||||||
pool.wordOffset++
|
pool.wordOffset++
|
||||||
if pool.wordOffset < offset {
|
if pool.wordOffset < offset {
|
||||||
@ -250,20 +219,26 @@ Loop:
|
|||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if w == "" {
|
||||||
|
pool.Statistor.Skipped++
|
||||||
|
pool.Bar.Done()
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
pool.wg.Add(1)
|
pool.wg.Add(1)
|
||||||
if pool.Mod == HostSpray {
|
if pool.Mod == HostSpray {
|
||||||
pool.reqPool.Invoke(newUnitWithNumber(w, parsers.WordSource, pool.wordOffset))
|
pool.reqPool.Invoke(&Unit{host: w, source: parsers.WordSource, number: pool.wordOffset})
|
||||||
} else {
|
} else {
|
||||||
// 原样的目录拼接, 输入了几个"/"就是几个, 适配/有语义的中间件
|
// 原样的目录拼接, 输入了几个"/"就是几个, 适配/有语义的中间件
|
||||||
pool.reqPool.Invoke(newUnitWithNumber(pool.safePath(w), parsers.WordSource, pool.wordOffset))
|
pool.reqPool.Invoke(&Unit{path: pool.safePath(w), source: parsers.WordSource, number: pool.wordOffset})
|
||||||
}
|
}
|
||||||
|
|
||||||
case <-pool.checkCh:
|
case <-pool.checkCh:
|
||||||
pool.Statistor.CheckNumber++
|
pool.Statistor.CheckNumber++
|
||||||
if pool.Mod == HostSpray {
|
if pool.Mod == HostSpray {
|
||||||
pool.reqPool.Invoke(newUnitWithNumber(pkg.RandHost(), parsers.CheckSource, pool.wordOffset))
|
pool.reqPool.Invoke(&Unit{host: pkg.RandHost(), source: parsers.CheckSource, number: pool.wordOffset})
|
||||||
} else if pool.Mod == PathSpray {
|
} else if pool.Mod == PathSpray {
|
||||||
pool.reqPool.Invoke(newUnitWithNumber(pool.safePath(pkg.RandPath()), parsers.CheckSource, pool.wordOffset))
|
pool.reqPool.Invoke(&Unit{path: pool.safePath(pkg.RandPath()), source: parsers.CheckSource, number: pool.wordOffset})
|
||||||
}
|
}
|
||||||
case unit, ok := <-pool.additionCh:
|
case unit, ok := <-pool.additionCh:
|
||||||
if !ok || pool.closed {
|
if !ok || pool.closed {
|
||||||
@ -274,6 +249,7 @@ Loop:
|
|||||||
pool.wg.Done()
|
pool.wg.Done()
|
||||||
} else {
|
} else {
|
||||||
pool.urls.Store(unit.path, nil)
|
pool.urls.Store(unit.path, nil)
|
||||||
|
unit.path = pool.safePath(unit.path)
|
||||||
unit.number = pool.wordOffset
|
unit.number = pool.wordOffset
|
||||||
pool.reqPool.Invoke(unit)
|
pool.reqPool.Invoke(unit)
|
||||||
}
|
}
|
||||||
@ -281,8 +257,6 @@ Loop:
|
|||||||
break Loop
|
break Loop
|
||||||
case <-pool.ctx.Done():
|
case <-pool.ctx.Done():
|
||||||
break Loop
|
break Loop
|
||||||
case <-pool.ctx.Done():
|
|
||||||
break Loop
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
pool.closed = true
|
pool.closed = true
|
||||||
@ -299,35 +273,28 @@ func (pool *BrutePool) Invoke(v interface{}) {
|
|||||||
|
|
||||||
var req *ihttp.Request
|
var req *ihttp.Request
|
||||||
var err error
|
var err error
|
||||||
if unit.source == parsers.WordSource {
|
|
||||||
req, err = pool.genReq(pool.Mod, unit.path)
|
|
||||||
} else {
|
|
||||||
req, err = pool.genReq(PathSpray, unit.path)
|
|
||||||
}
|
|
||||||
|
|
||||||
|
req, err = ihttp.BuildRequest(pool.ctx, pool.ClientType, pool.base, unit.path, unit.host, pool.Method)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
logs.Log.Error(err.Error())
|
logs.Log.Error(err.Error())
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
req.SetHeaders(pool.Headers)
|
req.SetHeaders(pool.Headers, pool.RandomUserAgent)
|
||||||
if pool.RandomUserAgent {
|
|
||||||
req.SetHeader("User-Agent", pkg.RandomUA())
|
|
||||||
}
|
|
||||||
|
|
||||||
start := time.Now()
|
start := time.Now()
|
||||||
resp, reqerr := pool.client.Do(pool.ctx, req)
|
resp, reqerr := pool.client.Do(req)
|
||||||
if pool.ClientType == ihttp.FAST {
|
if pool.ClientType == ihttp.FAST {
|
||||||
defer fasthttp.ReleaseResponse(resp.FastResponse)
|
defer fasthttp.ReleaseResponse(resp.FastResponse)
|
||||||
defer fasthttp.ReleaseRequest(req.FastRequest)
|
defer fasthttp.ReleaseRequest(req.FastRequest)
|
||||||
}
|
}
|
||||||
|
|
||||||
// compare与各种错误处理
|
// compare与各种错误处理
|
||||||
var bl *pkg.Baseline
|
var bl *baseline.Baseline
|
||||||
if reqerr != nil && !errors.Is(reqerr, fasthttp.ErrBodyTooLarge) {
|
if reqerr != nil && !errors.Is(reqerr, fasthttp.ErrBodyTooLarge) {
|
||||||
atomic.AddInt32(&pool.failedCount, 1)
|
atomic.AddInt32(&pool.failedCount, 1)
|
||||||
atomic.AddInt32(&pool.Statistor.FailedNumber, 1)
|
atomic.AddInt32(&pool.Statistor.FailedNumber, 1)
|
||||||
bl = &pkg.Baseline{
|
bl = &baseline.Baseline{
|
||||||
SprayResult: &parsers.SprayResult{
|
SprayResult: &parsers.SprayResult{
|
||||||
UrlString: pool.base + unit.path,
|
UrlString: pool.base + unit.path,
|
||||||
ErrString: reqerr.Error(),
|
ErrString: reqerr.Error(),
|
||||||
@ -340,51 +307,54 @@ func (pool *BrutePool) Invoke(v interface{}) {
|
|||||||
} else { // 特定场景优化
|
} else { // 特定场景优化
|
||||||
if unit.source <= 3 || unit.source == parsers.CrawlSource || unit.source == parsers.CommonFileSource {
|
if unit.source <= 3 || unit.source == parsers.CrawlSource || unit.source == parsers.CommonFileSource {
|
||||||
// 一些高优先级的source, 将跳过PreCompare
|
// 一些高优先级的source, 将跳过PreCompare
|
||||||
bl = pkg.NewBaseline(req.URI(), req.Host(), resp)
|
bl = baseline.NewBaseline(req.URI(), req.Host(), resp)
|
||||||
} else if pool.MatchExpr != nil {
|
} else if pool.MatchExpr != nil {
|
||||||
// 如果自定义了match函数, 则所有数据送入tempch中
|
// 如果自定义了match函数, 则所有数据送入tempch中
|
||||||
bl = pkg.NewBaseline(req.URI(), req.Host(), resp)
|
bl = baseline.NewBaseline(req.URI(), req.Host(), resp)
|
||||||
} else if err = pool.PreCompare(resp); err == nil {
|
} else if err = pool.PreCompare(resp); err == nil {
|
||||||
// 通过预对比跳过一些无用数据, 减少性能消耗
|
// 通过预对比跳过一些无用数据, 减少性能消耗
|
||||||
bl = pkg.NewBaseline(req.URI(), req.Host(), resp)
|
bl = baseline.NewBaseline(req.URI(), req.Host(), resp)
|
||||||
} else {
|
} else {
|
||||||
bl = pkg.NewInvalidBaseline(req.URI(), req.Host(), resp, err.Error())
|
bl = baseline.NewInvalidBaseline(req.URI(), req.Host(), resp, err.Error())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// 手动处理重定向
|
// 手动处理重定向
|
||||||
if bl.IsValid && unit.source != parsers.CheckSource && bl.RedirectURL != "" {
|
if bl.IsValid && unit.source != parsers.CheckSource && bl.RedirectURL != "" {
|
||||||
//pool.wg.Add(1)
|
bl.SameRedirectDomain = pool.checkHost(bl.RedirectURL)
|
||||||
pool.doRedirect(bl, unit.depth)
|
pool.doRedirect(bl, unit.depth)
|
||||||
}
|
}
|
||||||
|
|
||||||
if !ihttp.CheckBodySize(int64(bl.BodyLength)) {
|
if !ihttp.CheckBodySize(int64(bl.BodyLength)) {
|
||||||
bl.ExceedLength = true
|
bl.ExceedLength = true
|
||||||
}
|
}
|
||||||
bl.Source = unit.source
|
unit.Update(bl)
|
||||||
bl.ReqDepth = unit.depth
|
|
||||||
bl.Number = unit.number
|
|
||||||
bl.Spended = time.Since(start).Milliseconds()
|
bl.Spended = time.Since(start).Milliseconds()
|
||||||
switch unit.source {
|
switch unit.source {
|
||||||
case parsers.InitRandomSource:
|
case parsers.InitRandomSource:
|
||||||
bl.Collect()
|
defer pool.initwg.Done()
|
||||||
pool.locker.Lock()
|
pool.locker.Lock()
|
||||||
pool.random = bl
|
pool.random = bl
|
||||||
pool.addFuzzyBaseline(bl)
|
|
||||||
pool.locker.Unlock()
|
pool.locker.Unlock()
|
||||||
pool.initwg.Done()
|
|
||||||
case parsers.InitIndexSource:
|
if !bl.IsValid {
|
||||||
|
return
|
||||||
|
}
|
||||||
bl.Collect()
|
bl.Collect()
|
||||||
|
pool.addFuzzyBaseline(bl)
|
||||||
|
|
||||||
|
case parsers.InitIndexSource:
|
||||||
|
defer pool.initwg.Done()
|
||||||
pool.locker.Lock()
|
pool.locker.Lock()
|
||||||
pool.index = bl
|
pool.index = bl
|
||||||
pool.locker.Unlock()
|
pool.locker.Unlock()
|
||||||
if bl.Status == 200 || (bl.Status/100) == 3 {
|
if !bl.IsValid {
|
||||||
// 保留index输出结果
|
return
|
||||||
pool.wg.Add(1)
|
|
||||||
pool.doCrawl(bl)
|
|
||||||
pool.putToOutput(bl)
|
|
||||||
}
|
}
|
||||||
pool.initwg.Done()
|
bl.Collect()
|
||||||
|
pool.doCrawl(bl)
|
||||||
|
pool.doAppend(bl)
|
||||||
|
pool.putToOutput(bl)
|
||||||
case parsers.CheckSource:
|
case parsers.CheckSource:
|
||||||
if bl.ErrString != "" {
|
if bl.ErrString != "" {
|
||||||
logs.Log.Warnf("[check.error] %s maybe ip had banned, break (%d/%d), error: %s", pool.BaseURL, pool.failedCount, pool.BreakThreshold, bl.ErrString)
|
logs.Log.Warnf("[check.error] %s maybe ip had banned, break (%d/%d), error: %s", pool.BaseURL, pool.failedCount, pool.BreakThreshold, bl.ErrString)
|
||||||
@ -426,14 +396,13 @@ func (pool *BrutePool) Invoke(v interface{}) {
|
|||||||
func (pool *BrutePool) NoScopeInvoke(v interface{}) {
|
func (pool *BrutePool) NoScopeInvoke(v interface{}) {
|
||||||
defer pool.wg.Done()
|
defer pool.wg.Done()
|
||||||
unit := v.(*Unit)
|
unit := v.(*Unit)
|
||||||
req, err := ihttp.BuildPathRequest(pool.ClientType, unit.path, "", pool.Method)
|
req, err := ihttp.BuildRequest(pool.ctx, pool.ClientType, unit.path, "", "", "GET")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
logs.Log.Error(err.Error())
|
logs.Log.Error(err.Error())
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
req.SetHeaders(pool.Headers)
|
req.SetHeaders(pool.Headers, pool.RandomUserAgent)
|
||||||
req.SetHeader("User-Agent", pkg.RandomUA())
|
resp, reqerr := pool.client.Do(req)
|
||||||
resp, reqerr := pool.client.Do(pool.ctx, req)
|
|
||||||
if pool.ClientType == ihttp.FAST {
|
if pool.ClientType == ihttp.FAST {
|
||||||
defer fasthttp.ReleaseResponse(resp.FastResponse)
|
defer fasthttp.ReleaseResponse(resp.FastResponse)
|
||||||
defer fasthttp.ReleaseRequest(req.FastRequest)
|
defer fasthttp.ReleaseRequest(req.FastRequest)
|
||||||
@ -443,7 +412,7 @@ func (pool *BrutePool) NoScopeInvoke(v interface{}) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
if resp.StatusCode() == 200 {
|
if resp.StatusCode() == 200 {
|
||||||
bl := pkg.NewBaseline(req.URI(), req.Host(), resp)
|
bl := baseline.NewBaseline(req.URI(), req.Host(), resp)
|
||||||
bl.Source = unit.source
|
bl.Source = unit.source
|
||||||
bl.ReqDepth = unit.depth
|
bl.ReqDepth = unit.depth
|
||||||
bl.Collect()
|
bl.Collect()
|
||||||
@ -497,8 +466,6 @@ func (pool *BrutePool) Handler() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if ok {
|
if ok {
|
||||||
pool.Statistor.FoundNumber++
|
|
||||||
|
|
||||||
// unique判断
|
// unique判断
|
||||||
if EnableAllUnique || iutils.IntsContains(pkg.UniqueStatus, bl.Status) {
|
if EnableAllUnique || iutils.IntsContains(pkg.UniqueStatus, bl.Status) {
|
||||||
if _, ok := pool.uniques[bl.Unique]; ok {
|
if _, ok := pool.uniques[bl.Unique]; ok {
|
||||||
@ -520,19 +487,15 @@ func (pool *BrutePool) Handler() {
|
|||||||
bl.IsValid = false
|
bl.IsValid = false
|
||||||
}
|
}
|
||||||
|
|
||||||
if bl.IsValid || bl.IsFuzzy {
|
if bl.IsValid || (bl.IsFuzzy && pool.Fuzzy) {
|
||||||
pool.wg.Add(2)
|
|
||||||
pool.doCrawl(bl)
|
pool.doCrawl(bl)
|
||||||
pool.doRule(bl)
|
pool.doAppend(bl)
|
||||||
if iutils.IntsContains(pkg.WhiteStatus, bl.Status) || iutils.IntsContains(pkg.UniqueStatus, bl.Status) {
|
|
||||||
pool.wg.Add(1)
|
|
||||||
pool.doAppendWords(bl)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// 如果要进行递归判断, 要满足 bl有效, mod为path-spray, 当前深度小于最大递归深度
|
// 如果要进行递归判断, 要满足 bl有效, mod为path-spray, 当前深度小于最大递归深度
|
||||||
if bl.IsValid {
|
if bl.IsValid {
|
||||||
if bl.RecuDepth < MaxRecursion {
|
pool.Statistor.FoundNumber++
|
||||||
|
if bl.RecuDepth < pool.MaxRecursionDepth {
|
||||||
if pkg.CompareWithExpr(pool.RecuExpr, params) {
|
if pkg.CompareWithExpr(pool.RecuExpr, params) {
|
||||||
bl.Recu = true
|
bl.Recu = true
|
||||||
}
|
}
|
||||||
@@ -549,9 +512,40 @@ func (pool *BrutePool) Handler() {
 	pool.analyzeDone = true
 }

+func (pool *BrutePool) checkRedirect(redirectURL string) bool {
+	if pool.random.RedirectURL == "" {
+		// if the random baseline has no RedirectURL, skip this check
+		return true
+	}
+
+	if redirectURL == pool.random.RedirectURL {
+		// a redirect identical to the random baseline's is treated as invalid data
+		return false
+	} else {
+		// a 3xx path whose redirect differs from the baseline's is treated as valid data
+		return true
+	}
+}
+
+func (pool *BrutePool) Upgrade(bl *baseline.Baseline) error {
+	rurl, err := url.Parse(bl.RedirectURL)
+	if err == nil && rurl.Hostname() == bl.Url.Hostname() && bl.Url.Scheme == "http" && rurl.Scheme == "https" {
+		logs.Log.Infof("baseurl %s upgrade http to https, reinit", pool.BaseURL)
+		pool.base = strings.Replace(pool.BaseURL, "http", "https", 1)
+		pool.url.Scheme = "https"
+		// re-initialize
+		err = pool.Init()
+		if err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
 func (pool *BrutePool) PreCompare(resp *ihttp.Response) error {
 	status := resp.StatusCode()
-	if iutils.IntsContains(pkg.WhiteStatus, status) {
+	if pkg.StatusContain(pkg.WhiteStatus, status) {
 		// return immediately for whitelisted status codes
 		return nil
 	}

@@ -559,11 +553,11 @@ func (pool *BrutePool) PreCompare(resp *ihttp.Response) error {
 	//	return pkg.ErrSameStatus
 	//}

-	if iutils.IntsContains(pkg.BlackStatus, status) {
+	if pkg.StatusContain(pkg.BlackStatus, status) {
 		return pkg.ErrBadStatus
 	}

-	if iutils.IntsContains(pkg.WAFStatus, status) {
+	if pkg.StatusContain(pkg.WAFStatus, status) {
 		return pkg.ErrWaf
 	}

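Editor's note: PreCompare now checks the white/black/WAF status lists through pkg.StatusContain instead of iutils.IntsContains. The new helper is not defined in this diff; at minimum it has to answer "is this status in the list", as in the minimal sketch below. Any richer semantics (ranges, wildcards) are not assumed, and the list values shown are examples only.

```go
package main

import "fmt"

// statusContain is a hypothetical minimal version of pkg.StatusContain:
// report whether status appears in the configured list.
func statusContain(list []int, status int) bool {
	for _, s := range list {
		if s == status {
			return true
		}
	}
	return false
}

func main() {
	waf := []int{493, 418} // example values, not spray's actual defaults
	fmt.Println(statusContain(waf, 418)) // true
	fmt.Println(statusContain(waf, 200)) // false
}
```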
@ -574,19 +568,41 @@ func (pool *BrutePool) PreCompare(resp *ihttp.Response) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (pool *BrutePool) BaseCompare(bl *pkg.Baseline) bool {
|
// same host return true
|
||||||
|
// diff host return false
|
||||||
|
func (pool *BrutePool) checkHost(u string) bool {
|
||||||
|
if v, err := url.Parse(u); err == nil {
|
||||||
|
if v.Host == "" {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
if v.Host == pool.url.Host {
|
||||||
|
return true
|
||||||
|
} else {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
func (pool *BrutePool) BaseCompare(bl *baseline.Baseline) bool {
|
||||||
if !bl.IsValid {
|
if !bl.IsValid {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
var status = -1
|
var status = -1
|
||||||
|
|
||||||
// 30x状态码的特殊处理
|
// 30x状态码的特殊处理
|
||||||
if bl.RedirectURL != "" && strings.HasSuffix(bl.RedirectURL, bl.Url.Path+"/") {
|
if bl.RedirectURL != "" {
|
||||||
bl.Reason = pkg.ErrFuzzyRedirect.Error()
|
if bl.SameRedirectDomain && strings.HasSuffix(bl.RedirectURL, bl.Url.Path+"/") {
|
||||||
pool.putToFuzzy(bl)
|
bl.Reason = pkg.ErrFuzzyRedirect.Error()
|
||||||
return false
|
return false
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// 使用与baseline相同状态码, 需要在fuzzystatus中提前配置
|
// 使用与baseline相同状态码, 需要在fuzzystatus中提前配置
|
||||||
base, ok := pool.baselines[bl.Status] // 挑选对应状态码的baseline进行compare
|
base, ok := pool.baselines[bl.Status] // 挑选对应状态码的baseline进行compare
|
||||||
|
if bl.IsBaseline {
|
||||||
|
ok = false
|
||||||
|
}
|
||||||
if !ok {
|
if !ok {
|
||||||
if pool.random.Status == bl.Status {
|
if pool.random.Status == bl.Status {
|
||||||
// 当other的状态码与base相同时, 会使用base
|
// 当other的状态码与base相同时, 会使用base
|
||||||
@ -629,117 +645,22 @@ func (pool *BrutePool) BaseCompare(bl *pkg.Baseline) bool {
|
|||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
|
|
||||||
func (pool *BrutePool) doCheck() {
|
func (pool *BrutePool) addFuzzyBaseline(bl *baseline.Baseline) {
|
||||||
if pool.failedCount > pool.BreakThreshold {
|
|
||||||
// 当报错次数超过上限是, 结束任务
|
|
||||||
pool.recover()
|
|
||||||
pool.Cancel()
|
|
||||||
pool.IsFailed = true
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if pool.Mod == HostSpray {
|
|
||||||
pool.checkCh <- struct{}{}
|
|
||||||
} else if pool.Mod == PathSpray {
|
|
||||||
pool.checkCh <- struct{}{}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (pool *BrutePool) doCrawl(bl *pkg.Baseline) {
|
|
||||||
if !pool.Crawl || bl.ReqDepth >= MaxCrawl {
|
|
||||||
pool.wg.Done()
|
|
||||||
return
|
|
||||||
}
|
|
||||||
bl.CollectURL()
|
|
||||||
if bl.URLs == nil {
|
|
||||||
pool.wg.Done()
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
pool.wg.Add(1)
|
|
||||||
pool.doScopeCrawl(bl)
|
|
||||||
|
|
||||||
go func() {
|
|
||||||
defer pool.wg.Done()
|
|
||||||
for _, u := range bl.URLs {
|
|
||||||
if u = pkg.FormatURL(bl.Url.Path, u); u == "" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
pool.addAddition(&Unit{
|
|
||||||
path: u,
|
|
||||||
source: parsers.CrawlSource,
|
|
||||||
depth: bl.ReqDepth + 1,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
func (pool *BrutePool) doScopeCrawl(bl *pkg.Baseline) {
|
|
||||||
if bl.ReqDepth >= MaxCrawl {
|
|
||||||
pool.wg.Done()
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
go func() {
|
|
||||||
defer pool.wg.Done()
|
|
||||||
for _, u := range bl.URLs {
|
|
||||||
if strings.HasPrefix(u, "http") {
|
|
||||||
if v, _ := url.Parse(u); v == nil || !pkg.MatchWithGlobs(v.Host, pool.Scope) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
pool.scopeLocker.Lock()
|
|
||||||
if _, ok := pool.scopeurls[u]; !ok {
|
|
||||||
pool.urls.Store(u, nil)
|
|
||||||
pool.wg.Add(1)
|
|
||||||
pool.scopePool.Invoke(&Unit{path: u, source: parsers.CrawlSource, depth: bl.ReqDepth + 1})
|
|
||||||
}
|
|
||||||
pool.scopeLocker.Unlock()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
}
|
|
||||||
|
|
||||||
func (pool *BrutePool) addFuzzyBaseline(bl *pkg.Baseline) {
|
|
||||||
if _, ok := pool.baselines[bl.Status]; !ok && (EnableAllFuzzy || iutils.IntsContains(pkg.FuzzyStatus, bl.Status)) {
|
if _, ok := pool.baselines[bl.Status]; !ok && (EnableAllFuzzy || iutils.IntsContains(pkg.FuzzyStatus, bl.Status)) {
|
||||||
|
bl.IsBaseline = true
|
||||||
bl.Collect()
|
bl.Collect()
|
||||||
pool.wg.Add(1)
|
|
||||||
pool.doCrawl(bl) // 非有效页面也可能存在一些特殊的url可以用来爬取
|
pool.doCrawl(bl) // 非有效页面也可能存在一些特殊的url可以用来爬取
|
||||||
pool.baselines[bl.Status] = bl
|
pool.baselines[bl.Status] = bl
|
||||||
logs.Log.Logf(pkg.LogVerbose, "[baseline.%dinit] %s", bl.Status, bl.Format([]string{"status", "length", "spend", "title", "frame", "redirect"}))
|
logs.Log.Logf(pkg.LogVerbose, "[baseline.%dinit] %s", bl.Status, bl.Format([]string{"status", "length", "spend", "title", "frame", "redirect"}))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (pool *BrutePool) doBak() {
|
func (pool *BrutePool) fallback() {
|
||||||
defer pool.wg.Done()
|
|
||||||
worder, err := words.NewWorderWithDsl("{?0}.{?@bak_ext}", [][]string{pkg.BakGenerator(pool.url.Host)}, nil)
|
|
||||||
if err != nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
worder.Run()
|
|
||||||
for w := range worder.C {
|
|
||||||
pool.addAddition(&Unit{
|
|
||||||
path: pool.dir + w,
|
|
||||||
source: parsers.BakSource,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
worder, err = words.NewWorderWithDsl("{?@bak_name}.{?@bak_ext}", nil, nil)
|
|
||||||
if err != nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
worder.Run()
|
|
||||||
for w := range worder.C {
|
|
||||||
pool.addAddition(&Unit{
|
|
||||||
path: pool.dir + w,
|
|
||||||
source: parsers.BakSource,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (pool *BrutePool) recover() {
|
|
||||||
logs.Log.Errorf("%s ,failed request exceeds the threshold , task will exit. Breakpoint %d", pool.BaseURL, pool.wordOffset)
|
logs.Log.Errorf("%s ,failed request exceeds the threshold , task will exit. Breakpoint %d", pool.BaseURL, pool.wordOffset)
|
||||||
for i, bl := range pool.FailedBaselines {
|
for i, bl := range pool.FailedBaselines {
|
||||||
|
if i > 5 {
|
||||||
|
break
|
||||||
|
}
|
||||||
logs.Log.Errorf("[failed.%d] %s", i, bl.String())
|
logs.Log.Errorf("[failed.%d] %s", i, bl.String())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -750,9 +671,10 @@ func (pool *BrutePool) Close() {
|
|||||||
time.Sleep(time.Duration(100) * time.Millisecond)
|
time.Sleep(time.Duration(100) * time.Millisecond)
|
||||||
}
|
}
|
||||||
close(pool.additionCh) // 关闭addition管道
|
close(pool.additionCh) // 关闭addition管道
|
||||||
close(pool.checkCh) // 关闭check管道
|
//close(pool.checkCh) // 关闭check管道
|
||||||
pool.Statistor.EndTime = time.Now().Unix()
|
pool.Statistor.EndTime = time.Now().Unix()
|
||||||
pool.Bar.Close()
|
pool.reqPool.Release()
|
||||||
|
pool.scopePool.Release()
|
||||||
}
|
}
|
||||||
|
|
||||||
func (pool *BrutePool) safePath(u string) string {
|
func (pool *BrutePool) safePath(u string) string {
|
||||||
@ -768,3 +690,213 @@ func (pool *BrutePool) resetFailed() {
|
|||||||
pool.failedCount = 1
|
pool.failedCount = 1
|
||||||
pool.FailedBaselines = nil
|
pool.FailedBaselines = nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (pool *BrutePool) doCheck() {
|
||||||
|
if pool.failedCount > pool.BreakThreshold {
|
||||||
|
// 当报错次数超过上限是, 结束任务
|
||||||
|
if pool.isFallback.Load() {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
pool.isFallback.Store(true)
|
||||||
|
pool.fallback()
|
||||||
|
pool.IsFailed = true
|
||||||
|
pool.Cancel()
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if pool.Mod == HostSpray {
|
||||||
|
pool.checkCh <- struct{}{}
|
||||||
|
} else if pool.Mod == PathSpray {
|
||||||
|
pool.checkCh <- struct{}{}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (pool *BrutePool) doRedirect(bl *baseline.Baseline, depth int) {
|
||||||
|
if depth >= pool.MaxRedirect {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
//if !bl.SameRedirectDomain {
|
||||||
|
// return // 不同域名的重定向不处理
|
||||||
|
//}
|
||||||
|
reURL := pkg.FormatURL(bl.Url.Path, bl.RedirectURL)
|
||||||
|
pool.wg.Add(1)
|
||||||
|
go func() {
|
||||||
|
defer pool.wg.Done()
|
||||||
|
pool.addAddition(&Unit{
|
||||||
|
path: reURL,
|
||||||
|
parent: bl.Number,
|
||||||
|
host: bl.Host,
|
||||||
|
source: parsers.RedirectSource,
|
||||||
|
from: bl.Source,
|
||||||
|
frontUrl: bl.UrlString,
|
||||||
|
depth: depth + 1,
|
||||||
|
})
|
||||||
|
}()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (pool *BrutePool) doCrawl(bl *baseline.Baseline) {
|
||||||
|
if !pool.Crawl || bl.ReqDepth >= pool.MaxCrawlDepth {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
bl.CollectURL()
|
||||||
|
if bl.URLs == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
pool.wg.Add(2)
|
||||||
|
pool.doScopeCrawl(bl)
|
||||||
|
|
||||||
|
go func() {
|
||||||
|
defer pool.wg.Done()
|
||||||
|
for _, u := range bl.URLs {
|
||||||
|
if u = pkg.FormatURL(bl.Url.Path, u); u == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
pool.addAddition(&Unit{
|
||||||
|
path: u,
|
||||||
|
parent: bl.Number,
|
||||||
|
host: bl.Host,
|
||||||
|
source: parsers.CrawlSource,
|
||||||
|
from: bl.Source,
|
||||||
|
depth: bl.ReqDepth + 1,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
func (pool *BrutePool) doScopeCrawl(bl *baseline.Baseline) {
|
||||||
|
if bl.ReqDepth >= pool.MaxCrawlDepth {
|
||||||
|
pool.wg.Done()
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
go func() {
|
||||||
|
defer pool.wg.Done()
|
||||||
|
for _, u := range bl.URLs {
|
||||||
|
if strings.HasPrefix(u, "http") {
|
||||||
|
if v, _ := url.Parse(u); v == nil || !pkg.MatchWithGlobs(v.Host, pool.Scope) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
pool.scopeLocker.Lock()
|
||||||
|
if _, ok := pool.scopeurls[u]; !ok {
|
||||||
|
pool.urls.Store(u, nil)
|
||||||
|
pool.wg.Add(1)
|
||||||
|
pool.scopePool.Invoke(&Unit{
|
||||||
|
path: u,
|
||||||
|
parent: bl.Number,
|
||||||
|
source: parsers.CrawlSource,
|
||||||
|
from: bl.Source,
|
||||||
|
depth: bl.ReqDepth + 1,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
pool.scopeLocker.Unlock()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (pool *BrutePool) doFuzzuli() {
|
||||||
|
defer pool.wg.Done()
|
||||||
|
if pool.Mod == HostSpray {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
for w := range NewBruteDSL(pool.Config, "{?0}.{?@bak_ext}", [][]string{pkg.BakGenerator(pool.url.Host)}).Output {
|
||||||
|
pool.addAddition(&Unit{
|
||||||
|
path: pool.dir + w,
|
||||||
|
source: parsers.BakSource,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (pool *BrutePool) doBak() {
|
||||||
|
defer pool.wg.Done()
|
||||||
|
if pool.Mod == HostSpray {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
for w := range NewBruteDSL(pool.Config, "{?@bak_name}.{?@bak_ext}", nil).Output {
|
||||||
|
pool.addAddition(&Unit{
|
||||||
|
path: pool.dir + w,
|
||||||
|
source: parsers.BakSource,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (pool *BrutePool) doAppend(bl *baseline.Baseline) {
|
||||||
|
pool.wg.Add(2)
|
||||||
|
pool.doAppendWords(bl)
|
||||||
|
pool.doAppendRule(bl)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (pool *BrutePool) doAppendRule(bl *baseline.Baseline) {
|
||||||
|
if pool.AppendRule == nil || bl.Source == parsers.AppendRuleSource || bl.ReqDepth >= pool.MaxAppendDepth {
|
||||||
|
pool.wg.Done()
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
go func() {
|
||||||
|
defer pool.wg.Done()
|
||||||
|
for u := range rule.RunAsStream(pool.AppendRule.Expressions, path.Base(bl.Path)) {
|
||||||
|
pool.addAddition(&Unit{
|
||||||
|
path: pkg.Dir(bl.Url.Path) + u,
|
||||||
|
parent: bl.Number,
|
||||||
|
host: bl.Host,
|
||||||
|
source: parsers.AppendRuleSource,
|
||||||
|
from: bl.Source,
|
||||||
|
depth: bl.ReqDepth + 1,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (pool *BrutePool) doAppendWords(bl *baseline.Baseline) {
|
||||||
|
if pool.AppendWords == nil || bl.Source == parsers.AppendSource || bl.Source == parsers.RuleSource || bl.ReqDepth >= pool.MaxAppendDepth {
|
||||||
|
// 防止自身递归
|
||||||
|
pool.wg.Done()
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
go func() {
|
||||||
|
defer pool.wg.Done()
|
||||||
|
|
||||||
|
for u := range NewBruteWords(pool.Config, pool.AppendWords).Output {
|
||||||
|
pool.addAddition(&Unit{
|
||||||
|
path: pkg.SafePath(bl.Path, u),
|
||||||
|
parent: bl.Number,
|
||||||
|
host: bl.Host,
|
||||||
|
source: parsers.AppendSource,
|
||||||
|
from: bl.Source,
|
||||||
|
depth: bl.RecuDepth + 1,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (pool *BrutePool) doActive() {
|
||||||
|
defer pool.wg.Done()
|
||||||
|
if pool.Mod == HostSpray {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
for _, u := range pkg.ActivePath {
|
||||||
|
pool.addAddition(&Unit{
|
||||||
|
path: pool.dir + u[1:],
|
||||||
|
source: parsers.FingerSource,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (pool *BrutePool) doCommonFile() {
|
||||||
|
defer pool.wg.Done()
|
||||||
|
if pool.Mod == HostSpray {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
for u := range NewBruteWords(pool.Config, append(pkg.Dicts["common"], pkg.Dicts["log"]...)).Output {
|
||||||
|
pool.addAddition(&Unit{
|
||||||
|
path: pool.dir + u,
|
||||||
|
source: parsers.CommonFileSource,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
@@ -4,7 +4,8 @@ import (
 	"context"
 	"github.com/chainreactors/logs"
 	"github.com/chainreactors/parsers"
-	"github.com/chainreactors/spray/internal/ihttp"
+	"github.com/chainreactors/spray/core/baseline"
+	"github.com/chainreactors/spray/core/ihttp"
 	"github.com/chainreactors/spray/pkg"
 	"github.com/panjf2000/ants/v2"
 	"net/url"
@@ -18,33 +19,34 @@ func NewCheckPool(ctx context.Context, config *Config) (*CheckPool, error) {
 	pctx, cancel := context.WithCancel(ctx)
 	config.ClientType = ihttp.STANDARD
 	pool := &CheckPool{
-		&BasePool{
+		BasePool: &BasePool{
 			Config:    config,
 			Statistor: pkg.NewStatistor(""),
 			ctx:       pctx,
 			Cancel:    cancel,
 			client: ihttp.NewClient(&ihttp.ClientConfig{
 				Thread: config.Thread,
 				Type:   config.ClientType,
-				Timeout:   time.Duration(config.Timeout) * time.Second,
-				ProxyAddr: config.ProxyAddr,
+				Timeout:     config.Timeout,
+				ProxyClient: config.ProxyClient,
 			}),
-			wg:         sync.WaitGroup{},
+			wg:         &sync.WaitGroup{},
 			additionCh: make(chan *Unit, 1024),
 			closeCh:    make(chan struct{}),
-			processCh:  make(chan *pkg.Baseline, config.Thread),
+			processCh:  make(chan *baseline.Baseline, config.Thread),
 		},
 	}
-	pool.Headers = map[string]string{"Connection": "close"}
+	pool.Headers.Set("Connection", "close")
 	p, _ := ants.NewPoolWithFunc(config.Thread, pool.Invoke)

-	pool.BasePool.Pool = p
+	pool.Pool = p
 	go pool.Handler()
 	return pool, nil
 }

 type CheckPool struct {
 	*BasePool
+	Pool *ants.PoolWithFunc
 }

 func (pool *CheckPool) Run(ctx context.Context, offset, limit int) {
@@ -66,7 +68,7 @@ func (pool *CheckPool) Run(ctx context.Context, offset, limit int) {
 Loop:
 	for {
 		select {
-		case u, ok := <-pool.Worder.C:
+		case u, ok := <-pool.Worder.Output:
 			if !ok {
 				done = true
 				continue
@@ -82,12 +84,12 @@ Loop:
 			}

 			pool.wg.Add(1)
-			_ = pool.BasePool.Pool.Invoke(newUnit(u, parsers.CheckSource))
+			_ = pool.Pool.Invoke(newUnit(u, parsers.CheckSource))
 		case u, ok := <-pool.additionCh:
 			if !ok {
 				continue
 			}
-			_ = pool.BasePool.Pool.Invoke(u)
+			_ = pool.Pool.Invoke(u)
 		case <-pool.closeCh:
 			break Loop
 		case <-ctx.Done():
@@ -99,6 +101,10 @@ Loop:

 	pool.Close()
 }
+
+func (pool *CheckPool) Close() {
+	pool.Bar.Close()
+	pool.Pool.Release()
+}

 func (pool *CheckPool) Invoke(v interface{}) {
 	defer func() {
@@ -107,10 +113,10 @@ func (pool *CheckPool) Invoke(v interface{}) {
 	}()

 	unit := v.(*Unit)
-	req, err := pool.genReq(unit.path)
+	req, err := ihttp.BuildRequest(pool.ctx, pool.ClientType, unit.path, "", "", "GET")
 	if err != nil {
 		logs.Log.Debug(err.Error())
-		bl := &pkg.Baseline{
+		bl := &baseline.Baseline{
 			SprayResult: &parsers.SprayResult{
 				UrlString: unit.path,
 				IsValid:   false,
@@ -122,13 +128,13 @@ func (pool *CheckPool) Invoke(v interface{}) {
 		pool.processCh <- bl
 		return
 	}
-	req.SetHeaders(pool.Headers)
+	req.SetHeaders(pool.Headers, pool.RandomUserAgent)
 	start := time.Now()
-	var bl *pkg.Baseline
-	resp, reqerr := pool.client.Do(pool.ctx, req)
+	var bl *baseline.Baseline
+	resp, reqerr := pool.client.Do(req)
 	if reqerr != nil {
 		pool.failedCount++
-		bl = &pkg.Baseline{
+		bl = &baseline.Baseline{
 			SprayResult: &parsers.SprayResult{
 				UrlString: unit.path,
 				IsValid:   false,
@@ -140,8 +146,12 @@ func (pool *CheckPool) Invoke(v interface{}) {
 		logs.Log.Debugf("%s, %s", unit.path, reqerr.Error())
 		pool.doUpgrade(bl)
 	} else {
-		bl = pkg.NewBaseline(req.URI(), req.Host(), resp)
+		bl = baseline.NewBaseline(req.URI(), req.Host(), resp)
+		bl.ReqDepth = unit.depth
 		bl.Collect()
+		if bl.Status == 400 {
+			pool.doUpgrade(bl)
+		}
 	}
 	bl.ReqDepth = unit.depth
 	bl.Source = unit.source
@@ -155,9 +165,6 @@ func (pool *CheckPool) Handler() {
 		if bl.RedirectURL != "" {
 			pool.doRedirect(bl, bl.ReqDepth)
 			pool.putToOutput(bl)
-		} else if bl.Status == 400 {
-			pool.doUpgrade(bl)
-			pool.putToOutput(bl)
 		} else {
 			params := map[string]interface{}{
 				"current": bl,
@@ -174,8 +181,8 @@ func (pool *CheckPool) Handler() {
 		}
 	}

-func (pool *CheckPool) doRedirect(bl *pkg.Baseline, depth int) {
-	if depth >= MaxRedirect {
+func (pool *CheckPool) doRedirect(bl *baseline.Baseline, depth int) {
+	if depth >= pool.MaxRedirect {
 		return
 	}
 	var reURL string
@@ -193,15 +200,17 @@ func (pool *CheckPool) doRedirect(bl *pkg.Baseline, depth int) {
 	go func() {
 		pool.additionCh <- &Unit{
 			path:     reURL,
+			parent:   bl.Number,
 			source:   parsers.RedirectSource,
 			frontUrl: bl.UrlString,
 			depth:    depth + 1,
+			from:     bl.Source,
 		}
 	}()
 }

 // switch protocols (http/https) on raw-tcp errors and 400 responses
-func (pool *CheckPool) doUpgrade(bl *pkg.Baseline) {
+func (pool *CheckPool) doUpgrade(bl *baseline.Baseline) {
 	if bl.ReqDepth >= 1 {
 		return
 	}
@@ -215,8 +224,10 @@ func (pool *CheckPool) doUpgrade(bl *pkg.Baseline) {
 	go func() {
 		pool.additionCh <- &Unit{
 			path:   reurl,
+			parent: bl.Number,
 			source: parsers.UpgradeSource,
 			depth:  bl.ReqDepth + 1,
+			from:   bl.Source,
 		}
 	}()
 }
core/pool/config.go (new file, 72 lines)
@@ -0,0 +1,72 @@
package pool

import (
	"github.com/chainreactors/logs"
	"github.com/chainreactors/proxyclient"
	"github.com/chainreactors/spray/core/baseline"
	"github.com/chainreactors/words"
	"github.com/chainreactors/words/rule"
	"github.com/expr-lang/expr/vm"
	"net/http"
	"sync"
	"time"
)

type Config struct {
	BaseURL           string
	ProxyClient       proxyclient.Dial
	Thread            int
	Wordlist          []string
	Timeout           time.Duration
	ProcessCh         chan *baseline.Baseline
	OutputCh          chan *baseline.Baseline
	FuzzyCh           chan *baseline.Baseline
	Outwg             *sync.WaitGroup
	RateLimit         int
	CheckPeriod       int
	ErrPeriod         int32
	BreakThreshold    int32
	Method            string
	Mod               SprayMod
	Headers           http.Header
	ClientType        int
	MatchExpr         *vm.Program
	FilterExpr        *vm.Program
	RecuExpr          *vm.Program
	AppendRule        *rule.Program
	Fns               []words.WordFunc
	AppendWords       []string
	Fuzzy             bool
	IgnoreWaf         bool
	Crawl             bool
	Scope             []string
	Active            bool
	Bak               bool
	Fuzzuli           bool
	Common            bool
	RetryLimit        int
	RandomUserAgent   bool
	Random            string
	Index             string
	MaxRedirect       int
	MaxCrawlDepth     int
	MaxRecursionDepth int
	MaxAppendDepth    int
}

func NewBruteWords(config *Config, list []string) *words.Worder {
	word := words.NewWorderWithList(list)
	word.Fns = config.Fns
	word.Run()
	return word
}

func NewBruteDSL(config *Config, dsl string, params [][]string) *words.Worder {
	word, err := words.NewWorderWithDsl(dsl, params, nil)
	if err != nil {
		logs.Log.Error(err.Error())
	}
	word.Fns = config.Fns
	word.Run()
	return word
}
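A minimal usage sketch (not part of the diff) of the new Config type and the NewBruteWords helper above; the field values are illustrative, and Worder.Output is assumed to be a channel of generated words, as it is consumed by doAppendWords earlier in this change.

package main

import (
	"fmt"
	"time"

	"github.com/chainreactors/spray/core/pool"
)

func main() {
	cfg := &pool.Config{
		BaseURL: "http://example.com",
		Thread:  10,
		Timeout: 5 * time.Second, // Timeout is now a time.Duration; the runner converts from seconds
	}
	// NewBruteWords wraps the list in a words.Worder, copies cfg.Fns onto it and starts it.
	for w := range pool.NewBruteWords(cfg, []string{"admin", "login", "backup"}).Output {
		fmt.Println(w)
	}
}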
core/pool/pool.go (new file, 72 lines)
@@ -0,0 +1,72 @@
package pool

import (
	"context"
	"github.com/chainreactors/parsers"
	"github.com/chainreactors/spray/core/baseline"
	"github.com/chainreactors/spray/core/ihttp"
	"github.com/chainreactors/spray/pkg"
	"github.com/chainreactors/words"
	"sync"
	"sync/atomic"
)

type BasePool struct {
	*Config
	Statistor   *pkg.Statistor
	Bar         *pkg.Bar
	Worder      *words.Worder
	Cancel      context.CancelFunc
	client      *ihttp.Client
	ctx         context.Context
	processCh   chan *baseline.Baseline // baselines waiting to be processed
	dir         string
	reqCount    int
	failedCount int
	additionCh  chan *Unit
	closeCh     chan struct{}
	wg          *sync.WaitGroup
	isFallback  atomic.Bool
}

func (pool *BasePool) doRetry(bl *baseline.Baseline) {
	if bl.Retry >= pool.RetryLimit {
		return
	}
	pool.wg.Add(1)
	go func() {
		defer pool.wg.Done()
		pool.addAddition(&Unit{
			path:   bl.Path,
			parent: bl.Number,
			host:   bl.Host,
			source: parsers.RetrySource,
			from:   bl.Source,
			retry:  bl.Retry + 1,
		})
	}()
}

func (pool *BasePool) addAddition(u *Unit) {
	// forcibly suppress the send error to avoid leaking goroutines
	pool.wg.Add(1)
	defer func() {
		if err := recover(); err != nil {
		}
	}()
	pool.additionCh <- u
}

func (pool *BasePool) putToOutput(bl *baseline.Baseline) {
	if bl.IsValid || bl.IsFuzzy {
		bl.Collect()
	}
	pool.Outwg.Add(1)
	pool.OutputCh <- bl
}

func (pool *BasePool) putToFuzzy(bl *baseline.Baseline) {
	pool.Outwg.Add(1)
	bl.IsFuzzy = true
	pool.FuzzyCh <- bl
}
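The addAddition helper above pairs a wg.Add with a recover so that a late send on an already-closed additionCh cannot panic the pool or leak the sending goroutine. Below is a self-contained sketch of that pattern, simplified with hypothetical mini types and a balancing Done added for the demo; it is not the pool's actual code.

package main

import (
	"fmt"
	"sync"
)

type unit struct{ path string }

type miniPool struct {
	wg         sync.WaitGroup
	additionCh chan *unit
}

func (p *miniPool) addAddition(u *unit) {
	p.wg.Add(1)
	defer func() {
		// swallow a "send on closed channel" panic so shutdown races stay harmless
		if err := recover(); err != nil {
			p.wg.Done()
		}
	}()
	p.additionCh <- u
}

func main() {
	p := &miniPool{additionCh: make(chan *unit, 4)}
	go func() {
		for u := range p.additionCh {
			fmt.Println("queued", u.path)
			p.wg.Done()
		}
	}()
	p.addAddition(&unit{path: "/admin"})
	p.addAddition(&unit{path: "/backup.zip"})
	close(p.additionCh)
	p.wg.Wait()
}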
core/pool/type.go (new file, 57 lines)
@@ -0,0 +1,57 @@
package pool

import (
	"github.com/chainreactors/parsers"
	"github.com/chainreactors/spray/core/baseline"
)

func newUnit(path string, source parsers.SpraySource) *Unit {
	return &Unit{path: path, source: source}
}

type Unit struct {
	number   int
	parent   int
	host     string
	path     string
	from     parsers.SpraySource
	source   parsers.SpraySource
	retry    int
	frontUrl string
	depth    int
}

func (u *Unit) Update(bl *baseline.Baseline) {
	bl.Number = u.number
	bl.Parent = u.parent
	bl.Host = u.host
	bl.Path = u.path
	bl.Source = u.source
}

func NewBaselines() *Baselines {
	return &Baselines{
		baselines: map[int]*baseline.Baseline{},
	}
}

type Baselines struct {
	FailedBaselines []*baseline.Baseline
	random          *baseline.Baseline
	index           *baseline.Baseline
	baselines       map[int]*baseline.Baseline
}

type SprayMod int

const (
	PathSpray SprayMod = iota + 1
	HostSpray
	ParamSpray
	CustomSpray
)

var ModMap = map[string]SprayMod{
	"path": PathSpray,
	"host": HostSpray,
}
@@ -1,11 +1,13 @@
-package internal
+package core

 import (
 	"context"
 	"github.com/chainreactors/files"
 	"github.com/chainreactors/logs"
-	"github.com/chainreactors/spray/internal/ihttp"
-	"github.com/chainreactors/spray/internal/pool"
+	"github.com/chainreactors/proxyclient"
+	"github.com/chainreactors/spray/core/baseline"
+	"github.com/chainreactors/spray/core/ihttp"
+	"github.com/chainreactors/spray/core/pool"
 	"github.com/chainreactors/spray/pkg"
 	"github.com/chainreactors/words"
 	"github.com/chainreactors/words/rule"
@@ -13,28 +15,28 @@ import (
 	"github.com/panjf2000/ants/v2"
 	"github.com/vbauerster/mpb/v8"
 	"github.com/vbauerster/mpb/v8/decor"
+	"net/http"
+	"strings"
 	"sync"
+	"time"
 )

 var (
 	MAX = 2147483647
 )

-var (
-	dictCache     = make(map[string][]string)
-	wordlistCache = make(map[string][]string)
-	ruleCache     = make(map[string][]rule.Expression)
-)
-
 type Runner struct {
 	*Option

 	taskCh   chan *Task
-	poolwg   sync.WaitGroup
+	poolwg   *sync.WaitGroup
 	outwg    *sync.WaitGroup
-	outputCh chan *pkg.Baseline
-	fuzzyCh  chan *pkg.Baseline
+	outputCh chan *baseline.Baseline
+	fuzzyCh  chan *baseline.Baseline
 	bar      *mpb.Bar
+	bruteMod bool
+
+	ProxyClient proxyclient.Dial
 	IsCheck     bool
 	Pools       *ants.PoolWithFunc
 	PoolName    map[string]bool
@@ -46,28 +48,27 @@ type Runner struct {
 	MatchExpr     *vm.Program
 	RecursiveExpr *vm.Program
 	OutputFile    *files.File
-	FuzzyFile     *files.File
+	//FuzzyFile *files.File
 	DumpFile *files.File
 	StatFile *files.File
 	Progress *mpb.Progress
-	Fns      []func(string) []string
+	Fns      []words.WordFunc
 	Count       int // tasks total number
 	Wordlist    []string
 	AppendWords []string
-	RecuDepth   int
 	ClientType  int
 	Probes      []string
 	Total       int // wordlist total number
 	Color       bool
 	Jsonify     bool
 }

 func (r *Runner) PrepareConfig() *pool.Config {
 	config := &pool.Config{
 		Thread:    r.Threads,
-		Timeout:   r.Timeout,
+		Timeout:   time.Duration(r.Timeout) * time.Second,
 		RateLimit: r.RateLimit,
-		Headers:   r.Headers,
+		Headers:   make(http.Header),
 		Method:    r.Method,
 		Mod:       pool.ModMap[r.Mod],
 		OutputCh:  r.outputCh,
@@ -80,20 +81,26 @@ func (r *Runner) PrepareConfig() *pool.Config {
 		MatchExpr:  r.MatchExpr,
 		FilterExpr: r.FilterExpr,
 		RecuExpr:   r.RecursiveExpr,
-		AppendRule:  r.AppendRules,
-		AppendWords: r.AppendWords,
+		AppendRule:  r.AppendRules,  // rules appended to valid directories, generated from rule
+		AppendWords: r.AppendWords,  // extra wordlist appended to valid directories
+		Fns:         r.Fns,
 		//IgnoreWaf: r.IgnoreWaf,
-		Crawl:           r.Crawl,
+		Crawl:           r.CrawlPlugin,
 		Scope:           r.Scope,
 		Active:          r.Finger,
-		Bak:             r.Bak,
-		Common:          r.Common,
-		Retry:           r.RetryCount,
+		Bak:             r.BakPlugin,
+		Fuzzuli:         r.FuzzuliPlugin,
+		Common:          r.CommonPlugin,
+		RetryLimit:      r.RetryCount,
 		ClientType:      r.ClientType,
 		RandomUserAgent: r.RandomUserAgent,
 		Random:          r.Random,
 		Index:           r.Index,
-		ProxyAddr:       r.Proxy,
+		MaxRecursionDepth: r.Depth,
+		MaxRedirect:       3,
+		MaxAppendDepth:    r.AppendDepth,
+		MaxCrawlDepth:     r.CrawlDepth,
+		ProxyClient:       r.ProxyClient,
 	}

 	if config.ClientType == ihttp.Auto {
@@ -103,6 +110,19 @@ func (r *Runner) PrepareConfig() *pool.Config {
 			config.ClientType = ihttp.STANDARD
 		}
 	}
+
+	for k, v := range r.Headers {
+		config.Headers.Set(k, v)
+	}
+
+	if config.Headers.Get("User-Agent") == "" {
+		config.Headers.Set("User-Agent", pkg.DefaultUserAgent)
+	}
+
+	if config.Headers.Get("Accept") == "" {
+		config.Headers.Set("Accept", "*/*")
+	}
+
 	return config
 }

@@ -111,6 +131,10 @@ func (r *Runner) AppendFunction(fn func(string) []string) {
 }

 func (r *Runner) Prepare(ctx context.Context) error {
+	if r.bruteMod {
+		r.IsCheck = false
+	}
+	r.OutputHandler()
 	var err error
 	if r.IsCheck {
 		// check only, similar to httpx
@@ -138,6 +162,7 @@ func (r *Runner) Prepare(ctx context.Context) error {
 			checkPool.Run(ctx, r.Offset, r.Count)
 			r.poolwg.Done()
 		})
+		r.RunWithCheck(ctx)
 	} else {
 		// full brute-force mode
 		go func() {
@@ -180,7 +205,7 @@ func (r *Runner) Prepare(ctx context.Context) error {
 				brutePool.Statistor.Total = t.origin.sum
 			} else {
 				brutePool.Statistor = pkg.NewStatistor(t.baseUrl)
-				brutePool.Worder = words.NewWorder(r.Wordlist)
+				brutePool.Worder = words.NewWorderWithList(r.Wordlist)
 				brutePool.Worder.Fns = r.Fns
 				brutePool.Worder.Rules = r.Rules.Expressions
 			}
@@ -192,12 +217,14 @@ func (r *Runner) Prepare(ctx context.Context) error {
 				limit = brutePool.Statistor.Total
 			}
 			brutePool.Bar = pkg.NewBar(config.BaseURL, limit-brutePool.Statistor.Offset, brutePool.Statistor, r.Progress)
-			logs.Log.Importantf("[pool] task: %s, total %d words, %d threads, proxy: %s", brutePool.BaseURL, limit-brutePool.Statistor.Offset, brutePool.Thread, brutePool.ProxyAddr)
+			logs.Log.Importantf("[pool] task: %s, total %d words, %d threads, proxy: %v",
+				brutePool.BaseURL, limit-brutePool.Statistor.Offset, brutePool.Thread, r.Proxies)
 			err = brutePool.Init()
 			if err != nil {
 				brutePool.Statistor.Error = err.Error()
 				if !r.Force {
 					// if force is not enabled, a failed init closes the pool
+					brutePool.Bar.Close()
 					brutePool.Close()
 					r.PrintStat(brutePool)
 					r.Done()
@@ -214,12 +241,12 @@ func (r *Runner) Prepare(ctx context.Context) error {
 			r.PrintStat(brutePool)
 			r.Done()
 		})
+		r.Run(ctx)
 	}

 	if err != nil {
 		return err
 	}
-	r.OutputHandler()
 	return nil
 }

@@ -228,6 +255,7 @@ Loop:
 	for {
 		select {
 		case <-ctx.Done():
+			// past the deadline, tasks that never started are still recorded into the stat file
 			if len(r.taskCh) > 0 {
 				for t := range r.taskCh {
 					stat := pkg.NewStatistor(t.baseUrl)
@@ -246,6 +274,9 @@ Loop:
 		}
 	}

+	if r.bar != nil {
+		r.bar.Wait()
+	}
 	r.poolwg.Wait()
 	r.outwg.Wait()
 }
@@ -276,7 +307,7 @@ Loop:
 	r.outwg.Wait()
 }

-func (r *Runner) AddRecursive(bl *pkg.Baseline) {
+func (r *Runner) AddRecursive(bl *baseline.Baseline) {
 	// queue a new recursive task
 	task := &Task{
 		baseUrl: bl.UrlString,
@@ -352,62 +383,40 @@ func (r *Runner) saveStat(content string) {
 	}
 }

-func (r *Runner) OutputHandler() {
-	debugPrint := func(bl *pkg.Baseline) {
-		if r.Color {
-			logs.Log.Debug(bl.ColorString())
-		} else {
-			logs.Log.Debug(bl.String())
-		}
-	}
-
-	var saveFunc func(string)
-	if r.OutputFile != nil {
-		saveFunc = func(line string) {
-			r.OutputFile.SafeWrite(line + "\n")
-			r.OutputFile.SafeSync()
-		}
-	} else {
-		saveFunc = func(line string) {
-			logs.Log.Console(line + "\n")
-		}
-	}
-
-	var fuzzySaveFunc func(string)
-	if r.FuzzyFile != nil {
-		fuzzySaveFunc = func(line string) {
-			r.FuzzyFile.SafeWrite(line + "\n")
-			r.FuzzyFile.SafeSync()
-		}
-	} else {
-		fuzzySaveFunc = func(line string) {
-			logs.Log.Console("[fuzzy] " + line + "\n")
-		}
-	}
-
-	outputPrint := func(bl *pkg.Baseline) {
-		var outFunc func(string)
-		if bl.IsFuzzy {
-			outFunc = fuzzySaveFunc
-		} else {
-			outFunc = saveFunc
-		}
-		if r.Option.Json {
-			outFunc(bl.Jsonify())
-		} else if r.Color {
-			if len(r.Probes) > 0 {
-				outFunc(logs.GreenBold(bl.Format(r.Probes)))
-			} else {
-				outFunc(logs.GreenBold(bl.ColorString()))
-			}
-		} else {
-			if len(r.Probes) > 0 {
-				outFunc(bl.Format(r.Probes))
-			} else {
-				outFunc(bl.String())
-			}
-		}
-	}
-
+func (r *Runner) Output(bl *baseline.Baseline) {
+	var out string
+	if r.Option.Json {
+		out = bl.ToJson()
+	} else if len(r.Probes) > 0 {
+		out = bl.ProbeOutput(r.Probes)
+	} else if r.Color {
+		out = bl.ColorString()
+	} else {
+		out = bl.String()
+	}
+
+	if bl.IsValid {
+		logs.Log.Console(out + "\n")
+	} else if r.Fuzzy && bl.IsFuzzy {
+		logs.Log.Console("[fuzzy] " + out + "\n")
+	}
+
+	if r.OutputFile != nil {
+		if r.FileOutput == "json" {
+			r.OutputFile.SafeWrite(bl.ToJson() + "\n")
+		} else if r.FileOutput == "csv" {
+			r.OutputFile.SafeWrite(bl.ToCSV())
+		} else if r.FileOutput == "full" {
+			r.OutputFile.SafeWrite(bl.String() + "\n")
+		} else {
+			r.OutputFile.SafeWrite(bl.ProbeOutput(strings.Split(r.FileOutput, ",")) + "\n")
+		}
+
+		r.OutputFile.SafeSync()
+	}
+}
+
+func (r *Runner) OutputHandler() {
 	go func() {
 		for {
 			select {
@@ -416,16 +425,20 @@ func (r *Runner) OutputHandler() {
 				return
 			}
 			if r.DumpFile != nil {
-				r.DumpFile.SafeWrite(bl.Jsonify() + "\n")
+				r.DumpFile.SafeWrite(bl.ToJson() + "\n")
 				r.DumpFile.SafeSync()
 			}
 			if bl.IsValid {
-				outputPrint(bl)
+				r.Output(bl)
 				if bl.Recu {
 					r.AddRecursive(bl)
 				}
 			} else {
-				debugPrint(bl)
+				if r.Color {
+					logs.Log.Debug(bl.ColorString())
+				} else {
+					logs.Log.Debug(bl.String())
+				}
 			}
 			r.outwg.Done()
 		}
@@ -439,9 +452,7 @@ func (r *Runner) OutputHandler() {
 			if !ok {
 				return
 			}
-			if r.Fuzzy {
-				outputPrint(bl)
-			}
+			r.Output(bl)
 			r.outwg.Done()
 		}
 	}
@@ -1,4 +1,4 @@
-package internal
+package core

 import (
 	"fmt"
@@ -1,4 +1,4 @@
-package internal
+package core

 import (
 	"github.com/chainreactors/spray/pkg"
@@ -14,15 +14,15 @@ type Origin struct {
 	sum int
 }

-func (o *Origin) InitWorder(fns []func(string) []string) (*words.Worder, error) {
+func (o *Origin) InitWorder(fns []words.WordFunc) (*words.Worder, error) {
 	var worder *words.Worder
-	wl, err := loadWordlist(o.Word, o.Dictionaries)
+	wl, err := pkg.LoadWordlist(o.Word, o.Dictionaries)
 	if err != nil {
 		return nil, err
 	}
-	worder = words.NewWorder(wl)
+	worder = words.NewWorderWithList(wl)
 	worder.Fns = fns
-	rules, err := loadRuleWithFiles(o.RuleFiles, o.RuleFilter)
+	rules, err := pkg.LoadRuleWithFiles(o.RuleFiles, o.RuleFilter)
 	if err != nil {
 		return nil, err
 	}
go.mod (35 lines changed)
@@ -1,23 +1,22 @@
 module github.com/chainreactors/spray

-go 1.22
-
-toolchain go1.22.2
+go 1.20

 require (
 	github.com/chainreactors/files v0.0.0-20240716182835-7884ee1e77f0
 	github.com/chainreactors/fingers v0.0.0-20240716172449-2fc3147b9c2a
-	github.com/chainreactors/logs v0.0.0-20240207121836-c946f072f81f
-	github.com/chainreactors/parsers v0.0.0-20240708072709-07deeece7ce2
-	github.com/chainreactors/utils v0.0.0-20240716182459-e85f2b01ee16
-	github.com/chainreactors/words v0.4.1-0.20240510105042-5ba5c2edc508
+	github.com/chainreactors/logs v0.0.0-20241115105204-6132e39f5261
+	github.com/chainreactors/parsers v0.0.0-20250605044448-6bc270f12c0e
+	github.com/chainreactors/proxyclient v1.0.3-0.20250219180226-a25a0c9e6ac8
+	github.com/chainreactors/utils v0.0.0-20240805193040-ff3b97aa3c3f
+	github.com/chainreactors/words v0.0.0-20240910083848-19a289e8984b
+	github.com/charmbracelet/lipgloss v0.13.0
 	github.com/expr-lang/expr v1.16.9
 	github.com/gookit/config/v2 v2.2.5
 	github.com/jessevdk/go-flags v1.5.0
 	github.com/panjf2000/ants/v2 v2.9.1
 	github.com/valyala/fasthttp v1.53.0
 	github.com/vbauerster/mpb/v8 v8.7.3
-	golang.org/x/net v0.25.0
 	golang.org/x/time v0.5.0
 	sigs.k8s.io/yaml v1.4.0
 )
@@ -27,6 +26,8 @@ require (
 	github.com/VividCortex/ewma v1.2.0 // indirect
 	github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d // indirect
 	github.com/andybalholm/brotli v1.1.0 // indirect
+	github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
+	github.com/charmbracelet/x/ansi v0.1.4 // indirect
 	github.com/facebookincubator/nvdtools v0.1.5 // indirect
 	github.com/fatih/color v1.17.0 // indirect
 	github.com/go-dedup/megophone v0.0.0-20170830025436-f01be21026f5 // indirect
@@ -34,27 +35,33 @@ require (
 	github.com/go-dedup/text v0.0.0-20170907015346-8bb1b95e3cb7 // indirect
 	github.com/go-playground/validator/v10 v10.20.0 // indirect
 	github.com/goccy/go-yaml v1.11.3 // indirect
-	github.com/google/go-cmp v0.6.0 // indirect
 	github.com/gookit/color v1.5.4 // indirect
 	github.com/gookit/goutil v0.6.15 // indirect
 	github.com/klauspost/compress v1.17.8 // indirect
-	github.com/kr/pretty v0.3.1 // indirect
+	github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
 	github.com/mattn/go-colorable v0.1.13 // indirect
 	github.com/mattn/go-isatty v0.0.20 // indirect
 	github.com/mattn/go-runewidth v0.0.15 // indirect
 	github.com/mitchellh/mapstructure v1.5.0 // indirect
+	github.com/muesli/termenv v0.15.2 // indirect
 	github.com/pkg/errors v0.9.1 // indirect
+	github.com/riobard/go-bloom v0.0.0-20200614022211-cdc8013cb5b3 // indirect
 	github.com/rivo/uniseg v0.4.7 // indirect
 	github.com/rogpeppe/go-internal v1.12.0 // indirect
+	github.com/shadowsocks/go-shadowsocks2 v0.1.5 // indirect
 	github.com/twmb/murmur3 v1.1.8 // indirect
 	github.com/valyala/bytebufferpool v1.0.0 // indirect
 	github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
+	golang.org/x/crypto v0.33.0 // indirect
 	golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842 // indirect
-	golang.org/x/sync v0.7.0 // indirect
-	golang.org/x/sys v0.20.0 // indirect
-	golang.org/x/term v0.20.0 // indirect
-	golang.org/x/text v0.15.0 // indirect
+	golang.org/x/net v0.25.0 // indirect
+	golang.org/x/sync v0.11.0 // indirect
+	golang.org/x/sys v0.30.0 // indirect
+	golang.org/x/term v0.29.0 // indirect
+	golang.org/x/text v0.22.0 // indirect
 	golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 // indirect
 	gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect
 	gopkg.in/yaml.v3 v3.0.1 // indirect
 )
+
+replace github.com/chainreactors/proxyclient => github.com/chainreactors/proxyclient v1.0.3
go.sum (107 lines changed)
@@ -50,6 +50,7 @@ dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk=
 dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
 github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
 github.com/BurntSushi/toml v0.4.1/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ=
+github.com/BurntSushi/toml v1.3.2/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ=
 github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
 github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ=
 github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
@@ -62,6 +63,7 @@ github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuy
 github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
 github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
 github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8=
+github.com/andybalholm/brotli v1.0.6/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
 github.com/andybalholm/brotli v1.1.0 h1:eLKJA0d02Lf0mVpIDgYnqXcUn0GqVmEFny3VuID1U3M=
 github.com/andybalholm/brotli v1.1.0/go.mod h1:sms7XGricyQI9K10gOSf56VKKWS4oLer58Q+mhRPtnY=
 github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
@@ -70,16 +72,18 @@ github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmV
 github.com/armon/go-metrics v0.3.10/go.mod h1:4O98XIr/9W0sxpJ8UaYkvjk10Iff7SnFrb4QAOwNTFc=
 github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
 github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
+github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k=
+github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8=
 github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
 github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
 github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
 github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
+github.com/bwesterb/go-ristretto v1.2.3/go.mod h1:fUIoIZaG73pV5biE2Blr2xEzDoMj7NFEuV9ekS419A0=
 github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
 github.com/census-instrumentation/opencensus-proto v0.3.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
 github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
 github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
 github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
-github.com/chainreactors/files v0.0.0-20230731174853-acee21c8c45a/go.mod h1:/Xa9YXhjBlaC33JTD6ZTJFig6pcplak2IDcovf42/6A=
 github.com/chainreactors/files v0.0.0-20231102192550-a652458cee26/go.mod h1:/Xa9YXhjBlaC33JTD6ZTJFig6pcplak2IDcovf42/6A=
 github.com/chainreactors/files v0.0.0-20231123083421-cea5b4ad18a8/go.mod h1:/Xa9YXhjBlaC33JTD6ZTJFig6pcplak2IDcovf42/6A=
 github.com/chainreactors/files v0.0.0-20240716182835-7884ee1e77f0 h1:cU3sGEODXZsUZGBXfnz0nyxF6+37vA+ZGDx6L/FKN4o=
@@ -87,25 +91,38 @@ github.com/chainreactors/files v0.0.0-20240716182835-7884ee1e77f0/go.mod h1:NSxG
 github.com/chainreactors/fingers v0.0.0-20240702104653-a66e34aa41df/go.mod h1:l8AO6ZbIL8WQ8PkihCK/MD6Iww/O+LY/osAhRJjThs4=
 github.com/chainreactors/fingers v0.0.0-20240716172449-2fc3147b9c2a h1:5l4i8TdHRlz088J5xZM30yvTUMLVcWJ6iXiO/VyD3ro=
 github.com/chainreactors/fingers v0.0.0-20240716172449-2fc3147b9c2a/go.mod h1:R03soobTE/AnZWtFgfQVYNM5QLH52NZ946wZTJVBXh4=
-github.com/chainreactors/logs v0.0.0-20231027080134-7a11bb413460/go.mod h1:VZFqkFDGmp7/JOMeraW+YI7kTGcgz9fgc/HArVFnrGQ=
-github.com/chainreactors/logs v0.0.0-20240207121836-c946f072f81f h1:tcfp+CEdgiMvjyUzWab5edJtxUwRMSMEIkLybupIx0k=
 github.com/chainreactors/logs v0.0.0-20240207121836-c946f072f81f/go.mod h1:6Mv6W70JrtL6VClulZhmMRZnoYpcTahcDTKLMNEjK0o=
+github.com/chainreactors/logs v0.0.0-20241115105204-6132e39f5261 h1:gcRLCAF4ANvltkdh7cnLFCNrogwl0Qh8oNaYrKHMyz4=
+github.com/chainreactors/logs v0.0.0-20241115105204-6132e39f5261/go.mod h1:6Mv6W70JrtL6VClulZhmMRZnoYpcTahcDTKLMNEjK0o=
 github.com/chainreactors/parsers v0.0.0-20240702104902-1ce563b7ef76/go.mod h1:G/XLE5RAaUdqADkbhQ59mPrUAbsJLiQ2DN6CwtwNpBQ=
-github.com/chainreactors/parsers v0.0.0-20240708072709-07deeece7ce2 h1:sE3SChgHLtPsEaqHo5tDSy8niDys1SO174C4eHlShSw=
-github.com/chainreactors/parsers v0.0.0-20240708072709-07deeece7ce2/go.mod h1:7rXdYz6jrdjF0WUH1ICcAXKIKKjKmJo2PU8u43V7jkA=
+github.com/chainreactors/parsers v0.0.0-20250222062812-66fe23cfde02 h1:zpBTjOampIeifWQKiyfpSwHvIO0aJ60N7FlO1Z5ePKc=
+github.com/chainreactors/parsers v0.0.0-20250222062812-66fe23cfde02/go.mod h1:7rXdYz6jrdjF0WUH1ICcAXKIKKjKmJo2PU8u43V7jkA=
+github.com/chainreactors/parsers v0.0.0-20250418131403-e9e233e9d804 h1:6YFXQEaJH/A7sFAVHJbH3iOf1L0gbD9IaBGTj0ETIHc=
+github.com/chainreactors/parsers v0.0.0-20250418131403-e9e233e9d804/go.mod h1:7rXdYz6jrdjF0WUH1ICcAXKIKKjKmJo2PU8u43V7jkA=
+github.com/chainreactors/parsers v0.0.0-20250605044448-6bc270f12c0e h1:8AgGNkG1JoO6CIGlMNOecUCaQCnB/Ko/WI3Y6VgVPrI=
+github.com/chainreactors/parsers v0.0.0-20250605044448-6bc270f12c0e/go.mod h1:7rXdYz6jrdjF0WUH1ICcAXKIKKjKmJo2PU8u43V7jkA=
+github.com/chainreactors/proxyclient v1.0.3 h1:afnymFICAEqzG4rVMMJhd4Tqrx6LfTssUbo+T8P52vs=
+github.com/chainreactors/proxyclient v1.0.3/go.mod h1:kuB9olIK/GOW3lrpbYcJ2Uxb1aKsQPQmxewfCyIZ/0g=
 github.com/chainreactors/utils v0.0.0-20240528085651-ba1b255482c1/go.mod h1:JA4eiQZm+7AsfjXBcIzIdVKBEhDCb16eNtWFCGTxlvs=
 github.com/chainreactors/utils v0.0.0-20240704062557-662d623b74f4/go.mod h1:JA4eiQZm+7AsfjXBcIzIdVKBEhDCb16eNtWFCGTxlvs=
 github.com/chainreactors/utils v0.0.0-20240715080349-d2d0484c95ed/go.mod h1:LajXuvESQwP+qCMAvlcoSXppQCjuLlBrnQpu9XQ1HtU=
-github.com/chainreactors/utils v0.0.0-20240716182459-e85f2b01ee16 h1:TCOshCp7PrWqhP/HSAM5kT3VxoOe7EoJbRseyoSX3RM=
 github.com/chainreactors/utils v0.0.0-20240716182459-e85f2b01ee16/go.mod h1:LajXuvESQwP+qCMAvlcoSXppQCjuLlBrnQpu9XQ1HtU=
-github.com/chainreactors/words v0.4.1-0.20240510105042-5ba5c2edc508 h1:iT4HWkoZzUAfQYcQMRH8XyrMau9tCVE0zSuFQnkhrqw=
-github.com/chainreactors/words v0.4.1-0.20240510105042-5ba5c2edc508/go.mod h1:DUDx7PdsMEm5PvVhzkFyppzpiUhQb8dOJaWjVc1SMVk=
+github.com/chainreactors/utils v0.0.0-20240805193040-ff3b97aa3c3f h1:2NKmadFYP9vCwC0YrazgttFACleOhxScTPzg0i76YAY=
+github.com/chainreactors/utils v0.0.0-20240805193040-ff3b97aa3c3f/go.mod h1:LajXuvESQwP+qCMAvlcoSXppQCjuLlBrnQpu9XQ1HtU=
+github.com/chainreactors/words v0.0.0-20240910083848-19a289e8984b h1:OsZ1fyarW4NwK/Oi+Yf3nm/dTW0uX0UfxFjyky5Mb60=
+github.com/chainreactors/words v0.0.0-20240910083848-19a289e8984b/go.mod h1:zfz367PUmyaX6oAqV9SktVqyRXKlEh0sel9Wsq9dd2c=
+github.com/charmbracelet/lipgloss v0.13.0 h1:4X3PPeoWEDCMvzDvGmTajSyYPcZM4+y8sCA/SsA3cjw=
+github.com/charmbracelet/lipgloss v0.13.0/go.mod h1:nw4zy0SBX/F/eAO1cWdcvy6qnkDUxr8Lw7dvFrAIbbY=
+github.com/charmbracelet/x/ansi v0.1.4 h1:IEU3D6+dWwPSgZ6HBH+v6oUuZ/nVawMiWj5831KfiLM=
+github.com/charmbracelet/x/ansi v0.1.4/go.mod h1:dk73KoMTT5AX5BsX0KrqhsTqAnhZZoCBjs7dGWp4Ktw=
 github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
 github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
 github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
 github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag=
 github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I=
 github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
+github.com/cloudflare/circl v1.3.7/go.mod h1:sRTcRWXGLrKw6yIGJ+l7amYJFfAXbZG0kBSc8r4zxgA=
+github.com/cloudflare/circl v1.3.8/go.mod h1:PDRU+oXvdD7KCtgKxW95M5Z8BpSCJXQORiZFnBQS5QU=
 github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
 github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
 github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
@@ -119,6 +136,7 @@ github.com/cncf/xds/go v0.0.0-20211130200136-a8f946100490/go.mod h1:eXthEFrGJvWH
 github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
 github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
 github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
+github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
 github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
@@ -147,7 +165,6 @@ github.com/fatih/color v1.17.0 h1:GlRw1BRJxkpqUCBKzKOw098ed57fEsKeNjpTe3cSjK4=
 github.com/fatih/color v1.17.0/go.mod h1:YZ7TlrGPkiz6ku9fK3TLD/pl3CpsiFyu8N92HLgmosI=
 github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU=
 github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0=
-github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk=
 github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
 github.com/go-dedup/megophone v0.0.0-20170830025436-f01be21026f5 h1:4U+x+EB1P66zwYgTjxWXSOT8vF+651Ksr1lojiCZnT8=
 github.com/go-dedup/megophone v0.0.0-20170830025436-f01be21026f5/go.mod h1:poR/Cp00iqtqu9ltFwl6C00sKC0HY13u/Gh05ZBmP54=
@@ -158,18 +175,18 @@ github.com/go-dedup/text v0.0.0-20170907015346-8bb1b95e3cb7/go.mod h1:wSsK4VOECO
 github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
 github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
 github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
+github.com/go-gost/gosocks5 v0.3.0/go.mod h1:1G6I7HP7VFVxveGkoK8mnprnJqSqJjdcASKsdUn4Pp4=
 github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
 github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
 github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
 github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
 github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
-github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
 github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
-github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
 github.com/go-playground/validator/v10 v10.20.0 h1:K9ISHbSaI0lyB2eWMPJo+kOS/FBExVwjEviJTixqxL8=
 github.com/go-playground/validator/v10 v10.20.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
 github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
 github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
+github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8=
 github.com/goccy/go-yaml v1.11.3 h1:B3W9IdWbvrUu2OYQGwvU1nZtvMQJPBKgBUuweJjLj6I=
 github.com/goccy/go-yaml v1.11.3/go.mod h1:wKnAMd44+9JAAnGQpWVEgBzGt3YuTaQ4uXoHvE4m7WU=
 github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
@@ -256,7 +273,6 @@ github.com/gookit/config/v2 v2.2.5/go.mod h1:NeX+yiNYn6Ei10eJvCQFXuHEPIE/IPS8bqa
 github.com/gookit/goutil v0.6.15 h1:mMQ0ElojNZoyPD0eVROk5QXJPh2uKR4g06slgPDF5Jo=
 github.com/gookit/goutil v0.6.15/go.mod h1:qdKdYEHQdEtyH+4fNdQNZfJHhI0jUZzHxQVAV3DaMDY=
 github.com/gookit/ini/v2 v2.2.3 h1:nSbN+x9OfQPcMObTFP+XuHt8ev6ndv/fWWqxFhPMu2E=
-github.com/gookit/ini/v2 v2.2.3/go.mod h1:Vu6p7P7xcfmb8KYu3L0ek8bqu/Im63N81q208SCCZY4=
 github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw=
 github.com/hashicorp/consul/api v1.11.0/go.mod h1:XjsvQN+RJGWI2TWy1/kqaE16HrR2J/FWgkYjdZQsX9M=
 github.com/hashicorp/consul/sdk v0.8.0/go.mod h1:GBvyrGALthsZObzUGsfgHZQDXjg4lOjagTIwIR1vPms=
@@ -301,8 +317,11 @@ github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHm
 github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
 github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
 github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
+github.com/kataras/golog v0.1.8/go.mod h1:rGPAin4hYROfk1qT9wZP6VY2rsb4zzc37QpdPjdkqVw=
+github.com/kataras/pio v0.0.11/go.mod h1:38hH6SWH6m4DKSYmRhlrCJ5WItwWgCVrTNU62XZyUvI=
 github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
 github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
+github.com/klauspost/compress v1.17.4/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM=
 github.com/klauspost/compress v1.17.8 h1:YcnTYrq7MikUT7k0Yb5eceMmALQPYBW/Xltxn0NAMnU=
 github.com/klauspost/compress v1.17.8/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw=
 github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
@@ -318,7 +337,8 @@ github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
 github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
 github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
 github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
-github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
+github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
+github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
 github.com/lyft/protoc-gen-star v0.5.3/go.mod h1:V0xaHgaf5oCCqmcxYcWiDfTiKsZsRc87/1qhoTACD8w=
 github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60=
 github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
@@ -356,6 +376,8 @@ github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJ
 github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
 github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
 github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
+github.com/muesli/termenv v0.15.2 h1:GohcuySI0QmI3wN8Ok9PtKGkgkFIk7y6Vpb5PvrY+Wo=
+github.com/muesli/termenv v0.15.2/go.mod h1:Epx+iuz8sNs7mNKhxzH4fWXGNpZwUaJKRS1noLXviQ8=
 github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
|
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
|
||||||
github.com/panjf2000/ants/v2 v2.9.1 h1:Q5vh5xohbsZXGcD6hhszzGqB7jSSc2/CRr3QKIga8Kw=
|
github.com/panjf2000/ants/v2 v2.9.1 h1:Q5vh5xohbsZXGcD6hhszzGqB7jSSc2/CRr3QKIga8Kw=
|
||||||
github.com/panjf2000/ants/v2 v2.9.1/go.mod h1:7ZxyxsqE4vvW0M7LSD8aI3cKwgFhBHbxnlN8mDqHa1I=
|
github.com/panjf2000/ants/v2 v2.9.1/go.mod h1:7ZxyxsqE4vvW0M7LSD8aI3cKwgFhBHbxnlN8mDqHa1I=
|
||||||
@ -384,6 +406,9 @@ github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8b
|
|||||||
github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
|
github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
|
||||||
github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
|
github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
|
||||||
github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A=
|
github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A=
|
||||||
|
github.com/refraction-networking/utls v1.6.4/go.mod h1:2VL2xfiqgFAZtJKeUTlf+PSYFs3Eu7km0gCtXJ3m8zs=
|
||||||
|
github.com/riobard/go-bloom v0.0.0-20200614022211-cdc8013cb5b3 h1:f/FNXud6gA3MNr8meMVVGxhp+QBTqY91tM8HjEuMjGg=
|
||||||
|
github.com/riobard/go-bloom v0.0.0-20200614022211-cdc8013cb5b3/go.mod h1:HgjTstvQsPGkxUsCd2KWxErBblirPizecHcpD3ffK+s=
|
||||||
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
||||||
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
|
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
|
||||||
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
|
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
|
||||||
@ -397,6 +422,8 @@ github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb
|
|||||||
github.com/sagikazarmark/crypt v0.3.0/go.mod h1:uD/D+6UF4SrIR1uGEv7bBNkNqLGqUr43MRiaGWX1Nig=
|
github.com/sagikazarmark/crypt v0.3.0/go.mod h1:uD/D+6UF4SrIR1uGEv7bBNkNqLGqUr43MRiaGWX1Nig=
|
||||||
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
|
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
|
||||||
github.com/sergi/go-diff v1.2.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
|
github.com/sergi/go-diff v1.2.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
|
||||||
|
github.com/shadowsocks/go-shadowsocks2 v0.1.5 h1:PDSQv9y2S85Fl7VBeOMF9StzeXZyK1HakRm86CUbr28=
|
||||||
|
github.com/shadowsocks/go-shadowsocks2 v0.1.5/go.mod h1:AGGpIoek4HRno4xzyFiAtLHkOpcoznZEkAccaI/rplM=
|
||||||
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
|
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
|
||||||
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
|
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
|
||||||
github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
|
github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
|
||||||
@ -422,12 +449,15 @@ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/
|
|||||||
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
||||||
github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
||||||
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
||||||
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
|
|
||||||
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||||
|
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
|
||||||
|
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||||
github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw=
|
github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw=
|
||||||
|
github.com/things-go/go-socks5 v0.0.5/go.mod h1:mtzInf8v5xmsBpHZVbIw2YQYhc4K0jRwzfsH64Uh0IQ=
|
||||||
github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM=
|
github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM=
|
||||||
github.com/twmb/murmur3 v1.1.8 h1:8Yt9taO/WN3l08xErzjeschgZU2QSrwm1kclYq+0aRg=
|
github.com/twmb/murmur3 v1.1.8 h1:8Yt9taO/WN3l08xErzjeschgZU2QSrwm1kclYq+0aRg=
|
||||||
github.com/twmb/murmur3 v1.1.8/go.mod h1:Qq/R7NUyOfr65zD+6Q5IHKsJLwP7exErjN6lyyq3OSQ=
|
github.com/twmb/murmur3 v1.1.8/go.mod h1:Qq/R7NUyOfr65zD+6Q5IHKsJLwP7exErjN6lyyq3OSQ=
|
||||||
|
github.com/urfave/cli/v2 v2.27.4/go.mod h1:m4QzxcD2qpra4z7WhzEGn74WZLViBnMpb1ToCAKdGRQ=
|
||||||
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
|
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
|
||||||
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
|
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
|
||||||
github.com/valyala/fasthttp v1.53.0 h1:lW/+SUkOxCx2vlIu0iaImv4JLrVRnbbkpCoaawvA4zc=
|
github.com/valyala/fasthttp v1.53.0 h1:lW/+SUkOxCx2vlIu0iaImv4JLrVRnbbkpCoaawvA4zc=
|
||||||
@ -436,12 +466,15 @@ github.com/vbauerster/mpb/v8 v8.7.3 h1:n/mKPBav4FFWp5fH4U0lPpXfiOmCEgl5Yx/NM3tKJ
|
|||||||
github.com/vbauerster/mpb/v8 v8.7.3/go.mod h1:9nFlNpDGVoTmQ4QvNjSLtwLmAFjwmq0XaAF26toHGNM=
|
github.com/vbauerster/mpb/v8 v8.7.3/go.mod h1:9nFlNpDGVoTmQ4QvNjSLtwLmAFjwmq0XaAF26toHGNM=
|
||||||
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no=
|
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no=
|
||||||
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM=
|
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM=
|
||||||
|
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1/go.mod h1:Ohn+xnUBiLI6FVj/9LpzZWtj1/D6lUovWYBkxHVV3aM=
|
||||||
github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||||
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||||
github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||||
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||||
github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
|
github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
|
||||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||||
|
github.com/zema1/rawhttp v0.2.0/go.mod h1:EYBmBgSu01yb/kLh6lgjJWa6kDV+DrSO8nbgmEzuG6E=
|
||||||
|
github.com/zema1/suo5 v1.3.2-0.20250219115440-31983ee59a83/go.mod h1:MAuFXiTGFS3PLzZ6cTVsjdQqze4SWPfHvciBTPE6dkw=
|
||||||
go.etcd.io/etcd/api/v3 v3.5.1/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs=
|
go.etcd.io/etcd/api/v3 v3.5.1/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs=
|
||||||
go.etcd.io/etcd/client/pkg/v3 v3.5.1/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g=
|
go.etcd.io/etcd/client/pkg/v3 v3.5.1/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g=
|
||||||
go.etcd.io/etcd/client/v2 v2.305.1/go.mod h1:pMEacxZW7o8pg4CrFE7pquyCJJzZvkvdD2RibOCCCGs=
|
go.etcd.io/etcd/client/v2 v2.305.1/go.mod h1:pMEacxZW7o8pg4CrFE7pquyCJJzZvkvdD2RibOCCCGs=
|
||||||
@ -465,11 +498,18 @@ golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8U
|
|||||||
golang.org/x/crypto v0.0.0-20190923035154-9ee001bba392/go.mod h1:/lpIB1dKB+9EgE3H3cr1v9wB50oz8l4C4h62xy7jSTY=
|
golang.org/x/crypto v0.0.0-20190923035154-9ee001bba392/go.mod h1:/lpIB1dKB+9EgE3H3cr1v9wB50oz8l4C4h62xy7jSTY=
|
||||||
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||||
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||||
|
golang.org/x/crypto v0.0.0-20210220033148-5ea612d1eb83/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I=
|
||||||
golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||||
|
golang.org/x/crypto v0.11.1-0.20230711161743-2e82bdd1719d/go.mod h1:xgJhtzW8F9jGdVFWZESrid1U1bjeNy4zgy5cRr/CIio=
|
||||||
|
golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw=
|
||||||
|
golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
|
||||||
|
golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4=
|
||||||
|
golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg=
|
||||||
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
|
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
|
||||||
golang.org/x/crypto v0.23.0 h1:dIJU/v2J8Mdglj/8rJ6UUOM3Zc9zLZxVZwwxMooUSAI=
|
|
||||||
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
|
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
|
||||||
|
golang.org/x/crypto v0.33.0 h1:IOBPskki6Lysi0lo9qQvbxiQ+FvsCC/YWOecCHAixus=
|
||||||
|
golang.org/x/crypto v0.33.0/go.mod h1:bVdXmD7IV/4GdElGPozy6U7lWdRXA4qyRVGJV57uQ5M=
|
||||||
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||||
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||||
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
|
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
|
||||||
@ -510,6 +550,9 @@ golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
|||||||
golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
|
golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
|
||||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||||
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||||
|
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||||
|
golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
|
||||||
|
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
|
||||||
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||||
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||||
golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||||
@ -554,6 +597,9 @@ golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d/go.mod h1:9nx3DQGgdP8bBQD5qx
|
|||||||
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||||
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||||
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
|
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
|
||||||
|
golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI=
|
||||||
|
golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
|
||||||
|
golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY=
|
||||||
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
|
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
|
||||||
golang.org/x/net v0.25.0 h1:d/OCCoBEUq33pjydKrGQhw7IlUPI2Oylr+8qLx49kac=
|
golang.org/x/net v0.25.0 h1:d/OCCoBEUq33pjydKrGQhw7IlUPI2Oylr+8qLx49kac=
|
||||||
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
|
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
|
||||||
@ -588,8 +634,10 @@ golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJ
|
|||||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
|
golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
|
||||||
golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M=
|
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||||
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||||
|
golang.org/x/sync v0.11.0 h1:GGz8+XQP4FvTTrjZPzNKTMFtSXH80RAzG+5ghFPgK9w=
|
||||||
|
golang.org/x/sync v0.11.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||||
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
@ -658,19 +706,34 @@ golang.org/x/sys v0.0.0-20211205182925-97ca703d548d/go.mod h1:oPkhp1MJrh7nUepCBc
|
|||||||
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.0.0-20220919091848-fb04ddd9f9c8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||||
|
golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||||
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||||
golang.org/x/sys v0.20.0 h1:Od9JTbYCk261bKm4M/mw7AklTlFYIa0bIp9BgSm1S8Y=
|
|
||||||
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||||
|
golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc=
|
||||||
|
golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||||
|
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
|
||||||
|
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
|
||||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||||
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
||||||
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
|
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
|
||||||
|
golang.org/x/term v0.10.0/go.mod h1:lpqdcUyK/oCiQxvxVrppt5ggO2KCZ5QblwqPnfZ6d5o=
|
||||||
|
golang.org/x/term v0.11.0/go.mod h1:zC9APTIj3jG3FdV/Ons+XE1riIZXG4aZ4GTHiPZJPIU=
|
||||||
|
golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
|
||||||
|
golang.org/x/term v0.15.0/go.mod h1:BDl952bC7+uMoWR75FIrCDx79TPU9oHkTZ9yRbYOrX0=
|
||||||
|
golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY=
|
||||||
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
|
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
|
||||||
golang.org/x/term v0.20.0 h1:VnkxpohqXaOBYJtBmEppKUG6mXpi+4O6purfc2+sMhw=
|
|
||||||
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
|
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
|
||||||
|
golang.org/x/term v0.29.0 h1:L6pJp37ocefwRRtYPKSWOWzOtWSxVajvz2ldH/xi3iU=
|
||||||
|
golang.org/x/term v0.29.0/go.mod h1:6bl4lRlvVuDgSf3179VpIxBF0o10JUpXWOnI7nErv7s=
|
||||||
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||||
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||||
@ -682,9 +745,13 @@ golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
|||||||
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||||
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||||
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
|
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
|
||||||
|
golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
|
||||||
|
golang.org/x/text v0.12.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
|
||||||
|
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
|
||||||
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||||
golang.org/x/text v0.15.0 h1:h1V/4gjBv8v9cjcR6+AR5+/cIYK5N/WAgiv4xlsEtAk=
|
|
||||||
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||||
|
golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM=
|
||||||
|
golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY=
|
||||||
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||||
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||||
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||||
@ -746,6 +813,8 @@ golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
|
|||||||
golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
|
golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
|
||||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||||
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
||||||
|
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
|
||||||
|
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
|
||||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
|
@@ -1,41 +0,0 @@
package internal

import (
	"bytes"
	"encoding/json"
	"github.com/chainreactors/logs"
	"github.com/chainreactors/spray/pkg"
	"io"
	"os"
)

func Format(filename string, color bool) {
	var content []byte
	var err error
	if filename == "stdin" {
		content, err = io.ReadAll(os.Stdin)
	} else {
		content, err = os.ReadFile(filename)
	}

	if err != nil {
		return
	}
	var results []*pkg.Baseline
	for _, line := range bytes.Split(bytes.TrimSpace(content), []byte("\n")) {
		var result pkg.Baseline
		err := json.Unmarshal(line, &result)
		if err != nil {
			logs.Log.Error(err.Error())
			return
		}
		results = append(results, &result)
	}
	for _, result := range results {
		if color {
			logs.Log.Info(result.ColorString())
		} else {
			logs.Log.Info(result.String())
		}
	}
}
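The removed Format helper re-parses spray's line-delimited JSON output (one pkg.Baseline object per line) and re-prints each record. A minimal standalone sketch of the same NDJSON pattern, using a hypothetical reduced record type in place of pkg.Baseline:

package main

import (
	"bufio"
	"encoding/json"
	"fmt"
	"os"
)

// record is a hypothetical subset of the fields spray writes per line;
// the real pkg.Baseline carries many more.
type record struct {
	URL    string `json:"url"`
	Status int    `json:"status"`
}

func main() {
	scanner := bufio.NewScanner(os.Stdin)
	for scanner.Scan() {
		line := scanner.Bytes()
		if len(line) == 0 {
			continue
		}
		var r record
		if err := json.Unmarshal(line, &r); err != nil {
			fmt.Fprintln(os.Stderr, err)
			return
		}
		fmt.Printf("%d %s\n", r.Status, r.URL)
	}
}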
@@ -1,58 +0,0 @@
package pool

import (
	"github.com/chainreactors/spray/pkg"
	"github.com/chainreactors/words/rule"
	"github.com/expr-lang/expr/vm"
	"sync"
)

type SprayMod int

const (
	PathSpray SprayMod = iota + 1
	HostSpray
	ParamSpray
	CustomSpray
)

var ModMap = map[string]SprayMod{
	"path": PathSpray,
	"host": HostSpray,
}

type Config struct {
	BaseURL         string
	ProxyAddr       string
	Thread          int
	Wordlist        []string
	Timeout         int
	ProcessCh       chan *pkg.Baseline
	OutputCh        chan *pkg.Baseline
	FuzzyCh         chan *pkg.Baseline
	Outwg           *sync.WaitGroup
	RateLimit       int
	CheckPeriod     int
	ErrPeriod       int32
	BreakThreshold  int32
	Method          string
	Mod             SprayMod
	Headers         map[string]string
	ClientType      int
	MatchExpr       *vm.Program
	FilterExpr      *vm.Program
	RecuExpr        *vm.Program
	AppendRule      *rule.Program
	AppendWords     []string
	Fuzzy           bool
	IgnoreWaf       bool
	Crawl           bool
	Scope           []string
	Active          bool
	Bak             bool
	Common          bool
	Retry           int
	RandomUserAgent bool
	Random          string
	Index           string
}
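ModMap is the lookup table that turns the textual spray mode into a SprayMod value. A small hedged sketch of how such a lookup can be wrapped; the PathSpray fallback for unknown names is illustrative, not spray's documented behaviour:

// resolveMod maps a user-supplied mode name ("path", "host") to a SprayMod.
// Falling back to PathSpray for unknown names is an assumption for this sketch.
func resolveMod(name string) SprayMod {
	if m, ok := ModMap[name]; ok {
		return m
	}
	return PathSpray
}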
@@ -1,164 +0,0 @@
package pool

import (
	"context"
	"fmt"
	"github.com/chainreactors/parsers"
	"github.com/chainreactors/spray/internal/ihttp"
	"github.com/chainreactors/spray/pkg"
	"github.com/chainreactors/words"
	"github.com/chainreactors/words/mask"
	"github.com/chainreactors/words/rule"
	"github.com/panjf2000/ants/v2"
	"path"
	"sync"
)

type BasePool struct {
	*Config
	Statistor   *pkg.Statistor
	Pool        *ants.PoolWithFunc
	Bar         *pkg.Bar
	Worder      *words.Worder
	Cancel      context.CancelFunc
	client      *ihttp.Client
	ctx         context.Context
	processCh   chan *pkg.Baseline // baselines waiting to be processed
	dir         string
	reqCount    int
	failedCount int
	additionCh  chan *Unit
	closeCh     chan struct{}
	wg          sync.WaitGroup
}

func (pool *BasePool) doRedirect(bl *pkg.Baseline, depth int) {
	if depth >= MaxRedirect {
		return
	}
	reURL := pkg.FormatURL(bl.Url.Path, bl.RedirectURL)
	pool.wg.Add(1)
	go func() {
		defer pool.wg.Done()
		pool.addAddition(&Unit{
			path:     reURL,
			source:   parsers.RedirectSource,
			frontUrl: bl.UrlString,
			depth:    depth + 1,
		})
	}()
}

func (pool *BasePool) doRule(bl *pkg.Baseline) {
	if pool.AppendRule == nil {
		pool.wg.Done()
		return
	}
	if bl.Source == parsers.RuleSource {
		pool.wg.Done()
		return
	}

	go func() {
		defer pool.wg.Done()
		for u := range rule.RunAsStream(pool.AppendRule.Expressions, path.Base(bl.Path)) {
			pool.addAddition(&Unit{
				path:   pkg.Dir(bl.Url.Path) + u,
				source: parsers.RuleSource,
			})
		}
	}()
}

func (pool *BasePool) doAppendWords(bl *pkg.Baseline) {
	if pool.AppendWords == nil {
		pool.wg.Done()
		return
	}
	if bl.Source == parsers.AppendSource {
		pool.wg.Done()
		return
	}

	go func() {
		defer pool.wg.Done()
		for _, u := range pool.AppendWords {
			pool.addAddition(&Unit{
				path:   pkg.SafePath(bl.Path, u),
				source: parsers.AppendSource,
			})
		}
	}()
}

func (pool *BasePool) doRetry(bl *pkg.Baseline) {
	if bl.Retry >= pool.Retry {
		return
	}
	pool.wg.Add(1)
	go func() {
		defer pool.wg.Done()
		pool.addAddition(&Unit{
			path:   bl.Path,
			source: parsers.RetrySource,
			retry:  bl.Retry + 1,
		})
	}()
}

func (pool *BasePool) doActive() {
	defer pool.wg.Done()
	for _, u := range pkg.ActivePath {
		pool.addAddition(&Unit{
			path:   pool.dir + u[1:],
			source: parsers.FingerSource,
		})
	}
}

func (pool *BasePool) doCommonFile() {
	defer pool.wg.Done()
	for _, u := range mask.SpecialWords["common_file"] {
		pool.addAddition(&Unit{
			path:   pool.dir + u,
			source: parsers.CommonFileSource,
		})
	}
}

func (pool *BasePool) addAddition(u *Unit) {
	// forcibly suppress the send panic to prevent goroutine leaks
	pool.wg.Add(1)
	defer func() {
		if err := recover(); err != nil {
		}
	}()
	pool.additionCh <- u
}

func (pool *BasePool) Close() {
	pool.Bar.Close()
}

func (pool *BasePool) genReq(s string) (*ihttp.Request, error) {
	if pool.Mod == HostSpray {
		return ihttp.BuildHostRequest(pool.ClientType, pool.BaseURL, s)
	} else if pool.Mod == PathSpray {
		return ihttp.BuildPathRequest(pool.ClientType, pool.BaseURL, s, pool.Method)
	}
	return nil, fmt.Errorf("unknown mod")
}

func (pool *BasePool) putToOutput(bl *pkg.Baseline) {
	if bl.IsValid || bl.IsFuzzy {
		bl.Collect()
	}
	pool.Outwg.Add(1)
	pool.OutputCh <- bl
}

func (pool *BasePool) putToFuzzy(bl *pkg.Baseline) {
	pool.Outwg.Add(1)
	bl.IsFuzzy = true
	pool.FuzzyCh <- bl
}
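addAddition wraps the channel send in a recover so that a send racing against pool shutdown panics quietly instead of leaking the calling goroutine. A self-contained sketch of that recover-on-send pattern:

package main

import "fmt"

// trySend sends v on ch but converts the "send on closed channel" panic
// into a false return instead of crashing the goroutine.
func trySend(ch chan int, v int) (ok bool) {
	defer func() {
		if recover() != nil {
			ok = false
		}
	}()
	ch <- v
	return true
}

func main() {
	ch := make(chan int, 1)
	fmt.Println(trySend(ch, 1)) // true
	close(ch)
	fmt.Println(trySend(ch, 2)) // false: panic recovered
}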
@@ -1,36 +0,0 @@
package pool

import (
	"github.com/chainreactors/parsers"
	"github.com/chainreactors/spray/pkg"
)

func newUnit(path string, source parsers.SpraySource) *Unit {
	return &Unit{path: path, source: source}
}

func newUnitWithNumber(path string, source parsers.SpraySource, number int) *Unit {
	return &Unit{path: path, source: source, number: number}
}

type Unit struct {
	number   int
	path     string
	source   parsers.SpraySource
	retry    int
	frontUrl string
	depth    int // redirect depth
}

func NewBaselines() *Baselines {
	return &Baselines{
		baselines: map[int]*pkg.Baseline{},
	}
}

type Baselines struct {
	FailedBaselines []*pkg.Baseline
	random          *pkg.Baseline
	index           *pkg.Baseline
	baselines       map[int]*pkg.Baseline
}
@@ -1,174 +0,0 @@
package internal

import (
	"bytes"
	"github.com/chainreactors/spray/pkg"
	"github.com/chainreactors/words/mask"
	"github.com/chainreactors/words/rule"
	"io/ioutil"
	"strconv"
	"strings"
)

func parseExtension(s string) string {
	if i := strings.Index(s, "."); i != -1 {
		return s[i+1:]
	}
	return ""
}

func parseStatus(preset []int, changed string) []int {
	if changed == "" {
		return preset
	}
	if strings.HasPrefix(changed, "+") {
		for _, s := range strings.Split(changed[1:], ",") {
			if t, err := strconv.Atoi(s); err != nil {
				continue
			} else {
				preset = append(preset, t)
			}
		}
	} else if strings.HasPrefix(changed, "!") {
		for _, s := range strings.Split(changed[1:], ",") {
			for i, status := range preset {
				if t, err := strconv.Atoi(s); err != nil {
					break
				} else if t == status {
					preset = append(preset[:i], preset[i+1:]...)
					break
				}
			}
		}
	} else {
		preset = []int{}
		for _, s := range strings.Split(changed, ",") {
			if t, err := strconv.Atoi(s); err != nil {
				continue
			} else {
				preset = append(preset, t)
			}
		}
	}
	return preset
}

func loadFileToSlice(filename string) ([]string, error) {
	var ss []string
	content, err := ioutil.ReadFile(filename)
	if err != nil {
		return nil, err
	}

	ss = strings.Split(strings.TrimSpace(string(content)), "\n")

	// normalize the CRLF/LF line-ending difference between Windows and Linux
	for i, word := range ss {
		ss[i] = strings.TrimSpace(word)
	}

	return ss, nil
}

func loadRuleAndCombine(filename []string) (string, error) {
	var bs bytes.Buffer
	for _, f := range filename {
		if data, ok := pkg.Rules[f]; ok {
			bs.WriteString(strings.TrimSpace(data))
			bs.WriteString("\n")
		} else {
			content, err := ioutil.ReadFile(f)
			if err != nil {
				return "", err
			}
			bs.Write(bytes.TrimSpace(content))
			bs.WriteString("\n")
		}
	}
	return bs.String(), nil
}

func loadFileWithCache(filename string) ([]string, error) {
	if dict, ok := dictCache[filename]; ok {
		return dict, nil
	}
	dict, err := loadFileToSlice(filename)
	if err != nil {
		return nil, err
	}
	dictCache[filename] = dict
	return dict, nil
}

func loadDictionaries(filenames []string) ([][]string, error) {
	dicts := make([][]string, len(filenames))
	for i, name := range filenames {
		dict, err := loadFileWithCache(name)
		if err != nil {
			return nil, err
		}
		dicts[i] = dict
	}
	return dicts, nil
}

func loadWordlist(word string, dictNames []string) ([]string, error) {
	if wl, ok := wordlistCache[word+strings.Join(dictNames, ",")]; ok {
		return wl, nil
	}
	dicts, err := loadDictionaries(dictNames)
	if err != nil {
		return nil, err
	}
	wl, err := mask.Run(word, dicts, nil)
	if err != nil {
		return nil, err
	}
	wordlistCache[word] = wl
	return wl, nil
}

func loadRuleWithFiles(ruleFiles []string, filter string) ([]rule.Expression, error) {
	if rules, ok := ruleCache[strings.Join(ruleFiles, ",")]; ok {
		return rules, nil
	}
	var rules bytes.Buffer
	for _, filename := range ruleFiles {
		content, err := ioutil.ReadFile(filename)
		if err != nil {
			return nil, err
		}
		rules.Write(content)
		rules.WriteString("\n")
	}
	return rule.Compile(rules.String(), filter).Expressions, nil
}

//type bytesPatcher struct{}
//
//func (p *bytesPatcher) Visit(node *ast.Node) {
//	switch (*node).(type) {
//	case *ast.MemberNode:
//		ast.Patch(node, &ast.CallNode{
//			Callee: &ast.MemberNode{
//				Node:     *node,
//				Name:     "String",
//				Property: &ast.StringNode{Value: "String"},
//			},
//		})
//	}
//}

func wrapWordsFunc(f func(string) string) func(string) []string {
	return func(s string) []string {
		return []string{f(s)}
	}
}

func safeFilename(filename string) string {
	filename = strings.ReplaceAll(filename, "http://", "")
	filename = strings.ReplaceAll(filename, "https://", "")
	filename = strings.ReplaceAll(filename, ":", "_")
	filename = strings.ReplaceAll(filename, "/", "_")
	return filename
}
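For reference, the removed parseStatus helper accepted three input forms: a plain comma-separated list replaces the preset, a leading "+" appends to it, and a leading "!" removes entries. Worked examples that follow directly from the code above:

// parseStatus([]int{200}, "")          -> [200]          (empty input keeps the preset)
// parseStatus([]int{200}, "+403,500")  -> [200 403 500]  ("+" appends)
// parseStatus([]int{200, 404}, "!404") -> [200]          ("!" removes)
// parseStatus([]int{200}, "301,302")   -> [301 302]      (plain list replaces)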
pkg/load.go (43 lines changed)
@@ -1,7 +1,7 @@
 package pkg
 
 import (
-	"encoding/json"
+	"fmt"
 	"github.com/chainreactors/fingers"
 	"github.com/chainreactors/parsers"
 	"github.com/chainreactors/utils"
@@ -12,18 +12,10 @@ import (
 	"strings"
 )
 
-var (
-	ExtractRegexps = make(parsers.Extractors)
-	Extractors     = make(parsers.Extractors)
-
-	FingerEngine *fingers.Engine
-	ActivePath   []string
-)
-
 func LoadPorts() error {
 	var err error
 	var ports []*utils.PortConfig
-	err = json.Unmarshal(LoadConfig("port"), &ports)
+	err = yaml.Unmarshal(LoadConfig("port"), &ports)
 	if err != nil {
 		return err
 	}
@@ -55,18 +47,29 @@ func LoadFingers() error {
 func LoadTemplates() error {
 	var err error
 	// load rule
-	var data map[string]interface{}
-	err = json.Unmarshal(LoadConfig("spray_rule"), &data)
+	err = yaml.Unmarshal(LoadConfig("spray_rule"), &Rules)
 	if err != nil {
 		return err
 	}
-	for k, v := range data {
-		Rules[k] = v.(string)
+
+	// load default words
+	var dicts map[string]string
+	err = yaml.Unmarshal(LoadConfig("spray_dict"), &dicts)
+	if err != nil {
+		return err
+	}
+	for name, wordlist := range dicts {
+		dict := strings.Split(strings.TrimSpace(wordlist), "\n")
+		for i, d := range dict {
+			dict[i] = strings.TrimSpace(d)
+		}
+		Dicts[strings.TrimSuffix(name, ".txt")] = dict
 	}
 
 	// load mask
 	var keywords map[string]interface{}
-	err = json.Unmarshal(LoadConfig("spray_common"), &keywords)
+	err = yaml.Unmarshal(LoadConfig("spray_common"), &keywords)
 	if err != nil {
 		return err
 	}
@@ -80,7 +83,7 @@ func LoadTemplates() error {
 	}
 
 	var extracts []*parsers.Extractor
-	err = json.Unmarshal(LoadConfig("extract"), &extracts)
+	err = yaml.Unmarshal(LoadConfig("extract"), &extracts)
 	if err != nil {
 		return err
 	}
@@ -122,16 +125,12 @@ func LoadExtractorConfig(filename string) ([]*parsers.Extractor, error) {
 func Load() error {
 	err := LoadPorts()
 	if err != nil {
-		return err
+		return fmt.Errorf("load ports, %w", err)
 	}
 	err = LoadTemplates()
 	if err != nil {
-		return err
+		return fmt.Errorf("load templates, %w", err)
 	}
 
 	return nil
 }
-
-func LoadDefaultDict() []string {
-	return strings.Split(strings.TrimSpace(string(LoadConfig("spray_default"))), "\n")
-}
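LoadTemplates now unmarshals the embedded spray_dict config into a map of wordlist file names to newline-separated word blocks and stores each list in Dicts keyed without the .txt suffix. A minimal sketch of that transformation, assuming goccy/go-yaml (which appears in go.sum) is the package aliased as yaml in pkg/load.go:

package main

import (
	"fmt"
	"strings"

	"github.com/goccy/go-yaml" // assumption: the yaml package used by pkg/load.go
)

// Dicts mirrors the new pkg.Dicts map populated by LoadTemplates.
var Dicts = make(map[string][]string)

func main() {
	// raw stands in for LoadConfig("spray_dict"): a YAML map of
	// wordlist file names to newline-separated words.
	raw := []byte("common.txt: |\n  admin\n  login\n  backup\n")

	var dicts map[string]string
	if err := yaml.Unmarshal(raw, &dicts); err != nil {
		panic(err)
	}
	for name, wordlist := range dicts {
		dict := strings.Split(strings.TrimSpace(wordlist), "\n")
		for i, d := range dict {
			dict[i] = strings.TrimSpace(d)
		}
		Dicts[strings.TrimSuffix(name, ".txt")] = dict
	}
	fmt.Println(Dicts["common"]) // [admin login backup]
}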
pkg/parse.go (new file, 26 lines)
@@ -0,0 +1,26 @@
package pkg

import "strings"

var (
	SkipChar = "%SKIP%"
	EXTChar  = "%EXT%"
)

func ParseEXTPlaceholderFunc(exts []string) func(string) []string {
	return func(s string) []string {
		ss := make([]string, len(exts))
		var n int
		for i, e := range exts {
			if strings.Contains(s, EXTChar) {
				n++
				ss[i] = strings.Replace(s, EXTChar, e, -1)
			}
		}
		if n == 0 {
			return []string{s}
		} else {
			return ss
		}
	}
}
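ParseEXTPlaceholderFunc returns the word-expansion callback used for the %EXT% placeholder: a word containing %EXT% is expanded once per configured extension, and a word without it passes through unchanged. Usage, assuming the github.com/chainreactors/spray module path used elsewhere in this diff:

package main

import (
	"fmt"

	"github.com/chainreactors/spray/pkg"
)

func main() {
	expand := pkg.ParseEXTPlaceholderFunc([]string{"php", "aspx"})

	fmt.Println(expand("index.%EXT%")) // [index.php index.aspx]
	fmt.Println(expand("robots.txt"))  // [robots.txt]
}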
263
pkg/utils.go
263
pkg/utils.go
@ -3,10 +3,16 @@ package pkg
|
|||||||
import (
|
import (
|
||||||
"bufio"
|
"bufio"
|
||||||
"bytes"
|
"bytes"
|
||||||
|
"github.com/chainreactors/files"
|
||||||
|
"github.com/chainreactors/fingers"
|
||||||
"github.com/chainreactors/logs"
|
"github.com/chainreactors/logs"
|
||||||
|
"github.com/chainreactors/parsers"
|
||||||
"github.com/chainreactors/utils/iutils"
|
"github.com/chainreactors/utils/iutils"
|
||||||
|
"github.com/chainreactors/words/mask"
|
||||||
|
"github.com/chainreactors/words/rule"
|
||||||
"github.com/expr-lang/expr"
|
"github.com/expr-lang/expr"
|
||||||
"github.com/expr-lang/expr/vm"
|
"github.com/expr-lang/expr/vm"
|
||||||
|
"io/ioutil"
|
||||||
"math/rand"
|
"math/rand"
|
||||||
"net/http"
|
"net/http"
|
||||||
"net/url"
|
"net/url"
|
||||||
@ -19,23 +25,33 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var (
|
||||||
LogVerbose = logs.Warn - 2
|
LogVerbose = logs.Warn - 2
|
||||||
LogFuzz = logs.Warn - 1
|
LogFuzz = logs.Warn - 1
|
||||||
WhiteStatus = []int{} // cmd input, 200
|
DefaultWhiteStatus = []int{200} // cmd input
|
||||||
BlackStatus = []int{} // cmd input, 400,410
|
DefaultBlackStatus = []int{400, 410} // cmd input
|
||||||
FuzzyStatus = []int{} // cmd input, 500,501,502,503
|
DefaultFuzzyStatus = []int{500, 501, 502, 503, 301, 302, 404} // cmd input
|
||||||
WAFStatus = []int{493, 418, 1020, 406}
|
DefaultUniqueStatus = []int{403, 200, 404} // 相同unique的403表示命中了同一条acl, 相同unique的200表示default页面
|
||||||
UniqueStatus = []int{} // 相同unique的403表示命中了同一条acl, 相同unique的200表示default页面
|
WhiteStatus = []int{} // cmd input, 200
|
||||||
|
BlackStatus = []int{} // cmd input, 400,410
|
||||||
|
FuzzyStatus = []int{} // cmd input, 500,501,502,503
|
||||||
|
WAFStatus = []int{493, 418, 1020, 406, 429, 406, 412}
|
||||||
|
UniqueStatus = []int{} // 相同unique的403表示命中了同一条acl, 相同unique的200表示default页面
|
||||||
|
|
||||||
// plugins
|
// plugins
|
||||||
EnableAllFingerEngine = false
|
EnableAllFingerEngine = false
|
||||||
)
|
)
|
||||||
var (
|
var (
|
||||||
Rules map[string]string = make(map[string]string)
|
Rules map[string]string = make(map[string]string)
|
||||||
|
Dicts map[string][]string = make(map[string][]string)
|
||||||
BadExt = []string{".js", ".css", ".scss", ".,", ".jpeg", ".jpg", ".png", ".gif", ".svg", ".vue", ".ts", ".swf", ".pdf", ".mp4", ".zip", ".rar"}
|
wordlistCache = make(map[string][]string)
|
||||||
BadURL = []string{";", "}", "\\n", "webpack://", "{", "www.w3.org", ".src", ".url", ".att", ".href", "location.href", "javascript:", "location:", ".createObject", ":location", ".path"}
|
ruleCache = make(map[string][]rule.Expression)
|
||||||
|
BadExt = []string{".js", ".css", ".scss", ".,", ".jpeg", ".jpg", ".png", ".gif", ".svg", ".vue", ".ts", ".swf", ".pdf", ".mp4", ".zip", ".rar"}
|
||||||
|
BadURL = []string{";", "}", "\\n", "webpack://", "{", "www.w3.org", ".src", ".url", ".att", ".href", "location.href", "javascript:", "location:", ".createObject", ":location", ".path"}
|
||||||
|
ExtractRegexps = make(parsers.Extractors)
|
||||||
|
Extractors = make(parsers.Extractors)
|
||||||
|
|
||||||
|
FingerEngine *fingers.Engine
|
||||||
|
ActivePath []string
|
||||||
ContentTypeMap = map[string]string{
|
ContentTypeMap = map[string]string{
|
||||||
"application/javascript": "js",
|
"application/javascript": "js",
|
||||||
"application/json": "json",
|
"application/json": "json",
|
||||||
@ -75,7 +91,8 @@ var (
|
|||||||
"Mozilla/5.0 (compatible; bingbot/2.0; +http://www.bing.com/bingbot.htm)",
|
"Mozilla/5.0 (compatible; bingbot/2.0; +http://www.bing.com/bingbot.htm)",
|
||||||
"Mozilla/5.0 (compatible; Yahoo! Slurp; http://help.yahoo.com/help/us/ysearch/slurp)",
|
"Mozilla/5.0 (compatible; Yahoo! Slurp; http://help.yahoo.com/help/us/ysearch/slurp)",
|
||||||
}
|
}
|
||||||
uacount = len(randomUserAgent)
|
uacount = len(randomUserAgent)
|
||||||
|
DefaultUserAgent = randomUserAgent[rand.Intn(uacount)]
|
||||||
)
|
)
|
||||||
|
|
||||||
type BS []byte
|
type BS []byte
|
||||||
@ -258,9 +275,9 @@ func CRC16Hash(data []byte) uint16 {
|
|||||||
func SafePath(dir, u string) string {
|
func SafePath(dir, u string) string {
|
||||||
hasSlash := strings.HasPrefix(u, "/")
|
hasSlash := strings.HasPrefix(u, "/")
|
||||||
if hasSlash {
|
if hasSlash {
|
||||||
return path.Join(dir, u[1:])
|
return dir + u[1:]
|
||||||
} else {
|
} else {
|
||||||
return path.Join(dir, u)
|
return dir + u
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -315,12 +332,6 @@ func Dir(u string) string {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func UniqueHash(bl *Baseline) uint16 {
|
|
||||||
// 由host+状态码+重定向url+content-type+title+length舍去个位组成的hash
|
|
||||||
// body length可能会导致一些误报, 目前没有更好的解决办法
|
|
||||||
return CRC16Hash([]byte(bl.Host + strconv.Itoa(bl.Status) + bl.RedirectURL + bl.ContentType + bl.Title + strconv.Itoa(bl.BodyLength/10*10)))
|
|
||||||
}
|
|
||||||
|
|
||||||
func FormatURL(base, u string) string {
|
func FormatURL(base, u string) string {
|
||||||
if strings.HasPrefix(u, "http") {
|
if strings.HasPrefix(u, "http") {
|
||||||
parsed, err := url.Parse(u)
|
parsed, err := url.Parse(u)
|
||||||
@ -391,3 +402,215 @@ func ParseRawResponse(raw []byte) (*http.Response, error) {
|
|||||||
defer resp.Body.Close()
|
defer resp.Body.Close()
|
||||||
return resp, nil
|
return resp, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func GetPresetWordList(key []string) []string {
|
||||||
|
var wordlist []string
|
||||||
|
|
||||||
|
for _, k := range key {
|
||||||
|
if v, ok := mask.SpecialWords[k]; ok {
|
||||||
|
wordlist = append(wordlist, v...)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return wordlist
|
||||||
|
}
|
||||||
|
|
||||||
|
func ParseExtension(s string) string {
|
||||||
|
if i := strings.Index(s, "."); i != -1 {
|
||||||
|
return s[i+1:]
|
||||||
|
}
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
// ParseStatus parses the input string and updates the preset status filters.
|
||||||
|
func ParseStatus(preset []int, changed string) []int {
|
||||||
|
if changed == "" {
|
||||||
|
return preset
|
||||||
|
}
|
||||||
|
|
||||||
|
parseToken := func(s string) (int, bool) {
|
||||||
|
s = strings.TrimSpace(s)
|
||||||
|
if strings.HasSuffix(s, "*") {
|
||||||
|
prefix := s[:len(s)-1]
|
||||||
|
if t, err := strconv.Atoi(prefix); err == nil {
|
||||||
|
return t, true // isPrefix = true
|
||||||
|
}
|
||||||
|
} else if t, err := strconv.Atoi(s); err == nil {
|
||||||
|
return t, false // isPrefix = false
|
||||||
|
}
|
||||||
|
return 0, false
|
||||||
|
}
|
||||||
|
|
||||||
|
if strings.HasPrefix(changed, "+") {
|
||||||
|
for _, s := range strings.Split(changed[1:], ",") {
|
||||||
|
if t, _ := parseToken(s); t != 0 {
|
||||||
|
preset = append(preset, t)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else if strings.HasPrefix(changed, "!") {
|
||||||
|
for _, s := range strings.Split(changed[1:], ",") {
|
||||||
|
if t, _ := parseToken(s); t != 0 {
|
||||||
|
newPreset := preset[:0]
|
||||||
|
for _, val := range preset {
|
||||||
|
if val != t {
|
||||||
|
newPreset = append(newPreset, val)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
preset = newPreset
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
preset = []int{}
|
||||||
|
for _, s := range strings.Split(changed, ",") {
|
||||||
|
if t, _ := parseToken(s); t != 0 {
|
||||||
|
preset = append(preset, t)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return UniqueInts(preset)
|
||||||
|
}
|
||||||
|
|
||||||
|
func UniqueInts(input []int) []int {
|
||||||
|
seen := make(map[int]bool)
|
||||||
|
result := make([]int, 0, len(input))
|
||||||
|
|
||||||
|
for _, val := range input {
|
||||||
|
if !seen[val] {
|
||||||
|
seen[val] = true
|
||||||
|
result = append(result, val)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
// StatusContain checks if a status matches any of the preset filters.
// Preset values < 100 are treated as prefix filters (e.g. 5 = 5xx, 51 = 51x).
func StatusContain(preset []int, status int) bool {
	if len(preset) == 0 {
		return true
	}
	for _, s := range preset {
		if s < 10 {
			if status/100 == s {
				return true
			}
		} else if s < 100 {
			if status/10 == s {
				return true
			}
		} else if s == status {
			return true
		}
	}
	return false
}

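// Illustrative sketch (not part of the original change): with the prefix
// convention above, a preset of [200 3 51] behaves as follows.
//
//	StatusContain([]int{200, 3, 51}, 200) // true: exact match
//	StatusContain([]int{200, 3, 51}, 302) // true: 302/100 == 3
//	StatusContain([]int{200, 3, 51}, 514) // true: 514/10 == 51
//	StatusContain([]int{200, 3, 51}, 404) // false
//	StatusContain(nil, 404)               // true: an empty preset matches everything
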
func LoadFileToSlice(filename string) ([]string, error) {
	var ss []string
	if dicts, ok := Dicts[filename]; ok {
		if files.IsExist(filename) {
			logs.Log.Warnf("load and overwrite %s from preset", filename)
		}
		return dicts, nil
	}
	content, err := ioutil.ReadFile(filename)
	if err != nil {
		return nil, err
	}

	ss = strings.Split(strings.TrimSpace(string(content)), "\n")

	// normalize the CRLF/LF line-ending difference between Windows and Linux
	for i, word := range ss {
		ss[i] = strings.TrimSpace(word)
	}

	return ss, nil
}

func LoadRuleAndCombine(filename []string) (string, error) {
	var bs bytes.Buffer
	for _, f := range filename {
		if data, ok := Rules[f]; ok {
			bs.WriteString(strings.TrimSpace(data))
			bs.WriteString("\n")
		} else {
			content, err := ioutil.ReadFile(f)
			if err != nil {
				return "", err
			}
			bs.Write(bytes.TrimSpace(content))
			bs.WriteString("\n")
		}
	}
	return bs.String(), nil
}

func loadFileWithCache(filename string) ([]string, error) {
	if dict, ok := Dicts[filename]; ok {
		return dict, nil
	}
	dict, err := LoadFileToSlice(filename)
	if err != nil {
		return nil, err
	}
	Dicts[filename] = dict
	return dict, nil
}

func loadDictionaries(filenames []string) ([][]string, error) {
	dicts := make([][]string, len(filenames))
	for i, name := range filenames {
		dict, err := loadFileWithCache(name)
		if err != nil {
			return nil, err
		}
		dicts[i] = dict
	}
	return dicts, nil
}

func LoadWordlist(word string, dictNames []string) ([]string, error) {
	// use one composite key for both lookup and store, otherwise the cache
	// is checked under word+dictNames but populated under word alone
	key := word + strings.Join(dictNames, ",")
	if wl, ok := wordlistCache[key]; ok {
		return wl, nil
	}
	dicts, err := loadDictionaries(dictNames)
	if err != nil {
		return nil, err
	}
	wl, err := mask.Run(word, dicts, nil)
	if err != nil {
		return nil, err
	}
	wordlistCache[key] = wl
	return wl, nil
}

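// Illustrative sketch (not part of the original change): "word" is a mask
// expression in the chainreactors/words mask syntax and "dict.txt" is a
// hypothetical dictionary file, both assumptions for the example. Results are
// memoized under the word joined with the dictionary names, so repeated calls
// with the same arguments reuse the generated wordlist:
//
//	wl, err := LoadWordlist(word, []string{"dict.txt"})
//	if err != nil {
//		return err
//	}
//	for _, w := range wl {
//		// feed w into the brute pool
//	}
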
func LoadRuleWithFiles(ruleFiles []string, filter string) ([]rule.Expression, error) {
	key := strings.Join(ruleFiles, ",")
	if rules, ok := ruleCache[key]; ok {
		return rules, nil
	}
	var rules bytes.Buffer
	for _, filename := range ruleFiles {
		content, err := ioutil.ReadFile(filename)
		if err != nil {
			return nil, err
		}
		rules.Write(content)
		rules.WriteString("\n")
	}
	exprs := rule.Compile(rules.String(), filter).Expressions
	// populate the cache so later calls with the same rule files hit it
	ruleCache[key] = exprs
	return exprs, nil
}

func WrapWordsFunc(f func(string) string) func(string) []string {
	return func(s string) []string {
		return []string{f(s)}
	}
}

func SafeFilename(filename string) string {
	filename = strings.ReplaceAll(filename, "http://", "")
	filename = strings.ReplaceAll(filename, "https://", "")
	filename = strings.ReplaceAll(filename, ":", "_")
	filename = strings.ReplaceAll(filename, "/", "_")
	return filename
}

@ -1 +1 @@
Subproject commit 3e85234341b95f7e6e45b31468311f01093ac970
Subproject commit fe95f1f22d18b6cf2046b004191f5bd745f1c578