Merge pull request #24 from chainreactors/dev

merge v0.9.3
This commit is contained in:
M09Ic 2024-02-12 17:06:42 +08:00 committed by GitHub
commit be19895446
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
26 changed files with 1519 additions and 1005 deletions

View File

@ -17,11 +17,16 @@ jobs:
fetch-depth: 0 fetch-depth: 0
token: ${{ secrets.GITHUB_TOKEN }} token: ${{ secrets.GITHUB_TOKEN }}
submodules: recursive submodules: recursive
- name: Install upx
run: sudo apt install upx -y
continue-on-error: true
- -
name: Set up Go name: Set up Go
uses: actions/setup-go@v3 uses: actions/setup-go@v3
with: with:
go-version: 1.17 go-version: 1.21
- -
name: Run GoReleaser name: Run GoReleaser
uses: goreleaser/goreleaser-action@v4 uses: goreleaser/goreleaser-action@v4

View File

@ -33,12 +33,15 @@ builds:
- all=-trimpath={{.Env.GOPATH}} - all=-trimpath={{.Env.GOPATH}}
no_unique_dist_dir: true no_unique_dist_dir: true
upx:
-
enabled: true
goos: [linux, windows]
archives: archives:
- -
name_template: "{{ .ProjectName }}_{{ .Os }}_{{ .Arch }}" name_template: "{{ .ProjectName }}_{{ .Os }}_{{ .Arch }}"
format: binary format: binary
replacements:
amd64_v1: amd64
checksum: checksum:
name_template: "{{ .ProjectName }}_checksums.txt" name_template: "{{ .ProjectName }}_checksums.txt"

View File

@ -5,10 +5,11 @@ import (
"fmt" "fmt"
"github.com/chainreactors/logs" "github.com/chainreactors/logs"
"github.com/chainreactors/parsers" "github.com/chainreactors/parsers"
"github.com/chainreactors/parsers/iutils"
"github.com/chainreactors/spray/internal" "github.com/chainreactors/spray/internal"
"github.com/chainreactors/spray/internal/ihttp"
"github.com/chainreactors/spray/internal/pool"
"github.com/chainreactors/spray/pkg" "github.com/chainreactors/spray/pkg"
"github.com/chainreactors/spray/pkg/ihttp" "github.com/chainreactors/utils/iutils"
"github.com/jessevdk/go-flags" "github.com/jessevdk/go-flags"
"os" "os"
"os/signal" "os/signal"
@ -17,7 +18,7 @@ import (
"time" "time"
) )
var ver = "" var ver = "v0.9.3"
func Spray() { func Spray() {
var option internal.Option var option internal.Option
@ -51,6 +52,28 @@ func Spray() {
return return
} }
// logs
logs.AddLevel(pkg.LogVerbose, "verbose", "[=] %s {{suffix}}")
if option.Debug {
logs.Log.SetLevel(logs.Debug)
} else if len(option.Verbose) > 0 {
logs.Log.SetLevel(pkg.LogVerbose)
}
logs.Log.SetColorMap(map[logs.Level]func(string) string{
logs.Info: logs.PurpleBold,
logs.Important: logs.GreenBold,
pkg.LogVerbose: logs.Green,
})
if option.Config != "" {
err := internal.LoadConfig(option.Config, &option)
if err != nil {
logs.Log.Error(err.Error())
return
}
}
if option.Version { if option.Version {
fmt.Println(ver) fmt.Println(ver)
return return
@ -58,7 +81,7 @@ func Spray() {
if option.Format != "" { if option.Format != "" {
internal.Format(option.Format, !option.NoColor) internal.Format(option.Format, !option.NoColor)
os.Exit(0) return
} }
err = pkg.LoadTemplates() err = pkg.LoadTemplates()
@ -80,30 +103,25 @@ func Spray() {
} }
} }
} }
// 一些全局变量初始化
if option.Debug {
logs.Log.Level = logs.Debug
}
logs.DefaultColorMap[logs.Info] = logs.PurpleBold // 初始化全局变量
logs.DefaultColorMap[logs.Important] = logs.GreenBold
pkg.Distance = uint8(option.SimhashDistance) pkg.Distance = uint8(option.SimhashDistance)
ihttp.DefaultMaxBodySize = option.MaxBodyLength * 1024 ihttp.DefaultMaxBodySize = option.MaxBodyLength * 1024
internal.MaxCrawl = option.CrawlDepth pool.MaxCrawl = option.CrawlDepth
if option.ReadAll {
ihttp.DefaultMaxBodySize = 0
}
var runner *internal.Runner var runner *internal.Runner
if option.ResumeFrom != "" { if option.ResumeFrom != "" {
runner, err = option.PrepareRunner() runner, err = option.PrepareRunner()
} else { } else {
runner, err = option.PrepareRunner() runner, err = option.PrepareRunner()
} }
if err != nil { if err != nil {
logs.Log.Errorf(err.Error()) logs.Log.Errorf(err.Error())
return return
} }
if option.ReadAll || runner.Crawl {
ihttp.DefaultMaxBodySize = 0
}
ctx, canceler := context.WithTimeout(context.Background(), time.Duration(runner.Deadline)*time.Second) ctx, canceler := context.WithTimeout(context.Background(), time.Duration(runner.Deadline)*time.Second)

94
config.yaml Normal file
View File

@ -0,0 +1,94 @@
input:
append-files: [] # Files, when found valid path, use append file new word with current path
append-rules: [] # Files, when found valid path, use append rule generator new word with current path
dictionaries: [] # Files, Multi,dict files, e.g.: -d 1.txt -d 2.txt
filter-rule: "" # String, filter rule, e.g.: --rule-filter '>8 <4'
rules: [] # Files, rule files, e.g.: -r rule1.txt -r rule2.txt
word: "" # String, word generate dsl, e.g.: -w test{?ld#4}
functions:
extension: "" # String, add extensions (separated by commas), e.g.: -e jsp,jspx
exclude-extension: "" # String, exclude extensions (separated by commas), e.g.: --exclude-extension jsp,jspx
force-extension: false # Bool, force add extensions
remove-extension: "" # String, remove extensions (separated by commas), e.g.: --remove-extension jsp,jspx
prefix: [] # Strings, add prefix, e.g.: --prefix aaa --prefix bbb
suffix: [] # Strings, add suffix, e.g.: --suffix aaa --suffix bbb
upper: false # Bool, upper wordlist, e.g.: --uppercase
lower: false # Bool, lower wordlist, e.g.: --lowercase
replace: null # Strings, replace string, e.g.: --replace aaa:bbb --replace ccc:ddd
skip: [ ] # String, skip word when generate. rule, e.g.: --skip aaa
misc:
mod: path # String, path/host spray
client: auto # String, Client type
thread: 20 # Int, number of threads per pool
pool: 5 # Int, Pool size
timeout: 5 # Int, timeout with request (seconds)
deadline: 999999 # Int, deadline (seconds)
proxy: "" # String, proxy address, e.g.: --proxy socks5://127.0.0.1:1080
quiet: false # Bool, Quiet
debug: false # Bool, output debug info
verbose: [] # Bool, log verbose level, default 0, level1: -v, level2 -vv
no-bar: false # Bool, No progress bar
no-color: false # Bool, no color
mode:
# status
black-status: "400,410" # Strings (comma split), custom black status
fuzzy-status: "500,501,502,503" # Strings (comma split), custom fuzzy status
unique-status: "403,200,404" # Strings (comma split), custom unique status
white-status: "200" # Strings (comma split), custom white status
# check
check-only: false # Bool, check only
check-period: 200 # Int, check period when request
error-period: 10 # Int, check period when error
error-threshold: 20 # Int, break when the error exceeds the threshold
# recursive
recursive: current.IsDir() # String, custom recursive rule, e.g.: --recursive current.IsDir()
depth: 0 # Int, recursive depth
# crawl
scope: [] # String, custom scope, e.g.: --scope *.example.com
no-scope: false # Bool, no scope
# other
index: / # String, custom index path
random: "" # String, custom random path
unique: false # Bool, unique response
distance: 5 # Int, simhash distance for unique response
force: false # Bool, skip error break
rate-limit: 0 # Int, request rate limit (rate/s), e.g.: --rate-limit 100
retry: 0 # Int, retry count
output:
output-file: "" # String, output filename
auto-file: false # Bool, auto generator output and fuzzy filename
dump: false # Bool, dump all request
dump-file: "" # String, dump all request, and write to filename
fuzzy: false # Bool, open fuzzy output
fuzzy-file: "" # String, fuzzy output filename
filter: "" # String, custom filter function, e.g.: --filter 'current.Body contains "hello"'
match: "" # String, custom match function, e.g.: --match 'current.Status != 200''
format: "" # String, output format, e.g.: --format 1.json
output_probe: "" # String, output probes
plugins:
all: false # Bool, enable all plugin
bak: false # Bool, enable bak found
common: false # Bool, enable common file found
crawl: false # Bool, enable crawl
crawl-depth: 3 # Int, crawl depth
extract: [] # Strings, extract response, e.g.: --extract js --extract ip --extract version:(.*?)
file-bak: false # Bool, enable valid result bak found, equal --append-rule rule/filebak.txt
finger: false # Bool, enable active finger detect
recon: false # Bool, enable recon
request:
cookies: [] # Strings, custom cookie
headers: [] # Strings, custom headers, e.g.: --headers 'Auth: example_auth'
max-body-length: 100 # Int, max response body length (kb), default 100k, e.g. -max-length 1000
useragent: "" # String, custom user-agent, e.g.: --user-agent Custom
random-useragent: false # Bool, use random with default user-agent
read-all: false # Bool, read all response body

41
go.mod
View File

@ -1,38 +1,47 @@
module github.com/chainreactors/spray module github.com/chainreactors/spray
go 1.17 go 1.19
require ( require github.com/chainreactors/go-metrics v0.0.0-20220926021830-24787b7a10f8
github.com/chainreactors/files v0.2.5-0.20230310102018-3d10f74c7d6b
github.com/chainreactors/go-metrics v0.0.0-20220926021830-24787b7a10f8
github.com/chainreactors/gogo/v2 v2.11.1-0.20230327070928-b5ff67ac46c7
github.com/chainreactors/logs v0.7.1-0.20230316032643-ed7d85ca234f
github.com/chainreactors/parsers v0.3.1-0.20230403160559-9ed502452575
github.com/chainreactors/words v0.4.1-0.20230327065326-448a905ac8c2
)
require ( require (
github.com/antonmedv/expr v1.12.5 github.com/antonmedv/expr v1.12.5
github.com/chainreactors/ipcs v0.0.13 github.com/chainreactors/files v0.0.0-20231123083421-cea5b4ad18a8
github.com/chainreactors/utils v0.0.14-0.20230314084720-a4d745cabc56 github.com/chainreactors/gogo/v2 v2.11.12-0.20231228061950-116583962e30
github.com/chainreactors/logs v0.0.0-20240207121836-c946f072f81f
github.com/chainreactors/parsers v0.0.0-20240208143911-65866d5bbc6d
github.com/chainreactors/utils v0.0.0-20231031063336-9477f1b23886
github.com/chainreactors/words v0.4.1-0.20240208114042-a1c5053345b0
github.com/gookit/config/v2 v2.2.5
github.com/gosuri/uiprogress v0.0.1 github.com/gosuri/uiprogress v0.0.1
github.com/jessevdk/go-flags v1.5.0 github.com/jessevdk/go-flags v1.5.0
github.com/panjf2000/ants/v2 v2.7.0 github.com/panjf2000/ants/v2 v2.7.0
github.com/valyala/fasthttp v1.43.0 github.com/valyala/fasthttp v1.43.0
golang.org/x/net v0.6.0
golang.org/x/time v0.3.0 golang.org/x/time v0.3.0
sigs.k8s.io/yaml v1.3.0
) )
require ( require (
dario.cat/mergo v1.0.0 // indirect
github.com/andybalholm/brotli v1.0.4 // indirect github.com/andybalholm/brotli v1.0.4 // indirect
github.com/fatih/color v1.14.1 // indirect
github.com/go-dedup/megophone v0.0.0-20170830025436-f01be21026f5 // indirect github.com/go-dedup/megophone v0.0.0-20170830025436-f01be21026f5 // indirect
github.com/go-dedup/simhash v0.0.0-20170904020510-9ecaca7b509c // indirect github.com/go-dedup/simhash v0.0.0-20170904020510-9ecaca7b509c // indirect
github.com/go-dedup/text v0.0.0-20170907015346-8bb1b95e3cb7 // indirect github.com/go-dedup/text v0.0.0-20170907015346-8bb1b95e3cb7 // indirect
github.com/goccy/go-yaml v1.11.2 // indirect
github.com/gookit/color v1.5.4 // indirect
github.com/gookit/goutil v0.6.15 // indirect
github.com/gosuri/uilive v0.0.4 // indirect github.com/gosuri/uilive v0.0.4 // indirect
github.com/klauspost/compress v1.15.10 // indirect github.com/klauspost/compress v1.15.10 // indirect
github.com/mattn/go-isatty v0.0.16 // indirect github.com/mattn/go-colorable v0.1.13 // indirect
github.com/twmb/murmur3 v1.1.6 // indirect github.com/mattn/go-isatty v0.0.17 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/twmb/murmur3 v1.1.8 // indirect
github.com/valyala/bytebufferpool v1.0.0 // indirect github.com/valyala/bytebufferpool v1.0.0 // indirect
golang.org/x/sys v0.2.0 // indirect github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect golang.org/x/sync v0.5.0 // indirect
golang.org/x/sys v0.15.0 // indirect
golang.org/x/term v0.15.0 // indirect
golang.org/x/text v0.14.0 // indirect
golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 // indirect
) )

114
go.sum
View File

@ -1,38 +1,39 @@
dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk=
dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk=
github.com/M09ic/go-ntlmssp v0.0.0-20230312133735-dcccd454dfe0/go.mod h1:yMNEF6ulbFipt3CakMhcmcNVACshPRG4Ap4l00V+mMs= github.com/M09ic/go-ntlmssp v0.0.0-20230312133735-dcccd454dfe0/go.mod h1:yMNEF6ulbFipt3CakMhcmcNVACshPRG4Ap4l00V+mMs=
github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY3JY= github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY3JY=
github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
github.com/antonmedv/expr v1.12.5 h1:Fq4okale9swwL3OeLLs9WD9H6GbgBLJyN/NUHRv+n0E= github.com/antonmedv/expr v1.12.5 h1:Fq4okale9swwL3OeLLs9WD9H6GbgBLJyN/NUHRv+n0E=
github.com/antonmedv/expr v1.12.5/go.mod h1:FPC8iWArxls7axbVLsW+kpg1mz29A1b2M6jt+hZfDkU= github.com/antonmedv/expr v1.12.5/go.mod h1:FPC8iWArxls7axbVLsW+kpg1mz29A1b2M6jt+hZfDkU=
github.com/chainreactors/files v0.2.0/go.mod h1:/Xa9YXhjBlaC33JTD6ZTJFig6pcplak2IDcovf42/6A= github.com/chainreactors/files v0.0.0-20230731174853-acee21c8c45a/go.mod h1:/Xa9YXhjBlaC33JTD6ZTJFig6pcplak2IDcovf42/6A=
github.com/chainreactors/files v0.2.3/go.mod h1:/Xa9YXhjBlaC33JTD6ZTJFig6pcplak2IDcovf42/6A= github.com/chainreactors/files v0.0.0-20231102192550-a652458cee26/go.mod h1:/Xa9YXhjBlaC33JTD6ZTJFig6pcplak2IDcovf42/6A=
github.com/chainreactors/files v0.2.5-0.20230310102018-3d10f74c7d6b h1:FRKGDHJrXrYfHnoehgE98vBoKvMpa/8/+d4wG0Zgpg4= github.com/chainreactors/files v0.0.0-20231123083421-cea5b4ad18a8 h1:8Plpi6haQbU8NzH+JtU6bkGDWF/OeC+GFj8DIDuY5yk=
github.com/chainreactors/files v0.2.5-0.20230310102018-3d10f74c7d6b/go.mod h1:/Xa9YXhjBlaC33JTD6ZTJFig6pcplak2IDcovf42/6A= github.com/chainreactors/files v0.0.0-20231123083421-cea5b4ad18a8/go.mod h1:/Xa9YXhjBlaC33JTD6ZTJFig6pcplak2IDcovf42/6A=
github.com/chainreactors/go-metrics v0.0.0-20220926021830-24787b7a10f8 h1:kMFr1Hj+rkp1wBPIw2pcQvelO5GnA7r7wY3h6vJ1joA= github.com/chainreactors/go-metrics v0.0.0-20220926021830-24787b7a10f8 h1:kMFr1Hj+rkp1wBPIw2pcQvelO5GnA7r7wY3h6vJ1joA=
github.com/chainreactors/go-metrics v0.0.0-20220926021830-24787b7a10f8/go.mod h1:7NDvFERNiXsujaBPD6s4WXj52uKdfnF2zVHQtKXIEV4= github.com/chainreactors/go-metrics v0.0.0-20220926021830-24787b7a10f8/go.mod h1:7NDvFERNiXsujaBPD6s4WXj52uKdfnF2zVHQtKXIEV4=
github.com/chainreactors/gogo/v2 v2.11.1-0.20230327070928-b5ff67ac46c7 h1:3G8ExdfyXiP83WOzYPIEComWu2ZqKmmqAQxdq92F+Gs= github.com/chainreactors/gogo/v2 v2.11.12-0.20231228061950-116583962e30 h1:Zh96ERETgkygSLUZ2NZ7Zi7lDcNf8jqImz+0aXCDsHY=
github.com/chainreactors/gogo/v2 v2.11.1-0.20230327070928-b5ff67ac46c7/go.mod h1:hhPu1b7UjMobE+4gAjevJ9ixQbvVK2Z3lKqoy9MPK/g= github.com/chainreactors/gogo/v2 v2.11.12-0.20231228061950-116583962e30/go.mod h1:XAGU3kpCiA3ZZzp/JS2kCigk9jIM3SC6NcOBdQ2DYa4=
github.com/chainreactors/ipcs v0.0.13 h1:TZww7XRr4qZPWqy9DjBzcJgxtSUwT4TAbcho4156bRI= github.com/chainreactors/logs v0.0.0-20231027080134-7a11bb413460/go.mod h1:VZFqkFDGmp7/JOMeraW+YI7kTGcgz9fgc/HArVFnrGQ=
github.com/chainreactors/ipcs v0.0.13/go.mod h1:E9M3Ohyq0TYQLlV4i2dbM9ThBZB1Nnd7Oexoie2xLII= github.com/chainreactors/logs v0.0.0-20231220102821-19f082ce37c1/go.mod h1:6Mv6W70JrtL6VClulZhmMRZnoYpcTahcDTKLMNEjK0o=
github.com/chainreactors/logs v0.6.1/go.mod h1:Y0EtAnoF0kiASIJUnXN0pcOt420iRpHOAnOhEphzRHA= github.com/chainreactors/logs v0.0.0-20240207121836-c946f072f81f h1:tcfp+CEdgiMvjyUzWab5edJtxUwRMSMEIkLybupIx0k=
github.com/chainreactors/logs v0.7.0/go.mod h1:Y0EtAnoF0kiASIJUnXN0pcOt420iRpHOAnOhEphzRHA= github.com/chainreactors/logs v0.0.0-20240207121836-c946f072f81f/go.mod h1:6Mv6W70JrtL6VClulZhmMRZnoYpcTahcDTKLMNEjK0o=
github.com/chainreactors/logs v0.7.1-0.20221214153111-85f123ff6580/go.mod h1:Y0EtAnoF0kiASIJUnXN0pcOt420iRpHOAnOhEphzRHA= github.com/chainreactors/neutron v0.0.0-20231221064706-fd6aaac9c50b/go.mod h1:Q6xCl+KaPtCDIziAHegFxdHOvg6DgpA6hcUWRnQKDPk=
github.com/chainreactors/logs v0.7.1-0.20230316032643-ed7d85ca234f h1:exuFhz7uiKPB/JTS9AcMuUwgs8nfJNz5eG9P6ObVwlM= github.com/chainreactors/parsers v0.0.0-20231218072716-fb441aff745f/go.mod h1:ZHEkgxKf9DXoley2LUjdJkiSw08MC3vcJTxfqwYt2LU=
github.com/chainreactors/logs v0.7.1-0.20230316032643-ed7d85ca234f/go.mod h1:Y0EtAnoF0kiASIJUnXN0pcOt420iRpHOAnOhEphzRHA= github.com/chainreactors/parsers v0.0.0-20231220104848-3a0b5a5bd8dc/go.mod h1:V2w16sBSSiBlmsDR4A0Q9PIk9+TP/6coTXv6olvTI6M=
github.com/chainreactors/neutron v0.0.0-20230227122754-80dc76323a1c/go.mod h1:GjZPKmcyVoQvngG+GBHxXbpXBcjIcvHGO9xj/VXRf3w= github.com/chainreactors/parsers v0.0.0-20240208143911-65866d5bbc6d h1:NFZLic9KNL1KdyvZFatRufXV9FJ3AXmKgTFQQ6Sz+Vk=
github.com/chainreactors/parsers v0.3.0/go.mod h1:Z9weht+lnFCk7UcwqFu6lXpS7u5vttiy0AJYOAyCCLA= github.com/chainreactors/parsers v0.0.0-20240208143911-65866d5bbc6d/go.mod h1:IS0hrYnccfJKU0NA12zdZk4mM7k/Qt4qnzMnFGBFLZI=
github.com/chainreactors/parsers v0.3.1-0.20230313041950-25d5f9059c79/go.mod h1:tA33N6UbYFnIT3k5tufOMfETxmEP20RZFyTSEnVXNUA= github.com/chainreactors/utils v0.0.0-20231031063336-9477f1b23886 h1:lS2T/uE9tg1MNDPrb44wawbNlD24zBlWoG0H+ZdwDAk=
github.com/chainreactors/parsers v0.3.1-0.20230403160559-9ed502452575 h1:uHE9O8x70FXwge5p68U/lGC9Xs8Leg8hWJR9PHKGzsk= github.com/chainreactors/utils v0.0.0-20231031063336-9477f1b23886/go.mod h1:JA4eiQZm+7AsfjXBcIzIdVKBEhDCb16eNtWFCGTxlvs=
github.com/chainreactors/parsers v0.3.1-0.20230403160559-9ed502452575/go.mod h1:tA33N6UbYFnIT3k5tufOMfETxmEP20RZFyTSEnVXNUA= github.com/chainreactors/words v0.4.1-0.20240208114042-a1c5053345b0 h1:7aAfDhZDLs6uiWNzYa68L4uzBX7ZIj7IT8v+AlmmpHw=
github.com/chainreactors/utils v0.0.14-0.20230314084720-a4d745cabc56 h1:1uhvEh7Of4fQJXRMsfGEZGy5NcETsM2yataQ0oYSw0k= github.com/chainreactors/words v0.4.1-0.20240208114042-a1c5053345b0/go.mod h1:DUDx7PdsMEm5PvVhzkFyppzpiUhQb8dOJaWjVc1SMVk=
github.com/chainreactors/utils v0.0.14-0.20230314084720-a4d745cabc56/go.mod h1:NKSu1V6EC4wa8QHtPfiJHlH9VjGfUQOx5HADK0xry3Y=
github.com/chainreactors/words v0.4.1-0.20230327065326-448a905ac8c2 h1:/v8gTORQIRJl2lgNt82OOeP/04QZyNTGKcmjfstVN5E=
github.com/chainreactors/words v0.4.1-0.20230327065326-448a905ac8c2/go.mod h1:QIWX1vMT5j/Mp9zx3/wgZh3FqskhjCbo/3Ffy/Hxj9w=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dsnet/compress v0.0.1/go.mod h1:Aw8dCMJ7RioblQeTqt88akK31OvO8Dhf5JflhBbQEHo= github.com/dsnet/compress v0.0.1/go.mod h1:Aw8dCMJ7RioblQeTqt88akK31OvO8Dhf5JflhBbQEHo=
github.com/dsnet/golib v0.0.0-20171103203638-1ea166775780/go.mod h1:Lj+Z9rebOhdfkVLjJ8T6VcRQv3SXugXy999NBtR9aFY= github.com/dsnet/golib v0.0.0-20171103203638-1ea166775780/go.mod h1:Lj+Z9rebOhdfkVLjJ8T6VcRQv3SXugXy999NBtR9aFY=
github.com/fatih/color v1.14.1 h1:qfhVLaG5s+nCROl1zJsZRxFeYrHLqWroPOQ8BWiNb4w=
github.com/fatih/color v1.14.1/go.mod h1:2oHN61fhTpgcxD3TSWCgKDiH1+x4OiDVVGH8WlgGZGg=
github.com/frankban/quicktest v1.14.4/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= github.com/frankban/quicktest v1.14.4/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
github.com/go-dedup/megophone v0.0.0-20170830025436-f01be21026f5 h1:4U+x+EB1P66zwYgTjxWXSOT8vF+651Ksr1lojiCZnT8= github.com/go-dedup/megophone v0.0.0-20170830025436-f01be21026f5 h1:4U+x+EB1P66zwYgTjxWXSOT8vF+651Ksr1lojiCZnT8=
github.com/go-dedup/megophone v0.0.0-20170830025436-f01be21026f5/go.mod h1:poR/Cp00iqtqu9ltFwl6C00sKC0HY13u/Gh05ZBmP54= github.com/go-dedup/megophone v0.0.0-20170830025436-f01be21026f5/go.mod h1:poR/Cp00iqtqu9ltFwl6C00sKC0HY13u/Gh05ZBmP54=
@ -40,8 +41,21 @@ github.com/go-dedup/simhash v0.0.0-20170904020510-9ecaca7b509c h1:mucYYQn+sMGNSx
github.com/go-dedup/simhash v0.0.0-20170904020510-9ecaca7b509c/go.mod h1:gO3u2bjRAgUaLdQd2XK+3oooxrheOAx1BzS7WmPzw1s= github.com/go-dedup/simhash v0.0.0-20170904020510-9ecaca7b509c/go.mod h1:gO3u2bjRAgUaLdQd2XK+3oooxrheOAx1BzS7WmPzw1s=
github.com/go-dedup/text v0.0.0-20170907015346-8bb1b95e3cb7 h1:11wFcswN+37U+ByjxdKzsRY5KzNqqq5Uk5ztxnLOc7w= github.com/go-dedup/text v0.0.0-20170907015346-8bb1b95e3cb7 h1:11wFcswN+37U+ByjxdKzsRY5KzNqqq5Uk5ztxnLOc7w=
github.com/go-dedup/text v0.0.0-20170907015346-8bb1b95e3cb7/go.mod h1:wSsK4VOECOSfSYTzkBFw+iGY7wj59e7X96ABtNj9aCQ= github.com/go-dedup/text v0.0.0-20170907015346-8bb1b95e3cb7/go.mod h1:wSsK4VOECOSfSYTzkBFw+iGY7wj59e7X96ABtNj9aCQ=
github.com/go-playground/locales v0.13.0 h1:HyWk6mgj5qFqCT5fjGBuRArbVDfE4hi8+e8ceBS/t7Q=
github.com/go-playground/universal-translator v0.17.0 h1:icxd5fm+REJzpZx7ZfpaD876Lmtgy7VtROAbHHXk8no=
github.com/go-playground/validator/v10 v10.4.1 h1:pH2c5ADXtd66mxoE0Zm9SUhxE20r7aM3F26W0hOn+GE=
github.com/goccy/go-yaml v1.11.2 h1:joq77SxuyIs9zzxEjgyLBugMQ9NEgTWxXfz2wVqwAaQ=
github.com/goccy/go-yaml v1.11.2/go.mod h1:wKnAMd44+9JAAnGQpWVEgBzGt3YuTaQ4uXoHvE4m7WU=
github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/gookit/color v1.5.4 h1:FZmqs7XOyGgCAxmWyPslpiok1k05wmY3SJTytgvYFs0=
github.com/gookit/color v1.5.4/go.mod h1:pZJOeOS8DM43rXbp4AZo1n9zCU2qjpcRko0b6/QJi9w=
github.com/gookit/config/v2 v2.2.5 h1:RECbYYbtherywmzn3LNeu9NA5ZqhD7MSKEMsJ7l+MpU=
github.com/gookit/config/v2 v2.2.5/go.mod h1:NeX+yiNYn6Ei10eJvCQFXuHEPIE/IPS8bqaFIsszzaM=
github.com/gookit/goutil v0.6.15 h1:mMQ0ElojNZoyPD0eVROk5QXJPh2uKR4g06slgPDF5Jo=
github.com/gookit/goutil v0.6.15/go.mod h1:qdKdYEHQdEtyH+4fNdQNZfJHhI0jUZzHxQVAV3DaMDY=
github.com/gookit/ini/v2 v2.2.3 h1:nSbN+x9OfQPcMObTFP+XuHt8ev6ndv/fWWqxFhPMu2E=
github.com/gosuri/uilive v0.0.4 h1:hUEBpQDj8D8jXgtCdBu7sWsy5sbW/5GhuO8KBwJ2jyY= github.com/gosuri/uilive v0.0.4 h1:hUEBpQDj8D8jXgtCdBu7sWsy5sbW/5GhuO8KBwJ2jyY=
github.com/gosuri/uilive v0.0.4/go.mod h1:V/epo5LjjlDE5RJUcqx8dbw+zc93y5Ya3yg8tfZ74VI= github.com/gosuri/uilive v0.0.4/go.mod h1:V/epo5LjjlDE5RJUcqx8dbw+zc93y5Ya3yg8tfZ74VI=
github.com/gosuri/uiprogress v0.0.1 h1:0kpv/XY/qTmFWl/SkaJykZXrBBzwwadmW8fRb7RJSxw= github.com/gosuri/uiprogress v0.0.1 h1:0kpv/XY/qTmFWl/SkaJykZXrBBzwwadmW8fRb7RJSxw=
@ -56,9 +70,15 @@ github.com/klauspost/compress v1.15.10/go.mod h1:QPwzmACJjUTFsnSHH934V6woptycfrD
github.com/klauspost/cpuid v1.2.0/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek= github.com/klauspost/cpuid v1.2.0/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/mattn/go-isatty v0.0.16 h1:bq3VjFmv/sOjHtdEhmkEV4x1AJtvUvOJ2PFAZ5+peKQ= github.com/leodido/go-urn v1.2.0 h1:hpXL4XnriNwQ/ABnpepYM/1vCLWNDfUNts8dX3xTG6Y=
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.17 h1:BTarxUcIeDqL27Mc+vyvdWYSL28zpIhv3RoTdsLMPng=
github.com/mattn/go-isatty v0.0.17/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mholt/archiver v3.1.1+incompatible/go.mod h1:Dh2dOXnSdiLxRiPoVfIr/fI1TwETms9B8CTWfeh7ROU= github.com/mholt/archiver v3.1.1+incompatible/go.mod h1:Dh2dOXnSdiLxRiPoVfIr/fI1TwETms9B8CTWfeh7ROU=
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/nwaples/rardecode v1.1.3/go.mod h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0= github.com/nwaples/rardecode v1.1.3/go.mod h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0=
github.com/panjf2000/ants/v2 v2.5.0/go.mod h1:cU93usDlihJZ5CfRGNDYsiBYvoilLvBF5Qp/BT2GNRE= github.com/panjf2000/ants/v2 v2.5.0/go.mod h1:cU93usDlihJZ5CfRGNDYsiBYvoilLvBF5Qp/BT2GNRE=
github.com/panjf2000/ants/v2 v2.7.0 h1:Y3Bgpfo9HDkBoHNVFbMfY5mAvi5TAA17y3HbzQ74p5Y= github.com/panjf2000/ants/v2 v2.7.0 h1:Y3Bgpfo9HDkBoHNVFbMfY5mAvi5TAA17y3HbzQ74p5Y=
@ -73,10 +93,10 @@ github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSS
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk=
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/twmb/murmur3 v1.1.6 h1:mqrRot1BRxm+Yct+vavLMou2/iJt0tNVTTC0QoIjaZg= github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
github.com/twmb/murmur3 v1.1.6/go.mod h1:Qq/R7NUyOfr65zD+6Q5IHKsJLwP7exErjN6lyyq3OSQ= github.com/twmb/murmur3 v1.1.8 h1:8Yt9taO/WN3l08xErzjeschgZU2QSrwm1kclYq+0aRg=
github.com/twmb/murmur3 v1.1.8/go.mod h1:Qq/R7NUyOfr65zD+6Q5IHKsJLwP7exErjN6lyyq3OSQ=
github.com/ulikunitz/xz v0.5.6/go.mod h1:2bypXElzHzzJZwzH67Y6wb67pO62Rzfn7BSiF4ABRW8= github.com/ulikunitz/xz v0.5.6/go.mod h1:2bypXElzHzzJZwzH67Y6wb67pO62Rzfn7BSiF4ABRW8=
github.com/ulikunitz/xz v0.5.11/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= github.com/ulikunitz/xz v0.5.11/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
@ -85,15 +105,31 @@ github.com/valyala/fasthttp v1.43.0 h1:Gy4sb32C98fbzVWZlTM1oTMdLWGyvxR03VhM6cBIU
github.com/valyala/fasthttp v1.43.0/go.mod h1:f6VbjjoI3z1NDOZOv17o6RvtRSWxC77seBFc2uWtgiY= github.com/valyala/fasthttp v1.43.0/go.mod h1:f6VbjjoI3z1NDOZOv17o6RvtRSWxC77seBFc2uWtgiY=
github.com/valyala/tcplisten v1.0.0/go.mod h1:T0xQ8SeCZGxckz9qRXTfG43PvQ/mcWh7FwZEA7Ioqkc= github.com/valyala/tcplisten v1.0.0/go.mod h1:T0xQ8SeCZGxckz9qRXTfG43PvQ/mcWh7FwZEA7Ioqkc=
github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8/go.mod h1:HUYIGzjTL3rfEspMxjDjgmT5uz5wzYJKVo23qUhYTos= github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8/go.mod h1:HUYIGzjTL3rfEspMxjDjgmT5uz5wzYJKVo23qUhYTos=
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no=
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20200604202706-70a84ac30bf9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200604202706-70a84ac30bf9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20220214200702-86341886e292/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.0.0-20220214200702-86341886e292/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.7.0 h1:AvwMYaRytfdeVt3u6mLaxYtErKYjxA2OXjJ1HHq6t3A=
golang.org/x/exp v0.0.0-20220909182711-5c715a9e8561 h1:MDc5xs78ZrZr3HMQugiXOAkSZtfTpbJLDr/lwfgO53E=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.0.0-20220906165146-f3363e06e74c/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= golang.org/x/net v0.0.0-20220906165146-f3363e06e74c/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk=
golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o= golang.org/x/net v0.6.0 h1:L4ZwwTvKW9gr0ZMS1yrHD9GZhIuVjOBBnaKH+SPQK0Q=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.5.0 h1:60k92dhOjHxJkrqnwsfl8KuaHbn/5dl0lUPUklKo3qE=
golang.org/x/sync v0.5.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@ -101,25 +137,41 @@ golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220209214540-3681064d5158/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220209214540-3681064d5158/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.2.0 h1:ljd4t30dBnAvMZaQCevtY0xLLD0A+bRZXbgLMLU1F/A= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.15.0 h1:h48lPFYpsTvQJZF4EKyI4aLHaev3CxivZmv7yZig9pc=
golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.15.0 h1:y/Oo/a/q3IXu26lQgl04j/gjuBDOBlx7X6Om1j2CPW4=
golang.org/x/term v0.15.0/go.mod h1:BDl952bC7+uMoWR75FIrCDx79TPU9oHkTZ9yRbYOrX0=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/time v0.3.0 h1:rg5rLMjNzMS1RkNLzCG38eapWhnYLFYXDXj2gOlr8j4= golang.org/x/time v0.3.0 h1:rg5rLMjNzMS1RkNLzCG38eapWhnYLFYXDXj2gOlr8j4=
golang.org/x/time v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 h1:H2TDz8ibqkAF6YGhCdN3jS9O0/s90v0rJh3X/OLHEUk=
golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
sigs.k8s.io/yaml v1.3.0 h1:a2VclLzOGrwOHDiV8EfBGhvjHvP46CtW5j6POvhYGGo=
sigs.k8s.io/yaml v1.3.0/go.mod h1:GeOyir5tyXNByN85N/dRIT9es5UQNerPYEKK56eTBm8= sigs.k8s.io/yaml v1.3.0/go.mod h1:GeOyir5tyXNByN85N/dRIT9es5UQNerPYEKK56eTBm8=
sigs.k8s.io/yaml v1.4.0 h1:Mk1wCc2gy/F0THH0TAp1QYyJNzRm2KCLy3o5ASXVI5E=
sigs.k8s.io/yaml v1.4.0/go.mod h1:Ejl7/uTz7PSA4eKMyQCUTnhZYNmLIl+5c2lQPGR2BPY=

146
internal/config.go Normal file
View File

@ -0,0 +1,146 @@
package internal
import (
"fmt"
"github.com/goccy/go-yaml"
"github.com/gookit/config/v2"
"reflect"
"strconv"
)
//var (
// defaultConfigPath = ".config/spray/"
// defaultConfigFile = "config.yaml"
//)
//
//func LoadDefault(v interface{}) {
// dir, err := os.UserHomeDir()
// if err != nil {
// logs.Log.Error(err.Error())
// return
// }
// if !files.IsExist(filepath.Join(dir, defaultConfigPath, defaultConfigFile)) {
// err := os.MkdirAll(filepath.Join(dir, defaultConfigPath), 0o700)
// if err != nil {
// logs.Log.Error(err.Error())
// return
// }
// f, err := os.Create(filepath.Join(dir, defaultConfigPath, defaultConfigFile))
// if err != nil {
// logs.Log.Error(err.Error())
// return
// }
// err = LoadConfig(filepath.Join(dir, defaultConfigPath, defaultConfigFile), v)
// if err != nil {
// logs.Log.Error(err.Error())
// return
// }
// var buf bytes.Buffer
// _, err = config.DumpTo(&buf, config.Yaml)
// if err != nil {
// logs.Log.Error(err.Error())
// return
// }
// fmt.Println(buf.String())
// f.Sync()
// }
//}
// LoadConfig reads the config file at filename via the gookit/config
// loader and decodes its contents into v. v must be a pointer so the
// decoded values are visible to the caller.
//
// Errors are wrapped with the filename so failures surfaced far from the
// call site still identify which file was at fault.
func LoadConfig(filename string, v interface{}) error {
	if err := config.LoadFiles(filename); err != nil {
		return fmt.Errorf("loading config %q: %w", filename, err)
	}
	if err := config.Decode(v); err != nil {
		return fmt.Errorf("decoding config %q: %w", filename, err)
	}
	return nil
}
func convertToFieldType(fieldType reflect.StructField, defaultVal string) interface{} {
switch fieldType.Type.Kind() {
case reflect.Bool:
val, err := strconv.ParseBool(defaultVal)
if err == nil {
return val
}
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
val, err := strconv.ParseInt(defaultVal, 10, 64)
if err == nil {
return val
}
case reflect.Float32, reflect.Float64:
val, err := strconv.ParseFloat(defaultVal, 64)
if err == nil {
return val
}
case reflect.String:
return defaultVal
// 可以根据需要扩展其他类型
}
return nil // 如果转换失败或类型不受支持返回nil
}
func setFieldValue(field reflect.Value) interface{} {
switch field.Kind() {
case reflect.Bool:
return false
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return 0
case reflect.Float32, reflect.Float64:
return 0.0
case reflect.Slice, reflect.Array:
return []interface{}{} // 返回一个空切片
case reflect.String:
return ""
case reflect.Struct:
return make(map[string]interface{})
default:
return nil
}
}
// extractConfigAndDefaults walks the fields of struct value v and fills
// result with one entry per field that carries a `config` tag. The entry's
// value comes from the `default` tag when present (parsed with
// convertToFieldType), otherwise from setFieldValue. Fields of struct kind
// recurse into a nested map stored under their config tag.
func extractConfigAndDefaults(v reflect.Value, result map[string]interface{}) {
	structType := v.Type()
	for i := 0; i < structType.NumField(); i++ {
		meta := structType.Field(i)
		key := meta.Tag.Get("config")
		if key == "" {
			continue
		}
		fieldVal := v.Field(i)
		if fieldVal.Kind() == reflect.Struct {
			nested := make(map[string]interface{})
			extractConfigAndDefaults(fieldVal, nested)
			result[key] = nested
			continue
		}
		if def := meta.Tag.Get("default"); def != "" {
			result[key] = convertToFieldType(meta, def)
		} else {
			result[key] = setFieldValue(fieldVal)
		}
	}
}
// initDefaultConfig renders the default configuration of cfg as a YAML
// document, built from the `config` and `default` struct tags.
//
// cfg may be a struct or a non-nil pointer to one; pointers are
// dereferenced so callers holding e.g. *Option work without change.
// It returns an error for non-struct input or when YAML marshaling fails.
func initDefaultConfig(cfg interface{}) (string, error) {
	v := reflect.ValueOf(cfg)
	// Accept pointer input: callers commonly pass &opt rather than opt.
	for v.Kind() == reflect.Ptr {
		if v.IsNil() {
			return "", fmt.Errorf("expected a struct, got nil pointer")
		}
		v = v.Elem()
	}
	if v.Kind() != reflect.Struct {
		return "", fmt.Errorf("expected a struct, got %s", v.Kind())
	}
	result := make(map[string]interface{})
	extractConfigAndDefaults(v, result)
	yamlData, err := yaml.Marshal(result)
	if err != nil {
		return "", err
	}
	return string(yamlData), nil
}

View File

@ -4,8 +4,14 @@ import (
"context" "context"
"crypto/tls" "crypto/tls"
"fmt" "fmt"
"github.com/chainreactors/logs"
"github.com/valyala/fasthttp" "github.com/valyala/fasthttp"
"github.com/valyala/fasthttp/fasthttpproxy"
"golang.org/x/net/proxy"
"net"
"net/http" "net/http"
"net/url"
"strings"
"time" "time"
) )
@ -19,27 +25,27 @@ const (
STANDARD STANDARD
) )
func NewClient(thread int, timeout int, clientType int) *Client { func NewClient(config *ClientConfig) *Client {
if clientType == FAST { if config.Type == FAST {
return &Client{ return &Client{
fastClient: &fasthttp.Client{ fastClient: &fasthttp.Client{
TLSConfig: &tls.Config{ TLSConfig: &tls.Config{
Renegotiation: tls.RenegotiateOnceAsClient, Renegotiation: tls.RenegotiateOnceAsClient,
InsecureSkipVerify: true, InsecureSkipVerify: true,
}, },
MaxConnsPerHost: thread * 3 / 2, Dial: customDialFunc(config.ProxyAddr, config.Timeout),
MaxIdleConnDuration: time.Duration(timeout) * time.Second, MaxConnsPerHost: config.Thread * 3 / 2,
MaxConnWaitTimeout: time.Duration(timeout) * time.Second, MaxIdleConnDuration: config.Timeout,
ReadTimeout: time.Duration(timeout) * time.Second, //MaxConnWaitTimeout: time.Duration(timeout) * time.Second,
WriteTimeout: time.Duration(timeout) * time.Second, //ReadTimeout: time.Duration(timeout) * time.Second,
//WriteTimeout: time.Duration(timeout) * time.Second,
ReadBufferSize: 16384, // 16k ReadBufferSize: 16384, // 16k
MaxResponseBodySize: DefaultMaxBodySize, MaxResponseBodySize: DefaultMaxBodySize,
NoDefaultUserAgentHeader: true, NoDefaultUserAgentHeader: true,
DisablePathNormalizing: true, DisablePathNormalizing: true,
DisableHeaderNamesNormalizing: true, DisableHeaderNamesNormalizing: true,
}, },
timeout: time.Duration(timeout) * time.Second, Config: config,
clientType: clientType,
} }
} else { } else {
return &Client{ return &Client{
@ -51,27 +57,34 @@ func NewClient(thread int, timeout int, clientType int) *Client {
Renegotiation: tls.RenegotiateOnceAsClient, Renegotiation: tls.RenegotiateOnceAsClient,
InsecureSkipVerify: true, InsecureSkipVerify: true,
}, },
TLSHandshakeTimeout: time.Duration(timeout) * time.Second, MaxConnsPerHost: config.Thread * 3 / 2,
MaxConnsPerHost: thread * 3 / 2, IdleConnTimeout: config.Timeout,
IdleConnTimeout: time.Duration(timeout) * time.Second, ReadBufferSize: 16384, // 16k
ReadBufferSize: 16384, // 16k Proxy: func(_ *http.Request) (*url.URL, error) {
return url.Parse(config.ProxyAddr)
},
}, },
Timeout: time.Second * time.Duration(timeout), Timeout: config.Timeout,
CheckRedirect: func(req *http.Request, via []*http.Request) error { CheckRedirect: func(req *http.Request, via []*http.Request) error {
return http.ErrUseLastResponse return http.ErrUseLastResponse
}, },
}, },
timeout: time.Duration(timeout) * time.Second, Config: config,
clientType: clientType,
} }
} }
} }
type ClientConfig struct {
Type int
Timeout time.Duration
Thread int
ProxyAddr string
}
type Client struct { type Client struct {
fastClient *fasthttp.Client fastClient *fasthttp.Client
standardClient *http.Client standardClient *http.Client
clientType int Config *ClientConfig
timeout time.Duration
} }
func (c *Client) TransToCheck() { func (c *Client) TransToCheck() {
@ -103,3 +116,41 @@ func (c *Client) Do(ctx context.Context, req *Request) (*Response, error) {
return nil, fmt.Errorf("not found client") return nil, fmt.Errorf("not found client")
} }
} }
func customDialFunc(proxyAddr string, timeout time.Duration) fasthttp.DialFunc {
if proxyAddr == "" {
return func(addr string) (net.Conn, error) {
return fasthttp.DialTimeout(addr, timeout)
}
}
u, err := url.Parse(proxyAddr)
if err != nil {
logs.Log.Error(err.Error())
return nil
}
if strings.ToLower(u.Scheme) == "socks5" {
return func(addr string) (net.Conn, error) {
dialer, err := proxy.SOCKS5("tcp", u.Host, nil, proxy.Direct)
if err != nil {
return nil, err
}
// Set up a connection with a timeout
conn, err := dialer.Dial("tcp", addr)
if err != nil {
return nil, err
}
// Set deadlines for the connection
deadline := time.Now().Add(timeout)
if err := conn.SetDeadline(deadline); err != nil {
conn.Close()
return nil, err
}
return conn, nil
}
} else {
return fasthttpproxy.FasthttpHTTPDialerTimeout(proxyAddr, timeout)
}
}

View File

@ -1,14 +1,17 @@
package internal package internal
import ( import (
"bytes"
"errors"
"fmt" "fmt"
"github.com/antonmedv/expr" "github.com/antonmedv/expr"
"github.com/chainreactors/files" "github.com/chainreactors/files"
"github.com/chainreactors/logs" "github.com/chainreactors/logs"
"github.com/chainreactors/parsers/iutils" "github.com/chainreactors/spray/internal/ihttp"
"github.com/chainreactors/spray/internal/pool"
"github.com/chainreactors/spray/pkg" "github.com/chainreactors/spray/pkg"
"github.com/chainreactors/spray/pkg/ihttp"
"github.com/chainreactors/utils" "github.com/chainreactors/utils"
"github.com/chainreactors/utils/iutils"
"github.com/chainreactors/words/mask" "github.com/chainreactors/words/mask"
"github.com/chainreactors/words/rule" "github.com/chainreactors/words/rule"
"github.com/gosuri/uiprogress" "github.com/gosuri/uiprogress"
@ -17,128 +20,132 @@ import (
"os" "os"
"strconv" "strconv"
"strings" "strings"
"sync"
) )
var ( var (
DefaultThreads = 20 DefaultThreads = 20
//DefaultTimeout = 5
//DefaultPoolSize = 5
//DefaultRateLimit = 0
) )
type Option struct { type Option struct {
InputOptions `group:"Input Options"` InputOptions `group:"Input Options" config:"input" default:""`
FunctionOptions `group:"Function Options"` FunctionOptions `group:"Function Options" config:"functions" default:""`
OutputOptions `group:"Output Options"` OutputOptions `group:"Output Options" config:"output"`
PluginOptions `group:"Plugin Options"` PluginOptions `group:"Plugin Options" config:"plugins"`
RequestOptions `group:"Request Options"` RequestOptions `group:"Request Options" config:"request"`
ModeOptions `group:"Modify Options"` ModeOptions `group:"Modify Options" config:"mode"`
MiscOptions `group:"Miscellaneous Options"` MiscOptions `group:"Miscellaneous Options" config:"misc"`
} }
type InputOptions struct { type InputOptions struct {
ResumeFrom string `long:"resume"` ResumeFrom string `long:"resume" description:"File, resume filename" `
URL []string `short:"u" long:"url" description:"Strings, input baseurl, e.g.: http://google.com"` Config string `short:"c" long:"config" description:"File, config filename"`
URLFile string `short:"l" long:"list" description:"File, input filename"` URL []string `short:"u" long:"url" description:"Strings, input baseurl, e.g.: http://google.com"`
PortRange string `short:"p" long:"port" description:"String, input port range, e.g.: 80,8080-8090,db"` URLFile string `short:"l" long:"list" description:"File, input filename"`
CIDRs string `short:"c" long:"cidr" description:"String, input cidr, e.g.: 1.1.1.1/24 "` PortRange string `short:"p" long:"port" description:"String, input port range, e.g.: 80,8080-8090,db"`
Raw string `long:"raw" description:"File, input raw request filename"` CIDRs string `long:"cidr" description:"String, input cidr, e.g.: 1.1.1.1/24 "`
Dictionaries []string `short:"d" long:"dict" description:"Files, Multi,dict files, e.g.: -d 1.txt -d 2.txt"` //Raw string `long:"raw" description:"File, input raw request filename"`
Dictionaries []string `short:"d" long:"dict" description:"Files, Multi,dict files, e.g.: -d 1.txt -d 2.txt" config:"dictionaries"`
Offset int `long:"offset" description:"Int, wordlist offset"` Offset int `long:"offset" description:"Int, wordlist offset"`
Limit int `long:"limit" description:"Int, wordlist limit, start with offset. e.g.: --offset 1000 --limit 100"` Limit int `long:"limit" description:"Int, wordlist limit, start with offset. e.g.: --offset 1000 --limit 100"`
Word string `short:"w" long:"word" description:"String, word generate dsl, e.g.: -w test{?ld#4}"` Word string `short:"w" long:"word" description:"String, word generate dsl, e.g.: -w test{?ld#4}" config:"word"`
Rules []string `short:"r" long:"rules" description:"Files, rule files, e.g.: -r rule1.txt -r rule2.txt"` Rules []string `short:"r" long:"rules" description:"Files, rule files, e.g.: -r rule1.txt -r rule2.txt" config:"rules"`
AppendRule []string `long:"append-rule" description:"Files, when found valid path , use append rule generator new word with current path"` AppendRule []string `long:"append-rule" description:"Files, when found valid path , use append rule generator new word with current path" config:"append-rules"`
FilterRule string `long:"filter-rule" description:"String, filter rule, e.g.: --rule-filter '>8 <4'"` FilterRule string `long:"filter-rule" description:"String, filter rule, e.g.: --rule-filter '>8 <4'" config:"filter-rule"`
AppendFile []string `long:"append-file" description:"Files, when found valid path , use append file new word with current path" config:"append-files"`
} }
type FunctionOptions struct { type FunctionOptions struct {
Extensions string `short:"e" long:"extension" description:"String, add extensions (separated by commas), e.g.: -e jsp,jspx"` Extensions string `short:"e" long:"extension" description:"String, add extensions (separated by commas), e.g.: -e jsp,jspx" config:"extension"`
ExcludeExtensions string `long:"exclude-extension" description:"String, exclude extensions (separated by commas), e.g.: --exclude-extension jsp,jspx"` ForceExtension bool `long:"force-extension" description:"Bool, force add extensions" config:"force-extension"`
RemoveExtensions string `long:"remove-extension" description:"String, remove extensions (separated by commas), e.g.: --remove-extension jsp,jspx"` ExcludeExtensions string `long:"exclude-extension" description:"String, exclude extensions (separated by commas), e.g.: --exclude-extension jsp,jspx" config:"exclude-extension"`
Uppercase bool `short:"U" long:"uppercase" description:"Bool, upper wordlist, e.g.: --uppercase"` RemoveExtensions string `long:"remove-extension" description:"String, remove extensions (separated by commas), e.g.: --remove-extension jsp,jspx" config:"remove-extension"`
Lowercase bool `short:"L" long:"lowercase" description:"Bool, lower wordlist, e.g.: --lowercase"` Uppercase bool `short:"U" long:"uppercase" description:"Bool, upper wordlist, e.g.: --uppercase" config:"upper"`
Prefixes []string `long:"prefix" description:"Strings, add prefix, e.g.: --prefix aaa --prefix bbb"` Lowercase bool `short:"L" long:"lowercase" description:"Bool, lower wordlist, e.g.: --lowercase" config:"lower"`
Suffixes []string `long:"suffix" description:"Strings, add suffix, e.g.: --suffix aaa --suffix bbb"` Prefixes []string `long:"prefix" description:"Strings, add prefix, e.g.: --prefix aaa --prefix bbb" config:"prefix"`
Replaces map[string]string `long:"replace" description:"Strings, replace string, e.g.: --replace aaa:bbb --replace ccc:ddd"` Suffixes []string `long:"suffix" description:"Strings, add suffix, e.g.: --suffix aaa --suffix bbb" config:"suffix"`
Replaces map[string]string `long:"replace" description:"Strings, replace string, e.g.: --replace aaa:bbb --replace ccc:ddd" config:"replace"`
Skips []string `long:"skip" description:"String, skip word when generate. rule, e.g.: --skip aaa" config:"skip"`
//SkipEval string `long:"skip-eval" description:"String, skip word when generate. rule, e.g.: --skip-eval 'current.Length < 4'"`
} }
type OutputOptions struct { type OutputOptions struct {
Match string `long:"match" description:"String, custom match function, e.g.: --match 'current.Status != 200''" json:"match,omitempty"` Match string `long:"match" description:"String, custom match function, e.g.: --match 'current.Status != 200''" config:"match" `
Filter string `long:"filter" description:"String, custom filter function, e.g.: --filter 'current.Body contains \"hello\"'" json:"filter,omitempty"` Filter string `long:"filter" description:"String, custom filter function, e.g.: --filter 'current.Body contains \"hello\"'" config:"filter"`
OutputFile string `short:"f" long:"file" description:"String, output filename" json:"output_file,omitempty"` Fuzzy bool `long:"fuzzy" description:"String, open fuzzy output" config:"fuzzy"`
Format string `short:"F" long:"format" description:"String, output format, e.g.: --format 1.json"` OutputFile string `short:"f" long:"file" description:"String, output filename" json:"output_file,omitempty" config:"output-file"`
FuzzyFile string `long:"fuzzy-file" description:"String, fuzzy output filename" json:"fuzzy_file,omitempty"` FuzzyFile string `long:"fuzzy-file" description:"String, fuzzy output filename" json:"fuzzy_file,omitempty" config:"fuzzy-file"`
DumpFile string `long:"dump-file" description:"String, dump all request, and write to filename"` DumpFile string `long:"dump-file" description:"String, dump all request, and write to filename" config:"dump-file"`
Dump bool `long:"dump" description:"Bool, dump all request"` Dump bool `long:"dump" description:"Bool, dump all request" config:"dump"`
AutoFile bool `long:"auto-file" description:"Bool, auto generator output and fuzzy filename" ` AutoFile bool `long:"auto-file" description:"Bool, auto generator output and fuzzy filename" config:"auto-file"`
Fuzzy bool `long:"fuzzy" description:"String, open fuzzy output" json:"fuzzy,omitempty"` Format string `short:"F" long:"format" description:"String, output format, e.g.: --format 1.json" config:"format"`
OutputProbe string `short:"o" long:"probe" description:"String, output format" json:"output_probe,omitempty"` OutputProbe string `short:"o" long:"probe" description:"String, output format" config:"output_probe"`
} }
type RequestOptions struct { type RequestOptions struct {
Headers []string `long:"header" description:"Strings, custom headers, e.g.: --headers 'Auth: example_auth'"` Headers []string `long:"header" description:"Strings, custom headers, e.g.: --headers 'Auth: example_auth'" config:"headers"`
UserAgent string `long:"user-agent" description:"String, custom user-agent, e.g.: --user-agent Custom"` UserAgent string `long:"user-agent" description:"String, custom user-agent, e.g.: --user-agent Custom" config:"useragent"`
RandomUserAgent bool `long:"random-agent" description:"Bool, use random with default user-agent"` RandomUserAgent bool `long:"random-agent" description:"Bool, use random with default user-agent" config:"random-useragent"`
Cookie []string `long:"cookie" description:"Strings, custom cookie"` Cookie []string `long:"cookie" description:"Strings, custom cookie" config:"cookies"`
ReadAll bool `long:"read-all" description:"Bool, read all response body"` ReadAll bool `long:"read-all" description:"Bool, read all response body" config:"read-all"`
MaxBodyLength int `long:"max-length" default:"100" description:"Int, max response body length (kb), default 100k, e.g. -max-length 1000"` MaxBodyLength int `long:"max-length" default:"100" description:"Int, max response body length (kb), default 100k, e.g. -max-length 1000" config:"max-body-length"`
} }
type PluginOptions struct { type PluginOptions struct {
Advance bool `short:"a" long:"advance" description:"Bool, enable crawl and active"` Advance bool `short:"a" long:"advance" description:"Bool, enable all plugin" config:"all" `
Extracts []string `long:"extract" description:"Strings, extract response, e.g.: --extract js --extract ip --extract version:(.*?)"` Extracts []string `long:"extract" description:"Strings, extract response, e.g.: --extract js --extract ip --extract version:(.*?)" config:"extract"`
Recon bool `long:"recon" description:"Bool, enable recon"` Recon bool `long:"recon" description:"Bool, enable recon" config:"recon"`
Active bool `long:"active" description:"Bool, enable active finger detect"` Finger bool `long:"finger" description:"Bool, enable active finger detect" config:"finger"`
Bak bool `long:"bak" description:"Bool, enable bak found"` Bak bool `long:"bak" description:"Bool, enable bak found" config:"bak"`
FileBak bool `long:"file-bak" description:"Bool, enable valid result bak found, equal --append-rule rule/filebak.txt"` FileBak bool `long:"file-bak" description:"Bool, enable valid result bak found, equal --append-rule rule/filebak.txt" config:"file-bak"`
Common bool `long:"common" description:"Bool, enable common file found"` Common bool `long:"common" description:"Bool, enable common file found" config:"common"`
Crawl bool `long:"crawl" description:"Bool, enable crawl"` Crawl bool `long:"crawl" description:"Bool, enable crawl" config:"crawl"`
CrawlDepth int `long:"crawl-depth" default:"3" description:"Int, crawl depth"` CrawlDepth int `long:"crawl-depth" default:"3" description:"Int, crawl depth" config:"crawl-depth"`
} }
type ModeOptions struct { type ModeOptions struct {
RateLimit int `long:"rate-limit" default:"0" description:"Int, request rate limit (rate/s), e.g.: --rate-limit 100"` RateLimit int `long:"rate-limit" default:"0" description:"Int, request rate limit (rate/s), e.g.: --rate-limit 100" config:"rate-limit"`
Force bool `long:"force" description:"Bool, skip error break"` Force bool `long:"force" description:"Bool, skip error break" config:"force"`
CheckOnly bool `long:"check-only" description:"Bool, check only"` CheckOnly bool `long:"check-only" description:"Bool, check only" config:"check-only"`
NoScope bool `long:"no-scope" description:"Bool, no scope"` NoScope bool `long:"no-scope" description:"Bool, no scope" config:"no-scope"`
Scope []string `long:"scope" description:"String, custom scope, e.g.: --scope *.example.com"` Scope []string `long:"scope" description:"String, custom scope, e.g.: --scope *.example.com" config:"scope"`
Recursive string `long:"recursive" default:"current.IsDir()" description:"String,custom recursive rule, e.g.: --recursive current.IsDir()"` Recursive string `long:"recursive" default:"current.IsDir()" description:"String,custom recursive rule, e.g.: --recursive current.IsDir()" config:"recursive"`
Depth int `long:"depth" default:"0" description:"Int, recursive depth"` Depth int `long:"depth" default:"0" description:"Int, recursive depth" config:"depth"`
Index string `long:"index" default:"" description:"String, custom index path"` Index string `long:"index" default:"/" description:"String, custom index path" config:"index"`
Random string `long:"random" default:"" description:"String, custom random path"` Random string `long:"random" default:"" description:"String, custom random path" config:"random"`
CheckPeriod int `long:"check-period" default:"200" description:"Int, check period when request"` CheckPeriod int `long:"check-period" default:"200" description:"Int, check period when request" config:"check-period"`
ErrPeriod int `long:"error-period" default:"10" description:"Int, check period when error"` ErrPeriod int `long:"error-period" default:"10" description:"Int, check period when error" config:"error-period"`
BreakThreshold int `long:"error-threshold" default:"20" description:"Int, break when the error exceeds the threshold "` BreakThreshold int `long:"error-threshold" default:"20" description:"Int, break when the error exceeds the threshold" config:"error-threshold"`
BlackStatus string `long:"black-status" default:"400,410" description:"Strings (comma split),custom black status, "` BlackStatus string `long:"black-status" default:"400,410" description:"Strings (comma split),custom black status" config:"black-status"`
WhiteStatus string `long:"white-status" default:"200" description:"Strings (comma split), custom white status"` WhiteStatus string `long:"white-status" default:"200" description:"Strings (comma split), custom white status" config:"white-status"`
FuzzyStatus string `long:"fuzzy-status" default:"404,403,500,501,502,503" description:"Strings (comma split), custom fuzzy status"` FuzzyStatus string `long:"fuzzy-status" default:"500,501,502,503" description:"Strings (comma split), custom fuzzy status" config:"fuzzy-status"`
UniqueStatus string `long:"unique-status" default:"403" description:"Strings (comma split), custom unique status"` UniqueStatus string `long:"unique-status" default:"403,200,404" description:"Strings (comma split), custom unique status" config:"unique-status"`
Unique bool `long:"unique" description:"Bool, unique response"` Unique bool `long:"unique" description:"Bool, unique response" config:"unique"`
RetryCount int `long:"retry" default:"1" description:"Int, retry count"` RetryCount int `long:"retry" default:"0" description:"Int, retry count" config:"retry"`
SimhashDistance int `long:"distance" default:"5"` SimhashDistance int `long:"distance" default:"5" config:"distance"`
} }
type MiscOptions struct { type MiscOptions struct {
Deadline int `long:"deadline" default:"999999" description:"Int, deadline (seconds)"` // todo 总的超时时间,适配云函数的deadline Mod string `short:"m" long:"mod" default:"path" choice:"path" choice:"host" description:"String, path/host spray" config:"mod"`
Timeout int `long:"timeout" default:"5" description:"Int, timeout with request (seconds)"` Client string `short:"C" long:"client" default:"auto" choice:"fast" choice:"standard" choice:"auto" description:"String, Client type" config:"client"`
PoolSize int `short:"P" long:"pool" default:"5" description:"Int, Pool size"` Deadline int `long:"deadline" default:"999999" description:"Int, deadline (seconds)" config:"deadline"` // todo 总的超时时间,适配云函数的deadline
Threads int `short:"t" long:"thread" default:"20" description:"Int, number of threads per pool"` Timeout int `long:"timeout" default:"5" description:"Int, timeout with request (seconds)" config:"timeout"`
Debug bool `long:"debug" description:"Bool, output debug info"` PoolSize int `short:"P" long:"pool" default:"5" description:"Int, Pool size" config:"pool"`
Version bool `short:"v" long:"version" description:"Bool, show version"` Threads int `short:"t" long:"thread" default:"20" description:"Int, number of threads per pool" config:"thread"`
Quiet bool `short:"q" long:"quiet" description:"Bool, Quiet"` Debug bool `long:"debug" description:"Bool, output debug info" config:"debug"`
NoColor bool `long:"no-color" description:"Bool, no color"` Version bool `long:"version" description:"Bool, show version"`
NoBar bool `long:"no-bar" description:"Bool, No progress bar"` Verbose []bool `short:"v" description:"Bool, log verbose level ,default 0, level1: -v level2 -vv " config:"verbose"`
Mod string `short:"m" long:"mod" default:"path" choice:"path" choice:"host" description:"String, path/host spray"` Quiet bool `short:"q" long:"quiet" description:"Bool, Quiet" config:"quiet"`
Client string `short:"C" long:"client" default:"auto" choice:"fast" choice:"standard" choice:"auto" description:"String, Client type"` NoColor bool `long:"no-color" description:"Bool, no color" config:"no-color"`
NoBar bool `long:"no-bar" description:"Bool, No progress bar" config:"no-bar"`
Proxy string `long:"proxy" default:"" description:"String, proxy address, e.g.: --proxy socks5://127.0.0.1:1080" config:"proxy"`
} }
func (opt *Option) PrepareRunner() (*Runner, error) { func (opt *Option) PrepareRunner() (*Runner, error) {
ok := opt.Validate() err := opt.Validate()
if !ok { if err != nil {
return nil, fmt.Errorf("validate failed") return nil, err
} }
var err error
r := &Runner{ r := &Runner{
Progress: uiprogress.New(), Progress: uiprogress.New(),
Threads: opt.Threads, Threads: opt.Threads,
@ -151,8 +158,9 @@ func (opt *Option) PrepareRunner() (*Runner, error) {
Offset: opt.Offset, Offset: opt.Offset,
Total: opt.Limit, Total: opt.Limit,
taskCh: make(chan *Task), taskCh: make(chan *Task),
OutputCh: make(chan *pkg.Baseline, 100), outputCh: make(chan *pkg.Baseline, 100),
FuzzyCh: make(chan *pkg.Baseline, 100), outwg: &sync.WaitGroup{},
fuzzyCh: make(chan *pkg.Baseline, 100),
Fuzzy: opt.Fuzzy, Fuzzy: opt.Fuzzy,
Force: opt.Force, Force: opt.Force,
CheckOnly: opt.CheckOnly, CheckOnly: opt.CheckOnly,
@ -161,28 +169,29 @@ func (opt *Option) PrepareRunner() (*Runner, error) {
BreakThreshold: opt.BreakThreshold, BreakThreshold: opt.BreakThreshold,
Crawl: opt.Crawl, Crawl: opt.Crawl,
Scope: opt.Scope, Scope: opt.Scope,
Active: opt.Active, Finger: opt.Finger,
Bak: opt.Bak, Bak: opt.Bak,
Common: opt.Common, Common: opt.Common,
RetryCount: opt.RetryCount, RetryCount: opt.RetryCount,
RandomUserAgent: opt.RandomUserAgent, RandomUserAgent: opt.RandomUserAgent,
Random: opt.Random, Random: opt.Random,
Index: opt.Index, Index: opt.Index,
Proxy: opt.Proxy,
} }
// log and bar // log and bar
if !opt.NoColor { if !opt.NoColor {
logs.Log.Color = true logs.Log.SetColor(true)
r.Color = true r.Color = true
} }
if opt.Quiet { if opt.Quiet {
logs.Log.Quiet = true logs.Log.SetQuiet(true)
logs.Log.Color = false logs.Log.SetColor(false)
r.Color = false r.Color = false
} }
if !(opt.Quiet || opt.NoBar) { if !(opt.Quiet || opt.NoBar) {
r.Progress.Start() r.Progress.Start()
logs.Log.Writer = r.Progress.Bypass() logs.Log.SetOutput(r.Progress.Bypass())
} }
// configuration // configuration
@ -211,7 +220,7 @@ func (opt *Option) PrepareRunner() (*Runner, error) {
if opt.Advance { if opt.Advance {
r.Crawl = true r.Crawl = true
r.Active = true r.Finger = true
r.Bak = true r.Bak = true
r.Common = true r.Common = true
pkg.Extractors["recon"] = pkg.ExtractRegexps["pentest"] pkg.Extractors["recon"] = pkg.ExtractRegexps["pentest"]
@ -224,13 +233,15 @@ func (opt *Option) PrepareRunner() (*Runner, error) {
if r.Crawl { if r.Crawl {
s.WriteString("crawl enable; ") s.WriteString("crawl enable; ")
} }
if r.Active { if r.Finger {
r.AppendWords = append(r.AppendWords, pkg.ActivePath...)
s.WriteString("active fingerprint enable; ") s.WriteString("active fingerprint enable; ")
} }
if r.Bak { if r.Bak {
s.WriteString("bak file enable; ") s.WriteString("bak file enable; ")
} }
if r.Common { if r.Common {
r.AppendWords = append(r.AppendWords, mask.SpecialWords["common_file"]...)
s.WriteString("common file enable; ") s.WriteString("common file enable; ")
} }
if opt.Recon { if opt.Recon {
@ -239,51 +250,57 @@ func (opt *Option) PrepareRunner() (*Runner, error) {
if len(opt.AppendRule) > 0 { if len(opt.AppendRule) > 0 {
s.WriteString("file bak enable; ") s.WriteString("file bak enable; ")
} }
if s.Len() > 0 {
logs.Log.Important("Advance Mod: " + s.String()) if r.RetryCount > 0 {
s.WriteString("Retry Count: " + strconv.Itoa(r.RetryCount))
} }
logs.Log.Important("Retry Count: " + strconv.Itoa(r.RetryCount)) if s.Len() > 0 {
logs.Log.Important(s.String())
}
if opt.NoScope { if opt.NoScope {
r.Scope = []string{"*"} r.Scope = []string{"*"}
} }
BlackStatus = parseStatus(BlackStatus, opt.BlackStatus) pkg.BlackStatus = parseStatus(pkg.BlackStatus, opt.BlackStatus)
WhiteStatus = parseStatus(WhiteStatus, opt.WhiteStatus) pkg.WhiteStatus = parseStatus(pkg.WhiteStatus, opt.WhiteStatus)
if opt.FuzzyStatus == "all" { if opt.FuzzyStatus == "all" {
enableAllFuzzy = true pool.EnableAllFuzzy = true
} else { } else {
FuzzyStatus = parseStatus(FuzzyStatus, opt.FuzzyStatus) pkg.FuzzyStatus = parseStatus(pkg.FuzzyStatus, opt.FuzzyStatus)
} }
if opt.Unique { if opt.Unique {
enableAllUnique = true pool.EnableAllUnique = true
} else { } else {
UniqueStatus = parseStatus(UniqueStatus, opt.UniqueStatus) pkg.UniqueStatus = parseStatus(pkg.UniqueStatus, opt.UniqueStatus)
} }
// prepare word // prepare word
dicts := make([][]string, len(opt.Dictionaries)) dicts := make([][]string, len(opt.Dictionaries))
for i, f := range opt.Dictionaries { if len(opt.Dictionaries) == 0 {
dicts[i], err = loadFileToSlice(f) dicts = append(dicts, pkg.LoadDefaultDict())
if opt.ResumeFrom != "" { logs.Log.Warn("not set any dictionary, use default dictionary: https://github.com/maurosoria/dirsearch/blob/master/db/dicc.txt")
dictCache[f] = dicts[i] } else {
for i, f := range opt.Dictionaries {
dicts[i], err = loadFileToSlice(f)
if opt.ResumeFrom != "" {
dictCache[f] = dicts[i]
}
if err != nil {
return nil, err
}
logs.Log.Logf(pkg.LogVerbose, "Loaded %d word from %s", len(dicts[i]), f)
} }
if err != nil {
return nil, err
}
logs.Log.Importantf("Loaded %d word from %s", len(dicts[i]), f)
} }
if opt.Word == "" { if opt.Word == "" {
if len(opt.Dictionaries) == 0 { opt.Word = "{?"
opt.Word = "/" for i, _ := range dicts {
} else { opt.Word += strconv.Itoa(i)
opt.Word = "{?"
for i, _ := range dicts {
opt.Word += strconv.Itoa(i)
}
opt.Word += "}"
} }
opt.Word += "}"
} }
if opt.Suffixes != nil { if opt.Suffixes != nil {
@ -295,7 +312,7 @@ func (opt *Option) PrepareRunner() (*Runner, error) {
opt.Word = "{@prefix}" + opt.Word opt.Word = "{@prefix}" + opt.Word
} }
if opt.Extensions != "" { if opt.ForceExtension && opt.Extensions != "" {
exts := strings.Split(opt.Extensions, ",") exts := strings.Split(opt.Extensions, ",")
for i, e := range exts { for i, e := range exts {
if !strings.HasPrefix(e, ".") { if !strings.HasPrefix(e, ".") {
@ -311,11 +328,11 @@ func (opt *Option) PrepareRunner() (*Runner, error) {
return nil, fmt.Errorf("%s %w", opt.Word, err) return nil, fmt.Errorf("%s %w", opt.Word, err)
} }
if len(r.Wordlist) > 0 { if len(r.Wordlist) > 0 {
logs.Log.Importantf("Parsed %d words by %s", len(r.Wordlist), opt.Word) logs.Log.Logf(pkg.LogVerbose, "Parsed %d words by %s", len(r.Wordlist), opt.Word)
} }
if opt.Rules != nil { if opt.Rules != nil {
rules, err := loadFileAndCombine(opt.Rules) rules, err := loadRuleAndCombine(opt.Rules)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -344,13 +361,30 @@ func (opt *Option) PrepareRunner() (*Runner, error) {
} }
if opt.AppendRule != nil { if opt.AppendRule != nil {
content, err := loadFileAndCombine(opt.AppendRule) content, err := loadRuleAndCombine(opt.AppendRule)
if err != nil { if err != nil {
return nil, err return nil, err
} }
r.AppendRules = rule.Compile(string(content), "") r.AppendRules = rule.Compile(string(content), "")
} }
if opt.AppendFile != nil {
var bs bytes.Buffer
for _, f := range opt.AppendFile {
content, err := ioutil.ReadFile(f)
if err != nil {
return nil, err
}
bs.Write(bytes.TrimSpace(content))
bs.WriteString("\n")
}
lines := strings.Split(bs.String(), "\n")
for i, line := range lines {
lines[i] = strings.TrimSpace(line)
}
r.AppendWords = append(r.AppendWords, lines...)
}
ports := utils.ParsePort(opt.PortRange) ports := utils.ParsePort(opt.PortRange)
// prepare task // prepare task
@ -424,7 +458,7 @@ func (opt *Option) PrepareRunner() (*Runner, error) {
logs.Log.Error(err.Error()) logs.Log.Error(err.Error())
} }
taskfrom = opt.URLFile taskfrom = opt.URLFile
} else if pkg.HasStdin() { } else if files.HasStdin() {
file = os.Stdin file = os.Stdin
taskfrom = "stdin" taskfrom = "stdin"
} }
@ -469,44 +503,84 @@ func (opt *Option) PrepareRunner() (*Runner, error) {
} }
r.Tasks = tasks r.Tasks = tasks
logs.Log.Importantf("Loaded %d urls from %s", len(tasks), taskfrom) logs.Log.Logf(pkg.LogVerbose, "Loaded %d urls from %s", len(tasks), taskfrom)
// 类似dirsearch中的
if opt.Extensions != "" {
r.AppendFunction(func(s string) []string {
exts := strings.Split(opt.Extensions, ",")
ss := make([]string, len(exts))
for i, e := range exts {
if strings.Contains(s, "%EXT%") {
ss[i] = strings.Replace(s, "%EXT%", e, -1)
}
}
return ss
})
} else {
r.AppendFunction(func(s string) []string {
if strings.Contains(s, "%EXT%") {
return nil
}
return []string{s}
})
}
if opt.Uppercase { if opt.Uppercase {
r.Fns = append(r.Fns, strings.ToUpper) r.AppendFunction(wrapWordsFunc(strings.ToUpper))
} }
if opt.Lowercase { if opt.Lowercase {
r.Fns = append(r.Fns, strings.ToLower) r.AppendFunction(wrapWordsFunc(strings.ToLower))
} }
if opt.RemoveExtensions != "" { if opt.RemoveExtensions != "" {
rexts := strings.Split(opt.ExcludeExtensions, ",") rexts := strings.Split(opt.ExcludeExtensions, ",")
r.Fns = append(r.Fns, func(s string) string { r.AppendFunction(func(s string) []string {
if ext := parseExtension(s); iutils.StringsContains(rexts, ext) { if ext := parseExtension(s); iutils.StringsContains(rexts, ext) {
return strings.TrimSuffix(s, "."+ext) return []string{strings.TrimSuffix(s, "."+ext)}
} }
return s return []string{s}
}) })
} }
if opt.ExcludeExtensions != "" { if opt.ExcludeExtensions != "" {
exexts := strings.Split(opt.ExcludeExtensions, ",") exexts := strings.Split(opt.ExcludeExtensions, ",")
r.Fns = append(r.Fns, func(s string) string { r.AppendFunction(func(s string) []string {
if ext := parseExtension(s); iutils.StringsContains(exexts, ext) { if ext := parseExtension(s); iutils.StringsContains(exexts, ext) {
return "" return nil
} }
return s return []string{s}
}) })
} }
if len(opt.Replaces) > 0 { if len(opt.Replaces) > 0 {
r.Fns = append(r.Fns, func(s string) string { r.AppendFunction(func(s string) []string {
for k, v := range opt.Replaces { for k, v := range opt.Replaces {
s = strings.Replace(s, k, v, -1) s = strings.Replace(s, k, v, -1)
} }
return s return []string{s}
}) })
} }
logs.Log.Importantf("Loaded %d dictionaries and %d decorators", len(opt.Dictionaries), len(r.Fns))
// default skip function, skip %EXT%
r.AppendFunction(func(s string) []string {
if strings.Contains(s, "%EXT%") {
return nil
}
return []string{s}
})
if len(opt.Skips) > 0 {
r.AppendFunction(func(s string) []string {
for _, skip := range opt.Skips {
if strings.Contains(s, skip) {
return nil
}
}
return []string{s}
})
}
logs.Log.Logf(pkg.LogVerbose, "Loaded %d dictionaries and %d decorators", len(opt.Dictionaries), len(r.Fns))
if opt.Match != "" { if opt.Match != "" {
exp, err := expr.Compile(opt.Match, expr.Patch(&bytesPatcher{})) exp, err := expr.Compile(opt.Match, expr.Patch(&bytesPatcher{}))
@ -528,13 +602,13 @@ func (opt *Option) PrepareRunner() (*Runner, error) {
var express string var express string
if opt.Recursive != "current.IsDir()" && opt.Depth != 0 { if opt.Recursive != "current.IsDir()" && opt.Depth != 0 {
// 默认不打开递归, 除非指定了非默认的递归表达式 // 默认不打开递归, 除非指定了非默认的递归表达式
MaxRecursion = 1 pool.MaxRecursion = 1
express = opt.Recursive express = opt.Recursive
} }
if opt.Depth != 0 { if opt.Depth != 0 {
// 手动设置的depth优先级高于默认 // 手动设置的depth优先级高于默认
MaxRecursion = opt.Depth pool.MaxRecursion = opt.Depth
express = opt.Recursive express = opt.Recursive
} }
@ -619,24 +693,26 @@ func (opt *Option) PrepareRunner() (*Runner, error) {
return r, nil return r, nil
} }
func (opt *Option) Validate() bool { func (opt *Option) Validate() error {
if opt.Uppercase && opt.Lowercase { if opt.Uppercase && opt.Lowercase {
logs.Log.Error("Cannot set -U and -L at the same time") return errors.New("cannot set -U and -L at the same time")
return false
} }
if (opt.Offset != 0 || opt.Limit != 0) && opt.Depth > 0 { if (opt.Offset != 0 || opt.Limit != 0) && opt.Depth > 0 {
// 偏移和上限与递归同时使用时也会造成混淆. // 偏移和上限与递归同时使用时也会造成混淆.
logs.Log.Error("--offset and --limit cannot be used with --depth at the same time") return errors.New("--offset and --limit cannot be used with --depth at the same time")
return false
} }
if opt.Depth > 0 && opt.ResumeFrom != "" { if opt.Depth > 0 && opt.ResumeFrom != "" {
// 递归与断点续传会造成混淆, 断点续传的word与rule不是通过命令行获取的 // 递归与断点续传会造成混淆, 断点续传的word与rule不是通过命令行获取的
logs.Log.Error("--resume and --depth cannot be used at the same time")
return false return errors.New("--resume and --depth cannot be used at the same time")
} }
return true
if opt.ResumeFrom == "" && opt.URL == nil && opt.URLFile == "" && opt.CIDRs == "" {
return fmt.Errorf("without any target, please use -u/-l/-c/--resume to set targets")
}
return nil
} }
// Generate Tasks // Generate Tasks

View File

@ -1,22 +1,19 @@
package internal package pool
import ( import (
"context" "context"
"fmt" "fmt"
"github.com/chainreactors/logs" "github.com/chainreactors/logs"
"github.com/chainreactors/parsers" "github.com/chainreactors/parsers"
"github.com/chainreactors/parsers/iutils" "github.com/chainreactors/spray/internal/ihttp"
"github.com/chainreactors/spray/pkg" "github.com/chainreactors/spray/pkg"
"github.com/chainreactors/spray/pkg/ihttp" "github.com/chainreactors/utils/iutils"
"github.com/chainreactors/words" "github.com/chainreactors/words"
"github.com/chainreactors/words/mask"
"github.com/chainreactors/words/rule"
"github.com/panjf2000/ants/v2" "github.com/panjf2000/ants/v2"
"github.com/valyala/fasthttp" "github.com/valyala/fasthttp"
"golang.org/x/time/rate" "golang.org/x/time/rate"
"math/rand" "math/rand"
"net/url" "net/url"
"path"
"strings" "strings"
"sync" "sync"
"sync/atomic" "sync/atomic"
@ -24,39 +21,44 @@ import (
) )
var ( var (
max = 2147483647
MaxRedirect = 3 MaxRedirect = 3
MaxCrawl = 3 MaxCrawl = 3
MaxRecursion = 0 MaxRecursion = 0
enableAllFuzzy = false EnableAllFuzzy = false
enableAllUnique = false EnableAllUnique = false
nilBaseline = &pkg.Baseline{}
) )
func NewPool(ctx context.Context, config *pkg.Config) (*Pool, error) { func NewBrutePool(ctx context.Context, config *Config) (*BrutePool, error) {
var u *url.URL var u *url.URL
var err error var err error
if u, err = url.Parse(config.BaseURL); err != nil { if u, err = url.Parse(config.BaseURL); err != nil {
return nil, err return nil, err
} }
pctx, cancel := context.WithCancel(ctx) pctx, cancel := context.WithCancel(ctx)
pool := &Pool{ pool := &BrutePool{
Config: config, Baselines: NewBaselines(),
base: u.Scheme + "://" + u.Host, This: &This{
isDir: strings.HasSuffix(u.Path, "/"), Config: config,
url: u, ctx: pctx,
ctx: pctx, Cancel: cancel,
cancel: cancel, client: ihttp.NewClient(&ihttp.ClientConfig{
client: ihttp.NewClient(config.Thread, 2, config.ClientType), Thread: config.Thread,
baselines: make(map[int]*pkg.Baseline), Type: config.ClientType,
urls: make(map[string]struct{}), Timeout: time.Duration(config.Timeout) * time.Second,
ProxyAddr: config.ProxyAddr,
}),
additionCh: make(chan *Unit, config.Thread),
closeCh: make(chan struct{}),
wg: sync.WaitGroup{},
},
base: u.Scheme + "://" + u.Host,
isDir: strings.HasSuffix(u.Path, "/"),
url: u,
scopeurls: make(map[string]struct{}), scopeurls: make(map[string]struct{}),
uniques: make(map[uint16]struct{}), uniques: make(map[uint16]struct{}),
tempCh: make(chan *pkg.Baseline, 100), handlerCh: make(chan *pkg.Baseline, config.Thread),
checkCh: make(chan int, 100), checkCh: make(chan struct{}, config.Thread),
additionCh: make(chan *Unit, 100),
closeCh: make(chan struct{}),
waiter: sync.WaitGroup{},
initwg: sync.WaitGroup{}, initwg: sync.WaitGroup{},
limiter: rate.NewLimiter(rate.Limit(config.RateLimit), 1), limiter: rate.NewLimiter(rate.Limit(config.RateLimit), 1),
failedCount: 1, failedCount: 1,
@ -68,7 +70,7 @@ func NewPool(ctx context.Context, config *pkg.Config) (*Pool, error) {
} else if pool.url.Path == "" { } else if pool.url.Path == "" {
pool.dir = "/" pool.dir = "/"
} else { } else {
pool.dir = Dir(pool.url.Path) pool.dir = pkg.Dir(pool.url.Path)
} }
pool.reqPool, _ = ants.NewPoolWithFunc(config.Thread, pool.Invoke) pool.reqPool, _ = ants.NewPoolWithFunc(config.Thread, pool.Invoke)
@ -79,44 +81,32 @@ func NewPool(ctx context.Context, config *pkg.Config) (*Pool, error) {
return pool, nil return pool, nil
} }
type Pool struct { type BrutePool struct {
*pkg.Config // read only *Baselines
base string // url的根目录, 在爬虫或者redirect时, 会需要用到根目录进行拼接 *This
dir string base string // url的根目录, 在爬虫或者redirect时, 会需要用到根目录进行拼接
isDir bool isDir bool
url *url.URL url *url.URL
Statistor *pkg.Statistor
client *ihttp.Client reqPool *ants.PoolWithFunc
reqPool *ants.PoolWithFunc scopePool *ants.PoolWithFunc
scopePool *ants.PoolWithFunc handlerCh chan *pkg.Baseline // 待处理的baseline
bar *pkg.Bar checkCh chan struct{} // 独立的check管道 防止与redirect/crawl冲突
ctx context.Context closed bool
cancel context.CancelFunc wordOffset int
tempCh chan *pkg.Baseline // 待处理的baseline failedCount int32
checkCh chan int // 独立的check管道 防止与redirect/crawl冲突 IsFailed bool
additionCh chan *Unit // 插件添加的任务, 待处理管道 urls sync.Map
closeCh chan struct{} scopeurls map[string]struct{}
closed bool uniques map[uint16]struct{}
wordOffset int analyzeDone bool
failedCount int32 limiter *rate.Limiter
isFailed bool locker sync.Mutex
failedBaselines []*pkg.Baseline scopeLocker sync.Mutex
random *pkg.Baseline initwg sync.WaitGroup // 初始化用, 之后改成锁
index *pkg.Baseline
baselines map[int]*pkg.Baseline
urls map[string]struct{}
scopeurls map[string]struct{}
uniques map[uint16]struct{}
analyzeDone bool
worder *words.Worder
limiter *rate.Limiter
locker sync.Mutex
scopeLocker sync.Mutex
waiter sync.WaitGroup
initwg sync.WaitGroup // 初始化用, 之后改成锁
} }
func (pool *Pool) checkRedirect(redirectURL string) bool { func (pool *BrutePool) checkRedirect(redirectURL string) bool {
if pool.random.RedirectURL == "" { if pool.random.RedirectURL == "" {
// 如果random的redirectURL为空, 此时该项 // 如果random的redirectURL为空, 此时该项
return true return true
@ -131,30 +121,31 @@ func (pool *Pool) checkRedirect(redirectURL string) bool {
} }
} }
func (pool *Pool) genReq(mod pkg.SprayMod, s string) (*ihttp.Request, error) { func (pool *BrutePool) genReq(mod SprayMod, s string) (*ihttp.Request, error) {
if mod == pkg.HostSpray { if mod == HostSpray {
return ihttp.BuildHostRequest(pool.ClientType, pool.BaseURL, s) return ihttp.BuildHostRequest(pool.ClientType, pool.BaseURL, s)
} else if mod == pkg.PathSpray { } else if mod == PathSpray {
return ihttp.BuildPathRequest(pool.ClientType, pool.base, s) return ihttp.BuildPathRequest(pool.ClientType, pool.base, s)
} }
return nil, fmt.Errorf("unknown mod") return nil, fmt.Errorf("unknown mod")
} }
func (pool *Pool) Init() error { func (pool *BrutePool) Init() error {
// 分成两步是为了避免闭包的线程安全问题
pool.initwg.Add(2) pool.initwg.Add(2)
if pool.Index != "" { if pool.Index != "/" {
logs.Log.Importantf("custom index url: %s", BaseURL(pool.url)+FormatURL(BaseURL(pool.url), pool.Index)) logs.Log.Logf(pkg.LogVerbose, "custom index url: %s", pkg.BaseURL(pool.url)+pkg.FormatURL(pkg.BaseURL(pool.url), pool.Index))
pool.reqPool.Invoke(newUnit(pool.Index, InitIndexSource)) pool.reqPool.Invoke(newUnit(pool.Index, parsers.InitIndexSource))
//pool.urls[dir(pool.Index)] = struct{}{}
} else { } else {
pool.reqPool.Invoke(newUnit(pool.url.Path, InitIndexSource)) pool.reqPool.Invoke(newUnit(pool.url.Path, parsers.InitIndexSource))
//pool.urls[dir(pool.url.Path)] = struct{}{}
} }
if pool.Random != "" { if pool.Random != "" {
logs.Log.Importantf("custom random url: %s", BaseURL(pool.url)+FormatURL(BaseURL(pool.url), pool.Random)) logs.Log.Logf(pkg.LogVerbose, "custom random url: %s", pkg.BaseURL(pool.url)+pkg.FormatURL(pkg.BaseURL(pool.url), pool.Random))
pool.reqPool.Invoke(newUnit(pool.Random, InitRandomSource)) pool.reqPool.Invoke(newUnit(pool.Random, parsers.InitRandomSource))
} else { } else {
pool.reqPool.Invoke(newUnit(pool.safePath(pkg.RandPath()), InitRandomSource)) pool.reqPool.Invoke(newUnit(pool.safePath(pkg.RandPath()), parsers.InitRandomSource))
} }
pool.initwg.Wait() pool.initwg.Wait()
@ -165,13 +156,13 @@ func (pool *Pool) Init() error {
if pool.index.Chunked && pool.ClientType == ihttp.FAST { if pool.index.Chunked && pool.ClientType == ihttp.FAST {
logs.Log.Warn("chunk encoding! buf current client FASTHTTP not support chunk decode") logs.Log.Warn("chunk encoding! buf current client FASTHTTP not support chunk decode")
} }
logs.Log.Info("[baseline.index] " + pool.index.Format([]string{"status", "length", "spend", "title", "frame", "redirect"})) logs.Log.Logf(pkg.LogVerbose, "[baseline.index] "+pool.index.Format([]string{"status", "length", "spend", "title", "frame", "redirect"}))
// 检测基本访问能力 // 检测基本访问能力
if pool.random.ErrString != "" { if pool.random.ErrString != "" {
logs.Log.Error(pool.index.String()) logs.Log.Error(pool.index.String())
return fmt.Errorf(pool.index.ErrString) return fmt.Errorf(pool.index.ErrString)
} }
logs.Log.Info("[baseline.random] " + pool.random.Format([]string{"status", "length", "spend", "title", "frame", "redirect"})) logs.Log.Logf(pkg.LogVerbose, "[baseline.random] "+pool.random.Format([]string{"status", "length", "spend", "title", "frame", "redirect"}))
// 某些网站http会重定向到https, 如果发现随机目录出现这种情况, 则自定将baseurl升级为https // 某些网站http会重定向到https, 如果发现随机目录出现这种情况, 则自定将baseurl升级为https
if pool.url.Scheme == "http" { if pool.url.Scheme == "http" {
@ -189,20 +180,36 @@ func (pool *Pool) Init() error {
return nil return nil
} }
func (pool *Pool) Run(offset, limit int) { func (pool *BrutePool) Upgrade(bl *pkg.Baseline) error {
pool.worder.RunWithRules() rurl, err := url.Parse(bl.RedirectURL)
if err == nil && rurl.Hostname() == bl.Url.Hostname() && bl.Url.Scheme == "http" && rurl.Scheme == "https" {
logs.Log.Infof("baseurl %s upgrade http to https, reinit", pool.BaseURL)
pool.base = strings.Replace(pool.BaseURL, "http", "https", 1)
pool.url.Scheme = "https"
// 重新初始化
err = pool.Init()
if err != nil {
return err
}
}
return nil
}
func (pool *BrutePool) Run(offset, limit int) {
pool.Worder.Run()
if pool.Active { if pool.Active {
pool.waiter.Add(1) pool.wg.Add(1)
go pool.doActive() go pool.doActive()
} }
if pool.Bak { if pool.Bak {
pool.waiter.Add(1) pool.wg.Add(1)
go pool.doBak() go pool.doBak()
} }
if pool.Common { if pool.Common {
pool.waiter.Add(1) pool.wg.Add(1)
go pool.doCommonFile() go pool.doCommonFile()
} }
@ -211,7 +218,7 @@ func (pool *Pool) Run(offset, limit int) {
go func() { go func() {
for { for {
if done { if done {
pool.waiter.Wait() pool.wg.Wait()
close(pool.closeCh) close(pool.closeCh)
return return
} }
@ -222,7 +229,7 @@ func (pool *Pool) Run(offset, limit int) {
Loop: Loop:
for { for {
select { select {
case w, ok := <-pool.worder.C: case w, ok := <-pool.Worder.C:
if !ok { if !ok {
done = true done = true
continue continue
@ -238,30 +245,30 @@ Loop:
continue continue
} }
pool.waiter.Add(1) pool.wg.Add(1)
if pool.Mod == pkg.HostSpray { if pool.Mod == HostSpray {
pool.reqPool.Invoke(newUnitWithNumber(w, WordSource, pool.wordOffset)) pool.reqPool.Invoke(newUnitWithNumber(w, parsers.WordSource, pool.wordOffset))
} else { } else {
// 原样的目录拼接, 输入了几个"/"就是几个, 适配/有语义的中间件 // 原样的目录拼接, 输入了几个"/"就是几个, 适配/有语义的中间件
pool.reqPool.Invoke(newUnitWithNumber(pool.safePath(w), WordSource, pool.wordOffset)) pool.reqPool.Invoke(newUnitWithNumber(pool.safePath(w), parsers.WordSource, pool.wordOffset))
} }
case source := <-pool.checkCh: case <-pool.checkCh:
pool.Statistor.CheckNumber++ pool.Statistor.CheckNumber++
if pool.Mod == pkg.HostSpray { if pool.Mod == HostSpray {
pool.reqPool.Invoke(newUnitWithNumber(pkg.RandHost(), source, pool.wordOffset)) pool.reqPool.Invoke(newUnitWithNumber(pkg.RandHost(), parsers.CheckSource, pool.wordOffset))
} else if pool.Mod == pkg.PathSpray { } else if pool.Mod == PathSpray {
pool.reqPool.Invoke(newUnitWithNumber(pool.safePath(pkg.RandPath()), source, pool.wordOffset)) pool.reqPool.Invoke(newUnitWithNumber(pool.safePath(pkg.RandPath()), parsers.CheckSource, pool.wordOffset))
} }
case unit, ok := <-pool.additionCh: case unit, ok := <-pool.additionCh:
if !ok || pool.closed { if !ok || pool.closed {
continue continue
} }
if _, ok := pool.urls[unit.path]; ok { if _, ok := pool.urls.Load(unit.path); ok {
logs.Log.Debugf("[%s] duplicate path: %s, skipped", parsers.GetSpraySourceName(unit.source), pool.base+unit.path) logs.Log.Debugf("[%s] duplicate path: %s, skipped", unit.source.Name(), pool.base+unit.path)
pool.waiter.Done() pool.wg.Done()
} else { } else {
pool.urls[unit.path] = struct{}{} pool.urls.Store(unit.path, nil)
unit.number = pool.wordOffset unit.number = pool.wordOffset
pool.reqPool.Invoke(unit) pool.reqPool.Invoke(unit)
} }
@ -277,7 +284,7 @@ Loop:
pool.Close() pool.Close()
} }
func (pool *Pool) Invoke(v interface{}) { func (pool *BrutePool) Invoke(v interface{}) {
if pool.RateLimit != 0 { if pool.RateLimit != 0 {
pool.limiter.Wait(pool.ctx) pool.limiter.Wait(pool.ctx)
} }
@ -287,10 +294,10 @@ func (pool *Pool) Invoke(v interface{}) {
var req *ihttp.Request var req *ihttp.Request
var err error var err error
if unit.source == WordSource { if unit.source == parsers.WordSource {
req, err = pool.genReq(pool.Mod, unit.path) req, err = pool.genReq(pool.Mod, unit.path)
} else { } else {
req, err = pool.genReq(pkg.PathSpray, unit.path) req, err = pool.genReq(PathSpray, unit.path)
} }
if err != nil { if err != nil {
@ -299,7 +306,7 @@ func (pool *Pool) Invoke(v interface{}) {
} }
req.SetHeaders(pool.Headers) req.SetHeaders(pool.Headers)
req.SetHeader("User-Agent", RandomUA()) req.SetHeader("User-Agent", pkg.RandomUA())
start := time.Now() start := time.Now()
resp, reqerr := pool.client.Do(pool.ctx, req) resp, reqerr := pool.client.Do(pool.ctx, req)
@ -316,17 +323,15 @@ func (pool *Pool) Invoke(v interface{}) {
bl = &pkg.Baseline{ bl = &pkg.Baseline{
SprayResult: &parsers.SprayResult{ SprayResult: &parsers.SprayResult{
UrlString: pool.base + unit.path, UrlString: pool.base + unit.path,
IsValid: false,
ErrString: reqerr.Error(), ErrString: reqerr.Error(),
Reason: pkg.ErrRequestFailed.Error(), Reason: pkg.ErrRequestFailed.Error(),
}, },
} }
pool.failedBaselines = append(pool.failedBaselines, bl) pool.FailedBaselines = append(pool.FailedBaselines, bl)
// 自动重放失败请求, 默认为一次 // 自动重放失败请求
pool.doRetry(bl) pool.doRetry(bl)
} else { // 特定场景优化
} else { if unit.source <= 3 || unit.source == parsers.CrawlSource || unit.source == parsers.CommonFileSource {
if unit.source <= 3 || unit.source == CrawlSource || unit.source == CommonFileSource {
// 一些高优先级的source, 将跳过PreCompare // 一些高优先级的source, 将跳过PreCompare
bl = pkg.NewBaseline(req.URI(), req.Host(), resp) bl = pkg.NewBaseline(req.URI(), req.Host(), resp)
} else if pool.MatchExpr != nil { } else if pool.MatchExpr != nil {
@ -341,8 +346,8 @@ func (pool *Pool) Invoke(v interface{}) {
} }
// 手动处理重定向 // 手动处理重定向
if bl.IsValid && unit.source != CheckSource && bl.RedirectURL != "" { if bl.IsValid && unit.source != parsers.CheckSource && bl.RedirectURL != "" {
//pool.waiter.Add(1) //pool.wg.Add(1)
pool.doRedirect(bl, unit.depth) pool.doRedirect(bl, unit.depth)
} }
@ -354,63 +359,63 @@ func (pool *Pool) Invoke(v interface{}) {
bl.Number = unit.number bl.Number = unit.number
bl.Spended = time.Since(start).Milliseconds() bl.Spended = time.Since(start).Milliseconds()
switch unit.source { switch unit.source {
case InitRandomSource: case parsers.InitRandomSource:
bl.Collect() bl.Collect()
pool.locker.Lock() pool.locker.Lock()
pool.random = bl pool.random = bl
pool.addFuzzyBaseline(bl) pool.addFuzzyBaseline(bl)
pool.locker.Unlock() pool.locker.Unlock()
pool.initwg.Done() pool.initwg.Done()
case InitIndexSource: case parsers.InitIndexSource:
bl.Collect() bl.Collect()
pool.locker.Lock() pool.locker.Lock()
pool.index = bl pool.index = bl
pool.locker.Unlock() pool.locker.Unlock()
if bl.Status == 200 || (bl.Status/100) == 3 { if bl.Status == 200 || (bl.Status/100) == 3 {
// 保留index输出结果 // 保留index输出结果
pool.waiter.Add(1) pool.wg.Add(1)
pool.doCrawl(bl) pool.doCrawl(bl)
pool.OutputCh <- bl pool.putToOutput(bl)
} }
pool.initwg.Done() pool.initwg.Done()
case CheckSource: case parsers.CheckSource:
if bl.ErrString != "" { if bl.ErrString != "" {
logs.Log.Warnf("[check.error] %s maybe ip had banned, break (%d/%d), error: %s", pool.BaseURL, pool.failedCount, pool.BreakThreshold, bl.ErrString) logs.Log.Warnf("[check.error] %s maybe ip had banned, break (%d/%d), error: %s", pool.BaseURL, pool.failedCount, pool.BreakThreshold, bl.ErrString)
} else if i := pool.random.Compare(bl); i < 1 { } else if i := pool.random.Compare(bl); i < 1 {
if i == 0 { if i == 0 {
if pool.Fuzzy { if pool.Fuzzy {
logs.Log.Warn("[check.fuzzy] maybe trigger risk control, " + bl.String()) logs.Log.Debug("[check.fuzzy] maybe trigger risk control, " + bl.String())
} }
} else { } else {
atomic.AddInt32(&pool.failedCount, 1) // atomic.AddInt32(&pool.failedCount, 1) //
logs.Log.Warn("[check.failed] maybe trigger risk control, " + bl.String()) logs.Log.Debug("[check.failed] maybe trigger risk control, " + bl.String())
pool.failedBaselines = append(pool.failedBaselines, bl) pool.FailedBaselines = append(pool.FailedBaselines, bl)
} }
} else { } else {
pool.resetFailed() // 如果后续访问正常, 重置错误次数 pool.resetFailed() // 如果后续访问正常, 重置错误次数
logs.Log.Debug("[check.pass] " + bl.String()) logs.Log.Debug("[check.pass] " + bl.String())
} }
case WordSource: case parsers.WordSource:
// 异步进行性能消耗较大的深度对比 // 异步进行性能消耗较大的深度对比
pool.tempCh <- bl pool.handlerCh <- bl
if int(pool.Statistor.ReqTotal)%pool.CheckPeriod == 0 { if int(pool.Statistor.ReqTotal)%pool.CheckPeriod == 0 {
pool.doCheck() pool.doCheck()
} else if pool.failedCount%pool.ErrPeriod == 0 { } else if pool.failedCount%pool.ErrPeriod == 0 {
atomic.AddInt32(&pool.failedCount, 1) atomic.AddInt32(&pool.failedCount, 1)
pool.doCheck() pool.doCheck()
} }
pool.bar.Done() pool.Bar.Done()
case RedirectSource: case parsers.RedirectSource:
bl.FrontURL = unit.frontUrl bl.FrontURL = unit.frontUrl
pool.tempCh <- bl pool.handlerCh <- bl
default: default:
pool.tempCh <- bl pool.handlerCh <- bl
} }
} }
func (pool *Pool) NoScopeInvoke(v interface{}) { func (pool *BrutePool) NoScopeInvoke(v interface{}) {
defer pool.waiter.Done() defer pool.wg.Done()
unit := v.(*Unit) unit := v.(*Unit)
req, err := ihttp.BuildPathRequest(pool.ClientType, unit.path, "") req, err := ihttp.BuildPathRequest(pool.ClientType, unit.path, "")
if err != nil { if err != nil {
@ -418,7 +423,7 @@ func (pool *Pool) NoScopeInvoke(v interface{}) {
return return
} }
req.SetHeaders(pool.Headers) req.SetHeaders(pool.Headers)
req.SetHeader("User-Agent", RandomUA()) req.SetHeader("User-Agent", pkg.RandomUA())
resp, reqerr := pool.client.Do(pool.ctx, req) resp, reqerr := pool.client.Do(pool.ctx, req)
if pool.ClientType == ihttp.FAST { if pool.ClientType == ihttp.FAST {
defer fasthttp.ReleaseResponse(resp.FastResponse) defer fasthttp.ReleaseResponse(resp.FastResponse)
@ -434,14 +439,14 @@ func (pool *Pool) NoScopeInvoke(v interface{}) {
bl.ReqDepth = unit.depth bl.ReqDepth = unit.depth
bl.Collect() bl.Collect()
bl.CollectURL() bl.CollectURL()
pool.waiter.Add(1) pool.wg.Add(1)
pool.doScopeCrawl(bl) pool.doScopeCrawl(bl)
pool.OutputCh <- bl pool.putToOutput(bl)
} }
} }
func (pool *Pool) Handler() { func (pool *BrutePool) Handler() {
for bl := range pool.tempCh { for bl := range pool.handlerCh {
if bl.IsValid { if bl.IsValid {
pool.addFuzzyBaseline(bl) pool.addFuzzyBaseline(bl)
} }
@ -464,29 +469,29 @@ func (pool *Pool) Handler() {
"random": pool.random, "random": pool.random,
"current": bl, "current": bl,
} }
//for _, status := range FuzzyStatus { //for _, ok := range FuzzyStatus {
// if bl, ok := pool.baselines[status]; ok { // if bl, ok := pool.baselines[ok]; ok {
// params["bl"+strconv.Itoa(status)] = bl // params["bl"+strconv.Itoa(ok)] = bl
// } else { // } else {
// params["bl"+strconv.Itoa(status)] = nilBaseline // params["bl"+strconv.Itoa(ok)] = nilBaseline
// } // }
//} //}
} }
var status bool var ok bool
if pool.MatchExpr != nil { if pool.MatchExpr != nil {
if CompareWithExpr(pool.MatchExpr, params) { if pkg.CompareWithExpr(pool.MatchExpr, params) {
status = true ok = true
} }
} else { } else {
status = pool.BaseCompare(bl) ok = pool.BaseCompare(bl)
} }
if status { if ok {
pool.Statistor.FoundNumber++ pool.Statistor.FoundNumber++
// unique判断 // unique判断
if enableAllUnique || iutils.IntsContains(UniqueStatus, bl.Status) { if EnableAllUnique || iutils.IntsContains(pkg.UniqueStatus, bl.Status) {
if _, ok := pool.uniques[bl.Unique]; ok { if _, ok := pool.uniques[bl.Unique]; ok {
bl.IsValid = false bl.IsValid = false
bl.IsFuzzy = true bl.IsFuzzy = true
@ -497,7 +502,7 @@ func (pool *Pool) Handler() {
} }
// 对通过所有对比的有效数据进行再次filter // 对通过所有对比的有效数据进行再次filter
if bl.IsValid && pool.FilterExpr != nil && CompareWithExpr(pool.FilterExpr, params) { if bl.IsValid && pool.FilterExpr != nil && pkg.CompareWithExpr(pool.FilterExpr, params) {
pool.Statistor.FilteredNumber++ pool.Statistor.FilteredNumber++
bl.Reason = pkg.ErrCustomFilter.Error() bl.Reason = pkg.ErrCustomFilter.Error()
bl.IsValid = false bl.IsValid = false
@ -507,14 +512,19 @@ func (pool *Pool) Handler() {
} }
if bl.IsValid || bl.IsFuzzy { if bl.IsValid || bl.IsFuzzy {
pool.waiter.Add(2) pool.wg.Add(2)
pool.doCrawl(bl) pool.doCrawl(bl)
pool.doRule(bl) pool.doRule(bl)
if iutils.IntsContains(pkg.WhiteStatus, bl.Status) || iutils.IntsContains(pkg.UniqueStatus, bl.Status) {
pool.wg.Add(1)
pool.doAppendWords(bl)
}
} }
// 如果要进行递归判断, 要满足 bl有效, mod为path-spray, 当前深度小于最大递归深度 // 如果要进行递归判断, 要满足 bl有效, mod为path-spray, 当前深度小于最大递归深度
if bl.IsValid { if bl.IsValid {
if bl.RecuDepth < MaxRecursion { if bl.RecuDepth < MaxRecursion {
if CompareWithExpr(pool.RecuExpr, params) { if pkg.CompareWithExpr(pool.RecuExpr, params) {
bl.Recu = true bl.Recu = true
} }
} }
@ -522,17 +532,17 @@ func (pool *Pool) Handler() {
if !pool.closed { if !pool.closed {
// 如果任务被取消, 所有还没处理的请求结果都会被丢弃 // 如果任务被取消, 所有还没处理的请求结果都会被丢弃
pool.OutputCh <- bl pool.putToOutput(bl)
} }
pool.waiter.Done() pool.wg.Done()
} }
pool.analyzeDone = true pool.analyzeDone = true
} }
func (pool *Pool) PreCompare(resp *ihttp.Response) error { func (pool *BrutePool) PreCompare(resp *ihttp.Response) error {
status := resp.StatusCode() status := resp.StatusCode()
if iutils.IntsContains(WhiteStatus, status) { if iutils.IntsContains(pkg.WhiteStatus, status) {
// 如果为白名单状态码则直接返回 // 如果为白名单状态码则直接返回
return nil return nil
} }
@ -540,11 +550,11 @@ func (pool *Pool) PreCompare(resp *ihttp.Response) error {
return pkg.ErrSameStatus return pkg.ErrSameStatus
} }
if iutils.IntsContains(BlackStatus, status) { if iutils.IntsContains(pkg.BlackStatus, status) {
return pkg.ErrBadStatus return pkg.ErrBadStatus
} }
if iutils.IntsContains(WAFStatus, status) { if iutils.IntsContains(pkg.WAFStatus, status) {
return pkg.ErrWaf return pkg.ErrWaf
} }
@ -555,7 +565,7 @@ func (pool *Pool) PreCompare(resp *ihttp.Response) error {
return nil return nil
} }
func (pool *Pool) BaseCompare(bl *pkg.Baseline) bool { func (pool *BrutePool) BaseCompare(bl *pkg.Baseline) bool {
if !bl.IsValid { if !bl.IsValid {
return false return false
} }
@ -566,7 +576,6 @@ func (pool *Pool) BaseCompare(bl *pkg.Baseline) bool {
pool.putToFuzzy(bl) pool.putToFuzzy(bl)
return false return false
} }
// 使用与baseline相同状态码, 需要在fuzzystatus中提前配置 // 使用与baseline相同状态码, 需要在fuzzystatus中提前配置
base, ok := pool.baselines[bl.Status] // 挑选对应状态码的baseline进行compare base, ok := pool.baselines[bl.Status] // 挑选对应状态码的baseline进行compare
if !ok { if !ok {
@ -611,62 +620,45 @@ func (pool *Pool) BaseCompare(bl *pkg.Baseline) bool {
return true return true
} }
func (pool *Pool) Upgrade(bl *pkg.Baseline) error { func (pool *BrutePool) doCheck() {
rurl, err := url.Parse(bl.RedirectURL) if pool.failedCount > pool.BreakThreshold {
if err == nil && rurl.Hostname() == bl.Url.Hostname() && bl.Url.Scheme == "http" && rurl.Scheme == "https" { // 当报错次数超过上限是, 结束任务
logs.Log.Infof("baseurl %s upgrade http to https, reinit", pool.BaseURL) pool.recover()
pool.base = strings.Replace(pool.BaseURL, "http", "https", 1) pool.Cancel()
pool.url.Scheme = "https" pool.IsFailed = true
// 重新初始化
err = pool.Init()
if err != nil {
return err
}
}
return nil
}
func (pool *Pool) doRedirect(bl *pkg.Baseline, depth int) {
if depth >= MaxRedirect {
return return
} }
reURL := FormatURL(bl.Url.Path, bl.RedirectURL)
pool.waiter.Add(1) if pool.Mod == HostSpray {
go func() { pool.checkCh <- struct{}{}
defer pool.waiter.Done() } else if pool.Mod == PathSpray {
pool.addAddition(&Unit{ pool.checkCh <- struct{}{}
path: reURL, }
source: RedirectSource,
frontUrl: bl.UrlString,
depth: depth + 1,
})
}()
} }
func (pool *Pool) doCrawl(bl *pkg.Baseline) { func (pool *BrutePool) doCrawl(bl *pkg.Baseline) {
if !pool.Crawl || bl.ReqDepth >= MaxCrawl { if !pool.Crawl || bl.ReqDepth >= MaxCrawl {
pool.waiter.Done() pool.wg.Done()
return return
} }
bl.CollectURL() bl.CollectURL()
if bl.URLs == nil { if bl.URLs == nil {
pool.waiter.Done() pool.wg.Done()
return return
} }
pool.waiter.Add(1) pool.wg.Add(1)
pool.doScopeCrawl(bl) pool.doScopeCrawl(bl)
go func() { go func() {
defer pool.waiter.Done() defer pool.wg.Done()
for _, u := range bl.URLs { for _, u := range bl.URLs {
if u = FormatURL(bl.Url.Path, u); u == "" { if u = pkg.FormatURL(bl.Url.Path, u); u == "" {
continue continue
} }
pool.addAddition(&Unit{ pool.addAddition(&Unit{
path: u, path: u,
source: CrawlSource, source: parsers.CrawlSource,
depth: bl.ReqDepth + 1, depth: bl.ReqDepth + 1,
}) })
} }
@ -674,24 +666,24 @@ func (pool *Pool) doCrawl(bl *pkg.Baseline) {
} }
func (pool *Pool) doScopeCrawl(bl *pkg.Baseline) { func (pool *BrutePool) doScopeCrawl(bl *pkg.Baseline) {
if bl.ReqDepth >= MaxCrawl { if bl.ReqDepth >= MaxCrawl {
pool.waiter.Done() pool.wg.Done()
return return
} }
go func() { go func() {
defer pool.waiter.Done() defer pool.wg.Done()
for _, u := range bl.URLs { for _, u := range bl.URLs {
if strings.HasPrefix(u, "http") { if strings.HasPrefix(u, "http") {
if v, _ := url.Parse(u); v == nil || !MatchWithGlobs(v.Host, pool.Scope) { if v, _ := url.Parse(u); v == nil || !pkg.MatchWithGlobs(v.Host, pool.Scope) {
continue continue
} }
pool.scopeLocker.Lock() pool.scopeLocker.Lock()
if _, ok := pool.scopeurls[u]; !ok { if _, ok := pool.scopeurls[u]; !ok {
pool.urls[u] = struct{}{} pool.urls.Store(u, nil)
pool.waiter.Add(1) pool.wg.Add(1)
pool.scopePool.Invoke(&Unit{path: u, source: CrawlSource, depth: bl.ReqDepth + 1}) pool.scopePool.Invoke(&Unit{path: u, source: parsers.CrawlSource, depth: bl.ReqDepth + 1})
} }
pool.scopeLocker.Unlock() pool.scopeLocker.Unlock()
} }
@ -699,54 +691,18 @@ func (pool *Pool) doScopeCrawl(bl *pkg.Baseline) {
}() }()
} }
func (pool *Pool) doRule(bl *pkg.Baseline) { func (pool *BrutePool) addFuzzyBaseline(bl *pkg.Baseline) {
if pool.AppendRule == nil { if _, ok := pool.baselines[bl.Status]; !ok && (EnableAllFuzzy || iutils.IntsContains(pkg.FuzzyStatus, bl.Status)) {
pool.waiter.Done() bl.Collect()
return pool.wg.Add(1)
} pool.doCrawl(bl) // 非有效页面也可能存在一些特殊的url可以用来爬取
if bl.Source == RuleSource { pool.baselines[bl.Status] = bl
pool.waiter.Done() logs.Log.Logf(pkg.LogVerbose, "[baseline.%dinit] %s", bl.Status, bl.Format([]string{"status", "length", "spend", "title", "frame", "redirect"}))
return
}
go func() {
defer pool.waiter.Done()
for u := range rule.RunAsStream(pool.AppendRule.Expressions, path.Base(bl.Path)) {
pool.addAddition(&Unit{
path: Dir(bl.Url.Path) + u,
source: RuleSource,
})
}
}()
}
func (pool *Pool) doRetry(bl *pkg.Baseline) {
if bl.Retry >= pool.Retry {
return
}
pool.waiter.Add(1)
go func() {
defer pool.waiter.Done()
pool.addAddition(&Unit{
path: bl.Path,
source: RetrySource,
retry: bl.Retry + 1,
})
}()
}
func (pool *Pool) doActive() {
defer pool.waiter.Done()
for _, u := range pkg.ActivePath {
pool.addAddition(&Unit{
path: pool.dir + u[1:],
source: ActiveSource,
})
} }
} }
func (pool *Pool) doBak() { func (pool *BrutePool) doBak() {
defer pool.waiter.Done() defer pool.wg.Done()
worder, err := words.NewWorderWithDsl("{?0}.{@bak_ext}", [][]string{pkg.BakGenerator(pool.url.Host)}, nil) worder, err := words.NewWorderWithDsl("{?0}.{@bak_ext}", [][]string{pkg.BakGenerator(pool.url.Host)}, nil)
if err != nil { if err != nil {
return return
@ -755,7 +711,7 @@ func (pool *Pool) doBak() {
for w := range worder.C { for w := range worder.C {
pool.addAddition(&Unit{ pool.addAddition(&Unit{
path: pool.dir + w, path: pool.dir + w,
source: BakSource, source: parsers.BakSource,
}) })
} }
@ -767,80 +723,19 @@ func (pool *Pool) doBak() {
for w := range worder.C { for w := range worder.C {
pool.addAddition(&Unit{ pool.addAddition(&Unit{
path: pool.dir + w, path: pool.dir + w,
source: BakSource, source: parsers.BakSource,
}) })
} }
} }
func (pool *Pool) doCommonFile() { func (pool *BrutePool) recover() {
defer pool.waiter.Done()
for _, u := range mask.SpecialWords["common_file"] {
pool.addAddition(&Unit{
path: pool.dir + u,
source: CommonFileSource,
})
}
}
func (pool *Pool) doCheck() {
if pool.failedCount > pool.BreakThreshold {
// 当报错次数超过上限是, 结束任务
pool.recover()
pool.cancel()
pool.isFailed = true
return
}
if pool.Mod == pkg.HostSpray {
pool.checkCh <- CheckSource
} else if pool.Mod == pkg.PathSpray {
pool.checkCh <- CheckSource
}
}
func (pool *Pool) addAddition(u *Unit) {
// 强行屏蔽报错, 防止goroutine泄露
pool.waiter.Add(1)
defer func() {
if err := recover(); err != nil {
}
}()
pool.additionCh <- u
}
func (pool *Pool) addFuzzyBaseline(bl *pkg.Baseline) {
if _, ok := pool.baselines[bl.Status]; !ok && (enableAllFuzzy || iutils.IntsContains(FuzzyStatus, bl.Status)) {
bl.Collect()
pool.waiter.Add(1)
pool.doCrawl(bl) // 非有效页面也可能存在一些特殊的url可以用来爬取
pool.baselines[bl.Status] = bl
logs.Log.Infof("[baseline.%dinit] %s", bl.Status, bl.Format([]string{"status", "length", "spend", "title", "frame", "redirect"}))
}
}
func (pool *Pool) putToInvalid(bl *pkg.Baseline, reason string) {
bl.IsValid = false
pool.OutputCh <- bl
}
func (pool *Pool) putToFuzzy(bl *pkg.Baseline) {
bl.IsFuzzy = true
pool.FuzzyCh <- bl
}
func (pool *Pool) resetFailed() {
pool.failedCount = 1
pool.failedBaselines = nil
}
func (pool *Pool) recover() {
logs.Log.Errorf("%s ,failed request exceeds the threshold , task will exit. Breakpoint %d", pool.BaseURL, pool.wordOffset) logs.Log.Errorf("%s ,failed request exceeds the threshold , task will exit. Breakpoint %d", pool.BaseURL, pool.wordOffset)
for i, bl := range pool.failedBaselines { for i, bl := range pool.FailedBaselines {
logs.Log.Errorf("[failed.%d] %s", i, bl.String()) logs.Log.Errorf("[failed.%d] %s", i, bl.String())
} }
} }
func (pool *Pool) Close() { func (pool *BrutePool) Close() {
for pool.analyzeDone { for pool.analyzeDone {
// 等待缓存的待处理任务完成 // 等待缓存的待处理任务完成
time.Sleep(time.Duration(100) * time.Millisecond) time.Sleep(time.Duration(100) * time.Millisecond)
@ -848,23 +743,32 @@ func (pool *Pool) Close() {
close(pool.additionCh) // 关闭addition管道 close(pool.additionCh) // 关闭addition管道
close(pool.checkCh) // 关闭check管道 close(pool.checkCh) // 关闭check管道
pool.Statistor.EndTime = time.Now().Unix() pool.Statistor.EndTime = time.Now().Unix()
pool.bar.Close() pool.Bar.Close()
} }
func (pool *Pool) safePath(u string) string { func (pool *BrutePool) safePath(u string) string {
// 自动生成的目录将采用safepath的方式拼接到相对目录中, 避免出现//的情况. 例如init, check, common // 自动生成的目录将采用safepath的方式拼接到相对目录中, 避免出现//的情况. 例如init, check, common
hasSlash := strings.HasPrefix(u, "/") if pool.isDir {
if hasSlash { return pkg.SafePath(pool.dir, u)
if pool.isDir {
return pool.dir + u[1:]
} else {
return pool.url.Path + u
}
} else { } else {
if pool.isDir { return pkg.SafePath(pool.url.Path+"/", u)
return pool.url.Path + u
} else {
return pool.url.Path + "/" + u
}
} }
} }
func (pool *BrutePool) resetFailed() {
pool.failedCount = 1
pool.FailedBaselines = nil
}
func NewBaselines() *Baselines {
return &Baselines{
baselines: map[int]*pkg.Baseline{},
}
}
type Baselines struct {
FailedBaselines []*pkg.Baseline
random *pkg.Baseline
index *pkg.Baseline
baselines map[int]*pkg.Baseline
}

View File

@ -1,13 +1,11 @@
package internal package pool
import ( import (
"context" "context"
"fmt"
"github.com/chainreactors/logs" "github.com/chainreactors/logs"
"github.com/chainreactors/parsers" "github.com/chainreactors/parsers"
"github.com/chainreactors/spray/internal/ihttp"
"github.com/chainreactors/spray/pkg" "github.com/chainreactors/spray/pkg"
"github.com/chainreactors/spray/pkg/ihttp"
"github.com/chainreactors/words"
"github.com/panjf2000/ants/v2" "github.com/panjf2000/ants/v2"
"github.com/valyala/fasthttp" "github.com/valyala/fasthttp"
"net/url" "net/url"
@ -17,56 +15,37 @@ import (
) )
// 类似httpx的无状态, 无scope, 无并发池的检测模式 // 类似httpx的无状态, 无scope, 无并发池的检测模式
func NewCheckPool(ctx context.Context, config *pkg.Config) (*CheckPool, error) { func NewCheckPool(ctx context.Context, config *Config) (*CheckPool, error) {
pctx, cancel := context.WithCancel(ctx) pctx, cancel := context.WithCancel(ctx)
pool := &CheckPool{ pool := &CheckPool{
Config: config, &This{
ctx: pctx, Config: config,
cancel: cancel, ctx: pctx,
client: ihttp.NewClient(config.Thread, 2, config.ClientType), Cancel: cancel,
wg: sync.WaitGroup{}, client: ihttp.NewClient(&ihttp.ClientConfig{
additionCh: make(chan *Unit, 100), Thread: config.Thread,
closeCh: make(chan struct{}), Type: config.ClientType,
reqCount: 1, Timeout: time.Duration(config.Timeout) * time.Second,
failedCount: 1, ProxyAddr: config.ProxyAddr,
}),
wg: sync.WaitGroup{},
additionCh: make(chan *Unit, 100),
closeCh: make(chan struct{}),
},
} }
pool.Headers = map[string]string{"Connection": "close"} pool.Headers = map[string]string{"Connection": "close"}
p, _ := ants.NewPoolWithFunc(config.Thread, pool.Invoke) p, _ := ants.NewPoolWithFunc(config.Thread, pool.Invoke)
pool.pool = p pool.This.Pool = p
return pool, nil return pool, nil
} }
type CheckPool struct { type CheckPool struct {
*pkg.Config *This
client *ihttp.Client
pool *ants.PoolWithFunc
bar *pkg.Bar
ctx context.Context
cancel context.CancelFunc
reqCount int
failedCount int
additionCh chan *Unit
closeCh chan struct{}
worder *words.Worder
wg sync.WaitGroup
}
func (pool *CheckPool) Close() {
pool.bar.Close()
}
func (pool *CheckPool) genReq(s string) (*ihttp.Request, error) {
if pool.Mod == pkg.HostSpray {
return ihttp.BuildHostRequest(pool.ClientType, pool.BaseURL, s)
} else if pool.Mod == pkg.PathSpray {
return ihttp.BuildPathRequest(pool.ClientType, pool.BaseURL, s)
}
return nil, fmt.Errorf("unknown mod")
} }
func (pool *CheckPool) Run(ctx context.Context, offset, limit int) { func (pool *CheckPool) Run(ctx context.Context, offset, limit int) {
pool.worder.Run() pool.Worder.Run()
var done bool var done bool
// 挂起一个监控goroutine, 每100ms判断一次done, 如果已经done, 则关闭closeCh, 然后通过Loop中的select case closeCh去break, 实现退出 // 挂起一个监控goroutine, 每100ms判断一次done, 如果已经done, 则关闭closeCh, 然后通过Loop中的select case closeCh去break, 实现退出
@ -84,7 +63,7 @@ func (pool *CheckPool) Run(ctx context.Context, offset, limit int) {
Loop: Loop:
for { for {
select { select {
case u, ok := <-pool.worder.C: case u, ok := <-pool.Worder.C:
if !ok { if !ok {
done = true done = true
continue continue
@ -100,12 +79,12 @@ Loop:
} }
pool.wg.Add(1) pool.wg.Add(1)
_ = pool.pool.Invoke(newUnit(u, CheckSource)) _ = pool.This.Pool.Invoke(newUnit(u, parsers.CheckSource))
case u, ok := <-pool.additionCh: case u, ok := <-pool.additionCh:
if !ok { if !ok {
continue continue
} }
_ = pool.pool.Invoke(u) _ = pool.This.Pool.Invoke(u)
case <-pool.closeCh: case <-pool.closeCh:
break Loop break Loop
case <-ctx.Done(): case <-ctx.Done():
@ -163,23 +142,23 @@ func (pool *CheckPool) Invoke(v interface{}) {
if bl.IsValid { if bl.IsValid {
if bl.RedirectURL != "" { if bl.RedirectURL != "" {
pool.doRedirect(bl, unit.depth) pool.doRedirect(bl, unit.depth)
pool.FuzzyCh <- bl pool.putToFuzzy(bl)
} else if bl.Status == 400 { } else if bl.Status == 400 {
pool.doUpgrade(bl) pool.doUpgrade(bl)
pool.FuzzyCh <- bl pool.putToFuzzy(bl)
} else { } else {
params := map[string]interface{}{ params := map[string]interface{}{
"current": bl, "current": bl,
} }
if pool.MatchExpr == nil || CompareWithExpr(pool.MatchExpr, params) { if pool.MatchExpr == nil || pkg.CompareWithExpr(pool.MatchExpr, params) {
pool.OutputCh <- bl pool.putToOutput(bl)
} }
} }
} }
pool.reqCount++ pool.reqCount++
pool.wg.Done() pool.wg.Done()
pool.bar.Done() pool.Bar.Done()
} }
func (pool *CheckPool) doRedirect(bl *pkg.Baseline, depth int) { func (pool *CheckPool) doRedirect(bl *pkg.Baseline, depth int) {
@ -194,14 +173,14 @@ func (pool *CheckPool) doRedirect(bl *pkg.Baseline, depth int) {
} }
reURL = bl.RedirectURL reURL = bl.RedirectURL
} else { } else {
reURL = BaseURL(bl.Url) + FormatURL(BaseURL(bl.Url), bl.RedirectURL) reURL = pkg.BaseURL(bl.Url) + pkg.FormatURL(pkg.BaseURL(bl.Url), bl.RedirectURL)
} }
pool.wg.Add(1) pool.wg.Add(1)
go func() { go func() {
pool.additionCh <- &Unit{ pool.additionCh <- &Unit{
path: reURL, path: reURL,
source: RedirectSource, source: parsers.RedirectSource,
frontUrl: bl.UrlString, frontUrl: bl.UrlString,
depth: depth + 1, depth: depth + 1,
} }
@ -223,7 +202,7 @@ func (pool *CheckPool) doUpgrade(bl *pkg.Baseline) {
go func() { go func() {
pool.additionCh <- &Unit{ pool.additionCh <- &Unit{
path: reurl, path: reurl,
source: UpgradeSource, source: parsers.UpgradeSource,
depth: bl.ReqDepth + 1, depth: bl.ReqDepth + 1,
} }
}() }()

View File

@ -1,8 +1,10 @@
package pkg package pool
import ( import (
"github.com/antonmedv/expr/vm" "github.com/antonmedv/expr/vm"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/words/rule" "github.com/chainreactors/words/rule"
"sync"
) )
type SprayMod int type SprayMod int
@ -21,9 +23,13 @@ var ModMap = map[string]SprayMod{
type Config struct { type Config struct {
BaseURL string BaseURL string
ProxyAddr string
Thread int Thread int
Wordlist []string Wordlist []string
Timeout int Timeout int
OutputCh chan *pkg.Baseline
FuzzyCh chan *pkg.Baseline
OutLocker *sync.WaitGroup
RateLimit int RateLimit int
CheckPeriod int CheckPeriod int
ErrPeriod int32 ErrPeriod int32
@ -36,8 +42,7 @@ type Config struct {
FilterExpr *vm.Program FilterExpr *vm.Program
RecuExpr *vm.Program RecuExpr *vm.Program
AppendRule *rule.Program AppendRule *rule.Program
OutputCh chan *Baseline AppendWords []string
FuzzyCh chan *Baseline
Fuzzy bool Fuzzy bool
IgnoreWaf bool IgnoreWaf bool
Crawl bool Crawl bool

160
internal/pool/pool.go Normal file
View File

@ -0,0 +1,160 @@
package pool
import (
"context"
"fmt"
"github.com/chainreactors/parsers"
"github.com/chainreactors/spray/internal/ihttp"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/words"
"github.com/chainreactors/words/mask"
"github.com/chainreactors/words/rule"
"github.com/panjf2000/ants/v2"
"path"
"sync"
)
type This struct {
*Config
Statistor *pkg.Statistor
Pool *ants.PoolWithFunc
Bar *pkg.Bar
Worder *words.Worder
client *ihttp.Client
ctx context.Context
Cancel context.CancelFunc
dir string
reqCount int
failedCount int
additionCh chan *Unit
closeCh chan struct{}
wg sync.WaitGroup
}
func (pool *This) doRedirect(bl *pkg.Baseline, depth int) {
if depth >= MaxRedirect {
return
}
reURL := pkg.FormatURL(bl.Url.Path, bl.RedirectURL)
pool.wg.Add(1)
go func() {
defer pool.wg.Done()
pool.addAddition(&Unit{
path: reURL,
source: parsers.RedirectSource,
frontUrl: bl.UrlString,
depth: depth + 1,
})
}()
}
func (pool *This) doRule(bl *pkg.Baseline) {
if pool.AppendRule == nil {
pool.wg.Done()
return
}
if bl.Source == parsers.RuleSource {
pool.wg.Done()
return
}
go func() {
defer pool.wg.Done()
for u := range rule.RunAsStream(pool.AppendRule.Expressions, path.Base(bl.Path)) {
pool.addAddition(&Unit{
path: pkg.Dir(bl.Url.Path) + u,
source: parsers.RuleSource,
})
}
}()
}
func (pool *This) doAppendWords(bl *pkg.Baseline) {
if pool.AppendWords == nil {
pool.wg.Done()
return
}
if bl.Source == parsers.AppendSource {
pool.wg.Done()
return
}
go func() {
defer pool.wg.Done()
for _, u := range pool.AppendWords {
pool.addAddition(&Unit{
path: pkg.SafePath(bl.Path, u),
source: parsers.AppendSource,
})
}
}()
}
func (pool *This) doRetry(bl *pkg.Baseline) {
if bl.Retry >= pool.Retry {
return
}
pool.wg.Add(1)
go func() {
defer pool.wg.Done()
pool.addAddition(&Unit{
path: bl.Path,
source: parsers.RetrySource,
retry: bl.Retry + 1,
})
}()
}
func (pool *This) doActive() {
defer pool.wg.Done()
for _, u := range pkg.ActivePath {
pool.addAddition(&Unit{
path: pool.dir + u[1:],
source: parsers.FingerSource,
})
}
}
func (pool *This) doCommonFile() {
defer pool.wg.Done()
for _, u := range mask.SpecialWords["common_file"] {
pool.addAddition(&Unit{
path: pool.dir + u,
source: parsers.CommonFileSource,
})
}
}
func (pool *This) addAddition(u *Unit) {
// 强行屏蔽报错, 防止goroutine泄露
pool.wg.Add(1)
defer func() {
if err := recover(); err != nil {
}
}()
pool.additionCh <- u
}
func (pool *This) Close() {
pool.Bar.Close()
}
func (pool *This) genReq(s string) (*ihttp.Request, error) {
if pool.Mod == HostSpray {
return ihttp.BuildHostRequest(pool.ClientType, pool.BaseURL, s)
} else if pool.Mod == PathSpray {
return ihttp.BuildPathRequest(pool.ClientType, pool.BaseURL, s)
}
return nil, fmt.Errorf("unknown mod")
}
func (pool *This) putToOutput(bl *pkg.Baseline) {
pool.OutLocker.Add(1)
pool.OutputCh <- bl
}
func (pool *This) putToFuzzy(bl *pkg.Baseline) {
pool.OutLocker.Add(1)
bl.IsFuzzy = true
pool.FuzzyCh <- bl
}

20
internal/pool/unit.go Normal file
View File

@ -0,0 +1,20 @@
package pool
import "github.com/chainreactors/parsers"
func newUnit(path string, source parsers.SpraySource) *Unit {
return &Unit{path: path, source: source}
}
func newUnitWithNumber(path string, source parsers.SpraySource, number int) *Unit {
return &Unit{path: path, source: source, number: number}
}
type Unit struct {
number int
path string
source parsers.SpraySource
retry int
frontUrl string
depth int // redirect depth
}

View File

@ -6,8 +6,9 @@ import (
"github.com/antonmedv/expr/vm" "github.com/antonmedv/expr/vm"
"github.com/chainreactors/files" "github.com/chainreactors/files"
"github.com/chainreactors/logs" "github.com/chainreactors/logs"
"github.com/chainreactors/spray/internal/ihttp"
"github.com/chainreactors/spray/internal/pool"
"github.com/chainreactors/spray/pkg" "github.com/chainreactors/spray/pkg"
"github.com/chainreactors/spray/pkg/ihttp"
"github.com/chainreactors/words" "github.com/chainreactors/words"
"github.com/chainreactors/words/rule" "github.com/chainreactors/words/rule"
"github.com/gosuri/uiprogress" "github.com/gosuri/uiprogress"
@ -17,11 +18,7 @@ import (
) )
var ( var (
WhiteStatus = []int{200} max = 2147483647
BlackStatus = []int{400, 410}
FuzzyStatus = []int{403, 404, 500, 501, 502, 503}
WAFStatus = []int{493, 418, 1020, 406}
UniqueStatus = []int{403}
) )
var ( var (
@ -33,6 +30,9 @@ var (
type Runner struct { type Runner struct {
taskCh chan *Task taskCh chan *Task
poolwg sync.WaitGroup poolwg sync.WaitGroup
outwg *sync.WaitGroup
outputCh chan *pkg.Baseline
fuzzyCh chan *pkg.Baseline
bar *uiprogress.Bar bar *uiprogress.Bar
finished int finished int
@ -41,8 +41,9 @@ type Runner struct {
Wordlist []string Wordlist []string
Rules *rule.Program Rules *rule.Program
AppendRules *rule.Program AppendRules *rule.Program
AppendWords []string
Headers map[string]string Headers map[string]string
Fns []func(string) string Fns []func(string) []string
FilterExpr *vm.Program FilterExpr *vm.Program
MatchExpr *vm.Program MatchExpr *vm.Program
RecursiveExpr *vm.Program RecursiveExpr *vm.Program
@ -55,8 +56,6 @@ type Runner struct {
Timeout int Timeout int
Mod string Mod string
Probes []string Probes []string
OutputCh chan *pkg.Baseline
FuzzyCh chan *pkg.Baseline
Fuzzy bool Fuzzy bool
OutputFile *files.File OutputFile *files.File
FuzzyFile *files.File FuzzyFile *files.File
@ -77,24 +76,26 @@ type Runner struct {
IgnoreWaf bool IgnoreWaf bool
Crawl bool Crawl bool
Scope []string Scope []string
Active bool Finger bool
Bak bool Bak bool
Common bool Common bool
RetryCount int RetryCount int
RandomUserAgent bool RandomUserAgent bool
Random string Random string
Index string Index string
Proxy string
} }
func (r *Runner) PrepareConfig() *pkg.Config { func (r *Runner) PrepareConfig() *pool.Config {
config := &pkg.Config{ config := &pool.Config{
Thread: r.Threads, Thread: r.Threads,
Timeout: r.Timeout, Timeout: r.Timeout,
RateLimit: r.RateLimit, RateLimit: r.RateLimit,
Headers: r.Headers, Headers: r.Headers,
Mod: pkg.ModMap[r.Mod], Mod: pool.ModMap[r.Mod],
OutputCh: r.OutputCh, OutputCh: r.outputCh,
FuzzyCh: r.FuzzyCh, FuzzyCh: r.fuzzyCh,
OutLocker: r.outwg,
Fuzzy: r.Fuzzy, Fuzzy: r.Fuzzy,
CheckPeriod: r.CheckPeriod, CheckPeriod: r.CheckPeriod,
ErrPeriod: int32(r.ErrPeriod), ErrPeriod: int32(r.ErrPeriod),
@ -103,10 +104,11 @@ func (r *Runner) PrepareConfig() *pkg.Config {
FilterExpr: r.FilterExpr, FilterExpr: r.FilterExpr,
RecuExpr: r.RecursiveExpr, RecuExpr: r.RecursiveExpr,
AppendRule: r.AppendRules, AppendRule: r.AppendRules,
AppendWords: r.AppendWords,
IgnoreWaf: r.IgnoreWaf, IgnoreWaf: r.IgnoreWaf,
Crawl: r.Crawl, Crawl: r.Crawl,
Scope: r.Scope, Scope: r.Scope,
Active: r.Active, Active: r.Finger,
Bak: r.Bak, Bak: r.Bak,
Common: r.Common, Common: r.Common,
Retry: r.RetryCount, Retry: r.RetryCount,
@ -114,18 +116,23 @@ func (r *Runner) PrepareConfig() *pkg.Config {
RandomUserAgent: r.RandomUserAgent, RandomUserAgent: r.RandomUserAgent,
Random: r.Random, Random: r.Random,
Index: r.Index, Index: r.Index,
ProxyAddr: r.Proxy,
} }
if config.ClientType == ihttp.Auto { if config.ClientType == ihttp.Auto {
if config.Mod == pkg.PathSpray { if config.Mod == pool.PathSpray {
config.ClientType = ihttp.FAST config.ClientType = ihttp.FAST
} else if config.Mod == pkg.HostSpray { } else if config.Mod == pool.HostSpray {
config.ClientType = ihttp.STANDARD config.ClientType = ihttp.STANDARD
} }
} }
return config return config
} }
func (r *Runner) AppendFunction(fn func(string) []string) {
r.Fns = append(r.Fns, fn)
}
func (r *Runner) Prepare(ctx context.Context) error { func (r *Runner) Prepare(ctx context.Context) error {
var err error var err error
if r.CheckOnly { if r.CheckOnly {
@ -133,10 +140,10 @@ func (r *Runner) Prepare(ctx context.Context) error {
r.Pools, err = ants.NewPoolWithFunc(1, func(i interface{}) { r.Pools, err = ants.NewPoolWithFunc(1, func(i interface{}) {
config := r.PrepareConfig() config := r.PrepareConfig()
pool, err := NewCheckPool(ctx, config) pool, err := pool.NewCheckPool(ctx, config)
if err != nil { if err != nil {
logs.Log.Error(err.Error()) logs.Log.Error(err.Error())
pool.cancel() pool.Cancel()
r.poolwg.Done() r.poolwg.Done()
return return
} }
@ -148,9 +155,9 @@ func (r *Runner) Prepare(ctx context.Context) error {
} }
close(ch) close(ch)
}() }()
pool.worder = words.NewWorderWithChan(ch) pool.Worder = words.NewWorderWithChan(ch)
pool.worder.Fns = r.Fns pool.Worder.Fns = r.Fns
pool.bar = pkg.NewBar("check", r.Count-r.Offset, r.Progress) pool.Bar = pkg.NewBar("check", r.Count-r.Offset, r.Progress)
pool.Run(ctx, r.Offset, r.Count) pool.Run(ctx, r.Offset, r.Count)
r.poolwg.Done() r.poolwg.Done()
}) })
@ -182,17 +189,17 @@ func (r *Runner) Prepare(ctx context.Context) error {
config := r.PrepareConfig() config := r.PrepareConfig()
config.BaseURL = t.baseUrl config.BaseURL = t.baseUrl
pool, err := NewPool(ctx, config) pool, err := pool.NewBrutePool(ctx, config)
if err != nil { if err != nil {
logs.Log.Error(err.Error()) logs.Log.Error(err.Error())
pool.cancel() pool.Cancel()
r.Done() r.Done()
return return
} }
if t.origin != nil && len(r.Wordlist) == 0 { if t.origin != nil && len(r.Wordlist) == 0 {
// 如果是从断点续传中恢复的任务, 则自动设置word,dict与rule, 不过优先级低于命令行参数 // 如果是从断点续传中恢复的任务, 则自动设置word,dict与rule, 不过优先级低于命令行参数
pool.Statistor = pkg.NewStatistorFromStat(t.origin.Statistor) pool.Statistor = pkg.NewStatistorFromStat(t.origin.Statistor)
pool.worder, err = t.origin.InitWorder(r.Fns) pool.Worder, err = t.origin.InitWorder(r.Fns)
if err != nil { if err != nil {
logs.Log.Error(err.Error()) logs.Log.Error(err.Error())
r.Done() r.Done()
@ -201,9 +208,9 @@ func (r *Runner) Prepare(ctx context.Context) error {
pool.Statistor.Total = t.origin.sum pool.Statistor.Total = t.origin.sum
} else { } else {
pool.Statistor = pkg.NewStatistor(t.baseUrl) pool.Statistor = pkg.NewStatistor(t.baseUrl)
pool.worder = words.NewWorder(r.Wordlist) pool.Worder = words.NewWorder(r.Wordlist)
pool.worder.Fns = r.Fns pool.Worder.Fns = r.Fns
pool.worder.Rules = r.Rules.Expressions pool.Worder.Rules = r.Rules.Expressions
} }
var limit int var limit int
@ -212,7 +219,8 @@ func (r *Runner) Prepare(ctx context.Context) error {
} else { } else {
limit = pool.Statistor.Total limit = pool.Statistor.Total
} }
pool.bar = pkg.NewBar(config.BaseURL, limit-pool.Statistor.Offset, r.Progress) pool.Bar = pkg.NewBar(config.BaseURL, limit-pool.Statistor.Offset, r.Progress)
logs.Log.Importantf("[pool] task: %s, total %d words, %d threads, proxy: %s", pool.BaseURL, limit-pool.Statistor.Offset, pool.Thread, pool.ProxyAddr)
err = pool.Init() err = pool.Init()
if err != nil { if err != nil {
pool.Statistor.Error = err.Error() pool.Statistor.Error = err.Error()
@ -227,9 +235,9 @@ func (r *Runner) Prepare(ctx context.Context) error {
pool.Run(pool.Statistor.Offset, limit) pool.Run(pool.Statistor.Offset, limit)
if pool.isFailed && len(pool.failedBaselines) > 0 { if pool.IsFailed && len(pool.FailedBaselines) > 0 {
// 如果因为错误积累退出, end将指向第一个错误发生时, 防止resume时跳过大量目标 // 如果因为错误积累退出, end将指向第一个错误发生时, 防止resume时跳过大量目标
pool.Statistor.End = pool.failedBaselines[0].Number pool.Statistor.End = pool.FailedBaselines[0].Number
} }
r.PrintStat(pool) r.PrintStat(pool)
r.Done() r.Done()
@ -239,7 +247,7 @@ func (r *Runner) Prepare(ctx context.Context) error {
if err != nil { if err != nil {
return err return err
} }
r.Output() r.OutputHandler()
return nil return nil
} }
@ -287,19 +295,7 @@ Loop:
} }
r.poolwg.Wait() r.poolwg.Wait()
time.Sleep(100 * time.Millisecond) // 延迟100ms, 等所有数据处理完毕 r.outwg.Wait()
for {
if len(r.OutputCh) == 0 {
break
}
}
for {
if len(r.FuzzyCh) == 0 {
break
}
}
time.Sleep(100 * time.Millisecond) // 延迟100ms, 等所有数据处理完毕
} }
func (r *Runner) RunWithCheck(ctx context.Context) { func (r *Runner) RunWithCheck(ctx context.Context) {
@ -326,7 +322,7 @@ Loop:
} }
for { for {
if len(r.OutputCh) == 0 { if len(r.outputCh) == 0 {
break break
} }
} }
@ -340,18 +336,18 @@ func (r *Runner) Done() {
r.poolwg.Done() r.poolwg.Done()
} }
func (r *Runner) PrintStat(pool *Pool) { func (r *Runner) PrintStat(pool *pool.BrutePool) {
if r.Color { if r.Color {
logs.Log.Important(pool.Statistor.ColorString()) logs.Log.Important(pool.Statistor.ColorString())
if pool.Statistor.Error == "" { if pool.Statistor.Error == "" {
logs.Log.Important(pool.Statistor.ColorCountString()) pool.Statistor.PrintColorCount()
logs.Log.Important(pool.Statistor.ColorSourceString()) pool.Statistor.PrintColorSource()
} }
} else { } else {
logs.Log.Important(pool.Statistor.String()) logs.Log.Important(pool.Statistor.String())
if pool.Statistor.Error == "" { if pool.Statistor.Error == "" {
logs.Log.Important(pool.Statistor.CountString()) pool.Statistor.PrintCount()
logs.Log.Important(pool.Statistor.SourceString()) pool.Statistor.PrintSource()
} }
} }
@ -361,7 +357,7 @@ func (r *Runner) PrintStat(pool *Pool) {
} }
} }
func (r *Runner) Output() { func (r *Runner) OutputHandler() {
debugPrint := func(bl *pkg.Baseline) { debugPrint := func(bl *pkg.Baseline) {
if r.Color { if r.Color {
logs.Log.Debug(bl.ColorString()) logs.Log.Debug(bl.ColorString())
@ -403,7 +399,7 @@ func (r *Runner) Output() {
for { for {
select { select {
case bl, ok := <-r.OutputCh: case bl, ok := <-r.outputCh:
if !ok { if !ok {
return return
} }
@ -419,6 +415,7 @@ func (r *Runner) Output() {
} else { } else {
debugPrint(bl) debugPrint(bl)
} }
r.outwg.Done()
} }
} }
}() }()
@ -443,15 +440,16 @@ func (r *Runner) Output() {
for { for {
select { select {
case bl, ok := <-r.FuzzyCh: case bl, ok := <-r.fuzzyCh:
if !ok { if !ok {
return return
} }
if r.Fuzzy { if r.Fuzzy {
fuzzySaveFunc(bl) fuzzySaveFunc(bl)
} else { //} else {
debugPrint(bl) // debugPrint(bl)
} }
r.outwg.Done()
} }
} }
}() }()

View File

@ -6,39 +6,6 @@ import (
"github.com/chainreactors/words/rule" "github.com/chainreactors/words/rule"
) )
const (
CheckSource = iota + 1
InitRandomSource
InitIndexSource
RedirectSource
CrawlSource
ActiveSource
WordSource
WafSource
RuleSource
BakSource
CommonFileSource
UpgradeSource
RetrySource
)
func newUnit(path string, source int) *Unit {
return &Unit{path: path, source: source}
}
func newUnitWithNumber(path string, source int, number int) *Unit {
return &Unit{path: path, source: source, number: number}
}
type Unit struct {
number int
path string
source int
retry int
frontUrl string
depth int // redirect depth
}
type Task struct { type Task struct {
baseUrl string baseUrl string
depth int depth int
@ -55,7 +22,7 @@ type Origin struct {
sum int sum int
} }
func (o *Origin) InitWorder(fns []func(string) string) (*words.Worder, error) { func (o *Origin) InitWorder(fns []func(string) []string) (*words.Worder, error) {
var worder *words.Worder var worder *words.Worder
wl, err := loadWordlist(o.Word, o.Dictionaries) wl, err := loadWordlist(o.Word, o.Dictionaries)
if err != nil { if err != nil {

View File

@ -2,41 +2,15 @@ package internal
import ( import (
"bytes" "bytes"
"github.com/antonmedv/expr"
"github.com/antonmedv/expr/ast" "github.com/antonmedv/expr/ast"
"github.com/antonmedv/expr/vm"
"github.com/chainreactors/logs"
"github.com/chainreactors/spray/pkg" "github.com/chainreactors/spray/pkg"
"github.com/chainreactors/words/mask" "github.com/chainreactors/words/mask"
"github.com/chainreactors/words/rule" "github.com/chainreactors/words/rule"
"io/ioutil" "io/ioutil"
"math/rand"
"net/url"
"path"
"path/filepath"
"strconv" "strconv"
"strings" "strings"
) )
var (
// from feroxbuster
randomUserAgent = []string{
"Mozilla/5.0 (Linux; Android 8.0.0; SM-G960F Build/R16NW) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.84 Mobile Safari/537.36",
"Mozilla/5.0 (iPhone; CPU iPhone OS 12_0 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/12.0 Mobile/15E148 Safari/604.1",
"Mozilla/5.0 (Windows Phone 10.0; Android 6.0.1; Microsoft; RM-1152) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Mobile Safari/537.36 Edge/15.15254",
"Mozilla/5.0 (Linux; Android 7.0; Pixel C Build/NRD90M; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/52.0.2743.98 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36 Edge/12.246",
"Mozilla/5.0 (X11; CrOS x86_64 8172.45.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.64 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_2) AppleWebKit/601.3.9 (KHTML, like Gecko) Version/9.0.2 Safari/601.3.9",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.111 Safari/537.36",
"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:15.0) Gecko/20100101 Firefox/15.0.1",
"Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)",
"Mozilla/5.0 (compatible; bingbot/2.0; +http://www.bing.com/bingbot.htm)",
"Mozilla/5.0 (compatible; Yahoo! Slurp; http://help.yahoo.com/help/us/ysearch/slurp)",
}
uacount = len(randomUserAgent)
)
func parseExtension(s string) string { func parseExtension(s string) string {
if i := strings.Index(s, "."); i != -1 { if i := strings.Index(s, "."); i != -1 {
return s[i+1:] return s[i+1:]
@ -97,7 +71,7 @@ func loadFileToSlice(filename string) ([]string, error) {
return ss, nil return ss, nil
} }
func loadFileAndCombine(filename []string) (string, error) { func loadRuleAndCombine(filename []string) (string, error) {
var bs bytes.Buffer var bs bytes.Buffer
for _, f := range filename { for _, f := range filename {
if data, ok := pkg.Rules[f]; ok { if data, ok := pkg.Rules[f]; ok {
@ -171,116 +145,6 @@ func loadRuleWithFiles(ruleFiles []string, filter string) ([]rule.Expression, er
return rule.Compile(rules.String(), filter).Expressions, nil return rule.Compile(rules.String(), filter).Expressions, nil
} }
func relaPath(base, u string) string {
// 拼接相对目录, 不使用path.join的原因是, 如果存在"////"这样的情况, 可能真的是有意义的路由, 不能随意去掉.
// "" /a /a
// "" a /a
// / "" /
// /a/ b /a/b
// /a/ /b /a/b
// /a b /b
// /a /b /b
if u == "" {
return base
}
pathSlash := strings.HasPrefix(u, "/")
if base == "" {
if pathSlash {
return u[1:]
} else {
return "/" + u
}
} else if strings.HasSuffix(base, "/") {
if pathSlash {
return base + u[1:]
} else {
return base + u
}
} else {
if pathSlash {
return Dir(base) + u[1:]
} else {
return Dir(base) + u
}
}
}
func Dir(u string) string {
// 安全的获取目录, 不会额外处理多个"//", 并非用来获取上级目录
// /a /
// /a/ /a/
// a/ a/
// aaa /
if strings.HasSuffix(u, "/") {
return u
} else if i := strings.LastIndex(u, "/"); i == -1 {
return "/"
} else {
return u[:i+1]
}
}
func FormatURL(base, u string) string {
if strings.HasPrefix(u, "http") {
parsed, err := url.Parse(u)
if err != nil {
return ""
}
return parsed.Path
} else if strings.HasPrefix(u, "//") {
parsed, err := url.Parse(u)
if err != nil {
return ""
}
return parsed.Path
} else if strings.HasPrefix(u, "/") {
// 绝对目录拼接
// 不需要进行处理, 用来跳过下面的判断
return u
} else if strings.HasPrefix(u, "./") {
// "./"相对目录拼接
return relaPath(base, u[2:])
} else if strings.HasPrefix(u, "../") {
return path.Join(Dir(base), u)
} else {
// 相对目录拼接
return relaPath(base, u)
}
}
func BaseURL(u *url.URL) string {
return u.Scheme + "://" + u.Host
}
func RandomUA() string {
return randomUserAgent[rand.Intn(uacount)]
}
func CompareWithExpr(exp *vm.Program, params map[string]interface{}) bool {
res, err := expr.Run(exp, params)
if err != nil {
logs.Log.Warn(err.Error())
}
if res == true {
return true
} else {
return false
}
}
func MatchWithGlobs(u string, globs []string) bool {
for _, glob := range globs {
ok, err := filepath.Match(glob, u)
if err == nil && ok {
return true
}
}
return false
}
type bytesPatcher struct{} type bytesPatcher struct{}
func (p *bytesPatcher) Visit(node *ast.Node) { func (p *bytesPatcher) Visit(node *ast.Node) {
@ -295,3 +159,9 @@ func (p *bytesPatcher) Visit(node *ast.Node) {
}) })
} }
} }
func wrapWordsFunc(f func(string) string) func(string) []string {
return func(s string) []string {
return []string{f(s)}
}
}

View File

@ -3,8 +3,9 @@ package pkg
import ( import (
"bytes" "bytes"
"github.com/chainreactors/parsers" "github.com/chainreactors/parsers"
"github.com/chainreactors/parsers/iutils" "github.com/chainreactors/spray/internal/ihttp"
"github.com/chainreactors/spray/pkg/ihttp" "github.com/chainreactors/utils/encode"
"github.com/chainreactors/utils/iutils"
"net/url" "net/url"
"strings" "strings"
) )
@ -31,7 +32,7 @@ func NewBaseline(u, host string, resp *ihttp.Response) *Baseline {
copy(bl.Header, header) copy(bl.Header, header)
bl.HeaderLength = len(bl.Header) bl.HeaderLength = len(bl.Header)
if i := resp.ContentLength(); i != 0 && bl.ContentType != "bin" { if i := resp.ContentLength(); i != 0 && i <= ihttp.DefaultMaxBodySize {
body := resp.Body() body := resp.Body()
bl.Body = make([]byte, len(body)) bl.Body = make([]byte, len(body))
copy(bl.Body, body) copy(bl.Body, body)
@ -101,7 +102,6 @@ func NewInvalidBaseline(u, host string, resp *ihttp.Response, reason string) *Ba
type Baseline struct { type Baseline struct {
*parsers.SprayResult *parsers.SprayResult
Unique uint16 `json:"-"`
Url *url.URL `json:"-"` Url *url.URL `json:"-"`
Dir bool `json:"-"` Dir bool `json:"-"`
Chunked bool `json:"-"` Chunked bool `json:"-"`
@ -133,9 +133,9 @@ func (bl *Baseline) Collect() {
if bl.ContentType == "html" { if bl.ContentType == "html" {
bl.Title = iutils.AsciiEncode(parsers.MatchTitle(bl.Body)) bl.Title = iutils.AsciiEncode(parsers.MatchTitle(bl.Body))
} else if bl.ContentType == "ico" { } else if bl.ContentType == "ico" {
if name, ok := Md5Fingers[parsers.Md5Hash(bl.Body)]; ok { if name, ok := Md5Fingers[encode.Md5Hash(bl.Body)]; ok {
bl.Frameworks[name] = &parsers.Framework{Name: name} bl.Frameworks[name] = &parsers.Framework{Name: name}
} else if name, ok := Mmh3Fingers[parsers.Mmh3Hash32(bl.Body)]; ok { } else if name, ok := Mmh3Fingers[encode.Mmh3Hash32(bl.Body)]; ok {
bl.Frameworks[name] = &parsers.Framework{Name: name} bl.Frameworks[name] = &parsers.Framework{Name: name}
} }
} }
@ -160,8 +160,8 @@ func (bl *Baseline) CollectURL() {
for _, reg := range ExtractRegexps["js"][0].CompiledRegexps { for _, reg := range ExtractRegexps["js"][0].CompiledRegexps {
urls := reg.FindAllStringSubmatch(string(bl.Body), -1) urls := reg.FindAllStringSubmatch(string(bl.Body), -1)
for _, u := range urls { for _, u := range urls {
u[1] = formatURL(u[1]) u[1] = CleanURL(u[1])
if u[1] != "" && !filterJs(u[1]) { if u[1] != "" && !FilterJs(u[1]) {
bl.URLs = append(bl.URLs, u[1]) bl.URLs = append(bl.URLs, u[1])
} }
} }
@ -170,14 +170,14 @@ func (bl *Baseline) CollectURL() {
for _, reg := range ExtractRegexps["url"][0].CompiledRegexps { for _, reg := range ExtractRegexps["url"][0].CompiledRegexps {
urls := reg.FindAllStringSubmatch(string(bl.Body), -1) urls := reg.FindAllStringSubmatch(string(bl.Body), -1)
for _, u := range urls { for _, u := range urls {
u[1] = formatURL(u[1]) u[1] = CleanURL(u[1])
if u[1] != "" && !filterUrl(u[1]) { if u[1] != "" && !FilterUrl(u[1]) {
bl.URLs = append(bl.URLs, u[1]) bl.URLs = append(bl.URLs, u[1])
} }
} }
} }
bl.URLs = RemoveDuplication(bl.URLs) bl.URLs = iutils.StringsUnique(bl.URLs)
if bl.URLs != nil { if bl.URLs != nil {
bl.Extracteds = append(bl.Extracteds, &parsers.Extracted{ bl.Extracteds = append(bl.Extracteds, &parsers.Extracted{
Name: "crawl", Name: "crawl",
@ -225,7 +225,7 @@ var Distance uint8 = 5 // 数字越小越相似, 数字为0则为完全一致.
func (bl *Baseline) FuzzyCompare(other *Baseline) bool { func (bl *Baseline) FuzzyCompare(other *Baseline) bool {
// 这里使用rawsimhash, 是为了保证一定数量的字符串, 否则超短的body会导致simhash偏差指较大 // 这里使用rawsimhash, 是为了保证一定数量的字符串, 否则超短的body会导致simhash偏差指较大
if other.Distance = parsers.SimhashCompare(other.RawSimhash, bl.RawSimhash); other.Distance < Distance { if other.Distance = encode.SimhashCompare(other.RawSimhash, bl.RawSimhash); other.Distance < Distance {
return true return true
} }
return false return false

View File

@ -37,9 +37,3 @@ var ErrMap = map[ErrorType]string{
func (e ErrorType) Error() string { func (e ErrorType) Error() string {
return ErrMap[e] return ErrMap[e]
} }
type BS []byte
func (b BS) String() string {
return string(b)
}

92
pkg/load.go Normal file
View File

@ -0,0 +1,92 @@
package pkg
import (
"encoding/json"
"github.com/chainreactors/gogo/v2/pkg/fingers"
"github.com/chainreactors/parsers"
"github.com/chainreactors/utils"
"github.com/chainreactors/utils/iutils"
"github.com/chainreactors/words/mask"
"strings"
)
func LoadTemplates() error {
var err error
// load fingers
Fingers, err = fingers.LoadFingers(LoadConfig("http"))
if err != nil {
return err
}
for _, finger := range Fingers {
err := finger.Compile(utils.ParsePorts)
if err != nil {
return err
}
}
for _, f := range Fingers {
for _, rule := range f.Rules {
if rule.SendDataStr != "" {
ActivePath = append(ActivePath, rule.SendDataStr)
}
if rule.Favicon != nil {
for _, mmh3 := range rule.Favicon.Mmh3 {
Mmh3Fingers[mmh3] = f.Name
}
for _, md5 := range rule.Favicon.Md5 {
Md5Fingers[md5] = f.Name
}
}
}
}
// load rule
var data map[string]interface{}
err = json.Unmarshal(LoadConfig("spray_rule"), &data)
if err != nil {
return err
}
for k, v := range data {
Rules[k] = v.(string)
}
// load mask
var keywords map[string]interface{}
err = json.Unmarshal(LoadConfig("spray_common"), &keywords)
if err != nil {
return err
}
for k, v := range keywords {
t := make([]string, len(v.([]interface{})))
for i, vv := range v.([]interface{}) {
t[i] = iutils.ToString(vv)
}
mask.SpecialWords[k] = t
}
var extracts []*parsers.Extractor
err = json.Unmarshal(LoadConfig("extract"), &extracts)
if err != nil {
return err
}
for _, extract := range extracts {
extract.Compile()
ExtractRegexps[extract.Name] = []*parsers.Extractor{extract}
for _, tag := range extract.Tags {
if _, ok := ExtractRegexps[tag]; !ok {
ExtractRegexps[tag] = []*parsers.Extractor{extract}
} else {
ExtractRegexps[tag] = append(ExtractRegexps[tag], extract)
}
}
}
return nil
}
func LoadDefaultDict() []string {
return strings.Split(strings.TrimSpace(string(LoadConfig("spray_default"))), "\n")
}

View File

@ -18,7 +18,7 @@ func NewStatistor(url string) *Statistor {
stat := DefaultStatistor stat := DefaultStatistor
stat.StartTime = time.Now().Unix() stat.StartTime = time.Now().Unix()
stat.Counts = make(map[int]int) stat.Counts = make(map[int]int)
stat.Sources = make(map[int]int) stat.Sources = make(map[parsers.SpraySource]int)
stat.BaseUrl = url stat.BaseUrl = url
return &stat return &stat
} }
@ -32,33 +32,33 @@ func NewStatistorFromStat(origin *Statistor) *Statistor {
RuleFiles: origin.RuleFiles, RuleFiles: origin.RuleFiles,
RuleFilter: origin.RuleFilter, RuleFilter: origin.RuleFilter,
Counts: make(map[int]int), Counts: make(map[int]int),
Sources: map[int]int{}, Sources: map[parsers.SpraySource]int{},
StartTime: time.Now().Unix(), StartTime: time.Now().Unix(),
} }
} }
type Statistor struct { type Statistor struct {
BaseUrl string `json:"url"` BaseUrl string `json:"url"`
Error string `json:"error"` Error string `json:"error"`
Counts map[int]int `json:"counts"` Counts map[int]int `json:"counts"`
Sources map[int]int `json:"sources"` Sources map[parsers.SpraySource]int `json:"sources"`
FailedNumber int32 `json:"failed"` FailedNumber int32 `json:"failed"`
ReqTotal int32 `json:"req_total"` ReqTotal int32 `json:"req_total"`
CheckNumber int `json:"check"` CheckNumber int `json:"check"`
FoundNumber int `json:"found"` FoundNumber int `json:"found"`
FilteredNumber int `json:"filtered"` FilteredNumber int `json:"filtered"`
FuzzyNumber int `json:"fuzzy"` FuzzyNumber int `json:"fuzzy"`
WafedNumber int `json:"wafed"` WafedNumber int `json:"wafed"`
End int `json:"end"` End int `json:"end"`
Offset int `json:"offset"` Offset int `json:"offset"`
Total int `json:"total"` Total int `json:"total"`
StartTime int64 `json:"start_time"` StartTime int64 `json:"start_time"`
EndTime int64 `json:"end_time"` EndTime int64 `json:"end_time"`
WordCount int `json:"word_count"` WordCount int `json:"word_count"`
Word string `json:"word"` Word string `json:"word"`
Dictionaries []string `json:"dictionaries"` Dictionaries []string `json:"dictionaries"`
RuleFiles []string `json:"rule_files"` RuleFiles []string `json:"rule_files"`
RuleFilter string `json:"rule_filter"` RuleFilter string `json:"rule_filter"`
} }
func (stat *Statistor) ColorString() string { func (stat *Statistor) ColorString() string {
@ -92,7 +92,10 @@ func (stat *Statistor) String() string {
return s.String() return s.String()
} }
func (stat *Statistor) CountString() string { func (stat *Statistor) PrintCount() {
if len(stat.Counts) == 0 {
return
}
var s strings.Builder var s strings.Builder
s.WriteString("[stat] ") s.WriteString("[stat] ")
s.WriteString(stat.BaseUrl) s.WriteString(stat.BaseUrl)
@ -102,20 +105,26 @@ func (stat *Statistor) CountString() string {
} }
s.WriteString(fmt.Sprintf(" %d: %d,", k, v)) s.WriteString(fmt.Sprintf(" %d: %d,", k, v))
} }
return s.String() logs.Log.Important(s.String())
} }
func (stat *Statistor) SourceString() string { func (stat *Statistor) PrintSource() {
if len(stat.Sources) == 0 {
return
}
var s strings.Builder var s strings.Builder
s.WriteString("[stat] ") s.WriteString("[stat] ")
s.WriteString(stat.BaseUrl) s.WriteString(stat.BaseUrl)
for k, v := range stat.Sources { for k, v := range stat.Sources {
s.WriteString(fmt.Sprintf(" %s: %d,", parsers.GetSpraySourceName(k), v)) s.WriteString(fmt.Sprintf(" %s: %d,", k.Name(), v))
} }
return s.String() logs.Log.Important(s.String())
} }
func (stat *Statistor) ColorCountString() string { func (stat *Statistor) PrintColorCount() {
if len(stat.Counts) == 0 {
return
}
var s strings.Builder var s strings.Builder
s.WriteString("[stat] ") s.WriteString("[stat] ")
s.WriteString(stat.BaseUrl) s.WriteString(stat.BaseUrl)
@ -125,17 +134,20 @@ func (stat *Statistor) ColorCountString() string {
} }
s.WriteString(fmt.Sprintf(" %s: %s,", logs.Cyan(strconv.Itoa(k)), logs.YellowBold(strconv.Itoa(v)))) s.WriteString(fmt.Sprintf(" %s: %s,", logs.Cyan(strconv.Itoa(k)), logs.YellowBold(strconv.Itoa(v))))
} }
return s.String() logs.Log.Important(s.String())
} }
func (stat *Statistor) ColorSourceString() string { func (stat *Statistor) PrintColorSource() {
if len(stat.Sources) == 0 {
return
}
var s strings.Builder var s strings.Builder
s.WriteString("[stat] ") s.WriteString("[stat] ")
s.WriteString(stat.BaseUrl) s.WriteString(stat.BaseUrl)
for k, v := range stat.Sources { for k, v := range stat.Sources {
s.WriteString(fmt.Sprintf(" %s: %s,", logs.Cyan(parsers.GetSpraySourceName(k)), logs.YellowBold(strconv.Itoa(v)))) s.WriteString(fmt.Sprintf(" %s: %s,", logs.Cyan(k.Name()), logs.YellowBold(strconv.Itoa(v))))
} }
return s.String() logs.Log.Important(s.String())
} }
func (stat *Statistor) Json() string { func (stat *Statistor) Json() string {

View File

@ -1,22 +1,31 @@
package pkg package pkg
import ( import (
"encoding/json" "github.com/antonmedv/expr"
"github.com/antonmedv/expr/vm"
"github.com/chainreactors/gogo/v2/pkg/fingers" "github.com/chainreactors/gogo/v2/pkg/fingers"
"github.com/chainreactors/ipcs" "github.com/chainreactors/logs"
"github.com/chainreactors/parsers" "github.com/chainreactors/parsers"
"github.com/chainreactors/parsers/iutils" "github.com/chainreactors/utils/iutils"
"github.com/chainreactors/words/mask"
"math/rand" "math/rand"
"net/url" "net/url"
"os"
"path" "path"
"path/filepath"
"strconv" "strconv"
"strings" "strings"
"time" "time"
"unsafe" "unsafe"
) )
var (
LogVerbose = logs.Warn - 2
LogFuzz = logs.Warn - 1
WhiteStatus = []int{} // cmd input, 200
BlackStatus = []int{} // cmd input, 400,410
FuzzyStatus = []int{} // cmd input, 500,501,502,503
WAFStatus = []int{493, 418, 1020, 406}
UniqueStatus = []int{} // 相同unique的403表示命中了同一条acl, 相同unique的200表示default页面
)
var ( var (
Md5Fingers map[string]string = make(map[string]string) Md5Fingers map[string]string = make(map[string]string)
Mmh3Fingers map[string]string = make(map[string]string) Mmh3Fingers map[string]string = make(map[string]string)
@ -52,35 +61,29 @@ var (
"video/avi": "avi", "video/avi": "avi",
"image/x-icon": "ico", "image/x-icon": "ico",
} }
// from feroxbuster
randomUserAgent = []string{
"Mozilla/5.0 (Linux; Android 8.0.0; SM-G960F Build/R16NW) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.84 Mobile Safari/537.36",
"Mozilla/5.0 (iPhone; CPU iPhone OS 12_0 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/12.0 Mobile/15E148 Safari/604.1",
"Mozilla/5.0 (Windows Phone 10.0; Android 6.0.1; Microsoft; RM-1152) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Mobile Safari/537.36 Edge/15.15254",
"Mozilla/5.0 (Linux; Android 7.0; Pixel C Build/NRD90M; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/52.0.2743.98 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36 Edge/12.246",
"Mozilla/5.0 (X11; CrOS x86_64 8172.45.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.64 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_2) AppleWebKit/601.3.9 (KHTML, like Gecko) Version/9.0.2 Safari/601.3.9",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.111 Safari/537.36",
"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:15.0) Gecko/20100101 Firefox/15.0.1",
"Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)",
"Mozilla/5.0 (compatible; bingbot/2.0; +http://www.bing.com/bingbot.htm)",
"Mozilla/5.0 (compatible; Yahoo! Slurp; http://help.yahoo.com/help/us/ysearch/slurp)",
}
uacount = len(randomUserAgent)
) )
func RemoveDuplication(arr []string) []string { type BS []byte
set := make(map[string]struct{}, len(arr))
j := 0
for _, v := range arr {
_, ok := set[v]
if ok {
continue
}
set[v] = struct{}{}
arr[j] = v
j++
}
return arr[:j] func (b BS) String() string {
} return string(b)
// 判断是否存在标准输入数据
func HasStdin() bool {
stat, err := os.Stdin.Stat()
if err != nil {
return false
}
isPipedFromChrDev := (stat.Mode() & os.ModeCharDevice) == 0
isPipedFromFIFO := (stat.Mode() & os.ModeNamedPipe) != 0
return isPipedFromChrDev || isPipedFromFIFO
} }
const letters = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ" const letters = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
@ -133,83 +136,6 @@ func RandHost() string {
return *(*string)(unsafe.Pointer(&b)) return *(*string)(unsafe.Pointer(&b))
} }
func LoadTemplates() error {
var err error
// load fingers
Fingers, err = fingers.LoadFingers(LoadConfig("http"))
if err != nil {
return err
}
for _, finger := range Fingers {
err := finger.Compile(ipcs.ParsePorts)
if err != nil {
return err
}
}
for _, f := range Fingers {
for _, rule := range f.Rules {
if rule.SendDataStr != "" {
ActivePath = append(ActivePath, rule.SendDataStr)
}
if rule.Favicon != nil {
for _, mmh3 := range rule.Favicon.Mmh3 {
Mmh3Fingers[mmh3] = f.Name
}
for _, md5 := range rule.Favicon.Md5 {
Md5Fingers[md5] = f.Name
}
}
}
}
// load rule
var data map[string]interface{}
err = json.Unmarshal(LoadConfig("rule"), &data)
if err != nil {
return err
}
for k, v := range data {
Rules[k] = v.(string)
}
// load mask
var keywords map[string]interface{}
err = json.Unmarshal(LoadConfig("mask"), &keywords)
if err != nil {
return err
}
for k, v := range keywords {
t := make([]string, len(v.([]interface{})))
for i, vv := range v.([]interface{}) {
t[i] = iutils.ToString(vv)
}
mask.SpecialWords[k] = t
}
var extracts []*parsers.Extractor
err = json.Unmarshal(LoadConfig("extract"), &extracts)
if err != nil {
return err
}
for _, extract := range extracts {
extract.Compile()
ExtractRegexps[extract.Name] = []*parsers.Extractor{extract}
for _, tag := range extract.Tags {
if _, ok := ExtractRegexps[tag]; !ok {
ExtractRegexps[tag] = []*parsers.Extractor{extract}
} else {
ExtractRegexps[tag] = append(ExtractRegexps[tag], extract)
}
}
}
return nil
}
func FingerDetect(content []byte) parsers.Frameworks { func FingerDetect(content []byte) parsers.Frameworks {
frames := make(parsers.Frameworks) frames := make(parsers.Frameworks)
for _, finger := range Fingers { for _, finger := range Fingers {
@ -222,7 +148,7 @@ func FingerDetect(content []byte) parsers.Frameworks {
return frames return frames
} }
func filterJs(u string) bool { func FilterJs(u string) bool {
if commonFilter(u) { if commonFilter(u) {
return true return true
} }
@ -230,7 +156,7 @@ func filterJs(u string) bool {
return false return false
} }
func filterUrl(u string) bool { func FilterUrl(u string) bool {
if commonFilter(u) { if commonFilter(u) {
return true return true
} }
@ -249,8 +175,10 @@ func filterUrl(u string) bool {
return false return false
} }
func formatURL(u string) string { func CleanURL(u string) string {
// 去掉frag与params, 节约url.parse性能, 防止带参数造成意外的影响 // 去掉frag与params, 节约url.parse性能, 防止带参数造成意外的影响
u = strings.Trim(u, "\"")
u = strings.Trim(u, "'")
if strings.Contains(u, "2f") || strings.Contains(u, "2F") { if strings.Contains(u, "2f") || strings.Contains(u, "2F") {
u = strings.ReplaceAll(u, "\\u002F", "/") u = strings.ReplaceAll(u, "\\u002F", "/")
u = strings.ReplaceAll(u, "\\u002f", "/") u = strings.ReplaceAll(u, "\\u002f", "/")
@ -341,8 +269,127 @@ func CRC16Hash(data []byte) uint16 {
return crc16 return crc16
} }
func UniqueHash(bl *Baseline) uint16 { func SafePath(dir, u string) string {
// 由host+状态码+重定向url+content-type+title+length舍去个位与十位组成的hash hasSlash := strings.HasPrefix(u, "/")
// body length可能会导致一些误报, 目前没有更好的解决办法 if hasSlash {
return CRC16Hash([]byte(bl.Host + strconv.Itoa(bl.Status) + bl.RedirectURL + bl.ContentType + bl.Title + strconv.Itoa(bl.BodyLength/100*100))) return path.Join(dir, u[1:])
} else {
return path.Join(dir, u)
}
}
func RelaPath(base, u string) string {
// 拼接相对目录, 不使用path.join的原因是, 如果存在"////"这样的情况, 可能真的是有意义的路由, 不能随意去掉.
// "" /a /a
// "" a /a
// / "" /
// /a/ b /a/b
// /a/ /b /a/b
// /a b /b
// /a /b /b
if u == "" {
return base
}
pathSlash := strings.HasPrefix(u, "/")
if base == "" {
if pathSlash {
return u[1:]
} else {
return "/" + u
}
} else if strings.HasSuffix(base, "/") {
if pathSlash {
return base + u[1:]
} else {
return base + u
}
} else {
if pathSlash {
return Dir(base) + u[1:]
} else {
return Dir(base) + u
}
}
}
func Dir(u string) string {
// 安全的获取目录, 不会额外处理多个"//", 并非用来获取上级目录
// /a /
// /a/ /a/
// a/ a/
// aaa /
if strings.HasSuffix(u, "/") {
return u
} else if i := strings.LastIndex(u, "/"); i == -1 {
return "/"
} else {
return u[:i+1]
}
}
func UniqueHash(bl *Baseline) uint16 {
// 由host+状态码+重定向url+content-type+title+length舍去个位组成的hash
// body length可能会导致一些误报, 目前没有更好的解决办法
return CRC16Hash([]byte(bl.Host + strconv.Itoa(bl.Status) + bl.RedirectURL + bl.ContentType + bl.Title + strconv.Itoa(bl.BodyLength/10*10)))
}
func FormatURL(base, u string) string {
if strings.HasPrefix(u, "http") {
parsed, err := url.Parse(u)
if err != nil {
return ""
}
return parsed.Path
} else if strings.HasPrefix(u, "//") {
parsed, err := url.Parse(u)
if err != nil {
return ""
}
return parsed.Path
} else if strings.HasPrefix(u, "/") {
// 绝对目录拼接
// 不需要进行处理, 用来跳过下面的判断
return u
} else if strings.HasPrefix(u, "./") {
// "./"相对目录拼接
return RelaPath(base, u[2:])
} else if strings.HasPrefix(u, "../") {
return path.Join(Dir(base), u)
} else {
// 相对目录拼接
return RelaPath(base, u)
}
}
func BaseURL(u *url.URL) string {
return u.Scheme + "://" + u.Host
}
func RandomUA() string {
return randomUserAgent[rand.Intn(uacount)]
}
func CompareWithExpr(exp *vm.Program, params map[string]interface{}) bool {
res, err := expr.Run(exp, params)
if err != nil {
logs.Log.Warn(err.Error())
}
if res == true {
return true
} else {
return false
}
}
func MatchWithGlobs(u string, globs []string) bool {
for _, glob := range globs {
ok, err := filepath.Match(glob, u)
if err == nil && ok {
return true
}
}
return false
} }

View File

@ -1,7 +1,19 @@
//go:generate go run templates/templates_gen.go -t templates -o pkg/templates.go -need http,rule,mask,extract //go:generate go run templates/templates_gen.go -t templates -o pkg/templates.go -need spray
package main package main
import "github.com/chainreactors/spray/cmd" import (
"github.com/chainreactors/spray/cmd"
"github.com/gookit/config/v2"
"github.com/gookit/config/v2/yaml"
)
func init() {
config.WithOptions(func(opt *config.Options) {
opt.DecoderConfig.TagName = "config"
opt.ParseDefault = true
})
config.AddDriver(yaml.Driver)
}
func main() { func main() {
//f, _ := os.Create("cpu.txt") //f, _ := os.Create("cpu.txt")

@ -1 +1 @@
Subproject commit 241a707ce2a8d32b8bc96ebb5a06bcfdecb54b24 Subproject commit 998cdc05018e9c221e91166d10c7b2e1b62396cf