enhance basepool and brutepool structure

M09Ic 2024-08-25 23:06:10 +08:00
parent 678a6a44e4
commit de168e0be9
5 changed files with 78 additions and 84 deletions

View File

@@ -265,6 +265,7 @@ func (opt *Option) NewRunner() (*Runner, error) {
 		Option:   opt,
 		taskCh:   make(chan *Task),
 		outputCh: make(chan *pkg.Baseline, 256),
+		poolwg:   &sync.WaitGroup{},
 		outwg:    &sync.WaitGroup{},
 		fuzzyCh:  make(chan *pkg.Baseline, 256),
 		Headers:  make(map[string]string),
@@ -541,7 +542,7 @@ func (opt *Option) BuildWords(r *Runner) error {
 		logs.Log.Logf(pkg.LogVerbose, "Loaded %d word from %s", len(dicts[i]), f)
 	}
-	if len(dicts) == 0 && opt.Word == "" {
+	if len(dicts) == 0 && opt.Word == "" && len(opt.Rules) == 0 && len(opt.AppendRule) == 0 {
 		r.IsCheck = true
 	}
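
The fallback to check-only mode is now stricter: the runner only degrades when there is nothing at all to generate requests from. A minimal restatement of the new condition as a standalone helper (hypothetical name and parameter types, not part of the commit):

func shouldCheckOnly(dicts [][]string, word string, rules, appendRules []string) bool {
	// mirrors the updated guard in BuildWords: no dictionaries, no word mask,
	// no rules and no append rules means there is nothing to spray, so only
	// the basic target check runs
	return len(dicts) == 0 && word == "" && len(rules) == 0 && len(appendRules) == 0
}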

View File

@@ -10,11 +10,14 @@ import (
 	"github.com/chainreactors/spray/pkg"
 	"github.com/chainreactors/utils/iutils"
 	"github.com/chainreactors/words"
+	"github.com/chainreactors/words/mask"
+	"github.com/chainreactors/words/rule"
 	"github.com/panjf2000/ants/v2"
 	"github.com/valyala/fasthttp"
 	"golang.org/x/time/rate"
 	"math/rand"
 	"net/url"
+	"path"
 	"strings"
 	"sync"
 	"sync/atomic"
@@ -378,12 +381,9 @@ func (pool *BrutePool) Invoke(v interface{}) {
 		pool.locker.Lock()
 		pool.index = bl
 		pool.locker.Unlock()
-		if bl.Status == 200 || (bl.Status/100) == 3 {
-			// keep the index result in the output
-			pool.wg.Add(1)
-			pool.doCrawl(bl)
-			pool.putToOutput(bl)
-		}
+		pool.wg.Add(1)
+		pool.doCrawl(bl)
+		pool.putToOutput(bl)
 		pool.initwg.Done()
 	case parsers.CheckSource:
 		if bl.ErrString != "" {
@@ -521,13 +521,10 @@ func (pool *BrutePool) Handler() {
 			}
 			if bl.IsValid || bl.IsFuzzy {
-				pool.wg.Add(2)
+				pool.wg.Add(3)
 				pool.doCrawl(bl)
-				pool.doRule(bl)
-				if iutils.IntsContains(pkg.WhiteStatus, bl.Status) || iutils.IntsContains(pkg.UniqueStatus, bl.Status) {
-					pool.wg.Add(1)
-					pool.doAppendWords(bl)
-				}
+				pool.doAppendRule(bl)
+				pool.doAppendWords(bl)
 			}
 			// recursion requires a valid bl, mod set to path-spray, and the current depth below the max recursion depth
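
The handler now reserves three WaitGroup slots up front and hands one to each helper, so every helper must release exactly one slot even when it bails out early (as doAppendRule and doAppendWords do when they are disabled or would recurse into themselves). A minimal sketch of that accounting convention, using hypothetical names rather than spray's types:

package main

import (
	"fmt"
	"sync"
)

// process stands in for doCrawl/doAppendRule/doAppendWords: the caller has
// already reserved one slot, so the helper owns exactly one Done(), whether
// it returns early or hands the work to a goroutine.
func process(wg *sync.WaitGroup, enabled bool, name string) {
	if !enabled {
		wg.Done() // early return still releases the reserved slot
		return
	}
	go func() {
		defer wg.Done()
		fmt.Println("expanding", name)
	}()
}

func main() {
	wg := &sync.WaitGroup{}
	wg.Add(3) // mirrors pool.wg.Add(3) before the three helpers
	process(wg, true, "crawl")
	process(wg, false, "append-rule") // disabled: slot is released immediately
	process(wg, true, "append-words")
	wg.Wait()
}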
@@ -549,6 +546,67 @@ func (pool *BrutePool) Handler() {
 	pool.analyzeDone = true
 }
 
+func (pool *BrutePool) doAppendRule(bl *pkg.Baseline) {
+	if pool.AppendRule == nil || bl.Source == parsers.RuleSource {
+		pool.wg.Done()
+		return
+	}
+	go func() {
+		defer pool.wg.Done()
+		for u := range rule.RunAsStream(pool.AppendRule.Expressions, path.Base(bl.Path)) {
+			pool.addAddition(&Unit{
+				path:   pkg.Dir(bl.Url.Path) + u,
+				source: parsers.RuleSource,
+			})
+		}
+	}()
+}
+
+func (pool *BrutePool) doAppendWords(bl *pkg.Baseline) {
+	if pool.AppendWords == nil || bl.Source == parsers.AppendSource || bl.Source == parsers.RuleSource {
+		// prevent self-recursion
+		pool.wg.Done()
+		return
+	}
+	go func() {
+		defer pool.wg.Done()
+		for _, u := range pool.AppendWords {
+			pool.addAddition(&Unit{
+				path:   pkg.SafePath(bl.Path, u),
+				source: parsers.AppendSource,
+			})
+		}
+	}()
+}
+
+func (pool *BrutePool) doAppend(bl *pkg.Baseline) {
+	pool.wg.Add(2)
+	pool.doAppendWords(bl)
+	pool.doAppendRule(bl)
+}
+
+func (pool *BrutePool) doActive() {
+	defer pool.wg.Done()
+	for _, u := range pkg.ActivePath {
+		pool.addAddition(&Unit{
+			path:   pool.dir + u[1:],
+			source: parsers.FingerSource,
+		})
+	}
+}
+
+func (pool *BrutePool) doCommonFile() {
+	defer pool.wg.Done()
+	for _, u := range mask.SpecialWords["common_file"] {
+		pool.addAddition(&Unit{
+			path:   pool.dir + u,
+			source: parsers.CommonFileSource,
+		})
+	}
+}
+
 func (pool *BrutePool) PreCompare(resp *ihttp.Response) error {
 	status := resp.StatusCode()
 	if iutils.IntsContains(pkg.WhiteStatus, status) {
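
doAppendRule takes the base name of a confirmed hit, runs the append rules over it, and re-anchors every candidate on the hit's parent directory before queueing it. A rough standalone sketch of that expansion idea, with a plain suffix list standing in for rule.RunAsStream and path.Dir standing in for pkg.Dir:

package main

import (
	"fmt"
	"path"
)

// expand derives sibling candidates from a hit: mutate its base name, then
// join each candidate back onto the parent directory (the real code streams
// mutations from rule expressions instead of a fixed suffix list).
func expand(hit string, suffixes []string) []string {
	base := path.Base(hit)     // e.g. "login.php"
	dir := path.Dir(hit) + "/" // e.g. "/admin/"
	out := make([]string, 0, len(suffixes))
	for _, s := range suffixes {
		out = append(out, dir+base+s)
	}
	return out
}

func main() {
	for _, p := range expand("/admin/login.php", []string{".bak", "~", ".old"}) {
		fmt.Println(p) // each would be queued via addAddition as a RuleSource unit
	}
}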

View File

@@ -51,7 +51,7 @@ type Config struct {
 	Active          bool
 	Bak             bool
 	Common          bool
-	Retry           int
+	RetryLimit      int
 	RandomUserAgent bool
 	Random          string
 	Index           string

View File

@@ -7,10 +7,7 @@ import (
 	"github.com/chainreactors/spray/internal/ihttp"
 	"github.com/chainreactors/spray/pkg"
 	"github.com/chainreactors/words"
-	"github.com/chainreactors/words/mask"
-	"github.com/chainreactors/words/rule"
 	"github.com/panjf2000/ants/v2"
-	"path"
 	"sync"
 )
@@ -49,50 +46,8 @@ func (pool *BasePool) doRedirect(bl *pkg.Baseline, depth int) {
 	}()
 }
 
-func (pool *BasePool) doRule(bl *pkg.Baseline) {
-	if pool.AppendRule == nil {
-		pool.wg.Done()
-		return
-	}
-	if bl.Source == parsers.RuleSource {
-		pool.wg.Done()
-		return
-	}
-	go func() {
-		defer pool.wg.Done()
-		for u := range rule.RunAsStream(pool.AppendRule.Expressions, path.Base(bl.Path)) {
-			pool.addAddition(&Unit{
-				path:   pkg.Dir(bl.Url.Path) + u,
-				source: parsers.RuleSource,
-			})
-		}
-	}()
-}
-
-func (pool *BasePool) doAppendWords(bl *pkg.Baseline) {
-	if pool.AppendWords == nil {
-		pool.wg.Done()
-		return
-	}
-	if bl.Source == parsers.AppendSource {
-		pool.wg.Done()
-		return
-	}
-	go func() {
-		defer pool.wg.Done()
-		for _, u := range pool.AppendWords {
-			pool.addAddition(&Unit{
-				path:   pkg.SafePath(bl.Path, u),
-				source: parsers.AppendSource,
-			})
-		}
-	}()
-}
-
 func (pool *BasePool) doRetry(bl *pkg.Baseline) {
-	if bl.Retry >= pool.Retry {
+	if bl.Retry >= pool.RetryLimit {
 		return
 	}
 	pool.wg.Add(1)
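
doRetry now reads the renamed RetryLimit field. A minimal sketch of the guard with hypothetical stand-in types: each baseline carries its own attempt counter, and once it reaches the pool-wide limit the unit is dropped instead of being re-queued:

// hypothetical stand-ins for pkg.Baseline and BasePool; only the guard mirrors the diff
type baseline struct{ Retry int }

type basePool struct{ RetryLimit int }

func (p *basePool) retry(bl *baseline) bool {
	if bl.Retry >= p.RetryLimit {
		return false // limit reached: give up on this unit
	}
	bl.Retry++
	return true // caller would re-submit the request here
}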
@@ -106,26 +61,6 @@ func (pool *BasePool) doRetry(bl *pkg.Baseline) {
 	}()
 }
 
-func (pool *BasePool) doActive() {
-	defer pool.wg.Done()
-	for _, u := range pkg.ActivePath {
-		pool.addAddition(&Unit{
-			path:   pool.dir + u[1:],
-			source: parsers.FingerSource,
-		})
-	}
-}
-
-func (pool *BasePool) doCommonFile() {
-	defer pool.wg.Done()
-	for _, u := range mask.SpecialWords["common_file"] {
-		pool.addAddition(&Unit{
-			path:   pool.dir + u,
-			source: parsers.CommonFileSource,
-		})
-	}
-}
-
 func (pool *BasePool) addAddition(u *Unit) {
 	// forcibly suppress errors here to prevent goroutine leaks
 	pool.wg.Add(1)

View File

@@ -30,7 +30,7 @@ type Runner struct {
 	*Option
 	taskCh   chan *Task
-	poolwg   sync.WaitGroup
+	poolwg   *sync.WaitGroup
 	outwg    *sync.WaitGroup
 	outputCh chan *pkg.Baseline
 	fuzzyCh  chan *pkg.Baseline
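
Runner.poolwg changes from a sync.WaitGroup value to a pointer, matching the &sync.WaitGroup{} initialization added in NewRunner. Holding a pointer matters because a sync.WaitGroup must not be copied after first use; sharing it by pointer keeps every holder on the same counter. A minimal sketch with hypothetical names, not spray's actual Runner:

package main

import (
	"fmt"
	"sync"
)

// runner stands in for spray's Runner: because poolwg is a pointer, copies of
// the struct (including value receivers) still operate on one shared counter,
// whereas copying a sync.WaitGroup value is flagged by go vet's copylocks check.
type runner struct {
	poolwg *sync.WaitGroup
}

func newRunner() *runner {
	return &runner{poolwg: &sync.WaitGroup{}}
}

func (r runner) startPool(id int) { // value receiver is safe only because poolwg is a pointer
	r.poolwg.Add(1)
	go func() {
		defer r.poolwg.Done()
		fmt.Println("pool", id, "finished")
	}()
}

func main() {
	r := newRunner()
	for i := 0; i < 3; i++ {
		r.startPool(i)
	}
	r.poolwg.Wait()
}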
@@ -81,15 +81,15 @@ func (r *Runner) PrepareConfig() *pool.Config {
 		MatchExpr:   r.MatchExpr,
 		FilterExpr:  r.FilterExpr,
 		RecuExpr:    r.RecursiveExpr,
-		AppendRule:  r.AppendRules,
-		AppendWords: r.AppendWords,
+		AppendRule:  r.AppendRules, // append rule-generated paths to valid directories
+		AppendWords: r.AppendWords, // append wordlist entries to valid directories
 		//IgnoreWaf: r.IgnoreWaf,
 		Crawl:           r.Crawl,
 		Scope:           r.Scope,
 		Active:          r.Finger,
 		Bak:             r.Bak,
 		Common:          r.Common,
-		Retry:           r.RetryCount,
+		RetryLimit:      r.RetryCount,
 		ClientType:      r.ClientType,
 		RandomUserAgent: r.RandomUserAgent,
 		Random:          r.Random,