refactor plugin

M09Ic 2024-08-26 01:20:03 +08:00
parent 491b8c16a5
commit 105c426396
5 changed files with 123 additions and 113 deletions

View File

@@ -128,7 +128,7 @@ func Spray() {
         logs.Log.Errorf(err.Error())
         return
     }
-    if option.ReadAll || runner.Crawl {
+    if option.ReadAll || runner.CrawlPlugin {
         ihttp.DefaultMaxBodySize = -1
     }

View File

@@ -111,12 +111,11 @@ type PluginOptions struct {
     Advance bool `short:"a" long:"advance" description:"Bool, enable all plugin" config:"all" `
     Extracts []string `long:"extract" description:"Strings, extract response, e.g.: --extract js --extract ip --extract version:(.*?)" config:"extract"`
     ExtractConfig string `long:"extract-config" description:"String, extract config filename" config:"extract-config"`
-    Active bool `long:"active" description:"Bool, enable active finger path"`
-    Recon bool `long:"recon" description:"Bool, enable recon" config:"recon"`
-    Bak bool `long:"bak" description:"Bool, enable bak found" config:"bak"`
-    FileBak bool `long:"file-bak" description:"Bool, enable valid result bak found, equal --append-rule rule/filebak.txt" config:"file-bak"`
-    Common bool `long:"common" description:"Bool, enable common file found" config:"common"`
-    Crawl bool `long:"crawl" description:"Bool, enable crawl" config:"crawl"`
+    ActivePlugin bool `long:"active" description:"Bool, enable active finger path"`
+    ReconPlugin bool `long:"recon" description:"Bool, enable recon" config:"recon"`
+    BakPlugin bool `long:"bak" description:"Bool, enable bak found" config:"bak"`
+    CommonPlugin bool `long:"common" description:"Bool, enable common file found" config:"common"`
+    CrawlPlugin bool `long:"crawl" description:"Bool, enable crawl" config:"crawl"`
     CrawlDepth int `long:"crawl-depth" default:"3" description:"Int, crawl depth" config:"crawl-depth"`
 }
@@ -310,52 +309,9 @@ func (opt *Option) NewRunner() (*Runner, error) {
         r.Threads = 1000
     }
-    if opt.Recon {
-        pkg.Extractors["recon"] = pkg.ExtractRegexps["pentest"]
-    }
-    if opt.Finger {
-        pkg.EnableAllFingerEngine = true
-    }
-    // brute only
-    if opt.Advance {
-        r.Crawl = true
-        r.Finger = true
-        r.Bak = true
-        r.Common = true
-        r.Active = true
-        pkg.EnableAllFingerEngine = true
-        pkg.Extractors["recon"] = pkg.ExtractRegexps["pentest"]
-        r.bruteMod = true
-        opt.AppendRule = append(opt.AppendRule, "filebak")
-    }
-    if opt.FileBak {
-        r.bruteMod = true
-        opt.AppendRule = append(opt.AppendRule, "filebak")
-    }
-    if opt.Common {
-        r.bruteMod = true
-        r.AppendWords = append(r.AppendWords, mask.SpecialWords["common_file"]...)
-    }
-    if opt.Active {
-        r.bruteMod = true
-        r.AppendWords = append(r.AppendWords, pkg.ActivePath...)
-    }
-    if opt.Crawl {
-        r.bruteMod = true
-    }
-    opt.PrintPlugin()
-    if r.bruteMod {
-        logs.Log.Important("enabling brute mod, because of enabled brute plugin")
-    }
-    if opt.NoScope {
-        r.Scope = []string{"*"}
+    err = opt.BuildPlugin(r)
+    if err != nil {
+        return nil, err
     }
     err = opt.BuildWords(r)
@@ -494,25 +450,21 @@ func (opt *Option) NewRunner() (*Runner, error) {
 func (opt *Option) PrintPlugin() {
     var s strings.Builder
-    if opt.Crawl {
+    if opt.CrawlPlugin {
         s.WriteString("crawl enable; ")
     }
     if opt.Finger {
         s.WriteString("active fingerprint enable; ")
     }
-    if opt.Bak {
+    if opt.BakPlugin {
         s.WriteString("bak file enable; ")
     }
-    if opt.Common {
+    if opt.CommonPlugin {
         s.WriteString("common file enable; ")
     }
-    if opt.Recon {
+    if opt.ReconPlugin {
         s.WriteString("recon enable; ")
     }
-    if opt.FileBak {
-        s.WriteString("file bak enable; ")
-    }
     if opt.RetryCount > 0 {
         s.WriteString("Retry Count: " + strconv.Itoa(opt.RetryCount))
     }
@@ -522,6 +474,55 @@ func (opt *Option) PrintPlugin() {
     }
 }
+func (opt *Option) BuildPlugin(r *Runner) error {
+    // brute only
+    if opt.Advance {
+        opt.CrawlPlugin = true
+        opt.Finger = true
+        opt.BakPlugin = true
+        opt.CommonPlugin = true
+        opt.ActivePlugin = true
+    }
+    if opt.ReconPlugin {
+        pkg.Extractors["recon"] = pkg.ExtractRegexps["pentest"]
+    }
+    if opt.Finger {
+        pkg.EnableAllFingerEngine = true
+    }
+    if opt.BakPlugin {
+        r.bruteMod = true
+        opt.AppendRule = append(opt.AppendRule, "filebak")
+        r.AppendWords = append(r.AppendWords, pkg.GetPresetWordList([]string{"bak_file"})...)
+    }
+    if opt.CommonPlugin {
+        r.bruteMod = true
+        r.AppendWords = append(r.AppendWords, pkg.GetPresetWordList([]string{"common_file", "log_file"})...)
+    }
+    if opt.ActivePlugin {
+        r.bruteMod = true
+        r.AppendWords = append(r.AppendWords, pkg.ActivePath...)
+    }
+    if opt.CrawlPlugin {
+        r.bruteMod = true
+    }
+    opt.PrintPlugin()
+    if r.bruteMod {
+        logs.Log.Important("enabling brute mod, because of enabled brute plugin")
+    }
+    if opt.NoScope {
+        r.Scope = []string{"*"}
+    }
+    return nil
+}
 func (opt *Option) BuildWords(r *Runner) error {
     var dicts [][]string
     var err error
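Note on the option.go changes above: `-a/--advance` no longer writes Runner fields directly inside `NewRunner`; it only switches on the individual `*Plugin` options, and the new `BuildPlugin` derives `bruteMod`, the appended `filebak` rule, and the preset wordlists from them (the separate `--file-bak` flag is gone, its rule-append behaviour folded into `--bak`). Below is a minimal standalone sketch of that flow; the `Option` and `Runner` types here are simplified stand-ins for spray's internal types, not the real definitions, and wordlist appending is omitted.

package main

import "fmt"

// Simplified stand-ins for spray's internal Option and Runner types.
type Option struct {
    Advance      bool
    CrawlPlugin  bool
    Finger       bool
    BakPlugin    bool
    CommonPlugin bool
    ActivePlugin bool
    AppendRule   []string
}

type Runner struct {
    bruteMod bool
}

// buildPlugin mirrors the shape of the new Option.BuildPlugin: expand the
// --advance shortcut into the individual plugin switches, then let each
// enabled plugin contribute its append rule and flip bruteMod.
func (opt *Option) buildPlugin(r *Runner) error {
    if opt.Advance {
        opt.CrawlPlugin = true
        opt.Finger = true
        opt.BakPlugin = true
        opt.CommonPlugin = true
        opt.ActivePlugin = true
    }
    if opt.BakPlugin {
        r.bruteMod = true
        opt.AppendRule = append(opt.AppendRule, "filebak")
    }
    if opt.CommonPlugin || opt.ActivePlugin || opt.CrawlPlugin {
        r.bruteMod = true
    }
    return nil
}

func main() {
    opt := &Option{Advance: true}
    r := &Runner{}
    _ = opt.buildPlugin(r)
    fmt.Println(opt.CrawlPlugin, r.bruteMod, opt.AppendRule) // true true [filebak]
}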

View File

@ -10,7 +10,6 @@ import (
"github.com/chainreactors/spray/pkg" "github.com/chainreactors/spray/pkg"
"github.com/chainreactors/utils/iutils" "github.com/chainreactors/utils/iutils"
"github.com/chainreactors/words" "github.com/chainreactors/words"
"github.com/chainreactors/words/mask"
"github.com/chainreactors/words/rule" "github.com/chainreactors/words/rule"
"github.com/panjf2000/ants/v2" "github.com/panjf2000/ants/v2"
"github.com/valyala/fasthttp" "github.com/valyala/fasthttp"
@@ -597,7 +596,7 @@ func (pool *BrutePool) doActive() {
 func (pool *BrutePool) doCommonFile() {
     defer pool.wg.Done()
-    for _, u := range mask.SpecialWords["common_file"] {
+    for _, u := range pkg.GetPresetWordList([]string{"common_file", "log_file"}) {
         pool.addAddition(&Unit{
             path: pool.dir + u,
             source: parsers.CommonFileSource,
@@ -685,6 +684,50 @@ func (pool *BrutePool) BaseCompare(bl *pkg.Baseline) bool {
     return true
 }
+func (pool *BrutePool) addFuzzyBaseline(bl *pkg.Baseline) {
+    if _, ok := pool.baselines[bl.Status]; !ok && (EnableAllFuzzy || iutils.IntsContains(pkg.FuzzyStatus, bl.Status)) {
+        bl.Collect()
+        pool.doCrawl(bl) // even non-valid pages may contain special URLs worth crawling
+        pool.baselines[bl.Status] = bl
+        logs.Log.Logf(pkg.LogVerbose, "[baseline.%dinit] %s", bl.Status, bl.Format([]string{"status", "length", "spend", "title", "frame", "redirect"}))
+    }
+}
+func (pool *BrutePool) recover() {
+    logs.Log.Errorf("%s ,failed request exceeds the threshold , task will exit. Breakpoint %d", pool.BaseURL, pool.wordOffset)
+    for i, bl := range pool.FailedBaselines {
+        if i > int(pool.BreakThreshold) {
+            break
+        }
+        logs.Log.Errorf("[failed.%d] %s", i, bl.String())
+    }
+}
+func (pool *BrutePool) Close() {
+    for pool.analyzeDone {
+        // wait for the buffered pending tasks to finish
+        time.Sleep(time.Duration(100) * time.Millisecond)
+    }
+    close(pool.additionCh) // close the addition channel
+    close(pool.checkCh) // close the check channel
+    pool.Statistor.EndTime = time.Now().Unix()
+    pool.Bar.Close()
+}
+func (pool *BrutePool) safePath(u string) string {
+    // auto-generated paths (e.g. init, check, common) are joined onto the relative directory via safePath to avoid "//"
+    if pool.isDir {
+        return pkg.SafePath(pool.dir, u)
+    } else {
+        return pkg.SafePath(pool.url.Path+"/", u)
+    }
+}
+func (pool *BrutePool) resetFailed() {
+    pool.failedCount = 1
+    pool.FailedBaselines = nil
+}
 func (pool *BrutePool) doCheck() {
     if pool.failedCount > pool.BreakThreshold {
         // when the error count exceeds the limit, end the task
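The safePath comment above is about joining auto-generated words onto a directory without producing "//". A hypothetical sketch of that joining behaviour follows; the real pkg.SafePath is not shown in this diff and may differ.

package main

import (
    "fmt"
    "strings"
)

// safeJoin is a hypothetical stand-in for pkg.SafePath: strip any trailing
// slash from the directory and any leading slash from the word, then join
// with exactly one slash so "dir/" + "/check" never becomes "dir//check".
func safeJoin(dir, u string) string {
    return strings.TrimSuffix(dir, "/") + "/" + strings.TrimPrefix(u, "/")
}

func main() {
    fmt.Println(safeJoin("/admin/", "/check")) // /admin/check
    fmt.Println(safeJoin("/admin", "check"))   // /admin/check
}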
@@ -755,15 +798,6 @@ func (pool *BrutePool) doScopeCrawl(bl *pkg.Baseline) {
     }()
 }
-func (pool *BrutePool) addFuzzyBaseline(bl *pkg.Baseline) {
-    if _, ok := pool.baselines[bl.Status]; !ok && (EnableAllFuzzy || iutils.IntsContains(pkg.FuzzyStatus, bl.Status)) {
-        bl.Collect()
-        pool.doCrawl(bl) // even non-valid pages may contain special URLs worth crawling
-        pool.baselines[bl.Status] = bl
-        logs.Log.Logf(pkg.LogVerbose, "[baseline.%dinit] %s", bl.Status, bl.Format([]string{"status", "length", "spend", "title", "frame", "redirect"}))
-    }
-}
 func (pool *BrutePool) doBak() {
     defer pool.wg.Done()
     worder, err := words.NewWorderWithDsl("{?0}.{?@bak_ext}", [][]string{pkg.BakGenerator(pool.url.Host)}, nil)
@@ -790,38 +824,3 @@ func (pool *BrutePool) doBak() {
         })
     }
 }
-func (pool *BrutePool) recover() {
-    logs.Log.Errorf("%s ,failed request exceeds the threshold , task will exit. Breakpoint %d", pool.BaseURL, pool.wordOffset)
-    for i, bl := range pool.FailedBaselines {
-        if i > int(pool.BreakThreshold) {
-            break
-        }
-        logs.Log.Errorf("[failed.%d] %s", i, bl.String())
-    }
-}
-func (pool *BrutePool) Close() {
-    for pool.analyzeDone {
-        // wait for the buffered pending tasks to finish
-        time.Sleep(time.Duration(100) * time.Millisecond)
-    }
-    close(pool.additionCh) // close the addition channel
-    close(pool.checkCh) // close the check channel
-    pool.Statistor.EndTime = time.Now().Unix()
-    pool.Bar.Close()
-}
-func (pool *BrutePool) safePath(u string) string {
-    // auto-generated paths (e.g. init, check, common) are joined onto the relative directory via safePath to avoid "//"
-    if pool.isDir {
-        return pkg.SafePath(pool.dir, u)
-    } else {
-        return pkg.SafePath(pool.url.Path+"/", u)
-    }
-}
-func (pool *BrutePool) resetFailed() {
-    pool.failedCount = 1
-    pool.FailedBaselines = nil
-}

View File

@@ -84,11 +84,11 @@ func (r *Runner) PrepareConfig() *pool.Config {
         AppendRule: r.AppendRules, // append rules to valid directories, generated from rule
         AppendWords: r.AppendWords, // append wordlist entries to valid directories
         //IgnoreWaf: r.IgnoreWaf,
-        Crawl: r.Crawl,
+        Crawl: r.CrawlPlugin,
         Scope: r.Scope,
         Active: r.Finger,
-        Bak: r.Bak,
-        Common: r.Common,
+        Bak: r.BakPlugin,
+        Common: r.CommonPlugin,
         RetryLimit: r.RetryCount,
         ClientType: r.ClientType,
         RandomUserAgent: r.RandomUserAgent,
@@ -418,9 +418,7 @@ func (r *Runner) OutputHandler() {
         if !ok {
             return
         }
-        if r.Fuzzy {
-            r.Output(bl)
-        }
+        r.Output(bl)
         r.outwg.Done()
     }
 }

View File

@ -5,6 +5,7 @@ import (
"bytes" "bytes"
"github.com/chainreactors/logs" "github.com/chainreactors/logs"
"github.com/chainreactors/utils/iutils" "github.com/chainreactors/utils/iutils"
"github.com/chainreactors/words/mask"
"github.com/expr-lang/expr" "github.com/expr-lang/expr"
"github.com/expr-lang/expr/vm" "github.com/expr-lang/expr/vm"
"math/rand" "math/rand"
@@ -391,3 +392,14 @@ func ParseRawResponse(raw []byte) (*http.Response, error) {
     defer resp.Body.Close()
     return resp, nil
 }
+func GetPresetWordList(key []string) []string {
+    var wordlist []string
+    for _, k := range key {
+        if v, ok := mask.SpecialWords[k]; ok {
+            wordlist = append(wordlist, v...)
+        }
+    }
+    return wordlist
+}
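A standalone usage sketch of the new helper: it flattens the preset lists registered in mask.SpecialWords, the same map the old doCommonFile read directly. The keys below are the ones this diff passes for the common-file and bak plugins; the function body is copied locally only so the example compiles without importing spray's pkg package.

package main

import (
    "fmt"

    "github.com/chainreactors/words/mask"
)

// Local copy of the GetPresetWordList helper added above.
func GetPresetWordList(key []string) []string {
    var wordlist []string
    for _, k := range key {
        if v, ok := mask.SpecialWords[k]; ok {
            wordlist = append(wordlist, v...)
        }
    }
    return wordlist
}

func main() {
    // Keys taken from the diff: the common plugin uses "common_file" and
    // "log_file", the bak plugin uses "bak_file".
    common := GetPresetWordList([]string{"common_file", "log_file"})
    bak := GetPresetWordList([]string{"bak_file"})
    fmt.Printf("common/log presets: %d words, bak presets: %d words\n", len(common), len(bak))
}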