enhance crawl and append

M09Ic 2024-08-26 00:33:01 +08:00
parent 1c28898631
commit 491b8c16a5
3 changed files with 13 additions and 17 deletions

View File

@@ -45,9 +45,9 @@ func Format(opts Option) {
             continue
         }
         if !opts.NoColor {
-            logs.Log.Info(result.ColorString())
+            logs.Log.Console(result.ColorString() + "\n")
         } else {
-            logs.Log.Info(result.String())
+            logs.Log.Console(result.String() + "\n")
         }
     }
 }
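
Note: the result lines switch from logs.Log.Info to logs.Log.Console, and the call sites now append "\n" themselves, so Console is presumably writing the string verbatim without the logger's level decoration. A minimal sketch of the new output path; the import path is assumed from the project's other files and the sample line is invented:

    package main

    import "github.com/chainreactors/logs" // import path assumed, not shown in this diff

    func main() {
        line := "/admin  200  1234" // hypothetical formatted result
        // Before: logs.Log.Info(line) let the logger add its own prefix and newline.
        // After: Console emits the string as-is, so the trailing newline is explicit.
        logs.Log.Console(line + "\n")
    }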

View File

@@ -60,7 +60,7 @@ type InputOptions struct {
     Rules      []string `short:"r" long:"rules" description:"Files, rule files, e.g.: -r rule1.txt -r rule2.txt" config:"rules"`
     AppendRule []string `long:"append-rule" description:"Files, when found valid path , use append rule generator new word with current path" config:"append-rules"`
     FilterRule string   `long:"filter-rule" description:"String, filter rule, e.g.: --rule-filter '>8 <4'" config:"filter-rule"`
-    AppendFile []string `long:"append-file" description:"Files, when found valid path , use append file new word with current path" config:"append-files"`
+    AppendFile []string `long:"append" description:"Files, when found valid path , use append file new word with current path" config:"append-files"`
     Offset     int      `long:"offset" description:"Int, wordlist offset"`
     Limit      int      `long:"limit" description:"Int, wordlist limit, start with offset. e.g.: --offset 1000 --limit 100"`
 }
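
Note: the long flag for append wordlists is shortened from --append-file to --append; the config key append-files is unchanged. A hypothetical invocation after this commit, where -u and -d are assumed from the rest of spray's CLI and are not part of this diff:

    spray -u http://example.com -d dict.txt --append append.txt --append-rule rule.txt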
@@ -135,7 +135,7 @@ type ModeOptions struct {
     BreakThreshold int    `long:"error-threshold" default:"20" description:"Int, break when the error exceeds the threshold" config:"error-threshold"`
     BlackStatus    string `long:"black-status" default:"400,410" description:"Strings (comma split),custom black status" config:"black-status"`
     WhiteStatus    string `long:"white-status" default:"200" description:"Strings (comma split), custom white status" config:"white-status"`
-    FuzzyStatus    string `long:"fuzzy-status" default:"500,501,502,503" description:"Strings (comma split), custom fuzzy status" config:"fuzzy-status"`
+    FuzzyStatus    string `long:"fuzzy-status" default:"500,501,502,503,301,302" description:"Strings (comma split), custom fuzzy status" config:"fuzzy-status"`
     UniqueStatus   string `long:"unique-status" default:"403,200,404" description:"Strings (comma split), custom unique status" config:"unique-status"`
     Unique         bool   `long:"unique" description:"Bool, unique response" config:"unique"`
     RetryCount     int    `long:"retry" default:"0" description:"Int, retry count" config:"retry"`
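
Note: the fuzzy-status default now also includes 301 and 302, so redirects are fuzz-compared against baselines out of the box. If the previous behaviour is wanted, the default can presumably be overridden on the command line (the flag itself is confirmed by the struct tag above; the other flags are assumed):

    spray -u http://example.com -d dict.txt --fuzzy-status 500,501,502,503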

View File

@@ -381,8 +381,8 @@ func (pool *BrutePool) Invoke(v interface{}) {
         pool.locker.Lock()
         pool.index = bl
         pool.locker.Unlock()
-        pool.wg.Add(1)
         pool.doCrawl(bl)
+        pool.doAppend(bl)
         pool.putToOutput(bl)
         pool.initwg.Done()
     case parsers.CheckSource:
@@ -520,11 +520,9 @@ func (pool *BrutePool) Handler() {
             bl.IsValid = false
         }
-        if bl.IsValid || bl.IsFuzzy {
-            pool.wg.Add(3)
+        if bl.IsValid || (bl.IsFuzzy && pool.Fuzzy) {
             pool.doCrawl(bl)
-            pool.doAppendRule(bl)
-            pool.doAppendWords(bl)
+            pool.doAppend(bl)
         }
         // to recurse, bl must be valid, the mod must be path-spray, and the current depth must be below the max recursion depth
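
Note: two things change in Handler. Fuzzy hits now only trigger follow-up work when fuzzy mode is enabled (bl.IsFuzzy && pool.Fuzzy), and the separate doAppendRule/doAppendWords calls collapse into a single doAppend, with the caller's wg.Add(3) removed because the callees now account for their own work. The diff does not show doAppend's body; a purely hypothetical reconstruction consistent with these call sites could look like:

    // hypothetical sketch only -- doAppend's body is not part of this diff
    func (pool *BrutePool) doAppend(bl *pkg.Baseline) {
        pool.wg.Add(2) // assumed: the helper registers its own two units of work
        pool.doAppendWords(bl)
        pool.doAppendRule(bl)
    }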
@@ -705,16 +703,15 @@ func (pool *BrutePool) doCheck() {
 func (pool *BrutePool) doCrawl(bl *pkg.Baseline) {
     if !pool.Crawl || bl.ReqDepth >= MaxCrawl {
-        pool.wg.Done()
-        return
-    }
-    bl.CollectURL()
-    if bl.URLs == nil {
-        pool.wg.Done()
         return
     }
-    pool.wg.Add(1)
+    bl.CollectURL()
+    if bl.URLs == nil {
+        return
+    }
+    pool.wg.Add(2)
     pool.doScopeCrawl(bl)
     go func() {
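
Note: the WaitGroup bookkeeping moves into doCrawl itself. Callers no longer pre-Add, and early returns no longer need a compensating wg.Done; doCrawl registers its own two units of work (wg.Add(2)) only once it knows it will actually crawl. A minimal, self-contained sketch of this "callee registers its own work" pattern; the names are illustrative and not taken from the project:

    package main

    import (
        "fmt"
        "sync"
    )

    // crawl adds to the WaitGroup only when it actually spawns work,
    // so callers never need a matching Done for the early-return paths.
    func crawl(wg *sync.WaitGroup, urls []string) {
        if len(urls) == 0 {
            return // nothing added, nothing to Done
        }
        wg.Add(len(urls))
        for _, u := range urls {
            go func(u string) {
                defer wg.Done()
                fmt.Println("crawling", u)
            }(u)
        }
    }

    func main() {
        var wg sync.WaitGroup
        crawl(&wg, []string{"/a", "/b"})
        crawl(&wg, nil) // safe: no Add happened, no Done needed
        wg.Wait()
    }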
@@ -761,7 +758,6 @@ func (pool *BrutePool) doScopeCrawl(bl *pkg.Baseline) {
 func (pool *BrutePool) addFuzzyBaseline(bl *pkg.Baseline) {
     if _, ok := pool.baselines[bl.Status]; !ok && (EnableAllFuzzy || iutils.IntsContains(pkg.FuzzyStatus, bl.Status)) {
         bl.Collect()
-        pool.wg.Add(1)
         pool.doCrawl(bl) // even non-valid pages may contain special URLs worth crawling
         pool.baselines[bl.Status] = bl
         logs.Log.Logf(pkg.LogVerbose, "[baseline.%dinit] %s", bl.Status, bl.Format([]string{"status", "length", "spend", "title", "frame", "redirect"}))