Merge pull request #79 from chainreactors/dev

merge v1.1.6
This commit is contained in:
M09Ic 2024-11-01 13:54:13 +08:00 committed by GitHub
commit 7621514bd9
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
14 changed files with 349 additions and 257 deletions

View File

@ -149,6 +149,15 @@ func Spray() {
} }
ctx, canceler := context.WithTimeout(context.Background(), time.Duration(runner.Deadline)*time.Second) ctx, canceler := context.WithTimeout(context.Background(), time.Duration(runner.Deadline)*time.Second)
go func() {
select {
case <-ctx.Done():
time.Sleep(10 * time.Second)
logs.Log.Errorf("deadline and timeout not work, hard exit!!!")
os.Exit(0)
}
}()
go func() { go func() {
exitChan := make(chan os.Signal, 2) exitChan := make(chan os.Signal, 2)
signal.Notify(exitChan, os.Interrupt, syscall.SIGTERM) signal.Notify(exitChan, os.Interrupt, syscall.SIGTERM)

2
go.mod
View File

@ -6,7 +6,7 @@ require (
github.com/chainreactors/files v0.0.0-20240716182835-7884ee1e77f0 github.com/chainreactors/files v0.0.0-20240716182835-7884ee1e77f0
github.com/chainreactors/fingers v0.0.0-20240716172449-2fc3147b9c2a github.com/chainreactors/fingers v0.0.0-20240716172449-2fc3147b9c2a
github.com/chainreactors/logs v0.0.0-20240207121836-c946f072f81f github.com/chainreactors/logs v0.0.0-20240207121836-c946f072f81f
github.com/chainreactors/parsers v0.0.0-20240910081704-fd57f462fc65 github.com/chainreactors/parsers v0.0.0-20241016065831-bedaf68005f1
github.com/chainreactors/utils v0.0.0-20240805193040-ff3b97aa3c3f github.com/chainreactors/utils v0.0.0-20240805193040-ff3b97aa3c3f
github.com/expr-lang/expr v1.16.9 github.com/expr-lang/expr v1.16.9
github.com/gookit/config/v2 v2.2.5 github.com/gookit/config/v2 v2.2.5

4
go.sum
View File

@ -101,6 +101,10 @@ github.com/chainreactors/parsers v0.0.0-20240829055950-923f89a92b84 h1:F6umsdHLx
github.com/chainreactors/parsers v0.0.0-20240829055950-923f89a92b84/go.mod h1:7rXdYz6jrdjF0WUH1ICcAXKIKKjKmJo2PU8u43V7jkA= github.com/chainreactors/parsers v0.0.0-20240829055950-923f89a92b84/go.mod h1:7rXdYz6jrdjF0WUH1ICcAXKIKKjKmJo2PU8u43V7jkA=
github.com/chainreactors/parsers v0.0.0-20240910081704-fd57f462fc65 h1:subSvyczsErYMRnCD07s4Ub6zOSaw2xZ1/O9t3tHkuw= github.com/chainreactors/parsers v0.0.0-20240910081704-fd57f462fc65 h1:subSvyczsErYMRnCD07s4Ub6zOSaw2xZ1/O9t3tHkuw=
github.com/chainreactors/parsers v0.0.0-20240910081704-fd57f462fc65/go.mod h1:7rXdYz6jrdjF0WUH1ICcAXKIKKjKmJo2PU8u43V7jkA= github.com/chainreactors/parsers v0.0.0-20240910081704-fd57f462fc65/go.mod h1:7rXdYz6jrdjF0WUH1ICcAXKIKKjKmJo2PU8u43V7jkA=
github.com/chainreactors/parsers v0.0.0-20241013180542-88e2dc355c57 h1:KuijtekTNtSpQbKf2jqKp99gxnGQXffPeEF+EOHnXBE=
github.com/chainreactors/parsers v0.0.0-20241013180542-88e2dc355c57/go.mod h1:7rXdYz6jrdjF0WUH1ICcAXKIKKjKmJo2PU8u43V7jkA=
github.com/chainreactors/parsers v0.0.0-20241016065831-bedaf68005f1 h1:Ka/KBrqAgwiL07TwYjtqF2DQ3x0fCxw1XHG+GFqMKEc=
github.com/chainreactors/parsers v0.0.0-20241016065831-bedaf68005f1/go.mod h1:7rXdYz6jrdjF0WUH1ICcAXKIKKjKmJo2PU8u43V7jkA=
github.com/chainreactors/utils v0.0.0-20240528085651-ba1b255482c1/go.mod h1:JA4eiQZm+7AsfjXBcIzIdVKBEhDCb16eNtWFCGTxlvs= github.com/chainreactors/utils v0.0.0-20240528085651-ba1b255482c1/go.mod h1:JA4eiQZm+7AsfjXBcIzIdVKBEhDCb16eNtWFCGTxlvs=
github.com/chainreactors/utils v0.0.0-20240704062557-662d623b74f4/go.mod h1:JA4eiQZm+7AsfjXBcIzIdVKBEhDCb16eNtWFCGTxlvs= github.com/chainreactors/utils v0.0.0-20240704062557-662d623b74f4/go.mod h1:JA4eiQZm+7AsfjXBcIzIdVKBEhDCb16eNtWFCGTxlvs=
github.com/chainreactors/utils v0.0.0-20240715080349-d2d0484c95ed/go.mod h1:LajXuvESQwP+qCMAvlcoSXppQCjuLlBrnQpu9XQ1HtU= github.com/chainreactors/utils v0.0.0-20240715080349-d2d0484c95ed/go.mod h1:LajXuvESQwP+qCMAvlcoSXppQCjuLlBrnQpu9XQ1HtU=

View File

@ -39,18 +39,21 @@ func Format(opts Option) {
group[result.Url.Host] = append(group[result.Url.Host], &result) group[result.Url.Host] = append(group[result.Url.Host], &result)
} }
// 分组
for _, results := range group { for _, results := range group {
for _, result := range results { for _, result := range results {
if !opts.Fuzzy && result.IsFuzzy { if !opts.Fuzzy && result.IsFuzzy {
continue continue
} }
if opts.OutputProbe == "" {
if !opts.NoColor { if !opts.NoColor {
logs.Log.Console(result.ColorString() + "\n") logs.Log.Console(result.ColorString() + "\n")
} else { } else {
logs.Log.Console(result.String() + "\n") logs.Log.Console(result.String() + "\n")
} }
} else {
probes := strings.Split(opts.OutputProbe, ",")
logs.Log.Console(result.ProbeOutput(probes) + "\n")
}
} }
} }
} }

View File

@ -115,6 +115,7 @@ type PluginOptions struct {
CommonPlugin bool `long:"common" description:"Bool, enable common file found" config:"common"` CommonPlugin bool `long:"common" description:"Bool, enable common file found" config:"common"`
CrawlPlugin bool `long:"crawl" description:"Bool, enable crawl" config:"crawl"` CrawlPlugin bool `long:"crawl" description:"Bool, enable crawl" config:"crawl"`
CrawlDepth int `long:"crawl-depth" default:"3" description:"Int, crawl depth" config:"crawl-depth"` CrawlDepth int `long:"crawl-depth" default:"3" description:"Int, crawl depth" config:"crawl-depth"`
AppendDepth int `long:"append-depth" default:"2" description:"Int, append depth" config:"append-depth"`
} }
type ModeOptions struct { type ModeOptions struct {
@ -131,7 +132,7 @@ type ModeOptions struct {
BreakThreshold int `long:"error-threshold" default:"20" description:"Int, break when the error exceeds the threshold" config:"error-threshold"` BreakThreshold int `long:"error-threshold" default:"20" description:"Int, break when the error exceeds the threshold" config:"error-threshold"`
BlackStatus string `long:"black-status" default:"400,410" description:"Strings (comma split),custom black status" config:"black-status"` BlackStatus string `long:"black-status" default:"400,410" description:"Strings (comma split),custom black status" config:"black-status"`
WhiteStatus string `long:"white-status" default:"200" description:"Strings (comma split), custom white status" config:"white-status"` WhiteStatus string `long:"white-status" default:"200" description:"Strings (comma split), custom white status" config:"white-status"`
FuzzyStatus string `long:"fuzzy-status" default:"500,501,502,503,301,302" description:"Strings (comma split), custom fuzzy status" config:"fuzzy-status"` FuzzyStatus string `long:"fuzzy-status" default:"500,501,502,503,301,302,404" description:"Strings (comma split), custom fuzzy status" config:"fuzzy-status"`
UniqueStatus string `long:"unique-status" default:"403,200,404" description:"Strings (comma split), custom unique status" config:"unique-status"` UniqueStatus string `long:"unique-status" default:"403,200,404" description:"Strings (comma split), custom unique status" config:"unique-status"`
Unique bool `long:"unique" description:"Bool, unique response" config:"unique"` Unique bool `long:"unique" description:"Bool, unique response" config:"unique"`
RetryCount int `long:"retry" default:"0" description:"Int, retry count" config:"retry"` RetryCount int `long:"retry" default:"0" description:"Int, retry count" config:"retry"`
@ -253,7 +254,6 @@ func (opt *Option) Prepare() error {
logs.Log.Logf(pkg.LogVerbose, "Black Status: %v, WhiteStatus: %v, WAFStatus: %v", pkg.BlackStatus, pkg.WhiteStatus, pkg.WAFStatus) logs.Log.Logf(pkg.LogVerbose, "Black Status: %v, WhiteStatus: %v, WAFStatus: %v", pkg.BlackStatus, pkg.WhiteStatus, pkg.WAFStatus)
logs.Log.Logf(pkg.LogVerbose, "Fuzzy Status: %v, Unique Status: %v", pkg.FuzzyStatus, pkg.UniqueStatus) logs.Log.Logf(pkg.LogVerbose, "Fuzzy Status: %v, Unique Status: %v", pkg.FuzzyStatus, pkg.UniqueStatus)
pool.MaxCrawl = opt.CrawlDepth
return nil return nil
} }
@ -354,13 +354,12 @@ func (opt *Option) NewRunner() (*Runner, error) {
var express string var express string
if opt.Recursive != "current.IsDir()" && opt.Depth != 0 { if opt.Recursive != "current.IsDir()" && opt.Depth != 0 {
// 默认不打开递归, 除非指定了非默认的递归表达式 // 默认不打开递归, 除非指定了非默认的递归表达式
pool.MaxRecursion = 1 opt.Depth = 1
express = opt.Recursive express = opt.Recursive
} }
if opt.Depth != 0 { if opt.Depth != 0 {
// 手动设置的depth优先级高于默认 // 手动设置的depth优先级高于默认
pool.MaxRecursion = opt.Depth
express = opt.Recursive express = opt.Recursive
} }

View File

@ -23,9 +23,6 @@ import (
) )
var ( var (
MaxRedirect = 3
MaxCrawl = 3
MaxRecursion = 0
EnableAllFuzzy = false EnableAllFuzzy = false
EnableAllUnique = false EnableAllUnique = false
//AllowHostModSource = []parsers.SpraySource{parsers.WordSource, parsers.CheckSource, parsers.InitIndexSource, parsers.InitRandomSource} //AllowHostModSource = []parsers.SpraySource{parsers.WordSource, parsers.CheckSource, parsers.InitIndexSource, parsers.InitRandomSource}
@ -318,16 +315,14 @@ func (pool *BrutePool) Invoke(v interface{}) {
// 手动处理重定向 // 手动处理重定向
if bl.IsValid && unit.source != parsers.CheckSource && bl.RedirectURL != "" { if bl.IsValid && unit.source != parsers.CheckSource && bl.RedirectURL != "" {
//pool.wg.Add(1) bl.SameRedirectDomain = pool.checkHost(bl.RedirectURL)
pool.doRedirect(bl, unit.depth) pool.doRedirect(bl, unit.depth)
} }
if !ihttp.CheckBodySize(int64(bl.BodyLength)) { if !ihttp.CheckBodySize(int64(bl.BodyLength)) {
bl.ExceedLength = true bl.ExceedLength = true
} }
bl.Source = unit.source unit.Update(bl)
bl.ReqDepth = unit.depth
bl.Number = unit.number
bl.Spended = time.Since(start).Milliseconds() bl.Spended = time.Since(start).Milliseconds()
switch unit.source { switch unit.source {
case parsers.InitRandomSource: case parsers.InitRandomSource:
@ -494,7 +489,7 @@ func (pool *BrutePool) Handler() {
// 如果要进行递归判断, 要满足 bl有效, mod为path-spray, 当前深度小于最大递归深度 // 如果要进行递归判断, 要满足 bl有效, mod为path-spray, 当前深度小于最大递归深度
if bl.IsValid { if bl.IsValid {
pool.Statistor.FoundNumber++ pool.Statistor.FoundNumber++
if bl.RecuDepth < MaxRecursion { if bl.RecuDepth < pool.MaxRecursionDepth {
if pkg.CompareWithExpr(pool.RecuExpr, params) { if pkg.CompareWithExpr(pool.RecuExpr, params) {
bl.Recu = true bl.Recu = true
} }
@ -511,79 +506,9 @@ func (pool *BrutePool) Handler() {
pool.analyzeDone = true pool.analyzeDone = true
} }
func (pool *BrutePool) doAppendRule(bl *pkg.Baseline) {
if pool.AppendRule == nil || bl.Source == parsers.AppendRuleSource {
pool.wg.Done()
return
}
go func() {
defer pool.wg.Done()
for u := range rule.RunAsStream(pool.AppendRule.Expressions, path.Base(bl.Path)) {
pool.addAddition(&Unit{
path: pkg.Dir(bl.Url.Path) + u,
host: bl.Host,
source: parsers.AppendRuleSource,
})
}
}()
}
func (pool *BrutePool) doAppendWords(bl *pkg.Baseline) {
if pool.AppendWords == nil || bl.Source == parsers.AppendSource || bl.Source == parsers.RuleSource {
// 防止自身递归
pool.wg.Done()
return
}
go func() {
defer pool.wg.Done()
for u := range NewBruteWords(pool.Config, pool.AppendWords).Output {
pool.addAddition(&Unit{
path: pkg.SafePath(bl.Path, u),
host: bl.Host,
source: parsers.AppendSource,
})
}
}()
}
func (pool *BrutePool) doAppend(bl *pkg.Baseline) {
pool.wg.Add(2)
pool.doAppendWords(bl)
pool.doAppendRule(bl)
}
func (pool *BrutePool) doActive() {
defer pool.wg.Done()
if pool.Mod == HostSpray {
return
}
for _, u := range pkg.ActivePath {
pool.addAddition(&Unit{
path: pool.dir + u[1:],
source: parsers.FingerSource,
})
}
}
func (pool *BrutePool) doCommonFile() {
defer pool.wg.Done()
if pool.Mod == HostSpray {
return
}
for u := range NewBruteWords(pool.Config, append(pkg.Dicts["common"], pkg.Dicts["log"]...)).Output {
pool.addAddition(&Unit{
path: pool.dir + u,
source: parsers.CommonFileSource,
})
}
}
func (pool *BrutePool) checkRedirect(redirectURL string) bool { func (pool *BrutePool) checkRedirect(redirectURL string) bool {
if pool.random.RedirectURL == "" { if pool.random.RedirectURL == "" {
// 如果random的redirectURL为空, 此时该项 // 如果random的redirectURL为空, 忽略
return true return true
} }
@ -637,19 +562,41 @@ func (pool *BrutePool) PreCompare(resp *ihttp.Response) error {
return nil return nil
} }
// checkHost reports whether u points at the same host as the pool's base
// URL. A relative URL (empty host after parsing) and an unparseable URL
// are both treated as same-host, so they are never filtered out.
func (pool *BrutePool) checkHost(u string) bool {
	v, err := url.Parse(u)
	if err != nil {
		// keep the original permissive behavior: parse failure == same host
		return true
	}
	return v.Host == "" || v.Host == pool.url.Host
}
func (pool *BrutePool) BaseCompare(bl *pkg.Baseline) bool { func (pool *BrutePool) BaseCompare(bl *pkg.Baseline) bool {
if !bl.IsValid { if !bl.IsValid {
return false return false
} }
var status = -1 var status = -1
// 30x状态码的特殊处理 // 30x状态码的特殊处理
if bl.RedirectURL != "" && strings.HasSuffix(bl.RedirectURL, bl.Url.Path+"/") { if bl.RedirectURL != "" {
if bl.SameRedirectDomain && strings.HasSuffix(bl.RedirectURL, bl.Url.Path+"/") {
bl.Reason = pkg.ErrFuzzyRedirect.Error() bl.Reason = pkg.ErrFuzzyRedirect.Error()
pool.putToFuzzy(bl)
return false return false
} }
}
// 使用与baseline相同状态码, 需要在fuzzystatus中提前配置 // 使用与baseline相同状态码, 需要在fuzzystatus中提前配置
base, ok := pool.baselines[bl.Status] // 挑选对应状态码的baseline进行compare base, ok := pool.baselines[bl.Status] // 挑选对应状态码的baseline进行compare
if bl.IsBaseline {
ok = false
}
if !ok { if !ok {
if pool.random.Status == bl.Status { if pool.random.Status == bl.Status {
// 当other的状态码与base相同时, 会使用base // 当other的状态码与base相同时, 会使用base
@ -694,6 +641,7 @@ func (pool *BrutePool) BaseCompare(bl *pkg.Baseline) bool {
func (pool *BrutePool) addFuzzyBaseline(bl *pkg.Baseline) { func (pool *BrutePool) addFuzzyBaseline(bl *pkg.Baseline) {
if _, ok := pool.baselines[bl.Status]; !ok && (EnableAllFuzzy || iutils.IntsContains(pkg.FuzzyStatus, bl.Status)) { if _, ok := pool.baselines[bl.Status]; !ok && (EnableAllFuzzy || iutils.IntsContains(pkg.FuzzyStatus, bl.Status)) {
bl.IsBaseline = true
bl.Collect() bl.Collect()
pool.doCrawl(bl) // 非有效页面也可能存在一些特殊的url可以用来爬取 pool.doCrawl(bl) // 非有效页面也可能存在一些特殊的url可以用来爬取
pool.baselines[bl.Status] = bl pool.baselines[bl.Status] = bl
@ -757,8 +705,32 @@ func (pool *BrutePool) doCheck() {
} }
} }
// doRedirect follows a 30x response by enqueueing the redirect target as a
// new unit, up to pool.MaxRedirect hops. The wg.Add(1) here is balanced by
// the deferred wg.Done inside the goroutine; addAddition may block, which is
// why the enqueue happens off the caller's goroutine.
func (pool *BrutePool) doRedirect(bl *pkg.Baseline, depth int) {
	if depth >= pool.MaxRedirect {
		return
	}
	//if !bl.SameRedirectDomain {
	//	return // cross-domain redirects not followed (currently disabled)
	//}
	reURL := pkg.FormatURL(bl.Url.Path, bl.RedirectURL)
	pool.wg.Add(1)
	go func() {
		defer pool.wg.Done()
		pool.addAddition(&Unit{
			path:     reURL,
			parent:   bl.Number, // link back to the baseline that redirected
			host:     bl.Host,
			source:   parsers.RedirectSource,
			from:     bl.Source,
			frontUrl: bl.UrlString, // original URL, kept for display/tracing
			depth:    depth + 1,
		})
	}()
}
func (pool *BrutePool) doCrawl(bl *pkg.Baseline) { func (pool *BrutePool) doCrawl(bl *pkg.Baseline) {
if !pool.Crawl || bl.ReqDepth >= MaxCrawl { if !pool.Crawl || bl.ReqDepth >= pool.MaxCrawlDepth {
return return
} }
@ -778,8 +750,10 @@ func (pool *BrutePool) doCrawl(bl *pkg.Baseline) {
} }
pool.addAddition(&Unit{ pool.addAddition(&Unit{
path: u, path: u,
parent: bl.Number,
host: bl.Host, host: bl.Host,
source: parsers.CrawlSource, source: parsers.CrawlSource,
from: bl.Source,
depth: bl.ReqDepth + 1, depth: bl.ReqDepth + 1,
}) })
} }
@ -788,7 +762,7 @@ func (pool *BrutePool) doCrawl(bl *pkg.Baseline) {
} }
func (pool *BrutePool) doScopeCrawl(bl *pkg.Baseline) { func (pool *BrutePool) doScopeCrawl(bl *pkg.Baseline) {
if bl.ReqDepth >= MaxCrawl { if bl.ReqDepth >= pool.MaxCrawlDepth {
pool.wg.Done() pool.wg.Done()
return return
} }
@ -804,7 +778,13 @@ func (pool *BrutePool) doScopeCrawl(bl *pkg.Baseline) {
if _, ok := pool.scopeurls[u]; !ok { if _, ok := pool.scopeurls[u]; !ok {
pool.urls.Store(u, nil) pool.urls.Store(u, nil)
pool.wg.Add(1) pool.wg.Add(1)
pool.scopePool.Invoke(&Unit{path: u, source: parsers.CrawlSource, depth: bl.ReqDepth + 1}) pool.scopePool.Invoke(&Unit{
path: u,
parent: bl.Number,
source: parsers.CrawlSource,
from: bl.Source,
depth: bl.ReqDepth + 1,
})
} }
pool.scopeLocker.Unlock() pool.scopeLocker.Unlock()
} }
@ -817,12 +797,12 @@ func (pool *BrutePool) doBak() {
if pool.Mod == HostSpray { if pool.Mod == HostSpray {
return return
} }
for w := range NewBruteDSL(pool.Config, "{?0}.{?@bak_ext}", [][]string{pkg.BakGenerator(pool.url.Host)}).Output { //for w := range NewBruteDSL(pool.Config, "{?0}.{?@bak_ext}", [][]string{pkg.BakGenerator(pool.url.Host)}).Output {
pool.addAddition(&Unit{ // pool.addAddition(&Unit{
path: pool.dir + w, // path: pool.dir + w,
source: parsers.BakSource, // source: parsers.BakSource,
}) // })
} //}
for w := range NewBruteDSL(pool.Config, "{?@bak_name}.{?@bak_ext}", nil).Output { for w := range NewBruteDSL(pool.Config, "{?@bak_name}.{?@bak_ext}", nil).Output {
pool.addAddition(&Unit{ pool.addAddition(&Unit{
@ -831,3 +811,79 @@ func (pool *BrutePool) doBak() {
}) })
} }
} }
// doAppend schedules both append strategies (wordlist- and rule-based) for a
// baseline. wg.Add(2) reserves one slot per callee; doAppendWords and
// doAppendRule each guarantee exactly one wg.Done on every path.
func (pool *BrutePool) doAppend(bl *pkg.Baseline) {
	pool.wg.Add(2)
	pool.doAppendWords(bl)
	pool.doAppendRule(bl)
}
// doAppendRule mutates the basename of a hit path with the configured rule
// program and enqueues every generated variant. Pairs with the wg.Add(2) in
// doAppend: every exit path performs exactly one wg.Done.
func (pool *BrutePool) doAppendRule(bl *pkg.Baseline) {
	// Skip when rules are disabled, when bl itself came from a rule unit
	// (prevents self-recursion), or when the append depth limit is reached.
	if pool.AppendRule == nil || bl.Source == parsers.AppendRuleSource || bl.ReqDepth >= pool.MaxAppendDepth {
		pool.wg.Done()
		return
	}
	go func() {
		defer pool.wg.Done()
		for u := range rule.RunAsStream(pool.AppendRule.Expressions, path.Base(bl.Path)) {
			pool.addAddition(&Unit{
				path:   pkg.Dir(bl.Url.Path) + u, // rule output replaces the basename
				parent: bl.Number,
				host:   bl.Host,
				source: parsers.AppendRuleSource,
				from:   bl.Source,
				depth:  bl.ReqDepth + 1,
			})
		}
	}()
}
// doAppendWords appends each configured word to a hit path and enqueues the
// results. Pairs with the wg.Add(2) in doAppend: every exit path performs
// exactly one wg.Done.
func (pool *BrutePool) doAppendWords(bl *pkg.Baseline) {
	// Skip when append words are disabled, when bl itself came from an
	// append/rule unit (prevents self-recursion), or when the append depth
	// limit is reached.
	if pool.AppendWords == nil || bl.Source == parsers.AppendSource || bl.Source == parsers.RuleSource || bl.ReqDepth >= pool.MaxAppendDepth {
		pool.wg.Done()
		return
	}
	go func() {
		defer pool.wg.Done()
		for u := range NewBruteWords(pool.Config, pool.AppendWords).Output {
			pool.addAddition(&Unit{
				path:   pkg.SafePath(bl.Path, u),
				parent: bl.Number,
				host:   bl.Host,
				source: parsers.AppendSource,
				from:   bl.Source,
				// FIX: was bl.RecuDepth + 1, inconsistent with the ReqDepth
				// guard above and with doAppendRule — mixing the two counters
				// could let append units bypass MaxAppendDepth.
				depth: bl.ReqDepth + 1,
			})
		}
	}()
}
// doActive probes the fingerprint paths in pkg.ActivePath under the current
// directory. No-op in host-spray mode. Balances one wg.Done (caller must
// have done the matching wg.Add).
func (pool *BrutePool) doActive() {
	defer pool.wg.Done()
	if pool.Mod == HostSpray {
		return
	}
	for _, u := range pkg.ActivePath {
		pool.addAddition(&Unit{
			// u[1:] assumes every ActivePath entry begins with "/" —
			// TODO(review): confirm against pkg.ActivePath's contents.
			path:   pool.dir + u[1:],
			source: parsers.FingerSource,
		})
	}
}
// doCommonFile sprays the built-in "common" and "log" dictionaries under the
// current directory. No-op in host-spray mode. Balances one wg.Done (caller
// must have done the matching wg.Add).
func (pool *BrutePool) doCommonFile() {
	defer pool.wg.Done()
	if pool.Mod == HostSpray {
		return
	}
	for u := range NewBruteWords(pool.Config, append(pkg.Dicts["common"], pkg.Dicts["log"]...)).Output {
		pool.addAddition(&Unit{
			path:   pool.dir + u,
			source: parsers.CommonFileSource,
		})
	}
}

View File

@ -181,7 +181,7 @@ func (pool *CheckPool) Handler() {
} }
func (pool *CheckPool) doRedirect(bl *pkg.Baseline, depth int) { func (pool *CheckPool) doRedirect(bl *pkg.Baseline, depth int) {
if depth >= MaxRedirect { if depth >= pool.MaxRedirect {
return return
} }
var reURL string var reURL string
@ -199,9 +199,11 @@ func (pool *CheckPool) doRedirect(bl *pkg.Baseline, depth int) {
go func() { go func() {
pool.additionCh <- &Unit{ pool.additionCh <- &Unit{
path: reURL, path: reURL,
parent: bl.Number,
source: parsers.RedirectSource, source: parsers.RedirectSource,
frontUrl: bl.UrlString, frontUrl: bl.UrlString,
depth: depth + 1, depth: depth + 1,
from: bl.Source,
} }
}() }()
} }
@ -221,8 +223,10 @@ func (pool *CheckPool) doUpgrade(bl *pkg.Baseline) {
go func() { go func() {
pool.additionCh <- &Unit{ pool.additionCh <- &Unit{
path: reurl, path: reurl,
parent: bl.Number,
source: parsers.UpgradeSource, source: parsers.UpgradeSource,
depth: bl.ReqDepth + 1, depth: bl.ReqDepth + 1,
from: bl.Source,
} }
}() }()
} }

69
internal/pool/config.go Normal file
View File

@ -0,0 +1,69 @@
package pool
import (
"github.com/chainreactors/logs"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/words"
"github.com/chainreactors/words/rule"
"github.com/expr-lang/expr/vm"
"sync"
"time"
)
// Config carries every knob a spray pool needs: the target, concurrency and
// rate settings, result channels back to the runner, compiled match/filter
// expressions, and the plugin switches with their depth limits.
type Config struct {
	BaseURL   string
	ProxyAddr string
	Thread    int // worker pool size
	Wordlist  []string
	Timeout   time.Duration
	// Result channels; consumed by the runner. ProcessCh receives every
	// finished baseline, OutputCh valid hits, FuzzyCh near-miss results.
	ProcessCh       chan *pkg.Baseline
	OutputCh        chan *pkg.Baseline
	FuzzyCh         chan *pkg.Baseline
	Outwg           *sync.WaitGroup // tracks in-flight output writes
	RateLimit       int
	CheckPeriod     int
	ErrPeriod       int32
	BreakThreshold  int32 // abort the pool when errors exceed this count
	Method          string
	Mod             SprayMod // path-spray vs host-spray
	Headers         map[string]string
	ClientType      int
	MatchExpr       *vm.Program // compiled expr deciding validity
	FilterExpr      *vm.Program // compiled expr filtering results
	RecuExpr        *vm.Program // compiled expr gating recursion
	AppendRule      *rule.Program
	Fns             []words.WordFunc // word mutation functions
	AppendWords     []string
	Fuzzy           bool
	IgnoreWaf       bool
	Crawl           bool
	Scope           []string
	Active          bool
	Bak             bool
	Common          bool
	RetryLimit      int
	RandomUserAgent bool
	Random          string
	Index           string
	// Per-pool depth limits (moved here from package-level globals).
	MaxRedirect       int
	MaxCrawlDepth     int
	MaxRecursionDepth int
	MaxAppendDepth    int
}
// NewBruteWords builds and starts a Worder over a fixed word list, wired
// with the pool's word-mutation functions.
func NewBruteWords(config *Config, list []string) *words.Worder {
	w := words.NewWorderWithList(list)
	w.Fns = config.Fns
	w.Run()
	return w
}
// NewBruteDSL builds and starts a Worder from a DSL expression.
// Returns nil when the DSL fails to compile; callers must check for nil.
// (Previously the error was only logged and the nil worder was dereferenced
// immediately below, panicking.)
func NewBruteDSL(config *Config, dsl string, params [][]string) *words.Worder {
	word, err := words.NewWorderWithDsl(dsl, params, nil)
	if err != nil {
		logs.Log.Error(err.Error())
		return nil
	}
	word.Fns = config.Fns
	word.Run()
	return word
}

View File

@ -28,24 +28,6 @@ type BasePool struct {
isFallback atomic.Bool isFallback atomic.Bool
} }
func (pool *BasePool) doRedirect(bl *pkg.Baseline, depth int) {
if depth >= MaxRedirect {
return
}
reURL := pkg.FormatURL(bl.Url.Path, bl.RedirectURL)
pool.wg.Add(1)
go func() {
defer pool.wg.Done()
pool.addAddition(&Unit{
path: reURL,
host: bl.Host,
source: parsers.RedirectSource,
frontUrl: bl.UrlString,
depth: depth + 1,
})
}()
}
func (pool *BasePool) doRetry(bl *pkg.Baseline) { func (pool *BasePool) doRetry(bl *pkg.Baseline) {
if bl.Retry >= pool.RetryLimit { if bl.Retry >= pool.RetryLimit {
return return
@ -55,8 +37,10 @@ func (pool *BasePool) doRetry(bl *pkg.Baseline) {
defer pool.wg.Done() defer pool.wg.Done()
pool.addAddition(&Unit{ pool.addAddition(&Unit{
path: bl.Path, path: bl.Path,
parent: bl.Number,
host: bl.Host, host: bl.Host,
source: parsers.RetrySource, source: parsers.RetrySource,
from: bl.Source,
retry: bl.Retry + 1, retry: bl.Retry + 1,
}) })
}() }()

57
internal/pool/type.go Normal file
View File

@ -0,0 +1,57 @@
package pool
import (
"github.com/chainreactors/parsers"
"github.com/chainreactors/spray/pkg"
)
// newUnit is a convenience constructor for the common path+source case;
// every other field keeps its zero value.
func newUnit(path string, source parsers.SpraySource) *Unit {
	u := new(Unit)
	u.path = path
	u.source = source
	return u
}
// Unit is a single spray task: one candidate path (or host) plus the
// bookkeeping needed to trace where it came from.
type Unit struct {
	number   int                 // sequence number of this unit
	parent   int                 // number of the baseline that spawned it (0 for seeds)
	host     string              // target host, used in host-spray mode
	path     string              // candidate path to request
	from     parsers.SpraySource // source of the parent baseline
	source   parsers.SpraySource // how this unit was generated (word, crawl, redirect, ...)
	retry    int                 // retry attempt counter
	frontUrl string              // original URL, set for redirect units
	depth    int                 // redirect/crawl/append depth
}
// Update copies the unit's identity fields onto the response baseline so a
// result can be traced back to the task that produced it.
func (u *Unit) Update(bl *pkg.Baseline) {
	bl.Number = u.number
	bl.Parent = u.parent
	bl.Host = u.host
	bl.Path = u.path
	bl.Source = u.source
}
// NewBaselines returns an empty Baselines collection with the per-status
// map pre-initialized so it can be written to immediately.
func NewBaselines() *Baselines {
	b := new(Baselines)
	b.baselines = make(map[int]*pkg.Baseline)
	return b
}
// Baselines groups the reference responses a pool compares candidates
// against.
type Baselines struct {
	FailedBaselines []*pkg.Baseline
	random          *pkg.Baseline         // baseline from a random (not-found) path
	index           *pkg.Baseline         // baseline from the index page
	baselines       map[int]*pkg.Baseline // per-status-code fuzzy baselines
}
// SprayMod selects how the wordlist is applied to the target.
type SprayMod int

const (
	PathSpray SprayMod = iota + 1 // mutate the URL path
	HostSpray                     // mutate the host
	ParamSpray                    // presumably param fuzzing; not referenced in visible code
	CustomSpray                   // presumably custom mode; not referenced in visible code
)

// ModMap maps the user-facing mode names to their SprayMod values.
var ModMap = map[string]SprayMod{
	"path": PathSpray,
	"host": HostSpray,
}

View File

@ -1,107 +0,0 @@
package pool
import (
"github.com/chainreactors/logs"
"github.com/chainreactors/parsers"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/words"
"github.com/chainreactors/words/rule"
"github.com/expr-lang/expr/vm"
"sync"
"time"
)
func newUnit(path string, source parsers.SpraySource) *Unit {
return &Unit{path: path, source: source}
}
type Unit struct {
number int
host string
path string
source parsers.SpraySource
retry int
frontUrl string
depth int // redirect depth
}
func NewBaselines() *Baselines {
return &Baselines{
baselines: map[int]*pkg.Baseline{},
}
}
type Baselines struct {
FailedBaselines []*pkg.Baseline
random *pkg.Baseline
index *pkg.Baseline
baselines map[int]*pkg.Baseline
}
type SprayMod int
const (
PathSpray SprayMod = iota + 1
HostSpray
ParamSpray
CustomSpray
)
var ModMap = map[string]SprayMod{
"path": PathSpray,
"host": HostSpray,
}
type Config struct {
BaseURL string
ProxyAddr string
Thread int
Wordlist []string
Timeout time.Duration
ProcessCh chan *pkg.Baseline
OutputCh chan *pkg.Baseline
FuzzyCh chan *pkg.Baseline
Outwg *sync.WaitGroup
RateLimit int
CheckPeriod int
ErrPeriod int32
BreakThreshold int32
Method string
Mod SprayMod
Headers map[string]string
ClientType int
MatchExpr *vm.Program
FilterExpr *vm.Program
RecuExpr *vm.Program
AppendRule *rule.Program
Fns []words.WordFunc
AppendWords []string
Fuzzy bool
IgnoreWaf bool
Crawl bool
Scope []string
Active bool
Bak bool
Common bool
RetryLimit int
RandomUserAgent bool
Random string
Index string
}
func NewBruteWords(config *Config, list []string) *words.Worder {
word := words.NewWorderWithList(list)
word.Fns = config.Fns
word.Run()
return word
}
func NewBruteDSL(config *Config, dsl string, params [][]string) *words.Worder {
word, err := words.NewWorderWithDsl(dsl, params, nil)
if err != nil {
logs.Log.Error(err.Error())
}
word.Fns = config.Fns
word.Run()
return word
}

View File

@ -51,7 +51,6 @@ type Runner struct {
Count int // tasks total number Count int // tasks total number
Wordlist []string Wordlist []string
AppendWords []string AppendWords []string
RecuDepth int
ClientType int ClientType int
Probes []string Probes []string
Total int // wordlist total number Total int // wordlist total number
@ -92,6 +91,10 @@ func (r *Runner) PrepareConfig() *pool.Config {
Random: r.Random, Random: r.Random,
Index: r.Index, Index: r.Index,
ProxyAddr: r.Proxy, ProxyAddr: r.Proxy,
MaxRecursionDepth: r.Depth,
MaxRedirect: 3,
MaxAppendDepth: r.AppendDepth,
MaxCrawlDepth: r.CrawlDepth,
} }
if config.ClientType == ihttp.Auto { if config.ClientType == ihttp.Auto {
@ -361,7 +364,7 @@ func (r *Runner) Output(bl *pkg.Baseline) {
if r.Option.Json { if r.Option.Json {
out = bl.ToJson() out = bl.ToJson()
} else if len(r.Probes) > 0 { } else if len(r.Probes) > 0 {
out = bl.Format(r.Probes) out = bl.ProbeOutput(r.Probes)
} else if r.Color { } else if r.Color {
out = bl.ColorString() out = bl.ColorString()
} else { } else {
@ -382,7 +385,7 @@ func (r *Runner) Output(bl *pkg.Baseline) {
} else if r.FileOutput == "full" { } else if r.FileOutput == "full" {
r.OutputFile.SafeWrite(bl.String() + "\n") r.OutputFile.SafeWrite(bl.String() + "\n")
} else { } else {
r.OutputFile.SafeWrite(bl.Format(strings.Split(r.FileOutput, ",")) + "\n") r.OutputFile.SafeWrite(bl.ProbeOutput(strings.Split(r.FileOutput, ",")) + "\n")
} }
r.OutputFile.SafeSync() r.OutputFile.SafeSync()

View File

@ -125,6 +125,8 @@ type Baseline struct {
URLs []string `json:"-"` URLs []string `json:"-"`
Collected bool `json:"-"` Collected bool `json:"-"`
Retry int `json:"-"` Retry int `json:"-"`
SameRedirectDomain bool `json:"-"`
IsBaseline bool `json:"-"`
} }
func (bl *Baseline) IsDir() bool { func (bl *Baseline) IsDir() bool {
@ -235,6 +237,15 @@ func (bl *Baseline) Compare(other *Baseline) int {
return -1 return -1
} }
// ProbeOutput renders the requested probe fields tab-separated, with
// leading/trailing whitespace trimmed.
func (bl *Baseline) ProbeOutput(format []string) string {
	fields := make([]string, len(format))
	for i, f := range format {
		fields[i] = bl.Get(f)
	}
	return strings.TrimSpace("\t" + strings.Join(fields, "\t"))
}
var Distance uint8 = 5 // 数字越小越相似, 数字为0则为完全一致. var Distance uint8 = 5 // 数字越小越相似, 数字为0则为完全一致.
func (bl *Baseline) FuzzyCompare(other *Baseline) bool { func (bl *Baseline) FuzzyCompare(other *Baseline) bool {

View File

@ -30,7 +30,7 @@ var (
WhiteStatus = []int{} // cmd input, 200 WhiteStatus = []int{} // cmd input, 200
BlackStatus = []int{} // cmd input, 400,410 BlackStatus = []int{} // cmd input, 400,410
FuzzyStatus = []int{} // cmd input, 500,501,502,503 FuzzyStatus = []int{} // cmd input, 500,501,502,503
WAFStatus = []int{493, 418, 1020, 406} WAFStatus = []int{493, 418, 1020, 406, 429}
UniqueStatus = []int{} // 相同unique的403表示命中了同一条acl, 相同unique的200表示default页面 UniqueStatus = []int{} // 相同unique的403表示命中了同一条acl, 相同unique的200表示default页面
// plugins // plugins
@ -270,9 +270,9 @@ func CRC16Hash(data []byte) uint16 {
func SafePath(dir, u string) string { func SafePath(dir, u string) string {
hasSlash := strings.HasPrefix(u, "/") hasSlash := strings.HasPrefix(u, "/")
if hasSlash { if hasSlash {
return path.Join(dir, u[1:]) return dir + u[1:]
} else { } else {
return path.Join(dir, u) return dir + u
} }
} }