Merge pull request #79 from chainreactors/dev

merge v1.1.6
This commit is contained in:
M09Ic 2024-11-01 13:54:13 +08:00 committed by GitHub
commit 7621514bd9
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
14 changed files with 349 additions and 257 deletions


@@ -149,6 +149,15 @@ func Spray() {
}
ctx, canceler := context.WithTimeout(context.Background(), time.Duration(runner.Deadline)*time.Second)
go func() {
select {
case <-ctx.Done():
time.Sleep(10 * time.Second)
logs.Log.Errorf("deadline and timeout not work, hard exit!!!")
os.Exit(0)
}
}()
go func() {
exitChan := make(chan os.Signal, 2)
signal.Notify(exitChan, os.Interrupt, syscall.SIGTERM)

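The hunk above adds a shutdown watchdog: once the deadline context expires, normal teardown gets a ten-second grace period, after which the goroutine force-exits the process. A minimal standalone sketch of the same pattern (durations and the exit code are illustrative, not the project's exact values):

package main

import (
	"context"
	"fmt"
	"os"
	"time"
)

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()
	go func() {
		<-ctx.Done()                 // deadline reached
		time.Sleep(10 * time.Second) // grace period for graceful shutdown
		fmt.Fprintln(os.Stderr, "graceful shutdown stalled, hard exit")
		os.Exit(1)
	}()
	// ... work that is expected to finish before the deadline ...
}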
go.mod

@@ -6,7 +6,7 @@ require (
github.com/chainreactors/files v0.0.0-20240716182835-7884ee1e77f0
github.com/chainreactors/fingers v0.0.0-20240716172449-2fc3147b9c2a
github.com/chainreactors/logs v0.0.0-20240207121836-c946f072f81f
github.com/chainreactors/parsers v0.0.0-20240910081704-fd57f462fc65
github.com/chainreactors/parsers v0.0.0-20241016065831-bedaf68005f1
github.com/chainreactors/utils v0.0.0-20240805193040-ff3b97aa3c3f
github.com/expr-lang/expr v1.16.9
github.com/gookit/config/v2 v2.2.5

go.sum

@@ -101,6 +101,10 @@ github.com/chainreactors/parsers v0.0.0-20240829055950-923f89a92b84 h1:F6umsdHLx
github.com/chainreactors/parsers v0.0.0-20240829055950-923f89a92b84/go.mod h1:7rXdYz6jrdjF0WUH1ICcAXKIKKjKmJo2PU8u43V7jkA=
github.com/chainreactors/parsers v0.0.0-20240910081704-fd57f462fc65 h1:subSvyczsErYMRnCD07s4Ub6zOSaw2xZ1/O9t3tHkuw=
github.com/chainreactors/parsers v0.0.0-20240910081704-fd57f462fc65/go.mod h1:7rXdYz6jrdjF0WUH1ICcAXKIKKjKmJo2PU8u43V7jkA=
github.com/chainreactors/parsers v0.0.0-20241013180542-88e2dc355c57 h1:KuijtekTNtSpQbKf2jqKp99gxnGQXffPeEF+EOHnXBE=
github.com/chainreactors/parsers v0.0.0-20241013180542-88e2dc355c57/go.mod h1:7rXdYz6jrdjF0WUH1ICcAXKIKKjKmJo2PU8u43V7jkA=
github.com/chainreactors/parsers v0.0.0-20241016065831-bedaf68005f1 h1:Ka/KBrqAgwiL07TwYjtqF2DQ3x0fCxw1XHG+GFqMKEc=
github.com/chainreactors/parsers v0.0.0-20241016065831-bedaf68005f1/go.mod h1:7rXdYz6jrdjF0WUH1ICcAXKIKKjKmJo2PU8u43V7jkA=
github.com/chainreactors/utils v0.0.0-20240528085651-ba1b255482c1/go.mod h1:JA4eiQZm+7AsfjXBcIzIdVKBEhDCb16eNtWFCGTxlvs=
github.com/chainreactors/utils v0.0.0-20240704062557-662d623b74f4/go.mod h1:JA4eiQZm+7AsfjXBcIzIdVKBEhDCb16eNtWFCGTxlvs=
github.com/chainreactors/utils v0.0.0-20240715080349-d2d0484c95ed/go.mod h1:LajXuvESQwP+qCMAvlcoSXppQCjuLlBrnQpu9XQ1HtU=


@@ -39,17 +39,20 @@ func Format(opts Option) {
group[result.Url.Host] = append(group[result.Url.Host], &result)
}
// group by host
for _, results := range group {
for _, result := range results {
if !opts.Fuzzy && result.IsFuzzy {
continue
}
if !opts.NoColor {
logs.Log.Console(result.ColorString() + "\n")
if opts.OutputProbe == "" {
if !opts.NoColor {
logs.Log.Console(result.ColorString() + "\n")
} else {
logs.Log.Console(result.String() + "\n")
}
} else {
logs.Log.Console(result.String() + "\n")
probes := strings.Split(opts.OutputProbe, ",")
logs.Log.Console(result.ProbeOutput(probes) + "\n")
}
}
}


@@ -115,6 +115,7 @@ type PluginOptions struct {
CommonPlugin bool `long:"common" description:"Bool, enable common file found" config:"common"`
CrawlPlugin bool `long:"crawl" description:"Bool, enable crawl" config:"crawl"`
CrawlDepth int `long:"crawl-depth" default:"3" description:"Int, crawl depth" config:"crawl-depth"`
AppendDepth int `long:"append-depth" default:"2" description:"Int, append depth" config:"append-depth"`
}
type ModeOptions struct {
@@ -131,7 +132,7 @@ type ModeOptions struct {
BreakThreshold int `long:"error-threshold" default:"20" description:"Int, break when the error exceeds the threshold" config:"error-threshold"`
BlackStatus string `long:"black-status" default:"400,410" description:"Strings (comma split), custom black status" config:"black-status"`
WhiteStatus string `long:"white-status" default:"200" description:"Strings (comma split), custom white status" config:"white-status"`
FuzzyStatus string `long:"fuzzy-status" default:"500,501,502,503,301,302" description:"Strings (comma split), custom fuzzy status" config:"fuzzy-status"`
FuzzyStatus string `long:"fuzzy-status" default:"500,501,502,503,301,302,404" description:"Strings (comma split), custom fuzzy status" config:"fuzzy-status"`
UniqueStatus string `long:"unique-status" default:"403,200,404" description:"Strings (comma split), custom unique status" config:"unique-status"`
Unique bool `long:"unique" description:"Bool, unique response" config:"unique"`
RetryCount int `long:"retry" default:"0" description:"Int, retry count" config:"retry"`
@@ -253,7 +254,6 @@ func (opt *Option) Prepare() error {
logs.Log.Logf(pkg.LogVerbose, "Black Status: %v, WhiteStatus: %v, WAFStatus: %v", pkg.BlackStatus, pkg.WhiteStatus, pkg.WAFStatus)
logs.Log.Logf(pkg.LogVerbose, "Fuzzy Status: %v, Unique Status: %v", pkg.FuzzyStatus, pkg.UniqueStatus)
pool.MaxCrawl = opt.CrawlDepth
return nil
}
@@ -354,13 +354,12 @@ func (opt *Option) NewRunner() (*Runner, error) {
var express string
if opt.Recursive != "current.IsDir()" && opt.Depth != 0 {
// recursion is disabled by default unless a non-default recursive expression is specified
pool.MaxRecursion = 1
opt.Depth = 1
express = opt.Recursive
}
if opt.Depth != 0 {
// a manually set depth takes priority over the default
pool.MaxRecursion = opt.Depth
express = opt.Recursive
}


@@ -23,9 +23,6 @@ import (
)
var (
MaxRedirect = 3
MaxCrawl = 3
MaxRecursion = 0
EnableAllFuzzy = false
EnableAllUnique = false
//AllowHostModSource = []parsers.SpraySource{parsers.WordSource, parsers.CheckSource, parsers.InitIndexSource, parsers.InitRandomSource}
@@ -318,16 +315,14 @@ func (pool *BrutePool) Invoke(v interface{}) {
// handle redirects manually
if bl.IsValid && unit.source != parsers.CheckSource && bl.RedirectURL != "" {
//pool.wg.Add(1)
bl.SameRedirectDomain = pool.checkHost(bl.RedirectURL)
pool.doRedirect(bl, unit.depth)
}
if !ihttp.CheckBodySize(int64(bl.BodyLength)) {
bl.ExceedLength = true
}
bl.Source = unit.source
bl.ReqDepth = unit.depth
bl.Number = unit.number
unit.Update(bl)
bl.Spended = time.Since(start).Milliseconds()
switch unit.source {
case parsers.InitRandomSource:
@@ -494,7 +489,7 @@ func (pool *BrutePool) Handler() {
// recursion is evaluated only when bl is valid, mod is path-spray, and the current depth is below the max recursion depth
if bl.IsValid {
pool.Statistor.FoundNumber++
if bl.RecuDepth < MaxRecursion {
if bl.RecuDepth < pool.MaxRecursionDepth {
if pkg.CompareWithExpr(pool.RecuExpr, params) {
bl.Recu = true
}
@@ -511,79 +506,9 @@ func (pool *BrutePool) Handler() {
pool.analyzeDone = true
}
func (pool *BrutePool) doAppendRule(bl *pkg.Baseline) {
if pool.AppendRule == nil || bl.Source == parsers.AppendRuleSource {
pool.wg.Done()
return
}
go func() {
defer pool.wg.Done()
for u := range rule.RunAsStream(pool.AppendRule.Expressions, path.Base(bl.Path)) {
pool.addAddition(&Unit{
path: pkg.Dir(bl.Url.Path) + u,
host: bl.Host,
source: parsers.AppendRuleSource,
})
}
}()
}
func (pool *BrutePool) doAppendWords(bl *pkg.Baseline) {
if pool.AppendWords == nil || bl.Source == parsers.AppendSource || bl.Source == parsers.RuleSource {
// prevent self-recursion
pool.wg.Done()
return
}
go func() {
defer pool.wg.Done()
for u := range NewBruteWords(pool.Config, pool.AppendWords).Output {
pool.addAddition(&Unit{
path: pkg.SafePath(bl.Path, u),
host: bl.Host,
source: parsers.AppendSource,
})
}
}()
}
func (pool *BrutePool) doAppend(bl *pkg.Baseline) {
pool.wg.Add(2)
pool.doAppendWords(bl)
pool.doAppendRule(bl)
}
func (pool *BrutePool) doActive() {
defer pool.wg.Done()
if pool.Mod == HostSpray {
return
}
for _, u := range pkg.ActivePath {
pool.addAddition(&Unit{
path: pool.dir + u[1:],
source: parsers.FingerSource,
})
}
}
func (pool *BrutePool) doCommonFile() {
defer pool.wg.Done()
if pool.Mod == HostSpray {
return
}
for u := range NewBruteWords(pool.Config, append(pkg.Dicts["common"], pkg.Dicts["log"]...)).Output {
pool.addAddition(&Unit{
path: pool.dir + u,
source: parsers.CommonFileSource,
})
}
}
func (pool *BrutePool) checkRedirect(redirectURL string) bool {
if pool.random.RedirectURL == "" {
// if the random baseline's redirectURL is empty, this entry
// if the random baseline's redirectURL is empty, ignore it
return true
}
@@ -637,19 +562,41 @@ func (pool *BrutePool) PreCompare(resp *ihttp.Response) error {
return nil
}
// checkHost returns true for the same host
// and false for a different host
func (pool *BrutePool) checkHost(u string) bool {
if v, err := url.Parse(u); err == nil {
if v.Host == "" {
return true
}
if v.Host == pool.url.Host {
return true
} else {
return false
}
}
return true
}
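In practice checkHost treats relative and unparseable redirect targets as same-host, so only an explicitly different host is flagged as cross-domain. Expected results, assuming pool.url.Host is "example.com" (values illustrative):

// assuming pool.url.Host == "example.com"
pool.checkHost("/login")                    // true: relative redirect, no host component
pool.checkHost("https://example.com/admin") // true: same host
pool.checkHost("https://evil.com/")         // false: different host
// a target that url.Parse rejects also falls through to true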
func (pool *BrutePool) BaseCompare(bl *pkg.Baseline) bool {
if !bl.IsValid {
return false
}
var status = -1
// special handling for 30x status codes
if bl.RedirectURL != "" && strings.HasSuffix(bl.RedirectURL, bl.Url.Path+"/") {
bl.Reason = pkg.ErrFuzzyRedirect.Error()
pool.putToFuzzy(bl)
return false
if bl.RedirectURL != "" {
if bl.SameRedirectDomain && strings.HasSuffix(bl.RedirectURL, bl.Url.Path+"/") {
bl.Reason = pkg.ErrFuzzyRedirect.Error()
return false
}
}
// comparing against a baseline with the same status code requires that status to be configured in fuzzystatus beforehand
base, ok := pool.baselines[bl.Status] // pick the baseline matching this status code for comparison
if bl.IsBaseline {
ok = false
}
if !ok {
if pool.random.Status == bl.Status {
// when other's status code matches base's, base is used
@@ -694,6 +641,7 @@ func (pool *BrutePool) BaseCompare(bl *pkg.Baseline) bool {
func (pool *BrutePool) addFuzzyBaseline(bl *pkg.Baseline) {
if _, ok := pool.baselines[bl.Status]; !ok && (EnableAllFuzzy || iutils.IntsContains(pkg.FuzzyStatus, bl.Status)) {
bl.IsBaseline = true
bl.Collect()
pool.doCrawl(bl) // even invalid pages may contain special urls worth crawling
pool.baselines[bl.Status] = bl
@@ -757,8 +705,32 @@ func (pool *BrutePool) doCheck() {
}
}
func (pool *BrutePool) doRedirect(bl *pkg.Baseline, depth int) {
if depth >= pool.MaxRedirect {
return
}
//if !bl.SameRedirectDomain {
// return // redirects to a different domain are not handled
//}
reURL := pkg.FormatURL(bl.Url.Path, bl.RedirectURL)
pool.wg.Add(1)
go func() {
defer pool.wg.Done()
pool.addAddition(&Unit{
path: reURL,
parent: bl.Number,
host: bl.Host,
source: parsers.RedirectSource,
from: bl.Source,
frontUrl: bl.UrlString,
depth: depth + 1,
})
}()
}
func (pool *BrutePool) doCrawl(bl *pkg.Baseline) {
if !pool.Crawl || bl.ReqDepth >= MaxCrawl {
if !pool.Crawl || bl.ReqDepth >= pool.MaxCrawlDepth {
return
}
@@ -778,8 +750,10 @@ func (pool *BrutePool) doCrawl(bl *pkg.Baseline) {
}
pool.addAddition(&Unit{
path: u,
parent: bl.Number,
host: bl.Host,
source: parsers.CrawlSource,
from: bl.Source,
depth: bl.ReqDepth + 1,
})
}
@@ -788,7 +762,7 @@ func (pool *BrutePool) doCrawl(bl *pkg.Baseline) {
}
func (pool *BrutePool) doScopeCrawl(bl *pkg.Baseline) {
if bl.ReqDepth >= MaxCrawl {
if bl.ReqDepth >= pool.MaxCrawlDepth {
pool.wg.Done()
return
}
@@ -804,7 +778,13 @@ func (pool *BrutePool) doScopeCrawl(bl *pkg.Baseline) {
if _, ok := pool.scopeurls[u]; !ok {
pool.urls.Store(u, nil)
pool.wg.Add(1)
pool.scopePool.Invoke(&Unit{path: u, source: parsers.CrawlSource, depth: bl.ReqDepth + 1})
pool.scopePool.Invoke(&Unit{
path: u,
parent: bl.Number,
source: parsers.CrawlSource,
from: bl.Source,
depth: bl.ReqDepth + 1,
})
}
pool.scopeLocker.Unlock()
}
@@ -817,12 +797,12 @@ func (pool *BrutePool) doBak() {
if pool.Mod == HostSpray {
return
}
for w := range NewBruteDSL(pool.Config, "{?0}.{?@bak_ext}", [][]string{pkg.BakGenerator(pool.url.Host)}).Output {
pool.addAddition(&Unit{
path: pool.dir + w,
source: parsers.BakSource,
})
}
//for w := range NewBruteDSL(pool.Config, "{?0}.{?@bak_ext}", [][]string{pkg.BakGenerator(pool.url.Host)}).Output {
// pool.addAddition(&Unit{
// path: pool.dir + w,
// source: parsers.BakSource,
// })
//}
for w := range NewBruteDSL(pool.Config, "{?@bak_name}.{?@bak_ext}", nil).Output {
pool.addAddition(&Unit{
@@ -831,3 +811,79 @@ func (pool *BrutePool) doBak() {
})
}
}
func (pool *BrutePool) doAppend(bl *pkg.Baseline) {
pool.wg.Add(2)
pool.doAppendWords(bl)
pool.doAppendRule(bl)
}
func (pool *BrutePool) doAppendRule(bl *pkg.Baseline) {
if pool.AppendRule == nil || bl.Source == parsers.AppendRuleSource || bl.ReqDepth >= pool.MaxAppendDepth {
pool.wg.Done()
return
}
go func() {
defer pool.wg.Done()
for u := range rule.RunAsStream(pool.AppendRule.Expressions, path.Base(bl.Path)) {
pool.addAddition(&Unit{
path: pkg.Dir(bl.Url.Path) + u,
parent: bl.Number,
host: bl.Host,
source: parsers.AppendRuleSource,
from: bl.Source,
depth: bl.ReqDepth + 1,
})
}
}()
}
func (pool *BrutePool) doAppendWords(bl *pkg.Baseline) {
if pool.AppendWords == nil || bl.Source == parsers.AppendSource || bl.Source == parsers.RuleSource || bl.ReqDepth >= pool.MaxAppendDepth {
// prevent self-recursion
pool.wg.Done()
return
}
go func() {
defer pool.wg.Done()
for u := range NewBruteWords(pool.Config, pool.AppendWords).Output {
pool.addAddition(&Unit{
path: pkg.SafePath(bl.Path, u),
parent: bl.Number,
host: bl.Host,
source: parsers.AppendSource,
from: bl.Source,
depth: bl.RecuDepth + 1,
})
}
}()
}
func (pool *BrutePool) doActive() {
defer pool.wg.Done()
if pool.Mod == HostSpray {
return
}
for _, u := range pkg.ActivePath {
pool.addAddition(&Unit{
path: pool.dir + u[1:],
source: parsers.FingerSource,
})
}
}
func (pool *BrutePool) doCommonFile() {
defer pool.wg.Done()
if pool.Mod == HostSpray {
return
}
for u := range NewBruteWords(pool.Config, append(pkg.Dicts["common"], pkg.Dicts["log"]...)).Output {
pool.addAddition(&Unit{
path: pool.dir + u,
source: parsers.CommonFileSource,
})
}
}


@@ -181,7 +181,7 @@ func (pool *CheckPool) Handler() {
}
func (pool *CheckPool) doRedirect(bl *pkg.Baseline, depth int) {
if depth >= MaxRedirect {
if depth >= pool.MaxRedirect {
return
}
var reURL string
@@ -199,9 +199,11 @@ func (pool *CheckPool) doRedirect(bl *pkg.Baseline, depth int) {
go func() {
pool.additionCh <- &Unit{
path: reURL,
parent: bl.Number,
source: parsers.RedirectSource,
frontUrl: bl.UrlString,
depth: depth + 1,
from: bl.Source,
}
}()
}
@@ -221,8 +223,10 @@ func (pool *CheckPool) doUpgrade(bl *pkg.Baseline) {
go func() {
pool.additionCh <- &Unit{
path: reurl,
parent: bl.Number,
source: parsers.UpgradeSource,
depth: bl.ReqDepth + 1,
from: bl.Source,
}
}()
}

internal/pool/config.go (new file)

@@ -0,0 +1,69 @@
package pool
import (
"github.com/chainreactors/logs"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/words"
"github.com/chainreactors/words/rule"
"github.com/expr-lang/expr/vm"
"sync"
"time"
)
type Config struct {
BaseURL string
ProxyAddr string
Thread int
Wordlist []string
Timeout time.Duration
ProcessCh chan *pkg.Baseline
OutputCh chan *pkg.Baseline
FuzzyCh chan *pkg.Baseline
Outwg *sync.WaitGroup
RateLimit int
CheckPeriod int
ErrPeriod int32
BreakThreshold int32
Method string
Mod SprayMod
Headers map[string]string
ClientType int
MatchExpr *vm.Program
FilterExpr *vm.Program
RecuExpr *vm.Program
AppendRule *rule.Program
Fns []words.WordFunc
AppendWords []string
Fuzzy bool
IgnoreWaf bool
Crawl bool
Scope []string
Active bool
Bak bool
Common bool
RetryLimit int
RandomUserAgent bool
Random string
Index string
MaxRedirect int
MaxCrawlDepth int
MaxRecursionDepth int
MaxAppendDepth int
}
func NewBruteWords(config *Config, list []string) *words.Worder {
word := words.NewWorderWithList(list)
word.Fns = config.Fns
word.Run()
return word
}
func NewBruteDSL(config *Config, dsl string, params [][]string) *words.Worder {
word, err := words.NewWorderWithDsl(dsl, params, nil)
if err != nil {
logs.Log.Error(err.Error())
}
word.Fns = config.Fns
word.Run()
return word
}
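NewBruteDSL logs a DSL parse error instead of returning it, so callers assume the expression is valid. Usage matching doBak in this commit, expanding the built-in bak_name and bak_ext keyword lists:

for w := range NewBruteDSL(pool.Config, "{?@bak_name}.{?@bak_ext}", nil).Output {
	pool.addAddition(&Unit{
		path:   pool.dir + w,
		source: parsers.BakSource,
	})
}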


@@ -28,24 +28,6 @@ type BasePool struct {
isFallback atomic.Bool
}
func (pool *BasePool) doRedirect(bl *pkg.Baseline, depth int) {
if depth >= MaxRedirect {
return
}
reURL := pkg.FormatURL(bl.Url.Path, bl.RedirectURL)
pool.wg.Add(1)
go func() {
defer pool.wg.Done()
pool.addAddition(&Unit{
path: reURL,
host: bl.Host,
source: parsers.RedirectSource,
frontUrl: bl.UrlString,
depth: depth + 1,
})
}()
}
func (pool *BasePool) doRetry(bl *pkg.Baseline) {
if bl.Retry >= pool.RetryLimit {
return
@@ -55,8 +37,10 @@ func (pool *BasePool) doRetry(bl *pkg.Baseline) {
defer pool.wg.Done()
pool.addAddition(&Unit{
path: bl.Path,
parent: bl.Number,
host: bl.Host,
source: parsers.RetrySource,
from: bl.Source,
retry: bl.Retry + 1,
})
}()

internal/pool/type.go (new file)

@@ -0,0 +1,57 @@
package pool
import (
"github.com/chainreactors/parsers"
"github.com/chainreactors/spray/pkg"
)
func newUnit(path string, source parsers.SpraySource) *Unit {
return &Unit{path: path, source: source}
}
type Unit struct {
number int
parent int
host string
path string
from parsers.SpraySource
source parsers.SpraySource
retry int
frontUrl string
depth int
}
func (u *Unit) Update(bl *pkg.Baseline) {
bl.Number = u.number
bl.Parent = u.parent
bl.Host = u.host
bl.Path = u.path
bl.Source = u.source
}
func NewBaselines() *Baselines {
return &Baselines{
baselines: map[int]*pkg.Baseline{},
}
}
type Baselines struct {
FailedBaselines []*pkg.Baseline
random *pkg.Baseline
index *pkg.Baseline
baselines map[int]*pkg.Baseline
}
type SprayMod int
const (
PathSpray SprayMod = iota + 1
HostSpray
ParamSpray
CustomSpray
)
var ModMap = map[string]SprayMod{
"path": PathSpray,
"host": HostSpray,
}
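The new parent and from fields give every derived request a link back to the unit that produced it. A sketch of how a crawl-derived child is built, mirroring doCrawl in this commit, and how Update later stamps the lineage onto the resulting Baseline (the path value is illustrative):

child := &Unit{
	path:   "/admin/login",
	parent: bl.Number,           // number of the request that discovered this path
	host:   bl.Host,
	source: parsers.CrawlSource, // how this unit was generated
	from:   bl.Source,           // how its parent was generated
	depth:  bl.ReqDepth + 1,
}
// when the response for child arrives, child.Update(newBl) copies
// number, parent, host, path and source onto the new Baseline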


@@ -1,107 +0,0 @@
package pool
import (
"github.com/chainreactors/logs"
"github.com/chainreactors/parsers"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/words"
"github.com/chainreactors/words/rule"
"github.com/expr-lang/expr/vm"
"sync"
"time"
)
func newUnit(path string, source parsers.SpraySource) *Unit {
return &Unit{path: path, source: source}
}
type Unit struct {
number int
host string
path string
source parsers.SpraySource
retry int
frontUrl string
depth int // redirect depth
}
func NewBaselines() *Baselines {
return &Baselines{
baselines: map[int]*pkg.Baseline{},
}
}
type Baselines struct {
FailedBaselines []*pkg.Baseline
random *pkg.Baseline
index *pkg.Baseline
baselines map[int]*pkg.Baseline
}
type SprayMod int
const (
PathSpray SprayMod = iota + 1
HostSpray
ParamSpray
CustomSpray
)
var ModMap = map[string]SprayMod{
"path": PathSpray,
"host": HostSpray,
}
type Config struct {
BaseURL string
ProxyAddr string
Thread int
Wordlist []string
Timeout time.Duration
ProcessCh chan *pkg.Baseline
OutputCh chan *pkg.Baseline
FuzzyCh chan *pkg.Baseline
Outwg *sync.WaitGroup
RateLimit int
CheckPeriod int
ErrPeriod int32
BreakThreshold int32
Method string
Mod SprayMod
Headers map[string]string
ClientType int
MatchExpr *vm.Program
FilterExpr *vm.Program
RecuExpr *vm.Program
AppendRule *rule.Program
Fns []words.WordFunc
AppendWords []string
Fuzzy bool
IgnoreWaf bool
Crawl bool
Scope []string
Active bool
Bak bool
Common bool
RetryLimit int
RandomUserAgent bool
Random string
Index string
}
func NewBruteWords(config *Config, list []string) *words.Worder {
word := words.NewWorderWithList(list)
word.Fns = config.Fns
word.Run()
return word
}
func NewBruteDSL(config *Config, dsl string, params [][]string) *words.Worder {
word, err := words.NewWorderWithDsl(dsl, params, nil)
if err != nil {
logs.Log.Error(err.Error())
}
word.Fns = config.Fns
word.Run()
return word
}


@@ -51,7 +51,6 @@ type Runner struct {
Count int // tasks total number
Wordlist []string
AppendWords []string
RecuDepth int
ClientType int
Probes []string
Total int // wordlist total number
@@ -81,17 +80,21 @@ func (r *Runner) PrepareConfig() *pool.Config {
AppendWords: r.AppendWords, // append the wordlist to valid directories
Fns: r.Fns,
//IgnoreWaf: r.IgnoreWaf,
Crawl: r.CrawlPlugin,
Scope: r.Scope,
Active: r.Finger,
Bak: r.BakPlugin,
Common: r.CommonPlugin,
RetryLimit: r.RetryCount,
ClientType: r.ClientType,
RandomUserAgent: r.RandomUserAgent,
Random: r.Random,
Index: r.Index,
ProxyAddr: r.Proxy,
Crawl: r.CrawlPlugin,
Scope: r.Scope,
Active: r.Finger,
Bak: r.BakPlugin,
Common: r.CommonPlugin,
RetryLimit: r.RetryCount,
ClientType: r.ClientType,
RandomUserAgent: r.RandomUserAgent,
Random: r.Random,
Index: r.Index,
ProxyAddr: r.Proxy,
MaxRecursionDepth: r.Depth,
MaxRedirect: 3,
MaxAppendDepth: r.AppendDepth,
MaxCrawlDepth: r.CrawlDepth,
}
if config.ClientType == ihttp.Auto {
@@ -361,7 +364,7 @@ func (r *Runner) Output(bl *pkg.Baseline) {
if r.Option.Json {
out = bl.ToJson()
} else if len(r.Probes) > 0 {
out = bl.Format(r.Probes)
out = bl.ProbeOutput(r.Probes)
} else if r.Color {
out = bl.ColorString()
} else {
@@ -382,7 +385,7 @@ func (r *Runner) Output(bl *pkg.Baseline) {
} else if r.FileOutput == "full" {
r.OutputFile.SafeWrite(bl.String() + "\n")
} else {
r.OutputFile.SafeWrite(bl.Format(strings.Split(r.FileOutput, ",")) + "\n")
r.OutputFile.SafeWrite(bl.ProbeOutput(strings.Split(r.FileOutput, ",")) + "\n")
}
r.OutputFile.SafeSync()


@@ -113,18 +113,20 @@ func NewInvalidBaseline(u, host string, resp *ihttp.Response, reason string) *Ba
type Baseline struct {
*parsers.SprayResult
Url *url.URL `json:"-"`
Dir bool `json:"-"`
Chunked bool `json:"-"`
Body BS `json:"-"`
Header BS `json:"-"`
Raw BS `json:"-"`
Response *http.Response `json:"-"`
Recu bool `json:"-"`
RecuDepth int `json:"-"`
URLs []string `json:"-"`
Collected bool `json:"-"`
Retry int `json:"-"`
Url *url.URL `json:"-"`
Dir bool `json:"-"`
Chunked bool `json:"-"`
Body BS `json:"-"`
Header BS `json:"-"`
Raw BS `json:"-"`
Response *http.Response `json:"-"`
Recu bool `json:"-"`
RecuDepth int `json:"-"`
URLs []string `json:"-"`
Collected bool `json:"-"`
Retry int `json:"-"`
SameRedirectDomain bool `json:"-"`
IsBaseline bool `json:"-"`
}
func (bl *Baseline) IsDir() bool {
@@ -235,6 +237,15 @@ func (bl *Baseline) Compare(other *Baseline) int {
return -1
}
func (bl *Baseline) ProbeOutput(format []string) string {
var s strings.Builder
for _, f := range format {
s.WriteString("\t")
s.WriteString(bl.Get(f))
}
return strings.TrimSpace(s.String())
}
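ProbeOutput writes a tab before every requested field and then trims the result, so the output is the fields joined by tabs with no leading whitespace. A sketch, assuming probe keys such as "status" and "length" are valid arguments to SprayResult.Get (the key names here are illustrative):

probes := strings.Split("status,length,title", ",")
fmt.Println(bl.ProbeOutput(probes))
// prints e.g. "200\t1024\tExample Domain", tab-separated with the leading tab trimmed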
var Distance uint8 = 5 // smaller is more similar; 0 means identical
func (bl *Baseline) FuzzyCompare(other *Baseline) bool {


@@ -30,7 +30,7 @@ var (
WhiteStatus = []int{} // cmd input, 200
BlackStatus = []int{} // cmd input, 400,410
FuzzyStatus = []int{} // cmd input, 500,501,502,503
WAFStatus = []int{493, 418, 1020, 406}
WAFStatus = []int{493, 418, 1020, 406, 429}
UniqueStatus = []int{} // 403s sharing a unique hash hit the same acl; 200s sharing a unique hash indicate a default page
// plugins
@@ -270,9 +270,9 @@ func CRC16Hash(data []byte) uint16 {
func SafePath(dir, u string) string {
hasSlash := strings.HasPrefix(u, "/")
if hasSlash {
return path.Join(dir, u[1:])
return dir + u[1:]
} else {
return path.Join(dir, u)
return dir + u
}
}
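SafePath now concatenates instead of calling path.Join, presumably because path.Join runs path.Clean on its result, collapsing duplicate slashes and dropping trailing ones, which mangles wordlist entries that depend on exact slash placement. The concatenation variant preserves them, but assumes dir already ends with "/":

dir, u := "/app/", "admin/"
_ = path.Join(dir, u) // "/app/admin": Clean drops the trailing slash
_ = dir + u           // "/app/admin/": trailing slash preserved, which matters when probing directories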