refactor cmd ui.

M09Ic 2024-07-14 04:08:50 +08:00
parent 40a1f90601
commit 3f4094d89e
6 changed files with 92 additions and 93 deletions

View File

@@ -160,7 +160,7 @@ func Spray() {
}()
}()
if runner.CheckOnly {
if runner.IsCheck {
runner.RunWithCheck(ctx)
} else {
runner.Run(ctx)
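The only behavioral change in this hunk is the field that drives the branch: the user-facing CheckOnly flag is gone, and the dispatch now relies on IsCheck, which the runner derives while preparing its wordlist (see the options changes below). A minimal, hypothetical sketch of the dispatch, with context handling stripped (the real methods take a context.Context):

package main

import "fmt"

// Runner stands in for the real internal/runner.Runner; IsCheck is assumed to
// be set during PrepareRunner when no dictionary ends up being loaded.
type Runner struct{ IsCheck bool }

func (r *Runner) RunWithCheck() { fmt.Println("check-only: one probe per target, like httpx") }
func (r *Runner) Run()          { fmt.Println("full path/host spraying run") }

func main() {
	r := &Runner{IsCheck: true} // hypothetical: started without -d, -w or -D
	if r.IsCheck {
		r.RunWithCheck()
	} else {
		r.Run()
	}
}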

View File

@@ -101,7 +101,7 @@ mode:
# Bool, skip error break
force: false
# Bool, check only
check-only: false
default: false
# Bool, no scope
no-scope: false
# String, custom scope, e.g.: --scope *.example.com
@@ -132,7 +132,7 @@ mode:
unique: false
# Int, retry count
retry: 0
distance: 5
sim-distance: 5
misc:
# String, path/host spray
mod: path
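For reference, the two renamed keys in the shipped config template are check-only, which becomes default (apparently mirroring the new -D/--default option), and distance, which becomes sim-distance. Purely as an illustration of the new spellings (spray has its own config loader and `config:` tags; gopkg.in/yaml.v3 is used here only to make the snippet self-contained):

package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

// illustrative struct, not the project's option types
type modeConfig struct {
	Force       bool `yaml:"force"`
	Default     bool `yaml:"default"`      // previously check-only
	SimDistance int  `yaml:"sim-distance"` // previously distance
}

func main() {
	raw := []byte("force: false\ndefault: false\nsim-distance: 5\n")
	var cfg modeConfig
	if err := yaml.Unmarshal(raw, &cfg); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", cfg) // {Force:false Default:false SimDistance:5}
}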

View File

@@ -50,17 +50,18 @@ type InputOptions struct {
URL []string `short:"u" long:"url" description:"Strings, input baseurl, e.g.: http://google.com"`
URLFile string `short:"l" long:"list" description:"File, input filename"`
PortRange string `short:"p" long:"port" description:"String, input port range, e.g.: 80,8080-8090,db"`
CIDRs string `long:"cidr" description:"String, input cidr, e.g.: 1.1.1.1/24 "`
CIDRs string `short:"i" long:"cidr" description:"String, input cidr, e.g.: 1.1.1.1/24 "`
RawFile string `long:"raw" description:"File, input raw request filename"`
Dictionaries []string `short:"d" long:"dict" description:"Files, Multi,dict files, e.g.: -d 1.txt -d 2.txt" config:"dictionaries"`
NoDict bool `long:"no-dict" description:"Bool, no dictionary" config:"no-dict"`
Word string `short:"w" long:"word" description:"String, word generate dsl, e.g.: -w test{?ld#4}" config:"word"`
Rules []string `short:"r" long:"rules" description:"Files, rule files, e.g.: -r rule1.txt -r rule2.txt" config:"rules"`
AppendRule []string `long:"append-rule" description:"Files, when found valid path , use append rule generator new word with current path" config:"append-rules"`
FilterRule string `long:"filter-rule" description:"String, filter rule, e.g.: --rule-filter '>8 <4'" config:"filter-rule"`
AppendFile []string `long:"append-file" description:"Files, when found valid path , use append file new word with current path" config:"append-files"`
Offset int `long:"offset" description:"Int, wordlist offset"`
Limit int `long:"limit" description:"Int, wordlist limit, start with offset. e.g.: --offset 1000 --limit 100"`
//NoDict bool `long:"no-dict" description:"Bool, no dictionary" config:"no-dict"`
DefaultDict bool `short:"D" long:"default" description:"Bool, use default dictionary" config:"default"`
Word string `short:"w" long:"word" description:"String, word generate dsl, e.g.: -w test{?ld#4}" config:"word"`
Rules []string `short:"r" long:"rules" description:"Files, rule files, e.g.: -r rule1.txt -r rule2.txt" config:"rules"`
AppendRule []string `long:"append-rule" description:"Files, when found valid path , use append rule generator new word with current path" config:"append-rules"`
FilterRule string `long:"filter-rule" description:"String, filter rule, e.g.: --rule-filter '>8 <4'" config:"filter-rule"`
AppendFile []string `long:"append-file" description:"Files, when found valid path , use append file new word with current path" config:"append-files"`
Offset int `long:"offset" description:"Int, wordlist offset"`
Limit int `long:"limit" description:"Int, wordlist limit, start with offset. e.g.: --offset 1000 --limit 100"`
}
type FunctionOptions struct {
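Two input flags change in the hunk above: --cidr gains the -i shorthand, and the removed no-dict option gives way to an opt-in -D/--default switch for the built-in dictionary. The tag style matches github.com/jessevdk/go-flags; the struct below is a trimmed-down, hypothetical version of InputOptions just to show how the shorthands parse:

package main

import (
	"fmt"

	"github.com/jessevdk/go-flags"
)

// hypothetical subset of InputOptions
type inputOptions struct {
	CIDRs       string `short:"i" long:"cidr" description:"String, input cidr"`
	DefaultDict bool   `short:"D" long:"default" description:"Bool, use default dictionary"`
}

func main() {
	var opt inputOptions
	// e.g. spray -i 1.1.1.1/24 -D
	if _, err := flags.ParseArgs(&opt, []string{"-i", "1.1.1.1/24", "-D"}); err != nil {
		panic(err)
	}
	fmt.Printf("cidr=%q default-dict=%v\n", opt.CIDRs, opt.DefaultDict)
}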
@@ -118,9 +119,9 @@ type PluginOptions struct {
}
type ModeOptions struct {
RateLimit int `long:"rate-limit" default:"0" description:"Int, request rate limit (rate/s), e.g.: --rate-limit 100" config:"rate-limit"`
Force bool `long:"force" description:"Bool, skip error break" config:"force"`
CheckOnly bool `long:"check-only" description:"Bool, check only" config:"check-only"`
RateLimit int `long:"rate-limit" default:"0" description:"Int, request rate limit (rate/s), e.g.: --rate-limit 100" config:"rate-limit"`
Force bool `long:"force" description:"Bool, skip error break" config:"force"`
//CheckOnly bool `long:"check-only" description:"Bool, check only" config:"check-only"`
NoScope bool `long:"no-scope" description:"Bool, no scope" config:"no-scope"`
Scope []string `long:"scope" description:"String, custom scope, e.g.: --scope *.example.com" config:"scope"`
Recursive string `long:"recursive" default:"current.IsDir()" description:"String,custom recursive rule, e.g.: --recursive current.IsDir()" config:"recursive"`
@@ -136,7 +137,7 @@ type ModeOptions struct {
UniqueStatus string `long:"unique-status" default:"403,200,404" description:"Strings (comma split), custom unique status" config:"unique-status"`
Unique bool `long:"unique" description:"Bool, unique response" config:"unique"`
RetryCount int `long:"retry" default:"0" description:"Int, retry count" config:"retry"`
SimhashDistance int `long:"distance" default:"5" config:"distance"`
SimhashDistance int `long:"sim-distance" default:"5" config:"sim-distance"`
}
type MiscOptions struct {
@@ -200,7 +201,7 @@ func (opt *Option) PrepareRunner() (*Runner, error) {
r.ClientType = ihttp.STANDARD
}
if opt.Threads == DefaultThreads && opt.CheckOnly {
if opt.Threads == DefaultThreads && len(opt.Dictionaries) == 0 {
r.Threads = 1000
}
@@ -291,22 +292,28 @@ func (opt *Option) PrepareRunner() (*Runner, error) {
}
// prepare word
dicts := make([][]string, len(opt.Dictionaries))
if len(opt.Dictionaries) == 0 && opt.Word == "" && !opt.NoDict {
var dicts [][]string
if opt.DefaultDict {
dicts = append(dicts, pkg.LoadDefaultDict())
logs.Log.Warn("not set any dictionary, use default dictionary: https://github.com/maurosoria/dirsearch/blob/master/db/dicc.txt")
} else {
for i, f := range opt.Dictionaries {
dicts[i], err = loadFileToSlice(f)
if opt.ResumeFrom != "" {
dictCache[f] = dicts[i]
}
if err != nil {
return nil, err
}
logs.Log.Logf(pkg.LogVerbose, "Loaded %d word from %s", len(dicts[i]), f)
logs.Log.Info("not set any dictionary, use default dictionary: https://github.com/maurosoria/dirsearch/blob/master/db/dicc.txt")
}
for i, f := range opt.Dictionaries {
dict, err := loadFileToSlice(f)
if err != nil {
return nil, err
}
if err != nil {
return nil, err
}
dicts = append(dicts, dict)
if opt.ResumeFrom != "" {
dictCache[f] = dicts[i]
}
logs.Log.Logf(pkg.LogVerbose, "Loaded %d word from %s", len(dicts[i]), f)
}
if len(dicts) == 0 {
r.IsCheck = true
}
if opt.Word == "" {
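The interleaved old and new lines above are hard to follow without +/- markers, so here is a hedged reconstruction of what the new wordlist assembly appears to do: -D seeds the built-in dirsearch dictionary, every -d file is appended as its own slice, and if nothing was loaded at all the runner switches itself into check mode. The sketch is self-contained with stand-in helpers; the real code lives in PrepareRunner and uses pkg.LoadDefaultDict and loadFileToSlice.

package main

import (
	"fmt"
	"os"
	"strings"
)

// stand-ins for pkg.LoadDefaultDict and loadFileToSlice
func loadDefaultDict() []string { return []string{"admin", "index.php", "backup.zip"} }

func loadFileToSlice(name string) ([]string, error) {
	data, err := os.ReadFile(name)
	if err != nil {
		return nil, err
	}
	return strings.Fields(string(data)), nil
}

// buildDicts mirrors the new logic: the default dict is opt-in, each file is
// appended, and an empty result flips the runner into check-only mode.
func buildDicts(useDefault bool, files []string) ([][]string, bool, error) {
	var dicts [][]string
	if useDefault {
		dicts = append(dicts, loadDefaultDict())
	}
	for _, f := range files {
		words, err := loadFileToSlice(f)
		if err != nil {
			return nil, false, err
		}
		dicts = append(dicts, words)
	}
	return dicts, len(dicts) == 0, nil // second value plays the role of r.IsCheck
}

func main() {
	dicts, isCheck, _ := buildDicts(false, nil) // no -D and no -d files
	fmt.Println(len(dicts), isCheck)            // 0 true
}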

View File

@@ -30,7 +30,7 @@ type Config struct {
ProcessCh chan *pkg.Baseline
OutputCh chan *pkg.Baseline
FuzzyCh chan *pkg.Baseline
OutLocker *sync.WaitGroup
Outwg *sync.WaitGroup
RateLimit int
CheckPeriod int
ErrPeriod int32

View File

@@ -153,12 +153,12 @@ func (pool *BasePool) putToOutput(bl *pkg.Baseline) {
if bl.IsValid || bl.IsFuzzy {
bl.Collect()
}
pool.OutLocker.Add(1)
pool.Outwg.Add(1)
pool.OutputCh <- bl
}
func (pool *BasePool) putToFuzzy(bl *pkg.Baseline) {
pool.OutLocker.Add(1)
pool.Outwg.Add(1)
bl.IsFuzzy = true
pool.FuzzyCh <- bl
}
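The rename from OutLocker to Outwg is cosmetic, but the pattern it names is worth spelling out: every baseline handed to OutputCh or FuzzyCh is preceded by an Add(1), the writer side calls Done after handling it, and, as the runner change further down shows, shutdown becomes a single Wait instead of polling the channel. A minimal, self-contained sketch of that handshake (not the project's code):

package main

import (
	"fmt"
	"sync"
)

func main() {
	var outwg sync.WaitGroup
	outputCh := make(chan string, 100)

	// writer goroutine: consumes baselines and marks each one as flushed
	go func() {
		for bl := range outputCh {
			fmt.Println("write:", bl)
			outwg.Done()
		}
	}()

	// producers (the pools): register every item before queuing it
	for i := 0; i < 3; i++ {
		outwg.Add(1)
		outputCh <- fmt.Sprintf("baseline-%d", i)
	}

	outwg.Wait() // returns only after every queued item has been written
	close(outputCh)
}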

View File

@@ -14,7 +14,6 @@ import (
"github.com/vbauerster/mpb/v8"
"github.com/vbauerster/mpb/v8/decor"
"sync"
"time"
)
var (
@@ -36,7 +35,7 @@ type Runner struct {
outputCh chan *pkg.Baseline
fuzzyCh chan *pkg.Baseline
bar *mpb.Bar
finished int
IsCheck bool
Pools *ants.PoolWithFunc
PoolName map[string]bool
Tasks chan *Task
@@ -73,7 +72,7 @@ func (r *Runner) PrepareConfig() *pool.Config {
Mod: pool.ModMap[r.Mod],
OutputCh: r.outputCh,
FuzzyCh: r.fuzzyCh,
OutLocker: r.outwg,
Outwg: r.outwg,
Fuzzy: r.Fuzzy,
CheckPeriod: r.CheckPeriod,
ErrPeriod: int32(r.ErrPeriod),
@@ -113,15 +112,15 @@ func (r *Runner) AppendFunction(fn func(string) []string) {
func (r *Runner) Prepare(ctx context.Context) error {
var err error
if r.CheckOnly {
if r.IsCheck {
// check-only mode, similar to httpx
r.Pools, err = ants.NewPoolWithFunc(1, func(i interface{}) {
config := r.PrepareConfig()
pool, err := pool.NewCheckPool(ctx, config)
checkPool, err := pool.NewCheckPool(ctx, config)
if err != nil {
logs.Log.Error(err.Error())
pool.Cancel()
checkPool.Cancel()
r.poolwg.Done()
return
}
@@ -133,10 +132,10 @@ func (r *Runner) Prepare(ctx context.Context) error {
}
close(ch)
}()
pool.Worder = words.NewWorderWithChan(ch)
pool.Worder.Fns = r.Fns
pool.Bar = pkg.NewBar("check", r.Count-r.Offset, pool.Statistor, r.Progress)
pool.Run(ctx, r.Offset, r.Count)
checkPool.Worder = words.NewWorderWithChan(ch)
checkPool.Worder.Fns = r.Fns
checkPool.Bar = pkg.NewBar("check", r.Count-r.Offset, checkPool.Statistor, r.Progress)
checkPool.Run(ctx, r.Offset, r.Count)
r.poolwg.Done()
})
} else {
@@ -162,57 +161,57 @@ func (r *Runner) Prepare(ctx context.Context) error {
config := r.PrepareConfig()
config.BaseURL = t.baseUrl
pool, err := pool.NewBrutePool(ctx, config)
brutePool, err := pool.NewBrutePool(ctx, config)
if err != nil {
logs.Log.Error(err.Error())
pool.Cancel()
brutePool.Cancel()
r.Done()
return
}
if t.origin != nil && len(r.Wordlist) == 0 {
// for a task resumed from a checkpoint, restore word, dict and rule automatically, at lower priority than command-line options
pool.Statistor = pkg.NewStatistorFromStat(t.origin.Statistor)
pool.Worder, err = t.origin.InitWorder(r.Fns)
brutePool.Statistor = pkg.NewStatistorFromStat(t.origin.Statistor)
brutePool.Worder, err = t.origin.InitWorder(r.Fns)
if err != nil {
logs.Log.Error(err.Error())
r.Done()
return
}
pool.Statistor.Total = t.origin.sum
brutePool.Statistor.Total = t.origin.sum
} else {
pool.Statistor = pkg.NewStatistor(t.baseUrl)
pool.Worder = words.NewWorder(r.Wordlist)
pool.Worder.Fns = r.Fns
pool.Worder.Rules = r.Rules.Expressions
brutePool.Statistor = pkg.NewStatistor(t.baseUrl)
brutePool.Worder = words.NewWorder(r.Wordlist)
brutePool.Worder.Fns = r.Fns
brutePool.Worder.Rules = r.Rules.Expressions
}
var limit int
if pool.Statistor.Total > r.Limit && r.Limit != 0 {
if brutePool.Statistor.Total > r.Limit && r.Limit != 0 {
limit = r.Limit
} else {
limit = pool.Statistor.Total
limit = brutePool.Statistor.Total
}
pool.Bar = pkg.NewBar(config.BaseURL, limit-pool.Statistor.Offset, pool.Statistor, r.Progress)
logs.Log.Importantf("[pool] task: %s, total %d words, %d threads, proxy: %s", pool.BaseURL, limit-pool.Statistor.Offset, pool.Thread, pool.ProxyAddr)
err = pool.Init()
brutePool.Bar = pkg.NewBar(config.BaseURL, limit-brutePool.Statistor.Offset, brutePool.Statistor, r.Progress)
logs.Log.Importantf("[pool] task: %s, total %d words, %d threads, proxy: %s", brutePool.BaseURL, limit-brutePool.Statistor.Offset, brutePool.Thread, brutePool.ProxyAddr)
err = brutePool.Init()
if err != nil {
pool.Statistor.Error = err.Error()
brutePool.Statistor.Error = err.Error()
if !r.Force {
// if force is not enabled, a failed init closes the pool
pool.Close()
r.PrintStat(pool)
brutePool.Close()
r.PrintStat(brutePool)
r.Done()
return
}
}
pool.Run(pool.Statistor.Offset, limit)
brutePool.Run(brutePool.Statistor.Offset, limit)
if pool.IsFailed && len(pool.FailedBaselines) > 0 {
if brutePool.IsFailed && len(brutePool.FailedBaselines) > 0 {
// if the pool exited because errors accumulated, point end at the first failure so that resume does not skip a large number of targets
pool.Statistor.End = pool.FailedBaselines[0].Number
brutePool.Statistor.End = brutePool.FailedBaselines[0].Number
}
r.PrintStat(pool)
r.PrintStat(brutePool)
r.Done()
})
}
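Most of the hunk above is a mechanical rename, but it removes a real hazard: the local variable pool shadowed the imported pool package inside the worker closure, so any later reference to the package name in that scope would silently resolve to the variable. A tiny, hypothetical illustration of the problem, using the standard strings package as the stand-in:

package main

import (
	"fmt"
	"strings" // plays the role of the imported pool package
)

func main() {
	strings := strings.NewReplacer("a", "b") // local variable now hides the package name
	// strings.ToUpper("abc")                // would no longer compile: *Replacer has no ToUpper
	fmt.Println(strings.Replace("abc"))      // only the variable's own methods remain reachable
}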
@@ -224,28 +223,6 @@ func (r *Runner) Prepare(ctx context.Context) error {
return nil
}
func (r *Runner) AddRecursive(bl *pkg.Baseline) {
// new task spawned by recursion
task := &Task{
baseUrl: bl.UrlString,
depth: bl.RecuDepth + 1,
origin: NewOrigin(pkg.NewStatistor(bl.UrlString)),
}
r.AddPool(task)
}
func (r *Runner) AddPool(task *Task) {
// new task spawned by recursion
if _, ok := r.PoolName[task.baseUrl]; ok {
logs.Log.Importantf("already added pool, skip %s", task.baseUrl)
return
}
task.depth++
r.poolwg.Add(1)
r.Pools.Invoke(task)
}
func (r *Runner) Run(ctx context.Context) {
Loop:
for {
@@ -294,13 +271,29 @@ Loop:
}
}
for {
if len(r.outputCh) == 0 {
break
}
r.outwg.Wait()
}
func (r *Runner) AddRecursive(bl *pkg.Baseline) {
// new task spawned by recursion
task := &Task{
baseUrl: bl.UrlString,
depth: bl.RecuDepth + 1,
origin: NewOrigin(pkg.NewStatistor(bl.UrlString)),
}
time.Sleep(100 * time.Millisecond) // wait 100ms for all pending data to be processed
r.AddPool(task)
}
func (r *Runner) AddPool(task *Task) {
// new task spawned by recursion
if _, ok := r.PoolName[task.baseUrl]; ok {
logs.Log.Importantf("already added pool, skip %s", task.baseUrl)
return
}
task.depth++
r.poolwg.Add(1)
r.Pools.Invoke(task)
}
func (r *Runner) addBar(total int) {
@@ -329,7 +322,6 @@ func (r *Runner) Done() {
if r.bar != nil {
r.bar.Increment()
}
r.finished++
r.poolwg.Done()
}
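With the finished counter gone, completion tracking rests entirely on the progress bar and the two wait groups: poolwg balances every AddPool/Invoke against a Done in the worker, and outwg (shown earlier) covers the output side. A hedged sketch of that pool lifecycle, using github.com/panjf2000/ants/v2 as the diff does, with a simplified task type standing in for *Task:

package main

import (
	"fmt"
	"sync"

	"github.com/panjf2000/ants/v2"
)

func main() {
	var poolwg sync.WaitGroup

	pools, err := ants.NewPoolWithFunc(2, func(i interface{}) {
		defer poolwg.Done() // the real worker calls r.Done(), which also ticks the bar
		fmt.Println("spraying", i.(string))
	})
	if err != nil {
		panic(err)
	}
	defer pools.Release()

	for _, target := range []string{"http://a.example", "http://b.example"} {
		poolwg.Add(1) // AddPool: register before handing the task to the worker pool
		_ = pools.Invoke(target)
	}
	poolwg.Wait() // Run(): block until every pool has finished
}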