Mirror of https://github.com/chainreactors/spray.git (synced 2025-06-21 10:21:50 +00:00)
Extensive optimizations; implement resume from saved progress (checkpoint resume); add follow-up handling for Ctrl+C
This commit is contained in:
parent 75c33e1135
commit 0d81491d5a
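The two user-visible changes in this commit are the Ctrl+C handling and the resume-from-stat support implemented in the diffs below. As a quick orientation, here is a minimal, self-contained Go sketch of the signal-to-cancellation pattern that cmd/cmd.go adopts; the fixed 3600-second deadline and the standalone main function are placeholders for illustration, not spray's actual entry point.

```go
package main

import (
	"context"
	"fmt"
	"os"
	"os/signal"
	"syscall"
	"time"
)

func main() {
	// Deadline-bound root context, standing in for runner.Deadline in cmd/cmd.go.
	ctx, canceler := context.WithTimeout(context.Background(), 3600*time.Second)
	defer canceler()

	// Translate Ctrl+C / SIGTERM into a context cancellation so that pools
	// selecting on ctx.Done() can flush their stat files before exiting.
	c := make(chan os.Signal, 2)
	signal.Notify(c, os.Interrupt, syscall.SIGTERM)
	go func() {
		<-c
		fmt.Println("exit signal, save stat and exit")
		canceler()
	}()

	// The real code runs the spray pools here; they stop once ctx is done.
	<-ctx.Done()
}
```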
cmd/cmd.go (15 changes)
@@ -6,6 +6,9 @@ import (
"github.com/chainreactors/logs"
"github.com/chainreactors/spray/internal"
"github.com/jessevdk/go-flags"
"os"
"os/signal"
"syscall"
"time"
)

@@ -31,7 +34,7 @@ func Spray() {
return
}

ctx, _ := context.WithTimeout(context.Background(), time.Duration(runner.Deadline)*time.Second)
ctx, canceler := context.WithTimeout(context.Background(), time.Duration(runner.Deadline)*time.Second)

err = runner.Prepare(ctx)
if err != nil {
@@ -39,6 +42,16 @@ func Spray() {
return
}

go func() {
c := make(chan os.Signal, 2)
signal.Notify(c, os.Interrupt, syscall.SIGTERM)
go func() {
<-c
fmt.Println("exit signal, save stat and exit")
canceler()
}()
}()

if runner.CheckOnly {
runner.RunWithCheck(ctx)
} else {
go.mod (4 changes)

@@ -3,13 +3,13 @@ module github.com/chainreactors/spray
go 1.17

require (
github.com/chainreactors/files v0.2.4
github.com/chainreactors/files v0.2.5-0.20221212083256-16ee4c1ae47e
github.com/chainreactors/go-metrics v0.0.0-20220926021830-24787b7a10f8
github.com/chainreactors/gogo/v2 v2.9.5-0.20221110124606-bb8c89742d4d
github.com/chainreactors/ipcs v0.0.13
github.com/chainreactors/logs v0.6.2
github.com/chainreactors/parsers v0.2.7
github.com/chainreactors/words v0.3.2-0.20221210163218-dc834b0519bc
github.com/chainreactors/words v0.3.2-0.20221212045930-0c976fe39aea
)

require (
go.sum (4 changes)

@@ -8,6 +8,8 @@ github.com/chainreactors/files v0.2.0/go.mod h1:/Xa9YXhjBlaC33JTD6ZTJFig6pcplak2
github.com/chainreactors/files v0.2.3/go.mod h1:/Xa9YXhjBlaC33JTD6ZTJFig6pcplak2IDcovf42/6A=
github.com/chainreactors/files v0.2.4 h1:R0iCqjWLcwwLoSi87FpgUlpxZAd+W4ZLQF3lkoLWZi0=
github.com/chainreactors/files v0.2.4/go.mod h1:/Xa9YXhjBlaC33JTD6ZTJFig6pcplak2IDcovf42/6A=
github.com/chainreactors/files v0.2.5-0.20221212083256-16ee4c1ae47e h1:Y/NB1dgNmglJlyMNuXcWItZGujbAAa0BbJNXM+CuuVo=
github.com/chainreactors/files v0.2.5-0.20221212083256-16ee4c1ae47e/go.mod h1:/Xa9YXhjBlaC33JTD6ZTJFig6pcplak2IDcovf42/6A=
github.com/chainreactors/go-metrics v0.0.0-20220926021830-24787b7a10f8 h1:kMFr1Hj+rkp1wBPIw2pcQvelO5GnA7r7wY3h6vJ1joA=
github.com/chainreactors/go-metrics v0.0.0-20220926021830-24787b7a10f8/go.mod h1:7NDvFERNiXsujaBPD6s4WXj52uKdfnF2zVHQtKXIEV4=
github.com/chainreactors/gogo/v2 v2.9.5-0.20221110124606-bb8c89742d4d h1:LrbxNFvUrCooEIbQd4JicGFxRCs5KNov8xjJb8b7udw=
@@ -23,6 +25,8 @@ github.com/chainreactors/parsers v0.2.7 h1:3iEuluL7gSDrElZWyf1KEiTgddgcoZC0IaIHb
github.com/chainreactors/parsers v0.2.7/go.mod h1:Z9weht+lnFCk7UcwqFu6lXpS7u5vttiy0AJYOAyCCLA=
github.com/chainreactors/words v0.3.2-0.20221210163218-dc834b0519bc h1:VBKKX6Uc6pJA9ST48m1p6H8V2mm1UIypIboFBaGNbMY=
github.com/chainreactors/words v0.3.2-0.20221210163218-dc834b0519bc/go.mod h1:jRcFgafTKqdkd1+StzPCTJG1ESrZHluXEO2eERdHBMQ=
github.com/chainreactors/words v0.3.2-0.20221212045930-0c976fe39aea h1:YQafXeVeh1uDH+tQEHJo12sICgJWPqaVvPtIQW7JN5A=
github.com/chainreactors/words v0.3.2-0.20221212045930-0c976fe39aea/go.mod h1:jRcFgafTKqdkd1+StzPCTJG1ESrZHluXEO2eERdHBMQ=
github.com/davecgh/go-spew v0.0.0-20161028175848-04cdfd42973b/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
@@ -18,7 +18,6 @@ func NewCheckPool(ctx context.Context, config *pkg.Config) (*CheckPool, error) {
ctx: pctx,
cancel: cancel,
client: ihttp.NewClient(config.Thread, 2, config.ClientType),
worder: words.NewWorder(config.Wordlist, config.Fns),
wg: sync.WaitGroup{},
reqCount: 1,
failedCount: 1,
@@ -86,6 +85,7 @@ func (p *CheckPool) Close() {
}

func (p *CheckPool) Run(ctx context.Context, offset, limit int) {
p.worder.Run()
Loop:
for {
select {
@@ -12,6 +12,7 @@ import (
"github.com/chainreactors/words/rule"
"github.com/gosuri/uiprogress"
"io/ioutil"
"net/url"
"os"
"regexp"
"strconv"
@@ -19,27 +20,31 @@ import (
)

type Option struct {
InputOptions `group:"Input Options"`
OutputOptions `group:"Output Options"`
RequestOptions `group:"Request Options"`
ModeOptions `group:"Modify Options"`
MiscOptions `group:"Miscellaneous Options"`
InputOptions `group:"Input Options"`
FunctionOptions `group:"Function Options"`
OutputOptions `group:"Output Options"`
RequestOptions `group:"Request Options"`
ModeOptions `group:"Modify Options"`
MiscOptions `group:"Miscellaneous Options"`
}

type InputOptions struct {
ResumeFrom string `long:"resume-from"`
URL string `short:"u" long:"url" description:"String, input baseurl (separated by commas), e.g.: http://google.com, http://baidu.com"`
URLFile string `short:"l" long:"list" description:"File, input filename"`
Offset int `long:"offset" description:"Int, wordlist offset"`
Limit int `long:"limit" description:"Int, wordlist limit, start with offset. e.g.: --offset 1000 --limit 100"`
Dictionaries []string `short:"d" long:"dict" description:"Files, dict files, e.g.: -d 1.txt -d 2.txt"`
Word string `short:"w" long:"word" description:"String, word generate dsl, e.g.: -w test{?ld#4}"`
FilterRule string `long:"rule-filter" description:"String, filter rule, e.g.: --rule-filter '>8'"`
Rules []string `short:"r" long:"rules" description:"Files, rule files, e.g.: -r rule1.txt -r rule2.txt"`
ResumeFrom string `long:"resume-from"`
URL string `short:"u" long:"url" description:"String, input baseurl (separated by commas), e.g.: http://google.com, http://baidu.com"`
URLFile string `short:"l" long:"list" description:"File, input filename"`
Offset int `long:"offset" description:"Int, wordlist offset"`
Limit int `long:"limit" description:"Int, wordlist limit, start with offset. e.g.: --offset 1000 --limit 100"`
Dictionaries []string `short:"d" long:"dict" description:"Files, dict files, e.g.: -d 1.txt -d 2.txt"`
Word string `short:"w" long:"word" description:"String, word generate dsl, e.g.: -w test{?ld#4}"`
FilterRule string `long:"rule-filter" description:"String, filter rule, e.g.: --rule-filter '>8'"`
Rules []string `short:"r" long:"rules" description:"Files, rule files, e.g.: -r rule1.txt -r rule2.txt"`
}

type FunctionOptions struct {
Extensions string `short:"e" long:"extension" description:"String, add extensions (separated by commas), e.g.: -e jsp,jspx"`
ExcludeExtensions string `long:"exclude-extension" description:"String, exclude extensions (separated by commas), e.g.: --exclude-extension jsp,jspx"`
RemoveExtensions string `long:"remove-extension" description:"String, remove extensions (separated by commas), e.g.: --remove-extension jsp,jspx"`
Uppercase bool `short:"U" long:"uppercase" description:"Bool, upper wordlist, e.g.: --uppercase"`
Uppercase bool `short:"U" long:"uppercase" desvcription:"Bool, upper wordlist, e.g.: --uppercase"`
Lowercase bool `short:"L" long:"lowercase" description:"Bool, lower wordlist, e.g.: --lowercase"`
Prefixes []string `long:"prefix" description:"Strings, add prefix, e.g.: --prefix aaa --prefix bbb"`
Suffixes []string `long:"suffix" description:"Strings, add suffix, e.g.: --suffix aaa --suffix bbb"`
@@ -189,6 +194,9 @@ func (opt *Option) PrepareRunner() (*Runner, error) {
dicts := make([][]string, len(opt.Dictionaries))
for i, f := range opt.Dictionaries {
dicts[i], err = loadFileToSlice(f)
if opt.ResumeFrom != "" {
dictCache[f] = dicts[i]
}
if err != nil {
return nil, err
}
@@ -223,17 +231,12 @@ func (opt *Option) PrepareRunner() (*Runner, error) {
opt.Word += "{@ext}"
}

mask.CustomWords = dicts
r.Wordlist, err = mask.Run(opt.Word)
r.Wordlist, err = mask.Run(opt.Word, dicts)
if err != nil {
return nil, err
}
logs.Log.Importantf("Parsed %d words by %s", len(r.Wordlist), opt.Word)
pkg.DefaultStatistor = pkg.Statistor{
Word: opt.Word,
WordCount: len(r.Wordlist),
Dictionaries: opt.Dictionaries,
Offset: opt.Offset,
if len(r.Wordlist) > 0 {
logs.Log.Importantf("Parsed %d words by %s", len(r.Wordlist), opt.Word)
}

if opt.Rules != nil {
@@ -257,14 +260,24 @@ func (opt *Option) PrepareRunner() (*Runner, error) {
} else {
r.Total = len(r.Wordlist)
}

if opt.Limit != 0 {
if total := r.Offset + opt.Limit; total < r.Total {
r.Total = total
}
pkg.DefaultStatistor = pkg.Statistor{
Word: opt.Word,
WordCount: len(r.Wordlist),
Dictionaries: opt.Dictionaries,
Offset: opt.Offset,
RuleFiles: opt.Rules,
RuleFilter: opt.FilterRule,
Total: r.Total,
}

// prepare task
var u *url.URL
if opt.URL != "" {
u, err = url.Parse(opt.URL)
if err != nil {
u, _ = url.Parse("http://" + opt.URL)
}
}
var tasks []*Task
var taskfrom string
if opt.ResumeFrom != "" {
@@ -274,14 +287,15 @@ func (opt *Option) PrepareRunner() (*Runner, error) {
}
taskfrom = "resume " + opt.ResumeFrom
for _, stat := range stats {
tasks = append(tasks, &Task{baseUrl: stat.BaseUrl, offset: stat.Offset + stat.End, total: r.Total})
task := &Task{baseUrl: stat.BaseUrl, origin: stat}
tasks = append(tasks, task)
}
} else {
var file *os.File
var urls []string
if opt.URL != "" {
urls = append(urls, opt.URL)
tasks = append(tasks, &Task{baseUrl: opt.URL, offset: opt.Offset, total: r.Total})
if u != nil {
urls = append(urls, u.String())
tasks = append(tasks, &Task{baseUrl: opt.URL})
taskfrom = "cmd"
} else if opt.URLFile != "" {
file, err = os.Open(opt.URLFile)
@@ -301,7 +315,7 @@ func (opt *Option) PrepareRunner() (*Runner, error) {
}
urls := strings.Split(strings.TrimSpace(string(content)), "\n")
for _, u := range urls {
tasks = append(tasks, &Task{baseUrl: strings.TrimSpace(u), offset: opt.Offset, total: r.Total})
tasks = append(tasks, &Task{baseUrl: strings.TrimSpace(u)})
}
}
if opt.CheckOnly {
@@ -421,7 +435,20 @@ func (opt *Option) PrepareRunner() (*Runner, error) {
return nil, err
}
}
r.StatFile, err = files.NewFile("stat.json", false, false, true)
if opt.ResumeFrom != "" {
r.StatFile, err = files.NewFile(opt.ResumeFrom, false, true, true)
} else if opt.URLFile != "" {
r.StatFile, err = files.NewFile(opt.URLFile+".stat", false, true, true)
} else if taskfrom == "stdin" {
r.StatFile, err = files.NewFile("stdin.stat", false, true, true)
} else if u != nil {
r.StatFile, err = files.NewFile(u.Host, false, true, true)
}
if err != nil {
return nil, err
}
r.StatFile.Mod = os.O_WRONLY | os.O_CREATE
err = r.StatFile.Init()
if err != nil {
return nil, err
}
@@ -433,56 +460,17 @@ func (opt *Option) Validate() bool {
logs.Log.Error("Cannot set -U and -L at the same time")
return false
}

if (opt.Offset != 0 || opt.Limit != 0) && opt.Depth > 0 {
// Using offset/limit together with recursion would also cause confusion.
logs.Log.Error("--offset and --limit cannot be used with --depth at the same time")
return false
}

if opt.Depth > 0 && opt.ResumeFrom != "" {
// Recursion conflicts with resuming: a resumed run takes its word and rule from the stat file, not from the command line.
logs.Log.Error("--resume-from and --depth cannot be used at the same time")
return false
}
return true
}

func loadFileToSlice(filename string) ([]string, error) {
var ss []string
content, err := ioutil.ReadFile(filename)
if err != nil {
return nil, err
}

ss = strings.Split(strings.TrimSpace(string(content)), "\n")

// Normalize the CR/LF difference between Windows and Linux
for i, word := range ss {
ss[i] = strings.TrimSpace(word)
}

return ss, nil
}

func parseExtension(s string) string {
if i := strings.Index(s, "."); i != -1 {
return s[i+1:]
}
return ""
}

func StringsContains(s []string, e string) bool {
for _, v := range s {
if v == e {
return true
}
}
return false
}

func IntsContains(s []int, e int) bool {
for _, v := range s {
if v == e {
return true
}
}
return false
}

type Task struct {
baseUrl string
offset int
total int
depth int
//wordlist []string
//rule []rule.Expression
}
@@ -31,11 +31,9 @@ func NewPool(ctx context.Context, config *pkg.Config) (*Pool, error) {
pctx, cancel := context.WithCancel(ctx)
pool := &Pool{
Config: config,
Statistor: pkg.NewStatistor(config.BaseURL),
ctx: pctx,
cancel: cancel,
client: ihttp.NewClient(config.Thread, 2, config.ClientType),
worder: words.NewWorder(config.Wordlist, config.Fns),
baselines: make(map[int]*pkg.Baseline),
tempCh: make(chan *pkg.Baseline, config.Thread),
checkCh: make(chan *Unit),
@@ -45,8 +43,6 @@ func NewPool(ctx context.Context, config *pkg.Config) (*Pool, error) {
failedCount: 1,
}

pool.worder.Rules = pool.Rules
pool.worder.RunWithRules()
p, _ := ants.NewPoolWithFunc(config.Thread, func(i interface{}) {
atomic.AddInt32(&pool.Statistor.ReqTotal, 1)
unit := i.(*Unit)
@@ -307,6 +303,7 @@ func (pool *Pool) genReq(s string) (*ihttp.Request, error) {
return nil, fmt.Errorf("unknown mod")
}
func (pool *Pool) Run(ctx context.Context, offset, limit int) {
pool.worder.RunWithRules()
Loop:
for {
select {
@@ -324,9 +321,6 @@ Loop:
break Loop
}

for _, fn := range pool.Fns {
u = fn(u)
}
if u == "" {
continue
}
@@ -8,6 +8,7 @@ import (
"github.com/chainreactors/logs"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/spray/pkg/ihttp"
"github.com/chainreactors/words"
"github.com/chainreactors/words/rule"
"github.com/gosuri/uiprogress"
"github.com/panjf2000/ants/v2"
@@ -23,6 +24,12 @@ var (
WAFStatus = []int{493, 418}
)

var (
dictCache = make(map[string][]string)
wordlistCache = make(map[string][]string)
ruleCache = make(map[string][]rule.Expression)
)

type Runner struct {
taskCh chan *Task
poolwg sync.WaitGroup
@@ -56,6 +63,7 @@ type Runner struct {
Force bool
Progress *uiprogress.Progress
Offset int
Limit int
Total int
Deadline int
CheckPeriod int
@@ -70,8 +78,6 @@ func (r *Runner) PrepareConfig() *pkg.Config {
Timeout: r.Timeout,
Headers: r.Headers,
Mod: pkg.ModMap[r.Mod],
Fns: r.Fns,
Rules: r.Rules,
OutputCh: r.OutputCh,
FuzzyCh: r.FuzzyCh,
Fuzzy: r.Fuzzy,
@@ -96,7 +102,7 @@ func (r *Runner) Prepare(ctx context.Context) error {
// check only, similar to httpx
r.Pools, err = ants.NewPoolWithFunc(1, func(i interface{}) {
config := r.PrepareConfig()
config.Wordlist = r.URLList

pool, err := NewCheckPool(ctx, config)
if err != nil {
logs.Log.Error(err.Error())
@@ -104,6 +110,7 @@ func (r *Runner) Prepare(ctx context.Context) error {
r.poolwg.Done()
return
}
pool.worder = words.NewWorderWithFns(r.URLList, r.Fns)
pool.bar = pkg.NewBar("check", r.Total-r.Offset, r.Progress)
pool.Run(ctx, r.Offset, r.Total)
r.poolwg.Done()
@@ -127,9 +134,14 @@ func (r *Runner) Prepare(ctx context.Context) error {

r.Pools, err = ants.NewPoolWithFunc(r.PoolSize, func(i interface{}) {
t := i.(*Task)
if t.origin.End == t.origin.Total {
r.StatFile.SafeWrite(t.origin.Json())
r.Done()
return
}
config := r.PrepareConfig()
config.BaseURL = t.baseUrl
config.Wordlist = r.Wordlist

pool, err := NewPool(ctx, config)
if err != nil {
logs.Log.Error(err.Error())
@@ -137,8 +149,41 @@ func (r *Runner) Prepare(ctx context.Context) error {
r.Done()
return
}
pool.Statistor.Total = r.Total
pool.bar = pkg.NewBar(config.BaseURL, t.total-t.offset, r.Progress)
if t.origin != nil && len(r.Wordlist) == 0 {
// For a task restored from a checkpoint, word, dict and rule are filled in automatically, with lower priority than command-line arguments
pool.Statistor = pkg.NewStatistorFromStat(t.origin)
wl, err := loadWordlist(t.origin.Word, t.origin.Dictionaries)
if err != nil {
logs.Log.Error(err.Error())
r.Done()
return
}
pool.worder = words.NewWorderWithFns(wl, r.Fns)
rules, err := loadRuleWithFiles(t.origin.RuleFiles, t.origin.RuleFilter)
if err != nil {
logs.Log.Error(err.Error())
r.Done()
return
}
pool.worder.Rules = rules
if len(rules) > 0 {
pool.Statistor.Total = len(rules) * len(wl)
} else {
pool.Statistor.Total = len(wl)
}
} else {
pool.Statistor = pkg.NewStatistor(t.baseUrl)
pool.worder = words.NewWorderWithFns(r.Wordlist, r.Fns)
pool.worder.Rules = r.Rules
}

var limit int
if pool.Statistor.Total > r.Limit && r.Limit != 0 {
limit = r.Limit
} else {
limit = pool.Statistor.Total
}
pool.bar = pkg.NewBar(config.BaseURL, limit-pool.Statistor.Offset, r.Progress)
err = pool.Init()
if err != nil {
logs.Log.Error(err.Error())
@@ -150,7 +195,7 @@ func (r *Runner) Prepare(ctx context.Context) error {
}
}

pool.Run(ctx, t.offset, t.total)
pool.Run(ctx, pool.Statistor.Offset, limit)
logs.Log.Important(pool.Statistor.String())
logs.Log.Important(pool.Statistor.Detail())
if r.StatFile != nil {
@@ -184,7 +229,11 @@ Loop:
for {
select {
case <-ctx.Done():
logs.Log.Error("cancel with deadline")
for t := range r.taskCh {
stat := pkg.NewStatistor(t.baseUrl)
r.StatFile.SafeWrite(stat.Json())
}
logs.Log.Importantf("save all stat to %s", r.StatFile.Filename)
break Loop
case t, ok := <-r.taskCh:
if !ok {
@@ -286,7 +335,7 @@ func (r *Runner) Outputting() {
if bl.IsValid {
saveFunc(bl)
if bl.Recu {
r.AddPool(&Task{bl.UrlString, 0, r.Total, bl.RecuDepth + 1})
r.AddPool(&Task{baseUrl: bl.UrlString, depth: bl.RecuDepth + 1})
}
} else {
logs.Log.Debug(bl.String())
@@ -300,7 +349,6 @@ func (r *Runner) Outputting() {
if r.FuzzyFile != nil {
fuzzySaveFunc = func(bl *pkg.Baseline) {
r.FuzzyFile.SafeWrite(bl.Jsonify() + "\n")
r.FuzzyFile.SafeSync()
}
} else {
fuzzySaveFunc = func(bl *pkg.Baseline) {
@@ -1,5 +1,10 @@
package internal

import (
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/words/rule"
)

type ErrorType uint

const (
@@ -60,3 +65,10 @@ type Unit struct {
frontUrl string
reCount int
}

type Task struct {
baseUrl string
depth int
rule []rule.Expression
origin *pkg.Statistor
}
internal/utils.go (new file, 107 lines)

@@ -0,0 +1,107 @@
package internal

import (
"bytes"
"github.com/chainreactors/words/mask"
"github.com/chainreactors/words/rule"
"io/ioutil"
"strings"
)

func parseExtension(s string) string {
if i := strings.Index(s, "."); i != -1 {
return s[i+1:]
}
return ""
}

func StringsContains(s []string, e string) bool {
for _, v := range s {
if v == e {
return true
}
}
return false
}

func IntsContains(s []int, e int) bool {
for _, v := range s {
if v == e {
return true
}
}
return false
}

func loadFileToSlice(filename string) ([]string, error) {
var ss []string
content, err := ioutil.ReadFile(filename)
if err != nil {
return nil, err
}

ss = strings.Split(strings.TrimSpace(string(content)), "\n")

// Normalize the CR/LF difference between Windows and Linux
for i, word := range ss {
ss[i] = strings.TrimSpace(word)
}

return ss, nil
}

func loadFileWithCache(filename string) ([]string, error) {
if dict, ok := dictCache[filename]; ok {
return dict, nil
}
dict, err := loadFileToSlice(filename)
if err != nil {
return nil, err
}
dictCache[filename] = dict
return dict, nil
}

func loadDictionaries(filenames []string) ([][]string, error) {
dicts := make([][]string, len(filenames))
for i, name := range filenames {
dict, err := loadFileWithCache(name)
if err != nil {
return nil, err
}
dicts[i] = dict
}
return dicts, nil
}

func loadWordlist(word string, dictNames []string) ([]string, error) {
if wl, ok := wordlistCache[word+strings.Join(dictNames, ",")]; ok {
return wl, nil
}
dicts, err := loadDictionaries(dictNames)
if err != nil {
return nil, err
}
wl, err := mask.Run(word, dicts)
if err != nil {
return nil, err
}
wordlistCache[word] = wl
return wl, nil
}

func loadRuleWithFiles(ruleFiles []string, filter string) ([]rule.Expression, error) {
if rules, ok := ruleCache[strings.Join(ruleFiles, ",")]; ok {
return rules, nil
}
var rules bytes.Buffer
for _, filename := range ruleFiles {
content, err := ioutil.ReadFile(filename)
if err != nil {
return nil, err
}
rules.Write(content)
rules.WriteString("\n")
}
return rule.Compile(rules.String(), filter), nil
}
@@ -2,7 +2,6 @@ package pkg

import (
"github.com/antonmedv/expr/vm"
"github.com/chainreactors/words/rule"
"net/http"
)

@@ -22,8 +21,8 @@ var ModMap = map[string]SprayMod{

type Config struct {
BaseURL string
Wordlist []string
Thread int
Wordlist []string
Timeout int
CheckPeriod int
ErrPeriod int
@@ -32,8 +31,6 @@ type Config struct {
Mod SprayMod
Headers http.Header
ClientType int
Fns []func(string) string
Rules []rule.Expression
MatchExpr *vm.Program
FilterExpr *vm.Program
RecuExpr *vm.Program
@@ -20,6 +20,19 @@ func NewStatistor(url string) *Statistor {
return &stat
}

func NewStatistorFromStat(origin *Statistor) *Statistor {
return &Statistor{
BaseUrl: origin.BaseUrl,
Word: origin.Word,
Dictionaries: origin.Dictionaries,
Offset: origin.End,
RuleFiles: origin.RuleFiles,
RuleFilter: origin.RuleFilter,
Counts: make(map[int]int),
StartTime: time.Now().Unix(),
}
}

type Statistor struct {
BaseUrl string `json:"url"`
Counts map[int]int `json:"counts"`
@@ -39,6 +52,8 @@ type Statistor struct {
WordCount int `json:"word_count"`
Word string `json:"word"`
Dictionaries []string `json:"dictionaries"`
RuleFiles []string `json:"rule_files"`
RuleFilter string `json:"rule_filter"`
}

func (stat *Statistor) String() string {
@@ -84,15 +99,15 @@ func ReadStatistors(filename string) (Statistors, error) {
return nil, err
}
var stats Statistors
for _, line := range bytes.Split(content, []byte("\n")) {
for _, line := range bytes.Split(bytes.TrimSpace(content), []byte("\n")) {
var stat Statistor
err := json.Unmarshal(line, &stat)
if err != nil {
return nil, err
}
stats = append(stats, stat)
stats = append(stats, &stat)
}
return stats, nil
}

type Statistors []Statistor
type Statistors []*Statistor
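To show how the saved stats are meant to be consumed on restart: the stat file is one JSON object per line, and PrepareRunner rebuilds one Task per line with the old Statistor as its origin. The sketch below is a rough, standalone illustration using simplified local copies of Statistor and Task that keep only the json tags visible in this diff (url, word); the real structs live in pkg/statistor.go and internal/types.go and carry more fields, and the "stdin.stat" filename is just one of the names the diff shows.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"os"
)

// Trimmed-down copies of the structs shown in this diff, for illustration only.
type Statistor struct {
	BaseUrl string `json:"url"`
	Word    string `json:"word"`
}

type Task struct {
	baseUrl string
	origin  *Statistor
}

func main() {
	// The stat file written by StatFile.SafeWrite holds one JSON object per line.
	content, err := os.ReadFile("stdin.stat")
	if err != nil {
		fmt.Println(err)
		return
	}
	var tasks []*Task
	for _, line := range bytes.Split(bytes.TrimSpace(content), []byte("\n")) {
		stat := &Statistor{}
		if err := json.Unmarshal(line, stat); err != nil {
			fmt.Println(err)
			return
		}
		// Each saved target becomes a resumable task, mirroring PrepareRunner.
		tasks = append(tasks, &Task{baseUrl: stat.BaseUrl, origin: stat})
	}
	fmt.Printf("resuming %d tasks\n", len(tasks))
}
```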