Mirror of https://github.com/chainreactors/spray.git (synced 2025-09-15 11:40:13 +00:00)
Added --append-rule, which takes a rule file and runs a second, rule-based brute-force pass against every valid result.
Added rule list: filebak.txt
parent 436fb2f3f5
commit 9750f819cd
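For context, a minimal usage sketch of the new flag (the target URL, the dictionary name, and the -u flag are assumptions for illustration, not part of this commit):

    spray -u http://example.com -d dir.txt --append-rule rule/filebak.txt

Every path that comes back valid is then mutated by the append rules (e.g. index.php into index.php.bak, index.php~, ...) and requested again.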
go.mod
@@ -9,7 +9,7 @@ require (
 	github.com/chainreactors/ipcs v0.0.13
 	github.com/chainreactors/logs v0.7.1-0.20221214153111-85f123ff6580
 	github.com/chainreactors/parsers v0.2.9-0.20221210155102-cc0814762410
-	github.com/chainreactors/words v0.3.2-0.20221214154622-381fc37abdf9
+	github.com/chainreactors/words v0.3.2-0.20230105095023-67f7d4e9186a
 )
 
 require (
go.sum
@@ -37,6 +37,8 @@ github.com/chainreactors/words v0.3.2-0.20221214062855-48dff09b01ad h1:uL3TIQgvF
 github.com/chainreactors/words v0.3.2-0.20221214062855-48dff09b01ad/go.mod h1:QIWX1vMT5j/Mp9zx3/wgZh3FqskhjCbo/3Ffy/Hxj9w=
 github.com/chainreactors/words v0.3.2-0.20221214154622-381fc37abdf9 h1:IUNopSuorfINmn4pOuSwZtxJbg8zsRIZ67a33SiYoQ0=
 github.com/chainreactors/words v0.3.2-0.20221214154622-381fc37abdf9/go.mod h1:QIWX1vMT5j/Mp9zx3/wgZh3FqskhjCbo/3Ffy/Hxj9w=
+github.com/chainreactors/words v0.3.2-0.20230105095023-67f7d4e9186a h1:NoFfxJfPXiS2fzdmRIzWj4K+V7BRC2BAXlxQfckTeN0=
+github.com/chainreactors/words v0.3.2-0.20230105095023-67f7d4e9186a/go.mod h1:QIWX1vMT5j/Mp9zx3/wgZh3FqskhjCbo/3Ffy/Hxj9w=
 github.com/davecgh/go-spew v0.0.0-20161028175848-04cdfd42973b/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
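The go.mod/go.sum bump points the words dependency at the commit that exposes rule.Program and rule.RunAsStream used below. A bump like this would typically be produced with something along the lines of (illustrative, not part of the commit):

    go get github.com/chainreactors/words@67f7d4e9186a
    go mod tidy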
@ -38,8 +38,9 @@ type InputOptions struct {
|
||||
Limit int `long:"limit" description:"Int, wordlist limit, start with offset. e.g.: --offset 1000 --limit 100"`
|
||||
Dictionaries []string `short:"d" long:"dict" description:"Files, Multi,dict files, e.g.: -d 1.txt -d 2.txt"`
|
||||
Word string `short:"w" long:"word" description:"String, word generate dsl, e.g.: -w test{?ld#4}"`
|
||||
FilterRule string `long:"rule-filter" description:"String, filter rule, e.g.: --rule-filter '>8'"`
|
||||
Rules []string `short:"r" long:"rules" description:"Files, Multi, rule files, e.g.: -r rule1.txt -r rule2.txt"`
|
||||
AppendRule string `long:"append-rule" description:"File, when found valid path , use append rule generator new word with current path"`
|
||||
FilterRule string `long:"filter-rule" description:"String, filter rule, e.g.: --rule-filter '>8 <4'"`
|
||||
}
|
||||
|
||||
type FunctionOptions struct {
|
||||
@@ -274,13 +275,16 @@ func (opt *Option) PrepareRunner() (*Runner, error) {
 	} else if opt.FilterRule != "" {
 		// if filter rule is not empty, set rules to ":", force to open filter mode
 		r.Rules = rule.Compile(":", opt.FilterRule)
+	} else {
+		r.Rules = new(rule.Program)
 	}
 
-	if len(r.Rules) > 0 {
-		r.Total = len(r.Wordlist) * len(r.Rules)
+	if len(r.Rules.Expressions) > 0 {
+		r.Total = len(r.Wordlist) * len(r.Rules.Expressions)
 	} else {
 		r.Total = len(r.Wordlist)
 	}
 
 	pkg.DefaultStatistor = pkg.Statistor{
 		Word:      opt.Word,
 		WordCount: len(r.Wordlist),
@@ -291,6 +295,13 @@ func (opt *Option) PrepareRunner() (*Runner, error) {
 		Total:     r.Total,
 	}
 
+	if opt.AppendRule != "" {
+		content, err := ioutil.ReadFile(opt.AppendRule)
+		if err != nil {
+			return nil, err
+		}
+		r.AppendRules = rule.Compile(string(content), "")
+	}
 	// prepare task
 	var tasks []*Task
 	var taskfrom string
@@ -9,9 +9,11 @@ import (
 	"github.com/chainreactors/spray/pkg"
 	"github.com/chainreactors/spray/pkg/ihttp"
 	"github.com/chainreactors/words"
+	"github.com/chainreactors/words/rule"
 	"github.com/panjf2000/ants/v2"
 	"github.com/valyala/fasthttp"
 	"net/url"
+	"path"
 	"strconv"
 	"strings"
 	"sync"
@@ -36,7 +38,7 @@ func NewPool(ctx context.Context, config *pkg.Config) (*Pool, error) {
 		baselines:  make(map[int]*pkg.Baseline),
 		urls:       make(map[string]int),
 		tempCh:     make(chan *pkg.Baseline, config.Thread),
-		checkCh:    make(chan sourceType),
+		checkCh:    make(chan int),
 		additionCh: make(chan *Unit, 100),
 		wg:         sync.WaitGroup{},
 		initwg:     sync.WaitGroup{},
@@ -83,6 +85,7 @@ func NewPool(ctx context.Context, config *pkg.Config) (*Pool, error) {
 				bl.RedirectURL = "/" + strings.TrimLeft(bl.RedirectURL, "/")
 				bl.RedirectURL = pool.BaseURL + bl.RedirectURL
 			}
+			pool.wg.Add(1)
 			pool.doRedirect(bl, unit.depth)
 		}
 		pool.addFuzzyBaseline(bl)
@@ -105,6 +108,7 @@ func NewPool(ctx context.Context, config *pkg.Config) (*Pool, error) {
 			pool.initwg.Done()
 		case InitIndexSource:
 			pool.index = bl
+			pool.wg.Add(1)
 			pool.doCrawl(bl)
 			pool.initwg.Done()
 		case CheckSource:
@@ -140,7 +144,7 @@ func NewPool(ctx context.Context, config *pkg.Config) (*Pool, error) {
 		case RedirectSource:
 			bl.FrontURL = unit.frontUrl
 			pool.tempCh <- bl
-		case CrawlSource, ActiveSource:
+		case CrawlSource, ActiveSource, RuleSource:
 			pool.tempCh <- bl
 		}
 
@@ -196,7 +200,9 @@ func NewPool(ctx context.Context, config *pkg.Config) (*Pool, error) {
 
 		// to recurse, bl must be valid, mod must be path-spray, and the current depth must be below the max recursion depth
 		if bl.IsValid {
-			pool.doCrawl(bl)
+			pool.wg.Add(2)
+			go pool.doCrawl(bl)
+			go pool.doRule(bl)
 			if bl.RecuDepth < maxRecursion {
 				if CompareWithExpr(pool.RecuExpr, params) {
 					bl.Recu = true
@@ -221,7 +227,7 @@ type Pool struct {
 	ctx         context.Context
 	cancel      context.CancelFunc
 	tempCh      chan *pkg.Baseline // baselines waiting to be processed
-	checkCh     chan sourceType    // dedicated check channel, avoids conflicts with redirect/crawl
+	checkCh     chan int           // dedicated check channel, avoids conflicts with redirect/crawl
 	additionCh  chan *Unit
 	reqCount    int
 	failedCount int
@@ -302,6 +308,7 @@ func (pool *Pool) Run(ctx context.Context, offset, limit int) {
 		}
 	}()
 	if pool.Active {
+		pool.wg.Add(1)
 		go pool.doActive()
 	}
 
@@ -341,10 +348,12 @@ Loop:
 		}
 	}
 
-	for len(pool.additionCh) > 0 {
-		time.Sleep(time.Second)
+	for pool.analyzeDone {
+		time.Sleep(time.Duration(100) * time.Millisecond)
 	}
 
 	pool.wg.Wait()
 
 	pool.Statistor.EndTime = time.Now().Unix()
 	pool.Close()
 }
@@ -435,6 +444,7 @@ func CompareWithExpr(exp *vm.Program, params map[string]interface{}) bool {
 }
 
 func (pool *Pool) doRedirect(bl *pkg.Baseline, depth int) {
+	defer pool.wg.Done()
 	if depth >= maxRedirect {
 		return
 	}
@@ -451,6 +461,10 @@ func (pool *Pool) doRedirect(bl *pkg.Baseline, depth int) {
 }
 
 func (pool *Pool) doCrawl(bl *pkg.Baseline) {
+	defer pool.wg.Done()
+	if !pool.Crawl {
+		return
+	}
 	bl.CollectURL()
 	for _, u := range bl.URLs {
 		if strings.HasPrefix(u, "//") {
@@ -467,7 +481,9 @@ func (pool *Pool) doCrawl(bl *pkg.Baseline) {
 			pool.urls[u]++
 		} else {
 			// dedupe via the urls map; only unseen urls reach this branch
+			pool.locker.Lock()
 			pool.urls[u] = 1
+			pool.locker.Unlock()
 			if bl.ReqDepth < maxCrawl {
 				parsed, err := url.Parse(u)
 				if err != nil {
@@ -489,6 +505,7 @@ func (pool *Pool) doCrawl(bl *pkg.Baseline) {
 }
 
 func (pool *Pool) doActive() {
+	defer pool.wg.Done()
 	for _, u := range pkg.ActivePath {
 		pool.wg.Add(1)
 		pool.additionCh <- &Unit{
@@ -514,10 +531,30 @@ func (pool *Pool) doCheck() {
 	}
 }
 
+func (pool *Pool) doRule(bl *pkg.Baseline) {
+	defer pool.wg.Done()
+	if pool.AppendRule == nil {
+		return
+	}
+	if bl.Source == int(RuleSource) || bl.Dir {
+		return
+	}
+
+	for u := range rule.RunAsStream(pool.AppendRule.Expressions, path.Base(bl.Path)) {
+		pool.wg.Add(1)
+		pool.additionCh <- &Unit{
+			path:   path.Join(path.Dir(bl.Path), u),
+			source: RuleSource,
+			depth:  1,
+		}
+	}
+}
+
 func (pool *Pool) addFuzzyBaseline(bl *pkg.Baseline) {
 	if _, ok := pool.baselines[bl.Status]; !ok && IntsContains(FuzzyStatus, bl.Status) {
 		bl.Collect()
 		pool.locker.Lock()
+		pool.wg.Add(1)
 		pool.doCrawl(bl)
 		pool.baselines[bl.Status] = bl
 		pool.locker.Unlock()
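A minimal standalone sketch of the append-rule flow that doRule implements, using rule.Compile and rule.RunAsStream exactly as they appear above; the sample path, the two inline rules, and the expected output are illustrative assumptions, not part of the diff:

package main

import (
	"fmt"
	"path"

	"github.com/chainreactors/words/rule"
)

func main() {
	// Two rules in the same syntax as rule/filebak.txt: append "~", append ".bak".
	program := rule.Compile("$~\n$. $b $a $k", "")

	// Mutate only the basename of a valid hit, then re-attach its directory,
	// mirroring what pool.doRule does before queueing RuleSource units.
	target := "/admin/index.php"
	for word := range rule.RunAsStream(program.Expressions, path.Base(target)) {
		fmt.Println(path.Join(path.Dir(target), word)) // expected: /admin/index.php~ and /admin/index.php.bak
	}
}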
@@ -38,7 +38,8 @@ type Runner struct {
 	Tasks       []*Task
 	URLList     []string
 	Wordlist    []string
-	Rules       []rule.Expression
+	Rules       *rule.Program
+	AppendRules *rule.Program
 	Headers     map[string]string
 	Fns         []func(string) string
 	FilterExpr  *vm.Program
@@ -90,6 +91,7 @@ func (r *Runner) PrepareConfig() *pkg.Config {
 		MatchExpr:  r.MatchExpr,
 		FilterExpr: r.FilterExpr,
 		RecuExpr:   r.RecursiveExpr,
+		AppendRule: r.AppendRules,
 		IgnoreWaf:  r.IgnoreWaf,
 		Crawl:      r.Crawl,
 		Active:     r.Active,
@@ -180,7 +182,7 @@ func (r *Runner) Prepare(ctx context.Context) error {
 			} else {
 				pool.Statistor = pkg.NewStatistor(t.baseUrl)
 				pool.worder = words.NewWorderWithFns(r.Wordlist, r.Fns)
-				pool.worder.Rules = r.Rules
+				pool.worder.Rules = r.Rules.Expressions
 			}
 
 			var limit int
@@ -44,10 +44,8 @@ func (e ErrorType) Error() string {
 	}
 }
 
-type sourceType int
-
 const (
-	CheckSource sourceType = iota + 1
+	CheckSource = iota + 1
 	InitRandomSource
 	InitIndexSource
 	RedirectSource
@@ -55,19 +53,20 @@ const (
 	ActiveSource
 	WordSource
 	WafSource
+	RuleSource
 )
 
-func newUnit(path string, source sourceType) *Unit {
+func newUnit(path string, source int) *Unit {
 	return &Unit{path: path, source: source}
 }
 
-func newUnitWithNumber(path string, source sourceType, number int) *Unit {
+func newUnitWithNumber(path string, source int, number int) *Unit {
 	return &Unit{path: path, source: source}
 }
 
 type Unit struct {
 	path     string
-	source   sourceType
+	source   int
 	frontUrl string
 	depth    int // redirect depth
 }
@@ -103,5 +103,5 @@ func loadRuleWithFiles(ruleFiles []string, filter string) ([]rule.Expression, er
 		rules.Write(content)
 		rules.WriteString("\n")
 	}
-	return rule.Compile(rules.String(), filter), nil
+	return rule.Compile(rules.String(), filter).Expressions, nil
 }
@@ -47,6 +47,7 @@ func NewBaseline(u, host string, resp *ihttp.Response) *Baseline {
 		bl.Path = uu.Path
 		bl.Url = uu
 	}
+	bl.Dir = bl.IsDir()
 	if resp.ClientType == ihttp.STANDARD {
 		bl.Host = host
 	}
@@ -72,6 +73,7 @@ func NewInvalidBaseline(u, host string, resp *ihttp.Response, reason string) *Ba
 		bl.Path = uu.Path
 		bl.Url = uu
 	}
+	bl.Dir = bl.IsDir()
 
 	if resp.ClientType == ihttp.STANDARD {
 		bl.Host = host
@@ -91,6 +93,7 @@ type Baseline struct {
 	Url        *url.URL `json:"-"`
 	UrlString  string   `json:"url"`
 	Path       string   `json:"path"`
+	Dir        bool     `json:"isdir"`
 	Host       string   `json:"host"`
 	Body       []byte   `json:"-"`
 	BodyLength int      `json:"body_length"`
@@ -2,6 +2,7 @@ package pkg
 
 import (
 	"github.com/antonmedv/expr/vm"
+	"github.com/chainreactors/words/rule"
 )
 
 type SprayMod int
@@ -33,6 +34,7 @@ type Config struct {
 	MatchExpr  *vm.Program
 	FilterExpr *vm.Program
 	RecuExpr   *vm.Program
+	AppendRule *rule.Program
 	OutputCh   chan *Baseline
 	FuzzyCh    chan *Baseline
 	Fuzzy      bool
rule/filebak.txt (new file, 12 lines)
@@ -0,0 +1,12 @@
+$~
+$b $a $k
+$. $b $a $k
+$. $b $a $k $2
+$. $o $l $d
+$. $1
+$. $2
+$. $z $i $p
+$. $t $a $r
+$. $g $z
+^.
+^. $. $s $w $p
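Assuming the hashcat-style rule semantics used by the words engine ($x appends the character x, ^x prepends it), these 12 rules turn a valid hit such as index.php into the backup-file candidates:

    index.php~  index.phpbak  index.php.bak  index.php.bak2  index.php.old
    index.php.1  index.php.2  index.php.zip  index.php.tar  index.php.gz
    .index.php  .index.php.swp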