Fix a bug where functions generated duplicate targets

Improve automatic detection: 404, 200, and 403 responses are now automatically deduplicated via unique
append-file now takes effect for unique-status and white-status
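In practice the unique logic keys a response by a content hash and drops repeats. A self-contained toy of that flow (crc32 stands in for the CRC16 hash spray actually uses, and the hashed fields are a simplified subset of the UniqueHash function further down in this diff):

```go
package main

import (
	"fmt"
	"hash/crc32" // stand-in for the CRC16 hash used by spray's UniqueHash
)

func main() {
	uniqueStatus := map[int]bool{200: true, 403: true, 404: true} // new defaults
	seen := map[uint32]bool{}

	type resp struct {
		status  int
		title   string
		bodyLen int
	}
	for _, r := range []resp{
		{403, "Forbidden", 152},
		{403, "Forbidden", 155}, // same ACL page: same bucket, dropped
		{200, "Login", 1024},
	} {
		if !uniqueStatus[r.status] {
			continue // only the configured statuses are deduplicated
		}
		key := crc32.ChecksumIEEE([]byte(fmt.Sprintf("%d%s%d", r.status, r.title, r.bodyLen/10*10)))
		if seen[key] {
			fmt.Printf("dropped duplicate: %+v\n", r)
			continue
		}
		seen[key] = true
		fmt.Printf("kept: %+v\n", r)
	}
}
```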
M09Ic 2024-02-10 12:48:22 +08:00
parent 0bf3c0433a
commit c1309fe7da
10 changed files with 159 additions and 112 deletions

4
go.mod
View File

@ -9,9 +9,9 @@ require (
github.com/chainreactors/files v0.0.0-20231123083421-cea5b4ad18a8
github.com/chainreactors/gogo/v2 v2.11.12-0.20231228061950-116583962e30
github.com/chainreactors/logs v0.0.0-20240207121836-c946f072f81f
github.com/chainreactors/parsers v0.0.0-20240208083828-d2abbaa66a9c
github.com/chainreactors/parsers v0.0.0-20240208143911-65866d5bbc6d
github.com/chainreactors/utils v0.0.0-20231031063336-9477f1b23886
github.com/chainreactors/words v0.4.1-0.20240206181137-09f7a0bc8f50
github.com/chainreactors/words v0.4.1-0.20240208114042-a1c5053345b0
github.com/gosuri/uiprogress v0.0.1
github.com/jessevdk/go-flags v1.5.0
github.com/panjf2000/ants/v2 v2.7.0

8
go.sum
View File

@ -20,10 +20,18 @@ github.com/chainreactors/parsers v0.0.0-20231218072716-fb441aff745f/go.mod h1:ZH
github.com/chainreactors/parsers v0.0.0-20231220104848-3a0b5a5bd8dc/go.mod h1:V2w16sBSSiBlmsDR4A0Q9PIk9+TP/6coTXv6olvTI6M=
github.com/chainreactors/parsers v0.0.0-20240208083828-d2abbaa66a9c h1:zDqlRywqNXd2mnODH590mUiHAdaqjW/WBMu/E2mWLkw=
github.com/chainreactors/parsers v0.0.0-20240208083828-d2abbaa66a9c/go.mod h1:IS0hrYnccfJKU0NA12zdZk4mM7k/Qt4qnzMnFGBFLZI=
github.com/chainreactors/parsers v0.0.0-20240208143013-46f4b66a900a h1:jCCWBCTw/5L5FK6WMDqhz5ltMSjN0t0Jnlx+Lekzql4=
github.com/chainreactors/parsers v0.0.0-20240208143013-46f4b66a900a/go.mod h1:IS0hrYnccfJKU0NA12zdZk4mM7k/Qt4qnzMnFGBFLZI=
github.com/chainreactors/parsers v0.0.0-20240208143703-cdc9c2b86079 h1:00Pw9Beh1zUl7YaX5sD5V/a9To0PfQItolAOARlbxFo=
github.com/chainreactors/parsers v0.0.0-20240208143703-cdc9c2b86079/go.mod h1:IS0hrYnccfJKU0NA12zdZk4mM7k/Qt4qnzMnFGBFLZI=
github.com/chainreactors/parsers v0.0.0-20240208143911-65866d5bbc6d h1:NFZLic9KNL1KdyvZFatRufXV9FJ3AXmKgTFQQ6Sz+Vk=
github.com/chainreactors/parsers v0.0.0-20240208143911-65866d5bbc6d/go.mod h1:IS0hrYnccfJKU0NA12zdZk4mM7k/Qt4qnzMnFGBFLZI=
github.com/chainreactors/utils v0.0.0-20231031063336-9477f1b23886 h1:lS2T/uE9tg1MNDPrb44wawbNlD24zBlWoG0H+ZdwDAk=
github.com/chainreactors/utils v0.0.0-20231031063336-9477f1b23886/go.mod h1:JA4eiQZm+7AsfjXBcIzIdVKBEhDCb16eNtWFCGTxlvs=
github.com/chainreactors/words v0.4.1-0.20240206181137-09f7a0bc8f50 h1:5aK4k6ztjvDENa4zuxYPzQH2UAy9VM+tvOhpRSCCXWQ=
github.com/chainreactors/words v0.4.1-0.20240206181137-09f7a0bc8f50/go.mod h1:DUDx7PdsMEm5PvVhzkFyppzpiUhQb8dOJaWjVc1SMVk=
github.com/chainreactors/words v0.4.1-0.20240208114042-a1c5053345b0 h1:7aAfDhZDLs6uiWNzYa68L4uzBX7ZIj7IT8v+AlmmpHw=
github.com/chainreactors/words v0.4.1-0.20240208114042-a1c5053345b0/go.mod h1:DUDx7PdsMEm5PvVhzkFyppzpiUhQb8dOJaWjVc1SMVk=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=

View File

@ -33,7 +33,7 @@ func NewBaseline(u, host string, resp *ihttp.Response) *Baseline {
copy(bl.Header, header)
bl.HeaderLength = len(bl.Header)
if i := resp.ContentLength(); i != 0 && bl.ContentType != "bin" {
if i := resp.ContentLength(); i != 0 && i <= ihttp.DefaultMaxBodySize {
body := resp.Body()
bl.Body = make([]byte, len(body))
copy(bl.Body, body)
@ -103,7 +103,6 @@ func NewInvalidBaseline(u, host string, resp *ihttp.Response, reason string) *Ba
type Baseline struct {
*parsers.SprayResult
Unique uint16 `json:"-"`
Url *url.URL `json:"-"`
Dir bool `json:"-"`
Chunked bool `json:"-"`

View File

@ -105,7 +105,7 @@ Loop:
}
pool.wg.Add(1)
_ = pool.pool.Invoke(newUnit(u, CheckSource))
_ = pool.pool.Invoke(newUnit(u, parsers.CheckSource))
case u, ok := <-pool.additionCh:
if !ok {
continue
@ -206,7 +206,7 @@ func (pool *CheckPool) doRedirect(bl *Baseline, depth int) {
go func() {
pool.additionCh <- &Unit{
path: reURL,
source: RedirectSource,
source: parsers.RedirectSource,
frontUrl: bl.UrlString,
depth: depth + 1,
}
@ -228,7 +228,7 @@ func (pool *CheckPool) doUpgrade(bl *Baseline) {
go func() {
pool.additionCh <- &Unit{
path: reurl,
source: UpgradeSource,
source: parsers.UpgradeSource,
depth: bl.ReqDepth + 1,
}
}()

View File

@ -117,8 +117,8 @@ type ModeOptions struct {
BreakThreshold int `long:"error-threshold" default:"20" description:"Int, break when the error exceeds the threshold "`
BlackStatus string `long:"black-status" default:"400,410" description:"Strings (comma split),custom black status, "`
WhiteStatus string `long:"white-status" default:"200" description:"Strings (comma split), custom white status"`
FuzzyStatus string `long:"fuzzy-status" default:"404,403,500,501,502,503" description:"Strings (comma split), custom fuzzy status"`
UniqueStatus string `long:"unique-status" default:"403" description:"Strings (comma split), custom unique status"`
FuzzyStatus string `long:"fuzzy-status" default:"500,501,502,503" description:"Strings (comma split), custom fuzzy status"`
UniqueStatus string `long:"unique-status" default:"403,200,404" description:"Strings (comma split), custom unique status"`
Unique bool `long:"unique" description:"Bool, unique response"`
RetryCount int `long:"retry" default:"0" description:"Int, retry count"`
SimhashDistance int `long:"distance" default:"5"`

View File

@ -56,7 +56,7 @@ func NewPool(ctx context.Context, config *Config) (*Pool, error) {
scopeurls: make(map[string]struct{}),
uniques: make(map[uint16]struct{}),
handlerCh: make(chan *Baseline, config.Thread),
checkCh: make(chan int, config.Thread),
checkCh: make(chan struct{}, config.Thread),
additionCh: make(chan *Unit, config.Thread),
closeCh: make(chan struct{}),
waiter: sync.WaitGroup{},
@ -71,7 +71,7 @@ func NewPool(ctx context.Context, config *Config) (*Pool, error) {
} else if pool.url.Path == "" {
pool.dir = "/"
} else {
pool.dir = Dir(pool.url.Path)
pool.dir = dir(pool.url.Path)
}
pool.reqPool, _ = ants.NewPoolWithFunc(config.Thread, pool.Invoke)
@ -97,7 +97,7 @@ type Pool struct {
ctx context.Context
cancel context.CancelFunc
handlerCh chan *Baseline // baselines waiting to be processed
checkCh chan int // dedicated check channel, avoids clashing with redirect/crawl
checkCh chan struct{} // dedicated check channel, avoids clashing with redirect/crawl
additionCh chan *Unit // tasks added by plugins, pending queue
closeCh chan struct{}
closed bool
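checkCh is now a pure signal channel: doCheck sends struct{}{} and the consumer always builds the request with parsers.CheckSource instead of reading a source value off the channel. A minimal, self-contained illustration of that signal-only pattern (names here are illustrative, not spray's):

```go
package main

import (
	"fmt"
	"sync"
)

func main() {
	checkCh := make(chan struct{}, 4) // zero-byte elements: the channel only signals

	var wg sync.WaitGroup
	wg.Add(1)
	go func() {
		defer wg.Done()
		for range checkCh {
			// the consumer decides what a signal means
			// (in spray: fire a CheckSource request at a random path/host)
			fmt.Println("run periodic check")
		}
	}()

	for i := 0; i < 3; i++ {
		checkCh <- struct{}{} // doCheck-style trigger
	}
	close(checkCh)
	wg.Wait()
}
```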
@ -144,18 +144,18 @@ func (pool *Pool) Init() error {
pool.initwg.Add(2)
if pool.Index != "/" {
logs.Log.Logf(LogVerbose, "custom index url: %s", BaseURL(pool.url)+FormatURL(BaseURL(pool.url), pool.Index))
pool.reqPool.Invoke(newUnit(pool.Index, InitIndexSource))
//pool.urls[Dir(pool.Index)] = struct{}{}
pool.reqPool.Invoke(newUnit(pool.Index, parsers.InitIndexSource))
//pool.urls[dir(pool.Index)] = struct{}{}
} else {
pool.reqPool.Invoke(newUnit(pool.url.Path, InitIndexSource))
//pool.urls[Dir(pool.url.Path)] = struct{}{}
pool.reqPool.Invoke(newUnit(pool.url.Path, parsers.InitIndexSource))
//pool.urls[dir(pool.url.Path)] = struct{}{}
}
if pool.Random != "" {
logs.Log.Logf(LogVerbose, "custom random url: %s", BaseURL(pool.url)+FormatURL(BaseURL(pool.url), pool.Random))
pool.reqPool.Invoke(newUnit(pool.Random, InitRandomSource))
pool.reqPool.Invoke(newUnit(pool.Random, parsers.InitRandomSource))
} else {
pool.reqPool.Invoke(newUnit(pool.safePath(pkg.RandPath()), InitRandomSource))
pool.reqPool.Invoke(newUnit(pool.safePath(pkg.RandPath()), parsers.InitRandomSource))
}
pool.initwg.Wait()
@ -241,25 +241,25 @@ Loop:
pool.waiter.Add(1)
if pool.Mod == HostSpray {
pool.reqPool.Invoke(newUnitWithNumber(w, WordSource, pool.wordOffset))
pool.reqPool.Invoke(newUnitWithNumber(w, parsers.WordSource, pool.wordOffset))
} else {
// join the directory verbatim: keep exactly as many "/" as the input has, to support middleware where "/" carries meaning
pool.reqPool.Invoke(newUnitWithNumber(pool.safePath(w), WordSource, pool.wordOffset))
pool.reqPool.Invoke(newUnitWithNumber(pool.safePath(w), parsers.WordSource, pool.wordOffset))
}
case source := <-pool.checkCh:
case <-pool.checkCh:
pool.Statistor.CheckNumber++
if pool.Mod == HostSpray {
pool.reqPool.Invoke(newUnitWithNumber(pkg.RandHost(), source, pool.wordOffset))
pool.reqPool.Invoke(newUnitWithNumber(pkg.RandHost(), parsers.CheckSource, pool.wordOffset))
} else if pool.Mod == PathSpray {
pool.reqPool.Invoke(newUnitWithNumber(pool.safePath(pkg.RandPath()), source, pool.wordOffset))
pool.reqPool.Invoke(newUnitWithNumber(pool.safePath(pkg.RandPath()), parsers.CheckSource, pool.wordOffset))
}
case unit, ok := <-pool.additionCh:
if !ok || pool.closed {
continue
}
if _, ok := pool.urls.Load(unit.path); ok {
logs.Log.Debugf("[%s] duplicate path: %s, skipped", parsers.GetSpraySourceName(unit.source), pool.base+unit.path)
logs.Log.Debugf("[%s] duplicate path: %s, skipped", unit.source.Name(), pool.base+unit.path)
pool.waiter.Done()
} else {
pool.urls.Store(unit.path, nil)
@ -288,7 +288,7 @@ func (pool *Pool) Invoke(v interface{}) {
var req *ihttp.Request
var err error
if unit.source == WordSource {
if unit.source == parsers.WordSource {
req, err = pool.genReq(pool.Mod, unit.path)
} else {
req, err = pool.genReq(PathSpray, unit.path)
@ -325,7 +325,7 @@ func (pool *Pool) Invoke(v interface{}) {
// automatically replay failed requests
pool.doRetry(bl)
} else { // optimizations for specific scenarios
if unit.source <= 3 || unit.source == CrawlSource || unit.source == CommonFileSource {
if unit.source <= 3 || unit.source == parsers.CrawlSource || unit.source == parsers.CommonFileSource {
// high-priority sources skip PreCompare
bl = NewBaseline(req.URI(), req.Host(), resp)
} else if pool.MatchExpr != nil {
@ -340,7 +340,7 @@ func (pool *Pool) Invoke(v interface{}) {
}
// handle redirects manually
if bl.IsValid && unit.source != CheckSource && bl.RedirectURL != "" {
if bl.IsValid && unit.source != parsers.CheckSource && bl.RedirectURL != "" {
//pool.waiter.Add(1)
pool.doRedirect(bl, unit.depth)
}
@ -353,14 +353,14 @@ func (pool *Pool) Invoke(v interface{}) {
bl.Number = unit.number
bl.Spended = time.Since(start).Milliseconds()
switch unit.source {
case InitRandomSource:
case parsers.InitRandomSource:
bl.Collect()
pool.locker.Lock()
pool.random = bl
pool.addFuzzyBaseline(bl)
pool.locker.Unlock()
pool.initwg.Done()
case InitIndexSource:
case parsers.InitIndexSource:
bl.Collect()
pool.locker.Lock()
pool.index = bl
@ -372,7 +372,7 @@ func (pool *Pool) Invoke(v interface{}) {
pool.OutputCh <- bl
}
pool.initwg.Done()
case CheckSource:
case parsers.CheckSource:
if bl.ErrString != "" {
logs.Log.Warnf("[check.error] %s maybe ip had banned, break (%d/%d), error: %s", pool.BaseURL, pool.failedCount, pool.BreakThreshold, bl.ErrString)
} else if i := pool.random.Compare(bl); i < 1 {
@ -390,7 +390,7 @@ func (pool *Pool) Invoke(v interface{}) {
logs.Log.Debug("[check.pass] " + bl.String())
}
case WordSource:
case parsers.WordSource:
// run the expensive deep comparison asynchronously
pool.handlerCh <- bl
if int(pool.Statistor.ReqTotal)%pool.CheckPeriod == 0 {
@ -400,7 +400,7 @@ func (pool *Pool) Invoke(v interface{}) {
pool.doCheck()
}
pool.bar.Done()
case RedirectSource:
case parsers.RedirectSource:
bl.FrontURL = unit.frontUrl
pool.handlerCh <- bl
default:
@ -463,25 +463,25 @@ func (pool *Pool) Handler() {
"random": pool.random,
"current": bl,
}
//for _, status := range FuzzyStatus {
// if bl, ok := pool.baselines[status]; ok {
// params["bl"+strconv.Itoa(status)] = bl
//for _, ok := range FuzzyStatus {
// if bl, ok := pool.baselines[ok]; ok {
// params["bl"+strconv.Itoa(ok)] = bl
// } else {
// params["bl"+strconv.Itoa(status)] = nilBaseline
// params["bl"+strconv.Itoa(ok)] = nilBaseline
// }
//}
}
var status bool
var ok bool
if pool.MatchExpr != nil {
if CompareWithExpr(pool.MatchExpr, params) {
status = true
ok = true
}
} else {
status = pool.BaseCompare(bl)
ok = pool.BaseCompare(bl)
}
if status {
if ok {
pool.Statistor.FoundNumber++
// uniqueness check
@ -509,11 +509,12 @@ func (pool *Pool) Handler() {
pool.waiter.Add(2)
pool.doCrawl(bl)
pool.doRule(bl)
}
if iutils.IntsContains(WhiteStatus, bl.Status) || iutils.IntsContains([]int{403, 500, 502}, bl.Status) {
if iutils.IntsContains(WhiteStatus, bl.Status) || iutils.IntsContains(UniqueStatus, bl.Status) {
pool.waiter.Add(1)
pool.doAppendWords(bl)
}
}
// recursion requires: bl is valid, mod is path-spray, and the current depth is below the max recursion depth
if bl.IsValid {
if bl.RecuDepth < MaxRecursion {
@ -639,7 +640,7 @@ func (pool *Pool) doRedirect(bl *Baseline, depth int) {
defer pool.waiter.Done()
pool.addAddition(&Unit{
path: reURL,
source: RedirectSource,
source: parsers.RedirectSource,
frontUrl: bl.UrlString,
depth: depth + 1,
})
@ -668,7 +669,7 @@ func (pool *Pool) doCrawl(bl *Baseline) {
}
pool.addAddition(&Unit{
path: u,
source: CrawlSource,
source: parsers.CrawlSource,
depth: bl.ReqDepth + 1,
})
}
@ -693,7 +694,7 @@ func (pool *Pool) doScopeCrawl(bl *Baseline) {
if _, ok := pool.scopeurls[u]; !ok {
pool.urls.Store(u, nil)
pool.waiter.Add(1)
pool.scopePool.Invoke(&Unit{path: u, source: CrawlSource, depth: bl.ReqDepth + 1})
pool.scopePool.Invoke(&Unit{path: u, source: parsers.CrawlSource, depth: bl.ReqDepth + 1})
}
pool.scopeLocker.Unlock()
}
@ -706,7 +707,7 @@ func (pool *Pool) doRule(bl *Baseline) {
pool.waiter.Done()
return
}
if bl.Source == RuleSource {
if bl.Source == parsers.RuleSource {
pool.waiter.Done()
return
}
@ -715,8 +716,8 @@ func (pool *Pool) doRule(bl *Baseline) {
defer pool.waiter.Done()
for u := range rule.RunAsStream(pool.AppendRule.Expressions, path.Base(bl.Path)) {
pool.addAddition(&Unit{
path: Dir(bl.Url.Path) + u,
source: RuleSource,
path: dir(bl.Url.Path) + u,
source: parsers.RuleSource,
})
}
}()
@ -727,7 +728,7 @@ func (pool *Pool) doAppendWords(bl *Baseline) {
pool.waiter.Done()
return
}
if bl.Source == AppendSource {
if bl.Source == parsers.AppendSource {
pool.waiter.Done()
return
}
@ -736,8 +737,8 @@ func (pool *Pool) doAppendWords(bl *Baseline) {
defer pool.waiter.Done()
for _, u := range pool.AppendWords {
pool.addAddition(&Unit{
path: relaPath(Dir(bl.Url.Path), u),
source: AppendSource,
path: safePath(bl.Path, u),
source: parsers.AppendSource,
})
}
}()
@ -752,7 +753,7 @@ func (pool *Pool) doRetry(bl *Baseline) {
defer pool.waiter.Done()
pool.addAddition(&Unit{
path: bl.Path,
source: RetrySource,
source: parsers.RetrySource,
retry: bl.Retry + 1,
})
}()
@ -763,7 +764,7 @@ func (pool *Pool) doActive() {
for _, u := range pkg.ActivePath {
pool.addAddition(&Unit{
path: pool.dir + u[1:],
source: ActiveSource,
source: parsers.FingerSource,
})
}
}
@ -778,7 +779,7 @@ func (pool *Pool) doBak() {
for w := range worder.C {
pool.addAddition(&Unit{
path: pool.dir + w,
source: BakSource,
source: parsers.BakSource,
})
}
@ -790,7 +791,7 @@ func (pool *Pool) doBak() {
for w := range worder.C {
pool.addAddition(&Unit{
path: pool.dir + w,
source: BakSource,
source: parsers.BakSource,
})
}
}
@ -800,7 +801,7 @@ func (pool *Pool) doCommonFile() {
for _, u := range mask.SpecialWords["common_file"] {
pool.addAddition(&Unit{
path: pool.dir + u,
source: CommonFileSource,
source: parsers.CommonFileSource,
})
}
}
@ -815,9 +816,9 @@ func (pool *Pool) doCheck() {
}
if pool.Mod == HostSpray {
pool.checkCh <- CheckSource
pool.checkCh <- struct{}{}
} else if pool.Mod == PathSpray {
pool.checkCh <- CheckSource
pool.checkCh <- struct{}{}
}
}
@ -876,19 +877,10 @@ func (pool *Pool) Close() {
func (pool *Pool) safePath(u string) string {
// auto-generated paths (e.g. init, check, common) are joined onto the relative directory via safePath to avoid "//"
hasSlash := strings.HasPrefix(u, "/")
if hasSlash {
if pool.isDir {
return pool.dir + u[1:]
return safePath(pool.dir, u)
} else {
return pool.url.Path + u
}
} else {
if pool.isDir {
return pool.url.Path + u
} else {
return pool.url.Path + "/" + u
}
return safePath(pool.url.Path+"/", u)
}
}

View File

@ -17,11 +17,11 @@ import (
)
var (
WhiteStatus = []int{200}
BlackStatus = []int{400, 410}
FuzzyStatus = []int{403, 404, 500, 501, 502, 503}
WhiteStatus = []int{} // cmd input, 200
BlackStatus = []int{} // cmd input, 400,410
FuzzyStatus = []int{} // cmd input, 500,501,502,503
WAFStatus = []int{493, 418, 1020, 406}
UniqueStatus = []int{403, 200} // 403s with the same unique hash hit the same ACL; 200s with the same unique hash are the default page
UniqueStatus = []int{} // 403s with the same unique hash hit the same ACL; 200s with the same unique hash are the default page
)
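These lists are now empty at compile time and populated from the corresponding command-line flags (white-status, black-status, fuzzy-status, unique-status above). The flag parsing itself is not shown in this diff; a plain-Go stand-in for turning a comma-split flag value into []int might look like:

```go
package main

import (
	"fmt"
	"strconv"
	"strings"
)

// parseStatus turns a comma-separated flag value like "403,200,404"
// into a list of status codes, skipping anything non-numeric.
func parseStatus(s string) []int {
	var out []int
	for _, part := range strings.Split(s, ",") {
		if code, err := strconv.Atoi(strings.TrimSpace(part)); err == nil {
			out = append(out, code)
		}
	}
	return out
}

func main() {
	// the new defaults from this commit
	fmt.Println(parseStatus("500,501,502,503")) // fuzzy-status
	fmt.Println(parseStatus("403,200,404"))     // unique-status
}
```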
var (

View File

@ -1,18 +1,21 @@
package internal
import (
"github.com/chainreactors/parsers"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/words"
"github.com/chainreactors/words/rule"
)
type Source int
const (
CheckSource = iota + 1
CheckSource Source = iota + 1
InitRandomSource
InitIndexSource
RedirectSource
CrawlSource
ActiveSource
FingerSource
WordSource
WafSource
RuleSource
@ -23,18 +26,54 @@ const (
AppendSource
)
func newUnit(path string, source int) *Unit {
// Name returns the name of the source
func (s Source) Name() string {
switch s {
case CheckSource:
return "check"
case InitRandomSource:
return "random"
case InitIndexSource:
return "index"
case RedirectSource:
return "redirect"
case CrawlSource:
return "crawl"
case FingerSource:
return "finger"
case WordSource:
return "word"
case WafSource:
return "waf"
case RuleSource:
return "rule"
case BakSource:
return "bak"
case CommonFileSource:
return "common"
case UpgradeSource:
return "upgrade"
case RetrySource:
return "retry"
case AppendSource:
return "append"
default:
return "unknown"
}
}
func newUnit(path string, source parsers.SpraySource) *Unit {
return &Unit{path: path, source: source}
}
func newUnitWithNumber(path string, source int, number int) *Unit {
func newUnitWithNumber(path string, source parsers.SpraySource, number int) *Unit {
return &Unit{path: path, source: source, number: number}
}
type Unit struct {
number int
path string
source int
source parsers.SpraySource
retry int
frontUrl string
depth int // redirect depth

View File

@ -171,6 +171,15 @@ func loadRuleWithFiles(ruleFiles []string, filter string) ([]rule.Expression, er
return rule.Compile(rules.String(), filter).Expressions, nil
}
func safePath(dir, u string) string {
hasSlash := strings.HasPrefix(u, "/")
if hasSlash {
return path.Join(dir, u[1:])
} else {
return path.Join(dir, u)
}
}
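pool.safePath in pool.go now delegates to this helper, which strips a leading slash and lets path.Join normalize the join (unlike relaPath below, which deliberately avoids path.Join). A standalone illustration with arbitrary example paths:

```go
package main

import (
	"fmt"
	"path"
	"strings"
)

// local copy of the helper above, for a self-contained demo
func safePath(dir, u string) string {
	if strings.HasPrefix(u, "/") {
		return path.Join(dir, u[1:])
	}
	return path.Join(dir, u)
}

func main() {
	fmt.Println(safePath("/admin/", "/login")) // /admin/login
	fmt.Println(safePath("/admin/", "login"))  // /admin/login
	fmt.Println(safePath("/", "robots.txt"))   // /robots.txt
}
```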
func relaPath(base, u string) string {
// join relative directories; path.Join is not used because something like "////" may be a genuinely meaningful route and must not be stripped
// "" /a /a
@ -200,14 +209,14 @@ func relaPath(base, u string) string {
}
} else {
if pathSlash {
return Dir(base) + u[1:]
return dir(base) + u[1:]
} else {
return Dir(base) + u
return dir(base) + u
}
}
}
func Dir(u string) string {
func dir(u string) string {
// safely get the directory; repeated "//" is not collapsed, and this is not meant to return the parent directory
// /a /
// /a/ /a/
@ -243,7 +252,7 @@ func FormatURL(base, u string) string {
// "./"相对目录拼接
return relaPath(base, u[2:])
} else if strings.HasPrefix(u, "../") {
return path.Join(Dir(base), u)
return path.Join(dir(base), u)
} else {
// plain relative-path join
return relaPath(base, u)
@ -303,7 +312,7 @@ func wrapWordsFunc(f func(string) string) func(string) []string {
}
func UniqueHash(bl *Baseline) uint16 {
// hash built from host + status code + redirect url + content-type + title + length with the last two digits dropped
// hash built from host + status code + redirect url + content-type + title + length with the last digit dropped
// body length may cause some false positives; there is no better solution for now
return pkg.CRC16Hash([]byte(bl.Host + strconv.Itoa(bl.Status) + bl.RedirectURL + bl.ContentType + bl.Title + strconv.Itoa(bl.BodyLength/100*100)))
return pkg.CRC16Hash([]byte(bl.Host + strconv.Itoa(bl.Status) + bl.RedirectURL + bl.ContentType + bl.Title + strconv.Itoa(bl.BodyLength/10*10)))
}
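The only change to UniqueHash is the BodyLength rounding: truncation to the nearest 100 becomes truncation to the nearest 10, so pages whose lengths differ by ten or more bytes no longer fall into the same unique bucket. A quick worked comparison:

```go
package main

import "fmt"

func main() {
	for _, n := range []int{1337, 1395, 1402} {
		fmt.Printf("len=%d  old bucket=%d  new bucket=%d\n", n, n/100*100, n/10*10)
	}
	// old: 1337 and 1395 both bucket to 1300 (treated as the same page)
	// new: they bucket to 1330 vs 1390 (kept distinct)
}
```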

View File

@ -18,7 +18,7 @@ func NewStatistor(url string) *Statistor {
stat := DefaultStatistor
stat.StartTime = time.Now().Unix()
stat.Counts = make(map[int]int)
stat.Sources = make(map[int]int)
stat.Sources = make(map[parsers.SpraySource]int)
stat.BaseUrl = url
return &stat
}
@ -32,7 +32,7 @@ func NewStatistorFromStat(origin *Statistor) *Statistor {
RuleFiles: origin.RuleFiles,
RuleFilter: origin.RuleFilter,
Counts: make(map[int]int),
Sources: map[int]int{},
Sources: map[parsers.SpraySource]int{},
StartTime: time.Now().Unix(),
}
}
@ -41,7 +41,7 @@ type Statistor struct {
BaseUrl string `json:"url"`
Error string `json:"error"`
Counts map[int]int `json:"counts"`
Sources map[int]int `json:"sources"`
Sources map[parsers.SpraySource]int `json:"sources"`
FailedNumber int32 `json:"failed"`
ReqTotal int32 `json:"req_total"`
CheckNumber int `json:"check"`
@ -116,7 +116,7 @@ func (stat *Statistor) PrintSource() {
s.WriteString("[stat] ")
s.WriteString(stat.BaseUrl)
for k, v := range stat.Sources {
s.WriteString(fmt.Sprintf(" %s: %d,", parsers.GetSpraySourceName(k), v))
s.WriteString(fmt.Sprintf(" %s: %d,", k.Name(), v))
}
logs.Log.Important(s.String())
}
@ -145,7 +145,7 @@ func (stat *Statistor) PrintColorSource() {
s.WriteString("[stat] ")
s.WriteString(stat.BaseUrl)
for k, v := range stat.Sources {
s.WriteString(fmt.Sprintf(" %s: %s,", logs.Cyan(parsers.GetSpraySourceName(k)), logs.YellowBold(strconv.Itoa(v))))
s.WriteString(fmt.Sprintf(" %s: %s,", logs.Cyan(k.Name()), logs.YellowBold(strconv.Itoa(v))))
}
logs.Log.Important(s.String())
}