refactor: remove internal pkg, use engine replace

fix: chunked response body not read
fix: nil bar panic
enhance: add default Accept and User-Agent headers
M09Ic 2025-02-22 20:31:32 +08:00
parent c07c2305af
commit f1b9400e19
18 changed files with 127 additions and 107 deletions
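
For orientation, most of the hunks below apply a mechanical package move; the import-path mapping, as visible in the changed files, is roughly:

// mapping applied by this commit (paths taken from the hunks below)
// github.com/chainreactors/spray/internal        -> github.com/chainreactors/spray/core
// github.com/chainreactors/spray/internal/ihttp  -> github.com/chainreactors/spray/core/ihttp
// github.com/chainreactors/spray/internal/pool   -> github.com/chainreactors/spray/core/pool
// pkg.Baseline and related types                 -> github.com/chainreactors/spray/core/baseline
// shared helpers (RandomUA, CRC16Hash, ...)       stay in github.com/chainreactors/spray/pkg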

View File

@ -5,8 +5,8 @@ import (
"fmt"
"github.com/chainreactors/files"
"github.com/chainreactors/logs"
"github.com/chainreactors/spray/internal"
"github.com/chainreactors/spray/internal/ihttp"
"github.com/chainreactors/spray/core"
"github.com/chainreactors/spray/core/ihttp"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/utils/iutils"
"github.com/jessevdk/go-flags"
@ -28,11 +28,11 @@ func init() {
}
func Spray() {
var option internal.Option
var option core.Option
if files.IsExist(DefaultConfig) {
logs.Log.Debug("config.yaml exist, loading")
err := internal.LoadConfig(DefaultConfig, &option)
err := core.LoadConfig(DefaultConfig, &option)
if err != nil {
logs.Log.Error(err.Error())
return
@ -83,7 +83,7 @@ func Spray() {
logs.Log.SetLevel(pkg.LogVerbose)
}
if option.InitConfig {
configStr := internal.InitDefaultConfig(&option, 0)
configStr := core.InitDefaultConfig(&option, 0)
err := os.WriteFile(DefaultConfig, []byte(configStr), 0o744)
if err != nil {
logs.Log.Warn("cannot create config: config.yaml, " + err.Error())
@ -96,7 +96,7 @@ func Spray() {
return
}
if option.Config != "" {
err := internal.LoadConfig(option.Config, &option)
err := core.LoadConfig(option.Config, &option)
if err != nil {
logs.Log.Error(err.Error())
return
@ -123,13 +123,13 @@ func Spray() {
if err != nil {
iutils.Fatal(err.Error())
}
internal.PrintPreset()
core.PrintPreset()
return
}
if option.Format != "" {
internal.Format(option)
core.Format(option)
return
}
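
The exported entry point is unaffected by the rename. A hypothetical minimal caller for reference — the main package is not part of this diff, so the package path and wiring below are assumptions:

package main

import "github.com/chainreactors/spray/cmd"

func main() {
	// Spray parses flags, loads config.yaml when present, and runs the scan
	cmd.Spray()
}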

View File

@ -1,14 +1,16 @@
package pkg
package baseline
import (
"bytes"
"github.com/chainreactors/fingers/common"
"github.com/chainreactors/parsers"
"github.com/chainreactors/spray/internal/ihttp"
"github.com/chainreactors/spray/core/ihttp"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/utils/encode"
"github.com/chainreactors/utils/iutils"
"net/http"
"net/url"
"strconv"
"strings"
)
@ -23,7 +25,7 @@ func NewBaseline(u, host string, resp *ihttp.Response) *Baseline {
},
}
if t, ok := ContentTypeMap[resp.ContentType()]; ok {
if t, ok := pkg.ContentTypeMap[resp.ContentType()]; ok {
bl.ContentType = t
bl.Title = t + " data"
} else {
@ -34,7 +36,6 @@ func NewBaseline(u, host string, resp *ihttp.Response) *Baseline {
bl.Header = make([]byte, len(header))
copy(bl.Header, header)
bl.HeaderLength = len(bl.Header)
if i := resp.ContentLength(); ihttp.CheckBodySize(i) {
if body := resp.Body(); body != nil {
bl.Body = make([]byte, len(body))
@ -50,10 +51,10 @@ func NewBaseline(u, host string, resp *ihttp.Response) *Baseline {
}
bl.Raw = append(bl.Header, bl.Body...)
bl.Response, err = ParseRawResponse(bl.Raw)
bl.Response, err = pkg.ParseRawResponse(bl.Raw)
if err != nil {
bl.IsValid = false
bl.Reason = ErrResponseError.Error()
bl.Reason = pkg.ErrResponseError.Error()
bl.ErrString = err.Error()
return bl
}
@ -73,7 +74,7 @@ func NewBaseline(u, host string, resp *ihttp.Response) *Baseline {
}
} else {
bl.IsValid = false
bl.Reason = ErrUrlError.Error()
bl.Reason = pkg.ErrUrlError.Error()
bl.ErrString = err.Error()
}
bl.Unique = UniqueHash(bl)
@ -116,9 +117,9 @@ type Baseline struct {
Url *url.URL `json:"-"`
Dir bool `json:"-"`
Chunked bool `json:"-"`
Body BS `json:"-"`
Header BS `json:"-"`
Raw BS `json:"-"`
Body pkg.BS `json:"-"`
Header pkg.BS `json:"-"`
Raw pkg.BS `json:"-"`
Response *http.Response `json:"-"`
Recu bool `json:"-"`
RecuDepth int `json:"-"`
@ -147,10 +148,10 @@ func (bl *Baseline) Collect() {
if bl.ContentType == "html" || bl.ContentType == "json" || bl.ContentType == "txt" {
// the fingerprint library was not designed with js/css fingerprints in mind; skipping them here reduces false positives and improves performance
//fmt.Println(bl.Source, bl.Url.String()+bl.Path, bl.RedirectURL, "call fingersengine")
if EnableAllFingerEngine {
bl.Frameworks = EngineDetect(bl.Raw)
if pkg.EnableAllFingerEngine {
bl.Frameworks = pkg.EngineDetect(bl.Raw)
} else {
bl.Frameworks = FingersDetect(bl.Raw)
bl.Frameworks = pkg.FingersDetect(bl.Raw)
}
}
@ -158,14 +159,14 @@ func (bl *Baseline) Collect() {
if bl.ContentType == "html" {
bl.Title = iutils.AsciiEncode(parsers.MatchTitle(bl.Body))
} else if bl.ContentType == "ico" {
if frame := FingerEngine.Favicon().Match(bl.Body); frame != nil {
if frame := pkg.FingerEngine.Favicon().Match(bl.Body); frame != nil {
bl.Frameworks.Merge(frame)
}
}
}
bl.Hashes = parsers.NewHashes(bl.Raw)
bl.Extracteds = Extractors.Extract(string(bl.Raw))
bl.Extracteds = pkg.Extractors.Extract(string(bl.Raw))
bl.Unique = UniqueHash(bl)
}
@ -173,21 +174,21 @@ func (bl *Baseline) CollectURL() {
if len(bl.Body) == 0 {
return
}
for _, reg := range ExtractRegexps["js"][0].CompiledRegexps {
for _, reg := range pkg.ExtractRegexps["js"][0].CompiledRegexps {
urls := reg.FindAllStringSubmatch(string(bl.Body), -1)
for _, u := range urls {
u[1] = CleanURL(u[1])
if u[1] != "" && !FilterJs(u[1]) {
u[1] = pkg.CleanURL(u[1])
if u[1] != "" && !pkg.FilterJs(u[1]) {
bl.URLs = append(bl.URLs, u[1])
}
}
}
for _, reg := range ExtractRegexps["url"][0].CompiledRegexps {
for _, reg := range pkg.ExtractRegexps["url"][0].CompiledRegexps {
urls := reg.FindAllStringSubmatch(string(bl.Body), -1)
for _, u := range urls {
u[1] = CleanURL(u[1])
if u[1] != "" && !FilterUrl(u[1]) {
u[1] = pkg.CleanURL(u[1])
if u[1] != "" && !pkg.FilterUrl(u[1]) {
bl.URLs = append(bl.URLs, u[1])
}
}
@ -255,3 +256,9 @@ func (bl *Baseline) FuzzyCompare(other *Baseline) bool {
}
return false
}
func UniqueHash(bl *Baseline) uint16 {
// hash composed of host + status code + redirect url + content-type + title + body length rounded down to a multiple of 10
// body length may still cause some false positives; there is no better solution for now
return pkg.CRC16Hash([]byte(bl.Host + strconv.Itoa(bl.Status) + bl.RedirectURL + bl.ContentType + bl.Title + strconv.Itoa(bl.BodyLength/10*10)))
}
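
UniqueHash now lives next to Baseline and delegates the CRC-16 to pkg.CRC16Hash. Because the body length is rounded down to a multiple of ten before hashing, responses whose bodies differ by only a few bytes collapse into the same bucket. A small illustrative sketch (the helper name is made up; strconv is assumed imported):

// illustrative only: the key string that feeds pkg.CRC16Hash
func hashKey(bl *Baseline) string {
	return bl.Host + strconv.Itoa(bl.Status) + bl.RedirectURL +
		bl.ContentType + bl.Title + strconv.Itoa(bl.BodyLength/10*10)
}

// e.g. two baselines identical except for body lengths 1234 and 1239 both
// yield a key ending in "1230", so they share the same uint16 Unique value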

View File

@ -1,4 +1,4 @@
package internal
package core
import (
"fmt"

View File

@ -1,4 +1,4 @@
package internal
package core
import (
"fmt"

View File

@ -1,9 +1,10 @@
package internal
package core
import (
"bytes"
"encoding/json"
"github.com/chainreactors/logs"
"github.com/chainreactors/spray/core/baseline"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/words/mask"
"io"
@ -24,9 +25,9 @@ func Format(opts Option) {
if err != nil {
return
}
group := make(map[string][]*pkg.Baseline)
group := make(map[string][]*baseline.Baseline)
for _, line := range bytes.Split(bytes.TrimSpace(content), []byte("\n")) {
var result pkg.Baseline
var result baseline.Baseline
err := json.Unmarshal(line, &result)
if err != nil {
logs.Log.Error(err.Error())

View File

@ -2,6 +2,7 @@ package ihttp
import (
"context"
"github.com/chainreactors/spray/pkg"
"github.com/valyala/fasthttp"
"net/http"
)
@ -30,7 +31,19 @@ type Request struct {
ClientType int
}
func (r *Request) SetHeaders(header map[string]string) {
func (r *Request) SetHeaders(header map[string]string, RandomUA bool) {
if header["User-Agent"] == "" {
if RandomUA {
header["User-Agent"] = pkg.RandomUA()
} else {
header["User-Agent"] = pkg.DefaultUserAgent
}
}
if header["Accept"] == "" {
header["Accept"] = "*/*"
}
if r.StandardRequest != nil {
for k, v := range header {
r.StandardRequest.Header.Set(k, v)
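
SetHeaders now fills in defaults before applying the caller's headers: a missing User-Agent becomes either a random one (pkg.RandomUA) or the fixed pkg.DefaultUserAgent, and a missing Accept becomes */*. A minimal sketch of the same defaulting logic in isolation (a standalone helper for illustration, not the actual method; assumes the pkg import):

// defaultHeaders mirrors the new defaulting behaviour for illustration only
func defaultHeaders(header map[string]string, randomUA bool) map[string]string {
	if header["User-Agent"] == "" {
		if randomUA {
			header["User-Agent"] = pkg.RandomUA() // fresh UA per request
		} else {
			header["User-Agent"] = pkg.DefaultUserAgent // fixed UA chosen at startup
		}
	}
	if header["Accept"] == "" {
		header["Accept"] = "*/*"
	}
	return header
}

Callers such as BrutePool.Invoke now pass pool.RandomUserAgent straight through instead of setting the User-Agent themselves, as shown in the pool hunks below.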

View File

@ -29,7 +29,7 @@ func (r *Response) Body() []byte {
if r.FastResponse != nil {
return r.FastResponse.Body()
} else if r.StandardResponse != nil {
if DefaultMaxBodySize == -1 {
if r.StandardResponse.ContentLength == -1 {
body, err := io.ReadAll(r.StandardResponse.Body)
if err != nil {
return nil
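
This is the fix for the chunked body not being read: the old code keyed the full read on DefaultMaxBodySize == -1, so chunked responses (which report ContentLength == -1) were never read in full. A sketch of the corrected branching; the non-chunked path truncating at DefaultMaxBodySize is an assumption, since it sits outside this hunk:

// readBody is a simplified stand-in for Response.Body with the standard client
func readBody(resp *http.Response, maxBodySize int64) []byte {
	if resp.ContentLength == -1 {
		// chunked / unknown length: read everything
		body, err := io.ReadAll(resp.Body)
		if err != nil {
			return nil
		}
		return body
	}
	// assumed behaviour for known lengths: cap the read at maxBodySize
	body, err := io.ReadAll(io.LimitReader(resp.Body, maxBodySize))
	if err != nil {
		return nil
	}
	return body
}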

View File

@ -1,4 +1,4 @@
package internal
package core
import (
"bufio"
@ -7,8 +7,9 @@ import (
"github.com/chainreactors/files"
"github.com/chainreactors/logs"
"github.com/chainreactors/parsers"
"github.com/chainreactors/spray/internal/ihttp"
"github.com/chainreactors/spray/internal/pool"
"github.com/chainreactors/spray/core/baseline"
"github.com/chainreactors/spray/core/ihttp"
"github.com/chainreactors/spray/core/pool"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/utils"
"github.com/chainreactors/utils/iutils"
@ -231,7 +232,7 @@ func (opt *Option) Prepare() error {
}
// initialize global variables
pkg.Distance = uint8(opt.SimhashDistance)
baseline.Distance = uint8(opt.SimhashDistance)
if opt.MaxBodyLength == -1 {
ihttp.DefaultMaxBodySize = -1
} else {
@ -263,10 +264,10 @@ func (opt *Option) NewRunner() (*Runner, error) {
r := &Runner{
Option: opt,
taskCh: make(chan *Task),
outputCh: make(chan *pkg.Baseline, 256),
outputCh: make(chan *baseline.Baseline, 256),
poolwg: &sync.WaitGroup{},
outwg: &sync.WaitGroup{},
fuzzyCh: make(chan *pkg.Baseline, 256),
fuzzyCh: make(chan *baseline.Baseline, 256),
Headers: make(map[string]string),
Total: opt.Limit,
Color: true,

View File

@ -6,7 +6,8 @@ import (
"fmt"
"github.com/chainreactors/logs"
"github.com/chainreactors/parsers"
"github.com/chainreactors/spray/internal/ihttp"
"github.com/chainreactors/spray/core/baseline"
"github.com/chainreactors/spray/core/ihttp"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/utils/iutils"
"github.com/chainreactors/words/rule"
@ -49,7 +50,7 @@ func NewBrutePool(ctx context.Context, config *Config) (*BrutePool, error) {
}),
additionCh: make(chan *Unit, config.Thread),
closeCh: make(chan struct{}),
processCh: make(chan *pkg.Baseline, config.Thread),
processCh: make(chan *baseline.Baseline, config.Thread),
wg: &sync.WaitGroup{},
},
base: u.Scheme + "://" + u.Host,
@ -271,10 +272,7 @@ func (pool *BrutePool) Invoke(v interface{}) {
return
}
req.SetHeaders(pool.Headers)
if pool.RandomUserAgent {
req.SetHeader("User-Agent", pkg.RandomUA())
}
req.SetHeaders(pool.Headers, pool.RandomUserAgent)
start := time.Now()
resp, reqerr := pool.client.Do(req)
@ -284,11 +282,11 @@ func (pool *BrutePool) Invoke(v interface{}) {
}
// compare and assorted error handling
var bl *pkg.Baseline
var bl *baseline.Baseline
if reqerr != nil && !errors.Is(reqerr, fasthttp.ErrBodyTooLarge) {
atomic.AddInt32(&pool.failedCount, 1)
atomic.AddInt32(&pool.Statistor.FailedNumber, 1)
bl = &pkg.Baseline{
bl = &baseline.Baseline{
SprayResult: &parsers.SprayResult{
UrlString: pool.base + unit.path,
ErrString: reqerr.Error(),
@ -301,15 +299,15 @@ func (pool *BrutePool) Invoke(v interface{}) {
} else { // 特定场景优化
if unit.source <= 3 || unit.source == parsers.CrawlSource || unit.source == parsers.CommonFileSource {
// high-priority sources skip PreCompare
bl = pkg.NewBaseline(req.URI(), req.Host(), resp)
bl = baseline.NewBaseline(req.URI(), req.Host(), resp)
} else if pool.MatchExpr != nil {
// if a custom match expression is set, send every result into the temp channel
bl = pkg.NewBaseline(req.URI(), req.Host(), resp)
bl = baseline.NewBaseline(req.URI(), req.Host(), resp)
} else if err = pool.PreCompare(resp); err == nil {
// pre-compare filters out useless data to cut overhead
bl = pkg.NewBaseline(req.URI(), req.Host(), resp)
bl = baseline.NewBaseline(req.URI(), req.Host(), resp)
} else {
bl = pkg.NewInvalidBaseline(req.URI(), req.Host(), resp, err.Error())
bl = baseline.NewInvalidBaseline(req.URI(), req.Host(), resp, err.Error())
}
}
@ -395,7 +393,7 @@ func (pool *BrutePool) NoScopeInvoke(v interface{}) {
logs.Log.Error(err.Error())
return
}
req.SetHeaders(pool.Headers)
req.SetHeaders(pool.Headers, pool.RandomUserAgent)
req.SetHeader("User-Agent", pkg.RandomUA())
resp, reqerr := pool.client.Do(req)
if pool.ClientType == ihttp.FAST {
@ -407,7 +405,7 @@ func (pool *BrutePool) NoScopeInvoke(v interface{}) {
return
}
if resp.StatusCode() == 200 {
bl := pkg.NewBaseline(req.URI(), req.Host(), resp)
bl := baseline.NewBaseline(req.URI(), req.Host(), resp)
bl.Source = unit.source
bl.ReqDepth = unit.depth
bl.Collect()
@ -522,7 +520,7 @@ func (pool *BrutePool) checkRedirect(redirectURL string) bool {
}
}
func (pool *BrutePool) Upgrade(bl *pkg.Baseline) error {
func (pool *BrutePool) Upgrade(bl *baseline.Baseline) error {
rurl, err := url.Parse(bl.RedirectURL)
if err == nil && rurl.Hostname() == bl.Url.Hostname() && bl.Url.Scheme == "http" && rurl.Scheme == "https" {
logs.Log.Infof("baseurl %s upgrade http to https, reinit", pool.BaseURL)
@ -579,7 +577,7 @@ func (pool *BrutePool) checkHost(u string) bool {
return true
}
func (pool *BrutePool) BaseCompare(bl *pkg.Baseline) bool {
func (pool *BrutePool) BaseCompare(bl *baseline.Baseline) bool {
if !bl.IsValid {
return false
}
@ -640,7 +638,7 @@ func (pool *BrutePool) BaseCompare(bl *pkg.Baseline) bool {
return true
}
func (pool *BrutePool) addFuzzyBaseline(bl *pkg.Baseline) {
func (pool *BrutePool) addFuzzyBaseline(bl *baseline.Baseline) {
if _, ok := pool.baselines[bl.Status]; !ok && (EnableAllFuzzy || iutils.IntsContains(pkg.FuzzyStatus, bl.Status)) {
bl.IsBaseline = true
bl.Collect()
@ -706,7 +704,7 @@ func (pool *BrutePool) doCheck() {
}
}
func (pool *BrutePool) doRedirect(bl *pkg.Baseline, depth int) {
func (pool *BrutePool) doRedirect(bl *baseline.Baseline, depth int) {
if depth >= pool.MaxRedirect {
return
}
@ -730,7 +728,7 @@ func (pool *BrutePool) doRedirect(bl *pkg.Baseline, depth int) {
}()
}
func (pool *BrutePool) doCrawl(bl *pkg.Baseline) {
func (pool *BrutePool) doCrawl(bl *baseline.Baseline) {
if !pool.Crawl || bl.ReqDepth >= pool.MaxCrawlDepth {
return
}
@ -762,7 +760,7 @@ func (pool *BrutePool) doCrawl(bl *pkg.Baseline) {
}
func (pool *BrutePool) doScopeCrawl(bl *pkg.Baseline) {
func (pool *BrutePool) doScopeCrawl(bl *baseline.Baseline) {
if bl.ReqDepth >= pool.MaxCrawlDepth {
pool.wg.Done()
return
@ -813,13 +811,13 @@ func (pool *BrutePool) doBak() {
}
}
func (pool *BrutePool) doAppend(bl *pkg.Baseline) {
func (pool *BrutePool) doAppend(bl *baseline.Baseline) {
pool.wg.Add(2)
pool.doAppendWords(bl)
pool.doAppendRule(bl)
}
func (pool *BrutePool) doAppendRule(bl *pkg.Baseline) {
func (pool *BrutePool) doAppendRule(bl *baseline.Baseline) {
if pool.AppendRule == nil || bl.Source == parsers.AppendRuleSource || bl.ReqDepth >= pool.MaxAppendDepth {
pool.wg.Done()
return
@ -840,7 +838,7 @@ func (pool *BrutePool) doAppendRule(bl *pkg.Baseline) {
}()
}
func (pool *BrutePool) doAppendWords(bl *pkg.Baseline) {
func (pool *BrutePool) doAppendWords(bl *baseline.Baseline) {
if pool.AppendWords == nil || bl.Source == parsers.AppendSource || bl.Source == parsers.RuleSource || bl.ReqDepth >= pool.MaxAppendDepth {
// prevent recursing into itself
pool.wg.Done()

View File

@ -4,7 +4,8 @@ import (
"context"
"github.com/chainreactors/logs"
"github.com/chainreactors/parsers"
"github.com/chainreactors/spray/internal/ihttp"
"github.com/chainreactors/spray/core/baseline"
"github.com/chainreactors/spray/core/ihttp"
"github.com/chainreactors/spray/pkg"
"github.com/panjf2000/ants/v2"
"net/url"
@ -32,7 +33,7 @@ func NewCheckPool(ctx context.Context, config *Config) (*CheckPool, error) {
wg: &sync.WaitGroup{},
additionCh: make(chan *Unit, 1024),
closeCh: make(chan struct{}),
processCh: make(chan *pkg.Baseline, config.Thread),
processCh: make(chan *baseline.Baseline, config.Thread),
},
}
pool.Headers = map[string]string{"Connection": "close"}
@ -115,7 +116,7 @@ func (pool *CheckPool) Invoke(v interface{}) {
req, err := ihttp.BuildRequest(pool.ctx, pool.ClientType, unit.path, "", "", "GET")
if err != nil {
logs.Log.Debug(err.Error())
bl := &pkg.Baseline{
bl := &baseline.Baseline{
SprayResult: &parsers.SprayResult{
UrlString: unit.path,
IsValid: false,
@ -127,13 +128,13 @@ func (pool *CheckPool) Invoke(v interface{}) {
pool.processCh <- bl
return
}
req.SetHeaders(pool.Headers)
req.SetHeaders(pool.Headers, pool.RandomUserAgent)
start := time.Now()
var bl *pkg.Baseline
var bl *baseline.Baseline
resp, reqerr := pool.client.Do(req)
if reqerr != nil {
pool.failedCount++
bl = &pkg.Baseline{
bl = &baseline.Baseline{
SprayResult: &parsers.SprayResult{
UrlString: unit.path,
IsValid: false,
@ -145,7 +146,7 @@ func (pool *CheckPool) Invoke(v interface{}) {
logs.Log.Debugf("%s, %s", unit.path, reqerr.Error())
pool.doUpgrade(bl)
} else {
bl = pkg.NewBaseline(req.URI(), req.Host(), resp)
bl = baseline.NewBaseline(req.URI(), req.Host(), resp)
bl.ReqDepth = unit.depth
bl.Collect()
if bl.Status == 400 {
@ -180,7 +181,7 @@ func (pool *CheckPool) Handler() {
}
}
func (pool *CheckPool) doRedirect(bl *pkg.Baseline, depth int) {
func (pool *CheckPool) doRedirect(bl *baseline.Baseline, depth int) {
if depth >= pool.MaxRedirect {
return
}
@ -209,7 +210,7 @@ func (pool *CheckPool) doRedirect(bl *pkg.Baseline, depth int) {
}
// protocol switching for raw tcp errors and 400 responses
func (pool *CheckPool) doUpgrade(bl *pkg.Baseline) {
func (pool *CheckPool) doUpgrade(bl *baseline.Baseline) {
if bl.ReqDepth >= 1 {
return
}

View File

@ -2,7 +2,7 @@ package pool
import (
"github.com/chainreactors/logs"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/spray/core/baseline"
"github.com/chainreactors/words"
"github.com/chainreactors/words/rule"
"github.com/expr-lang/expr/vm"
@ -16,9 +16,9 @@ type Config struct {
Thread int
Wordlist []string
Timeout time.Duration
ProcessCh chan *pkg.Baseline
OutputCh chan *pkg.Baseline
FuzzyCh chan *pkg.Baseline
ProcessCh chan *baseline.Baseline
OutputCh chan *baseline.Baseline
FuzzyCh chan *baseline.Baseline
Outwg *sync.WaitGroup
RateLimit int
CheckPeriod int

View File

@ -3,7 +3,8 @@ package pool
import (
"context"
"github.com/chainreactors/parsers"
"github.com/chainreactors/spray/internal/ihttp"
"github.com/chainreactors/spray/core/baseline"
"github.com/chainreactors/spray/core/ihttp"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/words"
"sync"
@ -18,7 +19,7 @@ type BasePool struct {
Cancel context.CancelFunc
client *ihttp.Client
ctx context.Context
processCh chan *pkg.Baseline // 待处理的baseline
processCh chan *baseline.Baseline // 待处理的baseline
dir string
reqCount int
failedCount int
@ -28,7 +29,7 @@ type BasePool struct {
isFallback atomic.Bool
}
func (pool *BasePool) doRetry(bl *pkg.Baseline) {
func (pool *BasePool) doRetry(bl *baseline.Baseline) {
if bl.Retry >= pool.RetryLimit {
return
}
@ -56,7 +57,7 @@ func (pool *BasePool) addAddition(u *Unit) {
pool.additionCh <- u
}
func (pool *BasePool) putToOutput(bl *pkg.Baseline) {
func (pool *BasePool) putToOutput(bl *baseline.Baseline) {
if bl.IsValid || bl.IsFuzzy {
bl.Collect()
}
@ -64,7 +65,7 @@ func (pool *BasePool) putToOutput(bl *pkg.Baseline) {
pool.OutputCh <- bl
}
func (pool *BasePool) putToFuzzy(bl *pkg.Baseline) {
func (pool *BasePool) putToFuzzy(bl *baseline.Baseline) {
pool.Outwg.Add(1)
bl.IsFuzzy = true
pool.FuzzyCh <- bl

View File

@ -2,7 +2,7 @@ package pool
import (
"github.com/chainreactors/parsers"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/spray/core/baseline"
)
func newUnit(path string, source parsers.SpraySource) *Unit {
@ -21,7 +21,7 @@ type Unit struct {
depth int
}
func (u *Unit) Update(bl *pkg.Baseline) {
func (u *Unit) Update(bl *baseline.Baseline) {
bl.Number = u.number
bl.Parent = u.parent
bl.Host = u.host
@ -31,15 +31,15 @@ func (u *Unit) Update(bl *pkg.Baseline) {
func NewBaselines() *Baselines {
return &Baselines{
baselines: map[int]*pkg.Baseline{},
baselines: map[int]*baseline.Baseline{},
}
}
type Baselines struct {
FailedBaselines []*pkg.Baseline
random *pkg.Baseline
index *pkg.Baseline
baselines map[int]*pkg.Baseline
FailedBaselines []*baseline.Baseline
random *baseline.Baseline
index *baseline.Baseline
baselines map[int]*baseline.Baseline
}
type SprayMod int

View File

@ -1,11 +1,12 @@
package internal
package core
import (
"context"
"github.com/chainreactors/files"
"github.com/chainreactors/logs"
"github.com/chainreactors/spray/internal/ihttp"
"github.com/chainreactors/spray/internal/pool"
"github.com/chainreactors/spray/core/baseline"
"github.com/chainreactors/spray/core/ihttp"
"github.com/chainreactors/spray/core/pool"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/words"
"github.com/chainreactors/words/rule"
@ -28,8 +29,8 @@ type Runner struct {
taskCh chan *Task
poolwg *sync.WaitGroup
outwg *sync.WaitGroup
outputCh chan *pkg.Baseline
fuzzyCh chan *pkg.Baseline
outputCh chan *baseline.Baseline
fuzzyCh chan *baseline.Baseline
bar *mpb.Bar
bruteMod bool
IsCheck bool
@ -254,7 +255,9 @@ Loop:
}
}
r.bar.Wait()
if r.bar != nil {
r.bar.Wait()
}
r.poolwg.Wait()
r.outwg.Wait()
}
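
The nil bar panic fix guards r.bar before waiting on it; the bar is presumably only constructed when progress output is enabled. A hypothetical construction sketch (the showProgress flag and wiring are made up; only the nil guard appears in this diff):

p := mpb.New()
var bar *mpb.Bar
if showProgress {
	// bar stays nil when progress output is disabled, hence the guard on shutdown
	bar = p.AddBar(int64(total))
}
// ... later, when shutting down:
if bar != nil {
	bar.Wait()
}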
@ -285,7 +288,7 @@ Loop:
r.outwg.Wait()
}
func (r *Runner) AddRecursive(bl *pkg.Baseline) {
func (r *Runner) AddRecursive(bl *baseline.Baseline) {
// new recursive task
task := &Task{
baseUrl: bl.UrlString,
@ -361,7 +364,7 @@ func (r *Runner) saveStat(content string) {
}
}
func (r *Runner) Output(bl *pkg.Baseline) {
func (r *Runner) Output(bl *baseline.Baseline) {
var out string
if r.Option.Json {
out = bl.ToJson()

View File

@ -1,4 +1,4 @@
package internal
package core
import (
"fmt"

View File

@ -1,4 +1,4 @@
package internal
package core
import (
"github.com/chainreactors/spray/pkg"

View File

@ -87,7 +87,8 @@ var (
"Mozilla/5.0 (compatible; bingbot/2.0; +http://www.bing.com/bingbot.htm)",
"Mozilla/5.0 (compatible; Yahoo! Slurp; http://help.yahoo.com/help/us/ysearch/slurp)",
}
uacount = len(randomUserAgent)
uacount = len(randomUserAgent)
DefaultUserAgent = randomUserAgent[rand.Intn(uacount)]
)
type BS []byte
@ -327,12 +328,6 @@ func Dir(u string) string {
}
}
func UniqueHash(bl *Baseline) uint16 {
// hash composed of host + status code + redirect url + content-type + title + body length rounded down to a multiple of 10
// body length may still cause some false positives; there is no better solution for now
return CRC16Hash([]byte(bl.Host + strconv.Itoa(bl.Status) + bl.RedirectURL + bl.ContentType + bl.Title + strconv.Itoa(bl.BodyLength/10*10)))
}
func FormatURL(base, u string) string {
if strings.HasPrefix(u, "http") {
parsed, err := url.Parse(u)