Merge pull request #76 from chainreactors/dev

merge v1.1.3
M09Ic 2024-09-28 10:58:07 +08:00 committed by GitHub
commit 2a68d0b49b
GPG Key ID: B5690EEEBB952194
6 changed files with 22 additions and 14 deletions


@@ -61,6 +61,8 @@ output:
 no-color: false
 # Bool, No progress bar
 no-bar: false
+# Bool, No stat
+no-stat: true
 plugins:
 # Bool, enable all plugin
 all: false
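The new no-stat key pairs with the opt.NoStat checks added in option.go further down. As a hedged sketch, a key like this would be bound with the same flag/config tag scheme used by the other options in this diff; the struct name and tag text below are illustrative assumptions, not spray's actual definitions.

package core

// Illustrative only: a stand-in showing how a no-stat key could map onto an
// option field; spray's real field lives in its own options struct.
type StatOptions struct {
	NoStat bool `long:"no-stat" description:"Bool, No stat" config:"no-stat"`
}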


@@ -47,8 +47,8 @@ func NewClient(config *ClientConfig) *Client {
 MaxConnsPerHost: config.Thread * 3 / 2,
 MaxIdleConnDuration: config.Timeout,
 //MaxConnWaitTimeout: time.Duration(timeout) * time.Second,
-ReadTimeout: config.Timeout * time.Second,
-WriteTimeout: config.Timeout * time.Second,
+ReadTimeout: config.Timeout,
+WriteTimeout: config.Timeout,
 ReadBufferSize: 16384, // 16k
 MaxResponseBodySize: int(DefaultMaxBodySize),
 NoDefaultUserAgentHeader: true,
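Dropping the "* time.Second" suggests the timeout is now carried end to end as a time.Duration rather than an integer number of seconds (the checkpool hunk below passes config.Timeout straight through as well). A minimal sketch of why the old expression is wrong once the field is a Duration, assuming a Duration-typed Timeout:

package main

import (
	"fmt"
	"time"
)

// Multiplying a time.Duration by time.Second scales it by 1e9.
func main() {
	timeout := 5 * time.Second // config.Timeout as a Duration

	fmt.Println(timeout)               // 5s: what the fasthttp client should receive
	fmt.Println(timeout * time.Second) // ~158 years: the old expression applied to a Duration
}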


@@ -97,7 +97,7 @@ type OutputOptions struct {
 type RequestOptions struct {
 Method string `short:"x" long:"method" default:"GET" description:"String, request method, e.g.: --method POST" config:"method"`
-Headers []string `long:"header" description:"Strings, custom headers, e.g.: --headers 'Auth: example_auth'" config:"headers"`
+Headers []string `long:"header" description:"Strings, custom headers, e.g.: --header 'Auth: example_auth'" config:"headers"`
 UserAgent string `long:"user-agent" description:"String, custom user-agent, e.g.: --user-agent Custom" config:"useragent"`
 RandomUserAgent bool `long:"random-agent" description:"Bool, use random with default user-agent" config:"random-useragent"`
 Cookie []string `long:"cookie" description:"Strings, custom cookie" config:"cookies"`
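The corrected description only fixes the generated help text; the flag itself is still registered from the long:"header" tag. The tag style matches github.com/jessevdk/go-flags, used below as an assumed stand-in for the project's actual option wiring; the struct is a cut-down illustration, and the config:"..." tag from the diff is consumed by spray's own config loader, which this sketch does not model.

package main

import (
	"fmt"

	"github.com/jessevdk/go-flags"
)

// Cut-down stand-in for RequestOptions: only the short/long/default/description
// tags are meaningful to go-flags.
type requestOpts struct {
	Method  string   `short:"x" long:"method" default:"GET" description:"String, request method, e.g.: --method POST"`
	Headers []string `long:"header" description:"Strings, custom headers, e.g.: --header 'Auth: example_auth'"`
}

func main() {
	var opts requestOpts
	// --header can be repeated; every occurrence appends to the slice.
	if _, err := flags.ParseArgs(&opts, []string{"-x", "POST", "--header", "Auth: example_auth"}); err != nil {
		panic(err)
	}
	fmt.Println(opts.Method, opts.Headers) // POST [Auth: example_auth]
}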
@@ -393,7 +393,9 @@ func (opt *Option) NewRunner() (*Runner, error) {
 r.Probes = strings.Split(opt.OutputProbe, ",")
 }
-fmt.Println(opt.PrintConfig(r))
+if !opt.Quiet {
+fmt.Println(opt.PrintConfig(r))
+}
 // init output file
 if opt.OutputFile != "" {
@@ -421,14 +423,13 @@ func (opt *Option) NewRunner() (*Runner, error) {
 }
 if opt.ResumeFrom != "" {
 r.StatFile, err = files.NewFile(opt.ResumeFrom, false, true, true)
-} else {
-r.StatFile, err = files.NewFile(pkg.SafeFilename(r.Tasks.Name)+".stat", false, true, true)
 }
 if err != nil {
 return nil, err
 }
+if !opt.NoStat {
+r.StatFile, err = files.NewFile(pkg.SafeFilename(r.Tasks.Name)+".stat", false, true, true)
 r.StatFile.Mod = os.O_WRONLY | os.O_CREATE
 err = r.StatFile.Init()
 if err != nil {
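Read together with the no-stat config key above, the intent of this hunk is that a resume file is still opened when opt.ResumeFrom is set, while the per-task .stat file is only created when stat output is enabled. A hedged sketch of that control flow, using the standard library in place of the chainreactors/files helpers; names and signatures here are illustrative, not spray's API:

package core

import "os"

// Sketch of the gating this hunk introduces.
func openStatFile(resumeFrom, taskName string, noStat bool) (*os.File, error) {
	if resumeFrom != "" {
		// resuming: reopen the previously written stat file
		return os.OpenFile(resumeFrom, os.O_WRONLY|os.O_CREATE, 0644)
	}
	if noStat {
		// no-stat enabled: skip the per-task .stat file entirely
		return nil, nil
	}
	return os.OpenFile(taskName+".stat", os.O_WRONLY|os.O_CREATE, 0644)
}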


@@ -268,7 +268,7 @@ func (pool *BrutePool) Invoke(v interface{}) {
 var req *ihttp.Request
 var err error
-req, err = ihttp.BuildRequest(pool.ctx, pool.ClientType, pool.BaseURL, unit.path, unit.host, pool.Method)
+req, err = ihttp.BuildRequest(pool.ctx, pool.ClientType, pool.base, unit.path, unit.host, pool.Method)
 if err != nil {
 logs.Log.Error(err.Error())
 return
@@ -465,8 +465,6 @@ func (pool *BrutePool) Handler() {
 }
 if ok {
-pool.Statistor.FoundNumber++
-// uniqueness check
 if EnableAllUnique || iutils.IntsContains(pkg.UniqueStatus, bl.Status) {
 if _, ok := pool.uniques[bl.Unique]; ok {
@@ -495,6 +493,7 @@ func (pool *BrutePool) Handler() {
 // recursion requires: bl is valid, mod is path-spray, and the current depth is below the max recursion depth
 if bl.IsValid {
+pool.Statistor.FoundNumber++
 if bl.RecuDepth < MaxRecursion {
 if pkg.CompareWithExpr(pool.RecuExpr, params) {
 bl.Recu = true
@@ -702,10 +701,10 @@ func (pool *BrutePool) addFuzzyBaseline(bl *pkg.Baseline) {
 }
 }
-func (pool *BrutePool) recover() {
+func (pool *BrutePool) fallback() {
 logs.Log.Errorf("%s ,failed request exceeds the threshold , task will exit. Breakpoint %d", pool.BaseURL, pool.wordOffset)
 for i, bl := range pool.FailedBaselines {
-if i > int(pool.BreakThreshold) {
+if i > 5 {
 break
 }
 logs.Log.Errorf("[failed.%d] %s", i, bl.String())
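Besides the clearer name, the renamed fallback now caps the dump of failed baselines at a small constant instead of scaling with the configurable BreakThreshold. A toy sketch of that capped report, with plain strings standing in for the pool's failed baselines:

package main

import "fmt"

// Toy version of the capped failure report fallback() produces.
func dumpFailed(failed []string, max int) {
	for i, f := range failed {
		if i > max {
			break
		}
		fmt.Printf("[failed.%d] %s\n", i, f)
	}
}

func main() {
	dumpFailed([]string{"a", "b", "c", "d", "e", "f", "g", "h"}, 5) // prints entries 0 through 5, then stops
}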
@@ -718,7 +717,7 @@ func (pool *BrutePool) Close() {
 time.Sleep(time.Duration(100) * time.Millisecond)
 }
 close(pool.additionCh) // close the addition channel
-close(pool.checkCh) // close the check channel
+//close(pool.checkCh) // close the check channel
 pool.Statistor.EndTime = time.Now().Unix()
 pool.reqPool.Release()
 pool.scopePool.Release()
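Commenting out close(pool.checkCh) reads as a defensive change: if any worker can still push a check unit while Close() runs, closing the channel would panic, and a channel does not need to be closed to be reclaimed with the pool. A minimal, self-contained illustration of the underlying Go rule, with a toy channel standing in for checkCh; this is not a claim about spray's exact goroutine layout:

package main

import (
	"fmt"
	"sync"
)

func main() {
	checkCh := make(chan int, 1)
	var wg sync.WaitGroup

	close(checkCh) // shutdown path closes the channel early

	wg.Add(1)
	go func() {
		defer wg.Done()
		defer func() {
			if r := recover(); r != nil {
				fmt.Println("worker panicked:", r) // send on closed channel
			}
		}()
		checkCh <- 1 // a late check unit arriving after Close()
	}()
	wg.Wait()
}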
@@ -741,7 +740,11 @@ func (pool *BrutePool) resetFailed() {
 func (pool *BrutePool) doCheck() {
 if pool.failedCount > pool.BreakThreshold {
 // when the number of failures exceeds the threshold, end the task
-pool.recover()
+if pool.isFallback.Load() {
+return
+}
+pool.isFallback.Store(true)
+pool.fallback()
 pool.Cancel()
 pool.IsFailed = true
 return
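The new isFallback guard (the field is added to BasePool in the last file) keeps the fallback path from running more than once when doCheck fires from several workers at the same time. The Load-then-Store pair shown here still leaves a narrow window in which two goroutines can both observe false; atomic.Bool also provides CompareAndSwap, which closes it. A hedged sketch of that variant, with stand-in types rather than spray's:

package main

import (
	"fmt"
	"sync"
	"sync/atomic"
)

// Once-only fallback guard: CompareAndSwap folds the check and the set into a
// single atomic step, so exactly one concurrent caller wins.
type pool struct {
	isFallback atomic.Bool
}

func (p *pool) doCheck(fallback func()) {
	if !p.isFallback.CompareAndSwap(false, true) {
		return // another worker already triggered the fallback
	}
	fallback()
}

func main() {
	var p pool
	var wg sync.WaitGroup
	var calls atomic.Int32

	for i := 0; i < 8; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			p.doCheck(func() { calls.Add(1) })
		}()
	}
	wg.Wait()
	fmt.Println("fallback ran", calls.Load(), "times") // always 1
}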


@@ -26,7 +26,7 @@ func NewCheckPool(ctx context.Context, config *Config) (*CheckPool, error) {
 client: ihttp.NewClient(&ihttp.ClientConfig{
 Thread: config.Thread,
 Type: config.ClientType,
-Timeout: time.Duration(config.Timeout) * time.Second,
+Timeout: config.Timeout,
 ProxyAddr: config.ProxyAddr,
 }),
 wg: &sync.WaitGroup{},


@@ -7,6 +7,7 @@ import (
 "github.com/chainreactors/spray/pkg"
 "github.com/chainreactors/words"
 "sync"
+"sync/atomic"
 )
 type BasePool struct {
@@ -24,6 +25,7 @@ type BasePool struct {
 additionCh chan *Unit
 closeCh chan struct{}
 wg *sync.WaitGroup
+isFallback atomic.Bool
 }
 func (pool *BasePool) doRedirect(bl *pkg.Baseline, depth int) {