Merge pull request #52 from chainreactors/dev

merge v1.0.0
This commit is contained in:
M09Ic 2024-07-29 17:08:42 +08:00 committed by GitHub
commit 3791b765ea
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
17 changed files with 866 additions and 593 deletions

View File

@ -7,9 +7,7 @@ import (
"github.com/chainreactors/logs"
"github.com/chainreactors/spray/internal"
"github.com/chainreactors/spray/internal/ihttp"
"github.com/chainreactors/spray/internal/pool"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/utils/iutils"
"github.com/jessevdk/go-flags"
"os"
"os/signal"
@ -17,7 +15,7 @@ import (
"time"
)
var ver = "v0.9.6"
var ver = "v1.0.0"
var DefaultConfig = "config.yaml"
func init() {
@ -113,27 +111,13 @@ func Spray() {
return
}
err = pkg.Load()
err = option.Prepare()
if err != nil {
iutils.Fatal(err.Error())
logs.Log.Errorf(err.Error())
return
}
// 初始化全局变量
pkg.Distance = uint8(option.SimhashDistance)
if option.MaxBodyLength == -1 {
ihttp.DefaultMaxBodySize = -1
} else {
ihttp.DefaultMaxBodySize = option.MaxBodyLength * 1024
}
pool.MaxCrawl = option.CrawlDepth
var runner *internal.Runner
if option.ResumeFrom != "" {
runner, err = option.PrepareRunner()
} else {
runner, err = option.PrepareRunner()
}
runner, err := option.NewRunner()
if err != nil {
logs.Log.Errorf(err.Error())
return
@ -151,18 +135,29 @@ func Spray() {
}
go func() {
c := make(chan os.Signal, 2)
signal.Notify(c, os.Interrupt, syscall.SIGTERM)
exitChan := make(chan os.Signal, 2)
signal.Notify(exitChan, os.Interrupt, syscall.SIGTERM)
go func() {
<-c
logs.Log.Important("exit signal, save stat and exit")
sigCount := 0
for {
<-exitChan
sigCount++
if sigCount == 1 {
logs.Log.Infof("Exit signal received, saving task and exiting...")
canceler()
} else if sigCount == 2 {
logs.Log.Infof("forcing exit...")
os.Exit(1)
}
}
}()
}()
if runner.CheckOnly {
if runner.IsCheck {
runner.RunWithCheck(ctx)
} else {
runner.Run(ctx)
}
time.Sleep(1 * time.Second)
}

View File

@ -101,7 +101,7 @@ mode:
# Bool, skip error break
force: false
# Bool, check only
check-only: false
default: false
# Bool, no scope
no-scope: false
# String, custom scope, e.g.: --scope *.example.com
@ -132,7 +132,7 @@ mode:
unique: false
# Int, retry count
retry: 0
distance: 5
sim-distance: 5
misc:
# String, path/host spray
mod: path

8
go.mod
View File

@ -5,11 +5,11 @@ go 1.22
toolchain go1.22.2
require (
github.com/chainreactors/files v0.0.0-20231123083421-cea5b4ad18a8
github.com/chainreactors/fingers v0.0.0-20240704063230-de8fec05ff8b
github.com/chainreactors/files v0.0.0-20240716182835-7884ee1e77f0
github.com/chainreactors/fingers v0.0.0-20240716172449-2fc3147b9c2a
github.com/chainreactors/logs v0.0.0-20240207121836-c946f072f81f
github.com/chainreactors/parsers v0.0.0-20240704071623-9d0ee90230a6
github.com/chainreactors/utils v0.0.0-20240704062557-662d623b74f4
github.com/chainreactors/parsers v0.0.0-20240708072709-07deeece7ce2
github.com/chainreactors/utils v0.0.0-20240716182459-e85f2b01ee16
github.com/chainreactors/words v0.4.1-0.20240510105042-5ba5c2edc508
github.com/expr-lang/expr v1.16.9
github.com/gookit/config/v2 v2.2.5

48
go.sum
View File

@ -81,53 +81,23 @@ github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XL
github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/chainreactors/files v0.0.0-20230731174853-acee21c8c45a/go.mod h1:/Xa9YXhjBlaC33JTD6ZTJFig6pcplak2IDcovf42/6A=
github.com/chainreactors/files v0.0.0-20231102192550-a652458cee26/go.mod h1:/Xa9YXhjBlaC33JTD6ZTJFig6pcplak2IDcovf42/6A=
github.com/chainreactors/files v0.0.0-20231123083421-cea5b4ad18a8 h1:8Plpi6haQbU8NzH+JtU6bkGDWF/OeC+GFj8DIDuY5yk=
github.com/chainreactors/files v0.0.0-20231123083421-cea5b4ad18a8/go.mod h1:/Xa9YXhjBlaC33JTD6ZTJFig6pcplak2IDcovf42/6A=
github.com/chainreactors/fingers v0.0.0-20240304115656-fa8ca9fc375f/go.mod h1:cO2a79lRNSaM6hu17xIyws5eWCWxjcRxY9IFPlss2lE=
github.com/chainreactors/fingers v0.0.0-20240603064620-e83951a40541 h1:aDY5A+G53En6t3Pr4tbl+vxJle2p1VJsqHVlyhnWU8s=
github.com/chainreactors/fingers v0.0.0-20240603064620-e83951a40541/go.mod h1:s3lvNYcSW7NfM1inpgyn/wY3UEqQIvp6gE6BthFaOVo=
github.com/chainreactors/fingers v0.0.0-20240628163007-c08576f96117 h1:EJqE2nST/CigTcfexGLdoovYP46LFsJrcYknT8DD5qA=
github.com/chainreactors/fingers v0.0.0-20240628163007-c08576f96117/go.mod h1:P9RWZA2j7AALdNbpsZmxBtDa96y8CRpC4mRmwt9PwE8=
github.com/chainreactors/fingers v0.0.0-20240628164555-606f64752b5c h1:DWcMYWDzHWJ8kgQoAV6Z6/gN047o3BeKttFDPMM1xok=
github.com/chainreactors/fingers v0.0.0-20240628164555-606f64752b5c/go.mod h1:P9RWZA2j7AALdNbpsZmxBtDa96y8CRpC4mRmwt9PwE8=
github.com/chainreactors/fingers v0.0.0-20240628165118-3f8dcb553f8b h1:1KeC7d+Pc5GPerODPPtT0C2mnGI/y+SC+7wYQGLGNsI=
github.com/chainreactors/fingers v0.0.0-20240628165118-3f8dcb553f8b/go.mod h1:P9RWZA2j7AALdNbpsZmxBtDa96y8CRpC4mRmwt9PwE8=
github.com/chainreactors/fingers v0.0.0-20240628165313-8c7e41bf9ab5 h1:tmr3wbiwZZ+d8pgmD7LdjRY5Qp8bDm1STU0u3ADazRI=
github.com/chainreactors/fingers v0.0.0-20240628165313-8c7e41bf9ab5/go.mod h1:P9RWZA2j7AALdNbpsZmxBtDa96y8CRpC4mRmwt9PwE8=
github.com/chainreactors/fingers v0.0.0-20240628190949-7257c400d3da h1:5kX/KxHFFjKzpZ7H8Ofdu7ukyrj7IXURhToWpeL3ID0=
github.com/chainreactors/fingers v0.0.0-20240628190949-7257c400d3da/go.mod h1:P9RWZA2j7AALdNbpsZmxBtDa96y8CRpC4mRmwt9PwE8=
github.com/chainreactors/fingers v0.0.0-20240701103336-582e82977506 h1:hH7PXBn+nMU0uBFn7PYVqJagZM7EsP3X3RwAcqaTQ8U=
github.com/chainreactors/fingers v0.0.0-20240701103336-582e82977506/go.mod h1:l8AO6ZbIL8WQ8PkihCK/MD6Iww/O+LY/osAhRJjThs4=
github.com/chainreactors/fingers v0.0.0-20240701112353-ecc7954b4673 h1:ezIijqDJmoKFVmXxnUufUzKo/HBxQxp6QnQZvHxaBaY=
github.com/chainreactors/fingers v0.0.0-20240701112353-ecc7954b4673/go.mod h1:l8AO6ZbIL8WQ8PkihCK/MD6Iww/O+LY/osAhRJjThs4=
github.com/chainreactors/fingers v0.0.0-20240702104653-a66e34aa41df h1:1SrOHwdlU+X3/hxViU1ZCcwO+KRvSmJGtTstSph5xL0=
github.com/chainreactors/files v0.0.0-20240716182835-7884ee1e77f0 h1:cU3sGEODXZsUZGBXfnz0nyxF6+37vA+ZGDx6L/FKN4o=
github.com/chainreactors/files v0.0.0-20240716182835-7884ee1e77f0/go.mod h1:NSxGNMRWryAyrDzZpVwmujI22wbGw6c52bQOd5zEvyU=
github.com/chainreactors/fingers v0.0.0-20240702104653-a66e34aa41df/go.mod h1:l8AO6ZbIL8WQ8PkihCK/MD6Iww/O+LY/osAhRJjThs4=
github.com/chainreactors/fingers v0.0.0-20240704063230-de8fec05ff8b h1:vqkkXIyeD0PGvWdJuKxinjEerfh8Rov/PqoPlnLVt0E=
github.com/chainreactors/fingers v0.0.0-20240704063230-de8fec05ff8b/go.mod h1:vQ/LJzHnMdxbK6n1PwqZmvgPudfNpoQsyFAPdt3IlBo=
github.com/chainreactors/fingers v0.0.0-20240716172449-2fc3147b9c2a h1:5l4i8TdHRlz088J5xZM30yvTUMLVcWJ6iXiO/VyD3ro=
github.com/chainreactors/fingers v0.0.0-20240716172449-2fc3147b9c2a/go.mod h1:R03soobTE/AnZWtFgfQVYNM5QLH52NZ946wZTJVBXh4=
github.com/chainreactors/logs v0.0.0-20231027080134-7a11bb413460/go.mod h1:VZFqkFDGmp7/JOMeraW+YI7kTGcgz9fgc/HArVFnrGQ=
github.com/chainreactors/logs v0.0.0-20240207121836-c946f072f81f h1:tcfp+CEdgiMvjyUzWab5edJtxUwRMSMEIkLybupIx0k=
github.com/chainreactors/logs v0.0.0-20240207121836-c946f072f81f/go.mod h1:6Mv6W70JrtL6VClulZhmMRZnoYpcTahcDTKLMNEjK0o=
github.com/chainreactors/parsers v0.0.0-20240422094636-b88693700dfc h1:lGgglOE1FGWD7gVZuF0cufxd7i9HJ2gltUewxXCfvs4=
github.com/chainreactors/parsers v0.0.0-20240422094636-b88693700dfc/go.mod h1:BuI21VlpmYHFr1jva/IN5I5jFvvCtYRyeldGK80wYCg=
github.com/chainreactors/parsers v0.0.0-20240628194456-a176ea53b412 h1:GobKh1fztHQvslp8Ya6AgduYQshc1+CaomLYAf37yK8=
github.com/chainreactors/parsers v0.0.0-20240628194456-a176ea53b412/go.mod h1:0zPZn7glHadCyL2lvThINGU57UrLS7Mf+gf/kFW+k28=
github.com/chainreactors/parsers v0.0.0-20240628195954-201bdc68bdf8 h1:e0RDG17t6kcwOg4CLmwygn9Fh0alYa1GjxtRAcgm+so=
github.com/chainreactors/parsers v0.0.0-20240628195954-201bdc68bdf8/go.mod h1:0zPZn7glHadCyL2lvThINGU57UrLS7Mf+gf/kFW+k28=
github.com/chainreactors/parsers v0.0.0-20240701110332-2f9057256d29 h1:0RJnI9nDGao3LUSnqWhcg0ELex4RUlVvGY+SfAMuDSQ=
github.com/chainreactors/parsers v0.0.0-20240701110332-2f9057256d29/go.mod h1:91mj0+pHkKsXd1SGD6+jVW1tl6W25TuXgoESwK5dJh8=
github.com/chainreactors/parsers v0.0.0-20240702104902-1ce563b7ef76 h1:i4sHuonM50X/Tbgl6kNd7CYJqfRxgDUORIphcSuj4Bs=
github.com/chainreactors/parsers v0.0.0-20240702104902-1ce563b7ef76/go.mod h1:G/XLE5RAaUdqADkbhQ59mPrUAbsJLiQ2DN6CwtwNpBQ=
github.com/chainreactors/parsers v0.0.0-20240704062910-decf861def9e h1:42ILX5kS76M1D9IQvXgfelpgUJDi/K+4/egE0tLzuSE=
github.com/chainreactors/parsers v0.0.0-20240704062910-decf861def9e/go.mod h1:7rXdYz6jrdjF0WUH1ICcAXKIKKjKmJo2PU8u43V7jkA=
github.com/chainreactors/parsers v0.0.0-20240704071443-8a8558f34cf9 h1:XxPUVhP29vnbLuhxFt8VT3eyBR8d/GfHR7YK44zyDVo=
github.com/chainreactors/parsers v0.0.0-20240704071443-8a8558f34cf9/go.mod h1:7rXdYz6jrdjF0WUH1ICcAXKIKKjKmJo2PU8u43V7jkA=
github.com/chainreactors/parsers v0.0.0-20240704071623-9d0ee90230a6 h1:jUxPo0RJ/f+/4x3ydeXqCeMq5VbvYBjtmpBePWFfNc8=
github.com/chainreactors/parsers v0.0.0-20240704071623-9d0ee90230a6/go.mod h1:7rXdYz6jrdjF0WUH1ICcAXKIKKjKmJo2PU8u43V7jkA=
github.com/chainreactors/utils v0.0.0-20240302165634-2b8494c9cfc3/go.mod h1:JA4eiQZm+7AsfjXBcIzIdVKBEhDCb16eNtWFCGTxlvs=
github.com/chainreactors/utils v0.0.0-20240528085651-ba1b255482c1 h1:+awuysRKLmdLQbVK+HPSOGvO3dFGdNSbM2jyLh+VYOA=
github.com/chainreactors/parsers v0.0.0-20240708072709-07deeece7ce2 h1:sE3SChgHLtPsEaqHo5tDSy8niDys1SO174C4eHlShSw=
github.com/chainreactors/parsers v0.0.0-20240708072709-07deeece7ce2/go.mod h1:7rXdYz6jrdjF0WUH1ICcAXKIKKjKmJo2PU8u43V7jkA=
github.com/chainreactors/utils v0.0.0-20240528085651-ba1b255482c1/go.mod h1:JA4eiQZm+7AsfjXBcIzIdVKBEhDCb16eNtWFCGTxlvs=
github.com/chainreactors/utils v0.0.0-20240704062557-662d623b74f4 h1:pW7yzxGD19ykkWnKkuJ8oq+hLt1WuTq9HP+cJZibip8=
github.com/chainreactors/utils v0.0.0-20240704062557-662d623b74f4/go.mod h1:JA4eiQZm+7AsfjXBcIzIdVKBEhDCb16eNtWFCGTxlvs=
github.com/chainreactors/utils v0.0.0-20240715080349-d2d0484c95ed/go.mod h1:LajXuvESQwP+qCMAvlcoSXppQCjuLlBrnQpu9XQ1HtU=
github.com/chainreactors/utils v0.0.0-20240716182459-e85f2b01ee16 h1:TCOshCp7PrWqhP/HSAM5kT3VxoOe7EoJbRseyoSX3RM=
github.com/chainreactors/utils v0.0.0-20240716182459-e85f2b01ee16/go.mod h1:LajXuvESQwP+qCMAvlcoSXppQCjuLlBrnQpu9XQ1HtU=
github.com/chainreactors/words v0.4.1-0.20240510105042-5ba5c2edc508 h1:iT4HWkoZzUAfQYcQMRH8XyrMau9tCVE0zSuFQnkhrqw=
github.com/chainreactors/words v0.4.1-0.20240510105042-5ba5c2edc508/go.mod h1:DUDx7PdsMEm5PvVhzkFyppzpiUhQb8dOJaWjVc1SMVk=
github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=

181
internal/finger.go Normal file
View File

@ -0,0 +1,181 @@
package internal
import (
"fmt"
"github.com/chainreactors/files"
"github.com/chainreactors/fingers"
"github.com/chainreactors/fingers/resources"
"github.com/chainreactors/logs"
"github.com/chainreactors/utils/encode"
"github.com/chainreactors/utils/iutils"
"io"
"net/http"
"os"
"path/filepath"
"strings"
)
var (
	// DefaultFingerPath is the default directory holding 3rd-party finger databases.
	DefaultFingerPath = "fingers"
	// DefaultFingerTemplate is the default directory for finger templates (currently unused, see commented-out option).
	DefaultFingerTemplate = "fingers/templates"
	// FingerConfigs maps each finger engine name to its gzip-compressed database file name.
	FingerConfigs = map[string]string{
		fingers.FingersEngine:     "fingers_http.json.gz",
		fingers.FingerPrintEngine: "fingerprinthub_v3.json.gz",
		fingers.WappalyzerEngine:  "wappalyzer.json.gz",
		fingers.EHoleEngine:       "ehole.json.gz",
		fingers.GobyEngine:        "goby.json.gz",
	}
	// baseURL is the remote location the finger databases are downloaded from.
	baseURL = "https://raw.githubusercontent.com/chainreactors/fingers/master/resources/"
)
// FingerOptions groups the command-line/config options that control
// fingerprint detection and maintenance of the local finger databases.
type FingerOptions struct {
	Finger       bool   `long:"finger" description:"Bool, enable active finger detect" config:"finger"`
	FingerUpdate bool   `long:"update" description:"Bool, update finger database" config:"update"`
	FingerPath   string `long:"finger-path" default:"fingers" description:"String, 3rd finger config path" config:"finger-path"`
	//FingersTemplatesPath string `long:"finger-template" default:"fingers/templates" description:"Bool, use finger templates path" config:"finger-template"`
	FingerEngines string `long:"finger-engine" default:"all" description:"String, custom finger engine, e.g. --finger-engine ehole,goby" config:"finger-engine"`
}
// Validate checks the finger-related options.
//
// When --update is set it makes sure the finger database directory
// exists (creating opt.FingerPath, or falling back to the default
// "fingers" directory). It also verifies that every engine listed in
// --finger-engine is a known engine name.
func (opt *FingerOptions) Validate() error {
	var err error
	if opt.FingerUpdate {
		if opt.FingerPath != DefaultFingerPath && !files.IsExist(opt.FingerPath) {
			err = os.MkdirAll(opt.FingerPath, 0755)
			if err != nil {
				return err
			}
		} else if !files.IsExist(DefaultFingerPath) {
			opt.FingerPath = DefaultFingerPath
			err = os.MkdirAll(DefaultFingerPath, 0755)
			if err != nil {
				return err
			}
		}
		//if opt.FingersTemplatesPath != DefaultFingerTemplate && !files.IsExist(opt.FingersTemplatesPath) {
		//	err = os.MkdirAll(opt.FingersTemplatesPath, 0755)
		//	if err != nil {
		//		return err
		//	}
		//} else if !files.IsExist(DefaultFingerTemplate) {
		//	err = os.MkdirAll(DefaultFingerTemplate, 0755)
		//	if err != nil {
		//		return err
		//	}
		//}
	}

	if opt.FingerEngines != "all" {
		for _, name := range strings.Split(opt.FingerEngines, ",") {
			if !iutils.StringsContains(fingers.AllEngines, name) {
				// BUG FIX: the message previously printed fingers.FingersEngine
				// (one engine's name) instead of the list of valid engines.
				return fmt.Errorf("invalid finger engine: %s, please input one of %v", name, fingers.AllEngines)
			}
		}
	}
	return nil
}
// LoadLocalFingerConfig replaces the embedded finger databases with
// local copies whenever a local file's MD5 differs from the embedded
// checksum recorded in resources.CheckSum.
//
// NOTE(review): files are read via the bare file name (e.g.
// "fingers_http.json.gz", i.e. relative to the working directory),
// while downloadConfig saves updates under opt.FingerPath — confirm
// whether this read path should be joined with opt.FingerPath too.
func (opt *FingerOptions) LoadLocalFingerConfig() error {
	for name, fingerPath := range FingerConfigs {
		// A missing local file is silently skipped: the embedded data stays in use.
		if content, err := os.ReadFile(fingerPath); err == nil {
			if encode.Md5Hash(content) != resources.CheckSum[name] {
				logs.Log.Importantf("found %s difference, use %s replace embed", name, fingerPath)
				switch name {
				case fingers.FingersEngine:
					resources.FingersHTTPData = content
				case fingers.FingerPrintEngine:
					resources.Fingerprinthubdata = content
				case fingers.EHoleEngine:
					resources.EholeData = content
				case fingers.GobyEngine:
					resources.GobyData = content
				case fingers.WappalyzerEngine:
					resources.WappalyzerData = content
				default:
					return fmt.Errorf("unknown engine name")
				}
			} else {
				logs.Log.Infof("%s config is up to date", name)
			}
		}
	}
	return nil
}
// UpdateFinger downloads the latest database for every known finger
// engine and logs when everything was already up to date.
func (opt *FingerOptions) UpdateFinger() error {
	modified := false
	// for name, _ := range is non-idiomatic; drop the unused value.
	for name := range FingerConfigs {
		ok, err := opt.downloadConfig(name)
		if err != nil {
			return err
		}
		if ok {
			modified = true
		}
	}
	if !modified {
		logs.Log.Importantf("everything is up to date")
	}
	return nil
}
// downloadConfig fetches the database file for the named engine from
// baseURL and stores it under opt.FingerPath (relative to the
// executable's directory). It returns true when the on-disk file was
// created or changed.
//
// Fixes over the previous version:
//   - the io.ReadAll error is now checked before content is used
//     (it was only inspected after several writes, and shadowed inside
//     the branches);
//   - a fresh download now reports modified=true (previously the
//     new-file branch fell through to a re-read that always compared
//     equal, so UpdateFinger's modified flag was never set);
//   - the redundant os.Create before os.WriteFile is gone.
func (opt *FingerOptions) downloadConfig(name string) (bool, error) {
	fingerFile, ok := FingerConfigs[name]
	if !ok {
		return false, fmt.Errorf("unknown engine name")
	}
	url := baseURL + fingerFile
	resp, err := http.Get(url)
	if err != nil {
		return false, err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return false, fmt.Errorf("bad status: %s", resp.Status)
	}
	content, err := io.ReadAll(resp.Body)
	if err != nil {
		return false, err
	}

	filePath := filepath.Join(files.GetExcPath(), opt.FingerPath, fingerFile)
	if origin, err := os.ReadFile(filePath); err == nil {
		// Existing file: rewrite only when the downloaded content differs.
		if encode.Md5Hash(content) == encode.Md5Hash(origin) {
			return false, nil
		}
		logs.Log.Importantf("update %s config from %s save to %s", name, url, fingerFile)
		if err := os.WriteFile(filePath, content, 0644); err != nil {
			return false, err
		}
		return true, nil
	}

	// First download: write the file and report it as a modification.
	logs.Log.Importantf("download %s config from %s save to %s", name, url, fingerFile)
	if err := os.WriteFile(filePath, content, 0644); err != nil {
		return false, err
	}
	return true, nil
}

View File

@ -5,7 +5,7 @@ import (
"encoding/json"
"github.com/chainreactors/logs"
"github.com/chainreactors/spray/pkg"
"io/ioutil"
"io"
"os"
)
@ -13,9 +13,9 @@ func Format(filename string, color bool) {
var content []byte
var err error
if filename == "stdin" {
content, err = ioutil.ReadAll(os.Stdin)
content, err = io.ReadAll(os.Stdin)
} else {
content, err = ioutil.ReadFile(filename)
content, err = os.ReadFile(filename)
}
if err != nil {

View File

@ -39,6 +39,7 @@ type Option struct {
FunctionOptions `group:"Function Options" config:"functions" `
OutputOptions `group:"Output Options" config:"output"`
PluginOptions `group:"Plugin Options" config:"plugins"`
FingerOptions `group:"Finger Options" config:"finger"`
RequestOptions `group:"Request Options" config:"request"`
ModeOptions `group:"Modify Options" config:"mode"`
MiscOptions `group:"Miscellaneous Options" config:"misc"`
@ -50,10 +51,11 @@ type InputOptions struct {
URL []string `short:"u" long:"url" description:"Strings, input baseurl, e.g.: http://google.com"`
URLFile string `short:"l" long:"list" description:"File, input filename"`
PortRange string `short:"p" long:"port" description:"String, input port range, e.g.: 80,8080-8090,db"`
CIDRs string `long:"cidr" description:"String, input cidr, e.g.: 1.1.1.1/24 "`
CIDRs []string `short:"i" long:"cidr" description:"String, input cidr, e.g.: 1.1.1.1/24 "`
RawFile string `long:"raw" description:"File, input raw request filename"`
Dictionaries []string `short:"d" long:"dict" description:"Files, Multi,dict files, e.g.: -d 1.txt -d 2.txt" config:"dictionaries"`
NoDict bool `long:"no-dict" description:"Bool, no dictionary" config:"no-dict"`
//NoDict bool `long:"no-dict" description:"Bool, no dictionary" config:"no-dict"`
DefaultDict bool `short:"D" long:"default" description:"Bool, use default dictionary" config:"default"`
Word string `short:"w" long:"word" description:"String, word generate dsl, e.g.: -w test{?ld#4}" config:"word"`
Rules []string `short:"r" long:"rules" description:"Files, rule files, e.g.: -r rule1.txt -r rule2.txt" config:"rules"`
AppendRule []string `long:"append-rule" description:"Files, when found valid path , use append rule generator new word with current path" config:"append-rules"`
@ -92,6 +94,7 @@ type OutputOptions struct {
Quiet bool `short:"q" long:"quiet" description:"Bool, Quiet" config:"quiet"`
NoColor bool `long:"no-color" description:"Bool, no color" config:"no-color"`
NoBar bool `long:"no-bar" description:"Bool, No progress bar" config:"no-bar"`
NoStat bool `long:"no-stat" description:"Bool, No stat file output" config:"no-stat"`
}
type RequestOptions struct {
@ -109,7 +112,6 @@ type PluginOptions struct {
Extracts []string `long:"extract" description:"Strings, extract response, e.g.: --extract js --extract ip --extract version:(.*?)" config:"extract"`
ExtractConfig string `long:"extract-config" description:"String, extract config filename" config:"extract-config"`
Recon bool `long:"recon" description:"Bool, enable recon" config:"recon"`
Finger bool `long:"finger" description:"Bool, enable active finger detect" config:"finger"`
Bak bool `long:"bak" description:"Bool, enable bak found" config:"bak"`
FileBak bool `long:"file-bak" description:"Bool, enable valid result bak found, equal --append-rule rule/filebak.txt" config:"file-bak"`
Common bool `long:"common" description:"Bool, enable common file found" config:"common"`
@ -120,7 +122,7 @@ type PluginOptions struct {
type ModeOptions struct {
RateLimit int `long:"rate-limit" default:"0" description:"Int, request rate limit (rate/s), e.g.: --rate-limit 100" config:"rate-limit"`
Force bool `long:"force" description:"Bool, skip error break" config:"force"`
CheckOnly bool `long:"check-only" description:"Bool, check only" config:"check-only"`
//CheckOnly bool `long:"check-only" description:"Bool, check only" config:"check-only"`
NoScope bool `long:"no-scope" description:"Bool, no scope" config:"no-scope"`
Scope []string `long:"scope" description:"String, custom scope, e.g.: --scope *.example.com" config:"scope"`
Recursive string `long:"recursive" default:"current.IsDir()" description:"String,custom recursive rule, e.g.: --recursive current.IsDir()" config:"recursive"`
@ -136,7 +138,7 @@ type ModeOptions struct {
UniqueStatus string `long:"unique-status" default:"403,200,404" description:"Strings (comma split), custom unique status" config:"unique-status"`
Unique bool `long:"unique" description:"Bool, unique response" config:"unique"`
RetryCount int `long:"retry" default:"0" description:"Int, retry count" config:"retry"`
SimhashDistance int `long:"distance" default:"5" config:"distance"`
SimhashDistance int `long:"sim-distance" default:"5" config:"sim-distance"`
}
type MiscOptions struct {
@ -153,11 +155,111 @@ type MiscOptions struct {
InitConfig bool `long:"init" description:"Bool, init config file"`
}
func (opt *Option) PrepareRunner() (*Runner, error) {
err := opt.Validate()
if err != nil {
return nil, err
// Validate checks mutually exclusive options and ensures at least one
// target source (-u / -l / --cidr / --raw / --resume) was provided.
func (opt *Option) Validate() error {
	if opt.Uppercase && opt.Lowercase {
		return errors.New("cannot set -U and -L at the same time")
	}

	if (opt.Offset != 0 || opt.Limit != 0) && opt.Depth > 0 {
		// offset/limit combined with recursion would be confusing
		return errors.New("--offset and --limit cannot be used with --depth at the same time")
	}

	if opt.Depth > 0 && opt.ResumeFrom != "" {
		// recursion conflicts with resume: a resumed task's words and rules
		// are restored from the stat file, not taken from the command line
		return errors.New("--resume and --depth cannot be used at the same time")
	}
	if opt.ResumeFrom == "" && len(opt.URL) == 0 && opt.URLFile == "" && len(opt.CIDRs) == 0 && opt.RawFile == "" {
		return fmt.Errorf("without any target, please use -u/-l/-c/--resume to set targets")
	}
	return nil
}
// Prepare wires up global runtime state before a Runner is built: it
// validates (and optionally updates) the finger databases, compiles
// extractors, loads embedded resources via pkg.Load, and seeds
// package-level configuration (simhash distance, max body size,
// status-code filters, crawl depth).
func (opt *Option) Prepare() error {
	var err error
	logs.Log.SetColor(true)
	if err = opt.FingerOptions.Validate(); err != nil {
		return err
	}
	if opt.FingerUpdate {
		err = opt.UpdateFinger()
		if err != nil {
			return err
		}
	}
	// prefer local finger configs over the embedded ones when they differ
	err = opt.LoadLocalFingerConfig()
	if err != nil {
		return err
	}
	err = opt.Validate()
	if err != nil {
		return err
	}
	err = pkg.LoadFingers()
	if err != nil {
		return err
	}

	if opt.Extracts != nil {
		for _, e := range opt.Extracts {
			if reg, ok := pkg.ExtractRegexps[e]; ok {
				// known named extractor group
				pkg.Extractors[e] = reg
			} else {
				// otherwise treat the argument itself as a regular expression
				pkg.Extractors[e] = []*parsers.Extractor{
					&parsers.Extractor{
						Name:            e,
						CompiledRegexps: []*regexp.Regexp{regexp.MustCompile(e)},
					},
				}
			}
		}
	}
	if opt.ExtractConfig != "" {
		extracts, err := pkg.LoadExtractorConfig(opt.ExtractConfig)
		if err != nil {
			return err
		}
		pkg.Extractors[opt.ExtractConfig] = extracts
	}

	err = pkg.Load()
	if err != nil {
		iutils.Fatal(err.Error())
	}

	// initialize global variables
	pkg.Distance = uint8(opt.SimhashDistance)
	if opt.MaxBodyLength == -1 {
		// -1 disables the body-size cap entirely
		ihttp.DefaultMaxBodySize = -1
	} else {
		// MaxBodyLength is configured in KB; presumably — TODO confirm against the option's description
		ihttp.DefaultMaxBodySize = opt.MaxBodyLength * 1024
	}
	pkg.BlackStatus = parseStatus(pkg.BlackStatus, opt.BlackStatus)
	pkg.WhiteStatus = parseStatus(pkg.WhiteStatus, opt.WhiteStatus)
	if opt.FuzzyStatus == "all" {
		pool.EnableAllFuzzy = true
	} else {
		pkg.FuzzyStatus = parseStatus(pkg.FuzzyStatus, opt.FuzzyStatus)
	}
	if opt.Unique {
		pool.EnableAllUnique = true
	} else {
		pkg.UniqueStatus = parseStatus(pkg.UniqueStatus, opt.UniqueStatus)
	}
	pool.MaxCrawl = opt.CrawlDepth

	return nil
}
func (opt *Option) NewRunner() (*Runner, error) {
var err error
r := &Runner{
Option: opt,
taskCh: make(chan *Task),
@ -166,18 +268,20 @@ func (opt *Option) PrepareRunner() (*Runner, error) {
fuzzyCh: make(chan *pkg.Baseline, 256),
Headers: make(map[string]string),
Total: opt.Limit,
Color: true,
}
// log and bar
if !opt.NoColor {
logs.Log.SetColor(true)
r.Color = true
if opt.NoColor {
logs.Log.SetColor(false)
r.Color = false
}
if opt.Quiet {
logs.Log.SetQuiet(true)
logs.Log.SetColor(false)
r.Color = false
}
if !(opt.Quiet || opt.NoBar) {
r.Progress = mpb.New(mpb.WithRefreshRate(100 * time.Millisecond))
logs.Log.SetOutput(r.Progress)
@ -200,32 +304,10 @@ func (opt *Option) PrepareRunner() (*Runner, error) {
r.ClientType = ihttp.STANDARD
}
if opt.Threads == DefaultThreads && opt.CheckOnly {
if opt.Threads == DefaultThreads && len(opt.Dictionaries) == 0 {
r.Threads = 1000
}
if opt.Extracts != nil {
for _, e := range opt.Extracts {
if reg, ok := pkg.ExtractRegexps[e]; ok {
pkg.Extractors[e] = reg
} else {
pkg.Extractors[e] = []*parsers.Extractor{
&parsers.Extractor{
Name: e,
CompiledRegexps: []*regexp.Regexp{regexp.MustCompile(e)},
},
}
}
}
}
if opt.ExtractConfig != "" {
extracts, err := pkg.LoadExtractorConfig(opt.ExtractConfig)
if err != nil {
return nil, err
}
pkg.Extractors[opt.ExtractConfig] = extracts
}
if opt.Recon {
pkg.Extractors["recon"] = pkg.ExtractRegexps["pentest"]
}
@ -238,132 +320,30 @@ func (opt *Option) PrepareRunner() (*Runner, error) {
pkg.EnableAllFingerEngine = true
pkg.Extractors["recon"] = pkg.ExtractRegexps["pentest"]
opt.AppendRule = append(opt.AppendRule, "filebak")
} else if opt.FileBak {
}
if opt.FileBak {
opt.AppendRule = append(opt.AppendRule, "filebak")
}
var s strings.Builder
if r.Crawl {
s.WriteString("crawl enable; ")
if opt.Common {
r.AppendWords = append(r.AppendWords, mask.SpecialWords["common_file"]...)
}
if r.Finger {
if opt.Finger {
r.AppendWords = append(r.AppendWords, pkg.ActivePath...)
pkg.EnableAllFingerEngine = true
s.WriteString("active fingerprint enable; ")
}
if r.Bak {
s.WriteString("bak file enable; ")
}
if r.Common {
r.AppendWords = append(r.AppendWords, mask.SpecialWords["common_file"]...)
s.WriteString("common file enable; ")
}
if opt.Recon {
s.WriteString("recon enable; ")
}
if len(opt.AppendRule) > 0 {
s.WriteString("file bak enable; ")
}
if r.RetryCount > 0 {
s.WriteString("Retry Count: " + strconv.Itoa(r.RetryCount))
}
if s.Len() > 0 {
logs.Log.Important(s.String())
}
opt.PrintPlugin()
if opt.NoScope {
r.Scope = []string{"*"}
}
pkg.BlackStatus = parseStatus(pkg.BlackStatus, opt.BlackStatus)
pkg.WhiteStatus = parseStatus(pkg.WhiteStatus, opt.WhiteStatus)
if opt.FuzzyStatus == "all" {
pool.EnableAllFuzzy = true
} else {
pkg.FuzzyStatus = parseStatus(pkg.FuzzyStatus, opt.FuzzyStatus)
}
if opt.Unique {
pool.EnableAllUnique = true
} else {
pkg.UniqueStatus = parseStatus(pkg.UniqueStatus, opt.UniqueStatus)
}
// prepare word
dicts := make([][]string, len(opt.Dictionaries))
if len(opt.Dictionaries) == 0 && opt.Word == "" && !opt.NoDict {
dicts = append(dicts, pkg.LoadDefaultDict())
logs.Log.Warn("not set any dictionary, use default dictionary: https://github.com/maurosoria/dirsearch/blob/master/db/dicc.txt")
} else {
for i, f := range opt.Dictionaries {
dicts[i], err = loadFileToSlice(f)
if opt.ResumeFrom != "" {
dictCache[f] = dicts[i]
}
err = opt.BuildWords(r)
if err != nil {
return nil, err
}
logs.Log.Logf(pkg.LogVerbose, "Loaded %d word from %s", len(dicts[i]), f)
}
}
if opt.Word == "" {
opt.Word = "{?"
for i, _ := range dicts {
opt.Word += strconv.Itoa(i)
}
opt.Word += "}"
}
if len(opt.Suffixes) != 0 {
mask.SpecialWords["suffix"] = opt.Suffixes
opt.Word += "{?@suffix}"
}
if len(opt.Prefixes) != 0 {
mask.SpecialWords["prefix"] = opt.Prefixes
opt.Word = "{?@prefix}" + opt.Word
}
if opt.ForceExtension && opt.Extensions != "" {
exts := strings.Split(opt.Extensions, ",")
for i, e := range exts {
if !strings.HasPrefix(e, ".") {
exts[i] = "." + e
}
}
mask.SpecialWords["ext"] = exts
opt.Word += "{?@ext}"
}
r.Wordlist, err = mask.Run(opt.Word, dicts, nil)
if err != nil {
return nil, fmt.Errorf("%s %w", opt.Word, err)
}
if len(r.Wordlist) > 0 {
logs.Log.Logf(pkg.LogVerbose, "Parsed %d words by %s", len(r.Wordlist), opt.Word)
}
if len(opt.Rules) != 0 {
rules, err := loadRuleAndCombine(opt.Rules)
if err != nil {
return nil, err
}
r.Rules = rule.Compile(rules, opt.FilterRule)
} else if opt.FilterRule != "" {
// if filter rule is not empty, set rules to ":", force to open filter mode
r.Rules = rule.Compile(":", opt.FilterRule)
} else {
r.Rules = new(rule.Program)
}
if len(r.Rules.Expressions) > 0 {
r.Total = len(r.Wordlist) * len(r.Rules.Expressions)
} else {
r.Total = len(r.Wordlist)
}
pkg.DefaultStatistor = pkg.Statistor{
Word: opt.Word,
WordCount: len(r.Wordlist),
@ -374,240 +354,10 @@ func (opt *Option) PrepareRunner() (*Runner, error) {
Total: r.Total,
}
if len(opt.AppendRule) != 0 {
content, err := loadRuleAndCombine(opt.AppendRule)
r.Tasks, err = opt.BuildTasks(r)
if err != nil {
return nil, err
}
r.AppendRules = rule.Compile(string(content), "")
}
if len(opt.AppendFile) != 0 {
var bs bytes.Buffer
for _, f := range opt.AppendFile {
content, err := ioutil.ReadFile(f)
if err != nil {
return nil, err
}
bs.Write(bytes.TrimSpace(content))
bs.WriteString("\n")
}
lines := strings.Split(bs.String(), "\n")
for i, line := range lines {
lines[i] = strings.TrimSpace(line)
}
r.AppendWords = append(r.AppendWords, lines...)
}
ports := utils.ParsePort(opt.PortRange)
// prepare task
tasks := make(chan *Task, opt.PoolSize)
var taskfrom string
if opt.ResumeFrom != "" {
stats, err := pkg.ReadStatistors(opt.ResumeFrom)
if err != nil {
logs.Log.Error(err.Error())
}
r.Count = len(stats)
taskfrom = "resume " + opt.ResumeFrom
go func() {
for _, stat := range stats {
tasks <- &Task{baseUrl: stat.BaseUrl, origin: NewOrigin(stat)}
}
close(tasks)
}()
} else {
var file *os.File
// 根据不同的输入类型生成任务
if len(opt.URL) == 1 {
go func() {
opt.GenerateTasks(tasks, opt.URL[0], ports)
close(tasks)
}()
parsed, _ := url.Parse(opt.URL[0])
taskfrom = parsed.Host
r.Count = 1
} else if len(opt.URL) > 1 {
go func() {
for _, u := range opt.URL {
opt.GenerateTasks(tasks, u, ports)
}
close(tasks)
}()
taskfrom = "cmd"
r.Count = len(opt.URL)
} else if opt.RawFile != "" {
raw, err := os.Open(opt.RawFile)
if err != nil {
return nil, err
}
req, err := http.ReadRequest(bufio.NewReader(raw))
if err != nil {
return nil, err
}
go func() {
opt.GenerateTasks(tasks, fmt.Sprintf("http://%s%s", req.Host, req.URL.String()), ports)
close(tasks)
}()
r.Method = req.Method
for k, _ := range req.Header {
r.Headers[k] = req.Header.Get(k)
}
} else if opt.CIDRs != "" {
if len(ports) == 0 {
ports = []string{"80", "443"}
}
for _, cidr := range strings.Split(opt.CIDRs, ",") {
ips := utils.ParseCIDR(cidr)
if ips != nil {
r.Count += ips.Count()
}
}
go func() {
for _, cidr := range strings.Split(opt.CIDRs, ",") {
ips := utils.ParseCIDR(cidr)
if ips == nil {
logs.Log.Error("cidr format error: " + cidr)
}
for ip := range ips.Range() {
opt.GenerateTasks(tasks, ip.String(), ports)
}
}
close(tasks)
}()
taskfrom = "cidr"
} else if opt.URLFile != "" {
file, err = os.Open(opt.URLFile)
if err != nil {
return nil, err
}
taskfrom = filepath.Base(opt.URLFile)
} else if files.HasStdin() {
file = os.Stdin
taskfrom = "stdin"
}
if file != nil {
content, err := ioutil.ReadAll(file)
if err != nil {
return nil, err
}
urls := strings.Split(strings.TrimSpace(string(content)), "\n")
for _, u := range urls {
u = strings.TrimSpace(u)
if _, err := url.Parse(u); err == nil {
r.Count++
} else if ip := utils.ParseIP(u); ip != nil {
r.Count++
} else if cidr := utils.ParseCIDR(u); cidr != nil {
r.Count += cidr.Count()
}
}
go func() {
for _, u := range urls {
u = strings.TrimSpace(u)
if _, err := url.Parse(u); err == nil {
opt.GenerateTasks(tasks, u, ports)
} else if ip := utils.ParseIP(u); ip != nil {
opt.GenerateTasks(tasks, u, ports)
} else if cidr := utils.ParseCIDR(u); cidr != nil {
for ip := range cidr.Range() {
opt.GenerateTasks(tasks, ip.String(), ports)
}
}
}
close(tasks)
}()
}
}
if len(ports) > 0 {
r.Count = r.Count * len(ports)
}
r.Tasks = tasks
logs.Log.Logf(pkg.LogVerbose, "Loaded %d urls from %s", len(tasks), taskfrom)
// 类似dirsearch中的
if opt.Extensions != "" {
r.AppendFunction(func(s string) []string {
exts := strings.Split(opt.Extensions, ",")
ss := make([]string, len(exts))
for i, e := range exts {
if strings.Contains(s, "%EXT%") {
ss[i] = strings.Replace(s, "%EXT%", e, -1)
}
}
return ss
})
} else {
r.AppendFunction(func(s string) []string {
if strings.Contains(s, "%EXT%") {
return nil
}
return []string{s}
})
}
if opt.Uppercase {
r.AppendFunction(wrapWordsFunc(strings.ToUpper))
}
if opt.Lowercase {
r.AppendFunction(wrapWordsFunc(strings.ToLower))
}
if opt.RemoveExtensions != "" {
rexts := strings.Split(opt.ExcludeExtensions, ",")
r.AppendFunction(func(s string) []string {
if ext := parseExtension(s); iutils.StringsContains(rexts, ext) {
return []string{strings.TrimSuffix(s, "."+ext)}
}
return []string{s}
})
}
if opt.ExcludeExtensions != "" {
exexts := strings.Split(opt.ExcludeExtensions, ",")
r.AppendFunction(func(s string) []string {
if ext := parseExtension(s); iutils.StringsContains(exexts, ext) {
return nil
}
return []string{s}
})
}
if len(opt.Replaces) > 0 {
r.AppendFunction(func(s string) []string {
for k, v := range opt.Replaces {
s = strings.Replace(s, k, v, -1)
}
return []string{s}
})
}
// default skip function, skip %EXT%
r.AppendFunction(func(s string) []string {
if strings.Contains(s, "%EXT%") {
return nil
}
return []string{s}
})
if len(opt.Skips) > 0 {
r.AppendFunction(func(s string) []string {
for _, skip := range opt.Skips {
if strings.Contains(s, skip) {
return nil
}
}
return []string{s}
})
}
logs.Log.Logf(pkg.LogVerbose, "Loaded %d dictionaries and %d decorators", len(opt.Dictionaries), len(r.Fns))
if opt.Match != "" {
exp, err := expr.Compile(opt.Match)
@ -707,67 +457,354 @@ func (opt *Option) PrepareRunner() (*Runner, error) {
if opt.ResumeFrom != "" {
r.StatFile, err = files.NewFile(opt.ResumeFrom, false, true, true)
} else {
r.StatFile, err = files.NewFile(strings.ReplaceAll(taskfrom, ":", "_")+".stat", false, true, true)
r.StatFile, err = files.NewFile(safeFilename(r.Tasks.Name)+".stat", false, true, true)
}
if err != nil {
return nil, err
}
if !opt.NoStat {
r.StatFile.Mod = os.O_WRONLY | os.O_CREATE
err = r.StatFile.Init()
if err != nil {
return nil, err
}
}
return r, nil
}
func (opt *Option) Validate() error {
if opt.Uppercase && opt.Lowercase {
return errors.New("cannot set -U and -L at the same time")
func (opt *Option) PrintPlugin() {
var s strings.Builder
if opt.Crawl {
s.WriteString("crawl enable; ")
}
if opt.Finger {
s.WriteString("active fingerprint enable; ")
}
if opt.Bak {
s.WriteString("bak file enable; ")
}
if opt.Common {
s.WriteString("common file enable; ")
}
if opt.Recon {
s.WriteString("recon enable; ")
}
if opt.FileBak {
s.WriteString("file bak enable; ")
}
if (opt.Offset != 0 || opt.Limit != 0) && opt.Depth > 0 {
// 偏移和上限与递归同时使用时也会造成混淆.
return errors.New("--offset and --limit cannot be used with --depth at the same time")
if opt.RetryCount > 0 {
s.WriteString("Retry Count: " + strconv.Itoa(opt.RetryCount))
}
if s.Len() > 0 {
logs.Log.Important(s.String())
}
}
if opt.Depth > 0 && opt.ResumeFrom != "" {
// 递归与断点续传会造成混淆, 断点续传的word与rule不是通过命令行获取的
return errors.New("--resume and --depth cannot be used at the same time")
func (opt *Option) BuildWords(r *Runner) error {
var dicts [][]string
var err error
if opt.DefaultDict {
dicts = append(dicts, pkg.LoadDefaultDict())
logs.Log.Info("use default dictionary: https://github.com/maurosoria/dirsearch/blob/master/db/dicc.txt")
}
for i, f := range opt.Dictionaries {
dict, err := loadFileToSlice(f)
if err != nil {
return err
}
dicts = append(dicts, dict)
if opt.ResumeFrom != "" {
dictCache[f] = dicts[i]
}
if opt.ResumeFrom == "" && opt.URL == nil && opt.URLFile == "" && opt.CIDRs == "" && opt.RawFile == "" {
return fmt.Errorf("without any target, please use -u/-l/-c/--resume to set targets")
logs.Log.Logf(pkg.LogVerbose, "Loaded %d word from %s", len(dicts[i]), f)
}
if len(dicts) == 0 {
r.IsCheck = true
}
if opt.Word == "" {
opt.Word = "{?"
for i, _ := range dicts {
opt.Word += strconv.Itoa(i)
}
opt.Word += "}"
}
if len(opt.Suffixes) != 0 {
mask.SpecialWords["suffix"] = opt.Suffixes
opt.Word += "{?@suffix}"
}
if len(opt.Prefixes) != 0 {
mask.SpecialWords["prefix"] = opt.Prefixes
opt.Word = "{?@prefix}" + opt.Word
}
if opt.ForceExtension && opt.Extensions != "" {
exts := strings.Split(opt.Extensions, ",")
for i, e := range exts {
if !strings.HasPrefix(e, ".") {
exts[i] = "." + e
}
}
mask.SpecialWords["ext"] = exts
opt.Word += "{?@ext}"
}
r.Wordlist, err = mask.Run(opt.Word, dicts, nil)
if err != nil {
return fmt.Errorf("%s %w", opt.Word, err)
}
if len(r.Wordlist) > 0 {
logs.Log.Logf(pkg.LogVerbose, "Parsed %d words by %s", len(r.Wordlist), opt.Word)
}
if len(opt.Rules) != 0 {
rules, err := loadRuleAndCombine(opt.Rules)
if err != nil {
return err
}
r.Rules = rule.Compile(rules, opt.FilterRule)
} else if opt.FilterRule != "" {
// if filter rule is not empty, set rules to ":", force to open filter mode
r.Rules = rule.Compile(":", opt.FilterRule)
} else {
r.Rules = new(rule.Program)
}
if len(r.Rules.Expressions) > 0 {
r.Total = len(r.Wordlist) * len(r.Rules.Expressions)
} else {
r.Total = len(r.Wordlist)
}
if len(opt.AppendRule) != 0 {
content, err := loadRuleAndCombine(opt.AppendRule)
if err != nil {
return err
}
r.AppendRules = rule.Compile(string(content), "")
}
if len(opt.AppendFile) != 0 {
var bs bytes.Buffer
for _, f := range opt.AppendFile {
content, err := ioutil.ReadFile(f)
if err != nil {
return err
}
bs.Write(bytes.TrimSpace(content))
bs.WriteString("\n")
}
lines := strings.Split(bs.String(), "\n")
for i, line := range lines {
lines[i] = strings.TrimSpace(line)
}
r.AppendWords = append(r.AppendWords, lines...)
}
// 类似dirsearch中的
if opt.Extensions != "" {
r.AppendFunction(func(s string) []string {
exts := strings.Split(opt.Extensions, ",")
ss := make([]string, len(exts))
for i, e := range exts {
if strings.Contains(s, "%EXT%") {
ss[i] = strings.Replace(s, "%EXT%", e, -1)
}
}
return ss
})
} else {
r.AppendFunction(func(s string) []string {
if strings.Contains(s, "%EXT%") {
return nil
}
return []string{s}
})
}
if opt.Uppercase {
r.AppendFunction(wrapWordsFunc(strings.ToUpper))
}
if opt.Lowercase {
r.AppendFunction(wrapWordsFunc(strings.ToLower))
}
if opt.RemoveExtensions != "" {
rexts := strings.Split(opt.ExcludeExtensions, ",")
r.AppendFunction(func(s string) []string {
if ext := parseExtension(s); iutils.StringsContains(rexts, ext) {
return []string{strings.TrimSuffix(s, "."+ext)}
}
return []string{s}
})
}
if opt.ExcludeExtensions != "" {
exexts := strings.Split(opt.ExcludeExtensions, ",")
r.AppendFunction(func(s string) []string {
if ext := parseExtension(s); iutils.StringsContains(exexts, ext) {
return nil
}
return []string{s}
})
}
if len(opt.Replaces) > 0 {
r.AppendFunction(func(s string) []string {
for k, v := range opt.Replaces {
s = strings.Replace(s, k, v, -1)
}
return []string{s}
})
}
// default skip function, skip %EXT%
r.AppendFunction(func(s string) []string {
if strings.Contains(s, "%EXT%") {
return nil
}
return []string{s}
})
if len(opt.Skips) > 0 {
r.AppendFunction(func(s string) []string {
for _, skip := range opt.Skips {
if strings.Contains(s, skip) {
return nil
}
}
return []string{s}
})
}
logs.Log.Logf(pkg.LogVerbose, "Loaded %d dictionaries and %d decorators", len(opt.Dictionaries), len(r.Fns))
return nil
}
// Generate Tasks
func (opt *Option) GenerateTasks(ch chan *Task, u string, ports []string) {
parsed, err := url.Parse(u)
func (opt *Option) BuildTasks(r *Runner) (*TaskGenerator, error) {
// prepare task`
var err error
gen := NewTaskGenerator(opt.PortRange)
if opt.ResumeFrom != "" {
stats, err := pkg.ReadStatistors(opt.ResumeFrom)
if err != nil {
logs.Log.Warnf("parse %s, %s ", u, err.Error())
return
logs.Log.Error(err.Error())
}
if parsed.Scheme == "" {
if parsed.Port() == "443" {
parsed.Scheme = "https"
r.Count = len(stats)
gen.Name = "resume " + opt.ResumeFrom
go func() {
for _, stat := range stats {
gen.In <- &Task{baseUrl: stat.BaseUrl, origin: NewOrigin(stat)}
}
close(gen.In)
}()
} else {
parsed.Scheme = "http"
var file *os.File
// 根据不同的输入类型生成任务
if len(opt.URL) == 1 {
gen.Name = opt.URL[0]
go func() {
gen.Run(opt.URL[0])
close(gen.In)
}()
r.Count = 1
} else if len(opt.URL) > 1 {
go func() {
for _, u := range opt.URL {
gen.Run(u)
}
close(gen.In)
}()
gen.Name = "cmd"
r.Count = len(opt.URL)
} else if opt.RawFile != "" {
raw, err := os.Open(opt.RawFile)
if err != nil {
return nil, err
}
req, err := http.ReadRequest(bufio.NewReader(raw))
if err != nil {
return nil, err
}
go func() {
gen.Run(fmt.Sprintf("http://%s%s", req.Host, req.URL.String()))
close(gen.In)
}()
r.Method = req.Method
for k, _ := range req.Header {
r.Headers[k] = req.Header.Get(k)
}
r.Count = 1
} else if len(opt.CIDRs) != 0 {
cidrs := utils.ParseCIDRs(opt.CIDRs)
if len(gen.ports) == 0 {
gen.ports = []string{"80", "443"}
}
gen.Name = "cidr"
r.Count = cidrs.Count()
go func() {
for _, cidr := range cidrs {
if cidr == nil {
logs.Log.Error("cidr format error: " + cidr.String())
}
for ip := range cidr.Range() {
gen.Run(ip.String())
}
}
close(gen.In)
}()
} else if opt.URLFile != "" {
file, err = os.Open(opt.URLFile)
if err != nil {
return nil, err
}
gen.Name = filepath.Base(opt.URLFile)
} else if files.HasStdin() {
file = os.Stdin
gen.Name = "stdin"
}
if file != nil {
content, err := ioutil.ReadAll(file)
if err != nil {
return nil, err
}
urls := strings.Split(strings.TrimSpace(string(content)), "\n")
for _, u := range urls {
u = strings.TrimSpace(u)
if _, err := url.Parse(u); err == nil {
r.Count++
} else if ip := utils.ParseIP(u); ip != nil {
r.Count++
} else if cidr := utils.ParseCIDR(u); cidr != nil {
r.Count += cidr.Count()
}
}
if len(ports) == 0 {
ch <- &Task{baseUrl: parsed.String()}
return
go func() {
for _, u := range urls {
u = strings.TrimSpace(u)
if _, err := url.Parse(u); err == nil {
gen.Run(u)
} else if ip := utils.ParseIP(u); ip != nil {
gen.Run(u)
} else if cidr := utils.ParseCIDR(u); cidr != nil {
for ip := range cidr.Range() {
gen.Run(ip.String())
}
}
}
close(gen.In)
}()
}
}
for _, p := range ports {
if parsed.Host == "" {
ch <- &Task{baseUrl: fmt.Sprintf("%s://%s:%s", parsed.Scheme, parsed.Path, p)}
} else {
ch <- &Task{baseUrl: fmt.Sprintf("%s://%s:%s/%s", parsed.Scheme, parsed.Host, p, parsed.Path)}
}
if len(gen.ports) > 0 {
r.Count = r.Count * len(gen.ports)
}
return gen, nil
}

View File

@ -154,10 +154,10 @@ func (pool *CheckPool) Handler() {
if bl.IsValid {
if bl.RedirectURL != "" {
pool.doRedirect(bl, bl.ReqDepth)
pool.putToFuzzy(bl)
pool.putToOutput(bl)
} else if bl.Status == 400 {
pool.doUpgrade(bl)
pool.putToFuzzy(bl)
pool.putToOutput(bl)
} else {
params := map[string]interface{}{
"current": bl,

View File

@ -30,7 +30,7 @@ type Config struct {
ProcessCh chan *pkg.Baseline
OutputCh chan *pkg.Baseline
FuzzyCh chan *pkg.Baseline
OutLocker *sync.WaitGroup
Outwg *sync.WaitGroup
RateLimit int
CheckPeriod int
ErrPeriod int32

View File

@ -153,12 +153,12 @@ func (pool *BasePool) putToOutput(bl *pkg.Baseline) {
if bl.IsValid || bl.IsFuzzy {
bl.Collect()
}
pool.OutLocker.Add(1)
pool.Outwg.Add(1)
pool.OutputCh <- bl
}
// putToFuzzy marks bl as a fuzzy result and hands it to the fuzzy
// channel. Outwg tracks the in-flight output item so the runner can
// wait for all results to be consumed before exiting.
func (pool *BasePool) putToFuzzy(bl *pkg.Baseline) {
	// NOTE: the stale pre-rename `pool.OutLocker.Add(1)` line was merge
	// residue (the field is now named Outwg); only one Add is correct,
	// otherwise the WaitGroup counter would never drain.
	pool.Outwg.Add(1)
	bl.IsFuzzy = true
	pool.FuzzyCh <- bl
}

View File

@ -14,7 +14,6 @@ import (
"github.com/vbauerster/mpb/v8"
"github.com/vbauerster/mpb/v8/decor"
"sync"
"time"
)
var (
@ -36,10 +35,10 @@ type Runner struct {
outputCh chan *pkg.Baseline
fuzzyCh chan *pkg.Baseline
bar *mpb.Bar
finished int
IsCheck bool
Pools *ants.PoolWithFunc
PoolName map[string]bool
Tasks chan *Task
Tasks *TaskGenerator
Rules *rule.Program
AppendRules *rule.Program
Headers map[string]string
@ -73,7 +72,7 @@ func (r *Runner) PrepareConfig() *pool.Config {
Mod: pool.ModMap[r.Mod],
OutputCh: r.outputCh,
FuzzyCh: r.fuzzyCh,
OutLocker: r.outwg,
Outwg: r.outwg,
Fuzzy: r.Fuzzy,
CheckPeriod: r.CheckPeriod,
ErrPeriod: int32(r.ErrPeriod),
@ -113,106 +112,106 @@ func (r *Runner) AppendFunction(fn func(string) []string) {
func (r *Runner) Prepare(ctx context.Context) error {
var err error
if r.CheckOnly {
if r.IsCheck {
// 仅check, 类似httpx
r.Pools, err = ants.NewPoolWithFunc(1, func(i interface{}) {
config := r.PrepareConfig()
pool, err := pool.NewCheckPool(ctx, config)
checkPool, err := pool.NewCheckPool(ctx, config)
if err != nil {
logs.Log.Error(err.Error())
pool.Cancel()
checkPool.Cancel()
r.poolwg.Done()
return
}
ch := make(chan string)
go func() {
for t := range r.Tasks {
for t := range r.Tasks.tasks {
ch <- t.baseUrl
}
close(ch)
}()
pool.Worder = words.NewWorderWithChan(ch)
pool.Worder.Fns = r.Fns
pool.Bar = pkg.NewBar("check", r.Count-r.Offset, pool.Statistor, r.Progress)
pool.Run(ctx, r.Offset, r.Count)
checkPool.Worder = words.NewWorderWithChan(ch)
checkPool.Worder.Fns = r.Fns
checkPool.Bar = pkg.NewBar("check", r.Count-r.Offset, checkPool.Statistor, r.Progress)
checkPool.Run(ctx, r.Offset, r.Count)
r.poolwg.Done()
})
} else {
// 完整探测模式
go func() {
for t := range r.Tasks {
for t := range r.Tasks.tasks {
r.taskCh <- t
}
close(r.taskCh)
}()
if r.Count > 0 {
r.addBar(r.Count)
r.newBar(r.Count)
}
r.Pools, err = ants.NewPoolWithFunc(r.PoolSize, func(i interface{}) {
t := i.(*Task)
if t.origin != nil && t.origin.End == t.origin.Total {
r.StatFile.SafeWrite(t.origin.Json())
r.saveStat(t.origin.Json())
r.Done()
return
}
config := r.PrepareConfig()
config.BaseURL = t.baseUrl
pool, err := pool.NewBrutePool(ctx, config)
brutePool, err := pool.NewBrutePool(ctx, config)
if err != nil {
logs.Log.Error(err.Error())
pool.Cancel()
brutePool.Cancel()
r.Done()
return
}
if t.origin != nil && len(r.Wordlist) == 0 {
// 如果是从断点续传中恢复的任务, 则自动设置word,dict与rule, 不过优先级低于命令行参数
pool.Statistor = pkg.NewStatistorFromStat(t.origin.Statistor)
pool.Worder, err = t.origin.InitWorder(r.Fns)
brutePool.Statistor = pkg.NewStatistorFromStat(t.origin.Statistor)
brutePool.Worder, err = t.origin.InitWorder(r.Fns)
if err != nil {
logs.Log.Error(err.Error())
r.Done()
return
}
pool.Statistor.Total = t.origin.sum
brutePool.Statistor.Total = t.origin.sum
} else {
pool.Statistor = pkg.NewStatistor(t.baseUrl)
pool.Worder = words.NewWorder(r.Wordlist)
pool.Worder.Fns = r.Fns
pool.Worder.Rules = r.Rules.Expressions
brutePool.Statistor = pkg.NewStatistor(t.baseUrl)
brutePool.Worder = words.NewWorder(r.Wordlist)
brutePool.Worder.Fns = r.Fns
brutePool.Worder.Rules = r.Rules.Expressions
}
var limit int
if pool.Statistor.Total > r.Limit && r.Limit != 0 {
if brutePool.Statistor.Total > r.Limit && r.Limit != 0 {
limit = r.Limit
} else {
limit = pool.Statistor.Total
limit = brutePool.Statistor.Total
}
pool.Bar = pkg.NewBar(config.BaseURL, limit-pool.Statistor.Offset, pool.Statistor, r.Progress)
logs.Log.Importantf("[pool] task: %s, total %d words, %d threads, proxy: %s", pool.BaseURL, limit-pool.Statistor.Offset, pool.Thread, pool.ProxyAddr)
err = pool.Init()
brutePool.Bar = pkg.NewBar(config.BaseURL, limit-brutePool.Statistor.Offset, brutePool.Statistor, r.Progress)
logs.Log.Importantf("[pool] task: %s, total %d words, %d threads, proxy: %s", brutePool.BaseURL, limit-brutePool.Statistor.Offset, brutePool.Thread, brutePool.ProxyAddr)
err = brutePool.Init()
if err != nil {
pool.Statistor.Error = err.Error()
brutePool.Statistor.Error = err.Error()
if !r.Force {
// 如果没开启force, init失败将会关闭pool
pool.Close()
r.PrintStat(pool)
brutePool.Close()
r.PrintStat(brutePool)
r.Done()
return
}
}
pool.Run(pool.Statistor.Offset, limit)
brutePool.Run(brutePool.Statistor.Offset, limit)
if pool.IsFailed && len(pool.FailedBaselines) > 0 {
if brutePool.IsFailed && len(brutePool.FailedBaselines) > 0 {
// 如果因为错误积累退出, end将指向第一个错误发生时, 防止resume时跳过大量目标
pool.Statistor.End = pool.FailedBaselines[0].Number
brutePool.Statistor.End = brutePool.FailedBaselines[0].Number
}
r.PrintStat(pool)
r.PrintStat(brutePool)
r.Done()
})
}
@ -224,28 +223,6 @@ func (r *Runner) Prepare(ctx context.Context) error {
return nil
}
func (r *Runner) AddRecursive(bl *pkg.Baseline) {
// 递归新任务
task := &Task{
baseUrl: bl.UrlString,
depth: bl.RecuDepth + 1,
origin: NewOrigin(pkg.NewStatistor(bl.UrlString)),
}
r.AddPool(task)
}
func (r *Runner) AddPool(task *Task) {
// 递归新任务
if _, ok := r.PoolName[task.baseUrl]; ok {
logs.Log.Importantf("already added pool, skip %s", task.baseUrl)
return
}
task.depth++
r.poolwg.Add(1)
r.Pools.Invoke(task)
}
func (r *Runner) Run(ctx context.Context) {
Loop:
for {
@ -254,10 +231,12 @@ Loop:
if len(r.taskCh) > 0 {
for t := range r.taskCh {
stat := pkg.NewStatistor(t.baseUrl)
r.StatFile.SafeWrite(stat.Json())
r.saveStat(stat.Json())
}
}
if r.StatFile != nil {
logs.Log.Importantf("already save all stat to %s", r.StatFile.Filename)
}
break Loop
case t, ok := <-r.taskCh:
if !ok {
@ -294,16 +273,32 @@ Loop:
}
}
for {
if len(r.outputCh) == 0 {
break
}
r.outwg.Wait()
}
time.Sleep(100 * time.Millisecond) // 延迟100ms, 等所有数据处理完毕
func (r *Runner) AddRecursive(bl *pkg.Baseline) {
// 递归新任务
task := &Task{
baseUrl: bl.UrlString,
depth: bl.RecuDepth + 1,
origin: NewOrigin(pkg.NewStatistor(bl.UrlString)),
}
func (r *Runner) addBar(total int) {
r.AddPool(task)
}
// AddPool submits a task (typically a recursive one) to the worker
// pool, skipping base URLs that were already scheduled so the same
// target is not sprayed twice.
// NOTE(review): r.PoolName is only read here, never written — confirm
// registration of new base URLs happens elsewhere, otherwise this
// dedup check can never trigger.
func (r *Runner) AddPool(task *Task) {
	if _, ok := r.PoolName[task.baseUrl]; ok {
		logs.Log.Importantf("already added pool, skip %s", task.baseUrl)
		return
	}
	// descend one recursion level for the re-queued task
	task.depth++
	r.poolwg.Add(1)
	r.Pools.Invoke(task)
}
func (r *Runner) newBar(total int) {
if r.Progress == nil {
return
}
@ -329,7 +324,6 @@ func (r *Runner) Done() {
if r.bar != nil {
r.bar.Increment()
}
r.finished++
r.poolwg.Done()
}
@ -348,8 +342,12 @@ func (r *Runner) PrintStat(pool *pool.BrutePool) {
}
}
r.saveStat(pool.Statistor.Json())
}
// saveStat appends one serialized statistics record to the stat file
// (when stat persistence is enabled, i.e. StatFile is non-nil) and
// syncs it to disk so progress survives an abrupt exit.
func (r *Runner) saveStat(content string) {
	if r.StatFile != nil {
		// The old `r.StatFile.SafeWrite(pool.Statistor.Json())` line was
		// diff residue from before this helper was extracted (`pool` is
		// not in scope here); the caller now passes the JSON as content.
		r.StatFile.SafeWrite(content)
		r.StatFile.SafeSync()
	}
}

73
internal/task.go Normal file
View File

@ -0,0 +1,73 @@
package internal
import (
"fmt"
"github.com/chainreactors/logs"
"github.com/chainreactors/utils"
"github.com/chainreactors/words/rule"
"net/url"
)
// Task describes a single spray target: the base URL to brute-force,
// its recursion depth, optional per-task rule expressions, and the
// Origin carrying resumed statistics (nil for brand-new tasks).
type Task struct {
	baseUrl string            // normalized base URL, e.g. "http://host:port/path"
	depth   int               // recursion depth; incremented by AddPool when re-queued
	rule    []rule.Expression // task-specific rule expressions, if any
	origin  *Origin           // resume origin built from a Statistor; nil for fresh tasks
}
// NewTaskGenerator builds a TaskGenerator for the given port
// expression. Tasks pushed into In are forwarded to the internal tasks
// channel by a background goroutine; closing In drains the forwarder
// and closes tasks, signaling downstream consumers.
func NewTaskGenerator(port string) *TaskGenerator {
	g := &TaskGenerator{
		ports: utils.ParsePortsString(port),
		tasks: make(chan *Task),
		In:    make(chan *Task),
	}
	go func() {
		// close tasks once the producer side (In) has been closed
		defer close(g.tasks)
		for t := range g.In {
			g.tasks <- t
		}
	}()
	return g
}
// TaskGenerator turns raw target inputs (URLs, IPs) into Task values,
// optionally fanning each target out across a list of ports.
type TaskGenerator struct {
	Name  string     // human-readable source label (file name, "cmd", "cidr", ...)
	ports []string   // ports to expand each target over; empty means one task per target
	tasks chan *Task // consumer-facing channel, fed by the forwarder goroutine
	In    chan *Task // producer-facing channel; callers close it when done
}
// Run parses baseurl and emits one or more Tasks into gen.In.
//
// A missing scheme defaults to http (https when the explicit port is
// 443). With no configured ports exactly one task is produced;
// otherwise one task per port. For scheme-less inputs such as bare
// IPs/hosts, url.Parse leaves Host empty and stores the value in Path,
// so that field is used as the host in the port expansion.
func (gen *TaskGenerator) Run(baseurl string) {
	parsed, err := url.Parse(baseurl)
	if err != nil {
		logs.Log.Warnf("parse %s, %s ", baseurl, err.Error())
		return
	}
	if parsed.Scheme == "" {
		if parsed.Port() == "443" {
			parsed.Scheme = "https"
		} else {
			parsed.Scheme = "http"
		}
	}
	if len(gen.ports) == 0 {
		gen.In <- &Task{baseUrl: parsed.String()}
		return
	}
	for _, p := range gen.ports {
		if parsed.Host == "" {
			// bare host/IP input: url.Parse stored the host in Path
			gen.In <- &Task{baseUrl: fmt.Sprintf("%s://%s:%s", parsed.Scheme, parsed.Path, p)}
		} else {
			// parsed.Path is either empty or starts with "/", so join with
			// "%s%s": the previous "/%s" produced "host:port//path" URLs
			gen.In <- &Task{baseUrl: fmt.Sprintf("%s://%s:%s%s", parsed.Scheme, parsed.Host, p, parsed.Path)}
		}
	}
}
// Close closes the internal tasks channel, releasing downstream readers.
// NOTE(review): the forwarder goroutine started by NewTaskGenerator also
// closes tasks after In is closed, so calling Close after close(gen.In)
// would double-close and panic — confirm Close is only used on paths
// where In is never closed.
func (gen *TaskGenerator) Close() {
	close(gen.tasks)
}

View File

@ -3,16 +3,8 @@ package internal
import (
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/words"
"github.com/chainreactors/words/rule"
)
type Task struct {
baseUrl string
depth int
rule []rule.Expression
origin *Origin
}
func NewOrigin(stat *pkg.Statistor) *Origin {
return &Origin{Statistor: stat}
}

View File

@ -164,3 +164,11 @@ func wrapWordsFunc(f func(string) string) func(string) []string {
return []string{f(s)}
}
}
// safeFilename converts a task/target name (typically a URL) into a
// string safe to use as a filename: scheme prefixes are stripped and
// the path/port separators ":" and "/" become underscores.
func safeFilename(filename string) string {
	// Single pass instead of four chained ReplaceAll calls. The scheme
	// patterns are listed first so a "http://" / "https://" occurrence
	// is consumed whole before its ":" and "/" characters are rewritten,
	// matching the original sequential-replacement result.
	return strings.NewReplacer(
		"https://", "",
		"http://", "",
		":", "_",
		"/", "_",
	).Replace(filename)
}

View File

@ -155,8 +155,8 @@ func (bl *Baseline) Collect() {
if bl.ContentType == "html" {
bl.Title = iutils.AsciiEncode(parsers.MatchTitle(bl.Body))
} else if bl.ContentType == "ico" {
if frame := FingerEngine.HashContentMatch(bl.Body); frame != nil {
bl.Frameworks.Add(frame)
if frame := FingerEngine.Favicon().Match(bl.Body); frame != nil {
bl.Frameworks.Merge(frame)
}
}
}

View File

@ -4,6 +4,7 @@ import (
"encoding/json"
"github.com/chainreactors/fingers"
"github.com/chainreactors/parsers"
"github.com/chainreactors/utils"
"github.com/chainreactors/utils/iutils"
"github.com/chainreactors/words/mask"
"os"
@ -19,9 +20,19 @@ var (
ActivePath []string
)
func LoadTemplates() error {
// LoadPorts reads the embedded "port" configuration, decodes the port
// preset list, and installs it as the global utils.PrePort preset.
func LoadPorts() error {
	var ports []*utils.PortConfig
	if err := json.Unmarshal(LoadConfig("port"), &ports); err != nil {
		return err
	}
	utils.PrePort = utils.NewPortPreset(ports)
	return nil
}
func LoadFingers() error {
var err error
// load fingers
FingerEngine, err = fingers.NewEngine()
if err != nil {
return err
@ -38,7 +49,11 @@ func LoadTemplates() error {
ActivePath = append(ActivePath, f.Path)
}
}
return nil
}
func LoadTemplates() error {
var err error
// load rule
var data map[string]interface{}
err = json.Unmarshal(LoadConfig("spray_rule"), &data)
@ -105,11 +120,15 @@ func LoadExtractorConfig(filename string) ([]*parsers.Extractor, error) {
}
func Load() error {
// load fingers
err := LoadTemplates()
err := LoadPorts()
if err != nil {
return err
}
err = LoadTemplates()
if err != nil {
return err
}
return nil
}

@ -1 +1 @@
Subproject commit f1150d00253e0c888976dbfe55cf2669c51f0b58
Subproject commit 3e85234341b95f7e6e45b31468311f01093ac970