refactor option.go

M09Ic 2024-07-22 16:27:07 +08:00
parent 31f4dc661e
commit 7af848a6fd
6 changed files with 438 additions and 387 deletions

View File

@@ -111,13 +111,13 @@ func Spray() {
return
}
err = option.PreCompare()
err = option.Prepare()
if err != nil {
logs.Log.Errorf(err.Error())
return
}
runner, err := option.Compare()
runner, err := option.NewRunner()
if err != nil {
logs.Log.Errorf(err.Error())
return

View File

@@ -20,11 +20,11 @@ var (
DefaultFingerTemplate = "fingers/templates"
FingerConfigs = map[string]string{
fingers.FingersEngine: "fingers_http.json",
fingers.FingerPrintEngine: "fingerprinthub_v3.json",
fingers.WappalyzerEngine: "wappalyzer.json",
fingers.EHoleEngine: "ehole.json",
fingers.GobyEngine: "goby.json",
fingers.FingersEngine: "fingers_http.json.gz",
fingers.FingerPrintEngine: "fingerprinthub_v3.json.gz",
fingers.WappalyzerEngine: "wappalyzer.json.gz",
fingers.EHoleEngine: "ehole.json.gz",
fingers.GobyEngine: "goby.json.gz",
}
baseURL = "https://raw.githubusercontent.com/chainreactors/fingers/master/resources/"
)
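
The fingerprint resources now point at the gzip-compressed variants hosted under baseURL. The download and decompression logic is not part of this hunk; the snippet below is only a minimal standalone sketch of how such a resource could be fetched and unpacked with the standard library, assuming the files are plain gzip streams (the helper name and error handling are illustrative).

package main

import (
	"compress/gzip"
	"fmt"
	"io"
	"net/http"
)

// fetchFingerConfig is a hypothetical helper: download one compressed
// resource from baseURL and return the decompressed JSON bytes.
func fetchFingerConfig(name string) ([]byte, error) {
	resp, err := http.Get("https://raw.githubusercontent.com/chainreactors/fingers/master/resources/" + name)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	gz, err := gzip.NewReader(resp.Body) // the .gz suffix implies a gzip stream
	if err != nil {
		return nil, err
	}
	defer gz.Close()
	return io.ReadAll(gz)
}

func main() {
	data, err := fetchFingerConfig("fingers_http.json.gz")
	if err != nil {
		panic(err)
	}
	fmt.Printf("%d bytes of JSON after decompression\n", len(data))
}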

View File

@@ -155,7 +155,30 @@ type MiscOptions struct {
InitConfig bool `long:"init" description:"Bool, init config file"`
}
func (opt *Option) PreCompare() error {
func (opt *Option) Validate() error {
if opt.Uppercase && opt.Lowercase {
return errors.New("cannot set -U and -L at the same time")
}
if (opt.Offset != 0 || opt.Limit != 0) && opt.Depth > 0 {
// using offset/limit together with recursive depth would also cause confusion
return errors.New("--offset and --limit cannot be used with --depth at the same time")
}
if opt.Depth > 0 && opt.ResumeFrom != "" {
// recursion conflicts with resume: the resumed word and rule are not taken from the command line
return errors.New("--resume and --depth cannot be used at the same time")
}
if opt.ResumeFrom == "" && len(opt.URL) == 0 && opt.URLFile == "" && len(opt.CIDRs) == 0 && opt.RawFile == "" {
return fmt.Errorf("without any target, please use -u/-l/-c/--resume to set targets")
}
return nil
}
func (opt *Option) Prepare() error {
var err error
logs.Log.SetColor(true)
if err = opt.FingerOptions.Validate(); err != nil {
@@ -235,7 +258,7 @@ func (opt *Option) PreCompare() error {
return nil
}
func (opt *Option) Compare() (*Runner, error) {
func (opt *Option) NewRunner() (*Runner, error) {
var err error
r := &Runner{
Option: opt,
@@ -296,123 +319,29 @@ func (opt *Option) Compare() (*Runner, error) {
pkg.EnableAllFingerEngine = true
pkg.Extractors["recon"] = pkg.ExtractRegexps["pentest"]
opt.AppendRule = append(opt.AppendRule, "filebak")
} else if opt.FileBak {
}
if opt.FileBak {
opt.AppendRule = append(opt.AppendRule, "filebak")
}
var s strings.Builder
if r.Crawl {
s.WriteString("crawl enable; ")
if opt.Common {
r.AppendWords = append(r.AppendWords, mask.SpecialWords["common_file"]...)
}
if r.Finger {
if opt.Finger {
r.AppendWords = append(r.AppendWords, pkg.ActivePath...)
pkg.EnableAllFingerEngine = true
s.WriteString("active fingerprint enable; ")
}
if r.Bak {
s.WriteString("bak file enable; ")
}
if r.Common {
r.AppendWords = append(r.AppendWords, mask.SpecialWords["common_file"]...)
s.WriteString("common file enable; ")
}
if opt.Recon {
s.WriteString("recon enable; ")
}
if len(opt.AppendRule) > 0 {
s.WriteString("file bak enable; ")
}
if r.RetryCount > 0 {
s.WriteString("Retry Count: " + strconv.Itoa(r.RetryCount))
}
if s.Len() > 0 {
logs.Log.Important(s.String())
}
opt.PrintPlugin()
if opt.NoScope {
r.Scope = []string{"*"}
}
// prepare word
var dicts [][]string
if opt.DefaultDict {
dicts = append(dicts, pkg.LoadDefaultDict())
logs.Log.Info("not set any dictionary, use default dictionary: https://github.com/maurosoria/dirsearch/blob/master/db/dicc.txt")
}
for i, f := range opt.Dictionaries {
dict, err := loadFileToSlice(f)
err = opt.BuildWords(r)
if err != nil {
return nil, err
}
if err != nil {
return nil, err
}
dicts = append(dicts, dict)
if opt.ResumeFrom != "" {
dictCache[f] = dicts[i]
}
logs.Log.Logf(pkg.LogVerbose, "Loaded %d word from %s", len(dicts[i]), f)
}
if len(dicts) == 0 {
r.IsCheck = true
}
if opt.Word == "" {
opt.Word = "{?"
for i, _ := range dicts {
opt.Word += strconv.Itoa(i)
}
opt.Word += "}"
}
if len(opt.Suffixes) != 0 {
mask.SpecialWords["suffix"] = opt.Suffixes
opt.Word += "{?@suffix}"
}
if len(opt.Prefixes) != 0 {
mask.SpecialWords["prefix"] = opt.Prefixes
opt.Word = "{?@prefix}" + opt.Word
}
if opt.ForceExtension && opt.Extensions != "" {
exts := strings.Split(opt.Extensions, ",")
for i, e := range exts {
if !strings.HasPrefix(e, ".") {
exts[i] = "." + e
}
}
mask.SpecialWords["ext"] = exts
opt.Word += "{?@ext}"
}
r.Wordlist, err = mask.Run(opt.Word, dicts, nil)
if err != nil {
return nil, fmt.Errorf("%s %w", opt.Word, err)
}
if len(r.Wordlist) > 0 {
logs.Log.Logf(pkg.LogVerbose, "Parsed %d words by %s", len(r.Wordlist), opt.Word)
}
if len(opt.Rules) != 0 {
rules, err := loadRuleAndCombine(opt.Rules)
if err != nil {
return nil, err
}
r.Rules = rule.Compile(rules, opt.FilterRule)
} else if opt.FilterRule != "" {
// if filter rule is not empty, set rules to ":", force to open filter mode
r.Rules = rule.Compile(":", opt.FilterRule)
} else {
r.Rules = new(rule.Program)
}
if len(r.Rules.Expressions) > 0 {
r.Total = len(r.Wordlist) * len(r.Rules.Expressions)
} else {
r.Total = len(r.Wordlist)
}
pkg.DefaultStatistor = pkg.Statistor{
Word: opt.Word,
@@ -424,235 +353,10 @@ func (opt *Option) Compare() (*Runner, error) {
Total: r.Total,
}
if len(opt.AppendRule) != 0 {
content, err := loadRuleAndCombine(opt.AppendRule)
r.Tasks, err = opt.BuildTasks(r)
if err != nil {
return nil, err
}
r.AppendRules = rule.Compile(string(content), "")
}
if len(opt.AppendFile) != 0 {
var bs bytes.Buffer
for _, f := range opt.AppendFile {
content, err := ioutil.ReadFile(f)
if err != nil {
return nil, err
}
bs.Write(bytes.TrimSpace(content))
bs.WriteString("\n")
}
lines := strings.Split(bs.String(), "\n")
for i, line := range lines {
lines[i] = strings.TrimSpace(line)
}
r.AppendWords = append(r.AppendWords, lines...)
}
ports := utils.ParsePortsString(opt.PortRange)
// prepare task
tasks := make(chan *Task, opt.PoolSize)
var taskfrom string
if opt.ResumeFrom != "" {
stats, err := pkg.ReadStatistors(opt.ResumeFrom)
if err != nil {
logs.Log.Error(err.Error())
}
r.Count = len(stats)
taskfrom = "resume " + opt.ResumeFrom
go func() {
for _, stat := range stats {
tasks <- &Task{baseUrl: stat.BaseUrl, origin: NewOrigin(stat)}
}
close(tasks)
}()
} else {
var file *os.File
// generate tasks according to the input type
if len(opt.URL) == 1 {
go func() {
opt.GenerateTasks(tasks, opt.URL[0], ports)
close(tasks)
}()
parsed, _ := url.Parse(opt.URL[0])
taskfrom = parsed.Host
r.Count = 1
} else if len(opt.URL) > 1 {
go func() {
for _, u := range opt.URL {
opt.GenerateTasks(tasks, u, ports)
}
close(tasks)
}()
taskfrom = "cmd"
r.Count = len(opt.URL)
} else if opt.RawFile != "" {
raw, err := os.Open(opt.RawFile)
if err != nil {
return nil, err
}
req, err := http.ReadRequest(bufio.NewReader(raw))
if err != nil {
return nil, err
}
go func() {
opt.GenerateTasks(tasks, fmt.Sprintf("http://%s%s", req.Host, req.URL.String()), ports)
close(tasks)
}()
r.Method = req.Method
for k, _ := range req.Header {
r.Headers[k] = req.Header.Get(k)
}
} else if len(opt.CIDRs) != 0 {
cidrs := utils.ParseCIDRs(opt.CIDRs)
if len(ports) == 0 {
ports = []string{"80", "443"}
}
r.Count = cidrs.Count()
go func() {
for _, cidr := range cidrs {
if cidr == nil {
logs.Log.Error("cidr format error: " + cidr.String())
}
for ip := range cidr.Range() {
opt.GenerateTasks(tasks, ip.String(), ports)
}
}
close(tasks)
}()
taskfrom = "cidr"
} else if opt.URLFile != "" {
file, err = os.Open(opt.URLFile)
if err != nil {
return nil, err
}
taskfrom = filepath.Base(opt.URLFile)
} else if files.HasStdin() {
file = os.Stdin
taskfrom = "stdin"
}
if file != nil {
content, err := ioutil.ReadAll(file)
if err != nil {
return nil, err
}
urls := strings.Split(strings.TrimSpace(string(content)), "\n")
for _, u := range urls {
u = strings.TrimSpace(u)
if _, err := url.Parse(u); err == nil {
r.Count++
} else if ip := utils.ParseIP(u); ip != nil {
r.Count++
} else if cidr := utils.ParseCIDR(u); cidr != nil {
r.Count += cidr.Count()
}
}
go func() {
for _, u := range urls {
u = strings.TrimSpace(u)
if _, err := url.Parse(u); err == nil {
opt.GenerateTasks(tasks, u, ports)
} else if ip := utils.ParseIP(u); ip != nil {
opt.GenerateTasks(tasks, u, ports)
} else if cidr := utils.ParseCIDR(u); cidr != nil {
for ip := range cidr.Range() {
opt.GenerateTasks(tasks, ip.String(), ports)
}
}
}
close(tasks)
}()
}
}
if len(ports) > 0 {
r.Count = r.Count * len(ports)
}
r.Tasks = tasks
logs.Log.Logf(pkg.LogVerbose, "Loaded %d urls from %s", len(tasks), taskfrom)
// %EXT% substitution, similar to dirsearch
if opt.Extensions != "" {
r.AppendFunction(func(s string) []string {
exts := strings.Split(opt.Extensions, ",")
ss := make([]string, len(exts))
for i, e := range exts {
if strings.Contains(s, "%EXT%") {
ss[i] = strings.Replace(s, "%EXT%", e, -1)
}
}
return ss
})
} else {
r.AppendFunction(func(s string) []string {
if strings.Contains(s, "%EXT%") {
return nil
}
return []string{s}
})
}
if opt.Uppercase {
r.AppendFunction(wrapWordsFunc(strings.ToUpper))
}
if opt.Lowercase {
r.AppendFunction(wrapWordsFunc(strings.ToLower))
}
if opt.RemoveExtensions != "" {
rexts := strings.Split(opt.ExcludeExtensions, ",")
r.AppendFunction(func(s string) []string {
if ext := parseExtension(s); iutils.StringsContains(rexts, ext) {
return []string{strings.TrimSuffix(s, "."+ext)}
}
return []string{s}
})
}
if opt.ExcludeExtensions != "" {
exexts := strings.Split(opt.ExcludeExtensions, ",")
r.AppendFunction(func(s string) []string {
if ext := parseExtension(s); iutils.StringsContains(exexts, ext) {
return nil
}
return []string{s}
})
}
if len(opt.Replaces) > 0 {
r.AppendFunction(func(s string) []string {
for k, v := range opt.Replaces {
s = strings.Replace(s, k, v, -1)
}
return []string{s}
})
}
// default skip function, skip %EXT%
r.AppendFunction(func(s string) []string {
if strings.Contains(s, "%EXT%") {
return nil
}
return []string{s}
})
if len(opt.Skips) > 0 {
r.AppendFunction(func(s string) []string {
for _, skip := range opt.Skips {
if strings.Contains(s, skip) {
return nil
}
}
return []string{s}
})
}
logs.Log.Logf(pkg.LogVerbose, "Loaded %d dictionaries and %d decorators", len(opt.Dictionaries), len(r.Fns))
if opt.Match != "" {
exp, err := expr.Compile(opt.Match)
@@ -752,7 +456,7 @@ func (opt *Option) Compare() (*Runner, error) {
if opt.ResumeFrom != "" {
r.StatFile, err = files.NewFile(opt.ResumeFrom, false, true, true)
} else {
r.StatFile, err = files.NewFile(strings.ReplaceAll(taskfrom, ":", "_")+".stat", false, true, true)
r.StatFile, err = files.NewFile(strings.ReplaceAll(r.Tasks.Name, ":", "_")+".stat", false, true, true)
}
if err != nil {
return nil, err
@@ -768,55 +472,338 @@ func (opt *Option) Compare() (*Runner, error) {
return r, nil
}
func (opt *Option) Validate() error {
if opt.Uppercase && opt.Lowercase {
return errors.New("cannot set -U and -L at the same time")
func (opt *Option) PrintPlugin() {
var s strings.Builder
if opt.Crawl {
s.WriteString("crawl enable; ")
}
if opt.Finger {
s.WriteString("active fingerprint enable; ")
}
if opt.Bak {
s.WriteString("bak file enable; ")
}
if opt.Common {
s.WriteString("common file enable; ")
}
if opt.Recon {
s.WriteString("recon enable; ")
}
if opt.FileBak {
s.WriteString("file bak enable; ")
}
if (opt.Offset != 0 || opt.Limit != 0) && opt.Depth > 0 {
// using offset/limit together with recursive depth would also cause confusion
return errors.New("--offset and --limit cannot be used with --depth at the same time")
if opt.RetryCount > 0 {
s.WriteString("Retry Count: " + strconv.Itoa(opt.RetryCount))
}
if s.Len() > 0 {
logs.Log.Important(s.String())
}
}
if opt.Depth > 0 && opt.ResumeFrom != "" {
// recursion conflicts with resume: the resumed word and rule are not taken from the command line
return errors.New("--resume and --depth cannot be used at the same time")
func (opt *Option) BuildWords(r *Runner) error {
var dicts [][]string
var err error
if opt.DefaultDict {
dicts = append(dicts, pkg.LoadDefaultDict())
logs.Log.Info("not set any dictionary, use default dictionary: https://github.com/maurosoria/dirsearch/blob/master/db/dicc.txt")
}
for i, f := range opt.Dictionaries {
dict, err := loadFileToSlice(f)
if err != nil {
return err
}
dicts = append(dicts, dict)
if opt.ResumeFrom != "" {
dictCache[f] = dicts[i]
}
if opt.ResumeFrom == "" && len(opt.URL) == 0 && opt.URLFile == "" && len(opt.CIDRs) == 0 && opt.RawFile == "" {
return fmt.Errorf("without any target, please use -u/-l/-c/--resume to set targets")
logs.Log.Logf(pkg.LogVerbose, "Loaded %d word from %s", len(dicts[i]), f)
}
if len(dicts) == 0 {
r.IsCheck = true
}
if opt.Word == "" {
opt.Word = "{?"
for i, _ := range dicts {
opt.Word += strconv.Itoa(i)
}
opt.Word += "}"
}
if len(opt.Suffixes) != 0 {
mask.SpecialWords["suffix"] = opt.Suffixes
opt.Word += "{?@suffix}"
}
if len(opt.Prefixes) != 0 {
mask.SpecialWords["prefix"] = opt.Prefixes
opt.Word = "{?@prefix}" + opt.Word
}
if opt.ForceExtension && opt.Extensions != "" {
exts := strings.Split(opt.Extensions, ",")
for i, e := range exts {
if !strings.HasPrefix(e, ".") {
exts[i] = "." + e
}
}
mask.SpecialWords["ext"] = exts
opt.Word += "{?@ext}"
}
r.Wordlist, err = mask.Run(opt.Word, dicts, nil)
if err != nil {
return fmt.Errorf("%s %w", opt.Word, err)
}
if len(r.Wordlist) > 0 {
logs.Log.Logf(pkg.LogVerbose, "Parsed %d words by %s", len(r.Wordlist), opt.Word)
}
if len(opt.Rules) != 0 {
rules, err := loadRuleAndCombine(opt.Rules)
if err != nil {
return err
}
r.Rules = rule.Compile(rules, opt.FilterRule)
} else if opt.FilterRule != "" {
// if filter rule is not empty, set rules to ":", force to open filter mode
r.Rules = rule.Compile(":", opt.FilterRule)
} else {
r.Rules = new(rule.Program)
}
if len(r.Rules.Expressions) > 0 {
r.Total = len(r.Wordlist) * len(r.Rules.Expressions)
} else {
r.Total = len(r.Wordlist)
}
if len(opt.AppendRule) != 0 {
content, err := loadRuleAndCombine(opt.AppendRule)
if err != nil {
return err
}
r.AppendRules = rule.Compile(string(content), "")
}
if len(opt.AppendFile) != 0 {
var bs bytes.Buffer
for _, f := range opt.AppendFile {
content, err := ioutil.ReadFile(f)
if err != nil {
return err
}
bs.Write(bytes.TrimSpace(content))
bs.WriteString("\n")
}
lines := strings.Split(bs.String(), "\n")
for i, line := range lines {
lines[i] = strings.TrimSpace(line)
}
r.AppendWords = append(r.AppendWords, lines...)
}
// %EXT% substitution, similar to dirsearch
if opt.Extensions != "" {
r.AppendFunction(func(s string) []string {
exts := strings.Split(opt.Extensions, ",")
ss := make([]string, len(exts))
for i, e := range exts {
if strings.Contains(s, "%EXT%") {
ss[i] = strings.Replace(s, "%EXT%", e, -1)
}
}
return ss
})
} else {
r.AppendFunction(func(s string) []string {
if strings.Contains(s, "%EXT%") {
return nil
}
return []string{s}
})
}
if opt.Uppercase {
r.AppendFunction(wrapWordsFunc(strings.ToUpper))
}
if opt.Lowercase {
r.AppendFunction(wrapWordsFunc(strings.ToLower))
}
if opt.RemoveExtensions != "" {
rexts := strings.Split(opt.ExcludeExtensions, ",")
r.AppendFunction(func(s string) []string {
if ext := parseExtension(s); iutils.StringsContains(rexts, ext) {
return []string{strings.TrimSuffix(s, "."+ext)}
}
return []string{s}
})
}
if opt.ExcludeExtensions != "" {
exexts := strings.Split(opt.ExcludeExtensions, ",")
r.AppendFunction(func(s string) []string {
if ext := parseExtension(s); iutils.StringsContains(exexts, ext) {
return nil
}
return []string{s}
})
}
if len(opt.Replaces) > 0 {
r.AppendFunction(func(s string) []string {
for k, v := range opt.Replaces {
s = strings.Replace(s, k, v, -1)
}
return []string{s}
})
}
// default skip function, skip %EXT%
r.AppendFunction(func(s string) []string {
if strings.Contains(s, "%EXT%") {
return nil
}
return []string{s}
})
if len(opt.Skips) > 0 {
r.AppendFunction(func(s string) []string {
for _, skip := range opt.Skips {
if strings.Contains(s, skip) {
return nil
}
}
return []string{s}
})
}
logs.Log.Logf(pkg.LogVerbose, "Loaded %d dictionaries and %d decorators", len(opt.Dictionaries), len(r.Fns))
return nil
}
// Generate Tasks
func (opt *Option) GenerateTasks(ch chan *Task, u string, ports []string) {
parsed, err := url.Parse(u)
func (opt *Option) BuildTasks(r *Runner) (*TaskGenerator, error) {
// prepare task
var err error
gen := NewTaskGenerator(opt.PortRange)
if opt.ResumeFrom != "" {
stats, err := pkg.ReadStatistors(opt.ResumeFrom)
if err != nil {
logs.Log.Warnf("parse %s, %s ", u, err.Error())
return
logs.Log.Error(err.Error())
}
if parsed.Scheme == "" {
if parsed.Port() == "443" {
parsed.Scheme = "https"
r.Count = len(stats)
gen.Name = "resume " + opt.ResumeFrom
go func() {
for _, stat := range stats {
gen.In <- &Task{baseUrl: stat.BaseUrl, origin: NewOrigin(stat)}
}
close(gen.In)
}()
} else {
parsed.Scheme = "http"
var file *os.File
// generate tasks according to the input type
if len(opt.URL) == 1 {
gen.Name = opt.URL[0]
go func() {
gen.Run(opt.URL[0])
close(gen.In)
}()
r.Count = 1
} else if len(opt.URL) > 1 {
go func() {
for _, u := range opt.URL {
gen.Run(u)
}
close(gen.In)
}()
gen.Name = "cmd"
r.Count = len(opt.URL)
} else if opt.RawFile != "" {
raw, err := os.Open(opt.RawFile)
if err != nil {
return nil, err
}
req, err := http.ReadRequest(bufio.NewReader(raw))
if err != nil {
return nil, err
}
go func() {
gen.Run(fmt.Sprintf("http://%s%s", req.Host, req.URL.String()))
close(gen.In)
}()
r.Method = req.Method
for k, _ := range req.Header {
r.Headers[k] = req.Header.Get(k)
}
r.Count = 1
} else if len(opt.CIDRs) != 0 {
cidrs := utils.ParseCIDRs(opt.CIDRs)
if len(gen.ports) == 0 {
gen.ports = []string{"80", "443"}
}
gen.Name = "cidr"
r.Count = cidrs.Count()
go func() {
for _, cidr := range cidrs {
if cidr == nil {
logs.Log.Error("cidr format error: " + cidr.String())
}
for ip := range cidr.Range() {
gen.Run(ip.String())
}
}
close(gen.In)
}()
} else if opt.URLFile != "" {
file, err = os.Open(opt.URLFile)
if err != nil {
return nil, err
}
gen.Name = filepath.Base(opt.URLFile)
} else if files.HasStdin() {
file = os.Stdin
gen.Name = "stdin"
}
if file != nil {
content, err := ioutil.ReadAll(file)
if err != nil {
return nil, err
}
urls := strings.Split(strings.TrimSpace(string(content)), "\n")
for _, u := range urls {
u = strings.TrimSpace(u)
if _, err := url.Parse(u); err == nil {
r.Count++
} else if ip := utils.ParseIP(u); ip != nil {
r.Count++
} else if cidr := utils.ParseCIDR(u); cidr != nil {
r.Count += cidr.Count()
}
}
if len(ports) == 0 {
ch <- &Task{baseUrl: parsed.String()}
return
go func() {
for _, u := range urls {
u = strings.TrimSpace(u)
if _, err := url.Parse(u); err == nil {
gen.Run(u)
} else if ip := utils.ParseIP(u); ip != nil {
gen.Run(u)
} else if cidr := utils.ParseCIDR(u); cidr != nil {
for ip := range cidr.Range() {
gen.Run(ip.String())
}
}
}
close(gen.In)
}()
}
}
for _, p := range ports {
if parsed.Host == "" {
ch <- &Task{baseUrl: fmt.Sprintf("%s://%s:%s", parsed.Scheme, parsed.Path, p)}
} else {
ch <- &Task{baseUrl: fmt.Sprintf("%s://%s:%s/%s", parsed.Scheme, parsed.Host, p, parsed.Path)}
}
if len(gen.ports) > 0 {
r.Count = r.Count * len(gen.ports)
}
return gen, nil
}
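
For reference, a minimal sketch of the word-template expansion that BuildWords above relies on, assuming mask.Run and mask.SpecialWords behave exactly as they are used there ("{?0}" draws from dictionary index 0, "{?@suffix}" from a registered special word list); the sample dictionary, suffix, and expected output are illustrative.

package main

import (
	"fmt"

	"github.com/chainreactors/words/mask"
)

func main() {
	// one dictionary supplied via -d, referenced as index 0
	dicts := [][]string{{"admin", "login"}}
	// --suffix values are registered as the special word list "suffix"
	mask.SpecialWords["suffix"] = []string{".bak"}

	// "{?0}{?@suffix}" mirrors the template BuildWords assembles from the flags
	words, err := mask.Run("{?0}{?@suffix}", dicts, nil)
	if err != nil {
		panic(err)
	}
	fmt.Println(words) // roughly: admin.bak, login.bak (illustrative)
}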

View File

@@ -38,7 +38,7 @@ type Runner struct {
IsCheck bool
Pools *ants.PoolWithFunc
PoolName map[string]bool
Tasks chan *Task
Tasks *TaskGenerator
Rules *rule.Program
AppendRules *rule.Program
Headers map[string]string
@@ -127,7 +127,7 @@ func (r *Runner) Prepare(ctx context.Context) error {
ch := make(chan string)
go func() {
for t := range r.Tasks {
for t := range r.Tasks.tasks {
ch <- t.baseUrl
}
close(ch)
@@ -141,7 +141,7 @@ func (r *Runner) Prepare(ctx context.Context) error {
} else {
// 完整探测模式
go func() {
for t := range r.Tasks {
for t := range r.Tasks.tasks {
r.taskCh <- t
}
close(r.taskCh)

internal/task.go (new file, 72 additions)
View File

@@ -0,0 +1,72 @@
package internal
import (
"fmt"
"github.com/chainreactors/logs"
"github.com/chainreactors/utils"
"github.com/chainreactors/words/rule"
"net/url"
)
type Task struct {
baseUrl string
depth int
rule []rule.Expression
origin *Origin
}
func NewTaskGenerator(port string) *TaskGenerator {
gen := &TaskGenerator{
ports: utils.ParsePortsString(port),
tasks: make(chan *Task, 256),
In: make(chan *Task),
}
go func() {
for task := range gen.In {
gen.tasks <- task
}
close(gen.tasks)
}()
return gen
}
type TaskGenerator struct {
Name string
ports []string
tasks chan *Task
In chan *Task
}
func (gen *TaskGenerator) Run(baseurl string) {
parsed, err := url.Parse(baseurl)
if err != nil {
logs.Log.Warnf("parse %s, %s ", baseurl, err.Error())
return
}
if parsed.Scheme == "" {
if parsed.Port() == "443" {
parsed.Scheme = "https"
} else {
parsed.Scheme = "http"
}
}
if len(gen.ports) == 0 {
gen.In <- &Task{baseUrl: parsed.String()}
}
for _, p := range gen.ports {
if parsed.Host == "" {
gen.In <- &Task{baseUrl: fmt.Sprintf("%s://%s:%s", parsed.Scheme, parsed.Path, p)}
} else {
gen.In <- &Task{baseUrl: fmt.Sprintf("%s://%s:%s/%s", parsed.Scheme, parsed.Host, p, parsed.Path)}
}
}
}
func (gen *TaskGenerator) Close() {
close(gen.tasks)
}
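
A minimal usage sketch of the generator above, not part of the commit: producers push targets through Run, which sends one Task per configured port into In; the goroutine started in NewTaskGenerator forwards In into tasks, which the consumer (Runner.Prepare in the real code) drains. The function name, port string, and targets are illustrative, and because tasks is unexported the loop only compiles inside package internal.

package internal

import "github.com/chainreactors/logs"

func exampleTaskFlow() {
	gen := NewTaskGenerator("80,8080") // illustrative port string, parsed by utils.ParsePortsString
	go func() {
		for _, target := range []string{"example.com", "http://10.0.0.1"} {
			gen.Run(target) // expands the target into one Task per port
		}
		close(gen.In) // the forwarding goroutine then closes gen.tasks
	}()
	for task := range gen.tasks { // Runner.Prepare ranges over this channel in the real code
		logs.Log.Info(task.baseUrl)
	}
}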

View File

@@ -3,16 +3,8 @@ package internal
import (
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/words"
"github.com/chainreactors/words/rule"
)
type Task struct {
baseUrl string
depth int
rule []rule.Expression
origin *Origin
}
func NewOrigin(stat *pkg.Statistor) *Origin {
return &Origin{Statistor: stat}
}