unify loading of append words and dicts

This commit is contained in:
M09Ic 2024-08-26 02:22:35 +08:00
parent 77a5e58a2a
commit 106f007693
7 changed files with 223 additions and 227 deletions

View File

@ -2,7 +2,6 @@ package internal
import ( import (
"bufio" "bufio"
"bytes"
"errors" "errors"
"fmt" "fmt"
"github.com/chainreactors/files" "github.com/chainreactors/files"
@ -54,7 +53,6 @@ type InputOptions struct {
CIDRs []string `short:"i" long:"cidr" description:"String, input cidr, e.g.: 1.1.1.1/24 "` CIDRs []string `short:"i" long:"cidr" description:"String, input cidr, e.g.: 1.1.1.1/24 "`
RawFile string `long:"raw" description:"File, input raw request filename"` RawFile string `long:"raw" description:"File, input raw request filename"`
Dictionaries []string `short:"d" long:"dict" description:"Files, Multi,dict files, e.g.: -d 1.txt -d 2.txt" config:"dictionaries"` Dictionaries []string `short:"d" long:"dict" description:"Files, Multi,dict files, e.g.: -d 1.txt -d 2.txt" config:"dictionaries"`
PrintPreset bool `long:"print" description:"Bool, print preset all preset config "`
DefaultDict bool `short:"D" long:"default" description:"Bool, use default dictionary" config:"default"` DefaultDict bool `short:"D" long:"default" description:"Bool, use default dictionary" config:"default"`
Word string `short:"w" long:"word" description:"String, word generate dsl, e.g.: -w test{?ld#4}" config:"word"` Word string `short:"w" long:"word" description:"String, word generate dsl, e.g.: -w test{?ld#4}" config:"word"`
Rules []string `short:"r" long:"rules" description:"Files, rule files, e.g.: -r rule1.txt -r rule2.txt" config:"rules"` Rules []string `short:"r" long:"rules" description:"Files, rule files, e.g.: -r rule1.txt -r rule2.txt" config:"rules"`
@ -80,11 +78,10 @@ type FunctionOptions struct {
} }
type OutputOptions struct { type OutputOptions struct {
Match string `long:"match" description:"String, custom match function, e.g.: --match 'current.Status != 200''" config:"match" ` Match string `long:"match" description:"String, custom match function, e.g.: --match 'current.Status != 200''" config:"match" `
Filter string `long:"filter" description:"String, custom filter function, e.g.: --filter 'current.Body contains \"hello\"'" config:"filter"` Filter string `long:"filter" description:"String, custom filter function, e.g.: --filter 'current.Body contains \"hello\"'" config:"filter"`
Fuzzy bool `long:"fuzzy" description:"String, open fuzzy output" config:"fuzzy"` Fuzzy bool `long:"fuzzy" description:"String, open fuzzy output" config:"fuzzy"`
OutputFile string `short:"f" long:"file" description:"String, output filename" json:"output_file,omitempty" config:"output-file"` OutputFile string `short:"f" long:"file" description:"String, output filename" json:"output_file,omitempty" config:"output-file"`
//FuzzyFile string `long:"fuzzy-file" description:"String, fuzzy output filename" json:"fuzzy_file,omitempty" config:"fuzzy-file"`
DumpFile string `long:"dump-file" description:"String, dump all request, and write to filename" config:"dump-file"` DumpFile string `long:"dump-file" description:"String, dump all request, and write to filename" config:"dump-file"`
Dump bool `long:"dump" description:"Bool, dump all request" config:"dump"` Dump bool `long:"dump" description:"Bool, dump all request" config:"dump"`
AutoFile bool `long:"auto-file" description:"Bool, auto generator output and fuzzy filename" config:"auto-file"` AutoFile bool `long:"auto-file" description:"Bool, auto generator output and fuzzy filename" config:"auto-file"`
@ -120,9 +117,8 @@ type PluginOptions struct {
} }
type ModeOptions struct { type ModeOptions struct {
RateLimit int `long:"rate-limit" default:"0" description:"Int, request rate limit (rate/s), e.g.: --rate-limit 100" config:"rate-limit"` RateLimit int `long:"rate-limit" default:"0" description:"Int, request rate limit (rate/s), e.g.: --rate-limit 100" config:"rate-limit"`
Force bool `long:"force" description:"Bool, skip error break" config:"force"` Force bool `long:"force" description:"Bool, skip error break" config:"force"`
//CheckOnly bool `long:"check-only" description:"Bool, check only" config:"check-only"`
NoScope bool `long:"no-scope" description:"Bool, no scope" config:"no-scope"` NoScope bool `long:"no-scope" description:"Bool, no scope" config:"no-scope"`
Scope []string `long:"scope" description:"String, custom scope, e.g.: --scope *.example.com" config:"scope"` Scope []string `long:"scope" description:"String, custom scope, e.g.: --scope *.example.com" config:"scope"`
Recursive string `long:"recursive" default:"current.IsDir()" description:"String,custom recursive rule, e.g.: --recursive current.IsDir()" config:"recursive"` Recursive string `long:"recursive" default:"current.IsDir()" description:"String,custom recursive rule, e.g.: --recursive current.IsDir()" config:"recursive"`
@ -142,17 +138,18 @@ type ModeOptions struct {
} }
type MiscOptions struct { type MiscOptions struct {
Mod string `short:"m" long:"mod" default:"path" choice:"path" choice:"host" description:"String, path/host spray" config:"mod"` Mod string `short:"m" long:"mod" default:"path" choice:"path" choice:"host" description:"String, path/host spray" config:"mod"`
Client string `short:"C" long:"client" default:"auto" choice:"fast" choice:"standard" choice:"auto" description:"String, Client type" config:"client"` Client string `short:"C" long:"client" default:"auto" choice:"fast" choice:"standard" choice:"auto" description:"String, Client type" config:"client"`
Deadline int `long:"deadline" default:"999999" description:"Int, deadline (seconds)" config:"deadline"` // todo 总的超时时间,适配云函数的deadline Deadline int `long:"deadline" default:"999999" description:"Int, deadline (seconds)" config:"deadline"` // todo 总的超时时间,适配云函数的deadline
Timeout int `short:"T" long:"timeout" default:"5" description:"Int, timeout with request (seconds)" config:"timeout"` Timeout int `short:"T" long:"timeout" default:"5" description:"Int, timeout with request (seconds)" config:"timeout"`
PoolSize int `short:"P" long:"pool" default:"5" description:"Int, Pool size" config:"pool"` PoolSize int `short:"P" long:"pool" default:"5" description:"Int, Pool size" config:"pool"`
Threads int `short:"t" long:"thread" default:"20" description:"Int, number of threads per pool" config:"thread"` Threads int `short:"t" long:"thread" default:"20" description:"Int, number of threads per pool" config:"thread"`
Debug bool `long:"debug" description:"Bool, output debug info" config:"debug"` Debug bool `long:"debug" description:"Bool, output debug info" config:"debug"`
Version bool `long:"version" description:"Bool, show version"` Version bool `long:"version" description:"Bool, show version"`
Verbose []bool `short:"v" description:"Bool, log verbose level ,default 0, level1: -v level2 -vv " config:"verbose"` Verbose []bool `short:"v" description:"Bool, log verbose level ,default 0, level1: -v level2 -vv " config:"verbose"`
Proxy string `long:"proxy" description:"String, proxy address, e.g.: --proxy socks5://127.0.0.1:1080" config:"proxy"` Proxy string `long:"proxy" description:"String, proxy address, e.g.: --proxy socks5://127.0.0.1:1080" config:"proxy"`
InitConfig bool `long:"init" description:"Bool, init config file"` InitConfig bool `long:"init" description:"Bool, init config file"`
PrintPreset bool `long:"print" description:"Bool, print preset all preset config "`
} }
func (opt *Option) Validate() error { func (opt *Option) Validate() error {
@ -240,18 +237,18 @@ func (opt *Option) Prepare() error {
ihttp.DefaultMaxBodySize = opt.MaxBodyLength * 1024 ihttp.DefaultMaxBodySize = opt.MaxBodyLength * 1024
} }
pkg.BlackStatus = parseStatus(pkg.BlackStatus, opt.BlackStatus) pkg.BlackStatus = pkg.ParseStatus(pkg.BlackStatus, opt.BlackStatus)
pkg.WhiteStatus = parseStatus(pkg.WhiteStatus, opt.WhiteStatus) pkg.WhiteStatus = pkg.ParseStatus(pkg.WhiteStatus, opt.WhiteStatus)
if opt.FuzzyStatus == "all" { if opt.FuzzyStatus == "all" {
pool.EnableAllFuzzy = true pool.EnableAllFuzzy = true
} else { } else {
pkg.FuzzyStatus = parseStatus(pkg.FuzzyStatus, opt.FuzzyStatus) pkg.FuzzyStatus = pkg.ParseStatus(pkg.FuzzyStatus, opt.FuzzyStatus)
} }
if opt.Unique { if opt.Unique {
pool.EnableAllUnique = true pool.EnableAllUnique = true
} else { } else {
pkg.UniqueStatus = parseStatus(pkg.UniqueStatus, opt.UniqueStatus) pkg.UniqueStatus = pkg.ParseStatus(pkg.UniqueStatus, opt.UniqueStatus)
} }
pool.MaxCrawl = opt.CrawlDepth pool.MaxCrawl = opt.CrawlDepth
@ -432,7 +429,7 @@ func (opt *Option) NewRunner() (*Runner, error) {
if opt.ResumeFrom != "" { if opt.ResumeFrom != "" {
r.StatFile, err = files.NewFile(opt.ResumeFrom, false, true, true) r.StatFile, err = files.NewFile(opt.ResumeFrom, false, true, true)
} else { } else {
r.StatFile, err = files.NewFile(safeFilename(r.Tasks.Name)+".stat", false, true, true) r.StatFile, err = files.NewFile(pkg.SafeFilename(r.Tasks.Name)+".stat", false, true, true)
} }
if err != nil { if err != nil {
return nil, err return nil, err
@ -500,7 +497,8 @@ func (opt *Option) BuildPlugin(r *Runner) error {
if opt.CommonPlugin { if opt.CommonPlugin {
r.bruteMod = true r.bruteMod = true
r.AppendWords = append(r.AppendWords, pkg.GetPresetWordList([]string{"common_file", "log_file"})...) r.AppendWords = append(r.AppendWords, pkg.Dicts["common"]...)
r.AppendWords = append(r.AppendWords, pkg.Dicts["log"]...)
} }
if opt.ActivePlugin { if opt.ActivePlugin {
@ -527,20 +525,20 @@ func (opt *Option) BuildWords(r *Runner) error {
var dicts [][]string var dicts [][]string
var err error var err error
if opt.DefaultDict { if opt.DefaultDict {
//dicts = append(dicts, pkg.LoadDefaultDict()) dicts = append(dicts, pkg.Dicts["default"])
//logs.Log.Info("use default dictionary: https://github.com/maurosoria/dirsearch/blob/master/db/dicc.txt") logs.Log.Info("use default dictionary: https://github.com/maurosoria/dirsearch/blob/master/db/dicc.txt")
} }
for i, f := range opt.Dictionaries { for i, f := range opt.Dictionaries {
dict, err := loadFileToSlice(f) dict, err := pkg.LoadFileToSlice(f)
if err != nil { if err != nil {
return err return err
} }
dicts = append(dicts, dict) dicts = append(dicts, dict)
if opt.ResumeFrom != "" { if opt.ResumeFrom != "" {
dictCache[f] = dicts[i] pkg.Dicts[f] = dicts[i]
} }
logs.Log.Logf(pkg.LogVerbose, "Loaded %d word from %s", len(dicts[i]), f) logs.Log.Logf(pkg.LogVerbose, "Loaded %d word from %s", len(dict), f)
} }
if len(dicts) == 0 && opt.Word == "" && len(opt.Rules) == 0 && len(opt.AppendRule) == 0 { if len(dicts) == 0 && opt.Word == "" && len(opt.Rules) == 0 && len(opt.AppendRule) == 0 {
@ -584,7 +582,7 @@ func (opt *Option) BuildWords(r *Runner) error {
} }
if len(opt.Rules) != 0 { if len(opt.Rules) != 0 {
rules, err := loadRuleAndCombine(opt.Rules) rules, err := pkg.LoadRuleAndCombine(opt.Rules)
if err != nil { if err != nil {
return err return err
} }
@ -603,7 +601,7 @@ func (opt *Option) BuildWords(r *Runner) error {
} }
if len(opt.AppendRule) != 0 { if len(opt.AppendRule) != 0 {
content, err := loadRuleAndCombine(opt.AppendRule) content, err := pkg.LoadRuleAndCombine(opt.AppendRule)
if err != nil { if err != nil {
return err return err
} }
@ -611,18 +609,13 @@ func (opt *Option) BuildWords(r *Runner) error {
} }
if len(opt.AppendFile) != 0 { if len(opt.AppendFile) != 0 {
var bs bytes.Buffer var lines []string
for _, f := range opt.AppendFile { for _, f := range opt.AppendFile {
content, err := ioutil.ReadFile(f) dict, err := pkg.LoadFileToSlice(f)
if err != nil { if err != nil {
return err return err
} }
bs.Write(bytes.TrimSpace(content)) lines = append(lines, dict...)
bs.WriteString("\n")
}
lines := strings.Split(bs.String(), "\n")
for i, line := range lines {
lines[i] = strings.TrimSpace(line)
} }
r.AppendWords = append(r.AppendWords, lines...) r.AppendWords = append(r.AppendWords, lines...)
} }
@ -649,16 +642,16 @@ func (opt *Option) BuildWords(r *Runner) error {
} }
if opt.Uppercase { if opt.Uppercase {
r.AppendFunction(wrapWordsFunc(strings.ToUpper)) r.AppendFunction(pkg.WrapWordsFunc(strings.ToUpper))
} }
if opt.Lowercase { if opt.Lowercase {
r.AppendFunction(wrapWordsFunc(strings.ToLower)) r.AppendFunction(pkg.WrapWordsFunc(strings.ToLower))
} }
if opt.RemoveExtensions != "" { if opt.RemoveExtensions != "" {
rexts := strings.Split(opt.ExcludeExtensions, ",") rexts := strings.Split(opt.ExcludeExtensions, ",")
r.AppendFunction(func(s string) []string { r.AppendFunction(func(s string) []string {
if ext := parseExtension(s); iutils.StringsContains(rexts, ext) { if ext := pkg.ParseExtension(s); iutils.StringsContains(rexts, ext) {
return []string{strings.TrimSuffix(s, "."+ext)} return []string{strings.TrimSuffix(s, "."+ext)}
} }
return []string{s} return []string{s}
@ -668,7 +661,7 @@ func (opt *Option) BuildWords(r *Runner) error {
if opt.ExcludeExtensions != "" { if opt.ExcludeExtensions != "" {
exexts := strings.Split(opt.ExcludeExtensions, ",") exexts := strings.Split(opt.ExcludeExtensions, ",")
r.AppendFunction(func(s string) []string { r.AppendFunction(func(s string) []string {
if ext := parseExtension(s); iutils.StringsContains(exexts, ext) { if ext := pkg.ParseExtension(s); iutils.StringsContains(exexts, ext) {
return nil return nil
} }
return []string{s} return []string{s}
@ -702,7 +695,7 @@ func (opt *Option) BuildWords(r *Runner) error {
}) })
} }
logs.Log.Logf(pkg.LogVerbose, "Loaded %d dictionaries and %d decorators", len(opt.Dictionaries), len(r.Fns)) logs.Log.Importantf("Loaded %d dictionaries, %d rules and %d decorators", len(opt.Dictionaries), len(opt.Rules), len(r.Fns))
return nil return nil
} }

View File

@ -596,7 +596,13 @@ func (pool *BrutePool) doActive() {
func (pool *BrutePool) doCommonFile() { func (pool *BrutePool) doCommonFile() {
defer pool.wg.Done() defer pool.wg.Done()
for _, u := range pkg.GetPresetWordList([]string{"common_file", "log_file"}) { for _, u := range pkg.Dicts["common"] {
pool.addAddition(&Unit{
path: pool.dir + u,
source: parsers.CommonFileSource,
})
}
for _, u := range pkg.Dicts["log"] {
pool.addAddition(&Unit{ pool.addAddition(&Unit{
path: pool.dir + u, path: pool.dir + u,
source: parsers.CommonFileSource, source: parsers.CommonFileSource,

View File

@ -20,12 +20,6 @@ var (
MAX = 2147483647 MAX = 2147483647
) )
var (
dictCache = make(map[string][]string)
wordlistCache = make(map[string][]string)
ruleCache = make(map[string][]rule.Expression)
)
type Runner struct { type Runner struct {
*Option *Option

View File

@ -16,13 +16,13 @@ type Origin struct {
func (o *Origin) InitWorder(fns []func(string) []string) (*words.Worder, error) { func (o *Origin) InitWorder(fns []func(string) []string) (*words.Worder, error) {
var worder *words.Worder var worder *words.Worder
wl, err := loadWordlist(o.Word, o.Dictionaries) wl, err := pkg.LoadWordlist(o.Word, o.Dictionaries)
if err != nil { if err != nil {
return nil, err return nil, err
} }
worder = words.NewWorder(wl) worder = words.NewWorder(wl)
worder.Fns = fns worder.Fns = fns
rules, err := loadRuleWithFiles(o.RuleFiles, o.RuleFilter) rules, err := pkg.LoadRuleWithFiles(o.RuleFiles, o.RuleFilter)
if err != nil { if err != nil {
return nil, err return nil, err
} }

View File

@ -1,149 +1,5 @@
package internal package internal
import (
"bytes"
"github.com/chainreactors/spray/pkg"
"github.com/chainreactors/words/mask"
"github.com/chainreactors/words/rule"
"io/ioutil"
"strconv"
"strings"
)
func parseExtension(s string) string {
if i := strings.Index(s, "."); i != -1 {
return s[i+1:]
}
return ""
}
func parseStatus(preset []int, changed string) []int {
if changed == "" {
return preset
}
if strings.HasPrefix(changed, "+") {
for _, s := range strings.Split(changed[1:], ",") {
if t, err := strconv.Atoi(s); err != nil {
continue
} else {
preset = append(preset, t)
}
}
} else if strings.HasPrefix(changed, "!") {
for _, s := range strings.Split(changed[1:], ",") {
for i, status := range preset {
if t, err := strconv.Atoi(s); err != nil {
break
} else if t == status {
preset = append(preset[:i], preset[i+1:]...)
break
}
}
}
} else {
preset = []int{}
for _, s := range strings.Split(changed, ",") {
if t, err := strconv.Atoi(s); err != nil {
continue
} else {
preset = append(preset, t)
}
}
}
return preset
}
func loadFileToSlice(filename string) ([]string, error) {
var ss []string
content, err := ioutil.ReadFile(filename)
if err != nil {
return nil, err
}
ss = strings.Split(strings.TrimSpace(string(content)), "\n")
// 统一windows与linux的回车换行差异
for i, word := range ss {
ss[i] = strings.TrimSpace(word)
}
return ss, nil
}
func loadRuleAndCombine(filename []string) (string, error) {
var bs bytes.Buffer
for _, f := range filename {
if data, ok := pkg.Rules[f]; ok {
bs.WriteString(strings.TrimSpace(data))
bs.WriteString("\n")
} else {
content, err := ioutil.ReadFile(f)
if err != nil {
return "", err
}
bs.Write(bytes.TrimSpace(content))
bs.WriteString("\n")
}
}
return bs.String(), nil
}
func loadFileWithCache(filename string) ([]string, error) {
if dict, ok := dictCache[filename]; ok {
return dict, nil
}
dict, err := loadFileToSlice(filename)
if err != nil {
return nil, err
}
dictCache[filename] = dict
return dict, nil
}
func loadDictionaries(filenames []string) ([][]string, error) {
dicts := make([][]string, len(filenames))
for i, name := range filenames {
dict, err := loadFileWithCache(name)
if err != nil {
return nil, err
}
dicts[i] = dict
}
return dicts, nil
}
func loadWordlist(word string, dictNames []string) ([]string, error) {
if wl, ok := wordlistCache[word+strings.Join(dictNames, ",")]; ok {
return wl, nil
}
dicts, err := loadDictionaries(dictNames)
if err != nil {
return nil, err
}
wl, err := mask.Run(word, dicts, nil)
if err != nil {
return nil, err
}
wordlistCache[word] = wl
return wl, nil
}
func loadRuleWithFiles(ruleFiles []string, filter string) ([]rule.Expression, error) {
if rules, ok := ruleCache[strings.Join(ruleFiles, ",")]; ok {
return rules, nil
}
var rules bytes.Buffer
for _, filename := range ruleFiles {
content, err := ioutil.ReadFile(filename)
if err != nil {
return nil, err
}
rules.Write(content)
rules.WriteString("\n")
}
return rule.Compile(rules.String(), filter).Expressions, nil
}
//type bytesPatcher struct{} //type bytesPatcher struct{}
// //
//func (p *bytesPatcher) Visit(node *ast.Node) { //func (p *bytesPatcher) Visit(node *ast.Node) {
@ -158,17 +14,3 @@ func loadRuleWithFiles(ruleFiles []string, filter string) ([]rule.Expression, er
// }) // })
// } // }
//} //}
func wrapWordsFunc(f func(string) string) func(string) []string {
return func(s string) []string {
return []string{f(s)}
}
}
func safeFilename(filename string) string {
filename = strings.ReplaceAll(filename, "http://", "")
filename = strings.ReplaceAll(filename, "https://", "")
filename = strings.ReplaceAll(filename, ":", "_")
filename = strings.ReplaceAll(filename, "/", "_")
return filename
}

View File

@ -12,14 +12,6 @@ import (
"strings" "strings"
) )
var (
ExtractRegexps = make(parsers.Extractors)
Extractors = make(parsers.Extractors)
FingerEngine *fingers.Engine
ActivePath []string
)
func LoadPorts() error { func LoadPorts() error {
var err error var err error
var ports []*utils.PortConfig var ports []*utils.PortConfig
@ -68,7 +60,11 @@ func LoadTemplates() error {
return err return err
} }
for name, wordlist := range dicts { for name, wordlist := range dicts {
Dicts[strings.TrimSuffix(name, ".txt")] = strings.Split(strings.TrimSpace(wordlist), "\n") dict := strings.Split(strings.TrimSpace(wordlist), "\n")
for i, d := range dict {
dict[i] = strings.TrimSpace(d)
}
Dicts[strings.TrimSuffix(name, ".txt")] = dict
} }
// load mask // load mask

View File

@ -3,11 +3,16 @@ package pkg
import ( import (
"bufio" "bufio"
"bytes" "bytes"
"github.com/chainreactors/files"
"github.com/chainreactors/fingers"
"github.com/chainreactors/logs" "github.com/chainreactors/logs"
"github.com/chainreactors/parsers"
"github.com/chainreactors/utils/iutils" "github.com/chainreactors/utils/iutils"
"github.com/chainreactors/words/mask" "github.com/chainreactors/words/mask"
"github.com/chainreactors/words/rule"
"github.com/expr-lang/expr" "github.com/expr-lang/expr"
"github.com/expr-lang/expr/vm" "github.com/expr-lang/expr/vm"
"io/ioutil"
"math/rand" "math/rand"
"net/http" "net/http"
"net/url" "net/url"
@ -32,11 +37,17 @@ var (
EnableAllFingerEngine = false EnableAllFingerEngine = false
) )
var ( var (
Rules map[string]string = make(map[string]string) Rules map[string]string = make(map[string]string)
Dicts map[string][]string = make(map[string][]string) Dicts map[string][]string = make(map[string][]string)
BadExt = []string{".js", ".css", ".scss", ".,", ".jpeg", ".jpg", ".png", ".gif", ".svg", ".vue", ".ts", ".swf", ".pdf", ".mp4", ".zip", ".rar"} wordlistCache = make(map[string][]string)
BadURL = []string{";", "}", "\\n", "webpack://", "{", "www.w3.org", ".src", ".url", ".att", ".href", "location.href", "javascript:", "location:", ".createObject", ":location", ".path"} ruleCache = make(map[string][]rule.Expression)
BadExt = []string{".js", ".css", ".scss", ".,", ".jpeg", ".jpg", ".png", ".gif", ".svg", ".vue", ".ts", ".swf", ".pdf", ".mp4", ".zip", ".rar"}
BadURL = []string{";", "}", "\\n", "webpack://", "{", "www.w3.org", ".src", ".url", ".att", ".href", "location.href", "javascript:", "location:", ".createObject", ":location", ".path"}
ExtractRegexps = make(parsers.Extractors)
Extractors = make(parsers.Extractors)
FingerEngine *fingers.Engine
ActivePath []string
ContentTypeMap = map[string]string{ ContentTypeMap = map[string]string{
"application/javascript": "js", "application/javascript": "js",
"application/json": "json", "application/json": "json",
@ -403,3 +414,157 @@ func GetPresetWordList(key []string) []string {
} }
return wordlist return wordlist
} }
// ParseExtension returns everything after the first '.' in s, or the
// empty string when s contains no dot. Note that for "a.tar.gz" this
// yields "tar.gz" (first dot, not last).
func ParseExtension(s string) string {
	if _, ext, found := strings.Cut(s, "."); found {
		return ext
	}
	return ""
}
// ParseStatus merges a user-supplied status expression into a preset
// status-code list.
//
// changed supports four forms:
//   - ""         : return preset unchanged
//   - "+a,b,..." : append the listed codes to preset
//   - "!a,b,..." : remove the listed codes from preset (first occurrence each)
//   - "a,b,..."  : discard preset and use only the listed codes
//
// Tokens that fail to parse as integers are silently skipped.
func ParseStatus(preset []int, changed string) []int {
	if changed == "" {
		return preset
	}
	switch {
	case strings.HasPrefix(changed, "+"):
		for _, s := range strings.Split(changed[1:], ",") {
			if t, err := strconv.Atoi(s); err == nil {
				preset = append(preset, t)
			}
		}
	case strings.HasPrefix(changed, "!"):
		for _, s := range strings.Split(changed[1:], ",") {
			// Parse once per token; the previous version re-ran Atoi for
			// every element of preset inside the inner loop.
			t, err := strconv.Atoi(s)
			if err != nil {
				continue
			}
			for i, status := range preset {
				if t == status {
					preset = append(preset[:i], preset[i+1:]...)
					break // only the first occurrence is removed, matching prior behavior
				}
			}
		}
	default:
		preset = []int{}
		for _, s := range strings.Split(changed, ",") {
			if t, err := strconv.Atoi(s); err == nil {
				preset = append(preset, t)
			}
		}
	}
	return preset
}
// LoadFileToSlice resolves filename into a slice of trimmed lines.
// If filename matches a preset dictionary name in Dicts, the preset
// content is returned (with a warning when a same-named file also
// exists on disk); otherwise the file is read and split by line.
func LoadFileToSlice(filename string) ([]string, error) {
	if preset, ok := Dicts[filename]; ok {
		if files.IsExist(filename) {
			logs.Log.Warnf("load and overwrite %s from preset", filename)
		}
		return preset, nil
	}

	content, err := ioutil.ReadFile(filename)
	if err != nil {
		return nil, err
	}

	// Trim every line to normalize Windows vs. Linux line endings.
	lines := strings.Split(strings.TrimSpace(string(content)), "\n")
	for i := range lines {
		lines[i] = strings.TrimSpace(lines[i])
	}
	return lines, nil
}
// LoadRuleAndCombine concatenates the given rule sources into one
// newline-separated string. Each name is resolved against the preset
// Rules map first, and read from disk otherwise.
func LoadRuleAndCombine(filename []string) (string, error) {
	var combined bytes.Buffer
	for _, name := range filename {
		if preset, ok := Rules[name]; ok {
			combined.WriteString(strings.TrimSpace(preset))
		} else {
			raw, err := ioutil.ReadFile(name)
			if err != nil {
				return "", err
			}
			combined.Write(bytes.TrimSpace(raw))
		}
		combined.WriteString("\n")
	}
	return combined.String(), nil
}
// loadFileWithCache returns the word list for filename, loading it from
// disk at most once and memoizing the result in the shared Dicts map.
func loadFileWithCache(filename string) ([]string, error) {
	if cached, ok := Dicts[filename]; ok {
		return cached, nil
	}
	loaded, err := LoadFileToSlice(filename)
	if err != nil {
		return nil, err
	}
	Dicts[filename] = loaded
	return loaded, nil
}
// loadDictionaries resolves each filename to its word list via the
// shared cache and returns the lists in the same order as filenames.
func loadDictionaries(filenames []string) ([][]string, error) {
	dicts := make([][]string, 0, len(filenames))
	for _, name := range filenames {
		dict, err := loadFileWithCache(name)
		if err != nil {
			return nil, err
		}
		dicts = append(dicts, dict)
	}
	return dicts, nil
}
// LoadWordlist expands the word mask DSL against the named dictionaries
// and memoizes the result, keyed by the mask plus the dictionary names.
func LoadWordlist(word string, dictNames []string) ([]string, error) {
	key := word + strings.Join(dictNames, ",")
	if wl, ok := wordlistCache[key]; ok {
		return wl, nil
	}
	dicts, err := loadDictionaries(dictNames)
	if err != nil {
		return nil, err
	}
	wl, err := mask.Run(word, dicts, nil)
	if err != nil {
		return nil, err
	}
	// BUG FIX: the result was previously stored under word alone while the
	// lookup above used word+dictNames, so the cache never hit and the same
	// mask with different dictionaries could have collided.
	wordlistCache[key] = wl
	return wl, nil
}
// LoadRuleWithFiles reads and concatenates the given rule files, compiles
// them with filter, and memoizes the compiled expressions per file set
// and filter.
func LoadRuleWithFiles(ruleFiles []string, filter string) ([]rule.Expression, error) {
	// The key includes filter: the same files compiled with a different
	// filter yield different expressions and must not share a cache entry.
	key := strings.Join(ruleFiles, ",") + "#" + filter
	if cached, ok := ruleCache[key]; ok {
		return cached, nil
	}
	var raw bytes.Buffer
	for _, filename := range ruleFiles {
		content, err := ioutil.ReadFile(filename)
		if err != nil {
			return nil, err
		}
		raw.Write(content)
		raw.WriteString("\n")
	}
	// BUG FIX: the compiled result was never stored in ruleCache, so the
	// lookup above could never hit and every call re-read and recompiled.
	compiled := rule.Compile(raw.String(), filter).Expressions
	ruleCache[key] = compiled
	return compiled, nil
}
// WrapWordsFunc lifts a string-to-string transform into the
// string-to-slice shape expected by word decorator functions.
func WrapWordsFunc(f func(string) string) func(string) []string {
	return func(word string) []string {
		wrapped := make([]string, 1)
		wrapped[0] = f(word)
		return wrapped
	}
}
// SafeFilename converts a URL-like name into a filesystem-safe filename
// by stripping the scheme prefix and replacing port/path separators with
// underscores. Replacements are applied sequentially in order.
func SafeFilename(filename string) string {
	replacements := [][2]string{
		{"http://", ""},
		{"https://", ""},
		{":", "_"},
		{"/", "_"},
	}
	for _, r := range replacements {
		filename = strings.ReplaceAll(filename, r[0], r[1])
	}
	return filename
}