2022-09-08 15:57:17 +08:00
|
|
|
package pkg
|
|
|
|
|
|
|
|
import (
|
2024-03-04 20:03:06 +08:00
|
|
|
"bufio"
|
|
|
|
"bytes"
|
2024-08-26 02:22:35 +08:00
|
|
|
"github.com/chainreactors/files"
|
|
|
|
"github.com/chainreactors/fingers"
|
2024-02-10 18:23:50 +08:00
|
|
|
"github.com/chainreactors/logs"
|
2024-08-26 02:22:35 +08:00
|
|
|
"github.com/chainreactors/parsers"
|
2023-12-28 14:34:19 +08:00
|
|
|
"github.com/chainreactors/utils/iutils"
|
2024-08-26 01:20:03 +08:00
|
|
|
"github.com/chainreactors/words/mask"
|
2024-08-26 02:22:35 +08:00
|
|
|
"github.com/chainreactors/words/rule"
|
2024-06-06 18:11:45 +08:00
|
|
|
"github.com/expr-lang/expr"
|
|
|
|
"github.com/expr-lang/expr/vm"
|
2024-08-26 02:22:35 +08:00
|
|
|
"io/ioutil"
|
2022-09-08 15:57:17 +08:00
|
|
|
"math/rand"
|
2024-03-04 20:03:06 +08:00
|
|
|
"net/http"
|
2023-01-03 18:22:13 +08:00
|
|
|
"net/url"
|
|
|
|
"path"
|
2024-02-10 18:23:50 +08:00
|
|
|
"path/filepath"
|
|
|
|
"strconv"
|
2023-01-03 17:09:32 +08:00
|
|
|
"strings"
|
2022-09-08 15:57:17 +08:00
|
|
|
"time"
|
|
|
|
"unsafe"
|
|
|
|
)
|
|
|
|
|
2024-02-10 18:23:50 +08:00
|
|
|
var (
|
2025-06-05 13:45:48 +08:00
|
|
|
LogVerbose = logs.Warn - 2
|
|
|
|
LogFuzz = logs.Warn - 1
|
|
|
|
DefaultWhiteStatus = []int{200} // cmd input
|
|
|
|
DefaultBlackStatus = []int{400, 410} // cmd input
|
|
|
|
DefaultFuzzyStatus = []int{500, 501, 502, 503, 301, 302, 404} // cmd input
|
|
|
|
DefaultUniqueStatus = []int{403, 200, 404} // 相同unique的403表示命中了同一条acl, 相同unique的200表示default页面
|
|
|
|
WhiteStatus = []int{} // cmd input, 200
|
|
|
|
BlackStatus = []int{} // cmd input, 400,410
|
|
|
|
FuzzyStatus = []int{} // cmd input, 500,501,502,503
|
|
|
|
WAFStatus = []int{493, 418, 1020, 406, 429, 406, 412}
|
|
|
|
UniqueStatus = []int{} // 相同unique的403表示命中了同一条acl, 相同unique的200表示default页面
|
2024-02-20 18:25:43 +08:00
|
|
|
|
|
|
|
// plugins
|
2024-03-04 20:03:06 +08:00
|
|
|
EnableAllFingerEngine = false
|
2024-02-10 18:23:50 +08:00
|
|
|
)
|
2023-01-03 17:09:32 +08:00
|
|
|
var (
	// Rules and Dicts hold preset rule/dictionary content keyed by name;
	// presets take priority over same-named files on disk (see LoadFileToSlice).
	Rules map[string]string = make(map[string]string)
	Dicts map[string][]string = make(map[string][]string)
	// wordlistCache and ruleCache memoize generated wordlists and compiled
	// rule expressions (see LoadWordlist / LoadRuleWithFiles).
	wordlistCache = make(map[string][]string)
	ruleCache = make(map[string][]rule.Expression)
	// BadExt lists static-resource extensions whose urls are skipped while crawling.
	BadExt = []string{".js", ".css", ".scss", ".,", ".jpeg", ".jpg", ".png", ".gif", ".svg", ".vue", ".ts", ".swf", ".pdf", ".mp4", ".zip", ".rar"}
	// BadURL lists substrings that mark a crawled string as js/markup junk
	// rather than a real url.
	BadURL = []string{";", "}", "\\n", "webpack://", "{", "www.w3.org", ".src", ".url", ".att", ".href", "location.href", "javascript:", "location:", ".createObject", ":location", ".path"}
	// ExtractRegexps / Extractors hold the configured content extractors.
	ExtractRegexps = make(parsers.Extractors)
	Extractors = make(parsers.Extractors)

	// FingerEngine is the shared fingerprint engine; initialized elsewhere.
	FingerEngine *fingers.Engine
	// ActivePath collects paths discovered by active probing.
	ActivePath []string
	// ContentTypeMap maps MIME content types to short file-type labels.
	ContentTypeMap = map[string]string{
		"application/javascript": "js",
		"application/json": "json",
		"application/xml": "xml",
		"application/octet-stream": "bin",
		"application/atom+xml": "atom",
		"application/msword": "doc",
		"application/pdf": "pdf",
		"image/gif": "gif",
		"image/jpeg": "jpg",
		"image/png": "png",
		"image/svg+xml": "svg",
		"text/css": "css",
		"text/plain": "txt",
		"text/html": "html",
		"audio/mpeg": "mp3",
		"video/mp4": "mp4",
		"video/ogg": "ogg",
		"video/webm": "webm",
		"video/x-ms-wmv": "wmv",
		"video/avi": "avi",
		"image/x-icon": "ico",
	}

	// from feroxbuster
	randomUserAgent = []string{
		"Mozilla/5.0 (Linux; Android 8.0.0; SM-G960F Build/R16NW) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.84 Mobile Safari/537.36",
		"Mozilla/5.0 (iPhone; CPU iPhone OS 12_0 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/12.0 Mobile/15E148 Safari/604.1",
		"Mozilla/5.0 (Windows Phone 10.0; Android 6.0.1; Microsoft; RM-1152) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Mobile Safari/537.36 Edge/15.15254",
		"Mozilla/5.0 (Linux; Android 7.0; Pixel C Build/NRD90M; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/52.0.2743.98 Safari/537.36",
		"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36 Edge/12.246",
		"Mozilla/5.0 (X11; CrOS x86_64 8172.45.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.64 Safari/537.36",
		"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_2) AppleWebKit/601.3.9 (KHTML, like Gecko) Version/9.0.2 Safari/601.3.9",
		"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.111 Safari/537.36",
		"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:15.0) Gecko/20100101 Firefox/15.0.1",
		"Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)",
		"Mozilla/5.0 (compatible; bingbot/2.0; +http://www.bing.com/bingbot.htm)",
		"Mozilla/5.0 (compatible; Yahoo! Slurp; http://help.yahoo.com/help/us/ysearch/slurp)",
	}
	uacount = len(randomUserAgent)
	// DefaultUserAgent is chosen once at process start; RandomUA() re-rolls per call.
	DefaultUserAgent = randomUserAgent[rand.Intn(uacount)]
)
|
|
|
|
|
2024-02-08 15:26:01 +08:00
|
|
|
// BS is a byte-slice alias implementing fmt.Stringer, so raw response bytes
// can be printed/formatted as text without an explicit conversion at each site.
type BS []byte

// String returns the bytes interpreted as a string.
func (b BS) String() string {
	return string(b)
}
|
|
|
|
|
2022-09-08 15:57:17 +08:00
|
|
|
// letters is the alphabet for random path/host generation; it has 52 symbols,
// so a 6-bit index (0-63) may exceed it and must be range-checked before use.
const letters = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
|
|
|
|
|
|
|
|
// src is the dedicated PRNG source for RandPath/RandHost, seeded once at startup.
var src = rand.NewSource(time.Now().UnixNano())
|
|
|
|
|
|
|
|
const (
	// 6 bits to represent a letter index
	letterIdBits = 6
	// All 1-bits as many as letterIdBits
	letterIdMask = 1<<letterIdBits - 1
	// letterIdMax is how many 6-bit letter indices fit in one 63-bit random value.
	letterIdMax = 63 / letterIdBits
)
|
|
|
|
|
|
|
|
func RandPath() string {
|
|
|
|
n := 16
|
|
|
|
b := make([]byte, n)
|
|
|
|
// A rand.Int63() generates 63 random bits, enough for letterIdMax letters!
|
2023-01-09 14:47:58 +08:00
|
|
|
for i, cache, remain := n-1, src.Int63(), letterIdMax; i >= 0; {
|
2022-09-08 15:57:17 +08:00
|
|
|
if remain == 0 {
|
|
|
|
cache, remain = src.Int63(), letterIdMax
|
|
|
|
}
|
|
|
|
if idx := int(cache & letterIdMask); idx < len(letters) {
|
|
|
|
b[i] = letters[idx]
|
|
|
|
i--
|
|
|
|
}
|
|
|
|
cache >>= letterIdBits
|
|
|
|
remain--
|
|
|
|
}
|
|
|
|
return *(*string)(unsafe.Pointer(&b))
|
|
|
|
}
|
2022-10-26 18:28:40 +08:00
|
|
|
|
|
|
|
func RandHost() string {
|
|
|
|
n := 8
|
|
|
|
b := make([]byte, n)
|
|
|
|
// A rand.Int63() generates 63 random bits, enough for letterIdMax letters!
|
|
|
|
for i, cache, remain := n-1, src.Int63(), letterIdMax; i >= 1; {
|
|
|
|
if remain == 0 {
|
|
|
|
cache, remain = src.Int63(), letterIdMax
|
|
|
|
}
|
|
|
|
if idx := int(cache & letterIdMask); idx < len(letters) {
|
|
|
|
b[i] = letters[idx]
|
|
|
|
i--
|
|
|
|
}
|
|
|
|
cache >>= letterIdBits
|
|
|
|
remain--
|
|
|
|
}
|
|
|
|
|
|
|
|
b[5] = byte(0x2e)
|
|
|
|
return *(*string)(unsafe.Pointer(&b))
|
|
|
|
}
|
2022-10-28 00:46:54 +08:00
|
|
|
|
2024-02-08 15:26:01 +08:00
|
|
|
func FilterJs(u string) bool {
|
2023-01-09 21:33:05 +08:00
|
|
|
if commonFilter(u) {
|
|
|
|
return true
|
2023-01-03 18:22:13 +08:00
|
|
|
}
|
2023-01-09 21:33:05 +08:00
|
|
|
|
2023-01-03 18:22:13 +08:00
|
|
|
return false
|
|
|
|
}
|
|
|
|
|
2024-02-08 15:26:01 +08:00
|
|
|
func FilterUrl(u string) bool {
|
2023-01-09 21:33:05 +08:00
|
|
|
if commonFilter(u) {
|
|
|
|
return true
|
|
|
|
}
|
|
|
|
|
2023-01-03 18:22:13 +08:00
|
|
|
parsed, err := url.Parse(u)
|
|
|
|
if err != nil {
|
|
|
|
return true
|
|
|
|
} else {
|
|
|
|
ext := path.Ext(parsed.Path)
|
|
|
|
for _, e := range BadExt {
|
2023-01-09 22:41:05 +08:00
|
|
|
if strings.EqualFold(e, ext) {
|
2023-01-03 18:22:13 +08:00
|
|
|
return true
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2023-01-09 21:33:05 +08:00
|
|
|
return false
|
|
|
|
}
|
|
|
|
|
2024-02-08 15:26:01 +08:00
|
|
|
// CleanURL strips surrounding quotes, normalizes encoded slashes, and drops
// the query string / fragment. This avoids the cost of a full url.Parse and
// prevents stray parameters from causing unexpected requests.
func CleanURL(u string) string {
	u = strings.Trim(u, "\"")
	u = strings.Trim(u, "'")
	// only run the replacement chain when an encoded "/" can possibly exist
	if strings.Contains(u, "2f") || strings.Contains(u, "2F") {
		for _, enc := range []string{"\\u002F", "\\u002f", "%252F", "%252f", "%2f", "%2F"} {
			u = strings.ReplaceAll(u, enc, "/")
		}
	}

	u = strings.TrimRight(u, "\\")
	if before, _, found := strings.Cut(u, "?"); found {
		return before
	}
	if before, _, found := strings.Cut(u, "#"); found {
		return before
	}
	return u
}
|
|
|
|
|
|
|
|
func commonFilter(u string) bool {
|
2023-01-09 22:41:05 +08:00
|
|
|
if strings.HasPrefix(u, "http") && len(u) < 15 {
|
2023-01-09 21:33:05 +08:00
|
|
|
return true
|
|
|
|
}
|
|
|
|
|
2023-01-10 00:58:16 +08:00
|
|
|
for _, bad := range BadURL {
|
|
|
|
if strings.Contains(u, bad) {
|
2023-01-03 18:22:13 +08:00
|
|
|
return true
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return false
|
|
|
|
}
|
2023-01-05 23:26:14 +08:00
|
|
|
|
2023-01-06 00:48:13 +08:00
|
|
|
func BakGenerator(domain string) []string {
|
|
|
|
var possibilities []string
|
|
|
|
for first, _ := range domain {
|
|
|
|
for last, _ := range domain[first:] {
|
|
|
|
p := domain[first : first+last+1]
|
2023-01-28 13:15:49 +08:00
|
|
|
if !iutils.StringsContains(possibilities, p) {
|
2023-01-06 00:48:13 +08:00
|
|
|
possibilities = append(possibilities, p)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return possibilities
|
|
|
|
}
|
2023-02-08 12:58:56 +08:00
|
|
|
|
|
|
|
// MbTable is the 256-entry lookup table for the reflected CRC-16 polynomial
// 0xA001 (the CRC-16/ARC family); CRC16Hash indexes it once per input byte.
var MbTable = []uint16{
	0x0000, 0xC0C1, 0xC181, 0x0140, 0xC301, 0x03C0, 0x0280, 0xC241,
	0xC601, 0x06C0, 0x0780, 0xC741, 0x0500, 0xC5C1, 0xC481, 0x0440,
	0xCC01, 0x0CC0, 0x0D80, 0xCD41, 0x0F00, 0xCFC1, 0xCE81, 0x0E40,
	0x0A00, 0xCAC1, 0xCB81, 0x0B40, 0xC901, 0x09C0, 0x0880, 0xC841,
	0xD801, 0x18C0, 0x1980, 0xD941, 0x1B00, 0xDBC1, 0xDA81, 0x1A40,
	0x1E00, 0xDEC1, 0xDF81, 0x1F40, 0xDD01, 0x1DC0, 0x1C80, 0xDC41,
	0x1400, 0xD4C1, 0xD581, 0x1540, 0xD701, 0x17C0, 0x1680, 0xD641,
	0xD201, 0x12C0, 0x1380, 0xD341, 0x1100, 0xD1C1, 0xD081, 0x1040,
	0xF001, 0x30C0, 0x3180, 0xF141, 0x3300, 0xF3C1, 0xF281, 0x3240,
	0x3600, 0xF6C1, 0xF781, 0x3740, 0xF501, 0x35C0, 0x3480, 0xF441,
	0x3C00, 0xFCC1, 0xFD81, 0x3D40, 0xFF01, 0x3FC0, 0x3E80, 0xFE41,
	0xFA01, 0x3AC0, 0x3B80, 0xFB41, 0x3900, 0xF9C1, 0xF881, 0x3840,
	0x2800, 0xE8C1, 0xE981, 0x2940, 0xEB01, 0x2BC0, 0x2A80, 0xEA41,
	0xEE01, 0x2EC0, 0x2F80, 0xEF41, 0x2D00, 0xEDC1, 0xEC81, 0x2C40,
	0xE401, 0x24C0, 0x2580, 0xE541, 0x2700, 0xE7C1, 0xE681, 0x2640,
	0x2200, 0xE2C1, 0xE381, 0x2340, 0xE101, 0x21C0, 0x2080, 0xE041,
	0xA001, 0x60C0, 0x6180, 0xA141, 0x6300, 0xA3C1, 0xA281, 0x6240,
	0x6600, 0xA6C1, 0xA781, 0x6740, 0xA501, 0x65C0, 0x6480, 0xA441,
	0x6C00, 0xACC1, 0xAD81, 0x6D40, 0xAF01, 0x6FC0, 0x6E80, 0xAE41,
	0xAA01, 0x6AC0, 0x6B80, 0xAB41, 0x6900, 0xA9C1, 0xA881, 0x6840,
	0x7800, 0xB8C1, 0xB981, 0x7940, 0xBB01, 0x7BC0, 0x7A80, 0xBA41,
	0xBE01, 0x7EC0, 0x7F80, 0xBF41, 0x7D00, 0xBDC1, 0xBC81, 0x7C40,
	0xB401, 0x74C0, 0x7580, 0xB541, 0x7700, 0xB7C1, 0xB681, 0x7640,
	0x7200, 0xB2C1, 0xB381, 0x7340, 0xB101, 0x71C0, 0x7080, 0xB041,
	0x5000, 0x90C1, 0x9181, 0x5140, 0x9301, 0x53C0, 0x5280, 0x9241,
	0x9601, 0x56C0, 0x5780, 0x9741, 0x5500, 0x95C1, 0x9481, 0x5440,
	0x9C01, 0x5CC0, 0x5D80, 0x9D41, 0x5F00, 0x9FC1, 0x9E81, 0x5E40,
	0x5A00, 0x9AC1, 0x9B81, 0x5B40, 0x9901, 0x59C0, 0x5880, 0x9841,
	0x8801, 0x48C0, 0x4980, 0x8941, 0x4B00, 0x8BC1, 0x8A81, 0x4A40,
	0x4E00, 0x8EC1, 0x8F81, 0x4F40, 0x8D01, 0x4DC0, 0x4C80, 0x8C41,
	0x4400, 0x84C1, 0x8581, 0x4540, 0x8701, 0x47C0, 0x4680, 0x8641,
	0x8201, 0x42C0, 0x4380, 0x8341, 0x4100, 0x81C1, 0x8081, 0x4040}
|
|
|
|
|
|
|
|
func CRC16Hash(data []byte) uint16 {
|
|
|
|
var crc16 uint16
|
|
|
|
crc16 = 0xffff
|
|
|
|
for _, v := range data {
|
|
|
|
n := uint8(uint16(v) ^ crc16)
|
|
|
|
crc16 >>= 8
|
|
|
|
crc16 ^= MbTable[n]
|
|
|
|
}
|
|
|
|
return crc16
|
|
|
|
}
|
2024-02-10 18:23:50 +08:00
|
|
|
|
|
|
|
// SafePath joins dir and u without creating a double slash: a single leading
// "/" on u is dropped before concatenation. It deliberately avoids
// filepath.Join, which would collapse meaningful duplicate slashes.
func SafePath(dir, u string) string {
	return dir + strings.TrimPrefix(u, "/")
}
|
|
|
|
|
|
|
|
func RelaPath(base, u string) string {
|
|
|
|
// 拼接相对目录, 不使用path.join的原因是, 如果存在"////"这样的情况, 可能真的是有意义的路由, 不能随意去掉.
|
|
|
|
// "" /a /a
|
|
|
|
// "" a /a
|
|
|
|
// / "" /
|
|
|
|
// /a/ b /a/b
|
|
|
|
// /a/ /b /a/b
|
|
|
|
// /a b /b
|
|
|
|
// /a /b /b
|
|
|
|
|
|
|
|
if u == "" {
|
|
|
|
return base
|
|
|
|
}
|
|
|
|
|
|
|
|
pathSlash := strings.HasPrefix(u, "/")
|
|
|
|
if base == "" {
|
|
|
|
if pathSlash {
|
|
|
|
return u[1:]
|
|
|
|
} else {
|
|
|
|
return "/" + u
|
|
|
|
}
|
|
|
|
} else if strings.HasSuffix(base, "/") {
|
|
|
|
if pathSlash {
|
|
|
|
return base + u[1:]
|
|
|
|
} else {
|
|
|
|
return base + u
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
if pathSlash {
|
|
|
|
return Dir(base) + u[1:]
|
|
|
|
} else {
|
|
|
|
return Dir(base) + u
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Dir returns the directory portion of u including the trailing slash,
// without collapsing duplicate "//". It is NOT a parent-directory walker:
//
//	/a  -> /
//	/a/ -> /a/
//	a/  -> a/
//	aaa -> /
func Dir(u string) string {
	if strings.HasSuffix(u, "/") {
		return u
	}
	i := strings.LastIndex(u, "/")
	if i < 0 {
		// no separator at all: treat as root
		return "/"
	}
	return u[:i+1]
}
|
|
|
|
|
|
|
|
func FormatURL(base, u string) string {
|
|
|
|
if strings.HasPrefix(u, "http") {
|
|
|
|
parsed, err := url.Parse(u)
|
|
|
|
if err != nil {
|
|
|
|
return ""
|
|
|
|
}
|
|
|
|
return parsed.Path
|
|
|
|
} else if strings.HasPrefix(u, "//") {
|
|
|
|
parsed, err := url.Parse(u)
|
|
|
|
if err != nil {
|
|
|
|
return ""
|
|
|
|
}
|
|
|
|
return parsed.Path
|
|
|
|
} else if strings.HasPrefix(u, "/") {
|
|
|
|
// 绝对目录拼接
|
|
|
|
// 不需要进行处理, 用来跳过下面的判断
|
|
|
|
return u
|
|
|
|
} else if strings.HasPrefix(u, "./") {
|
|
|
|
// "./"相对目录拼接
|
|
|
|
return RelaPath(base, u[2:])
|
|
|
|
} else if strings.HasPrefix(u, "../") {
|
|
|
|
return path.Join(Dir(base), u)
|
|
|
|
} else {
|
|
|
|
// 相对目录拼接
|
|
|
|
return RelaPath(base, u)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func BaseURL(u *url.URL) string {
|
|
|
|
return u.Scheme + "://" + u.Host
|
|
|
|
}
|
|
|
|
|
|
|
|
func RandomUA() string {
|
|
|
|
return randomUserAgent[rand.Intn(uacount)]
|
|
|
|
}
|
|
|
|
|
|
|
|
func CompareWithExpr(exp *vm.Program, params map[string]interface{}) bool {
|
|
|
|
res, err := expr.Run(exp, params)
|
|
|
|
if err != nil {
|
|
|
|
logs.Log.Warn(err.Error())
|
|
|
|
}
|
|
|
|
|
|
|
|
if res == true {
|
|
|
|
return true
|
|
|
|
} else {
|
|
|
|
return false
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// MatchWithGlobs reports whether u matches any of the shell-style patterns
// in globs (filepath.Match syntax); malformed patterns are silently skipped.
func MatchWithGlobs(u string, globs []string) bool {
	for _, pattern := range globs {
		if matched, err := filepath.Match(pattern, u); err == nil && matched {
			return true
		}
	}
	return false
}
|
2024-03-04 20:03:06 +08:00
|
|
|
|
|
|
|
// ParseRawResponse parses a raw HTTP response (status line, headers, body)
// from an in-memory byte slice into an *http.Response.
//
// Bug fix: the body was previously closed via defer before returning, so
// callers could never read resp.Body. The caller now owns the body and is
// responsible for closing it (cheap here, since the source is in-memory).
func ParseRawResponse(raw []byte) (*http.Response, error) {
	reader := bytes.NewReader(raw)

	// parse the wire format with the stdlib reader; req==nil means the
	// response is interpreted without reference to a request method.
	resp, err := http.ReadResponse(bufio.NewReader(reader), nil)
	if err != nil {
		return nil, err
	}
	return resp, nil
}
|
2024-08-26 01:20:03 +08:00
|
|
|
|
|
|
|
func GetPresetWordList(key []string) []string {
|
|
|
|
var wordlist []string
|
|
|
|
|
|
|
|
for _, k := range key {
|
|
|
|
if v, ok := mask.SpecialWords[k]; ok {
|
|
|
|
wordlist = append(wordlist, v...)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return wordlist
|
|
|
|
}
|
2024-08-26 02:22:35 +08:00
|
|
|
|
|
|
|
// ParseExtension returns everything after the FIRST dot in s
// ("a.tar.gz" -> "tar.gz"), or "" when s has no dot. Note this intentionally
// differs from path.Ext, which splits on the last dot.
func ParseExtension(s string) string {
	if _, ext, found := strings.Cut(s, "."); found {
		return ext
	}
	return ""
}
|
|
|
|
|
2025-06-05 13:45:48 +08:00
|
|
|
// ParseStatus parses the input string and updates the preset status filters.
// Syntax of changed: "+a,b" appends codes to preset, "!a,b" removes codes
// from preset, anything else replaces preset entirely. A trailing "*" marks
// a prefix token ("5*" -> 5), which StatusContain later treats as a 5xx
// wildcard; tokens that fail to parse (or parse to 0) are ignored.
// The deduplicated result is returned.
func ParseStatus(preset []int, changed string) []int {
	if changed == "" {
		return preset
	}

	// parseToken parses one comma-separated token; the bool reports whether
	// the token used the "*" prefix form. Returns (0, false) on bad input.
	parseToken := func(s string) (int, bool) {
		s = strings.TrimSpace(s)
		if strings.HasSuffix(s, "*") {
			prefix := s[:len(s)-1]
			if t, err := strconv.Atoi(prefix); err == nil {
				return t, true // isPrefix = true
			}
		} else if t, err := strconv.Atoi(s); err == nil {
			return t, false // isPrefix = false
		}
		return 0, false
	}

	if strings.HasPrefix(changed, "+") {
		// append mode: add every valid token to the preset
		for _, s := range strings.Split(changed[1:], ",") {
			if t, _ := parseToken(s); t != 0 {
				preset = append(preset, t)
			}
		}
	} else if strings.HasPrefix(changed, "!") {
		// remove mode: filter each matched code out of preset in place.
		// newPreset aliases preset's backing array; this is safe because the
		// write index can never pass the read index.
		for _, s := range strings.Split(changed[1:], ",") {
			if t, _ := parseToken(s); t != 0 {
				newPreset := preset[:0]
				for _, val := range preset {
					if val != t {
						newPreset = append(newPreset, val)
					}
				}
				preset = newPreset
			}
		}
	} else {
		// replace mode: discard the preset and rebuild from the input
		preset = []int{}
		for _, s := range strings.Split(changed, ",") {
			if t, _ := parseToken(s); t != 0 {
				preset = append(preset, t)
			}
		}
	}
	return UniqueInts(preset)
}
|
|
|
|
|
|
|
|
// UniqueInts returns input with duplicates removed, preserving the order of
// first appearance.
func UniqueInts(input []int) []int {
	out := make([]int, 0, len(input))
	seen := make(map[int]struct{}, len(input))

	for _, v := range input {
		if _, dup := seen[v]; dup {
			continue
		}
		seen[v] = struct{}{}
		out = append(out, v)
	}
	return out
}
|
|
|
|
|
|
|
|
// StatusContain checks if a status matches any of the preset filters.
// Preset values below 10 match the first digit (5 = 5xx), values below 100
// match the first two digits (51 = 51x), and values >= 100 must match
// exactly. An empty preset matches everything.
func StatusContain(preset []int, status int) bool {
	if len(preset) == 0 {
		return true
	}
	for _, s := range preset {
		switch {
		case s < 10:
			if status/100 == s {
				return true
			}
		case s < 100:
			if status/10 == s {
				return true
			}
		case s == status:
			return true
		}
	}
	return false
}
|
|
|
|
|
|
|
|
func LoadFileToSlice(filename string) ([]string, error) {
|
|
|
|
var ss []string
|
|
|
|
if dicts, ok := Dicts[filename]; ok {
|
|
|
|
if files.IsExist(filename) {
|
|
|
|
logs.Log.Warnf("load and overwrite %s from preset", filename)
|
|
|
|
}
|
|
|
|
return dicts, nil
|
|
|
|
}
|
|
|
|
content, err := ioutil.ReadFile(filename)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
ss = strings.Split(strings.TrimSpace(string(content)), "\n")
|
|
|
|
|
|
|
|
// 统一windows与linux的回车换行差异
|
|
|
|
for i, word := range ss {
|
|
|
|
ss[i] = strings.TrimSpace(word)
|
|
|
|
}
|
|
|
|
|
|
|
|
return ss, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func LoadRuleAndCombine(filename []string) (string, error) {
|
|
|
|
var bs bytes.Buffer
|
|
|
|
for _, f := range filename {
|
|
|
|
if data, ok := Rules[f]; ok {
|
|
|
|
bs.WriteString(strings.TrimSpace(data))
|
|
|
|
bs.WriteString("\n")
|
|
|
|
} else {
|
|
|
|
content, err := ioutil.ReadFile(f)
|
|
|
|
if err != nil {
|
|
|
|
return "", err
|
|
|
|
}
|
|
|
|
bs.Write(bytes.TrimSpace(content))
|
|
|
|
bs.WriteString("\n")
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return bs.String(), nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func loadFileWithCache(filename string) ([]string, error) {
|
|
|
|
if dict, ok := Dicts[filename]; ok {
|
|
|
|
return dict, nil
|
|
|
|
}
|
|
|
|
dict, err := LoadFileToSlice(filename)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
Dicts[filename] = dict
|
|
|
|
return dict, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func loadDictionaries(filenames []string) ([][]string, error) {
|
|
|
|
dicts := make([][]string, len(filenames))
|
|
|
|
for i, name := range filenames {
|
|
|
|
dict, err := loadFileWithCache(name)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
dicts[i] = dict
|
|
|
|
}
|
|
|
|
return dicts, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func LoadWordlist(word string, dictNames []string) ([]string, error) {
|
|
|
|
if wl, ok := wordlistCache[word+strings.Join(dictNames, ",")]; ok {
|
|
|
|
return wl, nil
|
|
|
|
}
|
|
|
|
dicts, err := loadDictionaries(dictNames)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
wl, err := mask.Run(word, dicts, nil)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
wordlistCache[word] = wl
|
|
|
|
return wl, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func LoadRuleWithFiles(ruleFiles []string, filter string) ([]rule.Expression, error) {
|
|
|
|
if rules, ok := ruleCache[strings.Join(ruleFiles, ",")]; ok {
|
|
|
|
return rules, nil
|
|
|
|
}
|
|
|
|
var rules bytes.Buffer
|
|
|
|
for _, filename := range ruleFiles {
|
|
|
|
content, err := ioutil.ReadFile(filename)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
rules.Write(content)
|
|
|
|
rules.WriteString("\n")
|
|
|
|
}
|
|
|
|
return rule.Compile(rules.String(), filter).Expressions, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
// WrapWordsFunc lifts a string->string transform into the string->[]string
// shape expected by the word pipeline, wrapping the single result in a slice.
func WrapWordsFunc(f func(string) string) func(string) []string {
	return func(word string) []string {
		result := f(word)
		return []string{result}
	}
}
|
|
|
|
|
|
|
|
// SafeFilename converts a url into a filesystem-safe name by stripping the
// scheme and replacing path separators and colons with underscores.
func SafeFilename(filename string) string {
	// replacements are applied sequentially, in this exact order
	for _, sub := range [][2]string{
		{"http://", ""},
		{"https://", ""},
		{":", "_"},
		{"/", "_"},
	} {
		filename = strings.ReplaceAll(filename, sub[0], sub[1])
	}
	return filename
}
|