2022-09-08 15:57:17 +08:00
|
|
|
|
package pkg
|
|
|
|
|
|
|
|
|
|
import (
|
2023-01-05 23:26:14 +08:00
|
|
|
|
"encoding/json"
|
2022-10-28 00:46:54 +08:00
|
|
|
|
"github.com/chainreactors/gogo/v2/pkg/fingers"
|
|
|
|
|
"github.com/chainreactors/ipcs"
|
2023-01-28 13:15:49 +08:00
|
|
|
|
"github.com/chainreactors/parsers"
|
|
|
|
|
"github.com/chainreactors/parsers/iutils"
|
2023-01-06 00:48:13 +08:00
|
|
|
|
"github.com/chainreactors/words/mask"
|
2022-09-08 15:57:17 +08:00
|
|
|
|
"math/rand"
|
2023-01-03 18:22:13 +08:00
|
|
|
|
"net/url"
|
2022-09-08 15:57:17 +08:00
|
|
|
|
"os"
|
2023-01-03 18:22:13 +08:00
|
|
|
|
"path"
|
2023-01-03 17:09:32 +08:00
|
|
|
|
"regexp"
|
|
|
|
|
"strings"
|
2022-09-08 15:57:17 +08:00
|
|
|
|
"time"
|
|
|
|
|
"unsafe"
|
|
|
|
|
)
|
|
|
|
|
|
2023-01-03 17:09:32 +08:00
|
|
|
|
var (
	// Md5Fingers maps a favicon MD5 hash to the matching finger name.
	// Populated by LoadTemplates from favicon rules.
	Md5Fingers map[string]string = make(map[string]string)
	// Mmh3Fingers maps a favicon mmh3 hash to the matching finger name.
	// Populated by LoadTemplates from favicon rules.
	Mmh3Fingers map[string]string = make(map[string]string)
	// Rules maps a rule name to its raw rule content, loaded from the
	// "rule" config by LoadTemplates.
	Rules map[string]string = make(map[string]string)
	// ActivePath collects the active-probe paths (SendDataStr) declared by
	// finger rules; filled by LoadTemplates.
	ActivePath []string
	// Fingers holds all fingerprints loaded from the "http" config.
	Fingers fingers.Fingers
	//JSRegexps []*regexp.Regexp = []*regexp.Regexp{
	//	regexp.MustCompile(`.(https{0,1}:[^\s'’"”><()|*\[]{2,250}?[^=*\s'’><:;|()[]{3}\[]\.js)`),
	//	regexp.MustCompile(`["']([^\s',’"”><;()|*:\[]{2,250}?[^=*\s'’|"”><^:;()\[]{3}\.js)`),
	//	regexp.MustCompile(`=\s{0,6}["']{0,1}\s{0,6}([^\s^'’,+><;()|*\[]{2,250}?[^=,\s'’"”>|<:;*()\[]{3}\.js)`),
	//}
	//URLRegexps []*regexp.Regexp = []*regexp.Regexp{
	//	regexp.MustCompile(`=\s{0,6}(https{0,1}:[^\s'"><()|*\[]{2,250})`),
	//	regexp.MustCompile(`["']([^\s',’"”><.@$;:()|*\[]{2,250}\.[a-zA-Z]\w{1,4})["']`),
	//	regexp.MustCompile(`["'](https?:[^\s'"><()@|*\[]{2,250}?\.[^\s',’"”><;()|*\[]{2,250}?)["']`),
	//	regexp.MustCompile(`["']\s{0,6}([#,.]{0,2}/[^\s'",><;@$()|*\[]{2,250}?)\s{0,6}["']`),
	//	regexp.MustCompile(`href\s{0,6}=\s{0,6}["'‘“]{0,1}\s{0,6}([^\s',’"”><$@;()|*\[]{2,250})|action\s{0,6}=\s{0,6}["'‘“]{0,1}\s{0,6}([^\s'’"“><)(]{2,250})`),
	//}

	// ExtractRegexps maps an extractor name (and each of its tags) to its
	// compiled regexps; filled by LoadTemplates from the "extract" config.
	ExtractRegexps map[string][]*regexp.Regexp = map[string][]*regexp.Regexp{}
	// Extractors is the set of active extractors.
	Extractors = make(parsers.Extractors)

	// BadExt lists file extensions excluded when filtering extracted URLs.
	BadExt = []string{".js", ".css", ".scss", ".,", ".jpeg", ".jpg", ".png", ".gif", ".svg", ".vue", ".ts", ".swf", ".pdf", ".mp4"}
	// BadURL lists substrings that mark an extracted URL as junk
	// (JS artifacts, source-map paths, DOM property names, etc.).
	BadURL = []string{";", "}", "\\n", "webpack://", "{", "www.w3.org", ".src", ".url", ".att", ".href", "location.href", "javascript:", "location:", ".createObject", ":location", ".path"}

	// ContentTypeMap maps an HTTP Content-Type value to a short file-type label.
	ContentTypeMap = map[string]string{
		"application/javascript":   "js",
		"application/json":         "json",
		"application/xml":          "xml",
		"application/octet-stream": "bin",
		"application/atom+xml":     "atom",
		"application/msword":       "doc",
		"application/pdf":          "pdf",
		"image/gif":                "gif",
		"image/jpeg":               "jpg",
		"image/png":                "png",
		"image/svg+xml":            "svg",
		"text/css":                 "css",
		"text/plain":               "txt",
		"text/html":                "html",
		"audio/mpeg":               "mp3",
		"video/mp4":                "mp4",
		"video/ogg":                "ogg",
		"video/webm":               "webm",
		"video/x-ms-wmv":           "wmv",
		"video/avi":                "avi",
		"image/x-icon":             "ico",
	}
)
|
|
|
|
|
|
2023-01-09 21:33:05 +08:00
|
|
|
|
// RemoveDuplication removes duplicate strings from arr in place,
// preserving the order of first occurrence. The input's backing array is
// reused; the returned slice aliases arr.
func RemoveDuplication(arr []string) []string {
	seen := make(map[string]struct{}, len(arr))
	out := arr[:0]
	for _, s := range arr {
		if _, dup := seen[s]; dup {
			continue
		}
		seen[s] = struct{}{}
		out = append(out, s)
	}
	return out
}
|
|
|
|
|
|
2022-09-08 15:57:17 +08:00
|
|
|
|
// HasStdin reports whether data is being piped or redirected into stdin
// (i.e. stdin is not an interactive terminal). Returns false if stdin
// cannot be stat'ed.
func HasStdin() bool {
	fi, err := os.Stdin.Stat()
	if err != nil {
		return false
	}
	mode := fi.Mode()
	fromRedirect := mode&os.ModeCharDevice == 0 // not a char device => redirected
	fromPipe := mode&os.ModeNamedPipe != 0      // named pipe (FIFO)
	return fromRedirect || fromPipe
}
|
|
|
|
|
|
|
|
|
|
// letters is the alphabet used by RandPath and RandHost.
const letters = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"

// src is the random source for RandPath/RandHost, seeded once at startup.
var src = rand.NewSource(time.Now().UnixNano())

const (
	// 6 bits to represent a letter index
	letterIdBits = 6
	// All 1-bits as many as letterIdBits
	letterIdMask = 1<<letterIdBits - 1
	// letterIdMax is how many 6-bit letter indexes fit in one Int63 draw.
	letterIdMax = 63 / letterIdBits
)
|
|
|
|
|
|
|
|
|
|
func RandPath() string {
|
|
|
|
|
n := 16
|
|
|
|
|
b := make([]byte, n)
|
|
|
|
|
// A rand.Int63() generates 63 random bits, enough for letterIdMax letters!
|
2023-01-09 14:47:58 +08:00
|
|
|
|
for i, cache, remain := n-1, src.Int63(), letterIdMax; i >= 0; {
|
2022-09-08 15:57:17 +08:00
|
|
|
|
if remain == 0 {
|
|
|
|
|
cache, remain = src.Int63(), letterIdMax
|
|
|
|
|
}
|
|
|
|
|
if idx := int(cache & letterIdMask); idx < len(letters) {
|
|
|
|
|
b[i] = letters[idx]
|
|
|
|
|
i--
|
|
|
|
|
}
|
|
|
|
|
cache >>= letterIdBits
|
|
|
|
|
remain--
|
|
|
|
|
}
|
|
|
|
|
return *(*string)(unsafe.Pointer(&b))
|
|
|
|
|
}
|
2022-10-26 18:28:40 +08:00
|
|
|
|
|
|
|
|
|
func RandHost() string {
|
|
|
|
|
n := 8
|
|
|
|
|
b := make([]byte, n)
|
|
|
|
|
// A rand.Int63() generates 63 random bits, enough for letterIdMax letters!
|
|
|
|
|
for i, cache, remain := n-1, src.Int63(), letterIdMax; i >= 1; {
|
|
|
|
|
if remain == 0 {
|
|
|
|
|
cache, remain = src.Int63(), letterIdMax
|
|
|
|
|
}
|
|
|
|
|
if idx := int(cache & letterIdMask); idx < len(letters) {
|
|
|
|
|
b[i] = letters[idx]
|
|
|
|
|
i--
|
|
|
|
|
}
|
|
|
|
|
cache >>= letterIdBits
|
|
|
|
|
remain--
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
b[5] = byte(0x2e)
|
|
|
|
|
return *(*string)(unsafe.Pointer(&b))
|
|
|
|
|
}
|
2022-10-28 00:46:54 +08:00
|
|
|
|
|
|
|
|
|
// LoadTemplates loads and compiles all embedded configuration used at
// runtime: HTTP fingerprints, bak-name rules, mask keywords, and URL
// extractors. It populates the package-level Fingers, ActivePath,
// Mmh3Fingers, Md5Fingers, Rules, mask.SpecialWords and ExtractRegexps.
// Returns the first load/parse/compile error encountered.
// NOTE(review): LoadConfig is defined elsewhere in this package; it
// presumably returns the raw bytes for the named embedded config.
func LoadTemplates() error {
	var err error
	// load fingers
	Fingers, err = fingers.LoadFingers(LoadConfig("http"))
	if err != nil {
		return err
	}

	// Compile every finger; any compile failure aborts loading.
	for _, finger := range Fingers {
		err := finger.Compile(ipcs.ParsePorts)
		if err != nil {
			return err
		}
	}

	// Index favicon hashes and collect active-probe paths from the rules.
	for _, f := range Fingers {
		for _, rule := range f.Rules {
			if rule.SendDataStr != "" {
				ActivePath = append(ActivePath, rule.SendDataStr)
			}
			if rule.Favicon != nil {
				for _, mmh3 := range rule.Favicon.Mmh3 {
					Mmh3Fingers[mmh3] = f.Name
				}
				for _, md5 := range rule.Favicon.Md5 {
					Md5Fingers[md5] = f.Name
				}
			}
		}
	}

	// load rule
	var data map[string]interface{}
	err = json.Unmarshal(LoadConfig("rule"), &data)
	if err != nil {
		return err
	}
	for k, v := range data {
		// Values are expected to be strings; a non-string value panics here.
		Rules[k] = v.(string)
	}

	// load mask
	var keywords map[string]interface{}
	err = json.Unmarshal(LoadConfig("mask"), &keywords)
	if err != nil {
		return err
	}

	// Each keyword entry is a JSON array; coerce every element to string
	// and register it as a special word for mask expansion.
	for k, v := range keywords {
		t := make([]string, len(v.([]interface{})))
		for i, vv := range v.([]interface{}) {
			t[i] = iutils.ToString(vv)
		}
		mask.SpecialWords[k] = t
	}

	// load extractors and index their compiled regexps by name and by tag
	var extracts []*parsers.Extractor
	err = json.Unmarshal(LoadConfig("extract"), &extracts)
	if err != nil {
		return err
	}

	for _, extract := range extracts {
		extract.Compile()

		ExtractRegexps[extract.Name] = extract.CompiledRegexps
		for _, tag := range extract.Tags {
			// A tag shared by several extractors accumulates all of
			// their regexps.
			if _, ok := ExtractRegexps[tag]; !ok {
				ExtractRegexps[tag] = extract.CompiledRegexps
			} else {
				ExtractRegexps[tag] = append(ExtractRegexps[tag], extract.CompiledRegexps...)
			}
		}
	}
	return nil
}
|
|
|
|
|
|
2023-02-01 18:31:50 +08:00
|
|
|
|
func FingerDetect(content string) parsers.Frameworks {
|
|
|
|
|
frames := make(parsers.Frameworks)
|
2022-10-28 00:46:54 +08:00
|
|
|
|
for _, finger := range Fingers {
|
2023-01-28 13:15:49 +08:00
|
|
|
|
// sender置空, 所有的发包交给spray的pool
|
2022-10-28 00:46:54 +08:00
|
|
|
|
frame, _, ok := fingers.FingerMatcher(finger, content, 0, nil)
|
|
|
|
|
if ok {
|
2023-02-01 18:31:50 +08:00
|
|
|
|
frames[frame.Name] = frame
|
2022-10-28 00:46:54 +08:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
return frames
|
|
|
|
|
}
|
2023-01-03 17:09:32 +08:00
|
|
|
|
|
2023-01-03 18:22:13 +08:00
|
|
|
|
func filterJs(u string) bool {
|
2023-01-09 21:33:05 +08:00
|
|
|
|
if commonFilter(u) {
|
|
|
|
|
return true
|
2023-01-03 18:22:13 +08:00
|
|
|
|
}
|
2023-01-09 21:33:05 +08:00
|
|
|
|
|
2023-01-03 18:22:13 +08:00
|
|
|
|
return false
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
func filterUrl(u string) bool {
|
2023-01-09 21:33:05 +08:00
|
|
|
|
if commonFilter(u) {
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
|
2023-01-03 18:22:13 +08:00
|
|
|
|
parsed, err := url.Parse(u)
|
|
|
|
|
if err != nil {
|
|
|
|
|
return true
|
|
|
|
|
} else {
|
|
|
|
|
ext := path.Ext(parsed.Path)
|
|
|
|
|
for _, e := range BadExt {
|
2023-01-09 22:41:05 +08:00
|
|
|
|
if strings.EqualFold(e, ext) {
|
2023-01-03 18:22:13 +08:00
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
2023-01-09 21:33:05 +08:00
|
|
|
|
return false
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
func formatURL(u string) string {
|
|
|
|
|
// 去掉frag与params, 节约url.parse性能, 防止带参数造成意外的影响
|
2023-01-10 00:58:16 +08:00
|
|
|
|
if strings.Contains(u, "2f") || strings.Contains(u, "2F") {
|
|
|
|
|
u = strings.ReplaceAll(u, "\\u002F", "/")
|
|
|
|
|
u = strings.ReplaceAll(u, "\\u002f", "/")
|
|
|
|
|
u = strings.ReplaceAll(u, "%252F", "/")
|
|
|
|
|
u = strings.ReplaceAll(u, "%252f", "/")
|
|
|
|
|
u = strings.ReplaceAll(u, "%2f", "/")
|
|
|
|
|
u = strings.ReplaceAll(u, "%2F", "/")
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
u = strings.TrimRight(u, "\\")
|
2023-01-09 21:33:05 +08:00
|
|
|
|
if i := strings.Index(u, "?"); i != -1 {
|
|
|
|
|
return u[:i]
|
|
|
|
|
}
|
|
|
|
|
if i := strings.Index(u, "#"); i != -1 {
|
|
|
|
|
return u[:i]
|
|
|
|
|
}
|
|
|
|
|
return u
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
func commonFilter(u string) bool {
|
2023-01-09 22:41:05 +08:00
|
|
|
|
if strings.HasPrefix(u, "http") && len(u) < 15 {
|
2023-01-09 21:33:05 +08:00
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
|
2023-01-10 00:58:16 +08:00
|
|
|
|
for _, bad := range BadURL {
|
|
|
|
|
if strings.Contains(u, bad) {
|
2023-01-03 18:22:13 +08:00
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
return false
|
|
|
|
|
}
|
2023-01-05 23:26:14 +08:00
|
|
|
|
|
2023-01-06 00:48:13 +08:00
|
|
|
|
// BakGenerator returns every distinct non-empty substring of domain, in
// order of first generation, for use as backup-file name candidates
// (e.g. "example" -> "e", "ex", ..., "xample", ...).
//
// Improvement: deduplication previously used iutils.StringsContains, a
// linear scan over the growing result inside the double substring loop
// (O(n^3)-ish overall). A map-based seen-set keeps the same output and
// order with O(1) membership checks, and drops the third-party call.
func BakGenerator(domain string) []string {
	var possibilities []string
	seen := make(map[string]struct{})
	for first := range domain {
		for last := range domain[first:] {
			p := domain[first : first+last+1]
			if _, ok := seen[p]; !ok {
				seen[p] = struct{}{}
				possibilities = append(possibilities, p)
			}
		}
	}
	return possibilities
}
|