mirror of
https://github.com/XTLS/Xray-core.git
synced 2026-05-08 14:13:22 +00:00
MPH domain matcher: Support building & using cache directly (instead of building from geosite.dat when Xray starts) (#5505)
Like https://github.com/XTLS/Xray-core/pull/5488#issuecomment-3710995080
This commit is contained in:
@@ -12,9 +12,11 @@ import (
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/xtls/xray-core/app/router"
|
||||
"github.com/xtls/xray-core/common"
|
||||
"github.com/xtls/xray-core/common/errors"
|
||||
"github.com/xtls/xray-core/common/net"
|
||||
"github.com/xtls/xray-core/common/platform"
|
||||
"github.com/xtls/xray-core/common/session"
|
||||
"github.com/xtls/xray-core/common/strmatcher"
|
||||
"github.com/xtls/xray-core/features/dns"
|
||||
@@ -83,9 +85,31 @@ func New(ctx context.Context, config *Config) (*DNS, error) {
|
||||
return nil, errors.New("unexpected query strategy ", config.QueryStrategy)
|
||||
}
|
||||
|
||||
hosts, err := NewStaticHosts(config.StaticHosts)
|
||||
if err != nil {
|
||||
return nil, errors.New("failed to create hosts").Base(err)
|
||||
var hosts *StaticHosts
|
||||
mphLoaded := false
|
||||
domainMatcherPath := platform.NewEnvFlag(platform.MphCachePath).GetValue(func() string { return "" })
|
||||
if domainMatcherPath != "" {
|
||||
if f, err := os.Open(domainMatcherPath); err == nil {
|
||||
defer f.Close()
|
||||
if m, err := router.LoadGeoSiteMatcher(f, "HOSTS"); err == nil {
|
||||
f.Seek(0, 0)
|
||||
if hostIPs, err := router.LoadGeoSiteHosts(f); err == nil {
|
||||
if sh, err := NewStaticHostsFromCache(m, hostIPs); err == nil {
|
||||
hosts = sh
|
||||
mphLoaded = true
|
||||
errors.LogDebug(ctx, "MphDomainMatcher loaded from cache for DNS hosts, size: ", sh.matchers.Size())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !mphLoaded {
|
||||
sh, err := NewStaticHosts(config.StaticHosts)
|
||||
if err != nil {
|
||||
return nil, errors.New("failed to create hosts").Base(err)
|
||||
}
|
||||
hosts = sh
|
||||
}
|
||||
|
||||
var clients []*Client
|
||||
|
||||
@@ -14,7 +14,7 @@ import (
|
||||
// StaticHosts represents static domain-ip mapping in DNS server.
|
||||
type StaticHosts struct {
|
||||
ips [][]net.Address
|
||||
matchers *strmatcher.MatcherGroup
|
||||
matchers strmatcher.IndexMatcher
|
||||
}
|
||||
|
||||
// NewStaticHosts creates a new StaticHosts instance.
|
||||
@@ -124,3 +124,50 @@ func (h *StaticHosts) lookup(domain string, option dns.IPOption, maxDepth int) (
|
||||
func (h *StaticHosts) Lookup(domain string, option dns.IPOption) ([]net.Address, error) {
|
||||
return h.lookup(domain, option, 5)
|
||||
}
|
||||
// NewStaticHostsFromCache builds a StaticHosts from a pre-built (cached) MPH
// domain matcher plus the pattern->IP table stored alongside it, instead of
// compiling matchers from config when Xray starts.
//
// hostIPs carries one entry per host pattern; the special key "_ORDER" lists
// the patterns in the same order their matchers were serialized, which is what
// lets match indices be mapped back to IP lists.
func NewStaticHostsFromCache(matcher strmatcher.IndexMatcher, hostIPs map[string][]string) (*StaticHosts, error) {
	sh := &StaticHosts{
		// Matcher indices are 1-based, hence the +1 slot.
		ips:      make([][]net.Address, matcher.Size()+1),
		matchers: matcher,
	}

	order := hostIPs["_ORDER"]
	var offset uint32

	img, ok := matcher.(*strmatcher.IndexMatcherGroup)
	if !ok {
		// Single matcher (e.g. only manual or only one geosite)
		if len(order) > 0 {
			pattern := order[0]
			ips := parseIPs(hostIPs[pattern])
			// Every index of the lone matcher resolves to the same IP list.
			for i := uint32(1); i <= matcher.Size(); i++ {
				sh.ips[i] = ips
			}
		}
		return sh, nil
	}

	// Group case: each sub-matcher owns a contiguous 1-based index range of
	// length m.Size(); offset tracks where the current range starts.
	for i, m := range img.Matchers {
		if i < len(order) {
			pattern := order[i]
			ips := parseIPs(hostIPs[pattern])
			for j := uint32(1); j <= m.Size(); j++ {
				sh.ips[offset+j] = ips
			}
			// NOTE(review): offset only advances when i < len(order); if
			// _ORDER is ever shorter than img.Matchers, the later ranges would
			// misalign — confirm the serializer always keeps them in sync.
			offset += m.Size()
		}
	}
	return sh, nil
}
|
||||
|
||||
func parseIPs(raw []string) []net.Address {
|
||||
addrs := make([]net.Address, 0, len(raw))
|
||||
for _, s := range raw {
|
||||
if len(s) > 1 && s[0] == '#' {
|
||||
rcode, _ := strconv.Atoi(s[1:])
|
||||
addrs = append(addrs, dns.RCodeError(rcode))
|
||||
} else {
|
||||
addrs = append(addrs, net.ParseAddress(s))
|
||||
}
|
||||
}
|
||||
return addrs
|
||||
}
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
package dns_test
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"testing"
|
||||
|
||||
"github.com/google/go-cmp/cmp"
|
||||
. "github.com/xtls/xray-core/app/dns"
|
||||
"github.com/xtls/xray-core/app/router"
|
||||
"github.com/xtls/xray-core/common"
|
||||
"github.com/xtls/xray-core/common/net"
|
||||
"github.com/xtls/xray-core/features/dns"
|
||||
@@ -130,3 +132,57 @@ func TestStaticHosts(t *testing.T) {
|
||||
}
|
||||
}
|
||||
}
|
||||
// TestStaticHostsFromCache round-trips a geosite cache (matchers + host IPs)
// through SerializeGeoSiteList and verifies that a StaticHosts rebuilt purely
// from the cache resolves both a direct entry and a dependency entry.
func TestStaticHostsFromCache(t *testing.T) {
	sites := []*router.GeoSite{
		{
			CountryCode: "cloudflare-dns.com",
			Domain: []*router.Domain{
				{Type: router.Domain_Full, Value: "example.com"},
			},
		},
		{
			CountryCode: "geosite:cn",
			Domain: []*router.Domain{
				{Type: router.Domain_Domain, Value: "baidu.cn"},
			},
		},
	}
	// The synthetic "HOSTS" entry depends on both site entries, mirroring how
	// the DNS hosts matcher is stored in a real cache file.
	deps := map[string][]string{
		"HOSTS": {"cloudflare-dns.com", "geosite:cn"},
	}
	hostIPs := map[string][]string{
		"cloudflare-dns.com": {"1.1.1.1"},
		"geosite:cn":         {"2.2.2.2"},
		"_ORDER":             {"cloudflare-dns.com", "geosite:cn"},
	}

	var buf bytes.Buffer
	err := router.SerializeGeoSiteList(sites, deps, hostIPs, &buf)
	common.Must(err)

	// Load matcher
	m, err := router.LoadGeoSiteMatcher(bytes.NewReader(buf.Bytes()), "HOSTS")
	common.Must(err)

	// Load hostIPs
	f := bytes.NewReader(buf.Bytes())
	hips, err := router.LoadGeoSiteHosts(f)
	common.Must(err)

	hosts, err := NewStaticHostsFromCache(m, hips)
	common.Must(err)

	{
		// Direct (Full) entry from the first site.
		ips, _ := hosts.Lookup("example.com", dns.IPOption{IPv4Enable: true})
		if len(ips) != 1 || ips[0].String() != "1.1.1.1" {
			t.Error("failed to lookup example.com from cache")
		}
	}

	{
		// Entry reached only through the "HOSTS" dependency list.
		ips, _ := hosts.Lookup("baidu.cn", dns.IPOption{IPv4Enable: true})
		if len(ips) != 1 || ips[0].String() != "2.2.2.2" {
			t.Error("failed to lookup baidu.cn from cache deps")
		}
	}
}
|
||||
|
||||
@@ -10,6 +10,8 @@ import (
|
||||
"github.com/xtls/xray-core/app/router"
|
||||
"github.com/xtls/xray-core/common/errors"
|
||||
"github.com/xtls/xray-core/common/net"
|
||||
"github.com/xtls/xray-core/common/platform"
|
||||
"github.com/xtls/xray-core/common/platform/filesystem"
|
||||
"github.com/xtls/xray-core/common/session"
|
||||
"github.com/xtls/xray-core/common/strmatcher"
|
||||
"github.com/xtls/xray-core/core"
|
||||
@@ -17,6 +19,18 @@ import (
|
||||
"github.com/xtls/xray-core/features/routing"
|
||||
)
|
||||
|
||||
// mphMatcherWrapper adapts a strmatcher.IndexMatcher (e.g. an MPH matcher
// loaded from a cache file) to the simpler boolean matcher shape used by the
// DNS client's domain rules.
type mphMatcherWrapper struct {
	m strmatcher.IndexMatcher
}

// Match reports whether s matches any pattern in the wrapped matcher.
// NOTE(review): this relies on IndexMatcher.Match returning nil (not an empty
// non-nil slice) when nothing matches — confirm against strmatcher's contract.
func (w *mphMatcherWrapper) Match(s string) bool {
	return w.m.Match(s) != nil
}

// String returns a fixed label used when listing rules for diagnostics.
func (w *mphMatcherWrapper) String() string {
	return "mph-matcher"
}
|
||||
|
||||
// Server is the interface for Name Server.
|
||||
type Server interface {
|
||||
// Name of the Client.
|
||||
@@ -132,29 +146,50 @@ func NewClient(
|
||||
var rules []string
|
||||
ruleCurr := 0
|
||||
ruleIter := 0
|
||||
for i, domain := range ns.PrioritizedDomain {
|
||||
ns.PrioritizedDomain[i] = nil
|
||||
domainRule, err := toStrMatcher(domain.Type, domain.Domain)
|
||||
if err != nil {
|
||||
errors.LogErrorInner(ctx, err, "failed to create domain matcher, ignore domain rule [type: ", domain.Type, ", domain: ", domain.Domain, "]")
|
||||
domainRule, _ = toStrMatcher(DomainMatchingType_Full, "hack.fix.index.for.illegal.domain.rule")
|
||||
}
|
||||
originalRuleIdx := ruleCurr
|
||||
if ruleCurr < len(ns.OriginalRules) {
|
||||
rule := ns.OriginalRules[ruleCurr]
|
||||
if ruleCurr >= len(rules) {
|
||||
rules = append(rules, rule.Rule)
|
||||
|
||||
// Check if domain matcher cache is provided via environment
|
||||
domainMatcherPath := platform.NewEnvFlag(platform.MphCachePath).GetValue(func() string { return "" })
|
||||
var mphLoaded bool
|
||||
|
||||
if domainMatcherPath != "" && ns.Tag != "" {
|
||||
f, err := filesystem.NewFileReader(domainMatcherPath)
|
||||
if err == nil {
|
||||
defer f.Close()
|
||||
g, err := router.LoadGeoSiteMatcher(f, ns.Tag)
|
||||
if err == nil {
|
||||
errors.LogDebug(ctx, "MphDomainMatcher loaded from cache for ", ns.Tag, " dns tag)")
|
||||
updateDomainRule(&mphMatcherWrapper{m: g}, 0, *matcherInfos)
|
||||
rules = append(rules, "[MPH Cache]")
|
||||
mphLoaded = true
|
||||
}
|
||||
ruleIter++
|
||||
if ruleIter >= int(rule.Size) {
|
||||
ruleIter = 0
|
||||
}
|
||||
}
|
||||
|
||||
if !mphLoaded {
|
||||
for i, domain := range ns.PrioritizedDomain {
|
||||
ns.PrioritizedDomain[i] = nil
|
||||
domainRule, err := toStrMatcher(domain.Type, domain.Domain)
|
||||
if err != nil {
|
||||
errors.LogErrorInner(ctx, err, "failed to create domain matcher, ignore domain rule [type: ", domain.Type, ", domain: ", domain.Domain, "]")
|
||||
domainRule, _ = toStrMatcher(DomainMatchingType_Full, "hack.fix.index.for.illegal.domain.rule")
|
||||
}
|
||||
originalRuleIdx := ruleCurr
|
||||
if ruleCurr < len(ns.OriginalRules) {
|
||||
rule := ns.OriginalRules[ruleCurr]
|
||||
if ruleCurr >= len(rules) {
|
||||
rules = append(rules, rule.Rule)
|
||||
}
|
||||
ruleIter++
|
||||
if ruleIter >= int(rule.Size) {
|
||||
ruleIter = 0
|
||||
ruleCurr++
|
||||
}
|
||||
} else { // No original rule, generate one according to current domain matcher (majorly for compatibility with tests)
|
||||
rules = append(rules, domainRule.String())
|
||||
ruleCurr++
|
||||
}
|
||||
} else { // No original rule, generate one according to current domain matcher (majorly for compatibility with tests)
|
||||
rules = append(rules, domainRule.String())
|
||||
ruleCurr++
|
||||
updateDomainRule(domainRule, originalRuleIdx, *matcherInfos)
|
||||
}
|
||||
updateDomainRule(domainRule, originalRuleIdx, *matcherInfos)
|
||||
}
|
||||
ns.PrioritizedDomain = nil
|
||||
runtime.GC()
|
||||
|
||||
@@ -2,6 +2,7 @@ package router
|
||||
|
||||
import (
|
||||
"context"
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
@@ -52,7 +53,34 @@ var matcherTypeMap = map[Domain_Type]strmatcher.Type{
|
||||
}
|
||||
|
||||
type DomainMatcher struct {
|
||||
matchers strmatcher.IndexMatcher
|
||||
Matchers strmatcher.IndexMatcher
|
||||
}
|
||||
|
||||
func SerializeDomainMatcher(domains []*Domain, w io.Writer) error {
|
||||
|
||||
g := strmatcher.NewMphMatcherGroup()
|
||||
for _, d := range domains {
|
||||
matcherType, f := matcherTypeMap[d.Type]
|
||||
if !f {
|
||||
continue
|
||||
}
|
||||
|
||||
_, err := g.AddPattern(d.Value, matcherType)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
g.Build()
|
||||
// serialize
|
||||
return g.Serialize(w)
|
||||
}
|
||||
|
||||
func NewDomainMatcherFromBuffer(data []byte) (*strmatcher.MphMatcherGroup, error) {
|
||||
matcher, err := strmatcher.NewMphMatcherGroupFromBuffer(data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return matcher, nil
|
||||
}
|
||||
|
||||
func NewMphMatcherGroup(domains []*Domain) (*DomainMatcher, error) {
|
||||
@@ -72,12 +100,12 @@ func NewMphMatcherGroup(domains []*Domain) (*DomainMatcher, error) {
|
||||
}
|
||||
g.Build()
|
||||
return &DomainMatcher{
|
||||
matchers: g,
|
||||
Matchers: g,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (m *DomainMatcher) ApplyDomain(domain string) bool {
|
||||
return len(m.matchers.Match(strings.ToLower(domain))) > 0
|
||||
return len(m.Matchers.Match(strings.ToLower(domain))) > 0
|
||||
}
|
||||
|
||||
// Apply implements Condition.
|
||||
|
||||
167
app/router/condition_serialize_test.go
Normal file
167
app/router/condition_serialize_test.go
Normal file
@@ -0,0 +1,167 @@
|
||||
package router_test
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/xtls/xray-core/app/router"
|
||||
"github.com/xtls/xray-core/common/platform/filesystem"
|
||||
)
|
||||
|
||||
// TestDomainMatcherSerialization verifies that an MPH matcher survives a
// serialize/deserialize round trip: domains added before serialization must
// still match (including subdomains for Domain-type rules) afterwards.
func TestDomainMatcherSerialization(t *testing.T) {
	domains := []*router.Domain{
		{Type: router.Domain_Domain, Value: "google.com"},
		{Type: router.Domain_Domain, Value: "v2ray.com"},
		{Type: router.Domain_Full, Value: "full.example.com"},
	}

	var buf bytes.Buffer
	if err := router.SerializeDomainMatcher(domains, &buf); err != nil {
		t.Fatalf("Serialize failed: %v", err)
	}

	matcher, err := router.NewDomainMatcherFromBuffer(buf.Bytes())
	if err != nil {
		t.Fatalf("Deserialize failed: %v", err)
	}

	dMatcher := &router.DomainMatcher{
		Matchers: matcher,
	}
	testCases := []struct {
		Input string
		Match bool
	}{
		{"google.com", true},
		// Subdomains should match a Domain-type rule.
		{"maps.google.com", true},
		{"v2ray.com", true},
		{"full.example.com", true},

		// An unlisted domain must not match.
		{"example.com", false},
	}

	for _, tc := range testCases {
		if res := dMatcher.ApplyDomain(tc.Input); res != tc.Match {
			t.Errorf("Match(%s) = %v, want %v", tc.Input, res, tc.Match)
		}
	}
}
||||
|
||||
func TestGeoSiteSerialization(t *testing.T) {
|
||||
sites := []*router.GeoSite{
|
||||
{
|
||||
CountryCode: "CN",
|
||||
Domain: []*router.Domain{
|
||||
{Type: router.Domain_Domain, Value: "baidu.cn"},
|
||||
{Type: router.Domain_Domain, Value: "qq.com"},
|
||||
},
|
||||
},
|
||||
{
|
||||
CountryCode: "US",
|
||||
Domain: []*router.Domain{
|
||||
{Type: router.Domain_Domain, Value: "google.com"},
|
||||
{Type: router.Domain_Domain, Value: "facebook.com"},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
var buf bytes.Buffer
|
||||
if err := router.SerializeGeoSiteList(sites, nil, nil, &buf); err != nil {
|
||||
t.Fatalf("SerializeGeoSiteList failed: %v", err)
|
||||
}
|
||||
|
||||
tmp := t.TempDir()
|
||||
path := filepath.Join(tmp, "matcher.cache")
|
||||
|
||||
f, err := os.Create(path)
|
||||
require.NoError(t, err)
|
||||
_, err = f.Write(buf.Bytes())
|
||||
require.NoError(t, err)
|
||||
f.Close()
|
||||
|
||||
f, err = os.Open(path)
|
||||
require.NoError(t, err)
|
||||
defer f.Close()
|
||||
|
||||
require.NoError(t, err)
|
||||
data, _ := filesystem.ReadFile(path)
|
||||
|
||||
// cn
|
||||
gp, err := router.LoadGeoSiteMatcher(bytes.NewReader(data), "CN")
|
||||
if err != nil {
|
||||
t.Fatalf("LoadGeoSiteMatcher(CN) failed: %v", err)
|
||||
}
|
||||
|
||||
cnMatcher := &router.DomainMatcher{
|
||||
Matchers: gp,
|
||||
}
|
||||
|
||||
if !cnMatcher.ApplyDomain("baidu.cn") {
|
||||
t.Error("CN matcher should match baidu.cn")
|
||||
}
|
||||
if cnMatcher.ApplyDomain("google.com") {
|
||||
t.Error("CN matcher should NOT match google.com")
|
||||
}
|
||||
|
||||
// us
|
||||
gp, err = router.LoadGeoSiteMatcher(bytes.NewReader(data), "US")
|
||||
if err != nil {
|
||||
t.Fatalf("LoadGeoSiteMatcher(US) failed: %v", err)
|
||||
}
|
||||
|
||||
usMatcher := &router.DomainMatcher{
|
||||
Matchers: gp,
|
||||
}
|
||||
if !usMatcher.ApplyDomain("google.com") {
|
||||
t.Error("US matcher should match google.com")
|
||||
}
|
||||
if usMatcher.ApplyDomain("baidu.cn") {
|
||||
t.Error("US matcher should NOT match baidu.cn")
|
||||
}
|
||||
|
||||
// unknown
|
||||
_, err = router.LoadGeoSiteMatcher(bytes.NewReader(data), "unknown")
|
||||
if err == nil {
|
||||
t.Error("LoadGeoSiteMatcher(unknown) should fail")
|
||||
}
|
||||
}
|
||||
// TestGeoSiteSerializationWithDeps verifies that loading a tag whose cache
// entry declares dependencies yields a matcher covering the tag's own domains
// plus every dependency's domains.
func TestGeoSiteSerializationWithDeps(t *testing.T) {
	sites := []*router.GeoSite{
		{
			CountryCode: "geosite:cn",
			Domain: []*router.Domain{
				{Type: router.Domain_Domain, Value: "baidu.cn"},
			},
		},
		{
			CountryCode: "geosite:google@cn",
			Domain: []*router.Domain{
				{Type: router.Domain_Domain, Value: "google.cn"},
			},
		},
		{
			CountryCode: "rule-1",
			Domain: []*router.Domain{
				{Type: router.Domain_Domain, Value: "google.com"},
			},
		},
	}
	// rule-1 folds in both geosite entries when loaded.
	deps := map[string][]string{
		"rule-1": {"geosite:cn", "geosite:google@cn"},
	}

	var buf bytes.Buffer
	err := router.SerializeGeoSiteList(sites, deps, nil, &buf)
	require.NoError(t, err)

	matcher, err := router.LoadGeoSiteMatcher(bytes.NewReader(buf.Bytes()), "rule-1")
	require.NoError(t, err)

	// Own domain plus one domain from each dependency.
	require.True(t, matcher.Match("google.com") != nil)
	require.True(t, matcher.Match("baidu.cn") != nil)
	require.True(t, matcher.Match("google.cn") != nil)
}
|
||||
@@ -7,6 +7,8 @@ import (
|
||||
"strings"
|
||||
|
||||
"github.com/xtls/xray-core/common/errors"
|
||||
"github.com/xtls/xray-core/common/platform"
|
||||
"github.com/xtls/xray-core/common/platform/filesystem"
|
||||
"github.com/xtls/xray-core/features/outbound"
|
||||
"github.com/xtls/xray-core/features/routing"
|
||||
)
|
||||
@@ -105,11 +107,25 @@ func (rr *RoutingRule) BuildCondition() (Condition, error) {
|
||||
}
|
||||
|
||||
if len(rr.Domain) > 0 {
|
||||
matcher, err := NewMphMatcherGroup(rr.Domain)
|
||||
if err != nil {
|
||||
return nil, errors.New("failed to build domain condition with MphDomainMatcher").Base(err)
|
||||
var matcher *DomainMatcher
|
||||
var err error
|
||||
// Check if domain matcher cache is provided via environment
|
||||
domainMatcherPath := platform.NewEnvFlag(platform.MphCachePath).GetValue(func() string { return "" })
|
||||
|
||||
if domainMatcherPath != "" {
|
||||
matcher, err = GetDomainMatcherWithRuleTag(domainMatcherPath, rr.RuleTag)
|
||||
if err != nil {
|
||||
return nil, errors.New("failed to build domain condition from cached MphDomainMatcher").Base(err)
|
||||
}
|
||||
errors.LogDebug(context.Background(), "MphDomainMatcher loaded from cache for ", rr.RuleTag, " rule tag)")
|
||||
|
||||
} else {
|
||||
matcher, err = NewMphMatcherGroup(rr.Domain)
|
||||
if err != nil {
|
||||
return nil, errors.New("failed to build domain condition with MphDomainMatcher").Base(err)
|
||||
}
|
||||
errors.LogDebug(context.Background(), "MphDomainMatcher is enabled for ", len(rr.Domain), " domain rule(s)")
|
||||
}
|
||||
errors.LogDebug(context.Background(), "MphDomainMatcher is enabled for ", len(rr.Domain), " domain rule(s)")
|
||||
conds.Add(matcher)
|
||||
rr.Domain = nil
|
||||
runtime.GC()
|
||||
@@ -172,3 +188,20 @@ func (br *BalancingRule) Build(ohm outbound.Manager, dispatcher routing.Dispatch
|
||||
return nil, errors.New("unrecognized balancer type")
|
||||
}
|
||||
}
|
||||
|
||||
func GetDomainMatcherWithRuleTag(domainMatcherPath string, ruleTag string) (*DomainMatcher, error) {
|
||||
f, err := filesystem.NewFileReader(domainMatcherPath)
|
||||
if err != nil {
|
||||
return nil, errors.New("failed to load file: ", domainMatcherPath).Base(err)
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
g, err := LoadGeoSiteMatcher(f, ruleTag)
|
||||
if err != nil {
|
||||
return nil, errors.New("failed to load file:", domainMatcherPath).Base(err)
|
||||
}
|
||||
return &DomainMatcher{
|
||||
Matchers: g,
|
||||
}, nil
|
||||
|
||||
}
|
||||
|
||||
100
app/router/geosite_compact.go
Normal file
100
app/router/geosite_compact.go
Normal file
@@ -0,0 +1,100 @@
|
||||
package router
|
||||
|
||||
import (
	"bytes"
	"encoding/gob"
	"errors"
	"io"
	"runtime"

	"github.com/xtls/xray-core/common/strmatcher"
)
|
||||
|
||||
// geoSiteListGob is the on-disk (gob-encoded) layout of the compact geosite
// cache: per-country serialized MPH matchers, the dependency lists between
// country codes, and the host->IP table used by the DNS hosts feature.
type geoSiteListGob struct {
	Sites map[string][]byte   // country code -> serialized matcher (see SerializeDomainMatcher)
	Deps  map[string][]string // country code -> codes whose matchers it also includes
	Hosts map[string][]string // host pattern -> IP strings; "_ORDER" lists pattern order
}
|
||||
|
||||
func SerializeGeoSiteList(sites []*GeoSite, deps map[string][]string, hosts map[string][]string, w io.Writer) error {
|
||||
data := geoSiteListGob{
|
||||
Sites: make(map[string][]byte),
|
||||
Deps: deps,
|
||||
Hosts: hosts,
|
||||
}
|
||||
|
||||
for _, site := range sites {
|
||||
if site == nil {
|
||||
continue
|
||||
}
|
||||
var buf bytesWriter
|
||||
if err := SerializeDomainMatcher(site.Domain, &buf); err != nil {
|
||||
return err
|
||||
}
|
||||
data.Sites[site.CountryCode] = buf.Bytes()
|
||||
}
|
||||
|
||||
return gob.NewEncoder(w).Encode(data)
|
||||
}
|
||||
|
||||
// bytesWriter is a minimal append-only io.Writer over a byte slice.
// NOTE(review): this duplicates bytes.Buffer from the standard library;
// consider replacing it at the call site.
type bytesWriter struct {
	data []byte
}

// Write appends p to the accumulated buffer; it never fails.
func (w *bytesWriter) Write(p []byte) (n int, err error) {
	w.data = append(w.data, p...)
	return len(p), nil
}

// Bytes returns the accumulated bytes without copying.
func (w *bytesWriter) Bytes() []byte {
	return w.data
}
|
||||
|
||||
func LoadGeoSiteMatcher(r io.Reader, countryCode string) (strmatcher.IndexMatcher, error) {
|
||||
var data geoSiteListGob
|
||||
if err := gob.NewDecoder(r).Decode(&data); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return loadWithDeps(&data, countryCode, make(map[string]bool))
|
||||
}
|
||||
|
||||
// loadWithDeps resolves the matcher for code, recursively folding in the
// matchers of every dependency listed in data.Deps. visited guards against
// cyclic dependency chains.
func loadWithDeps(data *geoSiteListGob, code string, visited map[string]bool) (strmatcher.IndexMatcher, error) {
	if visited[code] {
		return nil, errors.New("cyclic dependency")
	}
	visited[code] = true

	var matchers []strmatcher.IndexMatcher

	if siteData, ok := data.Sites[code]; ok {
		m, err := NewDomainMatcherFromBuffer(siteData)
		// NOTE(review): a corrupt site entry is silently skipped here —
		// confirm that dropping it (rather than failing the load) is intended.
		if err == nil {
			matchers = append(matchers, m)
		}
	}

	if deps, ok := data.Deps[code]; ok {
		for _, dep := range deps {
			m, err := loadWithDeps(data, dep, visited)
			// NOTE(review): dependency errors (including the cyclic-dependency
			// error above) are swallowed; only the "no matcher at all" case
			// below surfaces a failure. Also, visited is never cleared, so a
			// diamond dependency is loaded only once — verify both are meant.
			if err == nil {
				matchers = append(matchers, m)
			}
		}
	}

	if len(matchers) == 0 {
		return nil, errors.New("matcher not found for: " + code)
	}
	if len(matchers) == 1 {
		return matchers[0], nil
	}
	// Reclaim decode-time garbage before handing back the combined group.
	runtime.GC()
	return &strmatcher.IndexMatcherGroup{Matchers: matchers}, nil
}
|
||||
func LoadGeoSiteHosts(r io.Reader) (map[string][]string, error) {
|
||||
var data geoSiteListGob
|
||||
if err := gob.NewDecoder(r).Decode(&data); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return data.Hosts, nil
|
||||
}
|
||||
Reference in New Issue
Block a user