This commit is contained in:
Alex Yang
2026-03-30 01:04:46 +08:00
parent 050aa421b1
commit f627244b8f
5978 changed files with 1502187 additions and 2947 deletions
+83 -9
View File
@@ -85,16 +85,90 @@ go build -o dns-server main.go
### Configuration file format
The configuration file uses JSON format and is located at `config.json`
The configuration file uses INI format and is located at `config.ini`
```json
{
"ListenPort": 53,
"HTTPPort": 8080,
"StatsFile": "data/stats.json",
"SaveInterval": 300,
"MaxQueryLogs": 1000
}
```
```ini
# DNS server configuration file
# Format: INI, comments use #
[dns]
# DNS server listening port
port = 53
# Upstream DNS server list, comma-separated
upstreamDNS = 223.5.5.5:53, 223.6.6.6:53
# DNSSEC-specific server list, comma-separated
dnssecUpstreamDNS = 8.8.8.8:53, 1.1.1.1:53
# Data save interval (seconds)
saveInterval = 300
# DNS cache expiry time (minutes)
cacheTTL = 30
# Whether to enable DNSSEC support
enableDNSSEC = true
# Query mode: parallel (parallel requests), fastest-ip (fastest IP address)
queryMode = parallel
# Query timeout (milliseconds)
queryTimeout = 5000
# Whether to enable the fast-return mechanism
enableFastReturn = true
# Domain patterns exempt from DNSSEC validation, comma-separated
noDNSSECDomains =
# Whether to enable IPv6 resolution (AAAA records)
enableIPv6 = false
# Cache mode: memory (in-memory cache), file (file-backed cache)
cacheMode = memory
# Cache size limit (MB)
cacheSize = 100
# Maximum cache TTL (minutes)
maxCacheTTL = 120
# Minimum cache TTL (minutes)
minCacheTTL = 5
[http]
# HTTP console listening port
port = 8080
# HTTP console listening address
host = 0.0.0.0
# Whether to enable the API
enableAPI = true
# Login username
username = admin
# Login password
password = admin
[shield]
# Blocklist update interval (seconds)
updateInterval = 3600
# Block method: NXDOMAIN, refused, emptyIP, customIP
blockMethod = NXDOMAIN
# Custom block IP, used when blockMethod is "customIP"
customBlockIP =
# Counter data save interval (seconds)
statsSaveInterval = 60
# Blacklist configuration
# Format: blacklist_<name> = URL,enabled
blacklist_AdGuard_DNS_filter = https://gitea.amazehome.xyz/AMAZEHOME/hosts-and-filters/raw/branch/main/filter.txt,true
blacklist_Adaway_Default_Blocklist = https://gitea.amazehome.xyz/AMAZEHOME/hosts-and-Filters/raw/branch/main/hosts/adaway.txt,true
blacklist_CHN_anti_AD = https://gitea.amazehome.xyz/AMAZEHOME/hosts-and-Filters/raw/branch/main/list/easylist.txt,true
blacklist_My_GitHub_Rules = https://gitea.amazehome.xyz/AMAZEHOME/hosts-and-filters/raw/branch/main/rules/costomize.txt,true
[gfwList]
# Target IP address for GFWList domain resolution
ip = 127.0.0.1
# GFWList rules file path
content = ./data/gfwlist.txt
# Whether to enable the GFWList feature
enabled = true
[log]
# Log level: debug, info, warn, error
level = debug
# Maximum log file size (MB)
maxSize = 100
# Maximum number of log file backups
maxBackups = 10
# Maximum log retention (days)
maxAge = 30
```
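The server parses this file with the gopkg.in/ini.v1 package that this commit adds to go.mod. A minimal sketch of reading it the same way `LoadConfig` does (file name and keys taken from the example above):

```go
package main

import (
	"fmt"

	"gopkg.in/ini.v1"
)

func main() {
	cfg, err := ini.Load("config.ini")
	if err != nil {
		panic(err)
	}
	// MustInt/MustString fall back to the given default when a key is absent.
	port := cfg.Section("dns").Key("port").MustInt(53)
	mode := cfg.Section("dns").Key("queryMode").MustString("parallel")
	fmt.Printf("dns port=%d queryMode=%s\n", port, mode)
}
```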
## Usage
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
+5
View File
@@ -0,0 +1,5 @@
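# Note: with CGO_ENABLED=1, building for windows/amd64 from a non-Windows host
# requires a Windows-targeting C compiler (e.g. CC=x86_64-w64-mingw32-gcc);
# CC=gcc assumes the build is run on Windows itself.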
CGO_ENABLED=1 \
GOOS=windows \
GOARCH=amd64 \
CC=gcc \
go build -o dns-server.exe main.go
+103
View File
@@ -0,0 +1,103 @@
# DNS server configuration file
# Format: INI, comments use #
[dns]
# DNS server listening port
port = 53
# Upstream DNS server list, comma-separated
upstreamDNS = 10.35.10.200, 106.14.121.141
# DNSSEC-specific server list, comma-separated
dnssecUpstreamDNS = 208.67.220.220, 208.67.222.222
# Data save interval (seconds)
saveInterval = 10
# DNS cache expiry time (minutes)
cacheTTL = 60
# Whether to enable DNSSEC support
enableDNSSEC = false
# Query mode: parallel (parallel requests), fastest-ip (fastest IP address)
queryMode = parallel
# Query timeout (milliseconds)
queryTimeout = 500
# Whether to enable the fast-return mechanism
enableFastReturn = true
# Domain patterns exempt from DNSSEC validation, comma-separated
noDNSSECDomains = amazehome.cn, addr.arpa, amazehome.xyz, .cn
# Whether to enable IPv6 resolution (AAAA records)
enableIPv6 = false
# Cache mode: memory (in-memory cache), file (file-backed cache)
cacheMode = file
# Cache size limit (MB)
cacheSize = 100
# Maximum cache TTL (minutes)
maxCacheTTL = 60
# Minimum cache TTL (minutes)
minCacheTTL = 30
# Domain-specific DNS server configuration
# Format: domain_<match string> = DNS server 1, DNS server 2
domain_addr.arpa = 10.35.10.200:53
domain_akadns = 10.35.10.200:53
domain_akamai = 10.35.10.200:53
domain_amazehome.cn = 10.35.10.200:53
domain_amazehome.xyz = 10.35.10.200:53
domain_microsoft.com = 10.35.10.200:53
#domain_steam.com = 10.35.10.70:53
#domain_steamcontent.com = 10.35.10.70:53
[http]
# HTTP console listening port
port = 8081
# HTTP console listening address
host = 0.0.0.0
# Whether to enable the API
enableAPI = true
# Login username
username = admin
# Login password
password = admin
[shield]
# Blocklist update interval (seconds)
updateInterval = 3600
# Block method: NXDOMAIN, refused, emptyIP, customIP
blockMethod = NXDOMAIN
# Custom block IP, used when blockMethod is "customIP"
customBlockIP = 0.0.0.2
# Counter data save interval (seconds)
statsSaveInterval = 60
# Blacklist configuration
# Format: blacklist_<name> = URL,enabled
blacklist_AdGuard_DNS_filter = https://gitea.amazehome.xyz/AMAZEHOME/hosts-and-filters/raw/branch/main/filter.txt,true
blacklist_Adaway_Default_Blocklist = https://gitea.amazehome.xyz/AMAZEHOME/hosts-and-Filters/raw/branch/main/hosts/adaway.txt,true
blacklist_CHN_anti_AD = https://gitea.amazehome.xyz/AMAZEHOME/hosts-and-Filters/raw/branch/main/list/easylist.txt,true
blacklist_My_GitHub_Rules = https://gitea.amazehome.xyz/AMAZEHOME/hosts-and-filters/raw/branch/main/rules/costomize.txt,true
blacklist_CNList = https://gitea.amazehome.xyz/AMAZEHOME/hosts-and-filters/raw/branch/main/list/china.list,false
blacklist_大圣净化 = http://gitea.amazehome.xyz/AMAZEHOME/hosts-and-Filters/raw/branch/main/hosts/dsjh.txt,true
blacklist_Hate_and_Junk = http://gitea.amazehome.xyz/AMAZEHOME/hosts-and-filters/raw/branch/main/hate-and-junk-extended.txt,true
blacklist_My_Gitlab_Hosts = http://gitea.amazehome.xyz/AMAZEHOME/hosts-and-filters/raw/branch/main/hosts/costomize.txt,true
blacklist_Anti_Remote_Requests = http://gitea.amazehome.xyz/AMAZEHOME/hosts-and-filters/raw/branch/main/hosts/anti-remoterequests.txt,true
blacklist_URL_Based = http://gitea.amazehome.xyz/AMAZEHOME/hosts-and-filters/raw/branch/main/rules/url-based-adguard.txt,true
blacklist_My_Gitlab_A_T_Rules = http://gitea.amazehome.xyz/AMAZEHOME/hosts-and-filters/raw/branch/main/rules/ads-and-trackers.txt,true
blacklist_My_Gitlab_Malware_List = http://gitea.amazehome.xyz/AMAZEHOME/hosts-and-filters/raw/branch/main/rules/malware.txt,true
blacklist_hosts = http://gitea.amazehome.xyz/AMAZEHOME/hosts-and-Filters/raw/branch/main/hosts/costomize.txt,true
blacklist_AWAvenue_Ads_Rule = http://gitea.amazehome.xyz/AMAZEHOME/hosts-and-Filters/raw/branch/main/rules/AWAvenue-Ads-Rule.txt,true
blacklist_诈骗域名 = https://gitea.amazehome.xyz/AMAZEHOME/hosts-and-filters/raw/branch/main/rules/cheat.txt,true
[gfwList]
# Target IP address for GFWList domain resolution
ip = 127.0.0.1
# GFWList rules file path
content =
# Whether to enable the GFWList feature
enabled = false
[log]
# Log level: debug, info, warn, error
level = debug
# Maximum log file size (MB)
maxSize = 100
# Maximum number of log file backups
maxBackups = 10
# Maximum log retention (days)
maxAge = 30
-163
View File
@@ -1,163 +0,0 @@
{
"dns": {
"port": 53,
"upstreamDNS": [
"10.35.10.200",
"223.5.5.5",
"223.6.6.6"
],
"dnssecUpstreamDNS": [
"208.67.220.220",
"208.67.222.222"
],
"saveInterval": 10,
"cacheTTL": 60,
"enableDNSSEC": false,
"queryMode": "parallel",
"queryTimeout": 500,
"enableFastReturn": true,
"domainSpecificDNS": {
"addr.arpa": [
"10.35.10.200:53"
],
"akadns": [
"223.5.5.5:53"
],
"akamai": [
"223.5.5.5:53"
],
"amazehome.cn": [
"10.35.10.200:53"
],
"amazehome.xyz": [
"10.35.10.200:53"
],
"microsoft.com": [
"4.2.2.1:53"
],
"steam": [
"223.5.5.5:53"
]
},
"noDNSSECDomains": [
"amazehome.cn",
"addr.arpa",
"amazehome.xyz",
".cn"
],
"enableIPv6": false,
"cacheMode": "file",
"cacheSize": 100,
"maxCacheTTL": 60,
"minCacheTTL": 30,
"cacheFilePath": "data/cache.json"
},
"http": {
"port": 8081,
"host": "0.0.0.0",
"enableAPI": true,
"username": "admin",
"password": "admin"
},
"shield": {
"blacklists": [
{
"name": "AdGuard DNS filter",
"url": "https://gitea.amazehome.xyz/AMAZEHOME/hosts-and-filters/raw/branch/main/filter.txt",
"enabled": true,
"lastUpdateTime": "2026-01-23T01:04:32.424Z"
},
{
"name": "Adaway Default Blocklist",
"url": "https://gitea.amazehome.xyz/AMAZEHOME/hosts-and-Filters/raw/branch/main/hosts/adaway.txt",
"enabled": true,
"lastUpdateTime": "2025-11-28T15:36:43.086Z"
},
{
"name": "CHN-anti-AD",
"url": "https://gitea.amazehome.xyz/AMAZEHOME/hosts-and-Filters/raw/branch/main/list/easylist.txt",
"enabled": true,
"lastUpdateTime": "2025-12-16T08:50:10.180Z"
},
{
"name": "My GitHub Rules",
"url": "https://gitea.amazehome.xyz/AMAZEHOME/hosts-and-filters/raw/branch/main/rules/costomize.txt",
"enabled": true,
"lastUpdateTime": "2026-01-17T19:04:34.551Z"
},
{
"name": "CNList",
"url": "https://gitea.amazehome.xyz/AMAZEHOME/hosts-and-filters/raw/branch/main/list/china.list",
"enabled": false
},
{
"name": "大圣净化",
"url": "http://gitea.amazehome.xyz/AMAZEHOME/hosts-and-Filters/raw/branch/main/hosts/dsjh.txt",
"enabled": true
},
{
"name": "Hate \u0026 Junk",
"url": "http://gitea.amazehome.xyz/AMAZEHOME/hosts-and-filters/raw/branch/main/hate-and-junk-extended.txt",
"enabled": true,
"lastUpdateTime": "2025-12-21T10:46:43.522Z"
},
{
"name": "My Gitlab Hosts",
"url": "http://gitea.amazehome.xyz/AMAZEHOME/hosts-and-filters/raw/branch/main/hosts/costomize.txt",
"enabled": true,
"lastUpdateTime": "2025-12-18T10:39:39.333Z"
},
{
"name": "Anti Remote Requests",
"url": "http://gitea.amazehome.xyz/AMAZEHOME/hosts-and-filters/raw/branch/main/hosts/anti-remoterequests.txt",
"enabled": true
},
{
"name": "URL-Based.txt",
"url": "http://gitea.amazehome.xyz/AMAZEHOME/hosts-and-filters/raw/branch/main/rules/url-based-adguard.txt",
"enabled": true
},
{
"name": "My Gitlab A/T Rules",
"url": "http://gitea.amazehome.xyz/AMAZEHOME/hosts-and-filters/raw/branch/main/rules/ads-and-trackers.txt",
"enabled": true,
"lastUpdateTime": "2025-12-24T07:11:07.334Z"
},
{
"name": "My Gitlab Malware List",
"url": "http://gitea.amazehome.xyz/AMAZEHOME/hosts-and-filters/raw/branch/main/rules/malware.txt",
"enabled": true
},
{
"name": "hosts",
"url": "http://gitea.amazehome.xyz/AMAZEHOME/hosts-and-Filters/raw/branch/main/hosts/costomize.txt",
"enabled": true
},
{
"name": "AWAvenue-Ads-Rule",
"url": "http://gitea.amazehome.xyz/AMAZEHOME/hosts-and-Filters/raw/branch/main/rules/AWAvenue-Ads-Rule.txt",
"enabled": true
},
{
"name": "诈骗域名",
"url": "https://gitea.amazehome.xyz/AMAZEHOME/hosts-and-filters/raw/branch/main/rules/cheat.txt",
"enabled": true
}
],
"updateInterval": 3600,
"blockMethod": "NXDOMAIN",
"customBlockIP": "0.0.0.2",
"statsSaveInterval": 60
},
"gfwList": {
"ip": "",
"content": "",
"enabled": false
},
"log": {
"level": "debug",
"maxSize": 100,
"maxBackups": 10,
"maxAge": 30
}
}
+117 -104
View File
@@ -1,14 +1,15 @@
package config
import (
"encoding/json"
"io/ioutil"
"strings"
"gopkg.in/ini.v1"
)
// DomainSpecificDNS holds the domain-specific DNS server configuration
// Format: {"domainMatch": ["dns1", "dns2"]}
// domainMatch: a match string; when the queried domain contains it, the corresponding DNS servers are used
// dns1, dns2: the list of DNS servers used to resolve matching domains
// INI format: domain_<match string> = DNS server 1, DNS server 2
// Example: domain_google.com = 8.8.8.8:53, 8.8.4.4:53
type DomainSpecificDNS map[string][]string
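// A hypothetical lookup sketch based on the comment above (the server's real
// lookup code is outside this hunk): return the first configured server list
// whose match string appears anywhere in the queried domain. Go map iteration
// order is unspecified, so a real implementation would need a deterministic
// tie-break when several match strings apply.
//
//	func pickServers(d DomainSpecificDNS, domain string, fallback []string) []string {
//		for match, servers := range d {
//			if strings.Contains(domain, match) {
//				return servers
//			}
//		}
//		return fallback
//	}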
@@ -86,113 +87,65 @@ type Config struct {
// LoadConfig loads the configuration file
func LoadConfig(path string) (*Config, error) {
// Read the configuration file
data, err := ioutil.ReadFile(path)
if err != nil {
return nil, err
}
var config Config
err = json.Unmarshal(data, &config)
// Parse the INI file
cfg, err := ini.Load(data)
if err != nil {
return nil, err
}
// Set default values
if config.DNS.Port == 0 {
config.DNS.Port = 53
// Initialize the configuration
config := &Config{
DNS: DNSConfig{
Port: cfg.Section("dns").Key("port").MustInt(53),
SaveInterval: cfg.Section("dns").Key("saveInterval").MustInt(300),
CacheTTL: cfg.Section("dns").Key("cacheTTL").MustInt(30),
EnableDNSSEC: cfg.Section("dns").Key("enableDNSSEC").MustBool(true),
QueryMode: cfg.Section("dns").Key("queryMode").MustString("parallel"),
QueryTimeout: cfg.Section("dns").Key("queryTimeout").MustInt(500),
EnableFastReturn: cfg.Section("dns").Key("enableFastReturn").MustBool(true),
EnableIPv6: cfg.Section("dns").Key("enableIPv6").MustBool(false),
CacheMode: cfg.Section("dns").Key("cacheMode").MustString("memory"),
CacheSize: cfg.Section("dns").Key("cacheSize").MustInt(100),
MaxCacheTTL: cfg.Section("dns").Key("maxCacheTTL").MustInt(120),
MinCacheTTL: cfg.Section("dns").Key("minCacheTTL").MustInt(5),
CacheFilePath: "data/cache.json", // fixed path
UpstreamDNS: parseStringList(cfg.Section("dns").Key("upstreamDNS").MustString("223.5.5.5:53,223.6.6.6:53")),
DNSSECUpstreamDNS: parseStringList(cfg.Section("dns").Key("dnssecUpstreamDNS").MustString("8.8.8.8:53,1.1.1.1:53")),
NoDNSSECDomains: parseStringList(cfg.Section("dns").Key("noDNSSECDomains").MustString("")),
DomainSpecificDNS: parseDomainSpecificDNS(cfg.Section("dns")),
},
HTTP: HTTPConfig{
Port: cfg.Section("http").Key("port").MustInt(8080),
Host: cfg.Section("http").Key("host").MustString("0.0.0.0"),
EnableAPI: cfg.Section("http").Key("enableAPI").MustBool(true),
Username: cfg.Section("http").Key("username").MustString("admin"),
Password: cfg.Section("http").Key("password").MustString("admin"),
},
Shield: ShieldConfig{
UpdateInterval: cfg.Section("shield").Key("updateInterval").MustInt(3600),
BlockMethod: cfg.Section("shield").Key("blockMethod").MustString("NXDOMAIN"),
CustomBlockIP: cfg.Section("shield").Key("customBlockIP").MustString(""),
StatsSaveInterval: cfg.Section("shield").Key("statsSaveInterval").MustInt(300),
Blacklists: parseBlacklists(cfg.Section("shield")),
},
GFWList: GFWListConfig{
IP: cfg.Section("gfwList").Key("ip").MustString("127.0.0.1"),
Content: cfg.Section("gfwList").Key("content").MustString(""),
Enabled: cfg.Section("gfwList").Key("enabled").MustBool(false),
},
Log: LogConfig{
Level: cfg.Section("log").Key("level").MustString("info"),
MaxSize: cfg.Section("log").Key("maxSize").MustInt(100),
MaxBackups: cfg.Section("log").Key("maxBackups").MustInt(10),
MaxAge: cfg.Section("log").Key("maxAge").MustInt(30),
},
}
if len(config.DNS.UpstreamDNS) == 0 {
config.DNS.UpstreamDNS = []string{"223.5.5.5:53", "223.6.6.6:53"}
}
if config.DNS.SaveInterval == 0 {
config.DNS.SaveInterval = 300 // save every 5 minutes by default
}
// Default DNS cache TTL is 30 minutes
if config.DNS.CacheTTL == 0 {
config.DNS.CacheTTL = 30 // 30 minutes by default
}
// Cache mode default
if config.DNS.CacheMode == "" {
config.DNS.CacheMode = "memory" // in-memory cache by default
}
// Cache size default (100 MB)
if config.DNS.CacheSize == 0 {
config.DNS.CacheSize = 100 // 100 MB by default
}
// Maximum cache TTL default (120 minutes)
if config.DNS.MaxCacheTTL == 0 {
config.DNS.MaxCacheTTL = 120 // 120 minutes by default
}
// Minimum cache TTL default (5 minutes)
if config.DNS.MinCacheTTL == 0 {
config.DNS.MinCacheTTL = 5 // 5 minutes by default
}
// The cache file path is fixed at data/cache.json and is no longer read from the config file
config.DNS.CacheFilePath = "data/cache.json"
// DNSSEC default configuration
// If not set in the config file, DNSSEC support is enabled by default
// json.Unmarshal sets unset boolean fields to false, so an explicit check would be needed
// But since this is a new field, we keep the default of true for backward compatibility
// Note: if the user explicitly sets false in the config file, false is used
if !config.DNS.EnableDNSSEC {
// Check whether the user really set false, or whether this is just the zero value
// Since JSON booleans default to false, the two cannot be told apart directly
// So we keep the current behavior and let users set false via the config file
}
// IPv6 default configuration
// Note: a default cannot simply be applied, because JSON booleans default to false
// We would need to know whether the field was actually present in the config file
// Since we cannot tell, this is left as-is so users can set false via the config file
// DNSSEC-specific upstream server defaults
if len(config.DNS.DNSSECUpstreamDNS) == 0 {
config.DNS.DNSSECUpstreamDNS = []string{"8.8.8.8:53", "1.1.1.1:53"}
}
// Query mode default
if config.DNS.QueryMode == "" {
config.DNS.QueryMode = "parallel" // use parallel request mode by default
}
// Query timeout default (milliseconds)
if config.DNS.QueryTimeout == 0 {
config.DNS.QueryTimeout = 500 // default timeout of 500ms
}
// Fast-return mechanism default
if config.DNS.EnableFastReturn == false {
config.DNS.EnableFastReturn = true // fast return enabled by default
}
// Domain-specific DNS server configuration default
if config.DNS.DomainSpecificDNS == nil {
config.DNS.DomainSpecificDNS = make(DomainSpecificDNS) // empty map by default
}
if config.HTTP.Port == 0 {
config.HTTP.Port = 8080
}
if config.HTTP.Host == "" {
config.HTTP.Host = "0.0.0.0"
}
// Default username and password; admin/admin is used when not configured
if config.HTTP.Username == "" {
config.HTTP.Username = "admin"
}
if config.HTTP.Password == "" {
config.HTTP.Password = "admin"
}
if config.Shield.UpdateInterval == 0 {
config.Shield.UpdateInterval = 3600
}
if config.Shield.BlockMethod == "" {
config.Shield.BlockMethod = "NXDOMAIN" // default block method is NXDOMAIN
}
if config.Shield.StatsSaveInterval == 0 {
config.Shield.StatsSaveInterval = 300 // save every 5 minutes by default
}
// GFWList default configuration
if config.GFWList.IP == "" {
config.GFWList.IP = "127.0.0.1" // GFWList resolves to 127.0.0.1 by default
}
// GFWList is enabled by default (only when not explicitly set to false in the config file)
// Note: if the user explicitly sets false in the config file, it stays false
// If the blacklist list is empty, add some default blacklists
if len(config.Shield.Blacklists) == 0 {
@@ -203,9 +156,69 @@ func LoadConfig(path string) (*Config, error) {
{Name: "My GitHub Rules", URL: "https://gitea.amazehome.xyz/AMAZEHOME/hosts-and-filters/raw/branch/main/rules/costomize.txt", Enabled: true},
}
}
if config.Log.Level == "" {
config.Log.Level = "info"
return config, nil
}
// parseStringList parses a comma-separated list of strings
func parseStringList(s string) []string {
if s == "" {
return []string{}
}
return &config, nil
// Split the string
parts := []string{}
for _, part := range strings.Split(s, ",") {
part = strings.TrimSpace(part)
if part != "" {
parts = append(parts, part)
}
}
return parts
}
// parseDomainSpecificDNS parses the domain-specific DNS server configuration
func parseDomainSpecificDNS(section *ini.Section) DomainSpecificDNS {
domainDNS := make(DomainSpecificDNS)
// Iterate over all keys, looking for ones prefixed with "domain_"
for _, key := range section.Keys() {
if strings.HasPrefix(key.Name(), "domain_") {
domain := strings.TrimPrefix(key.Name(), "domain_")
dnsServers := parseStringList(key.String())
if len(dnsServers) > 0 {
domainDNS[domain] = dnsServers
}
}
}
return domainDNS
}
// parseBlacklists parses the blacklist configuration
func parseBlacklists(section *ini.Section) []BlacklistEntry {
blacklists := []BlacklistEntry{}
// Iterate over all keys, looking for ones prefixed with "blacklist_"
for _, key := range section.Keys() {
if strings.HasPrefix(key.Name(), "blacklist_") {
// Extract the blacklist name and attributes
name := strings.TrimPrefix(key.Name(), "blacklist_")
value := key.String()
// Parse the blacklist URL and enabled state; format: url,enabled
parts := strings.Split(value, ",")
if len(parts) >= 2 {
url := strings.TrimSpace(parts[0])
enabled := strings.TrimSpace(parts[1]) == "true"
blacklists = append(blacklists, BlacklistEntry{
Name: name,
URL: url,
Enabled: enabled,
})
}
}
}
return blacklists
}
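A minimal sketch exercising the two helpers above; `ini.Load` also accepts raw `[]byte` input, so no file is needed. The `[shield]` fragment and the expected results follow the format comments in the code:

```go
src := []byte("[shield]\nblacklist_Example = https://example.com/filter.txt,true\n")
cfg, err := ini.Load(src)
if err != nil {
	panic(err)
}
lists := parseBlacklists(cfg.Section("shield"))
// lists[0] == BlacklistEntry{Name: "Example", URL: "https://example.com/filter.txt", Enabled: true}
servers := parseStringList("223.5.5.5:53, 223.6.6.6:53")
// servers == []string{"223.5.5.5:53", "223.6.6.6:53"}
```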
+1406
View File
File diff suppressed because it is too large Load Diff
+59
View File
@@ -0,0 +1,59 @@
# DNS Server Hosts File
# Generated by DNS Server
10.35.10.40 www.linkedin.cn
36.25.250.217 speed.netmon.360safe.com
10.35.10.40 store.steampowered.com
10.35.10.40 www.google.com.hk
10.35.10.40 release-assets.githubusercontent.com
10.35.10.40 cdn.v2ex.com
10.35.10.40 ngx.download.nvidia.com
61.147.112.100 ip.cn
10.35.10.40 github.com
10.35.10.40 google.com
10.35.10.40 prod.otel.kaizen.nvidia.com
10.35.10.40 ghcr.io
106.63.24.133 pdown.stat.360safe.com
10.35.10.40 app.fing.com
10.35.10.40 search.brave.com
10.35.10.40 www.google.com.fr
10.35.10.40 cn.pornhub.com
192.135.135.33 vc.njszyyvs.vm
10.35.10.40 maps.google.com
10.35.10.40 www.epochtimes.com
10.35.10.40 play.google.com
10.35.10.40 registry.docker.io
10.35.10.40 www.google.com
10.35.10.40 registry-1.docker.io
10.35.10.40 docker.io
47.96.156.45 api-std.sunlogin.oray.com
10.35.10.40 xvideos.red
10.35.10.40 download.nextcloud.com
10.35.10.40 challenges.cloudflare.com
10.35.10.40 payment-website-pci.ol.epicgames.com
10.35.10.40 steamcommunity.com
10.35.10.40 www.pornhub.com
10.35.10.40 ogads-pa.clients6.google.com
10.35.10.40 raw.githubusercontent.com
10.35.10.40 registry.ollama.ai
10.35.10.40 update.googleapis.com
10.35.10.40 waa-pa.clients6.google.com
10.35.10.40 codeload.github.com
10.35.10.40 map.google.com
10.35.10.40 linux.do
10.35.10.40 pornhub.com
10.35.10.40 immersivetranslate.com
10.35.10.40 proxy.golang.org
10.35.10.40 xvideos.com
111.206.127.27 sdns.360.net
10.35.10.40 msdl.microsoft.com
10.35.10.40 api2.immersivetranslate.com
10.35.10.40 github.io
10.35.10.40 forum.paradoxplaza.com
10.35.10.40 clients3.google.com
10.35.10.40 www.xvideos.com
10.35.10.40 www.v2ex.com
::1 localhost
10.35.10.40 huggingface.co
10.35.10.40 www.xvideos.red
192.144.202.10 vcenter.nbxcmc.cn
+254
View File
@@ -0,0 +1,254 @@
# DNS Server Hosts File
# Generated by DNS Server
10.35.10.200 cdn.akamai.steamstatic.com
10.35.10.200 badges.roblox.com
10.35.10.200 www.fandom.com
10.35.10.200 gql.twitch.tv
::1 localhost
10.35.10.200 gist.github.com
10.35.10.200 ucaf37cba09486e69c215bdfe2e2.dl.dropboxusercontent.com
10.35.10.200 www.mega.nz
10.35.10.200 translate.google.com
10.35.10.200 skyapi.onedrive.live.com
10.35.10.200 www.youtube.com
10.35.10.200 vercel.app
10.35.10.200 fufufu23.imgur.com
10.35.10.200 uc957f785cc03b9b273234fd24f9.dl.dropboxusercontent.com
10.35.10.200 apresolve.spotify.com
10.35.10.200 id.twitch.tv
10.35.10.200 thumbnails.roblox.com
10.35.10.200 pubster.twitch.tv
10.35.10.200 api-partner.spotify.com
10.35.10.200 api.github.com
10.35.10.200 uc07aaf207f16a978a3dbc24a1c9.dl.dropboxusercontent.com
10.35.10.200 api.twitch.tv
10.35.10.200 aleksi.artstation.com
10.35.10.200 assets.help.twitch.tv
10.35.10.200 i.ytimg.com
10.35.10.200 storage.live.com
10.35.10.200 dashboard.twitch.tv
10.35.10.200 develop.roblox.com
10.35.10.200 accounts.youtube.com
10.35.10.200 raw.githubusercontent.com
10.35.10.200 s.pinimg.com
10.35.10.200 mega.co.nz
10.35.10.200 media.steampowered.com
10.35.10.200 gds-vhs-drops-campaign-images.twitch.tv
10.35.10.200 yourihoek.artstation.com
10.35.10.200 archiveprogram.github.com
10.35.10.200 img.youtube.com
10.35.10.200 files.nexus-cdn.com
10.35.10.200 mega.io
10.35.10.200 www.github.io
10.35.10.200 epic-games-api.arkoselabs.com
10.35.10.200 beta.mod.io
10.35.10.200 g.cdn1.mega.co.nz
10.35.10.200 thepoy.imgur.com
10.35.10.200 uc4b4b602d4b01e27782f92ce984.dl.dropboxusercontent.com
10.35.10.200 origin-a.akamaihd.net
10.35.10.200 login.steampowered.com
10.35.10.200 fonts.googleapis.com
10.35.10.200 ucb277f9a438d6b3f4ea2147ac26.dl.dropboxusercontent.com
10.35.10.200 g.api.mega.co.nz
10.35.10.200 www.dropbox.com
10.35.10.200 prod-ireland.arkoselabs.com
10.35.10.200 contacts.roblox.com
10.35.10.200 github.dev
10.35.10.200 presence.roblox.com
10.35.10.200 resources.github.com
10.35.10.200 pinterest.com
10.35.10.200 twitch.tv
10.35.10.200 cdn-learning.artstation.com
10.35.10.200 store.akamai.steamstatic.com
10.35.10.200 versioncompatibility.api.roblox.com
10.35.10.200 github.com
10.35.10.200 ephemeralcounters.api.roblox.com
10.35.10.200 help.twitch.tv
10.35.10.200 dl.dropboxusercontent.com
10.35.10.200 imgs.hcaptcha.com
10.35.10.200 www.roblox.com
10.35.10.200 fonts.gstatic.com
10.35.10.200 apis.roblox.com
10.35.10.200 www.google.com
10.35.10.200 video-edge-00252e.pdx01.abs.hls.ttvnw.net
10.35.10.200 fandom.com
10.35.10.200 vod-storyboards.twitch.tv
10.35.10.200 t.imgur.com
10.35.10.200 aem.dropbox.com
10.35.10.200 nl.roblox.com
10.35.10.200 twostepverification.roblox.com
10.35.10.200 avatars1.githubusercontent.com
10.35.10.200 pinimg.com
10.35.10.200 cdnb.artstation.com
10.35.10.200 docs.hcaptcha.com
10.35.10.200 groups.roblox.com
10.35.10.200 vod-metro.twitch.tv
10.35.10.200 www.hcaptcha.com
10.35.10.200 uca3a40eb53259715309022eb9fd.dl.dropboxusercontent.com
10.35.10.200 extension-files.twitch.tv
10.35.10.200 mod.io
10.35.10.200 panels.twitch.tv
10.35.10.200 steamcommunity.com
10.35.10.200 spotify.com
10.35.10.200 local.steampp.net
10.35.10.200 auth.mod.io
10.35.10.200 cvp.twitch.tv
10.35.10.200 blog.mod.io
10.35.10.200 pages.github.com
10.35.10.200 t.email.roblox.com
10.35.10.200 camo.githubusercontent.com
10.35.10.200 blog.imgur.com
10.35.10.200 google.com
10.35.10.200 imgur.com
10.35.10.200 imgs3.hcaptcha.com
10.35.10.200 appcenter.ms
10.35.10.200 accounts.google.pl
10.35.10.200 cellcow.imgur.com
10.35.10.200 gravatar.com
10.35.10.200 metrics.roblox.com
10.35.10.200 accounts.google.com
10.35.10.200 abs.hls.ttvnw.net
10.35.10.200 notifications.roblox.com
10.35.10.200 clientsettings.api.roblox.com
10.35.10.200 accountsettings.roblox.com
10.35.10.200 staticstats.nexusmods.com
10.35.10.200 clips.twitch.tv
10.35.10.200 help.steampowered.com
10.35.10.200 roblox.com
10.35.10.200 githubapp.com
10.35.10.200 i.stack.imgur.com
10.35.10.200 private-user-images.githubusercontent.com
10.35.10.200 supervisor.ext-twitch.tv
10.35.10.200 checkout.steampowered.com
10.35.10.200 app.twitch.tv
10.35.10.200 blog.twitch.tv
10.35.10.200 countess.twitch.tv
10.35.10.200 rishablue.artstation.com
10.35.10.200 userstroage.mega.co.nz
10.35.10.200 client-event-reporter.twitch.tv
10.35.10.200 www.spotify.com
10.35.10.200 cf-files.nexusmods.com
10.35.10.200 community.steamstatic.com
10.35.10.200 www.imgur.com
10.35.10.200 dev.twitch.tv
10.35.10.200 link.twitch.tv
10.35.10.200 avatars.githubusercontent.com
10.35.10.200 supporter-files.nexus-cdn.com
10.35.10.200 users.nexusmods.com
10.35.10.200 player.twitch.tv
10.35.10.200 steamuserimages-a.akamaihd.net
10.35.10.200 google.com.hk
10.35.10.200 vod-secure.twitch.tv
10.35.10.200 www.google.com.hk
10.35.10.200 onedrive.live.com
10.35.10.200 trades.roblox.com
10.35.10.200 www.steamcommunity.com
10.35.10.200 api.imgur.com
10.35.10.200 update.greasyfork.org
10.35.10.200 www.artstation.com
10.35.10.200 steamcdn-a.akamaihd.net
10.35.10.200 gamejoin.roblox.com
10.35.10.200 staticdelivery.nexusmods.com
10.35.10.200 assets.twitch.tv
10.35.10.200 m.imgur.com
10.35.10.200 cdna.artstation.com
10.35.10.200 gds.google.com
10.35.10.200 music.twitch.tv
10.35.10.200 id-cdn.twitch.tv
10.35.10.200 binary.lge.modcdn.io
10.35.10.200 js.hcaptcha.com
10.35.10.200 gameinternationalization.roblox.com
10.35.10.200 nexusmods.com
10.35.10.200 https://accounts.google.com.hk/
10.35.10.200 realtime-signalr.roblox.com
10.35.10.200 themes.googleusercontent.com
10.35.10.200 community.akamai.steamstatic.com
10.35.10.200 clips-media-assets2.twitch.tv
10.35.10.200 trowel.twitch.tv
10.35.10.200 store.ubisoft.com
10.35.10.200 user-images.githubusercontent.com
10.35.10.200 us-west-2.uploads-regional.twitch.tv
10.35.10.200 mega.nz
10.35.10.200 newassets.hcaptcha.com
10.35.10.200 uploads.github.com
10.35.10.200 auth.roblox.com
10.35.10.200 education.github.com
10.35.10.200 translate.googleapis.com
10.35.10.200 githubusercontent.com
10.35.10.200 s.imgur.com
10.35.10.200 cdn.artstation.com
10.35.10.200 i.pinimg.com
10.35.10.200 ajax.googleapis.com
10.35.10.200 magazine.artstation.com
10.35.10.200 client-api.arkoselabs.com
10.35.10.200 ecsv2.roblox.com
10.35.10.200 play.google.com
10.35.10.200 avatars3.githubusercontent.com
10.35.10.200 discuss.dev.twitch.tv
10.35.10.200 passport.twitch.tv
10.35.10.200 github.githubassets.com
10.35.10.200 ww.artstation.com
10.35.10.200 github.io
10.35.10.200 hub.docker.com
10.35.10.200 open.spotify.com
10.35.10.200 www.gravatar.com
10.35.10.200 irc-ws.chat.twitch.tv
10.35.10.200 lh3.googleusercontent.com
10.35.10.200 games.roblox.com
10.35.10.200 users.roblox.com
10.35.10.200 g.static.mega.co.nz
10.35.10.200 www.twitch.tv
10.35.10.200 locale.roblox.com
10.35.10.200 p.imgur.com
10.35.10.200 economy.roblox.com
10.35.10.200 badges.twitch.tv
10.35.10.200 cdn.arkoselabs.com
10.35.10.200 translate.google.cn
10.35.10.200 onedrive.live
10.35.10.200 raw.github.com
10.35.10.200 aroll.artstation.com
10.35.10.200 uc9c83355d6aa8bc75f7f597c7d6.dl.dropboxusercontent.com
10.35.10.200 huggingface.co
10.35.10.200 greasyfork.org
10.35.10.200 avatars.akamai.steamstatic.com
10.35.10.200 assetgame.roblox.com
10.35.10.200 hcaptcha.com
10.35.10.200 web.roblox.com
10.35.10.200 avatars2.githubusercontent.com
10.35.10.200 api.mod.io
10.35.10.200 www.mega.co.nz
10.35.10.200 store.steampowered.com
10.35.10.200 support-assets.githubassets.com
10.35.10.200 artstation.com
10.35.10.200 www.nexusmods.com
10.35.10.200 docs.mod.io
10.35.10.200 translate-pa.googleapis.com
10.35.10.200 premium-files.nexus-cdn.com
10.35.10.200 in.appcenter.ms
10.35.10.200 chat.roblox.com
10.35.10.200 inspector.twitch.tv
10.35.10.200 i.imgur.com
10.35.10.200 objects.githubusercontent.com
10.35.10.200 assets.hcaptcha.com
10.35.10.200 myaccount.google.com
10.35.10.200 play-lh.googleusercontent.com
10.35.10.200 static.mod.io
10.35.10.200 maxcdn.bootstrapcdn.com
10.35.10.200 dya.artstation.com
10.35.10.200 www.pinterest.com
10.35.10.200 m.twitch.tv
10.35.10.200 pubsub-edge.twitch.tv
10.35.10.200 uc87442e427766fe8cf2a7a07827.dl.dropboxusercontent.com
10.35.10.200 friends.roblox.com
10.35.10.200 cloud.githubusercontent.com
10.35.10.200 www.google.com.pl
10.35.10.200 sm.pinimg.com
10.35.10.200 irc-ws-r.chat.twitch.tv
10.35.10.200 accounts.google.com.hk
10.35.10.200 stream.twitch.tv
10.35.10.200 api.steampowered.com
10.35.10.200 secure.gravatar.com
10.35.10.200 dropbox.com
10.35.10.200 ucc541451e9df780e40777d477eb.dl.dropboxusercontent.com
10.35.10.200 avatars0.githubusercontent.com
+256458
View File
File diff suppressed because it is too large Load Diff
@@ -0,0 +1,734 @@
!Title: AWAvenue Ads Rule
!--------------------------------------
!Total lines: 725
!Version: 1.5.5-release
!Homepage: https://github.com/TG-Twilight/AWAvenue-Ads-Rule
!License: https://github.com/TG-Twilight/AWAvenue-Ads-Rule/blob/main/LICENSE
||1010pic.com^
||16dd-advertise-1252317822.file.myqcloud.com^
||16dd-advertise-1252317822.image.myqcloud.com^
||8le8le.com^
||a0.app.xiaomi.com^
||aaid.umeng.com^
||abtest-ch.snssdk.com^
||ad-cache.dopool.com^
||ad-cdn.qingting.fm^
||ad-cmp.hismarttv.com^
||ad-download.hismarttv.com^
||ad-imp.hismarttv.com^
||ad-scope.com^
||ad-scope.com.cn^
||ad-sdk-config.youdao.com^
||ad-sdk.huxiu.com^
||ad.12306.cn^
||ad.51wnl.com^
||ad.bwton.com^
||ad.cctv.com^
||ad.cyapi.cn^
||ad.doubleclick.net^
||ad.partner.gifshow.com^
||ad.qingting.fm^
||ad.qq.com^
||ad.richmob.cn^
||ad.tencentmusic.com^
||ad.toutiao.com^
||ad.v3mh.com^
||ad.winrar.com.cn^
||ad.xelements.cn^
||ad.xiaomi.com^
||ad.ximalaya.com^
||ad.zijieapi.com^
||adapi.izuiyou.com^
||adapi.yynetwk.com^
||adashbc.ut.taobao.com^
||adc.hpplay.cn^
||adcdn.hpplay.cn^
||adcdn.tencentmusic.com^
||adclick.g.doubleclick.net^
||adclick.tencentmusic.com^
||adcolony.com^
||adexpo.tencentmusic.com^
||adfilter.imtt.qq.com^
||adfstat.yandex.ru^
||adguanggao.eee114.com^
||adjust.cn^
||adjust.com^
||adkwai.com^
||adlink-api.huan.tv^
||adm.funshion.com^
||ads-api-o.api.leiniao.com^
||ads-api.tiktok.com^
||ads-api.twitter.com^
||ads-img-qc.xhscdn.com^
||ads-jp.tiktok.com^
||ads-marketing-vivofs.vivo.com.cn^
||ads-sg.tiktok.com^
||ads-us.tiktok.com^
||ads-video-al.xhscdn.com^
||ads-video-qc.xhscdn.com^
||ads.95516.com^
||ads.google.cn^
||ads.heytapmobi.com^
||ads.huan.tv^
||ads.huantest.com^
||ads.icloseli.cn^
||ads.linkedin.com^
||ads.music.126.net^
||ads.oppomobile.com^
||ads.pinterest.com^
||ads.servebom.com^
||ads.service.kugou.com^
||ads.tiktok.com^
||ads.v3mh.com^
||ads.youtube.com^
||ads3-normal-hl.zijieapi.com^
||ads3-normal-lf.zijieapi.com^
||ads3-normal-lq.zijieapi.com^
||ads3-normal.zijieapi.com^
||ads5-normal-hl.zijieapi.com^
||ads5-normal-lf.zijieapi.com^
||ads5-normal-lq.zijieapi.com^
||ads5-normal.zijieapi.com^
||adse.test.ximalaya.com^
||adse.wsa.ximalaya.com^
||adse.ximalaya.com^
||adsebs.ximalaya.com^
||adsense.google.cn^
||adserver.unityads.unity3d.com^
||adservice.google.cn^
||adservice.google.com^
||adserviceretry.kugou.com^
||adsfile.bssdlbig.kugou.com^
||adsfile.qq.com^
||adsfilebssdlbig.ali.kugou.com^
||adsfileretry.service.kugou.com^
||adsfs-sdkconfig.heytapimage.com^
||adsfs.oppomobile.com^
||adslvfile.qq.com^
||adsmart.konka.com^
||adsmind.gdtimg.com^
||adsmind.ugdtimg.com^
||adsp.xunlei.com^
||adstats.tencentmusic.com^
||adstore-1252524079.file.myqcloud.com^
||adstore-index-1252524079.file.myqcloud.com^
||adtago.s3.amazonaws.com^
||adtech.yahooinc.com^
||adtrack.quark.cn^
||adukwai.com^
||adv.fjtv.net^
||adv.sec.intl.miui.com^
||adv.sec.miui.com^
||advertiseonbing.azureedge.net^
||advertising-api-eu.amazon.com^
||advertising-api-fe.amazon.com^
||advertising-api.amazon.com^
||advertising.apple.com^
||advertising.yahoo.com^
||advertising.yandex.ru^
||advice-ads.s3.amazonaws.com^
||adview.cn^
||adx-ad.smart-tv.cn^
||adx-bj.anythinktech.com^
||adx-cn.anythinktech.com^
||adx-drcn.op.dbankcloud.cn^
||adx-open-service.youku.com^
||adx-os.anythinktech.com^
||adx.ads.heytapmobi.com^
||adx.ads.oppomobile.com^
||adxlog-adnet.vivo.com.cn^
||adxlog-adnet.vivo.com.cn.dsa.dnsv1.com.cn^
||adxserver.ad.cmvideo.cn^
||aegis.qq.com^
||afs.googlesyndication.com^
||aiseet.aa.atianqi.com^
||ali-ad.a.yximgs.com^
||alog.umeng.com^
||als.baidu.com^
||amdcopen.m.taobao.com^
||amdcopen.m.umeng.com^
||an.facebook.com^
||analysis.yozocloud.cn^
||analytics-api.samsunghealthcn.com^
||analytics.126.net^
||analytics.95516.com^
||analytics.google.com^
||analytics.pinterest.com^
||analytics.pointdrive.linkedin.com^
||analytics.query.yahoo.com^
||analytics.rayjump.com^
||analytics.s3.amazonaws.com^
||analytics.tiktok.com^
||analytics.woozooo.com^
||analyticsengine.s3.amazonaws.com^
||analyze.lemurbrowser.com^
||andrqd.play.aiseet.atianqi.com^
||ap.dongqiudi.com^
||apd-pcdnwxlogin.teg.tencent-cloud.net^
||apd-pcdnwxnat.teg.tencent-cloud.net^
||apd-pcdnwxstat.teg.tencent-cloud.net^
||api-access.pangolin-sdk-toutiao.com^
||api-access.pangolin-sdk-toutiao1.com^
||api-access.pangolin-sdk-toutiao2.com^
||api-access.pangolin-sdk-toutiao3.com^
||api-access.pangolin-sdk-toutiao4.com^
||api-access.pangolin-sdk-toutiao5.com^
||api-ad-product.huxiu.com^
||api-adservices.apple.com^
||api-gd.hiaiabc.com^
||api-htp.beizi.biz^
||api.ad.xiaomi.com^
||api.e.kuaishou.com^
||api.htp.hubcloud.com.cn^
||api.hzsanjiaomao.com^
||api.installer.xiaomi.com^
||api.jietuhb.com^
||api.kingdata.ksyun.com^
||api.statsig.com^
||api5-normal-quic-lf.ixigua.com^
||apiyd.my91app.com^
||apks.webxiaobai.top^
||app-measurement.com^
||appcloud2.in.zhihu.com^
||applog.lc.quark.cn^
||applog.uc.cn^
||applog.zijieapi.com^
||ata-sdk-uuid-report.dreport.meituan.net^
||auction.unityads.unity3d.com^
||audid-api.taobao.com^
||audid.umeng.com^
||azr.footprintdns.com^
||b1-data.ads.heytapmobi.com^
||baichuan-sdk.alicdn.com^
||baichuan-sdk.taobao.com^
||bdad.123pan.cn^
||bdapi-ads.realmemobile.com^
||bdapi-in-ads.realmemobile.com^
||bdapi.ads.oppomobile.com^
||beacon-api.aliyuncs.com^
||beacon.qq.com^
||beaconcdn.qq.com^
||beacons.gvt2.com^
||beizi.biz^
||bes-mtj.baidu.com^
||bgg.baidu.com^
||bianxian.com^
||bingads.microsoft.com^
||bj.ad.track.66mobi.com^
||books-analytics-events.apple.com^
||browsercfg-drcn.cloud.dbankcloud.cn^
||bsrv.qq.com^
||bugly.qq.com^
||business-api.tiktok.com^
||c.bidtoolads.com^
||c.evidon.com^
||c.gj.qq.com^
||c.kuaiduizuoye.com^
||c.sayhi.360.cn^
||c2.gdt.qq.com^
||canvas-cdn.gdt.qq.com^
||catalog.fjwhcbsh.com^
||cbjs.baidu.com^
||ccs.umeng.com^
||cctv.adsunion.com^
||cdn-ad.wtzw.com^
||cdn-ads.oss-cn-shanghai.aliyuncs.com^
||cdn-plugin-sync-upgrade-juui.hismarttv.com^
||cdn.ad.xiaomi.com^
||cdn.ynuf.aliapp.org^
||cfg.imtt.qq.com^
||chat1.jd.com^
||chiq-cloud.com^
||cj.qidian.com^
||ck.ads.oppomobile.com^
||click.googleanalytics.com^
||click.oneplus.cn^
||clog.miguvideo.com^
||cnlogs.umeng.com^
||cnlogs.umengcloud.com^
||cnzz.com^
||collect.kugou.com^
||commdata.v.qq.com^
||config.chsmarttv.com^
||config.unityads.unity3d.com^
||cpro.baidustatic.com^
||crashlytics.com^
||crashlyticsreports-pa.googleapis.com^
||csjplatform.com^
||cws-cctv.conviva.com^
||data.ads.oppomobile.com^
||data.chsmarttv.com^
||data.mistat.india.xiaomi.com^
||data.mistat.rus.xiaomi.com^
||data.mistat.xiaomi.com^
||diagnosis.ad.xiaomi.com^
||dig.bdurl.net^
||dl.zuimeitianqi.com^
||dlogs.bwton.com^
||dm.toutiao.com^
||domain.aishengji.com^
||doubleclick-cn.net^
||download.changhong.upgrade2.huan.tv^
||downloadxml.changhong.upgrade2.huan.tv^
||drcn-weather.cloud.huawei.com^
||dsp-x.jd.com^
||dsp.fcbox.com^
||dualstack-logs.amap.com^
||dutils.com^
||dxp.baidu.com^
||e.ad.xiaomi.com^
||eclick.baidu.com^
||edge.ads.twitch.tv^
||ef-dongfeng.tanx.com^
||entry.baidu.com^
||errlog.umeng.com^
||errnewlog.umeng.com^
||event.tradplusad.com^
||events-drcn.op.dbankcloud.cn^
||events.reddit.com^
||events.redditmedia.com^
||firebaselogging-pa.googleapis.com^
||flurry.com^
||g-adnet.hiaiabc.com^
||g-staic.ganjingworld.com^
||g2.ganjing.world^
||game.loveota.com^
||gdfp.gifshow.com^
||gemini.yahoo.com^
||geo.yahoo.com^
||getui.cn^
||getui.com^
||getui.net^
||ggx.cmvideo.cn^
||ggx01.miguvideo.com^
||ggx03.miguvideo.com^
||globalapi.ad.xiaomi.com^
||google-analytics.com^
||googleads.g.doubleclick.net^
||googleadservices-cn.com^
||googleadservices.com^
||googletagservices-cn.com^
||googletagservices.com^
||gorgon.youdao.com^
||gromore.pangolin-sdk-toutiao.com^
||grs.dbankcloud.com^
||grs.hicloud.com^
||grs.platform.dbankcloud.ru^
||h-adashx.ut.taobao.com^
||h.trace.qq.com^
||hanlanad.com^
||hexagon-analytics.com^
||hm.baidu.com^
||hmma.baidu.com^
||houyi.kkmh.com^
||hpplay.cn^
||httpdns.bcelive.com^
||httpdns.ocloud.oppomobile.com^
||hugelog.fcbox.com^
||huichuan.sm.cn^
||hw-ot-ad.a.yximgs.com^
||hw.zuimeitianqi.com^
||hwpub-s01-drcn.cloud.dbankcloud.cn^
||hya.comp.360os.com^
||hybrid.miniapp.taobao.com^
||hye.comp.360os.com^
||hyt.comp.360os.com^
||i.snssdk.com^
||iad.apple.com^
||iadctest.qwapi.com^
||iadsdk.apple.com^
||iadworkbench.apple.com^
||ifacelog.iqiyi.com^
||ifs.tanx.com^
||igexin.com^
||ii.gdt.qq.com^
||imag8.pubmatic.com^
||imag86.pubmatic.com^
||image-ad.sm.cn^
||imageplus.baidu.com^
||images.outbrainimg.com^
||images.pinduoduo.com^
||img-c.heytapimage.com^
||img.adnyg.com^
||img.adnyg.com.w.kunlungr.com^
||imtmp.net^
||iot-eu-logser.realme.com^
||iot-logser.realme.com^
||ipv4.kkmh.com^
||irc.qubiankeji.com^
||itv2-up.openspeech.cn^
||ixav-cse.avlyun.com^
||iyfbodn.com^
||janapi.jd.com^
||jiguang.cn^
||jpush.cn^
||jpush.html5.qq.com^
||jpush.io^
||jswebcollects.kugou.com^
||kepler.jd.com^
||kl.67it.com^
||knicks.jd.com^
||ks.pull.yximgs.com^
||launcher.smart-tv.cn^
||launcherimg.smart-tv.cn^
||lf3-ad-union-sdk.pglstatp-toutiao.com^
||lf6-ad-union-sdk.pglstatp-toutiao.com^
||litchiads.com^
||liveats-vod.video.ptqy.gitv.tv^
||livemonitor.huan.tv^
||livep.l.aiseet.atianqi.com^
||lives.l.aiseet.atianqi.com^
||lives.l.ott.video.qq.com^
||lm10111.jtrincc.cn^
||log-api-mn.huxiu.com^
||log-api.huxiu.com^
||log-api.pangolin-sdk-toutiao-b.com^
||log-api.pangolin-sdk-toutiao.com^
||log-report.com^
||log-sdk.gifshow.com^
||log-upload-os.hoyoverse.com^
||log-upload.mihoyo.com^
||log.ad.xiaomi.com^
||log.aispeech.com^
||log.amemv.com^
||log.appstore3.huan.tv^
||log.avlyun.com^
||log.avlyun.sec.intl.miui.com^
||log.byteoversea.com^
||log.fc.yahoo.com^
||log.kuwo.cn^
||log.pinterest.com^
||log.snssdk.com^
||log.stat.kugou.com^
||log.tagtic.cn^
||log.tbs.qq.com^
||log.vcgame.cn^
||log.web.kugou.com^
||log.zijieapi.com^
||log1.cmpassport.com^
||logbak.hicloud.com^
||logs.amap.com^
||logservice.hicloud.com^
||logservice1.hicloud.com^
||logtj.kugou.com^
||logupdate.avlyun.sec.miui.com^
||m-adnet.hiaiabc.com^
||m.ad.zhangyue.com^
||m.atm.youku.com^
||m.kubiqq.com^
||m1.ad.10010.com^
||mapi.m.jd.com^
||masdkv6.3g.qq.com^
||mazu.m.qq.com^
||mbdlog.iqiyi.com^
||metrics.apple.com^
||metrics.data.hicloud.com^
||metrics.icloud.com^
||metrics.mzstatic.com^
||metrics2.data.hicloud.com^
||metrika.yandex.ru^
||mi.gdt.qq.com^
||miav-cse.avlyun.com^
||mime.baidu.com^
||mine.baidu.com^
||mission-pub.smart-tv.cn^
||miui-fxcse.avlyun.com^
||mnqlog.ldmnq.com^
||mobads-logs.baidu.com^
||mobads-pre-config.cdn.bcebos.com^
||mobads.baidu.com^
||mobile.da.mgtv.com^
||mobilelog.upqzfile.com^
||mobileservice.cn^
||mon.zijieapi.com^
||monitor-ads-test.huan.tv^
||monitor-uu.play.aiseet.atianqi.com^
||monitor.music.qq.com^
||monitor.uu.qq.com^
||monsetting.toutiao.com^
||mssdk.volces.com^
||mssdk.zijieapi.com^
||mtj.baidu.com^
||newvoice.chiq5.smart-tv.cn^
||nmetrics.samsung.com^
||notes-analytics-events.apple.com^
||nsclick.baidu.com^
||o2o.api.xiaomi.com^
||oauth-login-drcn.platform.dbankcloud.com^
||offerwall.yandex.net^
||omgmta.play.aiseet.atianqi.com^
||open.e.kuaishou.cn^
||open.e.kuaishou.com^
||open.kuaishouzt.com^
||open.kwaishouzt.com^
||open.kwaizt.com^
||optimus-ads.amap.com^
||orbit.jd.com^
||oth.eve.mdt.qq.com^
||oth.str.mdt.qq.com^
||otheve.play.aiseet.atianqi.com^
||outlookads.live.com^
||p.l.qq.com^
||p.s.360.cn^
||p1-be-pack-sign.pglstatp-toutiao.com^
||p1-lm.adkwai.com^
||p2-be-pack-sign.pglstatp-toutiao.com^
||p2-lm.adkwai.com^
||p2p.huya.com^
||p3-be-pack-sign.pglstatp-toutiao.com^
||p3-lm.adkwai.com^
||p3-tt.byteimg.com^
||p4-be-pack-sign.pglstatp-toutiao.com^
||p5-be-pack-sign.pglstatp-toutiao.com^
||p6-be-pack-sign.pglstatp-toutiao.com^
||pagead2.googleadservices.com^
||pagead2.googlesyndication.com^
||pangolin-sdk-toutiao-b.com^
||pay.sboot.cn^
||pgdt.ugdtimg.com^
||pglstatp-toutiao.com^
||pig.pupuapi.com^
||pixon.ads-pixiv.net^
||pkoplink.com^
||plbslog.umeng.com^
||pms.mb.qq.com^
||policy.video.ptqy.gitv.tv^
||pos.baidu.com^
||proxy.advp.apple.com^
||public.gdtimg.com^
||q.i.gdt.qq.com^
||qqdata.ab.qq.com^
||qwapi.apple.com^
||qzs.gdtimg.com^
||recommend-drcn.hms.dbankcloud.cn^
||report.tv.kohesport.qq.com^
||res.hubcloud.com.cn^
||res1.hubcloud.com.cn^
||res2.hubcloud.com.cn^
||res3.hubcloud.com.cn^
||resolve.umeng.com^
||review.gdtimg.com^
||rms-drcn.platform.dbankcloud.cn^
||roi.soulapp.cn^
||rpt.gdt.qq.com^
||rtb.voiceads.cn^
||s.amazon-adsystem.com^
||s1.qq.com^
||s2.qq.com^
||s3.qq.com^
||saad.ms.zhangyue.net^
||samsung-com.112.2o7.net^
||samsungads.com^
||sanme2.taisantech.com^
||saveu5-normal-lq.zijieapi.com^
||scdown.qq.com^
||scs.openspeech.cn^
||sdk-ab-config.qquanquan.com^
||sdk-cache.video.ptqy.gitv.tv^
||sdk.1rtb.net^
||sdk.beizi.biz^
||sdk.cferw.com^
||sdk.e.qq.com^
||sdk.hzsanjiaomao.com^
||sdk.markmedia.com.cn^
||sdk.mobads.adwangmai.com^
||sdkconf.avlyun.com^
||sdkconfig.ad.intl.xiaomi.com^
||sdkconfig.ad.xiaomi.com^
||sdkconfig.play.aiseet.atianqi.com^
||sdkconfig.video.qq.com^
||sdkoptedge.chinanetcenter.com^
||sdktmp.hubcloud.com.cn^
||sdownload.stargame.com^
||search.ixigua.com^
||search3-search.ixigua.com^
||search5-search-hl.ixigua.com^
||search5-search.ixigua.com^
||securemetrics.apple.com^
||securepubads.g.doubleclick.net^
||sensors-log.dongqiudi.com^
||service.changhong.upgrade2.huan.tv^
||service.vmos.cn^
||sf16-static.i18n-pglstatp.com^
||sf3-fe-tos.pglstatp-toutiao.com^
||shouji.sogou.com^
||sigmob.cn^
||sigmob.com^
||skdisplay.jd.com^
||slb-p2p.vcloud.ks-live.com^
||smad.ms.zhangyue.net^
||smart-tv.cn^
||smartad.10010.com^
||smetrics.samsung.com^
||sms.ads.oppomobile.com^
||sngmta.qq.com^
||snowflake.qq.com^
||stat.dongqiudi.com^
||stat.y.qq.com^
||static.ads-twitter.com^
||statics.woozooo.com^
||stats.qiumibao.com^
||stats.wp.com^
||statsigapi.net^
||stg-data.ads.heytapmobi.com^
||success.ctobsnssdk.com^
||syh-imp.cdnjtzy.com^
||szbdyd.com^
||t-dsp.pinduoduo.com^
||t.l.qq.com^
||t.track.ad.xiaomi.com^
||t002.ottcn.com^
||t1.a.market.xiaomi.com^
||t2.a.market.xiaomi.com^
||t3.a.market.xiaomi.com^
||tangram.e.qq.com^
||tdc.qq.com^
||tdsdk.cpatrk.net^
||tdsdk.xdrig.com^
||tencent-dtv.m.cn.miaozhen.com^
||terms-drcn.platform.dbankcloud.cn^
||test.ad.xiaomi.com^
||test.e.ad.xiaomi.com^
||tj.b.qq.com^
||tj.video.qq.com^
||tmead.y.qq.com^
||tmeadcomm.y.qq.com^
||tmfmazu-wangka.m.qq.com^
||tmfmazu.m.qq.com^
||tmfsdk.m.qq.com^
||tmfsdktcpv4.m.qq.com^
||tnc3-aliec1.toutiaoapi.com^
||tnc3-aliec2.bytedance.com^
||tnc3-aliec2.toutiaoapi.com^
||tnc3-alisc1.bytedance.com^
||tnc3-alisc1.zijieapi.com^
||tnc3-alisc2.zijieapi.com^
||tnc3-bjlgy.bytedance.com^
||tnc3-bjlgy.toutiaoapi.com^
||tnc3-bjlgy.zijieapi.com^
||toblog.ctobsnssdk.com^
||trace.qq.com^
||tracelog-debug.qquanquan.com^
||track.lc.quark.cn^
||track.uc.cn^
||tracker.ai.xiaomi.com^
||tracker.gitee.com^
||tracking.miui.com^
||tracking.rus.miui.com^
||tsvrv.com^
||tvuser-ch.cedock.com^
||tx-ad.a.yximgs.com^
||tx-kmpaudio.pull.yximgs.com^
||tz.sec.xiaomi.com^
||uapi.ads.heytapmobi.com^
||udc.yahoo.com^
||udcm.yahoo.com^
||uedas.qidian.com^
||ulog-sdk.gifshow.com^
||ulogjs.gifshow.com^
||ulogs.umeng.com^
||ulogs.umengcloud.com^
||umengacs.m.taobao.com^
||umengjmacs.m.taobao.com^
||umini.shujupie.com^
||umsns.com^
||union.baidu.cn^
||union.baidu.com^
||update.avlyun.sec.miui.com^
||update.lejiao.tv^
||upgrade-update.hismarttv.com^
||us.l.qq.com^
||v.adintl.cn^
||v.adx.hubcloud.com.cn^
||v1-ad.video.yximgs.com^
||v2-ad.video.yximgs.com^
||v2-api-channel-launcher.hismarttv.com^
||v2.gdt.qq.com^
||v2mi.gdt.qq.com^
||v3-ad.video.yximgs.com^
||v3.gdt.qq.com^
||video-ad.sm.cn^
||video-dsp.pddpic.com^
||video.dispatch.tc.qq.com^
||virusinfo-cloudscan-cn.heytapmobi.com^
||vlive.qqvideo.tc.qq.com^
||volc.bj.ad.track.66mobi.com^
||vungle.com^
||w.l.qq.com^
||w1.askwai.com^
||w1.bskwai.com^
||w1.cskwai.com^
||w1.dskwai.com^
||w1.eskwai.com^
||w1.fskwai.com^
||w1.gskwai.com^
||w1.hskwai.com^
||w1.iskwai.com^
||w1.jskwai.com^
||w1.kskwai.com^
||w1.lskwai.com^
||w1.mskwai.com^
||w1.nskwai.com^
||w1.oskwai.com^
||w1.pskwai.com^
||w1.qskwai.com^
||w1.rskwai.com^
||w1.sskwai.com^
||w1.tskwai.com^
||w1.uskwai.com^
||w1.vskwai.com^
||w1.wskwai.com^
||w1.xskwai.com^
||w1.yskwai.com^
||w1.zskwai.com^
||watson.microsoft.com^
||watson.telemetry.microsoft.com^
||weather-analytics-events.apple.com^
||weather-community-drcn.weather.dbankcloud.cn^
||webstat.qiumibao.com^
||webview.unityads.unity3d.com^
||widgets.outbrain.com^
||widgets.pinterest.com^
||win.gdt.qq.com^
||wn.x.jd.com^
||ws-keyboard.shouji.sogou.com^
||ws.sj.qq.com^
||www42.zskwai.com^
||wxa.wxs.qq.com^
||wximg.wxs.qq.com^
||wxsmw.wxs.qq.com^
||wxsnsad.tc.qq.com^
||wxsnsdy.wxs.qq.com^
||wxsnsdythumb.wxs.qq.com^
||xc.gdt.qq.com^
||xiaomi-dtv.m.cn.miaozhen.com^
||xiaoshuo.wtzw.com^
||xlivrdr.com^
||xlmzc.cnjp-exp.com^
||xlog.jd.com^
||xlviiirdr.com^
||xlviirdr.com^
||yk-ssp.ad.youku.com^
||ykad-data.youku.com^
||ykad-gateway.youku.com^
||youku-acs.m.taobao.com^
||youxi.kugou.com^
||zeus.ad.xiaomi.com^
||zhihu-web-analytics.zhihu.com^
/.*\.*\.shouji\.sogou\.com/
/.*\.[a-zA-Z0-9.-]skwai\.com/
/.*\.a\.market\.xiaomi\.com/
/.*\.data\.hicloud\.com/
/.*\.log\.aliyuncs\.com/
/[a-zA-Z0-9.-]*-ad-[a-zA-Z0-9.-]*\.byteimg\.com/
/[a-zA-Z0-9.-]*-ad\.sm\.cn/
/[a-zA-Z0-9.-]*-ad\.video\.yximgs\.com/
/[a-zA-Z0-9.-]*-ad\.wtzw\.com/
/[a-zA-Z0-9.-]*-be-pack-sign\.pglstatp-toutiao\.com/
/[a-zA-Z0-9.-]*-lm\.adkwai\.com/
/[a-zA-Z0-9.-]*-normal-[a-zA-Z0-9.-]*\.zijieapi\.com/
/[a-zA-Z0-9.-]*-normal\.zijieapi\.com/
/cloudinject[a-zA-Z0-9.-]*-dev\.*\.[a-zA-Z0-9.-]*-[a-zA-Z0-9.-]*-[a-zA-Z0-9.-]*\.amazonaws\.com/
+18 -11
View File
@@ -1,18 +1,25 @@
||events-sandbox.data.msn.cn
||c.msn.cn
||ad.*
||clarity.microsoft.com
||reke.at.sohu.com
||e.so.com
||admin.zlhj.top
||vbng.at.sohu.com
||lb.e.so.com
||vbng.at.sohu.com
||qmsg.qy.net
||c.msn.cn
||events-sandbox.data.msn.cn
||ad.*
||ads.game.iqiyi.com
||admin.zlhj.top
||localhost.msn.cn
@@||www.csjplatform.com
@@||issuepcdn.baidupcs.com
@@||szminorshort.weixin.qq.com
@@||apd-pcdnwxlogin.teg.tencent-cloud.net
||amsg.qy.net
||fluxbak.iqiyi.com
||e.so.com
||clarity.microsoft.com
/hnzhangxin/
/zhangxinchina/
@@||eastday.com
@@||szminorshort.weixin.qq.com
@@||issuepcdn.baidupcs.com
@@||antpcdn.com
@@||claw.guanjia.qq.com
@@||mpcdn.weixin.qq.com
@@||www.csjplatform.com
@@||apd-pcdnwxlogin.teg.tencent-cloud.net
@@||api.tw06.xlmc.sec.miui.com
+5
View File
@@ -0,0 +1,5 @@
{
"blockedDomainsCount": {},
"resolvedDomainsCount": {},
"lastSaved": "2026-03-30T01:01:52.77650853+08:00"
}
+9652
View File
File diff suppressed because it is too large Load Diff
+21
View File
@@ -0,0 +1,21 @@
const fs = require('fs');
// Read the file contents
const content = fs.readFileSync('./static/api/js/index.js', 'utf8');
// Extract the swaggerDocument section
const match = content.match(/const swaggerDocument = (.*?);/s);
if (match) {
const jsonStr = match[1];
console.log('JSON string length:', jsonStr.length);
// Show the content around the error position
const errorPos = 70599;
const start = Math.max(0, errorPos - 200);
const end = Math.min(jsonStr.length, errorPos + 200);
console.log('Content around the error position:');
console.log(jsonStr.substring(start, end));
} else {
console.log('swaggerDocument definition not found');
}
Executable
BIN
View File
Binary file not shown.
+121 -37
View File
@@ -454,7 +454,12 @@ func (s *Server) initRequestInfo(w dns.ResponseWriter, r *dns.Msg) *requestInfo
domain = domain[:len(domain)-1]
}
// Get the query type
queryType = dns.TypeToString[r.Question[0].Qtype]
if t, ok := dns.TypeToString[r.Question[0].Qtype]; ok {
queryType = t
} else {
// Handle unknown types using a numeric representation
queryType = fmt.Sprintf("TYPE%d", r.Question[0].Qtype)
}
qType = r.Question[0].Qtype
// Update query-type statistics
s.updateStats(func(stats *Stats) {
@@ -2471,7 +2476,7 @@ func (s *Server) GetStats() *Stats {
}
// GetQueryLogs returns the query logs
func (s *Server) GetQueryLogs(limit, offset int, sortField, sortDirection, resultFilter, searchTerm string) []QueryLog {
func (s *Server) GetQueryLogs(limit, offset int, sortField, sortDirection, resultFilter, searchTerm, queryType string) []QueryLog {
s.queryLogsMutex.RLock()
defer s.queryLogsMutex.RUnlock()
@@ -2482,9 +2487,18 @@ func (s *Server) GetQueryLogs(limit, offset int, sortField, sortDirection, resul
if limit <= 0 {
limit = 100 // return 100 logs by default
}
// Apply a reasonable upper bound to prevent oversized requests
if limit > 1000 {
limit = 1000
}
// Create a copy of the logs for filtering and sorting
var logsCopy []QueryLog
// Pre-allocate slice capacity to reduce memory allocations
var filteredLogs []QueryLog
capacity := len(s.queryLogs)
if capacity > 10000 {
capacity = 10000 // cap the capacity to avoid excessive memory use
}
filteredLogs = make([]QueryLog, 0, capacity)
// Filter the logs first
for _, log := range s.queryLogs {
@@ -2493,62 +2507,101 @@ func (s *Server) GetQueryLogs(limit, offset int, sortField, sortDirection, resul
continue
}
// Apply the query-type filter
if queryType != "" && log.QueryType != queryType {
continue
}
// Apply the search filter
if searchTerm != "" {
// Search domain or client IP
// Search domain or client IP, using a strings.Contains-based check
if !strings.Contains(log.Domain, searchTerm) && !strings.Contains(log.ClientIP, searchTerm) {
continue
}
}
logsCopy = append(logsCopy, log)
filteredLogs = append(filteredLogs, log)
}
// Sort the logs
if sortField != "" {
sort.Slice(logsCopy, func(i, j int) bool {
var a, b interface{}
switch sortField {
case "time":
a = logsCopy[i].Timestamp
b = logsCopy[j].Timestamp
case "clientIp":
a = logsCopy[i].ClientIP
b = logsCopy[j].ClientIP
case "domain":
a = logsCopy[i].Domain
b = logsCopy[j].Domain
case "responseTime":
a = logsCopy[i].ResponseTime
b = logsCopy[j].ResponseTime
case "blockRule":
a = logsCopy[i].BlockRule
b = logsCopy[j].BlockRule
default:
// Sort by time by default
a = logsCopy[i].Timestamp
b = logsCopy[j].Timestamp
}
// Compare according to the sort direction
// Use a more efficient sort that avoids reflection
switch sortField {
case "time":
if sortDirection == "asc" {
return compareValues(a, b) < 0
sort.Slice(filteredLogs, func(i, j int) bool {
return filteredLogs[i].Timestamp.Before(filteredLogs[j].Timestamp)
})
} else {
sort.Slice(filteredLogs, func(i, j int) bool {
return filteredLogs[i].Timestamp.After(filteredLogs[j].Timestamp)
})
}
return compareValues(a, b) > 0
case "clientIp":
if sortDirection == "asc" {
sort.Slice(filteredLogs, func(i, j int) bool {
return filteredLogs[i].ClientIP < filteredLogs[j].ClientIP
})
} else {
sort.Slice(filteredLogs, func(i, j int) bool {
return filteredLogs[i].ClientIP > filteredLogs[j].ClientIP
})
}
case "domain":
if sortDirection == "asc" {
sort.Slice(filteredLogs, func(i, j int) bool {
return filteredLogs[i].Domain < filteredLogs[j].Domain
})
} else {
sort.Slice(filteredLogs, func(i, j int) bool {
return filteredLogs[i].Domain > filteredLogs[j].Domain
})
}
case "responseTime":
if sortDirection == "asc" {
sort.Slice(filteredLogs, func(i, j int) bool {
return filteredLogs[i].ResponseTime < filteredLogs[j].ResponseTime
})
} else {
sort.Slice(filteredLogs, func(i, j int) bool {
return filteredLogs[i].ResponseTime > filteredLogs[j].ResponseTime
})
}
case "blockRule":
if sortDirection == "asc" {
sort.Slice(filteredLogs, func(i, j int) bool {
return filteredLogs[i].BlockRule < filteredLogs[j].BlockRule
})
} else {
sort.Slice(filteredLogs, func(i, j int) bool {
return filteredLogs[i].BlockRule > filteredLogs[j].BlockRule
})
}
default:
// Sort by time, descending, by default
sort.Slice(filteredLogs, func(i, j int) bool {
return filteredLogs[i].Timestamp.After(filteredLogs[j].Timestamp)
})
}
} else {
// Sort by time, descending, by default
sort.Slice(filteredLogs, func(i, j int) bool {
return filteredLogs[i].Timestamp.After(filteredLogs[j].Timestamp)
})
}
// Compute the range to return
start := offset
end := offset + limit
if end > len(logsCopy) {
end = len(logsCopy)
if end > len(filteredLogs) {
end = len(filteredLogs)
}
if start >= len(logsCopy) {
if start >= len(filteredLogs) {
return []QueryLog{} // no data, return an empty slice
}
return logsCopy[start:end]
// Return the sub-slice directly to avoid unnecessary allocation
return filteredLogs[start:end]
}
// compareValues compares two values
@@ -2593,6 +2646,37 @@ func (s *Server) GetQueryLogsCount() int {
return len(s.queryLogs)
}
// GetQueryLogsCountWithFilter returns the total number of query logs matching the filters
func (s *Server) GetQueryLogsCountWithFilter(resultFilter, searchTerm, queryType string) int {
s.queryLogsMutex.RLock()
defer s.queryLogsMutex.RUnlock()
count := 0
for _, log := range s.queryLogs {
// Apply the result filter
if resultFilter != "" && log.Result != resultFilter {
continue
}
// Apply the query-type filter
if queryType != "" && log.QueryType != queryType {
continue
}
// Apply the search filter
if searchTerm != "" {
// Search domain or client IP
if !strings.Contains(log.Domain, searchTerm) && !strings.Contains(log.ClientIP, searchTerm) {
continue
}
}
count++
}
return count
}
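// Hypothetical call site for the two methods above, pairing the new queryType
// parameter with the count helper for a paginated view. srv, the page size of
// 50, and the "github" search term are illustrative, not part of this commit:
//
//	logs := srv.GetQueryLogs(50, 0, "time", "desc", "", "github", "A")
//	total := srv.GetQueryLogsCountWithFilter("", "github", "A")
//	pages := (total + 49) / 50 // ceiling division for the page count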
// GetQueryStats returns query statistics
func (s *Server) GetQueryStats() map[string]interface{} {
s.statsMutex.Lock()
+74
View File
@@ -0,0 +1,74 @@
import socket
import struct
class DNSServer:
def __init__(self, host='0.0.0.0', port=53):
self.host = host
self.port = port
self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self.socket.bind((self.host, self.port))
print(f"DNS Server started on {self.host}:{self.port}")
def run(self):
while True:
try:
data, addr = self.socket.recvfrom(512)
print(f"Received query from {addr}")
response = self.handle_query(data)
self.socket.sendto(response, addr)
except Exception as e:
print(f"Error: {e}")
def handle_query(self, data):
# Parse the query
header = data[:12]
query_id, flags, qdcount, ancount, nscount, arcount = struct.unpack('!HHHHHH', header)
# Build the response header
# Set the response flags
flags = 0x8400 # standard query response, no error
# Parse the question section
offset = 12
queries = []
for _ in range(qdcount):
qname, offset = self._parse_name(data, offset)
qtype, qclass = struct.unpack('!HH', data[offset:offset+4])
offset += 4
queries.append((qname, qtype, qclass))
# Build the response
response = header[:2] # keep the query ID
response += struct.pack('!H', flags)
response += struct.pack('!HHHH', qdcount, 0, 0, 0) # no answer, authority, or additional records yet
# Echo the question section back
for qname, qtype, qclass in queries:
response += self._encode_name(qname)
response += struct.pack('!HH', qtype, qclass)
return response
def _parse_name(self, data, offset):
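# Note: DNS name compression pointers (length bytes with the top two bits set,
# 0xC0) are not handled here; plain client queries leave the question section
# uncompressed, so this is sufficient for parsing questions.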
parts = []
while True:
length = data[offset]
if length == 0:
offset += 1
break
offset += 1
parts.append(data[offset:offset+length].decode('utf-8'))
offset += length
return '.'.join(parts), offset
def _encode_name(self, name):
encoded = b''
for part in name.split('.'):
encoded += struct.pack('!B', len(part))
encoded += part.encode('utf-8')
encoded += b'\x00' # terminating marker
return encoded
if __name__ == '__main__':
server = DNSServer()
server.run()
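A hedged smoke test for this toy server, using the github.com/miekg/dns client that the main Go server already imports. It only checks that a reply arrives, since handle_query echoes the question with zero answer records:

```go
m := new(dns.Msg)
m.SetQuestion(dns.Fqdn("example.com"), dns.TypeA)
in, _, err := new(dns.Client).Exchange(m, "127.0.0.1:53")
if err != nil {
	log.Fatal(err)
}
log.Printf("answers=%d rcode=%d", len(in.Answer), in.Rcode)
```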
+120
View File
@@ -0,0 +1,120 @@
package domain
import (
"encoding/json"
"fmt"
"os"
"strings"
)
// DomainInfo describes a domain's information
type DomainInfo struct {
Domain string `json:"domain"`
Category string `json:"category"`
Company string `json:"company"`
}
// GetDomainInfo looks up a domain in the domain information database
func GetDomainInfo(domain string) (DomainInfo, error) {
// Read the domain info file
data, err := os.ReadFile("./static/domain-info/domains/domain-info.json")
if err != nil {
return DomainInfo{}, err
}
// Parse the JSON data
var domainDB struct {
Categories map[string]string `json:"categories"`
Domains map[string]map[string]interface{} `json:"domains"`
}
if err := json.Unmarshal(data, &domainDB); err != nil {
return DomainInfo{}, err
}
// Iterate over the domain database, looking for a matching domain
for _, services := range domainDB.Domains {
// Get the company-level "company" field
companyLevelCompany := ""
if companyData, ok := services["company"].(string); ok {
companyLevelCompany = companyData
}
for serviceName, serviceInfo := range services {
if serviceName == "company" {
continue
}
// Check the URL field
if urlData, ok := serviceInfo.(map[string]interface{}); ok {
if urlField, ok := urlData["url"]; ok {
switch v := urlField.(type) {
case string:
// A single URL
if strings.Contains(v, domain) {
categoryId := ""
if cid, ok := urlData["categoryId"]; ok {
if cidStr, ok := cid.(float64); ok {
categoryId = fmt.Sprintf("%.0f", cidStr)
}
}
categoryName := "未知"
if categoryId != "" {
if name, ok := domainDB.Categories[categoryId]; ok {
categoryName = name
}
}
// Determine the company name: prefer the service-level company field, otherwise use the company-level one
itemCompany := companyLevelCompany
if serviceCompany, ok := urlData["company"].(string); ok {
itemCompany = serviceCompany
}
return DomainInfo{
Domain: domain,
Category: categoryName,
Company: itemCompany,
}, nil
}
case map[string]interface{}:
// Multiple URLs
for _, url := range v {
if urlStr, ok := url.(string); ok {
if strings.Contains(urlStr, domain) {
categoryId := ""
if cid, ok := urlData["categoryId"]; ok {
if cidStr, ok := cid.(float64); ok {
categoryId = fmt.Sprintf("%.0f", cidStr)
}
}
categoryName := "未知"
if categoryId != "" {
if name, ok := domainDB.Categories[categoryId]; ok {
categoryName = name
}
}
// Determine the company name: prefer the service-level company field, otherwise use the company-level one
itemCompany := companyLevelCompany
if serviceCompany, ok := urlData["company"].(string); ok {
itemCompany = serviceCompany
}
return DomainInfo{
Domain: domain,
Category: categoryName,
Company: itemCompany,
}, nil
}
}
}
}
}
}
}
}
// If no matching domain is found, return default info
return DomainInfo{
Domain: domain,
Category: "未知",
Company: "未知",
}, nil
}
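A hypothetical call site; note that GetDomainInfo re-reads and re-parses domain-info.json on every call, so a caller on a hot path would want to cache the parsed database:

```go
info, err := domain.GetDomainInfo("github.com")
if err != nil {
	log.Printf("domain info lookup failed: %v", err)
} else {
	log.Printf("%s: category=%s company=%s", info.Domain, info.Category, info.Company)
}
```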
Executable
+12
View File
@@ -0,0 +1,12 @@
#!/bin/sh
set -e -f -u -x
# This script syncs companies DB that we bundle with AdGuard Home. The source
# for this database is https://github.com/AdguardTeam/companiesdb.
#
trackers_url='https://raw.githubusercontent.com/AdguardTeam/companiesdb/main/dist/trackers.json'
output='./trackers.json'
readonly trackers_url output
curl -o "$output" -v "$trackers_url"
+34
View File
@@ -0,0 +1,34 @@
<ul class="f-cb">
<li><a href="http://banshi.beijing.gov.cn/" target="_blank">北京市</a></li>
<li><a href="https://zwfw.tj.gov.cn/" target="_blank">天津市</a></li>
<li><a href="http://www.hbzwfw.gov.cn/" target="_blank">河北省</a></li>
<li><a href="http://www.sxzwfw.gov.cn/icity/public/index" target="_blank">山西省</a></li>
<li><a href="http://zwfw.nmg.gov.cn" target="_blank">内蒙古自治区</a></li>
<li><a href="http://www.lnzwfw.gov.cn" target="_blank">辽宁省</a></li>
<li><a href="http://zwfw.jl.gov.cn/jlszwfw/" target="_blank">吉林省</a></li>
<li><a href="http://zwfw.hlj.gov.cn/" target="_blank">黑龙江省</a></li>
<li><a href="http://zwdt.sh.gov.cn/govPortals/index.do" target="_blank">上海市</a></li>
<li><a href="http://www.jszwfw.gov.cn" target="_blank">江苏省</a></li>
<li><a href="http://www.zjzwfw.gov.cn" target="_blank">浙江省</a></li>
<li><a href="https://www.ahzwfw.gov.cn" target="_blank">安徽省</a></li>
<li><a href="http://zwfw.fujian.gov.cn" target="_blank">福建省</a></li>
<li><a href="http://www.jxzwfww.gov.cn/" target="_blank">江西省</a></li>
<li><a href="https://tysfrz.isdapp.shandong.gov.cn/jpaas-jis-sso-server/sso/entrance/auth-center?appMark=OWWNSJVCC&amp;backUrl=http%3A%2F%2Fwww.shandong.gov.cn%2Fapi-gateway%2Fjpaas-juspace-web-sdywtb%2Ffront%2Fsso%2Flogin-success%3Fgotourl%3DaHR0cDovL3d3dy5zaGFuZG9uZy5nb3YuY24vY29sL2NvbDk0MDkxL2luZGV4Lmh0bWw%3D&amp;userType=1&amp;noLoginBackUrl=http%3A%2F%2Fwww.shandong.gov.cn%2Fcol%2Fcol94091%2Findex.html" target="_blank">山东省</a></li>
<li><a href="http://www.hnzwfw.gov.cn" target="_blank">河南省</a></li>
<li><a href="http://zwfw.hubei.gov.cn" target="_blank">湖北省</a></li>
<li><a href="https://auth.zwfw.hunan.gov.cn/oauth2/authorize?client_id=sXK6HBx3QwuJqaMXqmx2fQ&amp;response_type=redirect&amp;redirect_uri=http://zwfw-new.hunan.gov.cn/" target="_blank">湖南省</a></li>
<li><a href="http://www.gdzwfw.gov.cn" target="_blank">广东省</a></li>
<li><a href="http://zwfw.gxzf.gov.cn" target="_blank">广西壮族自治区</a></li>
<li><a href="https://wssp.hainan.gov.cn/" target="_blank">海南省</a></li>
<li><a href="http://zwykb.cq.gov.cn/" target="_blank">重庆市</a></li>
<li><a href="http://www.sczwfw.gov.cn" target="_blank">四川省</a></li>
<li><a href="https://zwfw.guizhou.gov.cn/index.html" target="_blank">贵州省</a></li>
<li><a href="https://zwfw.yn.gov.cn/portal/" target="_blank">云南省</a></li>
<li><a href="http://www.xzzwfw.gov.cn" target="_blank">西藏自治区</a></li>
<li><a href="https://zwfw.shaanxi.gov.cn/sx/public/index" target="_blank">陕西省</a></li>
<li><a href="https://zwfw.gansu.gov.cn/" target="_blank">甘肃省</a></li>
<li><a href="https://www.qhzwfw.gov.cn/" target="_blank">青海省</a></li>
<li><a href="http://zwfw.nx.gov.cn" target="_blank">宁夏回族自治区</a></li>
<li><a href="https://zwfw.xinjiang.gov.cn/" target="_blank">新疆维吾尔自治区</a></li>
<li><a target="_blank" href="https://zwfw.xjbt.gov.cn">新疆生产建设兵团</a></li>
</ul>
+2 -1
View File
@@ -15,10 +15,11 @@ require (
require (
github.com/google/go-cmp v0.7.0 // indirect
github.com/stretchr/testify v1.10.0 // indirect
github.com/stretchr/testify v1.11.1 // indirect
golang.org/x/mod v0.25.0 // indirect
golang.org/x/net v0.42.0 // indirect
golang.org/x/sync v0.16.0 // indirect
golang.org/x/sys v0.35.0 // indirect
golang.org/x/tools v0.34.0 // indirect
gopkg.in/ini.v1 v1.67.1 // indirect
)
+9
View File
@@ -12,9 +12,16 @@ github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZN
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
golang.org/x/mod v0.25.0 h1:n7a+ZbQKQA/Ysbyb0/6IbB1H/X41mKgbhfv7AfG/44w=
golang.org/x/mod v0.25.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww=
golang.org/x/net v0.42.0 h1:jzkYrhi3YQWD6MLBJcsklgQsoAcw89EcZbJw8Z614hs=
@@ -27,6 +34,8 @@ golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/tools v0.34.0 h1:qIpSLOxeCYGg9TrcJokLBG4KFA6d795g0xkBkiESGlo=
golang.org/x/tools v0.34.0/go.mod h1:pAP9OwEaY1CAW3HOmg3hLZC5Z0CCmzjAF2UQMSqNARg=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/ini.v1 v1.67.1 h1:tVBILHy0R6e4wkYOn3XmiITt/hEVH4TFMYvAX2Ytz6k=
gopkg.in/ini.v1 v1.67.1/go.mod h1:x/cyOwCgZqOkJoDIJ3c1KNHMo10+nLGAhh+kn3Zizss=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+698 -19
View File
@@ -1,10 +1,12 @@
package http
import (
"bytes"
"encoding/csv"
"encoding/json"
"fmt"
"io/ioutil"
"net/http"
"os"
"sort"
"strings"
"sync"
@@ -12,6 +14,7 @@ import (
"dns-server/config"
"dns-server/dns"
"dns-server/domain"
"dns-server/gfw"
"dns-server/logger"
"dns-server/shield"
@@ -118,7 +121,10 @@ func (s *Server) Start() error {
}))
mux.HandleFunc("/api/shield/hosts", s.loginRequired(s.handleShieldHosts))
mux.HandleFunc("/api/shield/blacklists", s.loginRequired(s.handleShieldBlacklists))
// 传统查询接口(保持向后兼容)
mux.HandleFunc("/api/query", s.loginRequired(s.handleQuery))
// RESTful 域名查询接口
mux.HandleFunc("/api/domains/", s.loginRequired(s.handleDomainQuery))
mux.HandleFunc("/api/status", s.loginRequired(s.handleStatus))
mux.HandleFunc("/api/config", s.loginRequired(s.handleConfig))
mux.HandleFunc("/api/config/restart", s.loginRequired(s.handleRestart))
@@ -136,7 +142,15 @@ func (s *Server) Start() error {
mux.HandleFunc("/api/logs/stats", s.loginRequired(s.handleLogsStats))
mux.HandleFunc("/api/logs/query", s.loginRequired(s.handleLogsQuery))
mux.HandleFunc("/api/logs/count", s.loginRequired(s.handleLogsCount))
// WebSocket端点
// 域名查询相关接口
mux.HandleFunc("/api/domain/info", s.loginRequired(s.handleDomainInfo))
// 域名信息列表接口
mux.HandleFunc("/api/domain-info", s.loginRequired(s.handleDomainInfoList))
// 威胁查询接口
mux.HandleFunc("/api/threat", s.loginRequired(s.handleThreatQuery))
// 威胁批量查询接口
mux.HandleFunc("/api/threat/batch", s.loginRequired(s.handleThreatBatch))
// WebSocket 端点
mux.HandleFunc("/ws/stats", s.loginRequired(s.handleWebSocketStats))
// 将/api/下的静态文件服务指向static/api目录,放在最后以避免覆盖API端点
@@ -1165,7 +1179,7 @@ func (s *Server) handleShieldHosts(w http.ResponseWriter, r *http.Request) {
}
}
// handleQuery 处理DNS查询请求
// handleQuery 处理DNS查询请求(传统接口,保持向后兼容)
func (s *Server) handleQuery(w http.ResponseWriter, r *http.Request) {
if r.Method != http.MethodGet {
http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
@@ -1174,7 +1188,9 @@ func (s *Server) handleQuery(w http.ResponseWriter, r *http.Request) {
domain := r.URL.Query().Get("domain")
if domain == "" {
http.Error(w, "Domain parameter is required", http.StatusBadRequest)
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(http.StatusBadRequest)
json.NewEncoder(w).Encode(map[string]string{"error": "需要提供domain参数"})
return
}
@@ -1188,6 +1204,47 @@ func (s *Server) handleQuery(w http.ResponseWriter, r *http.Request) {
json.NewEncoder(w).Encode(blockDetails)
}
// handleDomainQuery 处理RESTful风格的域名查询请求
func (s *Server) handleDomainQuery(w http.ResponseWriter, r *http.Request) {
if r.Method != http.MethodGet {
http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
return
}
// 从URL路径中提取域名参数
// 路径格式: /api/domains/{domain}
path := r.URL.Path
parts := strings.Split(path, "/")
if len(parts) < 4 {
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(http.StatusBadRequest)
json.NewEncoder(w).Encode(map[string]string{"error": "需要提供domain参数"})
return
}
domain := parts[3]
if domain == "" {
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(http.StatusBadRequest)
json.NewEncoder(w).Encode(map[string]string{"error": "需要提供domain参数"})
return
}
// 获取域名屏蔽的详细信息
blockDetails := s.shieldManager.CheckDomainBlockDetails(domain)
// 构建RESTful风格的响应
response := map[string]interface{}{
"domain": domain,
"status": blockDetails["blocked"],
"timestamp": time.Now(),
"details": blockDetails,
}
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(response)
}
// handleStatus 处理系统状态请求
func (s *Server) handleStatus(w http.ResponseWriter, r *http.Request) {
if r.Method != http.MethodGet {
@@ -1227,7 +1284,7 @@ func saveConfigToFile(config *config.Config, filePath string) error {
if err != nil {
return err
}
return ioutil.WriteFile(filePath, data, 0644)
return os.WriteFile(filePath, data, 0644)
}
// handleConfig 处理配置请求
@@ -1259,6 +1316,9 @@ func (s *Server) handleConfig(w http.ResponseWriter, r *http.Request) {
"CacheSize": s.globalConfig.DNS.CacheSize,
"MaxCacheTTL": s.globalConfig.DNS.MaxCacheTTL,
"MinCacheTTL": s.globalConfig.DNS.MinCacheTTL,
"enableFastReturn": s.globalConfig.DNS.EnableFastReturn,
"domainSpecificDNS": s.globalConfig.DNS.DomainSpecificDNS,
"noDNSSECDomains": s.globalConfig.DNS.NoDNSSECDomains,
},
"HTTPServer": map[string]interface{}{
"port": s.globalConfig.HTTP.Port,
@@ -1270,17 +1330,20 @@ func (s *Server) handleConfig(w http.ResponseWriter, r *http.Request) {
// 更新配置
var req struct {
DNSServer struct {
Port int `json:"port"`
QueryMode string `json:"queryMode"`
UpstreamServers []string `json:"upstreamServers"`
DnssecUpstreamServers []string `json:"dnssecUpstreamServers"`
Timeout int `json:"timeout"`
SaveInterval int `json:"saveInterval"`
EnableIPv6 bool `json:"enableIPv6"`
CacheMode string `json:"cacheMode"`
CacheSize int `json:"cacheSize"`
MaxCacheTTL int `json:"maxCacheTTL"`
MinCacheTTL int `json:"minCacheTTL"`
Port int `json:"port"`
QueryMode string `json:"queryMode"`
UpstreamServers []string `json:"upstreamServers"`
DnssecUpstreamServers []string `json:"dnssecUpstreamServers"`
Timeout int `json:"timeout"`
SaveInterval int `json:"saveInterval"`
EnableIPv6 bool `json:"enableIPv6"`
CacheMode string `json:"cacheMode"`
CacheSize int `json:"cacheSize"`
MaxCacheTTL int `json:"maxCacheTTL"`
MinCacheTTL int `json:"minCacheTTL"`
EnableFastReturn *bool `json:"enableFastReturn"`
DomainSpecificDNS map[string][]string `json:"domainSpecificDNS"`
NoDNSSECDomains []string `json:"noDNSSECDomains"`
} `json:"dnsserver"`
HTTPServer struct {
Port int `json:"port"`
@@ -1333,6 +1396,18 @@ func (s *Server) handleConfig(w http.ResponseWriter, r *http.Request) {
if req.DNSServer.MinCacheTTL > 0 {
s.globalConfig.DNS.MinCacheTTL = req.DNSServer.MinCacheTTL
}
// 更新enableFastReturn
if req.DNSServer.EnableFastReturn != nil {
s.globalConfig.DNS.EnableFastReturn = *req.DNSServer.EnableFastReturn
}
// 更新domainSpecificDNS
if req.DNSServer.DomainSpecificDNS != nil {
s.globalConfig.DNS.DomainSpecificDNS = req.DNSServer.DomainSpecificDNS
}
// 更新noDNSSECDomains
if len(req.DNSServer.NoDNSSECDomains) > 0 {
s.globalConfig.DNS.NoDNSSECDomains = req.DNSServer.NoDNSSECDomains
}
// 更新HTTP配置
if req.HTTPServer.Port > 0 {
@@ -1514,6 +1589,7 @@ func (s *Server) handleLogsQuery(w http.ResponseWriter, r *http.Request) {
sortDirection := r.URL.Query().Get("direction")
resultFilter := r.URL.Query().Get("result")
searchTerm := r.URL.Query().Get("search")
queryType := r.URL.Query().Get("queryType")
if limitStr := r.URL.Query().Get("limit"); limitStr != "" {
fmt.Sscanf(limitStr, "%d", &limit)
@@ -1524,7 +1600,7 @@ func (s *Server) handleLogsQuery(w http.ResponseWriter, r *http.Request) {
}
// 获取日志数据
logs := s.dnsServer.GetQueryLogs(limit, offset, sortField, sortDirection, resultFilter, searchTerm)
logs := s.dnsServer.GetQueryLogs(limit, offset, sortField, sortDirection, resultFilter, searchTerm, queryType)
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(logs)
@@ -1537,13 +1613,52 @@ func (s *Server) handleLogsCount(w http.ResponseWriter, r *http.Request) {
return
}
// 获取日志总数
count := s.dnsServer.GetQueryLogsCount()
// 获取过滤参数
resultFilter := r.URL.Query().Get("result")
searchTerm := r.URL.Query().Get("search")
queryType := r.URL.Query().Get("queryType")
// 获取带过滤条件的日志总数
count := s.dnsServer.GetQueryLogsCountWithFilter(resultFilter, searchTerm, queryType)
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(map[string]int{"count": count})
}
// handleDomainInfo 处理域名信息查询请求
func (s *Server) handleDomainInfo(w http.ResponseWriter, r *http.Request) {
if r.Method != http.MethodPost {
http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
return
}
// 解析请求体
var req struct {
Domain string `json:"domain"`
}
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
http.Error(w, "Invalid request body", http.StatusBadRequest)
return
}
if req.Domain == "" {
http.Error(w, "Domain parameter is required", http.StatusBadRequest)
return
}
// 从域名信息数据库中查询
domainInfo, err := domain.GetDomainInfo(req.Domain)
if err != nil {
http.Error(w, "Failed to query domain info", http.StatusInternalServerError)
return
}
// 返回域名信息
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(domainInfo)
}
// handleRestart 处理重启服务请求
func (s *Server) handleRestart(w http.ResponseWriter, r *http.Request) {
if r.Method != http.MethodPost {
@@ -1664,6 +1779,319 @@ func (s *Server) handleLogout(w http.ResponseWriter, r *http.Request) {
logger.Info("用户注销成功")
}
// handleDomainInfoList 处理域名信息列表请求
func (s *Server) handleDomainInfoList(w http.ResponseWriter, r *http.Request) {
if r.Method != http.MethodGet {
http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
return
}
// 获取查询参数
query := r.URL.Query()
if query.Has("domains") {
// 处理域名信息,支持过滤特定域名
domainFilter := query.Get("domains")
handleDomainsInfo(w, domainFilter)
} else if query.Has("trackers") {
// 处理跟踪器信息,支持过滤特定域名
trackerFilter := query.Get("trackers")
handleTrackersInfo(w, trackerFilter)
} else if query.Has("threats") {
// 处理威胁域名信息,支持过滤特定域名
threatFilter := query.Get("threats")
handleThreatsInfo(w, threatFilter)
} else {
// 直接访问 /domain-info 不提供任何内容
http.Error(w, "No content provided", http.StatusNoContent)
return
}
}
// isService 判断一个对象是否是服务(而不是分组)
func isService(obj map[string]interface{}) bool {
// 服务通常包含 name、url、categoryId 字段
_, hasName := obj["name"]
_, hasUrl := obj["url"]
_, hasCategoryId := obj["categoryId"]
// 如果有 name 和 url,则认为是服务
if hasName && hasUrl {
return true
}
// 如果有 categoryId,也认为是服务
if hasCategoryId {
return true
}
return false
}
// processServiceItem 递归处理服务或分组
func processServiceItem(
serviceName string,
service interface{},
companyLevelCompany string,
domainFilter string,
categories map[string]string,
result *[]map[string]interface{},
) {
serviceMap, ok := service.(map[string]interface{})
if !ok {
return
}
// 跳过 company 字段
if serviceName == "company" {
return
}
// 判断是服务还是分组
if isService(serviceMap) {
// 这是一个服务,进行处理
urlValue := serviceMap["url"]
match := false
// 检查是否需要过滤
if domainFilter != "" {
// 检查服务名称是否包含过滤条件
if serviceName == domainFilter {
match = true
} else {
// 检查 URL 是否包含过滤条件
switch v := urlValue.(type) {
case string:
if strings.Contains(v, domainFilter) {
match = true
}
case map[string]interface{}:
for _, url := range v {
if urlStr, ok := url.(string); ok && strings.Contains(urlStr, domainFilter) {
match = true
break
}
}
}
}
if !match {
return
}
}
// 确定公司名:优先使用服务级别的 company 字段,否则使用公司级别的 company 字段
itemCompany := companyLevelCompany
if serviceCompany, ok := serviceMap["company"].(string); ok {
itemCompany = serviceCompany
}
// 构建响应对象
item := map[string]interface{}{
"icon": serviceMap["icon"],
"name": serviceMap["name"],
"company": itemCompany,
}
// 添加类别
if categoryId, ok := serviceMap["categoryId"].(float64); ok {
categoryIdStr := fmt.Sprintf("%.0f", categoryId)
if category, exists := categories[categoryIdStr]; exists {
item["category"] = category
}
}
*result = append(*result, item)
} else {
// 这是一个分组,递归处理其下的子项
for subName, subService := range serviceMap {
processServiceItem(subName, subService, companyLevelCompany, domainFilter, categories, result)
}
}
}
func handleDomainsInfo(w http.ResponseWriter, domainFilter string) {
// 如果过滤参数为空字符串,返回空数组
if domainFilter == "" {
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode([]map[string]interface{}{})
return
}
filePath := "./static/domain-info/domains/domain-info.json"
data, err := os.ReadFile(filePath)
if err != nil {
http.Error(w, "Failed to read domain info file", http.StatusInternalServerError)
logger.Error(fmt.Sprintf("读取域名信息文件失败: %v", err))
return
}
// 解析JSON
var domainInfo struct {
Categories map[string]string `json:"categories"`
Domains map[string]map[string]interface{} `json:"domains"`
}
if err := json.Unmarshal(data, &domainInfo); err != nil {
http.Error(w, "Failed to parse domain info file", http.StatusInternalServerError)
logger.Error(fmt.Sprintf("解析域名信息文件失败: %v", err))
return
}
// 转换为所需格式
var result []map[string]interface{}
for _, services := range domainInfo.Domains {
// 获取公司级别的 company 字段
companyLevelCompany := ""
if companyData, ok := services["company"].(string); ok {
companyLevelCompany = companyData
}
// 遍历所有服务(包括嵌套的分组)
for serviceName, service := range services {
processServiceItem(serviceName, service, companyLevelCompany, domainFilter, domainInfo.Categories, &result)
}
}
// 返回 JSON 响应
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(result)
}
// handleTrackersInfo 处理跟踪器信息请求,返回名称、类别、url、所属单位/公司
func handleTrackersInfo(w http.ResponseWriter, trackerFilter string) {
// 如果过滤参数为空字符串,返回空数组
if trackerFilter == "" {
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode([]map[string]interface{}{})
return
}
filePath := "./static/domain-info/tracker/trackers.json"
data, err := os.ReadFile(filePath)
if err != nil {
http.Error(w, "Failed to read trackers file", http.StatusInternalServerError)
logger.Error(fmt.Sprintf("读取跟踪器文件失败: %v", err))
return
}
// 解析JSON
var trackersInfo struct {
Categories map[string]string `json:"categories"`
Trackers map[string]map[string]interface{} `json:"trackers"`
}
if err := json.Unmarshal(data, &trackersInfo); err != nil {
http.Error(w, "Failed to parse trackers file", http.StatusInternalServerError)
logger.Error(fmt.Sprintf("解析跟踪器文件失败: %v", err))
return
}
// 转换为所需格式
var result []map[string]interface{}
for trackerDomain, tracker := range trackersInfo.Trackers {
// 检查是否需要过滤
if trackerFilter != "" {
// 检查跟踪器域名是否包含过滤条件
if !strings.Contains(trackerDomain, trackerFilter) {
// 检查名称是否包含过滤条件
if name, ok := tracker["name"].(string); !ok || !strings.Contains(name, trackerFilter) {
// 检查URL是否包含过滤条件
if url, ok := tracker["url"].(string); !ok || !strings.Contains(url, trackerFilter) {
continue
}
}
}
}
item := map[string]interface{}{
"name": tracker["name"],
"url": tracker["url"],
"company": tracker["companyId"],
}
// 添加类别
if categoryId, ok := tracker["categoryId"].(float64); ok {
categoryIdStr := fmt.Sprintf("%.0f", categoryId)
if category, exists := trackersInfo.Categories[categoryIdStr]; exists {
item["category"] = category
}
}
result = append(result, item)
}
// 返回JSON响应
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(result)
}
// handleThreatsInfo 处理威胁域名信息请求,返回类型、名称、级别、域名
func handleThreatsInfo(w http.ResponseWriter, threatFilter string) {
// 如果过滤参数为空字符串,返回空数组
if threatFilter == "" {
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode([]map[string]string{})
return
}
filePath := "./static/domain-info/threats/threats-database.csv"
data, err := os.ReadFile(filePath)
if err != nil {
http.Error(w, "Failed to read threats file", http.StatusInternalServerError)
logger.Error(fmt.Sprintf("读取威胁域名文件失败: %v", err))
return
}
// 解析CSV
reader := csv.NewReader(bytes.NewReader(data))
reader.FieldsPerRecord = -1 // 允许不同长度的记录
// 读取所有记录
records, err := reader.ReadAll()
if err != nil {
http.Error(w, "Failed to parse threats file", http.StatusInternalServerError)
logger.Error(fmt.Sprintf("解析威胁域名文件失败: %v", err))
return
}
// 转换为所需格式
var result []map[string]string
// 跳过标题行
for i, record := range records {
if i == 0 {
continue
}
if len(record) >= 4 {
// 检查是否需要过滤
if threatFilter != "" {
// 检查域名是否包含过滤条件
if !strings.Contains(record[3], threatFilter) {
// 检查名称是否包含过滤条件
if !strings.Contains(record[1], threatFilter) {
// 检查类型是否包含过滤条件
if !strings.Contains(record[0], threatFilter) {
continue
}
}
}
}
item := map[string]string{
"type": record[0],
"name": record[1],
"level": record[2],
"domain": record[3],
}
result = append(result, item)
}
}
// 返回JSON响应
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(result)
}
// handleChangePassword 处理修改密码请求
func (s *Server) handleChangePassword(w http.ResponseWriter, r *http.Request) {
if r.Method != http.MethodPost {
@@ -1709,3 +2137,254 @@ func (s *Server) handleChangePassword(w http.ResponseWriter, r *http.Request) {
json.NewEncoder(w).Encode(map[string]string{"status": "success", "message": "密码修改成功"})
logger.Info("密码修改成功")
}
// handleThreatQuery 处理威胁域名查询请求
// @Summary 查询威胁域名信息
// @Description 根据传入的域名参数查询威胁数据库,返回威胁类型、名称、风险等级和域名
// @Tags threat
// @Accept json
// @Produce json
// @Param domain query string true "要查询的域名"
// @Success 200 {string} string "威胁信息,格式:类型,名称,风险等级,域名"
// @Failure 400 {object} map[string]string "缺少域名参数"
// @Router /api/threat [get]
func (s *Server) handleThreatQuery(w http.ResponseWriter, r *http.Request) {
if r.Method != http.MethodGet {
http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
return
}
// 获取域名参数
domain := r.URL.Query().Get("domain")
if domain == "" {
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(http.StatusBadRequest)
json.NewEncoder(w).Encode(map[string]string{"error": "需要提供 domain 参数"})
return
}
// 读取威胁数据库 CSV 文件
filePath := "./static/domain-info/threats/threats-database.csv"
data, err := os.ReadFile(filePath)
if err != nil {
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(http.StatusInternalServerError)
json.NewEncoder(w).Encode(map[string]string{"error": "读取威胁数据库失败"})
logger.Error(fmt.Sprintf("读取威胁数据库文件失败:%v", err))
return
}
// 解析 CSV
reader := csv.NewReader(bytes.NewReader(data))
reader.FieldsPerRecord = -1 // 允许不同长度的记录
// 读取所有记录
records, err := reader.ReadAll()
if err != nil {
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(http.StatusInternalServerError)
json.NewEncoder(w).Encode(map[string]string{"error": "解析威胁数据库失败"})
logger.Error(fmt.Sprintf("解析威胁数据库文件失败:%v", err))
return
}
// 构建威胁域名映射(支持顶级域名匹配)
threatMap := make(map[string][]string)
for i, record := range records {
if i == 0 {
continue // 跳过标题行
}
if len(record) >= 4 {
threatType := record[0] // 第一列:类型
threatName := record[1] // 第二列:名称
riskLevel := record[2] // 第三列:风险等级
domain := record[3] // 第四列:域名
threatInfo := []string{threatType, threatName, riskLevel}
// 1. 完整域名匹配(所有类型都添加)
threatMap[domain] = threatInfo
// 2. 只有恶意网站类型才添加子域名匹配规则
// 类型判断:钓鱼网站、仿冒网站
// 逻辑:如果威胁数据库中有 sub.example.com,则所有子域名(a.sub.example.com)都应匹配
if threatType == "钓鱼网站" || threatType == "仿冒网站" {
// 对于恶意网站,添加子域名匹配规则
// 例如:sub.example.com -> 添加 .sub.example.com 规则
// 这样 a.sub.example.com 就会匹配
topLevelDomain := "." + domain
// 只有当该顶级域名规则不存在时才添加
if _, exists := threatMap[topLevelDomain]; !exists {
threatMap[topLevelDomain] = threatInfo
}
}
}
}
// 查询单个域名
var result string
// 1. 先检查完整匹配
if threat, exists := threatMap[domain]; exists {
result = fmt.Sprintf("%s,%s,%s,%s", threat[0], threat[1], threat[2], domain)
} else {
// 2. 检查子域名匹配(遍历顶级域名规则)
for threatDomain, threatInfo := range threatMap {
// 只检查以点开头的顶级域名规则
if strings.HasPrefix(threatDomain, ".") && strings.HasSuffix(domain, threatDomain) {
// 额外验证:确保是完整的域名部分匹配
prefix := strings.TrimSuffix(domain, threatDomain)
if len(prefix) > 0 && !strings.HasSuffix(prefix, ".") {
// 不是完整的子域名部分,跳过
continue
}
result = fmt.Sprintf("%s,%s,%s,%s", threatInfo[0], threatInfo[1], threatInfo[2], domain)
break
}
}
}
w.Header().Set("Content-Type", "application/json")
if result == "" {
// 未找到匹配的威胁信息
json.NewEncoder(w).Encode(map[string]string{"message": "无"})
} else {
// 返回威胁信息
json.NewEncoder(w).Encode(map[string]string{"data": result})
}
}
// handleThreatBatch 批量查询威胁域名
// @Summary 批量查询威胁域名
// @Description 批量查询多个域名是否是威胁域名
// @Tags threat
// @Accept json
// @Produce json
// @Param domains body []string true "域名列表"
// @Success 200 {object} map[string]interface{} "批量查询结果"
// @Router /api/threat/batch [post]
func (s *Server) handleThreatBatch(w http.ResponseWriter, r *http.Request) {
if r.Method != http.MethodPost {
http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
return
}
var req struct {
Domains []string `json:"domains"`
}
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(http.StatusBadRequest)
json.NewEncoder(w).Encode(map[string]string{"error": "请求格式错误"})
return
}
// 读取威胁数据库 CSV 文件
filePath := "./static/domain-info/threats/threats-database.csv"
data, err := os.ReadFile(filePath)
if err != nil {
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(http.StatusInternalServerError)
json.NewEncoder(w).Encode(map[string]string{"error": "读取威胁数据库失败"})
logger.Error(fmt.Sprintf("读取威胁数据库文件失败:%v", err))
return
}
// 解析 CSV
reader := csv.NewReader(bytes.NewReader(data))
reader.FieldsPerRecord = -1 // 允许不同长度的记录
// 读取所有记录
records, err := reader.ReadAll()
if err != nil {
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(http.StatusInternalServerError)
json.NewEncoder(w).Encode(map[string]string{"error": "解析威胁数据库失败"})
logger.Error(fmt.Sprintf("解析威胁数据库文件失败:%v", err))
return
}
// 构建威胁域名映射(支持顶级域名匹配)
threatMap := make(map[string][]string)
for i, record := range records {
if i == 0 {
continue // 跳过标题行
}
if len(record) >= 4 {
threatType := record[0] // 第一列:类型
threatName := record[1] // 第二列:名称
riskLevel := record[2] // 第三列:风险等级
domain := record[3] // 第四列:域名
threatInfo := []string{threatType, threatName, riskLevel}
// 1. 完整域名匹配(所有类型都添加)
threatMap[domain] = threatInfo
// 2. 只有恶意网站类型才添加子域名匹配规则
// 类型判断:钓鱼网站、仿冒网站
// 逻辑:如果威胁数据库中有 sub.example.com,则所有子域名(a.sub.example.com)都应匹配
if threatType == "钓鱼网站" || threatType == "仿冒网站" {
// 对于恶意网站,添加子域名匹配规则
// 例如:sub.example.com -> 添加 .sub.example.com 规则
// 这样 a.sub.example.com 就会匹配
topLevelDomain := "." + domain
// 只有当该顶级域名规则不存在时才添加
if _, exists := threatMap[topLevelDomain]; !exists {
threatMap[topLevelDomain] = threatInfo
}
}
}
}
// 批量查询
results := make([]map[string]interface{}, 0, len(req.Domains))
for _, domain := range req.Domains {
// 1. 先检查完整匹配
if threat, exists := threatMap[domain]; exists {
results = append(results, map[string]interface{}{
"domain": domain,
"isThreat": true,
"data": fmt.Sprintf("%s,%s,%s,%s", threat[0], threat[1], threat[2], domain),
})
continue
}
// 2. 检查子域名匹配(遍历顶级域名规则)
matched := false
for threatDomain, threatInfo := range threatMap {
// 只检查以点开头的顶级域名规则
if strings.HasPrefix(threatDomain, ".") && strings.HasSuffix(domain, threatDomain) {
// 验证:确保是有效的子域名匹配
// 例如:test.example.com 匹配 .example.com ✅
// notexample.com 不应该匹配 .example.com ❌
// 去掉 threatDomain 的第一个字符(即去掉开头的点)
suffixToTrim := threatDomain[1:]
prefix := strings.TrimSuffix(domain, suffixToTrim)
// 验证逻辑:前缀不为空且以.结尾,或者前缀为空(完全匹配)
if len(prefix) == 0 || (len(prefix) > 0 && strings.HasSuffix(prefix, ".")) {
results = append(results, map[string]interface{}{
"domain": domain,
"isThreat": true,
"data": fmt.Sprintf("%s,%s,%s,%s", threatInfo[0], threatInfo[1], threatInfo[2], domain),
})
matched = true
break
}
}
}
if !matched {
results = append(results, map[string]interface{}{
"domain": domain,
"isThreat": false,
})
}
}
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(map[string]interface{}{
"results": results,
})
}
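A brief client-side sketch of the two new threat endpoints registered above (`GET /api/threat?domain=...` and `POST /api/threat/batch`); the base URL is an assumption, and since the handlers are wrapped in `loginRequired`, a valid session is also assumed and not shown here:

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"log"
	"net/http"
)

func main() {
	// HTTP 控制台地址,按实际配置调整;这些接口位于 loginRequired 之后,需要有效的会话 Cookie。
	base := "http://127.0.0.1:8081"

	// 单个查询:GET /api/threat?domain=...
	resp, err := http.Get(base + "/api/threat?domain=example.com")
	if err != nil {
		log.Fatal(err)
	}
	body, _ := io.ReadAll(resp.Body)
	resp.Body.Close()
	fmt.Println("single:", string(body))

	// 批量查询:POST /api/threat/batch,请求体为 {"domains": [...]}
	payload, _ := json.Marshal(map[string][]string{"domains": {"example.com", "a.sub.example.com"}})
	resp, err = http.Post(base+"/api/threat/batch", "application/json", bytes.NewReader(payload))
	if err != nil {
		log.Fatal(err)
	}
	body, _ = io.ReadAll(resp.Body)
	resp.Body.Close()
	fmt.Println("batch:", string(body))
}
```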
+22294
View File
File diff suppressed because it is too large
+83 -65
View File
@@ -31,70 +31,88 @@ import (
// createDefaultConfig 创建默认配置文件
func createDefaultConfig(configFile string) error {
// 默认配置内容
defaultConfig := `{
"dns": {
"port": 53,
"upstreamDNS": [
"223.5.5.5:53",
"223.6.6.6:53"
],
"dnssecUpstreamDNS": [
"8.8.8.8:53",
"1.1.1.1:53"
],
"timeout": 5000,
"saveInterval": 300,
"cacheTTL": 30,
"enableDNSSEC": true,
"queryMode": "parallel"
},
"http": {
"port": 8080,
"host": "0.0.0.0",
"enableAPI": true,
"username": "admin",
"password": "admin"
},
"shield": {
"blacklists": [
{
"name": "AdGuard DNS filter",
"url": "https://gitea.amazehome.xyz/AMAZEHOME/hosts-and-filters/raw/branch/main/filter.txt",
"enabled": true
},
{
"name": "Adaway Default Blocklist",
"url": "https://gitea.amazehome.xyz/AMAZEHOME/hosts-and-Filters/raw/branch/main/hosts/adaway.txt",
"enabled": true
},
{
"name": "CHN-anti-AD",
"url": "https://gitea.amazehome.xyz/AMAZEHOME/hosts-and-Filters/raw/branch/main/list/easylist.txt",
"enabled": true
},
{
"name": "My GitHub Rules",
"url": "https://gitea.amazehome.xyz/AMAZEHOME/hosts-and-filters/raw/branch/main/rules/costomize.txt",
"enabled": true
}
],
"updateInterval": 3600,
"blockMethod": "NXDOMAIN",
"customBlockIP": "",
"statsSaveInterval": 60
},
"gfwList": {
"ip": "127.0.0.1",
"content": "./data/gfwlist.txt",
"enabled": true
},
"log": {
"level": "debug",
"maxSize": 100,
"maxBackups": 10,
"maxAge": 30
}
}`
defaultConfig := `# DNS服务器配置文件
# 格式:INI格式,使用#注释
[dns]
# DNS服务器监听端口
port = 53
# 上游DNS服务器列表,逗号分隔
upstreamDNS = 223.5.5.5:53, 223.6.6.6:53
# DNSSEC专用服务器列表,逗号分隔
dnssecUpstreamDNS = 8.8.8.8:53, 1.1.1.1:53
# 数据保存间隔(秒)
saveInterval = 300
# DNS缓存过期时间(分钟)
cacheTTL = 30
# 是否启用DNSSEC支持
enableDNSSEC = true
# 查询模式:parallel(并行请求)、fastest-ip(最快的IP地址)
queryMode = parallel
# 查询超时时间(毫秒)
queryTimeout = 5000
# 是否启用快速返回机制
enableFastReturn = true
# 不验证DNSSEC的域名模式列表,逗号分隔
noDNSSECDomains =
# 是否启用IPv6解析(AAAA记录)
enableIPv6 = false
# 缓存模式:memory(内存缓存)、file(文件缓存)
cacheMode = memory
# 缓存大小限制(MB)
cacheSize = 100
# 最大缓存TTL(分钟)
maxCacheTTL = 120
# 最小缓存TTL(分钟)
minCacheTTL = 5
[http]
# HTTP控制台监听端口
port = 8080
# HTTP控制台监听地址
host = 0.0.0.0
# 是否启用API
enableAPI = true
# 登录用户名
username = admin
# 登录密码
password = admin
[shield]
# 屏蔽规则更新间隔(秒)
updateInterval = 3600
# 屏蔽方法: NXDOMAIN, refused, emptyIP, customIP
blockMethod = NXDOMAIN
# 自定义屏蔽IP,当BlockMethod为"customIP"时使用
customBlockIP =
# 计数数据保存间隔(秒)
statsSaveInterval = 60
# 黑名单配置
# 格式:blacklist_名称 = URL,enabled
blacklist_AdGuard_DNS_filter = https://gitea.amazehome.xyz/AMAZEHOME/hosts-and-filters/raw/branch/main/filter.txt,true
blacklist_Adaway_Default_Blocklist = https://gitea.amazehome.xyz/AMAZEHOME/hosts-and-Filters/raw/branch/main/hosts/adaway.txt,true
blacklist_CHN_anti_AD = https://gitea.amazehome.xyz/AMAZEHOME/hosts-and-Filters/raw/branch/main/list/easylist.txt,true
blacklist_My_GitHub_Rules = https://gitea.amazehome.xyz/AMAZEHOME/hosts-and-filters/raw/branch/main/rules/costomize.txt,true
[gfwList]
# GFWList域名解析的目标IP地址
ip = 127.0.0.1
# GFWList规则文件路径
content = ./data/gfwlist.txt
# 是否启用GFWList功能
enabled = true
[log]
# 日志级别:debug, info, warn, error
level = debug
# 日志文件最大大小(MB)
maxSize = 100
# 日志文件最大备份数
maxBackups = 10
# 日志文件最大保留天数
maxAge = 30
`
// 写入默认配置到文件
return os.WriteFile(configFile, []byte(defaultConfig), 0644)
@@ -162,7 +180,7 @@ func createRequiredFiles(cfg *config.Config) error {
func main() {
// 命令行参数解析
var configFile string
flag.StringVar(&configFile, "config", "config.json", "配置文件路径")
flag.StringVar(&configFile, "config", "config.ini", "配置文件路径")
flag.Parse()
// 检查配置文件是否存在,如果不存在则创建默认配置文件
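The project's own config loader isn't part of this hunk, but since the default file switches to INI and `gopkg.in/ini.v1` is added to go.mod above, a minimal sketch of reading the generated `config.ini` with that library could look like this (key names taken from the default config string above; error handling kept short):

```go
package main

import (
	"fmt"
	"log"

	"gopkg.in/ini.v1" // dependency added to go.mod in this commit
)

func main() {
	cfg, err := ini.Load("config.ini")
	if err != nil {
		log.Fatalf("加载配置文件失败: %v", err)
	}

	dns := cfg.Section("dns")
	port := dns.Key("port").MustInt(53)                     // 默认 53
	upstream := dns.Key("upstreamDNS").Strings(",")         // 逗号分隔的上游列表
	enableDNSSEC := dns.Key("enableDNSSEC").MustBool(false) // 默认关闭

	fmt.Println("port:", port)
	fmt.Println("upstreamDNS:", upstream)
	fmt.Println("enableDNSSEC:", enableDNSSEC)
}
```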
Generated Vendored Symlink
+1
View File
@@ -0,0 +1 @@
../acorn/bin/acorn
+1
View File
@@ -0,0 +1 @@
../baseline-browser-mapping/dist/cli.cjs
Generated Vendored Symlink
+1
View File
@@ -0,0 +1 @@
../browserslist/cli.js
Generated Vendored Symlink
+1
View File
@@ -0,0 +1 @@
../cssesc/bin/cssesc
Generated Vendored Symlink
+1
View File
@@ -0,0 +1 @@
../envinfo/dist/cli.js
Generated Vendored Symlink
+1
View File
@@ -0,0 +1 @@
../flat/cli.js
Generated Vendored Symlink
+1
View File
@@ -0,0 +1 @@
../import-local/fixtures/cli.js
Generated Vendored Symlink
+1
View File
@@ -0,0 +1 @@
../nanoid/bin/nanoid.cjs
Generated Vendored Symlink
+1
View File
@@ -0,0 +1 @@
../which/bin/node-which
Generated Vendored Symlink
+1
View File
@@ -0,0 +1 @@
../resolve/bin/resolve
Generated Vendored Symlink
+1
View File
@@ -0,0 +1 @@
../semver/bin/semver.js
Generated Vendored Symlink
+1
View File
@@ -0,0 +1 @@
../svgo/bin/svgo
Generated Vendored Symlink
+1
View File
@@ -0,0 +1 @@
../terser/bin/terser
+1
View File
@@ -0,0 +1 @@
../update-browserslist-db/cli.js
Generated Vendored Symlink
+1
View File
@@ -0,0 +1 @@
../webpack/bin/webpack.js
Generated Vendored Symlink
+1
View File
@@ -0,0 +1 @@
../webpack-cli/bin/cli.js
+21
View File
@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2020 Roman Dvornov <rdvornov@gmail.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
+256
View File
@@ -0,0 +1,256 @@
# json-ext
[![NPM version](https://img.shields.io/npm/v/@discoveryjs/json-ext.svg)](https://www.npmjs.com/package/@discoveryjs/json-ext)
[![Build Status](https://github.com/discoveryjs/json-ext/actions/workflows/ci.yml/badge.svg)](https://github.com/discoveryjs/json-ext/actions/workflows/ci.yml)
[![Coverage Status](https://coveralls.io/repos/github/discoveryjs/json-ext/badge.svg?branch=master)](https://coveralls.io/github/discoveryjs/json-ext?)
[![NPM Downloads](https://img.shields.io/npm/dm/@discoveryjs/json-ext.svg)](https://www.npmjs.com/package/@discoveryjs/json-ext)
A set of utilities that extend the use of JSON. Designed to be fast and memory efficient
Features:
- [x] `parseChunked()` Parse JSON that arrives in chunks (e.g. an FS readable stream or a fetch response stream)
- [x] `stringifyStream()` Stringify stream (Node.js)
- [x] `stringifyInfo()` Get estimated size and other facts of JSON.stringify() without converting a value to string
- [ ] **TBD** Support for circular references
- [ ] **TBD** Binary representation [branch](https://github.com/discoveryjs/json-ext/tree/binary)
- [ ] **TBD** WHATWG [Streams](https://streams.spec.whatwg.org/) support
## Install
```bash
npm install @discoveryjs/json-ext
```
## API
- [parseChunked(chunkEmitter)](#parsechunkedchunkemitter)
- [stringifyStream(value[, replacer[, space]])](#stringifystreamvalue-replacer-space)
- [stringifyInfo(value[, replacer[, space[, options]]])](#stringifyinfovalue-replacer-space-options)
- [Options](#options)
- [async](#async)
- [continueOnCircular](#continueoncircular)
- [version](#version)
### parseChunked(chunkEmitter)
Works the same as [`JSON.parse()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse) but takes `chunkEmitter` instead of string and returns [Promise](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise).
> NOTE: `reviver` parameter is not supported yet, but will be added in next releases.
> NOTE: WHATWG streams aren't supported yet
When to use:
- It's required to avoid freezing the main thread during big JSON parsing, since this process can be distributed in time
- Huge JSON needs to be parsed (e.g. >500MB on Node.js)
- Needed to reduce memory pressure. `JSON.parse()` needs to receive the entire JSON before parsing it. With `parseChunked()` you may parse JSON as its first bytes arrive. This approach helps to avoid storing a huge string in memory at a single point in time, and the garbage collection that follows.
[Benchmark](https://github.com/discoveryjs/json-ext/tree/master/benchmarks#parse-chunked)
Usage:
```js
const { parseChunked } = require('@discoveryjs/json-ext');
// as a regular Promise
parseChunked(chunkEmitter)
.then(data => {
/* data is parsed JSON */
});
// using await (keep in mind that not every runtime has a support for top level await)
const data = await parseChunked(chunkEmitter);
```
Parameter `chunkEmitter` can be:
- [`ReadableStream`](https://nodejs.org/dist/latest-v14.x/docs/api/stream.html#stream_readable_streams) (Node.js only)
```js
const fs = require('fs');
const { parseChunked } = require('@discoveryjs/json-ext');
parseChunked(fs.createReadStream('path/to/file.json'))
```
- Generator, async generator or function that returns iterable (chunks). Chunk might be a `string`, `Uint8Array` or `Buffer` (Node.js only):
```js
const { parseChunked } = require('@discoveryjs/json-ext');
const encoder = new TextEncoder();
// generator
parseChunked(function*() {
yield '{ "hello":';
yield Buffer.from(' "wor'); // Node.js only
yield encoder.encode('ld" }'); // returns Uint8Array(5) [ 108, 100, 34, 32, 125 ]
});
// async generator
parseChunked(async function*() {
for await (const chunk of someAsyncSource) {
yield chunk;
}
});
// function that returns iterable
parseChunked(() => ['{ "hello":', ' "world"}'])
```
Using with [fetch()](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API):
```js
async function loadData(url) {
const response = await fetch(url);
const reader = response.body.getReader();
return parseChunked(async function*() {
while (true) {
const { done, value } = await reader.read();
if (done) {
break;
}
yield value;
}
});
}
loadData('https://example.com/data.json')
.then(data => {
/* data is parsed JSON */
})
```
### stringifyStream(value[, replacer[, space]])
Works the same as [`JSON.stringify()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify), but returns an instance of [`ReadableStream`](https://nodejs.org/dist/latest-v14.x/docs/api/stream.html#stream_readable_streams) instead of string.
> NOTE: WHATWG Streams aren't supported yet, so the function is available for Node.js only for now
Departs from JSON.stringify():
- Outputs `null` when `JSON.stringify()` returns `undefined` (since streams may not emit `undefined`)
- A promise is resolved and the resulting value is stringified as a regular value
- A stream in non-object mode is piped to the output as is
- A stream in object mode is piped to the output as an array of objects
When to use:
- Huge JSON needs to be generated (e.g. >500MB on Node.js)
- Needed to reduce memory pressure. `JSON.stringify()` needs to generate the entire JSON before sending or writing it anywhere. With `stringifyStream()` you may start sending the result as its first bytes appear. This approach helps to avoid storing a huge string in memory at a single point in time.
- The object being serialized contains Promises or Streams (see Usage for examples)
[Benchmark](https://github.com/discoveryjs/json-ext/tree/master/benchmarks#stream-stringifying)
Usage:
```js
const { stringifyStream } = require('@discoveryjs/json-ext');
// handle events
stringifyStream(data)
.on('data', chunk => console.log(chunk))
.on('error', error => console.error(error))
.on('finish', () => console.log('DONE!'));
// pipe into a stream
stringifyStream(data)
.pipe(writableStream);
```
Using Promise or ReadableStream in serializing object:
```js
const fs = require('fs');
const { stringifyStream } = require('@discoveryjs/json-ext');
// output will be
// {"name":"example","willSerializeResolvedValue":42,"fromFile":[1, 2, 3],"at":{"any":{"level":"promise!"}}}
stringifyStream({
name: 'example',
willSerializeResolvedValue: Promise.resolve(42),
fromFile: fs.createReadStream('path/to/file.json'), // suppose the file content is "[1, 2, 3]"; it'll be inserted as is
at: {
any: {
level: new Promise(resolve => setTimeout(() => resolve('promise!'), 100))
}
}
})
// in case several async requests are used in an object, it's preferred
// to put the fastest requests first, because in this case
stringifyStream({
foo: fetch('http://example.com/request_takes_2s').then(req => req.json()),
bar: fetch('http://example.com/request_takes_5s').then(req => req.json())
});
```
Using with [`WritableStream`](https://nodejs.org/dist/latest-v14.x/docs/api/stream.html#stream_writable_streams) (Node.js only):
```js
const fs = require('fs');
const { stringifyStream } = require('@discoveryjs/json-ext');
// pipe into a console
stringifyStream(data)
.pipe(process.stdout);
// pipe into a file
stringifyStream(data)
.pipe(fs.createWriteStream('path/to/file.json'));
// wrapping into a Promise
new Promise((resolve, reject) => {
stringifyStream(data)
.on('error', reject)
.pipe(stream)
.on('error', reject)
.on('finish', resolve);
});
```
### stringifyInfo(value[, replacer[, space[, options]]])
`value`, `replacer` and `space` arguments are the same as for `JSON.stringify()`.
Result is an object:
```js
{
minLength: Number, // minimal bytes when the value is stringified
circular: [...], // list of circular references
duplicate: [...], // list of objects that occur more than once
async: [...] // list of async values, i.e. promises and streams
}
```
Example:
```js
const { stringifyInfo } = require('@discoveryjs/json-ext');
console.log(
stringifyInfo({ test: true }).minLength
);
// > 13
// that equals '{"test":true}'.length
```
#### Options
##### async
Type: `Boolean`
Default: `false`
Collect async values (promises and streams) or not.
##### continueOnCircular
Type: `Boolean`
Default: `false`
Whether to continue collecting info for a value after a circular reference is found. Setting the option to `true` allows finding all circular references.
### version
The version of library, e.g. `"0.3.1"`.
## License
MIT
+791
View File
@@ -0,0 +1,791 @@
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
typeof define === 'function' && define.amd ? define(factory) :
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.jsonExt = factory());
})(this, (function () { 'use strict';
var version = "0.5.7";
const PrimitiveType = 1;
const ObjectType = 2;
const ArrayType = 3;
const PromiseType = 4;
const ReadableStringType = 5;
const ReadableObjectType = 6;
// https://tc39.es/ecma262/#table-json-single-character-escapes
const escapableCharCodeSubstitution$1 = { // JSON Single Character Escape Sequences
0x08: '\\b',
0x09: '\\t',
0x0a: '\\n',
0x0c: '\\f',
0x0d: '\\r',
0x22: '\\\"',
0x5c: '\\\\'
};
function isLeadingSurrogate$1(code) {
return code >= 0xD800 && code <= 0xDBFF;
}
function isTrailingSurrogate$1(code) {
return code >= 0xDC00 && code <= 0xDFFF;
}
function isReadableStream$1(value) {
return (
typeof value.pipe === 'function' &&
typeof value._read === 'function' &&
typeof value._readableState === 'object' && value._readableState !== null
);
}
function replaceValue$1(holder, key, value, replacer) {
if (value && typeof value.toJSON === 'function') {
value = value.toJSON();
}
if (replacer !== null) {
value = replacer.call(holder, String(key), value);
}
switch (typeof value) {
case 'function':
case 'symbol':
value = undefined;
break;
case 'object':
if (value !== null) {
const cls = value.constructor;
if (cls === String || cls === Number || cls === Boolean) {
value = value.valueOf();
}
}
break;
}
return value;
}
function getTypeNative$1(value) {
if (value === null || typeof value !== 'object') {
return PrimitiveType;
}
if (Array.isArray(value)) {
return ArrayType;
}
return ObjectType;
}
function getTypeAsync$1(value) {
if (value === null || typeof value !== 'object') {
return PrimitiveType;
}
if (typeof value.then === 'function') {
return PromiseType;
}
if (isReadableStream$1(value)) {
return value._readableState.objectMode ? ReadableObjectType : ReadableStringType;
}
if (Array.isArray(value)) {
return ArrayType;
}
return ObjectType;
}
function normalizeReplacer$1(replacer) {
if (typeof replacer === 'function') {
return replacer;
}
if (Array.isArray(replacer)) {
const allowlist = new Set(replacer
.map(item => {
const cls = item && item.constructor;
return cls === String || cls === Number ? String(item) : null;
})
.filter(item => typeof item === 'string')
);
return [...allowlist];
}
return null;
}
function normalizeSpace$1(space) {
if (typeof space === 'number') {
if (!Number.isFinite(space) || space < 1) {
return false;
}
return ' '.repeat(Math.min(space, 10));
}
if (typeof space === 'string') {
return space.slice(0, 10) || false;
}
return false;
}
var utils = {
escapableCharCodeSubstitution: escapableCharCodeSubstitution$1,
isLeadingSurrogate: isLeadingSurrogate$1,
isTrailingSurrogate: isTrailingSurrogate$1,
type: {
PRIMITIVE: PrimitiveType,
PROMISE: PromiseType,
ARRAY: ArrayType,
OBJECT: ObjectType,
STRING_STREAM: ReadableStringType,
OBJECT_STREAM: ReadableObjectType
},
isReadableStream: isReadableStream$1,
replaceValue: replaceValue$1,
getTypeNative: getTypeNative$1,
getTypeAsync: getTypeAsync$1,
normalizeReplacer: normalizeReplacer$1,
normalizeSpace: normalizeSpace$1
};
const {
normalizeReplacer,
normalizeSpace,
replaceValue,
getTypeNative,
getTypeAsync,
isLeadingSurrogate,
isTrailingSurrogate,
escapableCharCodeSubstitution,
type: {
PRIMITIVE,
OBJECT,
ARRAY,
PROMISE,
STRING_STREAM,
OBJECT_STREAM
}
} = utils;
const charLength2048 = Array.from({ length: 2048 }).map((_, code) => {
if (escapableCharCodeSubstitution.hasOwnProperty(code)) {
return 2; // \X
}
if (code < 0x20) {
return 6; // \uXXXX
}
return code < 128 ? 1 : 2; // UTF8 bytes
});
function stringLength(str) {
let len = 0;
let prevLeadingSurrogate = false;
for (let i = 0; i < str.length; i++) {
const code = str.charCodeAt(i);
if (code < 2048) {
len += charLength2048[code];
} else if (isLeadingSurrogate(code)) {
len += 6; // \uXXXX since no pair with trailing surrogate yet
prevLeadingSurrogate = true;
continue;
} else if (isTrailingSurrogate(code)) {
len = prevLeadingSurrogate
? len - 2 // surrogate pair (4 bytes); since we counted the previous leading surrogate as 6 bytes, subtract 2 bytes
: len + 6; // \uXXXX
} else {
len += 3; // code >= 2048 is 3 bytes length for UTF8
}
prevLeadingSurrogate = false;
}
return len + 2; // +2 for quotes
}
function primitiveLength(value) {
switch (typeof value) {
case 'string':
return stringLength(value);
case 'number':
return Number.isFinite(value) ? String(value).length : 4 /* null */;
case 'boolean':
return value ? 4 /* true */ : 5 /* false */;
case 'undefined':
case 'object':
return 4; /* null */
default:
return 0;
}
}
function spaceLength(space) {
space = normalizeSpace(space);
return typeof space === 'string' ? space.length : 0;
}
var stringifyInfo = function jsonStringifyInfo(value, replacer, space, options) {
function walk(holder, key, value) {
if (stop) {
return;
}
value = replaceValue(holder, key, value, replacer);
let type = getType(value);
// check for circular structure
if (type !== PRIMITIVE && stack.has(value)) {
circular.add(value);
length += 4; // treat as null
if (!options.continueOnCircular) {
stop = true;
}
return;
}
switch (type) {
case PRIMITIVE:
if (value !== undefined || Array.isArray(holder)) {
length += primitiveLength(value);
} else if (holder === root) {
length += 9; // FIXME: that's the length of undefined, should we normalize behaviour to convert it to null?
}
break;
case OBJECT: {
if (visited.has(value)) {
duplicate.add(value);
length += visited.get(value);
break;
}
const valueLength = length;
let entries = 0;
length += 2; // {}
stack.add(value);
for (const key in value) {
if (hasOwnProperty.call(value, key) && (allowlist === null || allowlist.has(key))) {
const prevLength = length;
walk(value, key, value[key]);
if (prevLength !== length) {
// value is printed
length += stringLength(key) + 1; // "key":
entries++;
}
}
}
if (entries > 1) {
length += entries - 1; // commas
}
stack.delete(value);
if (space > 0 && entries > 0) {
length += (1 + (stack.size + 1) * space + 1) * entries; // for each key-value: \n{space}
length += 1 + stack.size * space; // for }
}
visited.set(value, length - valueLength);
break;
}
case ARRAY: {
if (visited.has(value)) {
duplicate.add(value);
length += visited.get(value);
break;
}
const valueLength = length;
length += 2; // []
stack.add(value);
for (let i = 0; i < value.length; i++) {
walk(value, i, value[i]);
}
if (value.length > 1) {
length += value.length - 1; // commas
}
stack.delete(value);
if (space > 0 && value.length > 0) {
length += (1 + (stack.size + 1) * space) * value.length; // for each element: \n{space}
length += 1 + stack.size * space; // for ]
}
visited.set(value, length - valueLength);
break;
}
case PROMISE:
case STRING_STREAM:
async.add(value);
break;
case OBJECT_STREAM:
length += 2; // []
async.add(value);
break;
}
}
let allowlist = null;
replacer = normalizeReplacer(replacer);
if (Array.isArray(replacer)) {
allowlist = new Set(replacer);
replacer = null;
}
space = spaceLength(space);
options = options || {};
const visited = new Map();
const stack = new Set();
const duplicate = new Set();
const circular = new Set();
const async = new Set();
const getType = options.async ? getTypeAsync : getTypeNative;
const root = { '': value };
let stop = false;
let length = 0;
walk(root, '', value);
return {
minLength: isNaN(length) ? Infinity : length,
circular: [...circular],
duplicate: [...duplicate],
async: [...async]
};
};
var stringifyStreamBrowser = () => {
throw new Error('Method is not supported');
};
var textDecoderBrowser = TextDecoder;
const { isReadableStream } = utils;
const STACK_OBJECT = 1;
const STACK_ARRAY = 2;
const decoder = new textDecoderBrowser();
function isObject(value) {
return value !== null && typeof value === 'object';
}
function adjustPosition(error, parser) {
if (error.name === 'SyntaxError' && parser.jsonParseOffset) {
error.message = error.message.replace(/at position (\d+)/, (_, pos) =>
'at position ' + (Number(pos) + parser.jsonParseOffset)
);
}
return error;
}
function append(array, elements) {
// Note: Avoid using array.push(...elements) since it may lead to
// "RangeError: Maximum call stack size exceeded" for long arrays
const initialLength = array.length;
array.length += elements.length;
for (let i = 0; i < elements.length; i++) {
array[initialLength + i] = elements[i];
}
}
var parseChunked = function(chunkEmitter) {
let parser = new ChunkParser();
if (isObject(chunkEmitter) && isReadableStream(chunkEmitter)) {
return new Promise((resolve, reject) => {
chunkEmitter
.on('data', chunk => {
try {
parser.push(chunk);
} catch (e) {
reject(adjustPosition(e, parser));
parser = null;
}
})
.on('error', (e) => {
parser = null;
reject(e);
})
.on('end', () => {
try {
resolve(parser.finish());
} catch (e) {
reject(adjustPosition(e, parser));
} finally {
parser = null;
}
});
});
}
if (typeof chunkEmitter === 'function') {
const iterator = chunkEmitter();
if (isObject(iterator) && (Symbol.iterator in iterator || Symbol.asyncIterator in iterator)) {
return new Promise(async (resolve, reject) => {
try {
for await (const chunk of iterator) {
parser.push(chunk);
}
resolve(parser.finish());
} catch (e) {
reject(adjustPosition(e, parser));
} finally {
parser = null;
}
});
}
}
throw new Error(
'Chunk emitter should be readable stream, generator, ' +
'async generator or function returning an iterable object'
);
};
class ChunkParser {
constructor() {
this.value = undefined;
this.valueStack = null;
this.stack = new Array(100);
this.lastFlushDepth = 0;
this.flushDepth = 0;
this.stateString = false;
this.stateStringEscape = false;
this.pendingByteSeq = null;
this.pendingChunk = null;
this.chunkOffset = 0;
this.jsonParseOffset = 0;
}
parseAndAppend(fragment, wrap) {
// Append new entries or elements
if (this.stack[this.lastFlushDepth - 1] === STACK_OBJECT) {
if (wrap) {
this.jsonParseOffset--;
fragment = '{' + fragment + '}';
}
Object.assign(this.valueStack.value, JSON.parse(fragment));
} else {
if (wrap) {
this.jsonParseOffset--;
fragment = '[' + fragment + ']';
}
append(this.valueStack.value, JSON.parse(fragment));
}
}
prepareAddition(fragment) {
const { value } = this.valueStack;
const expectComma = Array.isArray(value)
? value.length !== 0
: Object.keys(value).length !== 0;
if (expectComma) {
// Skip a comma at the beginning of fragment, otherwise it would
// fail to parse
if (fragment[0] === ',') {
this.jsonParseOffset++;
return fragment.slice(1);
}
// When the value (an object or array) is not empty and a fragment
// doesn't start with a comma, the only valid fragment start
// is a closing bracket. If it's not, a prefix is added to make
// parsing fail. Otherwise, the sequence of chunks could be parsed
// successfully although it should not be, e.g. ["[{}", "{}]"]
if (fragment[0] !== '}' && fragment[0] !== ']') {
this.jsonParseOffset -= 3;
return '[[]' + fragment;
}
}
return fragment;
}
flush(chunk, start, end) {
let fragment = chunk.slice(start, end);
// Save position correction for an error in JSON.parse() if any
this.jsonParseOffset = this.chunkOffset + start;
// Prepend pending chunk if any
if (this.pendingChunk !== null) {
fragment = this.pendingChunk + fragment;
this.jsonParseOffset -= this.pendingChunk.length;
this.pendingChunk = null;
}
if (this.flushDepth === this.lastFlushDepth) {
// Depth didn't change, so it's a root value or an entry/element set
if (this.flushDepth > 0) {
this.parseAndAppend(this.prepareAddition(fragment), true);
} else {
// That's an entire value on a top level
this.value = JSON.parse(fragment);
this.valueStack = {
value: this.value,
prev: null
};
}
} else if (this.flushDepth > this.lastFlushDepth) {
// Add missed closing brackets/parentheses
for (let i = this.flushDepth - 1; i >= this.lastFlushDepth; i--) {
fragment += this.stack[i] === STACK_OBJECT ? '}' : ']';
}
if (this.lastFlushDepth === 0) {
// That's a root value
this.value = JSON.parse(fragment);
this.valueStack = {
value: this.value,
prev: null
};
} else {
this.parseAndAppend(this.prepareAddition(fragment), true);
}
// Move down to the depths to the last object/array, which is current now
for (let i = this.lastFlushDepth || 1; i < this.flushDepth; i++) {
let value = this.valueStack.value;
if (this.stack[i - 1] === STACK_OBJECT) {
// find last entry
let key;
// eslint-disable-next-line curly
for (key in value);
value = value[key];
} else {
// last element
value = value[value.length - 1];
}
this.valueStack = {
value,
prev: this.valueStack
};
}
} else /* this.flushDepth < this.lastFlushDepth */ {
fragment = this.prepareAddition(fragment);
// Add missed opening brackets/parentheses
for (let i = this.lastFlushDepth - 1; i >= this.flushDepth; i--) {
this.jsonParseOffset--;
fragment = (this.stack[i] === STACK_OBJECT ? '{' : '[') + fragment;
}
this.parseAndAppend(fragment, false);
for (let i = this.lastFlushDepth - 1; i >= this.flushDepth; i--) {
this.valueStack = this.valueStack.prev;
}
}
this.lastFlushDepth = this.flushDepth;
}
push(chunk) {
if (typeof chunk !== 'string') {
// Suppose chunk is Buffer or Uint8Array
// Prepend uncompleted byte sequence if any
if (this.pendingByteSeq !== null) {
const origRawChunk = chunk;
chunk = new Uint8Array(this.pendingByteSeq.length + origRawChunk.length);
chunk.set(this.pendingByteSeq);
chunk.set(origRawChunk, this.pendingByteSeq.length);
this.pendingByteSeq = null;
}
// In case Buffer/Uint8Array, an input is encoded in UTF8
// Look for parts of an incomplete UTF8 symbol at the end
// This makes sense only if we expect more chunks and the last char is not multi-byte
if (chunk[chunk.length - 1] > 127) {
for (let seqLength = 0; seqLength < chunk.length; seqLength++) {
const byte = chunk[chunk.length - 1 - seqLength];
// 10xxxxxx - 2nd, 3rd or 4th byte
// 110xxxxx first byte of 2-byte sequence
// 1110xxxx - first byte of 3-byte sequence
// 11110xxx - first byte of 4-byte sequence
if (byte >> 6 === 3) {
seqLength++;
// If the sequence is really incomplete, then preserve it
// for the next chunk and cut it off from the current chunk
if ((seqLength !== 4 && byte >> 3 === 0b11110) ||
(seqLength !== 3 && byte >> 4 === 0b1110) ||
(seqLength !== 2 && byte >> 5 === 0b110)) {
this.pendingByteSeq = chunk.slice(chunk.length - seqLength);
chunk = chunk.slice(0, -seqLength);
}
break;
}
}
}
// Convert chunk to a string, since single decode per chunk
// is much more effective than decoding multiple small substrings
chunk = decoder.decode(chunk);
}
const chunkLength = chunk.length;
let lastFlushPoint = 0;
let flushPoint = 0;
// Main scan loop
scan: for (let i = 0; i < chunkLength; i++) {
if (this.stateString) {
for (; i < chunkLength; i++) {
if (this.stateStringEscape) {
this.stateStringEscape = false;
} else {
switch (chunk.charCodeAt(i)) {
case 0x22: /* " */
this.stateString = false;
continue scan;
case 0x5C: /* \ */
this.stateStringEscape = true;
}
}
}
break;
}
switch (chunk.charCodeAt(i)) {
case 0x22: /* " */
this.stateString = true;
this.stateStringEscape = false;
break;
case 0x2C: /* , */
flushPoint = i;
break;
case 0x7B: /* { */
// Open an object
flushPoint = i + 1;
this.stack[this.flushDepth++] = STACK_OBJECT;
break;
case 0x5B: /* [ */
// Open an array
flushPoint = i + 1;
this.stack[this.flushDepth++] = STACK_ARRAY;
break;
case 0x5D: /* ] */
case 0x7D: /* } */
// Close an object or array
flushPoint = i + 1;
this.flushDepth--;
if (this.flushDepth < this.lastFlushDepth) {
this.flush(chunk, lastFlushPoint, flushPoint);
lastFlushPoint = flushPoint;
}
break;
case 0x09: /* \t */
case 0x0A: /* \n */
case 0x0D: /* \r */
case 0x20: /* space */
// Move the points forward when they point at the current position and it's whitespace
if (lastFlushPoint === i) {
lastFlushPoint++;
}
if (flushPoint === i) {
flushPoint++;
}
break;
}
}
if (flushPoint > lastFlushPoint) {
this.flush(chunk, lastFlushPoint, flushPoint);
}
// Produce pendingChunk if something left
if (flushPoint < chunkLength) {
if (this.pendingChunk !== null) {
// When there is already a pending chunk, no flush has happened,
// so append the entire chunk to the pending one
this.pendingChunk += chunk;
} else {
// Create a pending chunk; it will start with non-whitespace since
// flushPoint was moved past any whitespace during the scan
this.pendingChunk = chunk.slice(flushPoint, chunkLength);
}
}
this.chunkOffset += chunkLength;
}
finish() {
if (this.pendingChunk !== null) {
this.flush('', 0, 0);
this.pendingChunk = null;
}
return this.value;
}
}
var src = {
version: version,
stringifyInfo: stringifyInfo,
stringifyStream: stringifyStreamBrowser,
parseChunked: parseChunked
};
return src;
}));
File diff suppressed because one or more lines are too long
+1
@@ -0,0 +1 @@
module.exports = "0.5.7";
+31
@@ -0,0 +1,31 @@
declare module '@discoveryjs/json-ext' {
import { Readable } from 'stream';
type TReplacer =
| ((this: any, key: string, value: any) => any)
| string[]
| number[]
| null;
type TSpace = string | number | null;
type TChunk = string | Buffer | Uint8Array;
export function parseChunked(input: Readable): Promise<any>;
export function parseChunked(input: () => (Iterable<TChunk> | AsyncIterable<TChunk>)): Promise<any>;
export function stringifyStream(value: any, replacer?: TReplacer, space?: TSpace): Readable;
export function stringifyInfo(
value: any,
replacer?: TReplacer,
space?: TSpace,
options?: {
async?: boolean;
continueOnCircular?: boolean;
}
): {
minLength: number;
circular: any[];
duplicate: any[];
async: any[];
};
}
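These typings cover the whole public surface of the package. As a quick orientation, here is a minimal sketch of wiring the two streaming entry points together; the file paths and data are hypothetical, and error handling is kept to the bare minimum:

```ts
import { createReadStream, createWriteStream } from 'fs';
import { parseChunked, stringifyStream } from '@discoveryjs/json-ext';

// Hypothetical helper: read a large JSON file and write it back re-indented,
// without ever holding the full JSON text in memory as one string.
async function reindentJson(inputPath: string, outputPath: string): Promise<void> {
  // parseChunked() consumes the readable stream chunk by chunk and resolves
  // with the assembled value once the stream ends.
  const value = await parseChunked(createReadStream(inputPath));

  // stringifyStream() returns a Readable that emits the JSON text incrementally.
  await new Promise<void>((resolve, reject) => {
    stringifyStream(value, null, 2)
      .pipe(createWriteStream(outputPath))
      .on('finish', resolve)
      .on('error', reject);
  });
}
```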
+93
@@ -0,0 +1,93 @@
{
"_from": "@discoveryjs/json-ext@^0.5.0",
"_id": "@discoveryjs/json-ext@0.5.7",
"_inBundle": false,
"_integrity": "sha512-dBVuXR082gk3jsFp7Rd/JI4kytwGHecnCoTtXFb7DB6CNHp4rg5k1bhg0nWdLGLnOV71lmDzGQaLMy8iPLY0pw==",
"_location": "/@discoveryjs/json-ext",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "@discoveryjs/json-ext@^0.5.0",
"name": "@discoveryjs/json-ext",
"escapedName": "@discoveryjs%2fjson-ext",
"scope": "@discoveryjs",
"rawSpec": "^0.5.0",
"saveSpec": null,
"fetchSpec": "^0.5.0"
},
"_requiredBy": [
"/webpack-cli"
],
"_resolved": "https://registry.npmjs.org/@discoveryjs/json-ext/-/json-ext-0.5.7.tgz",
"_shasum": "1d572bfbbe14b7704e0ba0f39b74815b84870d70",
"_spec": "@discoveryjs/json-ext@^0.5.0",
"_where": "/root/dns/node_modules/webpack-cli",
"author": {
"name": "Roman Dvornov",
"email": "rdvornov@gmail.com",
"url": "https://github.com/lahmatiy"
},
"browser": {
"./src/stringify-stream.js": "./src/stringify-stream-browser.js",
"./src/text-decoder.js": "./src/text-decoder-browser.js",
"./src/version.js": "./dist/version.js"
},
"bugs": {
"url": "https://github.com/discoveryjs/json-ext/issues"
},
"bundleDependencies": false,
"deprecated": false,
"description": "A set of utilities that extend the use of JSON",
"devDependencies": {
"@rollup/plugin-commonjs": "^15.1.0",
"@rollup/plugin-json": "^4.1.0",
"@rollup/plugin-node-resolve": "^9.0.0",
"c8": "^7.10.0",
"chalk": "^4.1.0",
"cross-env": "^7.0.3",
"eslint": "^8.10.0",
"mocha": "^8.4.0",
"rollup": "^2.28.2",
"rollup-plugin-terser": "^7.0.2"
},
"engines": {
"node": ">=10.0.0"
},
"files": [
"dist",
"src",
"index.d.ts"
],
"homepage": "https://github.com/discoveryjs/json-ext#readme",
"keywords": [
"json",
"utils",
"stream",
"async",
"promise",
"stringify",
"info"
],
"license": "MIT",
"main": "./src/index",
"name": "@discoveryjs/json-ext",
"repository": {
"type": "git",
"url": "git+https://github.com/discoveryjs/json-ext.git"
},
"scripts": {
"build": "rollup --config",
"build-and-test": "npm run build && npm run test:dist",
"coverage": "c8 --reporter=lcovonly npm test",
"lint": "eslint src test",
"lint-and-test": "npm run lint && npm test",
"prepublishOnly": "npm run lint && npm test && npm run build-and-test",
"test": "mocha --reporter progress",
"test:all": "npm run test:src && npm run test:dist",
"test:dist": "cross-env MODE=dist npm test && cross-env MODE=dist-min npm test",
"test:src": "npm test"
},
"types": "./index.d.ts",
"version": "0.5.7"
}
+6
@@ -0,0 +1,6 @@
module.exports = {
version: require('./version'),
stringifyInfo: require('./stringify-info'),
stringifyStream: require('./stringify-stream'),
parseChunked: require('./parse-chunked')
};
+384
@@ -0,0 +1,384 @@
const { isReadableStream } = require('./utils');
const TextDecoder = require('./text-decoder');
const STACK_OBJECT = 1;
const STACK_ARRAY = 2;
const decoder = new TextDecoder();
function isObject(value) {
return value !== null && typeof value === 'object';
}
function adjustPosition(error, parser) {
if (error.name === 'SyntaxError' && parser.jsonParseOffset) {
error.message = error.message.replace(/at position (\d+)/, (_, pos) =>
'at position ' + (Number(pos) + parser.jsonParseOffset)
);
}
return error;
}
function append(array, elements) {
// Note: Avoid array.push(...elements) since it may lead to
// "RangeError: Maximum call stack size exceeded" for long arrays
const initialLength = array.length;
array.length += elements.length;
for (let i = 0; i < elements.length; i++) {
array[initialLength + i] = elements[i];
}
}
module.exports = function(chunkEmitter) {
let parser = new ChunkParser();
if (isObject(chunkEmitter) && isReadableStream(chunkEmitter)) {
return new Promise((resolve, reject) => {
chunkEmitter
.on('data', chunk => {
try {
parser.push(chunk);
} catch (e) {
reject(adjustPosition(e, parser));
parser = null;
}
})
.on('error', (e) => {
parser = null;
reject(e);
})
.on('end', () => {
try {
resolve(parser.finish());
} catch (e) {
reject(adjustPosition(e, parser));
} finally {
parser = null;
}
});
});
}
if (typeof chunkEmitter === 'function') {
const iterator = chunkEmitter();
if (isObject(iterator) && (Symbol.iterator in iterator || Symbol.asyncIterator in iterator)) {
return new Promise(async (resolve, reject) => {
try {
for await (const chunk of iterator) {
parser.push(chunk);
}
resolve(parser.finish());
} catch (e) {
reject(adjustPosition(e, parser));
} finally {
parser = null;
}
});
}
}
throw new Error(
'Chunk emitter should be readable stream, generator, ' +
'async generator or function returning an iterable object'
);
};
class ChunkParser {
constructor() {
this.value = undefined;
this.valueStack = null;
this.stack = new Array(100);
this.lastFlushDepth = 0;
this.flushDepth = 0;
this.stateString = false;
this.stateStringEscape = false;
this.pendingByteSeq = null;
this.pendingChunk = null;
this.chunkOffset = 0;
this.jsonParseOffset = 0;
}
parseAndAppend(fragment, wrap) {
// Append new entries or elements
if (this.stack[this.lastFlushDepth - 1] === STACK_OBJECT) {
if (wrap) {
this.jsonParseOffset--;
fragment = '{' + fragment + '}';
}
Object.assign(this.valueStack.value, JSON.parse(fragment));
} else {
if (wrap) {
this.jsonParseOffset--;
fragment = '[' + fragment + ']';
}
append(this.valueStack.value, JSON.parse(fragment));
}
}
prepareAddition(fragment) {
const { value } = this.valueStack;
const expectComma = Array.isArray(value)
? value.length !== 0
: Object.keys(value).length !== 0;
if (expectComma) {
// Skip a comma at the beginning of fragment, otherwise it would
// fail to parse
if (fragment[0] === ',') {
this.jsonParseOffset++;
return fragment.slice(1);
}
// When the value (an object or array) is not empty and the fragment
// doesn't start with a comma, the only valid fragment start
// is a closing bracket. If it's not, a prefix is added to force a
// parse failure. Otherwise the sequence of chunks could be parsed
// successfully even though it shouldn't, e.g. ["[{}", "{}]"]
if (fragment[0] !== '}' && fragment[0] !== ']') {
this.jsonParseOffset -= 3;
return '[[]' + fragment;
}
}
return fragment;
}
flush(chunk, start, end) {
let fragment = chunk.slice(start, end);
// Save a position correction for a potential error in JSON.parse()
this.jsonParseOffset = this.chunkOffset + start;
// Prepend pending chunk if any
if (this.pendingChunk !== null) {
fragment = this.pendingChunk + fragment;
this.jsonParseOffset -= this.pendingChunk.length;
this.pendingChunk = null;
}
if (this.flushDepth === this.lastFlushDepth) {
// Depth didn't change, so it's a root value or an entry/element set
if (this.flushDepth > 0) {
this.parseAndAppend(this.prepareAddition(fragment), true);
} else {
// That's an entire value on a top level
this.value = JSON.parse(fragment);
this.valueStack = {
value: this.value,
prev: null
};
}
} else if (this.flushDepth > this.lastFlushDepth) {
// Add the missing closing braces/brackets
for (let i = this.flushDepth - 1; i >= this.lastFlushDepth; i--) {
fragment += this.stack[i] === STACK_OBJECT ? '}' : ']';
}
if (this.lastFlushDepth === 0) {
// That's a root value
this.value = JSON.parse(fragment);
this.valueStack = {
value: this.value,
prev: null
};
} else {
this.parseAndAppend(this.prepareAddition(fragment), true);
}
// Walk down the stack to the last object/array, which is now the current one
for (let i = this.lastFlushDepth || 1; i < this.flushDepth; i++) {
let value = this.valueStack.value;
if (this.stack[i - 1] === STACK_OBJECT) {
// find last entry
let key;
// eslint-disable-next-line curly
for (key in value);
value = value[key];
} else {
// last element
value = value[value.length - 1];
}
this.valueStack = {
value,
prev: this.valueStack
};
}
} else /* this.flushDepth < this.lastFlushDepth */ {
fragment = this.prepareAddition(fragment);
// Add the missing opening braces/brackets
for (let i = this.lastFlushDepth - 1; i >= this.flushDepth; i--) {
this.jsonParseOffset--;
fragment = (this.stack[i] === STACK_OBJECT ? '{' : '[') + fragment;
}
this.parseAndAppend(fragment, false);
for (let i = this.lastFlushDepth - 1; i >= this.flushDepth; i--) {
this.valueStack = this.valueStack.prev;
}
}
this.lastFlushDepth = this.flushDepth;
}
push(chunk) {
if (typeof chunk !== 'string') {
// Suppose chunk is Buffer or Uint8Array
// Prepend uncompleted byte sequence if any
if (this.pendingByteSeq !== null) {
const origRawChunk = chunk;
chunk = new Uint8Array(this.pendingByteSeq.length + origRawChunk.length);
chunk.set(this.pendingByteSeq);
chunk.set(origRawChunk, this.pendingByteSeq.length);
this.pendingByteSeq = null;
}
// A Buffer/Uint8Array input is expected to be UTF-8 encoded.
// Look for an incomplete UTF-8 sequence at the end of the chunk;
// this only matters when more chunks are expected and the chunk may end mid-way through a multi-byte character
if (chunk[chunk.length - 1] > 127) {
for (let seqLength = 0; seqLength < chunk.length; seqLength++) {
const byte = chunk[chunk.length - 1 - seqLength];
// 10xxxxxx - 2nd, 3rd or 4th byte
// 110xxxxx - first byte of 2-byte sequence
// 1110xxxx - first byte of 3-byte sequence
// 11110xxx - first byte of 4-byte sequence
if (byte >> 6 === 3) {
seqLength++;
// If the sequence is really incomplete, preserve it
// for the next chunk and cut it off from the current chunk
if ((seqLength !== 4 && byte >> 3 === 0b11110) ||
(seqLength !== 3 && byte >> 4 === 0b1110) ||
(seqLength !== 2 && byte >> 5 === 0b110)) {
this.pendingByteSeq = chunk.slice(chunk.length - seqLength);
chunk = chunk.slice(0, -seqLength);
}
break;
}
}
}
// Convert the chunk to a string, since a single decode per chunk
// is much more efficient than decoding many small substrings
chunk = decoder.decode(chunk);
}
const chunkLength = chunk.length;
let lastFlushPoint = 0;
let flushPoint = 0;
// Main scan loop
scan: for (let i = 0; i < chunkLength; i++) {
if (this.stateString) {
for (; i < chunkLength; i++) {
if (this.stateStringEscape) {
this.stateStringEscape = false;
} else {
switch (chunk.charCodeAt(i)) {
case 0x22: /* " */
this.stateString = false;
continue scan;
case 0x5C: /* \ */
this.stateStringEscape = true;
}
}
}
break;
}
switch (chunk.charCodeAt(i)) {
case 0x22: /* " */
this.stateString = true;
this.stateStringEscape = false;
break;
case 0x2C: /* , */
flushPoint = i;
break;
case 0x7B: /* { */
// Open an object
flushPoint = i + 1;
this.stack[this.flushDepth++] = STACK_OBJECT;
break;
case 0x5B: /* [ */
// Open an array
flushPoint = i + 1;
this.stack[this.flushDepth++] = STACK_ARRAY;
break;
case 0x5D: /* ] */
case 0x7D: /* } */
// Close an object or array
flushPoint = i + 1;
this.flushDepth--;
if (this.flushDepth < this.lastFlushDepth) {
this.flush(chunk, lastFlushPoint, flushPoint);
lastFlushPoint = flushPoint;
}
break;
case 0x09: /* \t */
case 0x0A: /* \n */
case 0x0D: /* \r */
case 0x20: /* space */
// Move the points forward when they point at the current position and it's whitespace
if (lastFlushPoint === i) {
lastFlushPoint++;
}
if (flushPoint === i) {
flushPoint++;
}
break;
}
}
if (flushPoint > lastFlushPoint) {
this.flush(chunk, lastFlushPoint, flushPoint);
}
// Produce pendingChunk if something left
if (flushPoint < chunkLength) {
if (this.pendingChunk !== null) {
// When there is already a pending chunk, no flush has happened,
// so append the entire chunk to the pending one
this.pendingChunk += chunk;
} else {
// Create a pending chunk; it will start with non-whitespace since
// flushPoint was moved past any whitespace during the scan
this.pendingChunk = chunk.slice(flushPoint, chunkLength);
}
}
this.chunkOffset += chunkLength;
}
finish() {
if (this.pendingChunk !== null) {
this.flush('', 0, 0);
this.pendingChunk = null;
}
return this.value;
}
};
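As the dispatcher at the top of this file shows, besides readable streams parseChunked() also accepts a function returning an iterable or async iterable of chunks. A small sketch under that assumption (the yielded strings are arbitrary):

```ts
import { parseChunked } from '@discoveryjs/json-ext';

async function main(): Promise<void> {
  // The chunk emitter is called once; every yielded string (or Buffer/Uint8Array)
  // is pushed into the ChunkParser above, and finish() produces the final value.
  const result = await parseChunked(async function* () {
    yield '{"answer"';
    yield ': 42, "items": [1, 2';
    yield ', 3]}';
  });

  console.log(result); // { answer: 42, items: [ 1, 2, 3 ] }
}

main();
```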
+231
@@ -0,0 +1,231 @@
const {
normalizeReplacer,
normalizeSpace,
replaceValue,
getTypeNative,
getTypeAsync,
isLeadingSurrogate,
isTrailingSurrogate,
escapableCharCodeSubstitution,
type: {
PRIMITIVE,
OBJECT,
ARRAY,
PROMISE,
STRING_STREAM,
OBJECT_STREAM
}
} = require('./utils');
const charLength2048 = Array.from({ length: 2048 }).map((_, code) => {
if (escapableCharCodeSubstitution.hasOwnProperty(code)) {
return 2; // \X
}
if (code < 0x20) {
return 6; // \uXXXX
}
return code < 128 ? 1 : 2; // UTF8 bytes
});
function stringLength(str) {
let len = 0;
let prevLeadingSurrogate = false;
for (let i = 0; i < str.length; i++) {
const code = str.charCodeAt(i);
if (code < 2048) {
len += charLength2048[code];
} else if (isLeadingSurrogate(code)) {
len += 6; // \uXXXX since no pair with trailing surrogate yet
prevLeadingSurrogate = true;
continue;
} else if (isTrailingSurrogate(code)) {
len = prevLeadingSurrogate
? len - 2 // surrogate pair (4 bytes); the previous leading surrogate was counted as 6 bytes, so subtract 2 bytes
: len + 6; // \uXXXX
} else {
len += 3; // code >= 2048 is 3 bytes length for UTF8
}
prevLeadingSurrogate = false;
}
return len + 2; // +2 for quotes
}
function primitiveLength(value) {
switch (typeof value) {
case 'string':
return stringLength(value);
case 'number':
return Number.isFinite(value) ? String(value).length : 4 /* null */;
case 'boolean':
return value ? 4 /* true */ : 5 /* false */;
case 'undefined':
case 'object':
return 4; /* null */
default:
return 0;
}
}
function spaceLength(space) {
space = normalizeSpace(space);
return typeof space === 'string' ? space.length : 0;
}
module.exports = function jsonStringifyInfo(value, replacer, space, options) {
function walk(holder, key, value) {
if (stop) {
return;
}
value = replaceValue(holder, key, value, replacer);
let type = getType(value);
// check for circular structure
if (type !== PRIMITIVE && stack.has(value)) {
circular.add(value);
length += 4; // treat as null
if (!options.continueOnCircular) {
stop = true;
}
return;
}
switch (type) {
case PRIMITIVE:
if (value !== undefined || Array.isArray(holder)) {
length += primitiveLength(value);
} else if (holder === root) {
length += 9; // FIXME: that's the length of undefined, should we normalize behaviour to convert it to null?
}
break;
case OBJECT: {
if (visited.has(value)) {
duplicate.add(value);
length += visited.get(value);
break;
}
const valueLength = length;
let entries = 0;
length += 2; // {}
stack.add(value);
for (const key in value) {
if (hasOwnProperty.call(value, key) && (allowlist === null || allowlist.has(key))) {
const prevLength = length;
walk(value, key, value[key]);
if (prevLength !== length) {
// value is printed
length += stringLength(key) + 1; // "key":
entries++;
}
}
}
if (entries > 1) {
length += entries - 1; // commas
}
stack.delete(value);
if (space > 0 && entries > 0) {
length += (1 + (stack.size + 1) * space + 1) * entries; // for each key-value: \n{space}
length += 1 + stack.size * space; // for }
}
visited.set(value, length - valueLength);
break;
}
case ARRAY: {
if (visited.has(value)) {
duplicate.add(value);
length += visited.get(value);
break;
}
const valueLength = length;
length += 2; // []
stack.add(value);
for (let i = 0; i < value.length; i++) {
walk(value, i, value[i]);
}
if (value.length > 1) {
length += value.length - 1; // commas
}
stack.delete(value);
if (space > 0 && value.length > 0) {
length += (1 + (stack.size + 1) * space) * value.length; // for each element: \n{space}
length += 1 + stack.size * space; // for ]
}
visited.set(value, length - valueLength);
break;
}
case PROMISE:
case STRING_STREAM:
async.add(value);
break;
case OBJECT_STREAM:
length += 2; // []
async.add(value);
break;
}
}
let allowlist = null;
replacer = normalizeReplacer(replacer);
if (Array.isArray(replacer)) {
allowlist = new Set(replacer);
replacer = null;
}
space = spaceLength(space);
options = options || {};
const visited = new Map();
const stack = new Set();
const duplicate = new Set();
const circular = new Set();
const async = new Set();
const getType = options.async ? getTypeAsync : getTypeNative;
const root = { '': value };
let stop = false;
let length = 0;
walk(root, '', value);
return {
minLength: isNaN(length) ? Infinity : length,
circular: [...circular],
duplicate: [...duplicate],
async: [...async]
};
};
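stringifyInfo() walks the value without emitting any output: it measures what JSON.stringify() would produce and records problematic references instead of throwing on them. A hedged sketch of reading that report (the sample values are made up):

```ts
import { stringifyInfo } from '@discoveryjs/json-ext';

const data = { name: 'dns-server', ports: [53, 8080] };

// minLength estimates the byte length of the JSON text that stringify would
// produce (UTF-8 bytes, including the requested indentation), which can be
// used to preallocate buffers or to set a Content-Length header up front.
const info = stringifyInfo(data, null, 2);
console.log(info.minLength);

// Circular references are reported rather than thrown.
const loop: any = {};
loop.self = loop;
console.log(stringifyInfo(loop).circular.length); // 1
```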
+3
@@ -0,0 +1,3 @@
module.exports = () => {
throw new Error('Method is not supported');
};
+408
@@ -0,0 +1,408 @@
const { Readable } = require('stream');
const {
normalizeReplacer,
normalizeSpace,
replaceValue,
getTypeAsync,
type: {
PRIMITIVE,
OBJECT,
ARRAY,
PROMISE,
STRING_STREAM,
OBJECT_STREAM
}
} = require('./utils');
const noop = () => {};
const hasOwnProperty = Object.prototype.hasOwnProperty;
// TODO: Remove when support for Node.js 10 is dropped
// Node.js 10 has no well-formed JSON.stringify()
// https://github.com/tc39/proposal-well-formed-stringify
// Adopted code from https://bugs.chromium.org/p/v8/issues/detail?id=7782#c12
const wellformedStringStringify = JSON.stringify('\ud800') === '"\\ud800"'
? JSON.stringify
: s => JSON.stringify(s).replace(
/\p{Surrogate}/gu,
m => `\\u${m.charCodeAt(0).toString(16)}`
);
function push() {
this.push(this._stack.value);
this.popStack();
}
function pushPrimitive(value) {
switch (typeof value) {
case 'string':
this.push(this.encodeString(value));
break;
case 'number':
this.push(Number.isFinite(value) ? this.encodeNumber(value) : 'null');
break;
case 'boolean':
this.push(value ? 'true' : 'false');
break;
case 'undefined':
case 'object': // typeof null === 'object'
this.push('null');
break;
default:
this.destroy(new TypeError(`Do not know how to serialize a ${value.constructor && value.constructor.name || typeof value}`));
}
}
function processObjectEntry(key) {
const current = this._stack;
if (!current.first) {
current.first = true;
} else {
this.push(',');
}
if (this.space) {
this.push(`\n${this.space.repeat(this._depth)}${this.encodeString(key)}: `);
} else {
this.push(this.encodeString(key) + ':');
}
}
function processObject() {
const current = this._stack;
// when no keys left, remove obj from stack
if (current.index === current.keys.length) {
if (this.space && current.first) {
this.push(`\n${this.space.repeat(this._depth - 1)}}`);
} else {
this.push('}');
}
this.popStack();
return;
}
const key = current.keys[current.index];
this.processValue(current.value, key, current.value[key], processObjectEntry);
current.index++;
}
function processArrayItem(index) {
if (index !== 0) {
this.push(',');
}
if (this.space) {
this.push(`\n${this.space.repeat(this._depth)}`);
}
}
function processArray() {
const current = this._stack;
if (current.index === current.value.length) {
if (this.space && current.index > 0) {
this.push(`\n${this.space.repeat(this._depth - 1)}]`);
} else {
this.push(']');
}
this.popStack();
return;
}
this.processValue(current.value, current.index, current.value[current.index], processArrayItem);
current.index++;
}
function createStreamReader(fn) {
return function() {
const current = this._stack;
const data = current.value.read(this._readSize);
if (data !== null) {
current.first = false;
fn.call(this, data, current);
} else {
if ((current.first && !current.value._readableState.reading) || current.ended) {
this.popStack();
} else {
current.first = true;
current.awaiting = true;
}
}
};
}
const processReadableObject = createStreamReader(function(data, current) {
this.processValue(current.value, current.index, data, processArrayItem);
current.index++;
});
const processReadableString = createStreamReader(function(data) {
this.push(data);
});
class JsonStringifyStream extends Readable {
constructor(value, replacer, space) {
super({
autoDestroy: true
});
this.getKeys = Object.keys;
this.replacer = normalizeReplacer(replacer);
if (Array.isArray(this.replacer)) {
const allowlist = this.replacer;
this.getKeys = (value) => allowlist.filter(key => hasOwnProperty.call(value, key));
this.replacer = null;
}
this.space = normalizeSpace(space);
this._depth = 0;
this.error = null;
this._processing = false;
this._ended = false;
this._readSize = 0;
this._buffer = '';
this._stack = null;
this._visited = new WeakSet();
this.pushStack({
handler: () => {
this.popStack();
this.processValue({ '': value }, '', value, noop);
}
});
}
encodeString(value) {
if (/[^\x20-\uD799]|[\x22\x5c]/.test(value)) {
return wellformedStringStringify(value);
}
return '"' + value + '"';
}
encodeNumber(value) {
return value;
}
processValue(holder, key, value, callback) {
value = replaceValue(holder, key, value, this.replacer);
let type = getTypeAsync(value);
switch (type) {
case PRIMITIVE:
if (callback !== processObjectEntry || value !== undefined) {
callback.call(this, key);
pushPrimitive.call(this, value);
}
break;
case OBJECT:
callback.call(this, key);
// check for circular structure
if (this._visited.has(value)) {
return this.destroy(new TypeError('Converting circular structure to JSON'));
}
this._visited.add(value);
this._depth++;
this.push('{');
this.pushStack({
handler: processObject,
value,
index: 0,
first: false,
keys: this.getKeys(value)
});
break;
case ARRAY:
callback.call(this, key);
// check for circular structure
if (this._visited.has(value)) {
return this.destroy(new TypeError('Converting circular structure to JSON'));
}
this._visited.add(value);
this.push('[');
this.pushStack({
handler: processArray,
value,
index: 0
});
this._depth++;
break;
case PROMISE:
this.pushStack({
handler: noop,
awaiting: true
});
Promise.resolve(value)
.then(resolved => {
this.popStack();
this.processValue(holder, key, resolved, callback);
this.processStack();
})
.catch(error => {
this.destroy(error);
});
break;
case STRING_STREAM:
case OBJECT_STREAM:
callback.call(this, key);
// TODO: Remove when support for Node.js 10 is dropped
// Used `_readableState.endEmitted` as fallback, since Node.js 10 has no `readableEnded` getter
if (value.readableEnded || value._readableState.endEmitted) {
return this.destroy(new Error('Readable Stream has ended before it was serialized. All stream data have been lost'));
}
if (value.readableFlowing) {
return this.destroy(new Error('Readable Stream is in flowing mode, data may have been lost. Trying to pause stream.'));
}
if (type === OBJECT_STREAM) {
this.push('[');
this.pushStack({
handler: push,
value: this.space ? '\n' + this.space.repeat(this._depth) + ']' : ']'
});
this._depth++;
}
const self = this.pushStack({
handler: type === OBJECT_STREAM ? processReadableObject : processReadableString,
value,
index: 0,
first: false,
ended: false,
awaiting: !value.readable || value.readableLength === 0
});
const continueProcessing = () => {
if (self.awaiting) {
self.awaiting = false;
this.processStack();
}
};
value.once('error', error => this.destroy(error));
value.once('end', () => {
self.ended = true;
continueProcessing();
});
value.on('readable', continueProcessing);
break;
}
}
pushStack(node) {
node.prev = this._stack;
return this._stack = node;
}
popStack() {
const { handler, value } = this._stack;
if (handler === processObject || handler === processArray || handler === processReadableObject) {
this._visited.delete(value);
this._depth--;
}
this._stack = this._stack.prev;
}
processStack() {
if (this._processing || this._ended) {
return;
}
try {
this._processing = true;
while (this._stack !== null && !this._stack.awaiting) {
this._stack.handler.call(this);
if (!this._processing) {
return;
}
}
this._processing = false;
} catch (error) {
this.destroy(error);
return;
}
if (this._stack === null && !this._ended) {
this._finish();
this.push(null);
}
}
push(data) {
if (data !== null) {
this._buffer += data;
// check buffer overflow
if (this._buffer.length < this._readSize) {
return;
}
// flush buffer
data = this._buffer;
this._buffer = '';
this._processing = false;
}
super.push(data);
}
_read(size) {
// start processing
this._readSize = size || this.readableHighWaterMark;
this.processStack();
}
_finish() {
this._ended = true;
this._processing = false;
this._stack = null;
this._visited = null;
if (this._buffer && this._buffer.length) {
super.push(this._buffer); // flush buffer
}
this._buffer = '';
}
_destroy(error, cb) {
this.error = this.error || error;
this._finish();
cb(error);
}
}
module.exports = function createJsonStringifyStream(value, replacer, space) {
return new JsonStringifyStream(value, replacer, space);
};
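As the PROMISE and stream branches of processValue() above show, the produced stream can also inline promises and nested readable streams found inside the value. A minimal sketch; the payload fields are hypothetical:

```ts
import { Readable } from 'stream';
import { stringifyStream } from '@discoveryjs/json-ext';

// Promises are awaited and object-mode readables are serialized as JSON arrays,
// so slow or large parts of the payload never force building one giant string.
stringifyStream({
  generatedAt: new Date().toISOString(),
  stats: Promise.resolve({ queries: 12345 }),
  entries: Readable.from([{ domain: 'example.com' }, { domain: 'example.org' }])
}, null, 2).pipe(process.stdout);
```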
+1
@@ -0,0 +1 @@
module.exports = TextDecoder;
+1
@@ -0,0 +1 @@
module.exports = require('util').TextDecoder;
+149
@@ -0,0 +1,149 @@
const PrimitiveType = 1;
const ObjectType = 2;
const ArrayType = 3;
const PromiseType = 4;
const ReadableStringType = 5;
const ReadableObjectType = 6;
// https://tc39.es/ecma262/#table-json-single-character-escapes
const escapableCharCodeSubstitution = { // JSON Single Character Escape Sequences
0x08: '\\b',
0x09: '\\t',
0x0a: '\\n',
0x0c: '\\f',
0x0d: '\\r',
0x22: '\\\"',
0x5c: '\\\\'
};
function isLeadingSurrogate(code) {
return code >= 0xD800 && code <= 0xDBFF;
}
function isTrailingSurrogate(code) {
return code >= 0xDC00 && code <= 0xDFFF;
}
function isReadableStream(value) {
return (
typeof value.pipe === 'function' &&
typeof value._read === 'function' &&
typeof value._readableState === 'object' && value._readableState !== null
);
}
function replaceValue(holder, key, value, replacer) {
if (value && typeof value.toJSON === 'function') {
value = value.toJSON();
}
if (replacer !== null) {
value = replacer.call(holder, String(key), value);
}
switch (typeof value) {
case 'function':
case 'symbol':
value = undefined;
break;
case 'object':
if (value !== null) {
const cls = value.constructor;
if (cls === String || cls === Number || cls === Boolean) {
value = value.valueOf();
}
}
break;
}
return value;
}
function getTypeNative(value) {
if (value === null || typeof value !== 'object') {
return PrimitiveType;
}
if (Array.isArray(value)) {
return ArrayType;
}
return ObjectType;
}
function getTypeAsync(value) {
if (value === null || typeof value !== 'object') {
return PrimitiveType;
}
if (typeof value.then === 'function') {
return PromiseType;
}
if (isReadableStream(value)) {
return value._readableState.objectMode ? ReadableObjectType : ReadableStringType;
}
if (Array.isArray(value)) {
return ArrayType;
}
return ObjectType;
}
function normalizeReplacer(replacer) {
if (typeof replacer === 'function') {
return replacer;
}
if (Array.isArray(replacer)) {
const allowlist = new Set(replacer
.map(item => {
const cls = item && item.constructor;
return cls === String || cls === Number ? String(item) : null;
})
.filter(item => typeof item === 'string')
);
return [...allowlist];
}
return null;
}
function normalizeSpace(space) {
if (typeof space === 'number') {
if (!Number.isFinite(space) || space < 1) {
return false;
}
return ' '.repeat(Math.min(space, 10));
}
if (typeof space === 'string') {
return space.slice(0, 10) || false;
}
return false;
}
module.exports = {
escapableCharCodeSubstitution,
isLeadingSurrogate,
isTrailingSurrogate,
type: {
PRIMITIVE: PrimitiveType,
PROMISE: PromiseType,
ARRAY: ArrayType,
OBJECT: ObjectType,
STRING_STREAM: ReadableStringType,
OBJECT_STREAM: ReadableObjectType
},
isReadableStream,
replaceValue,
getTypeNative,
getTypeAsync,
normalizeReplacer,
normalizeSpace
};
+1
@@ -0,0 +1 @@
module.exports = require('../package.json').version;
+21
@@ -0,0 +1,21 @@
MIT License
Copyright (c) Meta Platforms, Inc. and affiliates.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
+3
@@ -0,0 +1,3 @@
# `@jest/schemas`
Experimental and currently incomplete module for JSON schemas for [Jest's](https://jestjs.io/) configuration.
+63
@@ -0,0 +1,63 @@
/**
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import {Static} from '@sinclair/typebox';
import {TBoolean} from '@sinclair/typebox';
import {TNull} from '@sinclair/typebox';
import {TNumber} from '@sinclair/typebox';
import {TObject} from '@sinclair/typebox';
import {TReadonlyOptional} from '@sinclair/typebox';
import {TString} from '@sinclair/typebox';
declare const RawSnapshotFormat: TObject<{
callToJSON: TReadonlyOptional<TBoolean>;
compareKeys: TReadonlyOptional<TNull>;
escapeRegex: TReadonlyOptional<TBoolean>;
escapeString: TReadonlyOptional<TBoolean>;
highlight: TReadonlyOptional<TBoolean>;
indent: TReadonlyOptional<TNumber>;
maxDepth: TReadonlyOptional<TNumber>;
maxWidth: TReadonlyOptional<TNumber>;
min: TReadonlyOptional<TBoolean>;
printBasicPrototype: TReadonlyOptional<TBoolean>;
printFunctionName: TReadonlyOptional<TBoolean>;
theme: TReadonlyOptional<
TObject<{
comment: TReadonlyOptional<TString<string>>;
content: TReadonlyOptional<TString<string>>;
prop: TReadonlyOptional<TString<string>>;
tag: TReadonlyOptional<TString<string>>;
value: TReadonlyOptional<TString<string>>;
}>
>;
}>;
export declare const SnapshotFormat: TObject<{
callToJSON: TReadonlyOptional<TBoolean>;
compareKeys: TReadonlyOptional<TNull>;
escapeRegex: TReadonlyOptional<TBoolean>;
escapeString: TReadonlyOptional<TBoolean>;
highlight: TReadonlyOptional<TBoolean>;
indent: TReadonlyOptional<TNumber>;
maxDepth: TReadonlyOptional<TNumber>;
maxWidth: TReadonlyOptional<TNumber>;
min: TReadonlyOptional<TBoolean>;
printBasicPrototype: TReadonlyOptional<TBoolean>;
printFunctionName: TReadonlyOptional<TBoolean>;
theme: TReadonlyOptional<
TObject<{
comment: TReadonlyOptional<TString<string>>;
content: TReadonlyOptional<TString<string>>;
prop: TReadonlyOptional<TString<string>>;
tag: TReadonlyOptional<TString<string>>;
value: TReadonlyOptional<TString<string>>;
}>
>;
}>;
export declare type SnapshotFormat = Static<typeof RawSnapshotFormat>;
export {};
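Every field in the schema is declared readonly and optional, so the derived Static type accepts any subset of the snapshot-format options. A small illustrative sketch (the option values are arbitrary):

```ts
import type { SnapshotFormat } from '@jest/schemas';

// Only the keys you care about need to be provided; everything is optional.
const format: SnapshotFormat = {
  indent: 2,
  maxDepth: 5,
  printBasicPrototype: false,
  theme: { comment: 'grey', value: 'green' }
};
```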
+60
@@ -0,0 +1,60 @@
'use strict';
Object.defineProperty(exports, '__esModule', {
value: true
});
exports.SnapshotFormat = void 0;
function _typebox() {
const data = require('@sinclair/typebox');
_typebox = function () {
return data;
};
return data;
}
/**
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
const RawSnapshotFormat = _typebox().Type.Partial(
_typebox().Type.Object({
callToJSON: _typebox().Type.Readonly(_typebox().Type.Boolean()),
compareKeys: _typebox().Type.Readonly(_typebox().Type.Null()),
escapeRegex: _typebox().Type.Readonly(_typebox().Type.Boolean()),
escapeString: _typebox().Type.Readonly(_typebox().Type.Boolean()),
highlight: _typebox().Type.Readonly(_typebox().Type.Boolean()),
indent: _typebox().Type.Readonly(
_typebox().Type.Number({
minimum: 0
})
),
maxDepth: _typebox().Type.Readonly(
_typebox().Type.Number({
minimum: 0
})
),
maxWidth: _typebox().Type.Readonly(
_typebox().Type.Number({
minimum: 0
})
),
min: _typebox().Type.Readonly(_typebox().Type.Boolean()),
printBasicPrototype: _typebox().Type.Readonly(_typebox().Type.Boolean()),
printFunctionName: _typebox().Type.Readonly(_typebox().Type.Boolean()),
theme: _typebox().Type.Readonly(
_typebox().Type.Partial(
_typebox().Type.Object({
comment: _typebox().Type.Readonly(_typebox().Type.String()),
content: _typebox().Type.Readonly(_typebox().Type.String()),
prop: _typebox().Type.Readonly(_typebox().Type.String()),
tag: _typebox().Type.Readonly(_typebox().Type.String()),
value: _typebox().Type.Readonly(_typebox().Type.String())
})
)
)
})
);
const SnapshotFormat = _typebox().Type.Strict(RawSnapshotFormat);
exports.SnapshotFormat = SnapshotFormat;
+60
@@ -0,0 +1,60 @@
{
"_from": "@jest/schemas@^29.6.3",
"_id": "@jest/schemas@29.6.3",
"_inBundle": false,
"_integrity": "sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==",
"_location": "/@jest/schemas",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "@jest/schemas@^29.6.3",
"name": "@jest/schemas",
"escapedName": "@jest%2fschemas",
"scope": "@jest",
"rawSpec": "^29.6.3",
"saveSpec": null,
"fetchSpec": "^29.6.3"
},
"_requiredBy": [
"/@jest/types"
],
"_resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz",
"_shasum": "430b5ce8a4e0044a7e3819663305a7b3091c8e03",
"_spec": "@jest/schemas@^29.6.3",
"_where": "/root/dns/node_modules/@jest/types",
"bugs": {
"url": "https://github.com/jestjs/jest/issues"
},
"bundleDependencies": false,
"dependencies": {
"@sinclair/typebox": "^0.27.8"
},
"deprecated": false,
"description": "Experimental and currently incomplete module for JSON schemas for [Jest's](https://jestjs.io/) configuration.",
"engines": {
"node": "^14.15.0 || ^16.10.0 || >=18.0.0"
},
"exports": {
".": {
"types": "./build/index.d.ts",
"default": "./build/index.js"
},
"./package.json": "./package.json"
},
"gitHead": "fb7d95c8af6e0d65a8b65348433d8a0ea0725b5b",
"homepage": "https://github.com/jestjs/jest#readme",
"license": "MIT",
"main": "./build/index.js",
"name": "@jest/schemas",
"publishConfig": {
"access": "public"
},
"repository": {
"type": "git",
"url": "git+https://github.com/jestjs/jest.git",
"directory": "packages/jest-schemas"
},
"types": "./build/index.d.ts",
"version": "29.6.3"
}
+21
@@ -0,0 +1,21 @@
MIT License
Copyright (c) Meta Platforms, Inc. and affiliates.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
+30
@@ -0,0 +1,30 @@
# @jest/types
This package contains shared types of Jest's packages.
If you are looking for types of [Jest globals](https://jestjs.io/docs/api), you can import them from `@jest/globals` package:
```ts
import {describe, expect, it} from '@jest/globals';
describe('my tests', () => {
it('works', () => {
expect(1).toBe(1);
});
});
```
If you prefer to omit imports, a similar result can be achieved by installing the [@types/jest](https://npmjs.com/package/@types/jest) package. Note that this is a third-party library maintained at [DefinitelyTyped](https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/jest) and may not cover the latest Jest features.
Another use-case for `@jest/types` is a typed Jest config, as those types are not provided by Jest out of the box:
```ts
// jest.config.ts
import {Config} from '@jest/types';
const config: Config.InitialOptions = {
// some typed config
};
export default config;
```
+1
@@ -0,0 +1 @@
'use strict';
+1
@@ -0,0 +1 @@
'use strict';
+1
@@ -0,0 +1 @@
'use strict';
+1
@@ -0,0 +1 @@
'use strict';
+1
@@ -0,0 +1 @@
'use strict';
+1204
File diff suppressed because it is too large
+1
@@ -0,0 +1 @@
'use strict';
+69
@@ -0,0 +1,69 @@
{
"_from": "@jest/types@^29.6.3",
"_id": "@jest/types@29.6.3",
"_inBundle": false,
"_integrity": "sha512-u3UPsIilWKOM3F9CXtrG8LEJmNxwoCQC/XVj4IKYXvvpx7QIi/Kg1LI5uDmDpKlac62NUtX7eLjRh+jVZcLOzw==",
"_location": "/@jest/types",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "@jest/types@^29.6.3",
"name": "@jest/types",
"escapedName": "@jest%2ftypes",
"scope": "@jest",
"rawSpec": "^29.6.3",
"saveSpec": null,
"fetchSpec": "^29.6.3"
},
"_requiredBy": [
"/jest-util"
],
"_resolved": "https://registry.npmjs.org/@jest/types/-/types-29.6.3.tgz",
"_shasum": "1131f8cf634e7e84c5e77bab12f052af585fba59",
"_spec": "@jest/types@^29.6.3",
"_where": "/root/dns/node_modules/jest-util",
"bugs": {
"url": "https://github.com/jestjs/jest/issues"
},
"bundleDependencies": false,
"dependencies": {
"@jest/schemas": "^29.6.3",
"@types/istanbul-lib-coverage": "^2.0.0",
"@types/istanbul-reports": "^3.0.0",
"@types/node": "*",
"@types/yargs": "^17.0.8",
"chalk": "^4.0.0"
},
"deprecated": false,
"description": "This package contains shared types of Jest's packages.",
"devDependencies": {
"@tsd/typescript": "^5.0.4",
"tsd-lite": "^0.7.0"
},
"engines": {
"node": "^14.15.0 || ^16.10.0 || >=18.0.0"
},
"exports": {
".": {
"types": "./build/index.d.ts",
"default": "./build/index.js"
},
"./package.json": "./package.json"
},
"gitHead": "fb7d95c8af6e0d65a8b65348433d8a0ea0725b5b",
"homepage": "https://github.com/jestjs/jest#readme",
"license": "MIT",
"main": "./build/index.js",
"name": "@jest/types",
"publishConfig": {
"access": "public"
},
"repository": {
"type": "git",
"url": "git+https://github.com/jestjs/jest.git",
"directory": "packages/jest-types"
},
"types": "./build/index.d.ts",
"version": "29.6.3"
}
+19
@@ -0,0 +1,19 @@
Copyright 2024 Justin Ridgewell <justin@ridgewell.name>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
+227
@@ -0,0 +1,227 @@
# @jridgewell/gen-mapping
> Generate source maps
`gen-mapping` allows you to generate a source map during transpilation or minification.
With a source map, you're able to trace the original location in the source file, either in Chrome's
DevTools or using a library like [`@jridgewell/trace-mapping`][trace-mapping].
You may already be familiar with the [`source-map`][source-map] package's `SourceMapGenerator`. This
provides the same `addMapping` and `setSourceContent` API.
## Installation
```sh
npm install @jridgewell/gen-mapping
```
## Usage
```typescript
import { GenMapping, addMapping, setSourceContent, toEncodedMap, toDecodedMap } from '@jridgewell/gen-mapping';
const map = new GenMapping({
file: 'output.js',
sourceRoot: 'https://example.com/',
});
setSourceContent(map, 'input.js', `function foo() {}`);
addMapping(map, {
// Lines start at line 1, columns at column 0.
generated: { line: 1, column: 0 },
source: 'input.js',
original: { line: 1, column: 0 },
});
addMapping(map, {
generated: { line: 1, column: 9 },
source: 'input.js',
original: { line: 1, column: 9 },
name: 'foo',
});
assert.deepEqual(toDecodedMap(map), {
version: 3,
file: 'output.js',
names: ['foo'],
sourceRoot: 'https://example.com/',
sources: ['input.js'],
sourcesContent: ['function foo() {}'],
mappings: [
[ [0, 0, 0, 0], [9, 0, 0, 9, 0] ]
],
});
assert.deepEqual(toEncodedMap(map), {
version: 3,
file: 'output.js',
names: ['foo'],
sourceRoot: 'https://example.com/',
sources: ['input.js'],
sourcesContent: ['function foo() {}'],
mappings: 'AAAA,SAASA',
});
```
### Smaller Sourcemaps
Not everything needs to be added to a sourcemap, and needless markings can cause significantly
larger file sizes. `gen-mapping` exposes `maybeAddSegment`/`maybeAddMapping` APIs that will
intelligently determine if this marking adds useful information. If not, the marking will be
skipped.
```typescript
import { maybeAddMapping } from '@jridgewell/gen-mapping';
const map = new GenMapping();
// Adding a sourceless marking at the beginning of a line isn't useful.
maybeAddMapping(map, {
generated: { line: 1, column: 0 },
});
// Adding a new source marking is useful.
maybeAddMapping(map, {
generated: { line: 1, column: 0 },
source: 'input.js',
original: { line: 1, column: 0 },
});
// But adding another marking pointing to the exact same original location isn't, even if the
// generated column changed.
maybeAddMapping(map, {
generated: { line: 1, column: 9 },
source: 'input.js',
original: { line: 1, column: 0 },
});
assert.deepEqual(toEncodedMap(map), {
version: 3,
names: [],
sources: ['input.js'],
sourcesContent: [null],
mappings: 'AAAA',
});
```
## Benchmarks
```
node v18.0.0
amp.js.map
Memory Usage:
gen-mapping: addSegment 5852872 bytes
gen-mapping: addMapping 7716042 bytes
source-map-js 6143250 bytes
source-map-0.6.1 6124102 bytes
source-map-0.8.0 6121173 bytes
Smallest memory usage is gen-mapping: addSegment
Adding speed:
gen-mapping: addSegment x 441 ops/sec ±2.07% (90 runs sampled)
gen-mapping: addMapping x 350 ops/sec ±2.40% (86 runs sampled)
source-map-js: addMapping x 169 ops/sec ±2.42% (80 runs sampled)
source-map-0.6.1: addMapping x 167 ops/sec ±2.56% (80 runs sampled)
source-map-0.8.0: addMapping x 168 ops/sec ±2.52% (80 runs sampled)
Fastest is gen-mapping: addSegment
Generate speed:
gen-mapping: decoded output x 150,824,370 ops/sec ±0.07% (102 runs sampled)
gen-mapping: encoded output x 663 ops/sec ±0.22% (98 runs sampled)
source-map-js: encoded output x 197 ops/sec ±0.45% (84 runs sampled)
source-map-0.6.1: encoded output x 198 ops/sec ±0.33% (85 runs sampled)
source-map-0.8.0: encoded output x 197 ops/sec ±0.06% (93 runs sampled)
Fastest is gen-mapping: decoded output
***
babel.min.js.map
Memory Usage:
gen-mapping: addSegment 37578063 bytes
gen-mapping: addMapping 37212897 bytes
source-map-js 47638527 bytes
source-map-0.6.1 47690503 bytes
source-map-0.8.0 47470188 bytes
Smallest memory usage is gen-mapping: addMapping
Adding speed:
gen-mapping: addSegment x 31.05 ops/sec ±8.31% (43 runs sampled)
gen-mapping: addMapping x 29.83 ops/sec ±7.36% (51 runs sampled)
source-map-js: addMapping x 20.73 ops/sec ±6.22% (38 runs sampled)
source-map-0.6.1: addMapping x 20.03 ops/sec ±10.51% (38 runs sampled)
source-map-0.8.0: addMapping x 19.30 ops/sec ±8.27% (37 runs sampled)
Fastest is gen-mapping: addSegment
Generate speed:
gen-mapping: decoded output x 381,379,234 ops/sec ±0.29% (96 runs sampled)
gen-mapping: encoded output x 95.15 ops/sec ±2.98% (72 runs sampled)
source-map-js: encoded output x 15.20 ops/sec ±7.41% (33 runs sampled)
source-map-0.6.1: encoded output x 16.36 ops/sec ±10.46% (31 runs sampled)
source-map-0.8.0: encoded output x 16.06 ops/sec ±6.45% (31 runs sampled)
Fastest is gen-mapping: decoded output
***
preact.js.map
Memory Usage:
gen-mapping: addSegment 416247 bytes
gen-mapping: addMapping 419824 bytes
source-map-js 1024619 bytes
source-map-0.6.1 1146004 bytes
source-map-0.8.0 1113250 bytes
Smallest memory usage is gen-mapping: addSegment
Adding speed:
gen-mapping: addSegment x 13,755 ops/sec ±0.15% (98 runs sampled)
gen-mapping: addMapping x 13,013 ops/sec ±0.11% (101 runs sampled)
source-map-js: addMapping x 4,564 ops/sec ±0.21% (98 runs sampled)
source-map-0.6.1: addMapping x 4,562 ops/sec ±0.11% (99 runs sampled)
source-map-0.8.0: addMapping x 4,593 ops/sec ±0.11% (100 runs sampled)
Fastest is gen-mapping: addSegment
Generate speed:
gen-mapping: decoded output x 379,864,020 ops/sec ±0.23% (93 runs sampled)
gen-mapping: encoded output x 14,368 ops/sec ±4.07% (82 runs sampled)
source-map-js: encoded output x 5,261 ops/sec ±0.21% (99 runs sampled)
source-map-0.6.1: encoded output x 5,124 ops/sec ±0.58% (99 runs sampled)
source-map-0.8.0: encoded output x 5,434 ops/sec ±0.33% (96 runs sampled)
Fastest is gen-mapping: decoded output
***
react.js.map
Memory Usage:
gen-mapping: addSegment 975096 bytes
gen-mapping: addMapping 1102981 bytes
source-map-js 2918836 bytes
source-map-0.6.1 2885435 bytes
source-map-0.8.0 2874336 bytes
Smallest memory usage is gen-mapping: addSegment
Adding speed:
gen-mapping: addSegment x 4,772 ops/sec ±0.15% (100 runs sampled)
gen-mapping: addMapping x 4,456 ops/sec ±0.13% (97 runs sampled)
source-map-js: addMapping x 1,618 ops/sec ±0.24% (97 runs sampled)
source-map-0.6.1: addMapping x 1,622 ops/sec ±0.12% (99 runs sampled)
source-map-0.8.0: addMapping x 1,631 ops/sec ±0.12% (100 runs sampled)
Fastest is gen-mapping: addSegment
Generate speed:
gen-mapping: decoded output x 379,107,695 ops/sec ±0.07% (99 runs sampled)
gen-mapping: encoded output x 5,421 ops/sec ±1.60% (89 runs sampled)
source-map-js: encoded output x 2,113 ops/sec ±1.81% (98 runs sampled)
source-map-0.6.1: encoded output x 2,126 ops/sec ±0.10% (100 runs sampled)
source-map-0.8.0: encoded output x 2,176 ops/sec ±0.39% (98 runs sampled)
Fastest is gen-mapping: decoded output
```
[source-map]: https://www.npmjs.com/package/source-map
[trace-mapping]: https://github.com/jridgewell/sourcemaps/tree/main/packages/trace-mapping
+292
@@ -0,0 +1,292 @@
// src/set-array.ts
var SetArray = class {
constructor() {
this._indexes = { __proto__: null };
this.array = [];
}
};
function cast(set) {
return set;
}
function get(setarr, key) {
return cast(setarr)._indexes[key];
}
function put(setarr, key) {
const index = get(setarr, key);
if (index !== void 0) return index;
const { array, _indexes: indexes } = cast(setarr);
const length = array.push(key);
return indexes[key] = length - 1;
}
function remove(setarr, key) {
const index = get(setarr, key);
if (index === void 0) return;
const { array, _indexes: indexes } = cast(setarr);
for (let i = index + 1; i < array.length; i++) {
const k = array[i];
array[i - 1] = k;
indexes[k]--;
}
indexes[key] = void 0;
array.pop();
}
// src/gen-mapping.ts
import {
encode
} from "@jridgewell/sourcemap-codec";
import { TraceMap, decodedMappings } from "@jridgewell/trace-mapping";
// src/sourcemap-segment.ts
var COLUMN = 0;
var SOURCES_INDEX = 1;
var SOURCE_LINE = 2;
var SOURCE_COLUMN = 3;
var NAMES_INDEX = 4;
// src/gen-mapping.ts
var NO_NAME = -1;
var GenMapping = class {
constructor({ file, sourceRoot } = {}) {
this._names = new SetArray();
this._sources = new SetArray();
this._sourcesContent = [];
this._mappings = [];
this.file = file;
this.sourceRoot = sourceRoot;
this._ignoreList = new SetArray();
}
};
function cast2(map) {
return map;
}
function addSegment(map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
return addSegmentInternal(
false,
map,
genLine,
genColumn,
source,
sourceLine,
sourceColumn,
name,
content
);
}
function addMapping(map, mapping) {
return addMappingInternal(false, map, mapping);
}
var maybeAddSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
return addSegmentInternal(
true,
map,
genLine,
genColumn,
source,
sourceLine,
sourceColumn,
name,
content
);
};
var maybeAddMapping = (map, mapping) => {
return addMappingInternal(true, map, mapping);
};
function setSourceContent(map, source, content) {
const {
_sources: sources,
_sourcesContent: sourcesContent
// _originalScopes: originalScopes,
} = cast2(map);
const index = put(sources, source);
sourcesContent[index] = content;
}
function setIgnore(map, source, ignore = true) {
const {
_sources: sources,
_sourcesContent: sourcesContent,
_ignoreList: ignoreList
// _originalScopes: originalScopes,
} = cast2(map);
const index = put(sources, source);
if (index === sourcesContent.length) sourcesContent[index] = null;
if (ignore) put(ignoreList, index);
else remove(ignoreList, index);
}
function toDecodedMap(map) {
const {
_mappings: mappings,
_sources: sources,
_sourcesContent: sourcesContent,
_names: names,
_ignoreList: ignoreList
// _originalScopes: originalScopes,
// _generatedRanges: generatedRanges,
} = cast2(map);
removeEmptyFinalLines(mappings);
return {
version: 3,
file: map.file || void 0,
names: names.array,
sourceRoot: map.sourceRoot || void 0,
sources: sources.array,
sourcesContent,
mappings,
// originalScopes,
// generatedRanges,
ignoreList: ignoreList.array
};
}
function toEncodedMap(map) {
const decoded = toDecodedMap(map);
return Object.assign({}, decoded, {
// originalScopes: decoded.originalScopes.map((os) => encodeOriginalScopes(os)),
// generatedRanges: encodeGeneratedRanges(decoded.generatedRanges as GeneratedRange[]),
mappings: encode(decoded.mappings)
});
}
function fromMap(input) {
const map = new TraceMap(input);
const gen = new GenMapping({ file: map.file, sourceRoot: map.sourceRoot });
putAll(cast2(gen)._names, map.names);
putAll(cast2(gen)._sources, map.sources);
cast2(gen)._sourcesContent = map.sourcesContent || map.sources.map(() => null);
cast2(gen)._mappings = decodedMappings(map);
if (map.ignoreList) putAll(cast2(gen)._ignoreList, map.ignoreList);
return gen;
}
function allMappings(map) {
const out = [];
const { _mappings: mappings, _sources: sources, _names: names } = cast2(map);
for (let i = 0; i < mappings.length; i++) {
const line = mappings[i];
for (let j = 0; j < line.length; j++) {
const seg = line[j];
const generated = { line: i + 1, column: seg[COLUMN] };
let source = void 0;
let original = void 0;
let name = void 0;
if (seg.length !== 1) {
source = sources.array[seg[SOURCES_INDEX]];
original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] };
if (seg.length === 5) name = names.array[seg[NAMES_INDEX]];
}
out.push({ generated, source, original, name });
}
}
return out;
}
function addSegmentInternal(skipable, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
const {
_mappings: mappings,
_sources: sources,
_sourcesContent: sourcesContent,
_names: names
// _originalScopes: originalScopes,
} = cast2(map);
const line = getIndex(mappings, genLine);
const index = getColumnIndex(line, genColumn);
if (!source) {
if (skipable && skipSourceless(line, index)) return;
return insert(line, index, [genColumn]);
}
assert(sourceLine);
assert(sourceColumn);
const sourcesIndex = put(sources, source);
const namesIndex = name ? put(names, name) : NO_NAME;
if (sourcesIndex === sourcesContent.length) sourcesContent[sourcesIndex] = content != null ? content : null;
if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {
return;
}
return insert(
line,
index,
name ? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex] : [genColumn, sourcesIndex, sourceLine, sourceColumn]
);
}
function assert(_val) {
}
function getIndex(arr, index) {
for (let i = arr.length; i <= index; i++) {
arr[i] = [];
}
return arr[index];
}
function getColumnIndex(line, genColumn) {
let index = line.length;
for (let i = index - 1; i >= 0; index = i--) {
const current = line[i];
if (genColumn >= current[COLUMN]) break;
}
return index;
}
function insert(array, index, value) {
for (let i = array.length; i > index; i--) {
array[i] = array[i - 1];
}
array[index] = value;
}
function removeEmptyFinalLines(mappings) {
const { length } = mappings;
let len = length;
for (let i = len - 1; i >= 0; len = i, i--) {
if (mappings[i].length > 0) break;
}
if (len < length) mappings.length = len;
}
function putAll(setarr, array) {
for (let i = 0; i < array.length; i++) put(setarr, array[i]);
}
function skipSourceless(line, index) {
if (index === 0) return true;
const prev = line[index - 1];
return prev.length === 1;
}
function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) {
if (index === 0) return false;
const prev = line[index - 1];
if (prev.length === 1) return false;
return sourcesIndex === prev[SOURCES_INDEX] && sourceLine === prev[SOURCE_LINE] && sourceColumn === prev[SOURCE_COLUMN] && namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME);
}
function addMappingInternal(skipable, map, mapping) {
const { generated, source, original, name, content } = mapping;
if (!source) {
return addSegmentInternal(
skipable,
map,
generated.line - 1,
generated.column,
null,
null,
null,
null,
null
);
}
assert(original);
return addSegmentInternal(
skipable,
map,
generated.line - 1,
generated.column,
source,
original.line - 1,
original.column,
name,
content
);
}
export {
GenMapping,
addMapping,
addSegment,
allMappings,
fromMap,
maybeAddMapping,
maybeAddSegment,
setIgnore,
setSourceContent,
toDecodedMap,
toEncodedMap
};
//# sourceMappingURL=gen-mapping.mjs.map
File diff suppressed because one or more lines are too long
+358
@@ -0,0 +1,358 @@
(function (global, factory) {
if (typeof exports === 'object' && typeof module !== 'undefined') {
factory(module, require('@jridgewell/sourcemap-codec'), require('@jridgewell/trace-mapping'));
module.exports = def(module);
} else if (typeof define === 'function' && define.amd) {
define(['module', '@jridgewell/sourcemap-codec', '@jridgewell/trace-mapping'], function(mod) {
factory.apply(this, arguments);
mod.exports = def(mod);
});
} else {
const mod = { exports: {} };
factory(mod, global.sourcemapCodec, global.traceMapping);
global = typeof globalThis !== 'undefined' ? globalThis : global || self;
global.genMapping = def(mod);
}
function def(m) { return 'default' in m.exports ? m.exports.default : m.exports; }
})(this, (function (module, require_sourcemapCodec, require_traceMapping) {
"use strict";
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __commonJS = (cb, mod) => function __require() {
return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
};
var __export = (target, all) => {
for (var name in all)
__defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
if (from && typeof from === "object" || typeof from === "function") {
for (let key of __getOwnPropNames(from))
if (!__hasOwnProp.call(to, key) && key !== except)
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
}
return to;
};
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
// If the importer is in node compatibility mode or this is not an ESM
// file that has been converted to a CommonJS file using a Babel-
// compatible transform (i.e. "__esModule" has not been set), then set
// "default" to the CommonJS "module.exports" for node compatibility.
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
mod
));
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// umd:@jridgewell/sourcemap-codec
var require_sourcemap_codec = __commonJS({
"umd:@jridgewell/sourcemap-codec"(exports, module2) {
module2.exports = require_sourcemapCodec;
}
});
// umd:@jridgewell/trace-mapping
var require_trace_mapping = __commonJS({
"umd:@jridgewell/trace-mapping"(exports, module2) {
module2.exports = require_traceMapping;
}
});
// src/gen-mapping.ts
var gen_mapping_exports = {};
__export(gen_mapping_exports, {
GenMapping: () => GenMapping,
addMapping: () => addMapping,
addSegment: () => addSegment,
allMappings: () => allMappings,
fromMap: () => fromMap,
maybeAddMapping: () => maybeAddMapping,
maybeAddSegment: () => maybeAddSegment,
setIgnore: () => setIgnore,
setSourceContent: () => setSourceContent,
toDecodedMap: () => toDecodedMap,
toEncodedMap: () => toEncodedMap
});
module.exports = __toCommonJS(gen_mapping_exports);
// src/set-array.ts
var SetArray = class {
constructor() {
this._indexes = { __proto__: null };
this.array = [];
}
};
function cast(set) {
return set;
}
function get(setarr, key) {
return cast(setarr)._indexes[key];
}
function put(setarr, key) {
const index = get(setarr, key);
if (index !== void 0) return index;
const { array, _indexes: indexes } = cast(setarr);
const length = array.push(key);
return indexes[key] = length - 1;
}
function remove(setarr, key) {
const index = get(setarr, key);
if (index === void 0) return;
const { array, _indexes: indexes } = cast(setarr);
for (let i = index + 1; i < array.length; i++) {
const k = array[i];
array[i - 1] = k;
indexes[k]--;
}
indexes[key] = void 0;
array.pop();
}
// src/gen-mapping.ts
var import_sourcemap_codec = __toESM(require_sourcemap_codec());
var import_trace_mapping = __toESM(require_trace_mapping());
// src/sourcemap-segment.ts
var COLUMN = 0;
var SOURCES_INDEX = 1;
var SOURCE_LINE = 2;
var SOURCE_COLUMN = 3;
var NAMES_INDEX = 4;
// src/gen-mapping.ts
var NO_NAME = -1;
var GenMapping = class {
constructor({ file, sourceRoot } = {}) {
this._names = new SetArray();
this._sources = new SetArray();
this._sourcesContent = [];
this._mappings = [];
this.file = file;
this.sourceRoot = sourceRoot;
this._ignoreList = new SetArray();
}
};
function cast2(map) {
return map;
}
function addSegment(map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
return addSegmentInternal(
false,
map,
genLine,
genColumn,
source,
sourceLine,
sourceColumn,
name,
content
);
}
function addMapping(map, mapping) {
return addMappingInternal(false, map, mapping);
}
var maybeAddSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
return addSegmentInternal(
true,
map,
genLine,
genColumn,
source,
sourceLine,
sourceColumn,
name,
content
);
};
var maybeAddMapping = (map, mapping) => {
return addMappingInternal(true, map, mapping);
};
function setSourceContent(map, source, content) {
const {
_sources: sources,
_sourcesContent: sourcesContent
// _originalScopes: originalScopes,
} = cast2(map);
const index = put(sources, source);
sourcesContent[index] = content;
}
function setIgnore(map, source, ignore = true) {
const {
_sources: sources,
_sourcesContent: sourcesContent,
_ignoreList: ignoreList
// _originalScopes: originalScopes,
} = cast2(map);
const index = put(sources, source);
if (index === sourcesContent.length) sourcesContent[index] = null;
if (ignore) put(ignoreList, index);
else remove(ignoreList, index);
}
function toDecodedMap(map) {
const {
_mappings: mappings,
_sources: sources,
_sourcesContent: sourcesContent,
_names: names,
_ignoreList: ignoreList
// _originalScopes: originalScopes,
// _generatedRanges: generatedRanges,
} = cast2(map);
removeEmptyFinalLines(mappings);
return {
version: 3,
file: map.file || void 0,
names: names.array,
sourceRoot: map.sourceRoot || void 0,
sources: sources.array,
sourcesContent,
mappings,
// originalScopes,
// generatedRanges,
ignoreList: ignoreList.array
};
}
function toEncodedMap(map) {
const decoded = toDecodedMap(map);
return Object.assign({}, decoded, {
// originalScopes: decoded.originalScopes.map((os) => encodeOriginalScopes(os)),
// generatedRanges: encodeGeneratedRanges(decoded.generatedRanges as GeneratedRange[]),
mappings: (0, import_sourcemap_codec.encode)(decoded.mappings)
});
}
function fromMap(input) {
const map = new import_trace_mapping.TraceMap(input);
const gen = new GenMapping({ file: map.file, sourceRoot: map.sourceRoot });
putAll(cast2(gen)._names, map.names);
putAll(cast2(gen)._sources, map.sources);
cast2(gen)._sourcesContent = map.sourcesContent || map.sources.map(() => null);
cast2(gen)._mappings = (0, import_trace_mapping.decodedMappings)(map);
if (map.ignoreList) putAll(cast2(gen)._ignoreList, map.ignoreList);
return gen;
}
function allMappings(map) {
const out = [];
const { _mappings: mappings, _sources: sources, _names: names } = cast2(map);
for (let i = 0; i < mappings.length; i++) {
const line = mappings[i];
for (let j = 0; j < line.length; j++) {
const seg = line[j];
const generated = { line: i + 1, column: seg[COLUMN] };
let source = void 0;
let original = void 0;
let name = void 0;
if (seg.length !== 1) {
source = sources.array[seg[SOURCES_INDEX]];
original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] };
if (seg.length === 5) name = names.array[seg[NAMES_INDEX]];
}
out.push({ generated, source, original, name });
}
}
return out;
}
function addSegmentInternal(skipable, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
const {
_mappings: mappings,
_sources: sources,
_sourcesContent: sourcesContent,
_names: names
// _originalScopes: originalScopes,
} = cast2(map);
const line = getIndex(mappings, genLine);
const index = getColumnIndex(line, genColumn);
if (!source) {
if (skipable && skipSourceless(line, index)) return;
return insert(line, index, [genColumn]);
}
assert(sourceLine);
assert(sourceColumn);
const sourcesIndex = put(sources, source);
const namesIndex = name ? put(names, name) : NO_NAME;
if (sourcesIndex === sourcesContent.length) sourcesContent[sourcesIndex] = content != null ? content : null;
if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {
return;
}
return insert(
line,
index,
name ? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex] : [genColumn, sourcesIndex, sourceLine, sourceColumn]
);
}
function assert(_val) {
}
function getIndex(arr, index) {
for (let i = arr.length; i <= index; i++) {
arr[i] = [];
}
return arr[index];
}
function getColumnIndex(line, genColumn) {
let index = line.length;
for (let i = index - 1; i >= 0; index = i--) {
const current = line[i];
if (genColumn >= current[COLUMN]) break;
}
return index;
}
function insert(array, index, value) {
for (let i = array.length; i > index; i--) {
array[i] = array[i - 1];
}
array[index] = value;
}
function removeEmptyFinalLines(mappings) {
const { length } = mappings;
let len = length;
for (let i = len - 1; i >= 0; len = i, i--) {
if (mappings[i].length > 0) break;
}
if (len < length) mappings.length = len;
}
function putAll(setarr, array) {
for (let i = 0; i < array.length; i++) put(setarr, array[i]);
}
function skipSourceless(line, index) {
if (index === 0) return true;
const prev = line[index - 1];
return prev.length === 1;
}
function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) {
if (index === 0) return false;
const prev = line[index - 1];
if (prev.length === 1) return false;
return sourcesIndex === prev[SOURCES_INDEX] && sourceLine === prev[SOURCE_LINE] && sourceColumn === prev[SOURCE_COLUMN] && namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME);
}
function addMappingInternal(skipable, map, mapping) {
const { generated, source, original, name, content } = mapping;
if (!source) {
return addSegmentInternal(
skipable,
map,
generated.line - 1,
generated.column,
null,
null,
null,
null,
null
);
}
assert(original);
return addSegmentInternal(
skipable,
map,
generated.line - 1,
generated.column,
source,
original.line - 1,
original.column,
name,
content
);
}
}));
//# sourceMappingURL=gen-mapping.umd.js.map
File diff suppressed because one or more lines are too long
+88
View File
@@ -0,0 +1,88 @@
import type { SourceMapInput } from '@jridgewell/trace-mapping';
import type { DecodedSourceMap, EncodedSourceMap, Pos, Mapping } from './types';
export type { DecodedSourceMap, EncodedSourceMap, Mapping };
export type Options = {
file?: string | null;
sourceRoot?: string | null;
};
/**
* Provides the state to generate a sourcemap.
*/
export declare class GenMapping {
private _names;
private _sources;
private _sourcesContent;
private _mappings;
private _ignoreList;
file: string | null | undefined;
sourceRoot: string | null | undefined;
constructor({ file, sourceRoot }?: Options);
}
/**
* A low-level API to associate a generated position with an original source position. Line and
* column here are 0-based, unlike `addMapping`.
*/
export declare function addSegment(map: GenMapping, genLine: number, genColumn: number, source?: null, sourceLine?: null, sourceColumn?: null, name?: null, content?: null): void;
export declare function addSegment(map: GenMapping, genLine: number, genColumn: number, source: string, sourceLine: number, sourceColumn: number, name?: null, content?: string | null): void;
export declare function addSegment(map: GenMapping, genLine: number, genColumn: number, source: string, sourceLine: number, sourceColumn: number, name: string, content?: string | null): void;
/**
* A high-level API to associate a generated position with an original source position. Line is
* 1-based, but column is 0-based, due to legacy behavior in `source-map` library.
*/
export declare function addMapping(map: GenMapping, mapping: {
generated: Pos;
source?: null;
original?: null;
name?: null;
content?: null;
}): void;
export declare function addMapping(map: GenMapping, mapping: {
generated: Pos;
source: string;
original: Pos;
name?: null;
content?: string | null;
}): void;
export declare function addMapping(map: GenMapping, mapping: {
generated: Pos;
source: string;
original: Pos;
name: string;
content?: string | null;
}): void;
/**
* Same as `addSegment`, but will only add the segment if it generates useful information in the
* resulting map. This only works correctly if segments are added **in order**, meaning you should
* not add a segment with a lower generated line/column than one that came before.
*/
export declare const maybeAddSegment: typeof addSegment;
/**
* Same as `addMapping`, but will only add the mapping if it generates useful information in the
* resulting map. This only works correctly if mappings are added **in order**, meaning you should
* not add a mapping with a lower generated line/column than one that came before.
*/
export declare const maybeAddMapping: typeof addMapping;
/**
* Adds/removes the content of the source file to the source map.
*/
export declare function setSourceContent(map: GenMapping, source: string, content: string | null): void;
export declare function setIgnore(map: GenMapping, source: string, ignore?: boolean): void;
/**
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
export declare function toDecodedMap(map: GenMapping): DecodedSourceMap;
/**
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
export declare function toEncodedMap(map: GenMapping): EncodedSourceMap;
/**
* Constructs a new GenMapping, using the already present mappings of the input.
*/
export declare function fromMap(input: SourceMapInput): GenMapping;
/**
* Returns an array of high-level mapping objects for every recorded segment, which could then be
* passed to the `source-map` library.
*/
export declare function allMappings(map: GenMapping): Mapping[];
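For reference, a minimal sketch of how the declarations above are typically consumed, based only on the signatures and doc comments in this file; the file name `output.js` and source `input.ts` are placeholders:

```ts
import {
  GenMapping,
  maybeAddMapping,
  setSourceContent,
  toDecodedMap,
  toEncodedMap,
} from '@jridgewell/gen-mapping';

// An empty map for a hypothetical generated file.
const map = new GenMapping({ file: 'output.js' });

// addMapping/maybeAddMapping take 1-based lines and 0-based columns.
maybeAddMapping(map, {
  generated: { line: 1, column: 0 },
  source: 'input.ts',
  original: { line: 1, column: 0 },
  name: 'x',
});
setSourceContent(map, 'input.ts', 'const x = 1;\n');

toDecodedMap(map); // mappings as nested arrays of segments
toEncodedMap(map); // mappings as a VLQ string ("AAAAA" for this single segment)
```

As the comments above note, the `maybe*` variants only drop redundant entries when positions are added in generated order.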
+32
View File
@@ -0,0 +1,32 @@
type Key = string | number | symbol;
/**
* SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
* index of the `key` in the backing array.
*
* This is designed to allow synchronizing a second array with the contents of the backing array,
* like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
* and there are never duplicates.
*/
export declare class SetArray<T extends Key = Key> {
private _indexes;
array: readonly T[];
constructor();
}
/**
* Gets the index associated with `key` in the backing array, if it is already present.
*/
export declare function get<T extends Key>(setarr: SetArray<T>, key: T): number | undefined;
/**
* Puts `key` into the backing array, if it is not already present. Returns
* the index of the `key` in the backing array.
*/
export declare function put<T extends Key>(setarr: SetArray<T>, key: T): number;
/**
* Pops the last added item out of the SetArray.
*/
export declare function pop<T extends Key>(setarr: SetArray<T>): void;
/**
* Removes the key, if it exists in the set.
*/
export declare function remove<T extends Key>(setarr: SetArray<T>, key: T): void;
export {};
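A small sketch of the SetArray semantics described above. This helper is internal to the package (it is bundled into the dist build), so the relative import path here is illustrative rather than a public entry point:

```ts
import { SetArray, get, put, pop, remove } from './set-array';

const names = new SetArray<string>();

put(names, 'foo');     // => 0 (first insertion)
put(names, 'bar');     // => 1
put(names, 'foo');     // => 0 (duplicates return the existing index)
get(names, 'bar');     // => 1
get(names, 'missing'); // => undefined

remove(names, 'foo');  // later keys shift down, so...
get(names, 'bar');     // => 0

pop(names);            // drops the last added item ('bar')
names.array;           // => []
```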
+12
View File
@@ -0,0 +1,12 @@
type GeneratedColumn = number;
type SourcesIndex = number;
type SourceLine = number;
type SourceColumn = number;
type NamesIndex = number;
export type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
export declare const COLUMN = 0;
export declare const SOURCES_INDEX = 1;
export declare const SOURCE_LINE = 2;
export declare const SOURCE_COLUMN = 3;
export declare const NAMES_INDEX = 4;
export {};
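Concretely, the three tuple shapes look like this (the values are made up; the indexes refer into the map's `sources`/`names` arrays, and lines/columns are 0-based):

```ts
import type { SourceMapSegment } from './sourcemap-segment';

const sourceless: SourceMapSegment = [12];           // generated column only
const sourced: SourceMapSegment = [12, 0, 4, 2];     // + sources[0], line 4, column 2
const named: SourceMapSegment = [12, 0, 4, 2, 3];    // + names[3]
```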
+43
View File
@@ -0,0 +1,43 @@
import type { SourceMapSegment } from './sourcemap-segment';
export interface SourceMapV3 {
file?: string | null;
names: readonly string[];
sourceRoot?: string;
sources: readonly (string | null)[];
sourcesContent?: readonly (string | null)[];
version: 3;
ignoreList?: readonly number[];
}
export interface EncodedSourceMap extends SourceMapV3 {
mappings: string;
}
export interface DecodedSourceMap extends SourceMapV3 {
mappings: readonly SourceMapSegment[][];
}
export interface Pos {
line: number;
column: number;
}
export interface OriginalPos extends Pos {
source: string;
}
export interface BindingExpressionRange {
start: Pos;
expression: string;
}
export type Mapping = {
generated: Pos;
source: undefined;
original: undefined;
name: undefined;
} | {
generated: Pos;
source: string;
original: Pos;
name: string;
} | {
generated: Pos;
source: string;
original: Pos;
name: undefined;
};
+99
View File
@@ -0,0 +1,99 @@
{
"_from": "@jridgewell/gen-mapping@^0.3.5",
"_id": "@jridgewell/gen-mapping@0.3.13",
"_inBundle": false,
"_integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==",
"_location": "/@jridgewell/gen-mapping",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "@jridgewell/gen-mapping@^0.3.5",
"name": "@jridgewell/gen-mapping",
"escapedName": "@jridgewell%2fgen-mapping",
"scope": "@jridgewell",
"rawSpec": "^0.3.5",
"saveSpec": null,
"fetchSpec": "^0.3.5"
},
"_requiredBy": [
"/@jridgewell/source-map"
],
"_resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz",
"_shasum": "6342a19f44347518c93e43b1ac69deb3c4656a1f",
"_spec": "@jridgewell/gen-mapping@^0.3.5",
"_where": "/root/dns/node_modules/@jridgewell/source-map",
"author": {
"name": "Justin Ridgewell",
"email": "justin@ridgewell.name"
},
"bugs": {
"url": "https://github.com/jridgewell/sourcemaps/issues"
},
"bundleDependencies": false,
"dependencies": {
"@jridgewell/sourcemap-codec": "^1.5.0",
"@jridgewell/trace-mapping": "^0.3.24"
},
"deprecated": false,
"description": "Generate source maps",
"exports": {
".": [
{
"import": {
"types": "./types/gen-mapping.d.mts",
"default": "./dist/gen-mapping.mjs"
},
"default": {
"types": "./types/gen-mapping.d.cts",
"default": "./dist/gen-mapping.umd.js"
}
},
"./dist/gen-mapping.umd.js"
],
"./package.json": "./package.json"
},
"files": [
"dist",
"src",
"types"
],
"homepage": "https://github.com/jridgewell/sourcemaps/tree/main/packages/gen-mapping",
"keywords": [
"source",
"map"
],
"license": "MIT",
"main": "dist/gen-mapping.umd.js",
"module": "dist/gen-mapping.mjs",
"name": "@jridgewell/gen-mapping",
"repository": {
"type": "git",
"url": "git+https://github.com/jridgewell/sourcemaps.git",
"directory": "packages/gen-mapping"
},
"scripts": {
"benchmark": "run-s build:code benchmark:*",
"benchmark:install": "cd benchmark && npm install",
"benchmark:only": "node --expose-gc benchmark/index.js",
"build": "run-s -n build:code build:types",
"build:code": "node ../../esbuild.mjs gen-mapping.ts",
"build:types": "run-s build:types:force build:types:emit build:types:mts",
"build:types:emit": "tsc --project tsconfig.build.json",
"build:types:force": "rimraf tsconfig.build.tsbuildinfo",
"build:types:mts": "node ../../mts-types.mjs",
"clean": "run-s -n clean:code clean:types",
"clean:code": "tsc --build --clean tsconfig.build.json",
"clean:types": "rimraf dist types",
"lint": "run-s -n lint:types lint:format",
"lint:format": "npm run test:format -- --write",
"lint:types": "npm run test:types -- --fix",
"prepublishOnly": "npm run-s -n build test",
"test": "run-s -n test:types test:only test:format",
"test:format": "prettier --check '{src,test}/**/*.ts'",
"test:only": "mocha",
"test:types": "eslint '{src,test}/**/*.ts'"
},
"types": "types/gen-mapping.d.cts",
"version": "0.3.13"
}
+614
View File
@@ -0,0 +1,614 @@
import { SetArray, put, remove } from './set-array';
import {
encode,
// encodeGeneratedRanges,
// encodeOriginalScopes
} from '@jridgewell/sourcemap-codec';
import { TraceMap, decodedMappings } from '@jridgewell/trace-mapping';
import {
COLUMN,
SOURCES_INDEX,
SOURCE_LINE,
SOURCE_COLUMN,
NAMES_INDEX,
} from './sourcemap-segment';
import type { SourceMapInput } from '@jridgewell/trace-mapping';
// import type { OriginalScope, GeneratedRange } from '@jridgewell/sourcemap-codec';
import type { SourceMapSegment } from './sourcemap-segment';
import type {
DecodedSourceMap,
EncodedSourceMap,
Pos,
Mapping,
// BindingExpressionRange,
// OriginalPos,
// OriginalScopeInfo,
// GeneratedRangeInfo,
} from './types';
export type { DecodedSourceMap, EncodedSourceMap, Mapping };
export type Options = {
file?: string | null;
sourceRoot?: string | null;
};
const NO_NAME = -1;
/**
* Provides the state to generate a sourcemap.
*/
export class GenMapping {
declare private _names: SetArray<string>;
declare private _sources: SetArray<string>;
declare private _sourcesContent: (string | null)[];
declare private _mappings: SourceMapSegment[][];
// private declare _originalScopes: OriginalScope[][];
// private declare _generatedRanges: GeneratedRange[];
declare private _ignoreList: SetArray<number>;
declare file: string | null | undefined;
declare sourceRoot: string | null | undefined;
constructor({ file, sourceRoot }: Options = {}) {
this._names = new SetArray();
this._sources = new SetArray();
this._sourcesContent = [];
this._mappings = [];
// this._originalScopes = [];
// this._generatedRanges = [];
this.file = file;
this.sourceRoot = sourceRoot;
this._ignoreList = new SetArray();
}
}
interface PublicMap {
_names: GenMapping['_names'];
_sources: GenMapping['_sources'];
_sourcesContent: GenMapping['_sourcesContent'];
_mappings: GenMapping['_mappings'];
// _originalScopes: GenMapping['_originalScopes'];
// _generatedRanges: GenMapping['_generatedRanges'];
_ignoreList: GenMapping['_ignoreList'];
}
/**
* Typescript doesn't allow friend access to private fields, so this just casts the map into a type
* with public access modifiers.
*/
function cast(map: unknown): PublicMap {
return map as any;
}
/**
* A low-level API to associate a generated position with an original source position. Line and
* column here are 0-based, unlike `addMapping`.
*/
export function addSegment(
map: GenMapping,
genLine: number,
genColumn: number,
source?: null,
sourceLine?: null,
sourceColumn?: null,
name?: null,
content?: null,
): void;
export function addSegment(
map: GenMapping,
genLine: number,
genColumn: number,
source: string,
sourceLine: number,
sourceColumn: number,
name?: null,
content?: string | null,
): void;
export function addSegment(
map: GenMapping,
genLine: number,
genColumn: number,
source: string,
sourceLine: number,
sourceColumn: number,
name: string,
content?: string | null,
): void;
export function addSegment(
map: GenMapping,
genLine: number,
genColumn: number,
source?: string | null,
sourceLine?: number | null,
sourceColumn?: number | null,
name?: string | null,
content?: string | null,
): void {
return addSegmentInternal(
false,
map,
genLine,
genColumn,
source,
sourceLine,
sourceColumn,
name,
content,
);
}
/**
* A high-level API to associate a generated position with an original source position. Line is
* 1-based, but column is 0-based, due to legacy behavior in `source-map` library.
*/
export function addMapping(
map: GenMapping,
mapping: {
generated: Pos;
source?: null;
original?: null;
name?: null;
content?: null;
},
): void;
export function addMapping(
map: GenMapping,
mapping: {
generated: Pos;
source: string;
original: Pos;
name?: null;
content?: string | null;
},
): void;
export function addMapping(
map: GenMapping,
mapping: {
generated: Pos;
source: string;
original: Pos;
name: string;
content?: string | null;
},
): void;
export function addMapping(
map: GenMapping,
mapping: {
generated: Pos;
source?: string | null;
original?: Pos | null;
name?: string | null;
content?: string | null;
},
): void {
return addMappingInternal(false, map, mapping as Parameters<typeof addMappingInternal>[2]);
}
/**
* Same as `addSegment`, but will only add the segment if it generates useful information in the
* resulting map. This only works correctly if segments are added **in order**, meaning you should
* not add a segment with a lower generated line/column than one that came before.
*/
export const maybeAddSegment: typeof addSegment = (
map,
genLine,
genColumn,
source,
sourceLine,
sourceColumn,
name,
content,
) => {
return addSegmentInternal(
true,
map,
genLine,
genColumn,
source,
sourceLine,
sourceColumn,
name,
content,
);
};
/**
* Same as `addMapping`, but will only add the mapping if it generates useful information in the
* resulting map. This only works correctly if mappings are added **in order**, meaning you should
* not add a mapping with a lower generated line/column than one that came before.
*/
export const maybeAddMapping: typeof addMapping = (map, mapping) => {
return addMappingInternal(true, map, mapping as Parameters<typeof addMappingInternal>[2]);
};
/**
* Adds/removes the content of the source file to the source map.
*/
export function setSourceContent(map: GenMapping, source: string, content: string | null): void {
const {
_sources: sources,
_sourcesContent: sourcesContent,
// _originalScopes: originalScopes,
} = cast(map);
const index = put(sources, source);
sourcesContent[index] = content;
// if (index === originalScopes.length) originalScopes[index] = [];
}
export function setIgnore(map: GenMapping, source: string, ignore = true) {
const {
_sources: sources,
_sourcesContent: sourcesContent,
_ignoreList: ignoreList,
// _originalScopes: originalScopes,
} = cast(map);
const index = put(sources, source);
if (index === sourcesContent.length) sourcesContent[index] = null;
// if (index === originalScopes.length) originalScopes[index] = [];
if (ignore) put(ignoreList, index);
else remove(ignoreList, index);
}
/**
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
export function toDecodedMap(map: GenMapping): DecodedSourceMap {
const {
_mappings: mappings,
_sources: sources,
_sourcesContent: sourcesContent,
_names: names,
_ignoreList: ignoreList,
// _originalScopes: originalScopes,
// _generatedRanges: generatedRanges,
} = cast(map);
removeEmptyFinalLines(mappings);
return {
version: 3,
file: map.file || undefined,
names: names.array,
sourceRoot: map.sourceRoot || undefined,
sources: sources.array,
sourcesContent,
mappings,
// originalScopes,
// generatedRanges,
ignoreList: ignoreList.array,
};
}
/**
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
export function toEncodedMap(map: GenMapping): EncodedSourceMap {
const decoded = toDecodedMap(map);
return Object.assign({}, decoded, {
// originalScopes: decoded.originalScopes.map((os) => encodeOriginalScopes(os)),
// generatedRanges: encodeGeneratedRanges(decoded.generatedRanges as GeneratedRange[]),
mappings: encode(decoded.mappings as SourceMapSegment[][]),
});
}
/**
* Constructs a new GenMapping, using the already present mappings of the input.
*/
export function fromMap(input: SourceMapInput): GenMapping {
const map = new TraceMap(input);
const gen = new GenMapping({ file: map.file, sourceRoot: map.sourceRoot });
putAll(cast(gen)._names, map.names);
putAll(cast(gen)._sources, map.sources as string[]);
cast(gen)._sourcesContent = map.sourcesContent || map.sources.map(() => null);
cast(gen)._mappings = decodedMappings(map) as GenMapping['_mappings'];
// TODO: implement originalScopes/generatedRanges
if (map.ignoreList) putAll(cast(gen)._ignoreList, map.ignoreList);
return gen;
}
/**
* Returns an array of high-level mapping objects for every recorded segment, which could then be
* passed to the `source-map` library.
*/
export function allMappings(map: GenMapping): Mapping[] {
const out: Mapping[] = [];
const { _mappings: mappings, _sources: sources, _names: names } = cast(map);
for (let i = 0; i < mappings.length; i++) {
const line = mappings[i];
for (let j = 0; j < line.length; j++) {
const seg = line[j];
const generated = { line: i + 1, column: seg[COLUMN] };
let source: string | undefined = undefined;
let original: Pos | undefined = undefined;
let name: string | undefined = undefined;
if (seg.length !== 1) {
source = sources.array[seg[SOURCES_INDEX]];
original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] };
if (seg.length === 5) name = names.array[seg[NAMES_INDEX]];
}
out.push({ generated, source, original, name } as Mapping);
}
}
return out;
}
// This split declaration is only so that terser can eliminate the static initialization block.
function addSegmentInternal<S extends string | null | undefined>(
skipable: boolean,
map: GenMapping,
genLine: number,
genColumn: number,
source: S,
sourceLine: S extends string ? number : null | undefined,
sourceColumn: S extends string ? number : null | undefined,
name: S extends string ? string | null | undefined : null | undefined,
content: S extends string ? string | null | undefined : null | undefined,
): void {
const {
_mappings: mappings,
_sources: sources,
_sourcesContent: sourcesContent,
_names: names,
// _originalScopes: originalScopes,
} = cast(map);
const line = getIndex(mappings, genLine);
const index = getColumnIndex(line, genColumn);
if (!source) {
if (skipable && skipSourceless(line, index)) return;
return insert(line, index, [genColumn]);
}
// Sigh, TypeScript can't figure out sourceLine and sourceColumn aren't nullish if source
// isn't nullish.
assert<number>(sourceLine);
assert<number>(sourceColumn);
const sourcesIndex = put(sources, source);
const namesIndex = name ? put(names, name) : NO_NAME;
if (sourcesIndex === sourcesContent.length) sourcesContent[sourcesIndex] = content ?? null;
// if (sourcesIndex === originalScopes.length) originalScopes[sourcesIndex] = [];
if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {
return;
}
return insert(
line,
index,
name
? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
: [genColumn, sourcesIndex, sourceLine, sourceColumn],
);
}
function assert<T>(_val: unknown): asserts _val is T {
// noop.
}
function getIndex<T>(arr: T[][], index: number): T[] {
for (let i = arr.length; i <= index; i++) {
arr[i] = [];
}
return arr[index];
}
function getColumnIndex(line: SourceMapSegment[], genColumn: number): number {
let index = line.length;
for (let i = index - 1; i >= 0; index = i--) {
const current = line[i];
if (genColumn >= current[COLUMN]) break;
}
return index;
}
function insert<T>(array: T[], index: number, value: T) {
for (let i = array.length; i > index; i--) {
array[i] = array[i - 1];
}
array[index] = value;
}
function removeEmptyFinalLines(mappings: SourceMapSegment[][]) {
const { length } = mappings;
let len = length;
for (let i = len - 1; i >= 0; len = i, i--) {
if (mappings[i].length > 0) break;
}
if (len < length) mappings.length = len;
}
function putAll<T extends string | number>(setarr: SetArray<T>, array: T[]) {
for (let i = 0; i < array.length; i++) put(setarr, array[i]);
}
function skipSourceless(line: SourceMapSegment[], index: number): boolean {
// The start of a line is already sourceless, so adding a sourceless segment to the beginning
// doesn't generate any useful information.
if (index === 0) return true;
const prev = line[index - 1];
// If the previous segment is also sourceless, then adding another sourceless segment doesn't
  // generate any new information. Else, this segment will end the source/named segment and point to
// a sourceless position, which is useful.
return prev.length === 1;
}
function skipSource(
line: SourceMapSegment[],
index: number,
sourcesIndex: number,
sourceLine: number,
sourceColumn: number,
namesIndex: number,
): boolean {
// A source/named segment at the start of a line gives position at that genColumn
if (index === 0) return false;
const prev = line[index - 1];
// If the previous segment is sourceless, then we're transitioning to a source.
if (prev.length === 1) return false;
// If the previous segment maps to the exact same source position, then this segment doesn't
// provide any new position information.
return (
sourcesIndex === prev[SOURCES_INDEX] &&
sourceLine === prev[SOURCE_LINE] &&
sourceColumn === prev[SOURCE_COLUMN] &&
namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME)
);
}
function addMappingInternal<S extends string | null | undefined>(
skipable: boolean,
map: GenMapping,
mapping: {
generated: Pos;
source: S;
original: S extends string ? Pos : null | undefined;
name: S extends string ? string | null | undefined : null | undefined;
content: S extends string ? string | null | undefined : null | undefined;
},
) {
const { generated, source, original, name, content } = mapping;
if (!source) {
return addSegmentInternal(
skipable,
map,
generated.line - 1,
generated.column,
null,
null,
null,
null,
null,
);
}
assert<Pos>(original);
return addSegmentInternal(
skipable,
map,
generated.line - 1,
generated.column,
source as string,
original.line - 1,
original.column,
name,
content,
);
}
/*
export function addOriginalScope(
map: GenMapping,
data: {
start: Pos;
end: Pos;
source: string;
kind: string;
name?: string;
variables?: string[];
},
): OriginalScopeInfo {
const { start, end, source, kind, name, variables } = data;
const {
_sources: sources,
_sourcesContent: sourcesContent,
_originalScopes: originalScopes,
_names: names,
} = cast(map);
const index = put(sources, source);
if (index === sourcesContent.length) sourcesContent[index] = null;
if (index === originalScopes.length) originalScopes[index] = [];
const kindIndex = put(names, kind);
const scope: OriginalScope = name
? [start.line - 1, start.column, end.line - 1, end.column, kindIndex, put(names, name)]
: [start.line - 1, start.column, end.line - 1, end.column, kindIndex];
if (variables) {
scope.vars = variables.map((v) => put(names, v));
}
const len = originalScopes[index].push(scope);
return [index, len - 1, variables];
}
*/
// Generated Ranges
/*
export function addGeneratedRange(
map: GenMapping,
data: {
start: Pos;
isScope: boolean;
originalScope?: OriginalScopeInfo;
callsite?: OriginalPos;
},
): GeneratedRangeInfo {
const { start, isScope, originalScope, callsite } = data;
const {
_originalScopes: originalScopes,
_sources: sources,
_sourcesContent: sourcesContent,
_generatedRanges: generatedRanges,
} = cast(map);
const range: GeneratedRange = [
start.line - 1,
start.column,
0,
0,
originalScope ? originalScope[0] : -1,
originalScope ? originalScope[1] : -1,
];
if (originalScope?.[2]) {
range.bindings = originalScope[2].map(() => [[-1]]);
}
if (callsite) {
const index = put(sources, callsite.source);
if (index === sourcesContent.length) sourcesContent[index] = null;
if (index === originalScopes.length) originalScopes[index] = [];
range.callsite = [index, callsite.line - 1, callsite.column];
}
if (isScope) range.isScope = true;
generatedRanges.push(range);
return [range, originalScope?.[2]];
}
export function setEndPosition(range: GeneratedRangeInfo, pos: Pos) {
range[0][2] = pos.line - 1;
range[0][3] = pos.column;
}
export function addBinding(
map: GenMapping,
range: GeneratedRangeInfo,
variable: string,
expression: string | BindingExpressionRange,
) {
const { _names: names } = cast(map);
const bindings = (range[0].bindings ||= []);
const vars = range[1];
const index = vars!.indexOf(variable);
const binding = getIndex(bindings, index);
if (typeof expression === 'string') binding[0] = [put(names, expression)];
else {
const { start } = expression;
binding.push([put(names, expression.expression), start.line - 1, start.column]);
}
}
*/
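To illustrate the skip logic above (`skipSourceless`/`skipSource`), a small sketch using the low-level 0-based segment API; the source name `input.ts` is a placeholder:

```ts
import { GenMapping, maybeAddSegment, toDecodedMap } from '@jridgewell/gen-mapping';

const map = new GenMapping();

// Generated line 0, column 0 -> input.ts line 0, column 0.
maybeAddSegment(map, 0, 0, 'input.ts', 0, 0);

// Same original position at a later column: skipSource() treats this as
// redundant, so nothing is inserted.
maybeAddSegment(map, 0, 5, 'input.ts', 0, 0);

// A sourceless segment after a sourced one ends the mapped range, so it is kept.
maybeAddSegment(map, 0, 8);

toDecodedMap(map).mappings;
// => [ [ [0, 0, 0, 0], [8] ] ]
```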
+82
View File
@@ -0,0 +1,82 @@
type Key = string | number | symbol;
/**
* SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
* index of the `key` in the backing array.
*
* This is designed to allow synchronizing a second array with the contents of the backing array,
* like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
* and there are never duplicates.
*/
export class SetArray<T extends Key = Key> {
declare private _indexes: Record<T, number | undefined>;
declare array: readonly T[];
constructor() {
this._indexes = { __proto__: null } as any;
this.array = [];
}
}
interface PublicSet<T extends Key> {
array: T[];
_indexes: SetArray<T>['_indexes'];
}
/**
* Typescript doesn't allow friend access to private fields, so this just casts the set into a type
* with public access modifiers.
*/
function cast<T extends Key>(set: SetArray<T>): PublicSet<T> {
return set as any;
}
/**
* Gets the index associated with `key` in the backing array, if it is already present.
*/
export function get<T extends Key>(setarr: SetArray<T>, key: T): number | undefined {
return cast(setarr)._indexes[key];
}
/**
* Puts `key` into the backing array, if it is not already present. Returns
* the index of the `key` in the backing array.
*/
export function put<T extends Key>(setarr: SetArray<T>, key: T): number {
// The key may or may not be present. If it is present, it's a number.
const index = get(setarr, key);
if (index !== undefined) return index;
const { array, _indexes: indexes } = cast(setarr);
const length = array.push(key);
return (indexes[key] = length - 1);
}
/**
* Pops the last added item out of the SetArray.
*/
export function pop<T extends Key>(setarr: SetArray<T>): void {
const { array, _indexes: indexes } = cast(setarr);
if (array.length === 0) return;
const last = array.pop()!;
indexes[last] = undefined;
}
/**
* Removes the key, if it exists in the set.
*/
export function remove<T extends Key>(setarr: SetArray<T>, key: T): void {
const index = get(setarr, key);
if (index === undefined) return;
const { array, _indexes: indexes } = cast(setarr);
for (let i = index + 1; i < array.length; i++) {
const k = array[i];
array[i - 1] = k;
indexes[k]!--;
}
indexes[key] = undefined;
array.pop();
}

Some files were not shown because too many files have changed in this diff