Use config file to store DoH resolvers

commit 24a846c87a
parent 085972c3db
Author: Xylitol
Date:   2023-12-30 19:50:14 +08:00

6 changed files with 46 additions and 43 deletions


@@ -1,4 +1,5 @@
 """A simple HTTP server for configuration."""
+import ast
 import http
 import json
 import string
@@ -20,6 +21,10 @@ with open(_basedir / "templates/source.html", "r", encoding="utf-8") as html:
 with open(_basedir / "templates/index.html", "r", encoding="utf-8") as html:
     _index_tmpl = string.Template(html.read())
 
+# initialize the DoH resolvers
+with open(_basedir / "../resolvers.conf", "r", encoding="utf-8") as doh_reader:
+    _doh_resolvers = ast.literal_eval(doh_reader.read())
+
 
 def render_index(saved=None):
     """Render the index page."""
@@ -49,7 +54,9 @@ def render_index(saved=None):
             source["priority"] = saved_conf["priority"]
         source_html += _source_tmpl.substitute(source)
-    return _index_tmpl.substitute(sources=source_html, version=plugin_version())
+    return _index_tmpl.substitute(
+        sources=source_html, resolvers=_doh_resolvers, version=plugin_version()
+    )
 
 
 def render_config(site, site_conf, saved_conf):
@@ -69,8 +76,8 @@ def load_sites():
     """Load the list of sites and types from flow definitions."""
     sites = {}
     for filepath in (_basedir / "../scrapeflows").glob("*.json"):
-        with open(filepath, "r", encoding="utf-8") as flowdef_json:
-            flowdef = json.load(flowdef_json)
+        with open(filepath, "r", encoding="utf-8") as def_reader:
+            flowdef = json.load(def_reader)
         site = flowdef["site"]
         site_conf = sites.get(site, {})
         site_conf["doh_enabled"] = flowdef.get("doh_enabled", False)
@@ -115,8 +122,8 @@ class RequestHandler(http.server.SimpleHTTPRequestHandler):
         if not filepath.exists():
             return True
-        with open(filepath, "r", encoding="utf-8") as reader:
-            saved_auth = reader.read()
+        with open(filepath, "r", encoding="utf-8") as auth_reader:
+            saved_auth = auth_reader.read()
 
         if self.headers.get("Authorization") is not None:
             auth_header = self.headers.get("Authorization")
@@ -141,8 +148,8 @@ class RequestHandler(http.server.SimpleHTTPRequestHandler):
         filepath = _basedir / "../scrapeflows.conf"
         if filepath.exists():
-            with open(filepath, "r", encoding="utf-8") as reader:
-                saved_conf = json.load(reader)
+            with open(filepath, "r", encoding="utf-8") as conf_reader:
+                saved_conf = json.load(conf_reader)
             self.wfile.write(render_index(saved_conf).encode("utf-8"))
         else:
             self.wfile.write(_index_html.encode("utf-8"))
@@ -166,8 +173,8 @@ class RequestHandler(http.server.SimpleHTTPRequestHandler):
         if filepath is not None:
             content_length = int(self.headers["Content-Length"])
             body = self.rfile.read(content_length)
-            with open(filepath, "w", encoding="utf-8") as w:
-                w.write(body.decode("utf-8"))
+            with open(filepath, "w", encoding="utf-8") as writer:
+                writer.write(body.decode("utf-8"))
             self.send_response(200)
             self.end_headers()


@@ -115,16 +115,7 @@
 <script src="https://cdn.jsdelivr.net/npm/@materializecss/materialize@1.2.2/dist/js/materialize.min.js"></script>
 <script type="text/javascript">
-    const resolvers = [
-        "1.0.0.1",
-        '1.1.1.1',
-        '9.9.9.9',
-        '149.112.112.112',
-        "208.67.220.220",
-        "208.67.222.222",
-        "dns.google",
-        "dns.adguard-dns.com",
-    ]
+    const resolvers = ${resolvers}
     document.addEventListener('DOMContentLoaded', () => {
         M.AutoInit();
         M.Chips.init(document.getElementById('doh'), {
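
The ${resolvers} placeholder is filled by string.Template.substitute() in the config server, which stringifies whatever value it receives. Passing the Python list straight through works because str() of a list of strings is also a valid JavaScript array literal. A minimal sketch of that substitution, using a shortened two-entry list for illustration (the real template text and resolver list come from templates/index.html and resolvers.conf):

import string

# Illustrative template fragment; not the full index.html.
snippet = string.Template("const resolvers = ${resolvers}")
rendered = snippet.substitute(resolvers=["1.0.0.1", "dns.google"])
print(rendered)  # const resolvers = ['1.0.0.1', 'dns.google'] -- valid JavaScript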

resolvers.conf (new file)

@@ -0,0 +1,19 @@
+[
+    # https://developers.cloudflare.com/1.1.1.1/encryption/dns-over-https
+    "1.0.0.1",
+    "1.1.1.1",
+    # https://support.quad9.net/hc/en-us
+    "9.9.9.9",
+    "149.112.112.112",
+    # https://support.opendns.com/hc/en-us
+    "208.67.220.220",
+    "208.67.222.222",
+    # https://developers.google.com/speed/public-dns/docs/doh
+    "dns.google",
+    # https://adguard-dns.io/public-dns.html
+    "dns.adguard-dns.com",
+]
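
The new file is a Python list literal rather than JSON, which is why both readers parse it with ast.literal_eval: the # comments and the trailing comma above are legal in a Python literal but would be rejected by json.load. A small, self-contained illustration of the difference (the inlined text only mirrors the resolvers.conf format):

import ast
import json

text = """[
    # a comment, as in resolvers.conf
    "1.0.0.1",
    "dns.google",
]"""
print(ast.literal_eval(text))  # ['1.0.0.1', 'dns.google']
try:
    json.loads(text)
except json.JSONDecodeError as err:
    print("not valid JSON:", err)  # comments and trailing commas are rejected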


@@ -1,4 +1,5 @@
 """The implementation of the doh function."""
+import ast
 import base64
 import concurrent
 import concurrent.futures
@@ -9,6 +10,7 @@ import struct
 import urllib
 import urllib.request
 from dataclasses import dataclass
+from pathlib import Path
 from typing import Dict, List, Optional
 
 from scraper.functions import Args, Func
@@ -24,25 +26,9 @@ _executor = concurrent.futures.ThreadPoolExecutor()
 # define default DoH configuration
 _doh_timeout = 5
 _doh_cache: Dict[str, str] = {}
-_doh_resolvers = [
-    # https://developers.cloudflare.com/1.1.1.1/encryption/dns-over-https
-    "1.0.0.1",
-    "1.1.1.1",
-    # https://support.quad9.net/hc/en-us
-    "9.9.9.9",
-    "149.112.112.112",
-    # https://support.opendns.com/hc/en-us
-    "208.67.220.220",
-    "208.67.222.222",
-    # https://developers.google.com/speed/public-dns/docs/doh
-    "dns.google",
-    # https://adguard-dns.io/public-dns.html
-    "dns.adguard-dns.com",
-]
+_resolvers_conf = Path(__file__).resolve().parent / "../../resolvers.conf"
+with open(_resolvers_conf, "r", encoding="utf-8") as doh_reader:
+    _doh_resolvers = ast.literal_eval(doh_reader.read())
 
 
 def _patched_getaddrinfo(host, *args, **kwargs):
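
The doh module now locates the shared config relative to its own file: assuming the plugin is laid out as <root>/scraper/functions/doh.py with resolvers.conf at <root>, the two ".." components walk back up to the package root. A short sketch of the path arithmetic, using a hypothetical install prefix:

from pathlib import Path

# Hypothetical install location, only to show how the relative path resolves.
module_file = Path("/opt/plugin/scraper/functions/doh.py")
conf = module_file.resolve().parent / "../../resolvers.conf"
print(conf)            # /opt/plugin/scraper/functions/../../resolvers.conf
print(conf.resolve())  # /opt/plugin/resolvers.conf
# open() also accepts the un-normalized form, which is what the module relies on.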


@@ -125,12 +125,12 @@ class ScrapeFlow:
         flowconf = None
         if _flowconf_path.exists():
-            with open(_flowconf_path, "r", encoding="utf-8") as reader:
-                flowconf = json.load(reader)
+            with open(_flowconf_path, "r", encoding="utf-8") as conf_reader:
+                flowconf = json.load(conf_reader)
         for filepath in path.glob("*.json"):
-            with open(filepath, "r", encoding="utf-8") as flowdef_json:
-                flowdef = json.load(flowdef_json)
+            with open(filepath, "r", encoding="utf-8") as def_reader:
+                flowdef = json.load(def_reader)
             site = flowdef["site"]
             siteconf = None
             if flowconf is not None and site in flowconf:


@@ -50,7 +50,7 @@ setup(
         "configserver"
     ],
     package_data={
-        "": ["run.sh", "INFO"],
+        "": ["run.sh", "resolvers.conf", "INFO"],
         "scrapeflows": ["*.json"],
         "configserver": ["templates/*.html"],
     },