Merge branch 'develop'

C5H12O5
2023-11-07 18:05:24 +08:00
10 changed files with 167 additions and 32 deletions

.gitignore
View File

@@ -382,3 +382,4 @@ cython_debug/
 INFO
 .cache_*
 scrapeflows.conf
+configserver/authorization

View File

@@ -22,7 +22,8 @@ with open(_basedir / "templates/index.html", "r", encoding="utf-8") as html:
 def render_index(saved=None):
     """Render the index page."""
     source_html = ""
-    for site, site_conf in load_sites().items():
+    sites = load_sites()
+    for site, site_conf in sites.items():
         saved_conf = saved.get(site) if saved is not None else None
         config_html = render_config(site, site_conf, saved_conf)
         types = site_conf["types"]
@@ -30,8 +31,8 @@ def render_index(saved=None):
             "site": site,
             "movie": "selected" if "movie" in types else "disabled",
             "tvshow": "selected" if "tvshow" in types else "disabled",
-            "priority": 999,
-            "config": config_html
+            "priority": len(sites),
+            "config": config_html,
         }
         if saved_conf is not None:
             saved_types = saved_conf["types"]
@@ -100,33 +101,66 @@ _index_html = render_index()
 class RequestHandler(http.server.SimpleHTTPRequestHandler):
     """Request handler for the HTTP server."""

+    def do_AUTH(self):
+        filepath = _basedir / "authorization"
+        if not filepath.exists():
+            return True
+        with open(filepath, "r", encoding="utf-8") as reader:
+            saved_auth = reader.read()
+        if self.headers.get("Authorization") is not None:
+            auth_header = self.headers.get("Authorization")
+            if auth_header.split("Basic ")[1] == saved_auth:
+                return True
+        self.send_response(401)
+        self.send_header("WWW-Authenticate", 'Basic realm="Login Required"')
+        self.send_header("Content-type", "text/html")
+        self.end_headers()
+        self.wfile.write(b"Unauthorized")
+        return False
+
     def do_GET(self):
+        if not self.do_AUTH():
+            return
         if self.path == "/":
             self.send_response(200)
             self.send_header("Content-type", "text/html")
             self.end_headers()
-            conf_path = _basedir / "../scrapeflows.conf"
-            if conf_path.exists():
-                with open(conf_path, "r", encoding="utf-8") as reader:
+            filepath = _basedir / "../scrapeflows.conf"
+            if filepath.exists():
+                with open(filepath, "r", encoding="utf-8") as reader:
                     saved_conf = json.load(reader)
                 self.wfile.write(render_index(saved_conf).encode("utf-8"))
             else:
                 self.wfile.write(_index_html.encode("utf-8"))
-        elif self.path.endswith("/exit"):
+        elif self.path == "/exit":
             self.send_response(200)
             self.end_headers()
             self.server.server_close()
             sys.exit()

     def do_POST(self):
-        content_length = int(self.headers["Content-Length"])
-        body = self.rfile.read(content_length)
-        with open(_basedir / "../scrapeflows.conf", "w", encoding="utf-8") as w:
-            w.write(body.decode("utf-8"))
-        self.send_response(200)
-        self.end_headers()
+        if not self.do_AUTH():
+            return
+        filepath = None
+        if self.path == "/save":
+            filepath = _basedir / "../scrapeflows.conf"
+        elif self.path == "/auth":
+            filepath = _basedir / "authorization"
+        if filepath is not None:
+            content_length = int(self.headers["Content-Length"])
+            body = self.rfile.read(content_length)
+            with open(filepath, "w", encoding="utf-8") as w:
+                w.write(body.decode("utf-8"))
+            self.send_response(200)
+            self.end_headers()


 if __name__ == "__main__":
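
Note: the new do_AUTH hook gates every GET and POST behind HTTP Basic authentication. The token stored in configserver/authorization is compared verbatim with whatever follows "Basic " in the Authorization header. A minimal sketch of that round trip, using hypothetical credentials (the real token is whatever the index page posts to /auth):

import base64

# Hypothetical credentials, for illustration only.
username, password = "admin", "secret"

# What the index page stores via POST /auth (JavaScript: btoa(username + ':' + password)).
saved_auth = base64.b64encode(f"{username}:{password}".encode("utf-8")).decode("ascii")

# What the browser sends on subsequent requests after the 401 challenge.
auth_header = "Basic " + saved_auth

# The same comparison do_AUTH performs.
assert auth_header.split("Basic ")[1] == saved_auth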

View File

@@ -19,6 +19,12 @@
           <ion-icon name="open-outline"></ion-icon>
         </a>
       </li>
+      <li>
+        <a href="#auth" class="modal-trigger">
+          Auth
+          <ion-icon name="person-circle-outline"></ion-icon>
+        </a>
+      </li>
       <li>
         <a href="#exit" class="modal-trigger">
           Exit
@@ -68,6 +74,26 @@
       </div>
     </div>
+    <div id="auth" class="modal">
+      <div class="modal-content">
+        <h6>Enable basic authentication?</h6>
+        <div class="row">
+          <div class="input-field col s6">
+            <input id="username" type="text" class="validate">
+            <label for="username">Username</label>
+          </div>
+          <div class="input-field col s6">
+            <input id="password" type="password" class="validate">
+            <label for="password">Password</label>
+          </div>
+        </div>
+      </div>
+      <div class="modal-footer">
+        <a class="modal-close waves-effect btn-flat">Cancel</a>
+        <a id="auth-btn" class="modal-close waves-effect waves-green btn-flat">Confirm</a>
+      </div>
+    </div>
+
     <script src="https://cdnjs.cloudflare.com/ajax/libs/materialize/1.0.0/js/materialize.min.js"></script>
     <script type="module" src="https://unpkg.com/ionicons@7.1.0/dist/ionicons/ionicons.esm.js"></script>
     <script nomodule src="https://unpkg.com/ionicons@7.1.0/dist/ionicons/ionicons.js"></script>
@@ -99,10 +125,45 @@
         headers: {
           'Content-Type': 'application/json'
         },
-        body: JSON.stringify(data)
+        body: JSON.stringify(data, null, 2)
       }).then(response => {
         if (response.ok) {
-          M.toast({ html: 'Save successful!', classes: 'center-align green lighten-2' })
+          M.toast({
+            html: 'Save successful!',
+            classes: 'center-align green lighten-2',
+            displayLength: 1000
+          })
         } else {
           throw new Error(response.statusText);
         }
       }).catch(error => {
         M.toast({ html: error, classes: 'center-align red lighten-2' })
       });
     });
+
+    var authBtn = document.getElementById('auth-btn');
+    authBtn.addEventListener('click', event => {
+      event.preventDefault();
+      var username = document.getElementById('username').value;
+      var password = document.getElementById('password').value;
+      if (username == '' && password == '') {
+        return;
+      }
+      fetch('/auth', {
+        method: 'POST',
+        headers: {
+          'Content-Type': 'application/json'
+        },
+        body: btoa(username + ':' + password)
+      }).then(response => {
+        if (response.ok) {
+          M.toast({
+            html: 'Authentication enabled!',
+            classes: 'center-align green lighten-2',
+            displayLength: 1000
+          })
+        } else {
+          throw new Error(response.statusText);
+        }
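
Note: the Confirm button simply posts btoa(username + ':' + password) to /auth, and the save button posts the edited configuration to /save. A rough script-side equivalent using urllib; the base URL and credentials are assumptions (use whatever address the configserver is actually listening on):

import base64
import json
import urllib.request

BASE_URL = "http://127.0.0.1:8080"  # assumed address of the running configserver
token = base64.b64encode(b"admin:secret").decode("ascii")  # hypothetical credentials

# Enable basic auth (no Authorization header needed while no authorization file exists yet).
req = urllib.request.Request(BASE_URL + "/auth", data=token.encode("utf-8"), method="POST")
urllib.request.urlopen(req)

# From now on every request must carry the matching header, e.g. when saving a configuration.
req = urllib.request.Request(
    BASE_URL + "/save",
    data=json.dumps({}, indent=2).encode("utf-8"),  # placeholder body; the page sends the site config
    headers={"Content-Type": "application/json", "Authorization": "Basic " + token},
    method="POST",
)
urllib.request.urlopen(req)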

View File

@@ -71,7 +71,14 @@
         "source": "subject",
         "into": {
           "movie": {
-            "summary": "['xp_text', './/span[@property=\"v:summary\"]']"
+            "summary": "['xp_text', './/span[@property=\"v:summary\"]']",
+            "extra": {
+              "[plugin_id]": {
+                "reference": {
+                  "imdb": "['re_match', 'IMDb:</span>\\s*(.*?)\\s*<br>']"
+                }
+              }
+            }
           }
         }
       }
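
Note: the added reference extractor captures the IMDb id from the Douban detail page with the pattern IMDb:</span>\s*(.*?)\s*<br>. Assuming the re_match strategy behaves like an ordinary regex search, the capture works roughly like this (the HTML fragment and id are illustrative):

import re

html = "IMDb:</span> tt0111161<br>"  # illustrative fragment of a detail page
match = re.search(r"IMDb:</span>\s*(.*?)\s*<br>", html)
print(match.group(1))  # tt0111161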

View File

@@ -85,6 +85,9 @@
           "[plugin_id]": {
             "tvshow": {
               "summary": "['xp_text', './/span[@property=\"v:summary\"]']"
-            }
+            },
+            "reference": {
+              "imdb": "['re_match', 'IMDb:</span>\\s*(.*?)\\s*<br>']"
+            }
           }
         }

View File

@@ -69,8 +69,19 @@
           "director": "['re_matches', '\"name\":\"([^\"]*?)\"[^{{}}]*?\"department\":\"Directing\"']",
           "extra": {
             "[plugin_id]": {
-              "poster": ["['xp_text', './poster_path', 'prefix', 'https://image.tmdb.org/t/p/w500']"],
-              "backdrop": ["['xp_text', './backdrop_path', 'prefix', 'https://image.tmdb.org/t/p/original']"]
+              "poster": [
+                "['xp_text', './poster_path', 'prefix', 'https://image.tmdb.org/t/p/w500']"
+              ],
+              "backdrop": [
+                "['xp_text', './backdrop_path', 'prefix', 'https://image.tmdb.org/t/p/original']"
+              ],
+              "reference": {
+                "themoviedb": "['xp_text', './id', 'int']",
+                "imdb": "['xp_text', './imdb_id']"
+              },
+              "collection_id": {
+                "themoviedb": "['xp_text', './belongs_to_collection/id', 'int']"
+              }
             }
           }
         },

View File

@@ -63,8 +63,12 @@
           "summary": "['xp_text', './overview']",
           "extra": {
             "[plugin_id]": {
-              "poster": ["['xp_text', './poster_path', 'prefix', 'https://image.tmdb.org/t/p/w500']"],
-              "backdrop": ["['xp_text', './backdrop_path', 'prefix', 'https://image.tmdb.org/t/p/original']"]
+              "poster": [
+                "['xp_text', './poster_path', 'prefix', 'https://image.tmdb.org/t/p/w500']"
+              ],
+              "backdrop": [
+                "['xp_text', './backdrop_path', 'prefix', 'https://image.tmdb.org/t/p/original']"
+              ]
             }
           }
         }

View File

@@ -44,7 +44,7 @@
       "steps": [
         {
           "http": {
-            "url": "https://api.tmdb.org/3/tv/{id}?api_key={$parent[apikey]}&language={$parent[lang]}&append_to_response=content_ratings",
+            "url": "https://api.tmdb.org/3/tv/{id}?api_key={$parent[apikey]}&language={$parent[lang]}&append_to_response=content_ratings,external_ids",
             "method": "GET",
             "headers": {
               "Accept": "application/json"
@@ -69,10 +69,18 @@
           "summary": "['xp_text', './overview']",
           "extra": {
             "[plugin_id]": {
-              "poster": ["['xp_text', './poster_path', 'prefix', 'https://image.tmdb.org/t/p/w500']"],
-              "backdrop": ["['xp_text', './backdrop_path', 'prefix', 'https://image.tmdb.org/t/p/original']"]
+              "poster": [
+                "['xp_text', './poster_path', 'prefix', 'https://image.tmdb.org/t/p/w500']"
+              ],
+              "backdrop": [
+                "['xp_text', './backdrop_path', 'prefix', 'https://image.tmdb.org/t/p/original']"
+              ]
             }
           }
-        }
+        },
+        "reference": {
+          "themoviedb_tv": "['xp_text', './id', 'int']",
+          "imdb": "['xp_text', './external_ids/imdb_id']"
+        }
       }
     }
@@ -106,7 +114,9 @@
           "director": "['re_matches', '\"department\":\"Directing\"[^{{}}]*?\"name\":\"([^\"]*?)\"']",
           "extra": {
             "[plugin_id]": {
-              "poster": ["['xp_text', './still_path', 'prefix', 'https://image.tmdb.org/t/p/w500']"]
+              "poster": [
+                "['xp_text', './still_path', 'prefix', 'https://image.tmdb.org/t/p/w500']"
+              ]
             }
           }
         },
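
Note: adding external_ids to append_to_response is what makes the new ./external_ids/imdb_id path resolvable: the TV details response then carries an external_ids object alongside the numeric TMDB id. A sketch of the fields the two reference entries read, with illustrative values:

# Illustrative subset of a TMDB /3/tv/{id} response when external_ids is appended.
response = {
    "id": 1396,                                # read by "themoviedb_tv" via './id'
    "external_ids": {"imdb_id": "tt0903747"},  # read by "imdb" via './external_ids/imdb_id'
}
print(response["id"], response["external_ids"]["imdb_id"])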

View File

@@ -48,7 +48,7 @@ def collect(args: CollectArgs, context: dict) -> None:
     target = context.get(ctxkey)
     if isinstance(target, list) and isinstance(result, list):
-        target.extend(result)
+        target.extend(x for x in result if x not in target)
     elif isinstance(target, dict) and isinstance(result, dict):
         dict_update(target, result)
     else:
@@ -121,7 +121,7 @@ def _xpath_find(strategy: str, expr: str, etree: Element):
     elif strategy == "text":
         return etree.findtext(expr)
     elif strategy == "texts":
-        return [e.text for e in etree.findall(expr)]
+        return list(dict.fromkeys(e.text for e in etree.findall(expr)))
     elif strategy.startswith("attr_"):
         elem = etree.find(expr)
         if elem is not None:
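
Note: both changes here deduplicate while preserving order: collect() now skips items that are already in the target list, and the "texts" strategy filters repeats through dict.fromkeys, which keeps the first occurrence of each value. A small illustration:

texts = ["Drama", "Crime", "Drama"]

# dict.fromkeys drops repeats but keeps first-seen order.
print(list(dict.fromkeys(texts)))  # ['Drama', 'Crime']

# The guarded extend in collect() behaves the same way when merging into an existing list.
target = ["Drama"]
target.extend(x for x in texts if x not in target)
print(target)  # ['Drama', 'Crime']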

View File

@@ -22,14 +22,18 @@ def strftime(
     return time.strftime(pattern, time.localtime(timestamp))


-def dict_update(d: dict, u: dict) -> dict:
+def dict_update(d1: dict, d2: dict) -> dict:
     """Recursively update a dictionary."""
-    for k, v in u.items():
-        if k in d and isinstance(d[k], dict) and isinstance(v, dict):
-            d[k] = dict_update(d[k], v)
+    for k, v2 in d2.items():
+        v1 = d1.get(k, None)
+        if isinstance(v1, dict) and isinstance(v2, dict):
+            d1[k] = dict_update(d1[k], v2)
+        elif isinstance(v1, list) and isinstance(v2, list):
+            d1[k].extend(x for x in v2 if x not in v1)
         else:
-            d[k] = v
-    return d
+            d1[k] = v2
+    return d1


 def strip(obj: Any) -> Any:
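
Note: the reworked dict_update still merges nested dictionaries recursively, and now also merges lists without duplicating entries instead of overwriting them. A quick usage sketch calling the function defined above (keys and values are made up):

d1 = {"movie": {"genre": ["Drama"], "extra": {"plugin": {"poster": ["a.jpg"]}}}}
d2 = {"movie": {"genre": ["Drama", "Crime"], "extra": {"plugin": {"backdrop": ["b.jpg"]}}}}

dict_update(d1, d2)
# d1 == {"movie": {"genre": ["Drama", "Crime"],
#                  "extra": {"plugin": {"poster": ["a.jpg"], "backdrop": ["b.jpg"]}}}}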