mirror of https://github.com/hacs/integration.git (synced 2025-08-20 16:14:55 +00:00)
Use GraphQL to generate release content (#3848)
@@ -114,6 +114,7 @@ REPOSITORY_KEYS_TO_EXPORT = (
     ("last_version", None),
     ("manifest_name", None),
     ("open_issues", 0),
+    ("prerelease", None),
     ("stargazers_count", 0),
     ("topics", []),
 )
@@ -165,6 +166,7 @@ class RepositoryData:
     manifest_name: str = None
     new: bool = True
     open_issues: int = 0
+    prerelease: str = None
     published_tags: list[str] = []
     releases: bool = False
     selected_tag: str = None
@@ -569,9 +571,11 @@ class HacsRepository:
                 ),
                 validate,
             )
-        except BaseException:  # lgtm [py/catch-base-exception] pylint: disable=broad-except
+        # lgtm [py/catch-base-exception] pylint: disable=broad-except
+        except BaseException:
             validate.errors.append(
-                f"Download of {self.repository_manifest.filename} was not completed"
+                f"Download of {
+                    self.repository_manifest.filename} was not completed"
             )

     async def async_download_zip_file(
@@ -610,7 +614,8 @@ class HacsRepository:
                 return

             validate.errors.append(f"[{content['name']}] was not downloaded")
-        except BaseException:  # lgtm [py/catch-base-exception] pylint: disable=broad-except
+        # lgtm [py/catch-base-exception] pylint: disable=broad-except
+        except BaseException:
             validate.errors.append("Download was not completed")

     async def download_content(self, version: string | None = None) -> None:
@@ -719,7 +724,8 @@ class HacsRepository:
             )
             if response:
                 return json_loads(decode_content(response.data.content))
-        except BaseException:  # lgtm [py/catch-base-exception] pylint: disable=broad-except
+        # lgtm [py/catch-base-exception] pylint: disable=broad-except
+        except BaseException:
             pass

     async def async_get_info_file_contents(self, *, version: str | None = None, **kwargs) -> str:
@@ -820,7 +826,8 @@ class HacsRepository:
             )

         except (
-            BaseException  # lgtm [py/catch-base-exception] pylint: disable=broad-except
+            # lgtm [py/catch-base-exception] pylint: disable=broad-except
+            BaseException
         ) as exception:
             self.logger.debug("%s Removing %s failed with %s", self.string, local_path, exception)
             return False
@@ -945,7 +952,8 @@ class HacsRepository:
         ):
             persistent_directory = Backup(
                 hacs=self.hacs,
-                local_path=f"{self.content.path.local}/{self.repository_manifest.persistent_directory}",
+                local_path=f"{
+                    self.content.path.local}/{self.repository_manifest.persistent_directory}",
                 backup_path=tempfile.gettempdir() + "/hacs_persistent_directory/",
             )
             await self.hacs.hass.async_add_executor_job(persistent_directory.create)
@@ -1272,7 +1280,8 @@ class HacsRepository:
                 self.validate.errors.append(f"[{content.name}] was not downloaded.")

         except (
-            BaseException  # lgtm [py/catch-base-exception] pylint: disable=broad-except
+            # lgtm [py/catch-base-exception] pylint: disable=broad-except
+            BaseException
         ) as exception:
             self.validate.errors.append(f"Download was not completed [{exception}]")

@@ -1332,7 +1341,8 @@ class HacsRepository:
             return None

         result = await self.hacs.async_download_file(
-            f"https://raw.githubusercontent.com/{self.data.full_name}/{target_version}/{filename}",
+            f"https://raw.githubusercontent.com/{
+                self.data.full_name}/{target_version}/{filename}",
             nolog=True,
         )

@@ -1349,7 +1359,8 @@ class HacsRepository:
         self.logger.debug("%s Getting hacs.json for version=%s", self.string, version)
         try:
             result = await self.hacs.async_download_file(
-                f"https://raw.githubusercontent.com/{self.data.full_name}/{version}/hacs.json",
+                f"https://raw.githubusercontent.com/{
+                    self.data.full_name}/{version}/hacs.json",
                 nolog=True,
             )
             if result is None:
@@ -47,6 +47,7 @@ EXPORTED_DOWNLOADED_REPOSITORY_DATA = EXPORTED_REPOSITORY_DATA + (
     ("last_version", None),
     ("manifest_name", None),
     ("open_issues", 0),
+    ("prerelease", None),
     ("published_tags", []),
     ("releases", False),
     ("selected_tag", None),
@@ -289,6 +290,7 @@ class HacsData:
         repository.data.selected_tag = repository_data.get("selected_tag")
         repository.data.show_beta = repository_data.get("show_beta", False)
         repository.data.last_version = repository_data.get("last_version")
+        repository.data.prerelease = repository_data.get("prerelease")
         repository.data.last_commit = repository_data.get("last_commit")
         repository.data.installed_version = repository_data.get("version_installed")
         repository.data.installed_commit = repository_data.get("installed_commit")
@@ -301,6 +303,9 @@ class HacsData:
             repository_data.get("manifest") or repository_data.get("repository_manifest") or {}
         )

+        if repository.data.prerelease == repository.data.last_version:
+            repository.data.prerelease = None
+
         if repository.localpath is not None and is_safe(self.hacs, repository.localpath):
             # Set local path
             repository.content.path.local = repository.localpath
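The restore path above drops a stored prerelease that only mirrors the stable version. A minimal sketch of that rule as a standalone helper (illustrative only, the helper name is hypothetical and not part of the commit):

# Hedged sketch: why a prerelease equal to the stable release is discarded on restore.
def normalized_prerelease(prerelease: str | None, last_version: str | None) -> str | None:
    """Return the prerelease tag, or None when it only mirrors the stable release."""
    return None if prerelease == last_version else prerelease


assert normalized_prerelease("1.0.0", "1.0.0") is None
assert normalized_prerelease("2.0.0b0", "1.0.0") == "2.0.0b0"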
@@ -118,6 +118,7 @@ V2_COMMON_DATA_JSON_SCHEMA = {
     vol.Required("last_fetched"): vol.Any(int, float),
     vol.Required("last_updated"): str,
     vol.Optional("last_version"): str,
+    vol.Optional("prerelease"): str,
     vol.Required("manifest"): {
         vol.Optional("country"): vol.Any([str], False),
         vol.Optional("name"): str,
@@ -57,6 +57,34 @@ log_handler.addHandler(stream_handler)

 OUTPUT_DIR = os.path.join(os.getcwd(), "outputdata")

+GQL_RELEASES = """
+query ($owner: String!, $repo: String!) {
+  repository(owner: $owner, name: $repo) {
+    latestRelease {
+      tagName
+      releaseAssets(first: 5) {
+        nodes {
+          name
+          downloadCount
+        }
+      }
+    }
+    releases(last: 1, orderBy: {field: CREATED_AT, direction: ASC}) {
+      nodes {
+        tagName
+        isPrerelease
+        releaseAssets(first: 5) {
+          nodes {
+            name
+            downloadCount
+          }
+        }
+      }
+    }
+  }
+}
+"""
+

 def jsonprint(data: any):
     print(
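The GQL_RELEASES query added above fetches the latest (stable) release and the most recently created release in a single round trip. In the generator it is executed through self.githubapi.graphql(...); as a rough standalone illustration (not part of the commit, assuming aiohttp and a GITHUB_TOKEN environment variable, with hypothetical names), the same query could be posted directly to the GraphQL endpoint like this:

# Illustrative sketch only: run a query such as GQL_RELEASES against the
# GitHub GraphQL API with a plain aiohttp session.
import os

from aiohttp import ClientSession

GITHUB_GRAPHQL_URL = "https://api.github.com/graphql"


async def fetch_release_content(owner: str, repo: str, query: str) -> dict:
    """POST the GraphQL query and return the decoded JSON payload."""
    async with ClientSession() as session:
        response = await session.post(
            GITHUB_GRAPHQL_URL,
            json={"query": query, "variables": {"owner": owner, "repo": repo}},
            # GITHUB_TOKEN is a placeholder for whatever token the caller provides.
            headers={"Authorization": f"Bearer {os.environ['GITHUB_TOKEN']}"},
        )
        return await response.json()


# e.g. asyncio.run(fetch_release_content("hacs", "integration", GQL_RELEASES))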
@@ -130,9 +158,7 @@ class AdjustedHacsData(HacsData):
         """Store the repository data."""
         data = {"manifest": {}}
         for key, default in HACS_MANIFEST_KEYS_TO_EXPORT:
-            if (
-                value := getattr(repository.repository_manifest, key, default)
-            ) != default:
+            if (value := getattr(repository.repository_manifest, key, default)) != default:
                 data["manifest"][key] = value

         for key, default in REPOSITORY_KEYS_TO_EXPORT:
@@ -166,8 +192,7 @@ class AdjustedHacs(HacsBase):
         self.core.config_path = None
         self.configuration.token = token
         self.data = AdjustedHacsData(hacs=self)
-        self.data_client = HacsDataClient(
-            session=session, client_name="HACS/Generator")
+        self.data_client = HacsDataClient(session=session, client_name="HACS/Generator")

         self.github = GitHub(
             token,
@@ -210,41 +235,61 @@ class AdjustedHacs(HacsBase):
                     "%s Fetching repository releases",
                     repository.string,
                 )
-                response = await self.githubapi.generic(
-                    endpoint=f"/repos/{repository.data.full_name}/releases/latest",
-                    etag=repository.data.etag_releases,
+                repoowner, reponame = repository.data.full_name.split("/")
+                response = await self.githubapi.graphql(
+                    query=GQL_RELEASES,
+                    variables={"owner": repoowner, "repo": reponame},
                 )
-                response.data = (
-                    GitHubReleaseModel(
-                        response.data) if response.data else None
-                )
-                repository.data.etag_releases = response.etag
-                if (releases := response.data) is not None:
+                if (data := response.data["data"]["repository"]) is not None and (
+                    last_release_nodes := data.get("releases", {}).get("nodes", [])
+                ):
                     repository.data.releases = True
-                    repository.releases.objects = [releases]
-                    repository.data.published_tags = [
-                        x.tag_name for x in repository.releases.objects
-                    ]
+                    latest_release = data.get("latestRelease", {})
+                    last_release = last_release_nodes[0]
                     if (
-                        next_version := next(iter(repository.data.published_tags), None)
-                    ) != repository.data.last_version:
-                        repository.data.last_version = next_version
+                        repository.data.prerelease
+                        and repository.data.prerelease != last_release["tagName"]
+                    ) or (
+                        repository.data.last_version
+                        and repository.data.last_version != latest_release["tagName"]
+                    ):
                         repository.data.etag_repository = None

-            except GitHubNotModifiedException:
-                repository.data.releases = True
-                repository.logger.info(
-                    "%s Release data is up to date",
-                    repository.string,
-                )
+                    release_assets = latest_release.get("releaseAssets", {}).get("nodes", [])
+                    repository.data.downloads = (
+                        release_assets[0]["downloadCount"] if release_assets else 0
+                    )
+                    repository.data.published_tags = [repository.data.last_version]
+                    repository.releases.objects = [
+                        GitHubReleaseModel(
+                            {
+                                "tag_name": repository.data.last_version,
+                                "assets": [
+                                    {
+                                        "name": a["name"],
+                                        "download_count": a["downloadCount"],
+                                    }
+                                    for a in release_assets
+                                ],
+                            }
+                        )
+                    ]
+
+                    repository.data.prerelease = last_release.get("tagName")
+
+                    if repository.data.prerelease == repository.data.last_version:
+                        repository.data.prerelease = None
+
             except GitHubNotFoundException:
                 repository.data.releases = False
-                repository.logger.info(
-                    "%s No releases found", repository.string)
+                repository.logger.info("%s No releases found", repository.string)
             except GitHubException as exception:
                 repository.data.releases = False
-                repository.logger.warning(
-                    "%s %s", repository.string, exception)
+                repository.logger.warning("%s %s", repository.string, exception)

             await repository.common_update(
                 force=repository.data.etag_repository is None,
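In the rewritten block above, one GraphQL response replaces the earlier REST call: latestRelease supplies the stable tag and its asset download counts, the newest node under releases supplies the prerelease candidate, and the repository etag is reset when either stored value has drifted. A hedged sketch of that resolution as a pure function (names are hypothetical; the real code mutates repository.data in place):

# Hedged sketch: summarise the "repository" object of a GQL_RELEASES response
# the way the hunk above does, without touching repository.data.
def summarize_release_payload(repo_node: dict, stored_last_version: str | None) -> dict:
    """Return downloads and prerelease information derived from the GraphQL payload."""
    latest = repo_node.get("latestRelease") or {}
    assets = latest.get("releaseAssets", {}).get("nodes", [])
    newest_nodes = repo_node.get("releases", {}).get("nodes", [])
    prerelease = newest_nodes[0].get("tagName") if newest_nodes else None
    return {
        "downloads": assets[0]["downloadCount"] if assets else 0,
        # A prerelease equal to the stable tag carries no extra information.
        "prerelease": None if prerelease == stored_last_version else prerelease,
        "latest_tag": latest.get("tagName"),
    }

With the mocked payload used in the tests further down (stable 1.0.0 with a 4321-download asset, newest release 2.0.0b0), this yields downloads of 4321 and a prerelease of "2.0.0b0", which is what the new data.json snapshot records.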
@@ -330,8 +375,7 @@ class AdjustedHacs(HacsBase):
                 continue
             repository = self.repositories.get_by_full_name(repo)
             if repository is not None:
-                self.queue.add(self.concurrent_update_repository(
-                    repository=repository))
+                self.queue.add(self.concurrent_update_repository(repository=repository))
                 continue

             self.queue.add(
@@ -409,8 +453,7 @@ class AdjustedHacs(HacsBase):
 async def generate_category_data(category: str, repository_name: str = None):
     """Generate data."""
     async with ClientSession() as session:
-        hacs = AdjustedHacs(
-            session=session, token=os.getenv("DATA_GENERATOR_TOKEN"))
+        hacs = AdjustedHacs(session=session, token=os.getenv("DATA_GENERATOR_TOKEN"))
         os.makedirs(os.path.join(OUTPUT_DIR, category), exist_ok=True)
         os.makedirs(os.path.join(OUTPUT_DIR, "diff"), exist_ok=True)
         force = os.environ.get("FORCE_REPOSITORY_UPDATE") == "True"
@@ -450,11 +493,7 @@ async def generate_category_data(category: str, repository_name: str = None):
         )

         did_raise = False
-        if (
-            not updated_data
-            or len(updated_data) == 0
-            or not isinstance(updated_data, dict)
-        ):
+        if not updated_data or len(updated_data) == 0 or not isinstance(updated_data, dict):
             print_error_and_exit(f"Updated data is empty", category)
             did_raise = True

@@ -471,8 +510,7 @@ async def generate_category_data(category: str, repository_name: str = None):
             print_error_and_exit(f"Invalid data: {errors}", category)

         if did_raise:
-            print_error_and_exit(
-                "Validation did raise but did not exit!", category)
+            print_error_and_exit("Validation did raise but did not exit!", category)
             sys.exit(1)  # Fallback, should not be reached

         with open(
@@ -1252,6 +1252,23 @@
         "https://api.github.com/repos/hacs-test-org/theme-basic/releases": 1,
         "https://data-v2.hacs.xyz/theme/data.json": 1
     },
+    "tests/scripts/data/test_generate_category_data.py::test_generate_category_data_with_prior_content[category_test_data0]": {
+        "https://api.github.com/graphql": 1,
+        "https://api.github.com/rate_limit": 1,
+        "https://api.github.com/repos/hacs-test-org/integration-basic": 1,
+        "https://api.github.com/repos/hacs-test-org/integration-basic-custom": 1,
+        "https://api.github.com/repos/hacs-test-org/integration-basic-custom/contents/custom_components/example/manifest.json": 1,
+        "https://api.github.com/repos/hacs-test-org/integration-basic-custom/contents/hacs.json": 1,
+        "https://api.github.com/repos/hacs-test-org/integration-basic-custom/git/trees/1.0.0": 1,
+        "https://api.github.com/repos/hacs-test-org/integration-basic-custom/releases": 1,
+        "https://api.github.com/repos/hacs-test-org/integration-basic/git/trees/1.0.0": 1,
+        "https://api.github.com/repos/hacs/default/contents/integration": 1,
+        "https://api.github.com/repos/hacs/integration": 1,
+        "https://api.github.com/repos/hacs/integration/branches/main": 1,
+        "https://api.github.com/repos/hacs/integration/releases": 1,
+        "https://data-v2.hacs.xyz/integration/data.json": 1,
+        "https://data-v2.hacs.xyz/removed/repositories.json": 1
+    },
     "tests/test_config_flow.py::test_flow_with_activation_failure": {
         "https://github.com/login/device/code": 1,
         "https://github.com/login/oauth/access_token": 2
@@ -1,4 +1,5 @@
 """Test generate category data."""
+
 import json

 from homeassistant.core import HomeAssistant
@@ -56,9 +57,7 @@ async def test_generate_category_data_single_repository(
                 category_test_data['repository']}/repositories.json",
         )

-    with open(
-        f"{OUTPUT_DIR}/summary.json", encoding="utf-8"
-    ) as file:
+    with open(f"{OUTPUT_DIR}/summary.json", encoding="utf-8") as file:
         snapshots.assert_match(
             safe_json_dumps(json.loads(file.read())),
             f"scripts/data/generate_category_data/single/{category_test_data['category']}/{
@@ -97,11 +96,92 @@ async def test_generate_category_data(
                 category_test_data['category']}/repositories.json",
         )

-    with open(
-        f"{OUTPUT_DIR}/summary.json", encoding="utf-8"
-    ) as file:
+    with open(f"{OUTPUT_DIR}/summary.json", encoding="utf-8") as file:
         snapshots.assert_match(
             safe_json_dumps(recursive_remove_key(json.loads(file.read()), ())),
             f"scripts/data/generate_category_data/{
                 category_test_data['category']}/summary.json",
         )
+
+
+@pytest.mark.parametrize("category_test_data", [{"category": "integration"}])
+async def test_generate_category_data_with_prior_content(
+    hass: HomeAssistant,
+    response_mocker: ResponseMocker,
+    snapshots: SnapshotFixture,
+    category_test_data: CategoryTestData,
+):
+    """Test behaviour with prior content."""
+    response_mocker.add(
+        f"https://data-v2.hacs.xyz/{category_test_data['category']}/data.json",
+        MockedResponse(
+            content={
+                "1296269": {
+                    "description": "This your first repo!",
+                    "domain": "example",
+                    "downloads": 42,
+                    "etag_repository": "321",
+                    "full_name": "hacs-test-org/integration-basic",
+                    "last_updated": "2011-01-26T19:06:43Z",
+                    "last_version": "1.0.0",
+                    "manifest": {"name": "Proxy manifest"},
+                    "manifest_name": "Proxy manifest",
+                    "stargazers_count": 80,
+                    "topics": ["api", "atom", "electron", "octocat"],
+                }
+            }
+        ),
+    )
+    response_mocker.add(
+        "https://api.github.com/graphql",
+        MockedResponse(
+            content={
+                "data": {
+                    "repository": {
+                        "latestRelease": {
+                            "tagName": "1.0.0",
+                            "releaseAssets": {
+                                "nodes": [{"name": "basic.zip", "downloadCount": 4321}]
+                            },
+                        },
+                        "releases": {
+                            "nodes": [
+                                {
+                                    "tagName": "2.0.0b0",
+                                    "isPrerelease": True,
+                                    "releaseAssets": {
+                                        "nodes": [{"name": "basic.zip", "downloadCount": 1234}]
+                                    },
+                                }
+                            ]
+                        },
+                    }
+                }
+            }
+        ),
+    )
+    await generate_category_data(category_test_data["category"])
+
+    with open(f"{OUTPUT_DIR}/{category_test_data['category']}/data.json", encoding="utf-8") as file:
+        snapshots.assert_match(
+            safe_json_dumps(recursive_remove_key(
+                json.loads(file.read()), ("last_fetched",))),
+            f"scripts/data/generate_category_data_with_prior_content/{
+                category_test_data['category']}/data.json",
+        )
+
+    with open(
+        f"{OUTPUT_DIR}/{category_test_data['category']}/repositories.json", encoding="utf-8"
+    ) as file:
+        snapshots.assert_match(
+            safe_json_dumps(recursive_remove_key(json.loads(file.read()), ())),
+            f"scripts/data/generate_category_data_with_prior_content/{
+                category_test_data['category']}/repositories.json",
+        )
+
+    with open(f"{OUTPUT_DIR}/summary.json", encoding="utf-8") as file:
+        snapshots.assert_match(
+            safe_json_dumps(recursive_remove_key(json.loads(file.read()), ())),
+            f"scripts/data/generate_category_data_with_prior_content/{
+                category_test_data['category']}/summary.json",
+        )
@@ -131,6 +131,7 @@
     "manifest_name": "HACS",
     "new": false,
     "open_issues": 2,
+    "prerelease": null,
     "published_tags": [],
     "releases": true,
     "selected_tag": null,
@@ -74,6 +74,7 @@
     "manifest_name": "HACS",
     "new": false,
     "open_issues": 2,
+    "prerelease": null,
     "published_tags": [],
     "releases": true,
     "selected_tag": null,
@@ -0,0 +1,43 @@
+{
+  "1296269": {
+    "description": "This your first repo!",
+    "domain": "example",
+    "downloads": 4321,
+    "etag_repository": "321",
+    "full_name": "hacs-test-org/integration-basic",
+    "last_updated": "2011-01-26T19:06:43Z",
+    "last_version": "1.0.0",
+    "manifest": {
+      "name": "Proxy manifest"
+    },
+    "manifest_name": "Proxy manifest",
+    "prerelease": "2.0.0b0",
+    "stargazers_count": 80,
+    "topics": [
+      "api",
+      "atom",
+      "electron",
+      "octocat"
+    ]
+  },
+  "91296269": {
+    "description": "This your first repo!",
+    "domain": "example",
+    "downloads": 42,
+    "etag_repository": "321",
+    "full_name": "hacs-test-org/integration-basic-custom",
+    "last_updated": "2011-01-26T19:06:43Z",
+    "last_version": "1.0.0",
+    "manifest": {
+      "name": "Proxy manifest"
+    },
+    "manifest_name": "Proxy manifest",
+    "stargazers_count": 80,
+    "topics": [
+      "api",
+      "atom",
+      "electron",
+      "octocat"
+    ]
+  }
+}
@@ -0,0 +1,4 @@
+[
+  "hacs-test-org/integration-basic",
+  "hacs-test-org/integration-basic-custom"
+]
@@ -0,0 +1,19 @@
+{
+  "changed": 2,
+  "changed_pct": 100,
+  "current_count": 1,
+  "diff": 1,
+  "new_count": 2,
+  "rate_limit": {
+    "core": {
+      "limit": 5000,
+      "reset": 1691591363,
+      "used": 1
+    },
+    "graphql": {
+      "limit": 5000,
+      "reset": 1691593228,
+      "used": 7
+    }
+  }
+}