|
Packit |
63bb0d |
#!/usr/bin/python3
|
|
Packit |
63bb0d |
|
|
Packit |
63bb0d |
import datetime
|
|
Packit |
63bb0d |
import dnf
|
|
Packit |
63bb0d |
import hashlib
|
|
Packit |
63bb0d |
import hawkey
|
|
Packit |
63bb0d |
import json
|
|
Packit |
63bb0d |
import sys
|
|
Packit |
63bb0d |
import tempfile
|
|
Packit |
63bb0d |
|
|
Packit |
63bb0d |
# Exit status used by exit_with_dnf_error() below when a dnf operation fails;
# a JSON error object is written to stdout before exiting with this code.
DNF_ERROR_EXIT_CODE = 10
|
|
Packit |
63bb0d |
|
|
Packit |
63bb0d |
|
|
Packit |
63bb0d |
def timestamp_to_rfc3339(timestamp):
    """Convert a UNIX epoch timestamp to an RFC 3339 UTC string.

    Example: 0 -> "1970-01-01T00:00:00Z".
    """
    # Use the `timestamp` parameter: the previous code read the module-level
    # `package` loop variable, which only worked by coincidence when called
    # from the "dump" loop at the bottom of this script.
    # fromtimestamp(..., tz=utc) replaces the deprecated utcfromtimestamp()
    # and yields the identical wall-clock value.
    d = datetime.datetime.fromtimestamp(timestamp, tz=datetime.timezone.utc)
    return d.strftime('%Y-%m-%dT%H:%M:%SZ')
|
|
Packit |
63bb0d |
|
|
Packit |
63bb0d |
|
|
Packit |
63bb0d |
def dnfrepo(desc, parent_conf=None):
    """Makes a dnf.repo.Repo out of a JSON repository description

    `desc` must contain an "id" key and exactly one source key out of
    "baseurl", "metalink" or "mirrorlist". Optional keys: "ignoressl",
    "sslcacert", "sslclientkey", "sslclientcert", "metadata_expire".

    Raises ValueError if no source key is present.
    """

    repo = dnf.repo.Repo(desc["id"], parent_conf)

    if "baseurl" in desc:
        repo.baseurl = desc["baseurl"]
    elif "metalink" in desc:
        repo.metalink = desc["metalink"]
    elif "mirrorlist" in desc:
        repo.mirrorlist = desc["mirrorlist"]
    else:
        # A repo description without any source is a caller error. Raise
        # explicitly instead of `assert False`, which is silently stripped
        # when Python runs with -O and would leave a sourceless repo behind.
        raise ValueError("repo description contains no baseurl, metalink or mirrorlist")

    if desc.get("ignoressl", False):
        repo.sslverify = False
    if "sslcacert" in desc:
        repo.sslcacert = desc["sslcacert"]
    if "sslclientkey" in desc:
        repo.sslclientkey = desc["sslclientkey"]
    if "sslclientcert" in desc:
        repo.sslclientcert = desc["sslclientcert"]

    # In dnf, the default metadata expiration time is 48 hours. However,
    # some repositories never expire the metadata, and others expire it much
    # sooner than that. Therefore we must make this configurable. If nothing
    # is provided, we default to never expiring the metadata, as hardcoding
    # some arbitrary value does not seem very helpful.
    repo.metadata_expire = desc.get("metadata_expire", "-1")

    return repo
|
|
Packit |
63bb0d |
|
|
Packit |
63bb0d |
|
|
Packit |
63bb0d |
def create_base(repos, module_platform_id, persistdir, cachedir, arch):
    """Build a dnf.Base configured for the given repos, paths and arch.

    The base is isolated from the host: no system config file is read and no
    system repositories are loaded into the sack.
    """
    base = dnf.Base()
    conf = base.conf

    # Prefer the fastest mirrors for metadata (depsolving) and package
    # downloads, and move on to another mirror if connecting takes longer
    # than 5 seconds.
    conf.fastestmirror = True
    conf.timeout = 5

    # Remaining dnf configuration: keep state in caller-supplied directories
    # and ignore any host-level configuration file.
    conf.module_platform_id = module_platform_id
    conf.config_file_path = "/dev/null"
    conf.persistdir = persistdir
    conf.cachedir = cachedir
    conf.substitutions['arch'] = arch
    conf.substitutions['basearch'] = dnf.rpm.basearch(arch)

    for desc in repos:
        base.repos.add(dnfrepo(desc, conf))

    base.fill_sack(load_system_repo=False)
    return base
|
|
Packit |
63bb0d |
|
|
Packit |
63bb0d |
|
|
Packit |
63bb0d |
def exit_with_dnf_error(kind: str, reason: str):
    """Report a dnf failure as JSON on stdout and terminate.

    Exits the process with DNF_ERROR_EXIT_CODE; never returns.
    """
    error = {"kind": kind, "reason": reason}
    sys.stdout.write(json.dumps(error))
    sys.exit(DNF_ERROR_EXIT_CODE)
|
|
Packit |
63bb0d |
|
|
Packit |
63bb0d |
|
|
Packit |
63bb0d |
def repo_checksums(base):
    """Return {repo_id: "sha256:<hexdigest>"} for every enabled repository.

    The digest is computed over each repo's cached repomd.xml. The cache
    directory name is derived with the same algorithm libdnf uses:
    https://github.com/rpm-software-management/libdnf/blob/master/libdnf/repo/Repo.cpp#L1288
    """
    result = {}
    for repo in base.repos.iter_enabled():
        # The cache key is based on the repo's first configured source URL,
        # checked in the same precedence order libdnf uses.
        url = repo.metalink or repo.mirrorlist
        if not url:
            assert repo.baseurl
            url = repo.baseurl[0]

        cache_suffix = hashlib.sha256(url.encode()).hexdigest()[:16]

        repomd_path = f"{base.conf.cachedir}/{repo.id}-{cache_suffix}/repodata/repomd.xml"
        with open(repomd_path, "rb") as repomd_file:
            repomd = repomd_file.read()

        result[repo.id] = "sha256:" + hashlib.sha256(repomd).hexdigest()

    return result
|
|
Packit |
63bb0d |
|
|
Packit |
63bb0d |
|
|
Packit |
63bb0d |
# --- main -------------------------------------------------------------------
# Protocol: a single JSON object is read from stdin:
#   {"command": "dump"|"depsolve", "arguments": {...}}
# and a single JSON result object is written to stdout. Failures are reported
# via exit_with_dnf_error(), which emits a JSON error object on stdout and
# exits with DNF_ERROR_EXIT_CODE.
call = json.load(sys.stdin)
command = call["command"]
arguments = call["arguments"]
repos = arguments.get("repos", {})
arch = arguments["arch"]
cachedir = arguments["cachedir"]
module_platform_id = arguments["module_platform_id"]

# Use a throw-away persistdir so no dnf state leaks between invocations; the
# package metadata cache (cachedir) is supplied by the caller and persists.
with tempfile.TemporaryDirectory() as persistdir:
    try:
        base = create_base(
            repos,
            module_platform_id,
            persistdir,
            cachedir,
            arch
        )
    except dnf.exceptions.Error as e:
        exit_with_dnf_error(
            type(e).__name__,
            f"Error occurred when setting up repo: {e}"
        )

    if command == "dump":
        # Dump metadata for every available package in the enabled repos.
        packages = []
        for package in base.sack.query().available():
            packages.append({
                "name": package.name,
                "summary": package.summary,
                "description": package.description,
                "url": package.url,
                "epoch": package.epoch,
                "version": package.version,
                "release": package.release,
                "arch": package.arch,
                "buildtime": timestamp_to_rfc3339(package.buildtime),
                "license": package.license
            })
        json.dump({
            "checksums": repo_checksums(base),
            "packages": packages
        }, sys.stdout)

    elif command == "depsolve":
        # Mark the requested specs for installation, then resolve the full
        # dependency set. (An unused `errors` accumulator was removed here.)
        try:
            base.install_specs(
                arguments["package-specs"],
                exclude=arguments.get("exclude-specs", [])
            )
        except dnf.exceptions.MarkingErrors as e:
            exit_with_dnf_error(
                "MarkingErrors",
                f"Error occurred when marking packages for installation: {e}"
            )

        try:
            base.resolve()
        except dnf.exceptions.DepsolveError as e:
            exit_with_dnf_error(
                "DepsolveError",
                (
                    "There was a problem depsolving "
                    f"{arguments['package-specs']}: {e}"
                )
            )

        dependencies = []
        for tsi in base.transaction:
            # Avoid using the install_set() helper, as it does not guarantee
            # a stable order
            if tsi.action not in dnf.transaction.FORWARD_ACTIONS:
                continue
            package = tsi.pkg

            dependencies.append({
                "name": package.name,
                "epoch": package.epoch,
                "version": package.version,
                "release": package.release,
                "arch": package.arch,
                "repo_id": package.reponame,
                "path": package.relativepath,
                "remote_location": package.remote_location(),
                "checksum": (
                    f"{hawkey.chksum_name(package.chksum[0])}:"
                    f"{package.chksum[1].hex()}"
                )
            })
        json.dump({
            "checksums": repo_checksums(base),
            "dependencies": dependencies
        }, sys.stdout)
|