mirror of https://github.com/PartialVolume/shredos.x86_64.git, synced 2026-02-20 09:35:26 +00:00

Update buildroot to 2024.02.2, kernel to 6.6.2, GPU & DRM drivers, nwipe to v0.37 plus many others.

This commit is contained in:
	utils/add-custom-hashes (new executable file, 103 lines)

@@ -0,0 +1,103 @@
+#!/usr/bin/env bash
+set -e
+
+# Add hash files for packages with custom versions for
+# BR2_DOWNLOAD_FORCE_CHECK_HASHES=y
+#
+# Run in a configured Buildroot directory, e.g.
+# make foo_defconfig; ./utils/add-custom-hashes
+
+# print BR-style message
+# message <info message>
+message() {
+	tput smso 2>/dev/null
+	echo "$*"
+	tput rmso 2>/dev/null
+}
+
+# print error message and exit
+# die <error message>
+die() {
+	echo "Error: $*" >&2
+	exit 1
+}
+
+# get package(s) for download file, if any
+# get_pkgs <json> <file>
+get_pkgs() {
+	jq --arg file "$2" -r \
+		'to_entries[] | select(.value.downloads[0].source == $file) | .key | strings' "$1"
+}
+
+# get download dir for package
+# get_pkg_dl_dir <json> <package>
+get_pkg_dl_dir() {
+	jq --arg pkg "$2" -r '.[$pkg].dl_dir | strings' "$1"
+}
+
+# generate hash file for download file
+# gen_hash <dir> <file>
+gen_hash() {
+	(
+		cd "$1" && printf '# Locally calculated\nsha256 ' && sha256sum "$2"
+	)
+}
+
+command -v jq >/dev/null || die 'Script needs jq'
+
+[ -e .config ] || \
+	die "No .config found, please run this in a configured Buildroot (O=) directory"
+
+message Collecting data
+
+eval "$(make -s VARS='TOPDIR DL_DIR BR_NO_CHECK_HASH_FOR BR2_GLOBAL_PATCH_DIR' QUOTED_VARS=YES printvars)"
+# global patch dir may already have quotes
+BR2_GLOBAL_PATCH_DIR=$(echo "$BR2_GLOBAL_PATCH_DIR" | tr -d '"')
+
+[ -n "$BR2_GLOBAL_PATCH_DIR" ] || die "No BR2_GLOBAL_PATCH_DIR defined, nothing to do"
+[ -n "$BR_NO_CHECK_HASH_FOR" ] || die "No packages without hashes found, nothing to do"
+
+[ -d "$TOPDIR" ] || die "TOPDIR ($TOPDIR) does not look correct"
+[ -d "$DL_DIR" ] || die "DL_DIR ($DL_DIR) does not look correct"
+
+# patch dir may contain multiple dirs, use the last one
+# shellcheck disable=SC2086 # we need the word splitting
+set -- $BR2_GLOBAL_PATCH_DIR
+if [ $# -gt 1 ]; then
+	BR2_GLOBAL_PATCH_DIR="${!#}";
+	message BR2_GLOBAL_PATCH_DIR contains multiple directories, using "$BR2_GLOBAL_PATCH_DIR"
+fi
+
+# patch dir may be relative to TOPDIR
+case "$BR2_GLOBAL_PATCH_DIR" in
+	/*) ;;
+	*)  BR2_GLOBAL_PATCH_DIR="$TOPDIR/$BR2_GLOBAL_PATCH_DIR"
+		;;
+esac
+
+[ -d "$BR2_GLOBAL_PATCH_DIR" ] \
+	|| die "BR2_GLOBAL_PATCH_DIR ($BR2_GLOBAL_PATCH_DIR) does not look correct"
+
+trap 'rm -f "$JSON"' EXIT
+JSON=$(mktemp)
+make show-info > "$JSON"
+
+# ensure files have been downloaded, but without checking
+make BR2_DOWNLOAD_FORCE_CHECK_HASHES= source
+
+message Updating hashes
+
+for file in $BR_NO_CHECK_HASH_FOR; do
+	for pkg in $(get_pkgs "$JSON" "$file"); do
+		HASHFILE="$BR2_GLOBAL_PATCH_DIR/$pkg/$pkg.hash"
+		PKG_DL_DIR=$(get_pkg_dl_dir "$JSON" "$pkg")
+		message "Adding hash for $file to $HASHFILE"
+		mkdir -p "${HASHFILE%/*}"
+		gen_hash "$DL_DIR/$PKG_DL_DIR" "$file" > "$HASHFILE"
+	done
+done
+
+message Verifying hashes
+
+make clean
+make BR2_DOWNLOAD_FORCE_CHECK_HASHES=y source

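For reference, a minimal usage sketch based on the script's own header comment (foo_defconfig stands in for any real defconfig name):

	make foo_defconfig
	./utils/add-custom-hashes

Each package listed in BR_NO_CHECK_HASH_FOR then gets a <pkg>.hash file under the (last) BR2_GLOBAL_PATCH_DIR, written by gen_hash() in the form:

	# Locally calculated
	sha256 <sha256sum output for the downloaded file>
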
@@ -69,7 +69,7 @@ def parse_args():
                         help='override the default list of ignored warnings')
 
     parser.add_argument("--manual-url", action="store",
-                        default="http://nightly.buildroot.org/",
+                        default="https://nightly.buildroot.org/",
                         help="default: %(default)s")
     parser.add_argument("--verbose", "-v", action="count", default=0)
     parser.add_argument("--quiet", "-q", action="count", default=0)

@@ -233,16 +233,18 @@ def check_file_using_lib(fname):
         nwarnings += warn
 
     lastline = ""
-    for lineno, text in enumerate(open(fname, "r", errors="surrogateescape").readlines()):
-        nlines += 1
-        for name, cf in objects:
-            if cf.disable.search(lastline):
-                continue
-            warn, fail = print_warnings(cf.check_line(lineno + 1, text), name in xfail)
-            if fail > 0:
-                failed.add(name)
-            nwarnings += warn
-        lastline = text
+    with open(fname, "r", errors="surrogateescape") as f:
+        for lineno, text in enumerate(f):
+            nlines += 1
+            for name, cf in objects:
+                if cf.disable.search(lastline):
+                    continue
+                line_sts = cf.check_line(lineno + 1, text)
+                warn, fail = print_warnings(line_sts, name in xfail)
+                if fail > 0:
+                    failed.add(name)
+                nwarnings += warn
+            lastline = text
 
     for name, cf in objects:
         warn, fail = print_warnings(cf.after(), name in xfail)

@@ -366,3 +366,76 @@ class VariableWithBraces(_CheckFunction):
             return ["{}:{}: use $() to delimit variables, not ${{}}"
                     .format(self.filename, lineno),
                     text]
+
+
+class CPEVariables(_CheckFunction):
+    """
+    Check that the values for the CPE variables are not the default.
+    - CPE_ID_* variables must not be set to their default
+    - CPE_ID_VALID must not be set if a non-default CPE_ID variable is set
+    """
+    def before(self):
+        pkg, _ = os.path.splitext(os.path.basename(self.filename))
+        self.CPE_fields_defaults = {
+            "VALID": "NO",
+            "PREFIX": "cpe:2.3:a",
+            "VENDOR": f"{pkg}_project",
+            "PRODUCT": pkg,
+            "VERSION": None,
+            "UPDATE": "*",
+        }
+        self.valid = None
+        self.non_defaults = 0
+        self.CPE_FIELDS_RE = re.compile(
+            r"^\s*(.+_CPE_ID_({}))\s*=\s*(.+)$"
+            .format("|".join(self.CPE_fields_defaults)),
+        )
+        self.VERSION_RE = re.compile(
+            rf"^(HOST_)?{pkg.upper().replace('-', '_')}_VERSION\s*=\s*(.+)$",
+        )
+        self.COMMENT_RE = re.compile(r"^\s*#.*")
+
+    def check_line(self, lineno, text):
+        text = self.COMMENT_RE.sub('', text.rstrip())
+
+        # WARNING! The VERSION_RE can _also_ match the same lines as CPE_FIELDS_RE,
+        # but not the other way around. So we must first check for CPE_FIELDS_RE,
+        # and if not matched, then and only then check for VERSION_RE.
+        match = self.CPE_FIELDS_RE.match(text)
+        if match:
+            var, field, val = match.groups()
+            return self._check_field(lineno, text, field, var, val)
+
+        match = self.VERSION_RE.match(text)
+        if match:
+            self.CPE_fields_defaults["VERSION"] = match.groups()[1]
+
+    def after(self):
+        # "VALID" counts in the non-defaults; so when "VALID" is present,
+        # 1 non-default means only "VALID" is present, so that's OK.
+        if self.valid and self.non_defaults > 1:
+            return ["{}:{}: 'YES' is implied when a non-default CPE_ID field is specified: {} ({}#cpe-id)".format(
+                self.filename,
+                self.valid["lineno"],
+                self.valid["text"],
+                self.url_to_manual,
+            )]
+
+    def _check_field(self, lineno, text, field, var, val):
+        if field == "VERSION" and self.CPE_fields_defaults[field] is None:
+            return ["{}:{}: expecting package version to be set before CPE_ID_VERSION".format(
+                self.filename,
+                lineno,
+            )]
+        if val == self.CPE_fields_defaults[field]:
+            return ["{}:{}: '{}' is the default value for {} ({}#cpe-id)".format(
+                self.filename,
+                lineno,
+                val,
+                var,
+                self.url_to_manual,
+            )]
+        else:
+            if field == "VALID":
+                self.valid = {"lineno": lineno, "text": text}
+            self.non_defaults += 1

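To illustrate what the new CPEVariables check flags, here is a hypothetical run (the package name foo and its .mk line are invented; per the defaults table in before(), foo_project is the default CPE_ID_VENDOR for a package named foo):

	printf 'FOO_CPE_ID_VENDOR = foo_project\n' > package/foo/foo.mk
	./utils/check-package package/foo/foo.mk
	# expected: a warning that 'foo_project' is the default value for FOO_CPE_ID_VENDOR
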
@@ -62,6 +62,7 @@ class Sob(_CheckFunction):
+                    "({}#_format_and_licensing_of_the_package_patches)"
                     .format(self.filename, self.url_to_manual)]
 
 
 class Upstream(_CheckFunction):
     UPSTREAM_ENTRY = re.compile(r"^Upstream: .*$")
 

@@ -36,11 +36,14 @@ symbols_defined_only_for_barebox_variant = [
 ]
 # toolchain/toolchain/toolchain.mk
 # toolchain/toolchain-buildroot/toolchain-buildroot.mk
+# toolchain/toolchain-bare-metal-buildroot/toolchain-bare-metal-buildroot.mk
 symbols_not_defined_for_fake_virtual_packages = [
     'BR2_PACKAGE_HAS_TOOLCHAIN',
     'BR2_PACKAGE_HAS_TOOLCHAIN_BUILDROOT',
+    'BR2_PACKAGE_HAS_TOOLCHAIN_BARE_METAL_BUILDROOT',
     'BR2_PACKAGE_PROVIDES_TOOLCHAIN',
     'BR2_PACKAGE_PROVIDES_TOOLCHAIN_BUILDROOT',
+    'BR2_PACKAGE_PROVIDES_TOOLCHAIN_BARE_METAL_BUILDROOT',
 ]
 # fs/common.mk
 suffixes_not_defined_for_all_rootfs_types = [

@@ -9,18 +9,46 @@ else
 	# Support git-worktree
 	GIT_DIR="$(cd "${MAIN_DIR}" && git rev-parse --no-flags --git-common-dir)"
 fi
-# shellcheck disable=SC2016
-IMAGE=$(grep ^image: "${MAIN_DIR}/.gitlab-ci.yml" | \
-	sed -e 's,^image: ,,g' | sed -e 's,\$CI_REGISTRY,registry.gitlab.com,g')
+if test -z "${IMAGE}" ; then
+	# shellcheck disable=SC2016
+	IMAGE=$(grep ^image: "${MAIN_DIR}/.gitlab-ci.yml" | \
+		sed -e 's,^image: ,,g' | sed -e 's,\$CI_REGISTRY,registry.gitlab.com,g')
+fi
+
 declare -a docker_opts=(
 	-i
 	--rm
 	--user "$(id -u):$(id -g)"
-	--mount "type=bind,src=${MAIN_DIR},dst=${MAIN_DIR}"
-	--workdir "${MAIN_DIR}"
+	--workdir "$(pwd)"
 	--security-opt label=disable
 	--network host
 )
+
+declare -a mountpoints=(
+	"${MAIN_DIR}"
+	"$(pwd)"
+)
+
+# curl lists (and recognises and uses) other types of *_proxy variables,
+# but only those make sense for Buildroot:
+for env in all_proxy http_proxy https_proxy ftp_proxy no_proxy; do
+	if [ "${!env}" ]; then
+		docker_opts+=( --env "${env}" )
+		# The lower-case variant takes precedence on the upper-case one
+		# (dixit curl)
+		continue
+	fi
+	# http_proxy is only lower-case (dixit curl)
+	if [ "${env}" = http_proxy ]; then
+		continue
+	fi
+	# All the others also exist in the upper-case variant
+	env="${env^^}"
+	if [ "${!env}" ]; then
+		docker_opts+=( --env "${env}" )
+	fi
+done
+
 # Empty GIT_DIR means that we are not in a workdir, *and* git is too old
 # to know about worktrees, so we're not in a worktree either. So it means
 # we're in the main git working copy, and thus we don't need to mount the

@@ -31,9 +59,27 @@ if [ "${GIT_DIR}" ]; then
 	# not absolute, GIT_DIR is relative to MAIN_DIR. If it's an absolute
 	# path already (in a workdir), then that's a noop.
 	GIT_DIR="$(cd "${MAIN_DIR}"; readlink -e "${GIT_DIR}")"
-	docker_opts+=( --mount "type=bind,src=${GIT_DIR},dst=${GIT_DIR}" )
+	mountpoints+=( "${GIT_DIR}" )
+
+	# 'repo' stores .git/objects separately.
+	if [ -L "${GIT_DIR}/objects" ]; then
+		# GIT_DIR is already an absolute path, but for symmetry
+		# with the above, keep the same cd+readlink construct.
+		OBJECTS_DIR="$(cd "${MAIN_DIR}"; readlink -e "${GIT_DIR}/objects")"
+		mountpoints+=( "${OBJECTS_DIR}" )
+	fi
 fi
 
+if [ "${BR2_DL_DIR}" ]; then
+	mountpoints+=( "${BR2_DL_DIR}" )
+	docker_opts+=( --env BR2_DL_DIR )
+fi
+
+# shellcheck disable=SC2013 # can't use while-read because of the assignment
+for dir in $(printf '%s\n' "${mountpoints[@]}" |LC_ALL=C sort -u); do
+	docker_opts+=( --mount "type=bind,src=${dir},dst=${dir}" )
+done
+
 if tty -s; then
 	docker_opts+=( -t )
 fi

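A hypothetical invocation showing the effect of the additions above (the proxy URL and download directory are made up): both environment variables are forwarded into the container, and BR2_DL_DIR is additionally bind-mounted via the mountpoints loop:

	https_proxy=http://proxy.example.com:3128 BR2_DL_DIR="$HOME/br-dl" \
		./utils/docker-run make source
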
@@ -315,6 +315,10 @@ def fixup_config(sysinfo, configfile):
             'BR2_TOOLCHAIN_BUILDROOT=y' in configlines:
         return False
 
+    if 'BR2_TOOLCHAIN_BARE_METAL_BUILDROOT=y\n' in configlines:
+        configlines.remove('BR2_TOOLCHAIN_BARE_METAL_BUILDROOT_ARCH=""\n')
+        configlines.append('BR2_TOOLCHAIN_BARE_METAL_BUILDROOT_ARCH="microblazeel-xilinx-elf"\n')
+
     if 'BR2_PACKAGE_AUFS_UTIL=y\n' in configlines and \
             'BR2_PACKAGE_AUFS_UTIL_VERSION=""\n' in configlines:
         return False

@@ -12,8 +12,16 @@ brmake
 nothing.
 
 check-package
-	a script that checks the coding style of a package's Config.in and
-	.mk files, and also tests them for various types of typoes.
+	a script that checks the coding style across the buildroot tree. It
+	checks packages' Config.in and .mk files, runs shellcheck on all shell
+	scripts and flake8 on python files, checks for typos, etc.
+	It consults the .checkpackageignore file for errors that should be
+	ignored, and complains if a file listed there no longer produces an error.
 
+docker-run
+	a script that runs a command (like make check-package) inside the
+	buildroot CI docker container; pass no command to get an interactive
+	shell.
 
 genrandconfig
 	a script that generates a random configuration, used by the autobuilders

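A quick sketch of the two docker-run modes described above:

	# run a single command inside the buildroot CI container
	./utils/docker-run make check-package
	# pass no command to get an interactive shell instead
	./utils/docker-run
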
@@ -43,6 +43,56 @@ except ImportError:
     liclookup = None
 
 
+def toml_load(f):
+    with open(f, 'rb') as fh:
+        ex = None
+
+        # Try standard library tomllib first
+        try:
+            from tomllib import load
+            return load(fh)
+        except ImportError:
+            pass
+
+        # Try regular tomli next
+        try:
+            from tomli import load
+            return load(fh)
+        except ImportError as e:
+            ex = e
+
+        # Try pip's vendored tomli
+        try:
+            from pip._vendor.tomli import load
+            try:
+                return load(fh)
+            except TypeError:
+                # Fallback to handle older version
+                try:
+                    fh.seek(0)
+                    w = io.TextIOWrapper(fh, encoding="utf8", newline="")
+                    return load(w)
+                finally:
+                    w.detach()
+        except ImportError:
+            pass
+
+        # Try regular toml last
+        try:
+            from toml import load
+            fh.seek(0)
+            w = io.TextIOWrapper(fh, encoding="utf8", newline="")
+            try:
+                return load(w)
+            finally:
+                w.detach()
+        except ImportError:
+            pass
+
+    print('This package needs tomli')
+    raise ex
+
+
 def setup_decorator(func, method):
     """
     Decorator for distutils.core.setup and setuptools.setup.

@@ -316,6 +366,35 @@ class BuildrootPackage():
         os.chdir(current_dir)
         sys.path.remove(self.tmp_extract)
 
+    def load_pyproject(self):
+        """
+        Loads the corresponding pyproject.toml and stores its metadata
+        """
+        current_dir = os.getcwd()
+        os.chdir(self.tmp_extract)
+        sys.path.insert(0, self.tmp_extract)
+        try:
+            pyproject_data = toml_load('pyproject.toml')
+            try:
+                self.setup_metadata = pyproject_data.get('project', {})
+                self.metadata_name = self.setup_metadata.get('name', self.real_name)
+                build_system = pyproject_data.get('build-system', {})
+                build_backend = build_system.get('build-backend', None)
+                if build_backend and build_backend == 'flit_core.buildapi':
+                    self.setup_metadata['method'] = 'flit'
+                elif build_system.get('backend-path', None):
+                    self.setup_metadata['method'] = 'pep517'
+                else:
+                    self.setup_metadata['method'] = 'unknown'
+            except KeyError:
+                print('ERROR: Could not determine package metadata for {pkg}.\n'
+                      .format(pkg=self.real_name))
+                raise
+        except FileNotFoundError:
+            raise
+        os.chdir(current_dir)
+        sys.path.remove(self.tmp_extract)
+
     def get_requirements(self, pkg_folder):
         """
         Retrieve dependencies from the metadata found in the setup.py script of

@@ -332,8 +411,8 @@ class BuildrootPackage():
                         for req in self.pkg_req]
 
         # get rid of commented lines and also strip the package strings
-        self.pkg_req = [item.strip() for item in self.pkg_req
-                        if len(item) > 0 and item[0] != '#']
+        self.pkg_req = {item.strip() for item in self.pkg_req
+                        if len(item) > 0 and item[0] != '#'}
 
         req_not_found = self.pkg_req
         self.pkg_req = list(map(pkg_buildroot_name, self.pkg_req))

@@ -620,8 +699,12 @@ class BuildrootPackage():
         if help_lines[-1][-1] != '.':
             help_lines[-1] += '.'
 
-        home_page = md_info.get('home_page', None) or \
-                    md_info.get('project_urls', {}).get('Homepage', None)  # noqa: E127
+        home_page = md_info.get('home_page', None)
+
+        if not home_page:
+            project_urls = md_info.get('project_urls', None)
+            if project_urls:
+                home_page = project_urls.get('Homepage', None)
 
         if home_page:
             # \t + two spaces is 3 char long

@@ -699,9 +782,12 @@ def main():
         except ImportError as err:
             if 'buildutils' in str(err):
                 print('This package needs buildutils')
-                continue
             else:
                 raise
+            continue
+        try:
+            package.load_pyproject()
+        except Exception:
+            raise
         except (AttributeError, KeyError) as error:
             print('Error: Could not install package {pkg}: {error}'.format(
                 pkg=package.real_name, error=error))

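For context, the load_setup()/load_pyproject() paths above are exercised when generating a package skeleton; a sketch, assuming scanpypi's documented package-name arguments and -o output-directory option (flask is an arbitrary PyPI example):

	./utils/scanpypi flask -o package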