Compare commits

3 commits

ac72de14fc  forgejo-ci: build.sh is now local rather than patched  (2024-08-24 21:29:31 -04:00)
    Some checks failed: lint (pull_request) failing after 29s; build-x86_64 (pull_request) successful in 2m33s; deploy-x86_64 (pull_request) successful in 28s; build-aarch64 (pull_request) successful in 8m44s; deploy-aarch64 (pull_request) successful in 55s

013f4597b1  user/codeberg-pages-server: new aport  (2024-08-23 09:03:54 -04:00)
    Some checks failed: lint (pull_request) failing after 31s; build-x86_64 (pull_request) successful in 2m13s; deploy-x86_64 (pull_request) successful in 27s; deploy-aarch64 (pull_request) skipped; build-aarch64 (pull_request) failing after 1m25s

5fe2d0a78b  forgejo-ci: deploy to $branch-testing when PR is a WIP  (2024-08-23 00:30:05 -04:00)
354 changed files with 16813 additions and 12862 deletions


@ -7,7 +7,7 @@
set -eu -o pipefail
readonly APORTSDIR=$CI_PROJECT_DIR
readonly REPOS="backports user pmos"
readonly REPOS="backports user"
readonly ALPINE_REPOS="main community testing"
readonly ARCH=$(apk --print-arch)
# gitlab variables
@ -16,8 +16,6 @@ readonly BASEBRANCH=$CI_MERGE_REQUEST_TARGET_BRANCH_NAME
: "${REPODEST:=$HOME/packages}"
: "${MIRROR:=https://ayakael.net/api/packages/forge/alpine}"
: "${ALPINE_MIRROR:=http://dl-cdn.alpinelinux.org/alpine}"
: "${PMOS_MIRROR:=http://mirror.postmarketos.org/postmarketos}"
: "${PMOS_KEY:=https://git.syndicate-lang.org/synit/pmbootstrap/raw/commit/8efee86388408c0d8de45c64fe383580ffd91700/pmb/data/keys/build.postmarketos.org.rsa.pub}"
: "${MAX_ARTIFACT_SIZE:=300000000}" #300M
: "${CI_DEBUG_BUILD:=}"
@ -139,22 +137,6 @@ setup_system() {
git config --global init.defaultBranch master
}
setup_pmos() {
local release
case $BASEBRANCH in
v3.21) release="v24.12";;
v3.20) release="v24.6";;
v3.19) release="v23.12";;
edge) release=master;;
*) die "Branch \"$BASEBRANCH\" not supported!"
esac
doas wget "$PMOS_KEY" -P /etc/apk/keys
doas sh -c "echo $PMOS_MIRROR/$release >> /etc/apk/repositories"
doas apk update || true
}
sysinfo() {
printf ">>> Host system information (arch: %s, release: %s) <<<\n" "$ARCH" "$(get_release)"
printf "- Number of Cores: %s\n" "$(nproc)"
@ -162,7 +144,6 @@ sysinfo() {
printf "- Free space: %s\n" "$(df -hP / | awk '/\/$/ {print $4}')"
}
copy_artifacts() {
cd "$APORTSDIR"
@ -201,7 +182,7 @@ setup_system || die "Failed to setup system"
# git no longer allows execution in repositories owned by a different user
doas chown -R buildozer: .
fetch_flags="-qnu"
fetch_flags="-qn"
debugging && fetch_flags="-v"
git fetch $fetch_flags "$CI_MERGE_REQUEST_PROJECT_URL" \
@ -223,7 +204,6 @@ build_limit=$CI_ALPINE_BUILD_LIMIT
for repo in $(changed_repos); do
set_repositories_for "$repo"
[ "$repo" == "pmos" ] && setup_pmos
built_aports=0
changed_aports_in_repo=$(changed_aports "$repo")
changed_aports_in_repo_count=$(echo "$changed_aports_in_repo" | wc -l)


@ -1,197 +0,0 @@
#!/bin/bash
# expects the following env variables:
# downstream: downstream repo
#
# env variables to track minor or bug-fix updates
# minor_only: array of packages that should only track minor releases (separated by space)
# default: none
# all packages: all
# fix_only: array of packages that should only track bug fix releases (separated by space)
# default: none
# all packages: all
#
# If either minor_only or fix_only is set, only packages with semantic versioning schemes
# will be tracked.
#
# If a package is both minor_only and fix_only, the minor releases will be tracked
#
# If a - is placed in front of package name, it'll be excluded from the update rule
#
# optional env variables
# ALL_PACKAGES: when true, ignore whether the package is owned by me (check all packages)
# skip_package: array of packages to skip; place a - in front of a package name to exempt it from skipping
# add_package: array of additional packages to check
#
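#
# Illustrative invocation (values are hypothetical; in CI these are provided as workflow env variables):
#   downstream=https://dl-cdn.alpinelinux.org/alpine/v3.22/community \
#   fix_only="all -git-annex" skip_package="py3-boto3 py3-botocore" \
#   ./check_ver.sh
#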
repo=${downstream/*\/}
release=${downstream/\/$repo/}
release=${release/*\/}
release=${release/v}
arch=$(apk --print-arch)
# add special case for postmarketos
[ "$release" == "postmarketos" ] && { release=$repo; repo="pmos"; arch="aarch64"; }
[ "$release" == "master" ] && release=edge
is_semantic() {
local downstream_version_dot=${1//[^.]}
if [[ ${#downstream_version_dot} -eq 2 ]]; then
return 0
fi
return 1
}
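# (e.g. is_semantic "1.2.3" returns 0, while is_semantic "1.2" or "20240101" returns 1)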
echo "Checking $downstream for out of date packages"
curl --silent $downstream/$arch/APKINDEX.tar.gz | tar -O -zx APKINDEX > APKINDEX
if [ "$ALL_PACKAGES" == "true" ]; then
owned_by_you=$(awk -F ':' '{if($1=="o"){print $2}}' APKINDEX | sort | uniq)
echo "Found $(printf '%s\n' $owned_by_you | wc -l ) packages"
else
owned_by_you=$(awk -v RS= -v ORS="\n\n" '/m:Antoine Martin \(ayakael\) <dev@ayakael.net>/' APKINDEX | awk -F ':' '{if($1=="o"){print $2}}' | sort | uniq)
echo "Found $(printf '%s\n' $owned_by_you | wc -l ) packages owned by you"
fi
# add additional packages
owned_by_you="$owned_by_you $add_package"
rm -f out_of_date not_in_anitya
for pkg in $owned_by_you; do
downstream_version=$(sed -n "/^P:$pkg$/,/^$/p" APKINDEX | awk -F ':' '{if($1=="V"){print $2}}' | sort -V | tail -n 1)
downstream_version=${downstream_version/-*}
# skip package if in $skip_package array
if [[ "$skip_package" == *all* || "$skip_package" == *$pkg* ]] && [[ "$skip_package" != *-$pkg* ]]; then
echo "$pkg skipped"
continue
fi
# special cases where package is not semantic
case $pkg in
# track u-boot-pine64-pinenote against mainline u-boot, and track upstream rockchip blobs
u-boot-pine64-pinenote)
upstream_version="$(curl --fail -X GET -s -H 'Content-Type: application/json' "https://release-monitoring.org/api/project/Alpine/u-boot" | jq -r '.stable_versions.[]' | head -n1)"
# for some reason the commit is no longer in APKINDEX, using master instead
#commit=$(sed -n "/^P:$pkg$/,/^$/p" APKINDEX | awk -F ':' '{if($1=="c"){print $2}}')
#commit=${commit/-dirty/}
commit=master
# fetches upstream version for blobs using ini file
upstream_trust="$(curl --fail -s 'https://raw.githubusercontent.com/rockchip-linux/rkbin/master/RKTRUST/RK3566TRUST_ULTRA.ini' | grep bl31 | awk -F '=' '{if($1"="PATH){print $2}}' | grep -o -P '(?<=_v).*(?=.elf)')"
upstream_ddr="$(curl --fail -s 'https://raw.githubusercontent.com/rockchip-linux/rkbin/master/RKBOOT/RK3566MINIALL_ULTRA.ini' | grep ddr | awk -F '=' '{if($1"="PATH){print $2}}' | head -n 1 | grep -o -P '(?<=_v).*(?=.bin)')"
# extracts downstream version via _trust_ver and _ddr_ver variable
downstream_trust=$(curl --fail -X GET -s "https://gitlab.postmarketos.org/postmarketOS/pmaports/-/raw/$commit/device/testing/u-boot-pine64-pinenote/APKBUILD" | awk -F '=' '{if($1=="_trust_ver"){print $2}}')
downstream_ddr=$(curl --fail -X GET -s "https://gitlab.postmarketos.org/postmarketOS/pmaports/-/raw/$commit/device/testing/u-boot-pine64-pinenote/APKBUILD" | awk -F '=' '{if($1=="_ddr_ver"){print $2}}')
# compares versions and creates newline in out_of_date if problematic
if [ "$upstream_trust" != "$downstream_trust" ]; then
echo "$pkg new Trust blob $upstream_trust version available"
echo "$pkg(trust) $downstream_trust $upstream_trust $repo $release" >> out_of_date
fi
if [ "$upstream_ddr" != "$downstream_ddr" ]; then
echo "$pkg new ddr blob $upstream_ddr version available"
echo "$pkg(ddr) $downstream_ddr $upstream_ddr $repo $release" >> out_of_date
fi
;;
# release-monitoring omits the extra B, while we keep it but put it after the version no.
looking-glass) upstream_version="$(curl --fail -X GET -s -H 'Content-Type: application/json' "https://release-monitoring.org/api/project/Alpine/$pkg" | jq -r '.stable_versions.[]' | head -n1)b";;
# we want to track both Firefox security upgrades + Zotero upgrades
zotero)
commit=$(sed -n "/^P:$pkg$/,/^$/p" APKINDEX | awk -F ':' '{if($1=="c"){print $2}}')
downstream_fx_ver=$(curl --fail -X GET -s "https://gitlab.alpinelinux.org/alpine/aports/-/raw/$commit/community/zotero/APKBUILD" | awk -F '=' '{if($1=="_fxver"){print $2}}')
upstream_fx_ver=$(curl --fail -X GET -s -H 'Content-Type: application/json' "https://release-monitoring.org/api/project/Alpine/firefox-esr" | jq -r ".stable_versions.[] | match(\"${downstream_fx_ver/.*.*}.*\").string" | head -n1)
if [ "$upstream_fx_ver" != "$downstream_fx_ver" ]; then
echo "$pkg new Firefox $upstream_fx_ver version available"
echo "$pkg(fx_ver) $downstream_fx_ver $upstream_fx_ver $repo $release" >> out_of_date
fi
upstream_version=$(curl --fail -X GET -s -H 'Content-Type: application/json' "https://release-monitoring.org/api/project/Alpine/$pkg" | jq -r '.stable_versions.[]' | head -n1)
;;
# aports omits the -beta part of the version
freetube) upstream_version=$(curl --fail -X GET -s -H 'Content-Type: application/json' "https://release-monitoring.org/api/v2/packages/?name=$pkg&distribution=Alpine" | jq -r '.items.[].version' | sed "s|-beta||");;
# we only track x.x.1xx feature branches of SDK and stage0
dotnet*sdk|dotnet*stage0) upstream_version=$(curl --fail -X GET -s -H 'Content-Type: application/json' "https://release-monitoring.org/api/project/Alpine/$pkg" | jq -r ".stable_versions.[] | match(\"${downstream_version::-2}.*\").string" | sed 's|-.*||' | head -n1);;
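# (e.g. with downstream_version=9.0.103, ${downstream_version::-2} is "9.0.1", so only 9.0.1xx releases are matched)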
# we want to track both current major version and upstream latest
electron)
upstream_version=$(curl --fail -X GET -s -H 'Content-Type: application/json' "https://release-monitoring.org/api/v2/projects/?name=$pkg&distribution=Alpine" | jq -r '.items.[].stable_versions' | jq -r ".[] | match(\"${downstream_version/.*}.*\").string" | head -n 1)
latest_version=$(curl --fail -X GET -s -H 'Content-Type: application/json' "https://release-monitoring.org/api/v2/packages/?name=$pkg&distribution=Alpine" | jq -r '.items.[].stable_version' )
# append version number to signal that this is not latest major version
if [ "${upstream_version/.*}" != "${latest_version/.*}" ]; then
echo "$pkg(${latest_version/.*}) major version available"
echo "$pkg(${latest_version/.*}) $downstream_version $latest_version $repo $release" >> out_of_date
pkg="$pkg(${upstream_version/.*})"
fi
;;
# we want to track LTS (even releases) rather than latest
arm-trusted-firmware)
if [[ "$fix_only" == *all* || "$fix_only" == *$pkg* ]] || [[ "$minor_only" == *all* || "$minor_only" == *$pkg* ]]; then
upstream_version=$(curl --fail -X GET -s -H 'Content-Type: application/json' "https://release-monitoring.org/api/project/Alpine/$pkg" | jq -r ".stable_versions.[] | match(\"${downstream_version%.*}.*\").string" | head -n1)
else
upstream_version=$(curl --fail -X GET -s -H 'Content-Type: application/json' "https://release-monitoring.org/api/project/Alpine/$pkg" | jq -r '.stable_versions.[] | select(test("^[0-9]+\\.[0-9]+\\.[0-9]+$")) | select(split(".") | .[1] | tonumber | . % 2 == 0)' | head -n1)
fi
;;
# track linux-pine64-pinenote against latest
linux-pine64-pinenote)
upstream_version=$(curl --fail -X GET -s -H 'Content-Type: application/json' "https://release-monitoring.org/api/project/315000" | jq -r ".stable_versions.[] | match(\"${downstream_version%.*}.*\").string" | head -n1)
latest_version=$(curl --fail -X GET -s -H 'Content-Type: application/json' "https://release-monitoring.org/api/project/315000" | jq -r ".stable_versions.[]" | head -n1)
# append version number to signal that this is not latest major version
if [ "${upstream_version/.*.*}" != "${latest_version/.*.*}" ]; then
echo "$pkg(${latest_version/.*.*}) major version available"
echo "$pkg(${latest_version/.*.*}) $downstream_version $latest_version $repo $release" >> out_of_date
pkg="$pkg(${upstream_version%.*})"
fi
;;
# track linux-radxa against the BSP kernel (updates usually arrive quite late)
linux-radxa)
upstream_version=$(curl --fail -X GET -s -H 'Content-Type: application/json' "https://release-monitoring.org/api/project/Alpine/$pkg" | jq -r '.stable_versions.[]' | head -n1)
upstream_version=${upstream_version/-*}
;;
dotnet*-sdk|dotnet*-stage0)
upstream_version=$(curl --fail -X GET -s -H 'Content-Type: application/json' "https://release-monitoring.org/api/project/141853" | jq -r ".stable_versions.[] | match(\"${downstream_version::-2}.*\").string" | head -n 1)
;;
dotnet*-runtime)
upstream_version=$(curl --fail -X GET -s -H 'Content-Type: application/json' "https://release-monitoring.org/api/project/220385" | jq -r ".stable_versions.[] | match(\"${downstream_version%.*}.*\").string" | head -n 1)
;;
# strip the trailing part of the GitHub tag for the usbboot release, as it is not needed
raspberrypi-usbboot) upstream_version=$(curl --fail -X GET -s -H 'Content-Type: application/json' "https://release-monitoring.org/api/project/Alpine/$pkg" | jq -r '.stable_versions.[]' | head -n1 | sed 's|-.*||');;
*)
if [[ "$minor_only" == *all* || "$minor_only" == *$pkg* ]] && [[ "$minor_only" != *-$pkg* ]]; then
# continues when package version scheme is not semantic, but minor_only or fix_only is set
if ! is_semantic $downstream_version; then
echo "$pkg is not semantic, and minor_only is set"
continue
fi
upstream_version=$(curl --fail -X GET -s -H 'Content-Type: application/json' "https://release-monitoring.org/api/project/Alpine/$pkg" | jq -r ".stable_versions.[] | match(\"${downstream_version%.*.*}.*\").string" | head -n1)
elif [[ "$fix_only" == *all* || "$fix_only" == *$pkg* ]] && [[ "$fix_only" != *-$pkg* ]]; then
# continues when package version scheme is not semantic, but minor_only or fix_only is set
if ! is_semantic $downstream_version; then
echo "$pkg is not semantic, and fix_only is set"
continue
fi
upstream_version=$(curl --fail -X GET -s -H 'Content-Type: application/json' "https://release-monitoring.org/api/project/Alpine/$pkg" | jq -r ".stable_versions.[] | match(\"${downstream_version%.*}.*\").string" | head -n1)
else
upstream_version=$(curl --fail -X GET -s -H 'Content-Type: application/json' "https://release-monitoring.org/api/project/Alpine/$pkg" | jq -r '.stable_versions.[]' | head -n1)
fi
;;
esac
if [ -z "$upstream_version" ]; then
echo "$pkg not in anitya"
# do not track not_in_anitya if either minor_only or fix_only is set
if [ -z ${minor_only+x} ] && [ -z ${fix_only+x} ]; then
echo "$pkg" >> not_in_anitya
fi
elif [ "$downstream_version" != "$(printf '%s\n' $upstream_version $downstream_version | sort -V | head -n 1)" ]; then
echo "$pkg higher downstream $upstream_version"
continue
elif [ "$upstream_version" != "$downstream_version" ]; then
echo "$pkg upstream version $upstream_version does not match downstream version $downstream_version in $release"
echo "$pkg $downstream_version $upstream_version $repo $release" >> out_of_date
fi
done


@ -1,22 +0,0 @@
#!/bin/sh
TARGET_REPO=$1
ARCH="x86 x86_64 armhf armv7 aarch64 ppc64le s390x mips64 riscv64 loongarch64"
for arch in $ARCH; do
# check if repo exists
wget --spider $TARGET_REPO/$arch/APKINDEX.tar.gz -o /dev/null || continue
echo ">>> Clearing repo $TARGET_REPO/$arch"
curl --silent $TARGET_REPO/$arch/APKINDEX.tar.gz | tar -O -zx APKINDEX > APKINDEX
pkgs=$(awk -F ':' '{if($1=="o"){print $2}}' APKINDEX | sort | uniq)
for pkg in $pkgs; do
pkgvers=$(sed -n "/^P:$pkg$/,/^$/p" APKINDEX | awk -F ':' '{if($1=="V"){print $2}}')
for pkgver in $pkgvers; do
echo "Deleting $pkg-$pkgver of arch $arch from $TARGET_REPO"
curl -s --user $FORGE_REPO_USER:$FORGE_REPO_TOKEN -X DELETE $TARGET_REPO/$arch/$pkg-$pkgver.apk
done
done
done
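# Illustrative invocation (mirrors the clear-repo workflow; credentials are placeholders):
#   FORGE_REPO_USER=forge FORGE_REPO_TOKEN=secret \
#   ./clear-repo.sh https://ayakael.net/api/packages/forge/alpine/edge/user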


@ -1,190 +0,0 @@
#!/bin/bash
# expects:
# env variable ISSUE_TOKEN
# file out_of_date
IFS='
'
repo=${downstream/*\/}
does_it_exist() {
name=$1
downstream_version=$2
upstream_version=$3
repo=$4
release=$5
query="$repo/$name: upgrade to $upstream_version"
if [ "$release" != "edge" ]; then
query="%22[$release] $query%22"
elif [ "$repo" != "pmos" ] && [ "$repo" != "user" ]; then
# workaround to this query matching both stable and edge branch
query="%22$query%22&labels=Edge"
else
query="%22$query%22"
fi
query="$(echo $query | sed 's| |%20|g' | sed 's|:|%3A|g' | sed 's|/|%2F|g' | sed 's|\[|%5B|g' | sed 's|\]|%5D|g')"
result="$(curl -L --silent -X 'GET' \
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues?state=open&q=$query&type=issues&sort=latest" \
-H 'accept: application/json' \
-H "Authorization: token $ISSUE_TOKEN"
)"
if [ "$result" == "[]" ]; then
return 1
fi
}
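# worked example of the query built above (hypothetical package "foo" in the user repo, edge release):
#   raw query:     "user/foo: upgrade to 1.2.3"
#   sent encoded:  %22user%2Ffoo%3A%20upgrade%20to%201.2.3%22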
is_it_old() {
name=$1
downstream_version=$2
upstream_version=$3
repo=$4
release=$5
query="$repo/$name: upgrade to"
if [ "$release" != "edge" ]; then
query="%22[$release] $query%22"
elif [ "$repo" != "pmos" ] && [ "$repo" != "user" ]; then
# workaround to this query matching both stable and edge branch
query="%22$query%22&labels=Edge"
else
query="%22$query%22"
fi
query="$(echo $query | sed 's| |%20|g' | sed 's|:|%3A|g' | sed 's|/|%2F|g' | sed 's|\[|%5B|g' | sed 's|\]|%5D|g')"
result="$(curl -L --silent -X 'GET' \
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues?state=open&q=$query&type=issues&sort=latest" \
-H 'accept: application/json' \
-H "authorization: token $ISSUE_TOKEN"
)"
result_title="$(echo $result | jq -r '.[].title' )"
result_id="$(echo $result | jq -r '.[].number' )"
result_upstream_version="$(echo $result_title | awk '{print $4}')"
if [ "$upstream_version" != "$result_upstream_version" ]; then
echo $result_id
else
echo 0
fi
}
update_title() {
name=$1
downstream_version=$2
upstream_version=$3
repo=$4
release=$5
id=$6
title="$repo/$name: upgrade to $upstream_version"
if [ "$release" != "edge" ]; then title="[$release] $title"; fi
result=$(curl -L --silent -X 'PATCH' \
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues/$id" \
-H 'accept: application/json' \
-H "authorization: token $ISSUE_TOKEN" \
-H 'Content-Type: application/json' \
-d "{\"title\": \"$title\"}"
)
return 0
}
create_issue() {
name=$1
downstream_version=$2
upstream_version=$3
repo=$4
release=$5
title="$repo/$name: upgrade to $upstream_version"
if [ "$release" != "edge" ]; then title="[$release] $title"; fi
result=$(curl -L --silent -X 'POST' \
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues" \
-H 'accept: application/json' \
-H "authorization: token $ISSUE_TOKEN" \
-H 'Content-Type: application/json' \
-d "{
\"title\": \"$title\",
\"labels\": [
$LABEL_NUMBER
]
}")
return 0
}
if [ -f out_of_date ]; then
out_of_date="$(cat out_of_date)"
echo "Detected $(wc -l out_of_date) out-of-date packages, creating issues"
for pkg in $out_of_date; do
name="$(echo $pkg | awk '{print $1}')"
downstream_version="$(echo $pkg | awk '{print $2}')"
upstream_version="$(echo $pkg | awk '{print $3}')"
repo="$(echo $pkg | awk '{print $4}')"
release="$(echo $pkg | awk '{print $5}')"
if does_it_exist $name $downstream_version $upstream_version $repo $release; then
echo "Issue for $repo/$name already exists"
continue
fi
id=$(is_it_old $name $downstream_version $upstream_version $repo $release)
if [ "$id" != "0" ] && [ -n "$id" ]; then
echo "Issue for $repo/$name needs updating"
update_title $name $downstream_version $upstream_version $repo $release $id
continue
fi
echo "Creating issue for $repo/$name"
create_issue $name $downstream_version $upstream_version $repo $release
done
fi
if [ -f not_in_anitya ]; then
query="Add missing $repo packages to anitya"
query="%22$(echo $query | sed 's| |%20|g')%22"
result="$(curl -L --silent -X 'GET' \
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues?state=open&q=$query&type=issues&sort=latest" \
-H 'accept: application/json' \
-H "authorization: token $ISSUE_TOKEN"
)"
if [ "$result" == "[]" ]; then
echo "Creating anitya issue"
result=$(curl -L --silent -X 'POST' \
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues" \
-H 'accept: application/json' \
-H "authorization: token $ISSUE_TOKEN" \
-H 'Content-Type: application/json' \
-d "{
\"title\": \"Add missing $repo packages to anitya\",
\"body\": \"- [ ] $(sed '{:q;N;s/\n/\\n- [ ] /g;t q}' not_in_anitya)\",
\"labels\": [
$LABEL_NUMBER
]
}")
else
echo "Updating anitya issue"
result_id="$(echo $result | jq -r '.[].number' )"
result=$(curl -L --silent -X 'PATCH' \
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues/$result_id" \
-H 'accept: application/json' \
-H "authorization: token $ISSUE_TOKEN" \
-H 'Content-Type: application/json' \
-d "{
\"body\": \"- [ ] $(sed '{:q;N;s/\n/\\n- [ ] /g;t q}' not_in_anitya)\"
}"
)
fi
fi


@ -4,7 +4,7 @@
set -eu -o pipefail
readonly REPOS="backports user"
readonly BASEBRANCH=$CI_MERGE_REQUEST_TARGET_BRANCH_NAME
readonly BASEBRANCH=$GITHUB_BASE_REF
readonly TARGET_REPO=$CI_ALPINE_REPO
apkgs=$(find package -type f -name "*.apk")
@ -14,6 +14,14 @@ for apk in $apkgs; do
arch=$(echo $apk | awk -F '/' '{print $3}')
name=$(echo $apk | awk -F '/' '{print $4}')
if [ "$(curl -s $GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/pulls/$GITHUB_EVENT_NUMBER | jq .draft)" == "true" ]; then
# if draft, send to -testing branch
branch="$branch-testing"
else
# if not draft, assume that this was sent to $branch-testing and nuke it
curl -s --user $FORGE_REPO_USER:$FORGE_REPO_TOKEN -X DELETE $TARGET_REPO/$BASEBRANCH/$branch-testing/$arch/$name
fi
echo "Sending $name of arch $arch to $TARGET_REPO/$BASEBRANCH/$branch"
return=$(curl -s --user $FORGE_REPO_USER:$FORGE_REPO_TOKEN --upload-file $apk $TARGET_REPO/$BASEBRANCH/$branch 2>&1)
echo $return
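# For illustration (assuming $branch was set to the aport's repo, e.g. "user", earlier in the script):
# with BASEBRANCH=v3.21, a regular PR uploads to https://ayakael.net/api/packages/forge/alpine/v3.21/user,
# while a draft PR uploads to .../v3.21/user-testing instead.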


@ -1,54 +0,0 @@
#!/bin/bash
TARGET_RELEASE=$1
shift
TARGET_ARCH=$@
CURRENT_ARCH=$(cat /etc/apk/arch)
if [ -z "$TARGET_RELEASE" ]; then
echo ">>> No target release specified, assumming edge"
TARGET_RELEASE=edge
fi
[[ "$TARGET_RELEASE" == "edge" ]] && TARGET_BRANCH=master || TARGET_BRANCH="${TARGET_RELEASE/v}-stable"
if [[ ! -d "aports-$TARGET_RELEASE" ]]; then
echo ">>> Fetching aports for $TARGET_RELEASE"
git init aports-$TARGET_RELEASE
git -C aports-$TARGET_RELEASE remote add origin https://gitlab.alpinelinux.org/alpine/aports
git -C aports-$TARGET_RELEASE fetch --depth 1 origin $TARGET_BRANCH
git -C aports-$TARGET_RELEASE checkout $TARGET_BRANCH
[[ $? -ne 0 ]] && { echo ">>> Git fetch failed, does your release exist?"; exit; } || true
fi
if [ -z "$TARGET_ARCH" ]; then
echo ">>> No arch specified, assuming target to all arches supported by upstream for release $TARGET_RELEASE"
TARGET_ARCH=$(cat aports-$TARGET_RELEASE/scripts/mkimg.minirootfs.sh | tr -d "\t" | awk -F "=" '{if($1=="arch"){print $2}}' | tr -d \" | sed "s| $CURRENT_ARCH||")
if [ -z "$TARGET_ARCH" ]; then
echo ">>> Could not compute arches that are supported, does your release exist?"
exit
fi
fi
. /usr/share/abuild/functions.sh
for arch in $TARGET_ARCH; do
if [[ "$(arch_to_hostspec $arch)" == "unknown" ]]; then
echo ">>> $arch not valid arch, please chose among the following"
sed -n '/^arch_to_hostspec/,/esac$/ {s/esac//;p;}' /usr/share/abuild/functions.sh | sed -e '/unknown/d' -e '/arch/d' -e '/case/d' -e "/$CURRENT_ARCH/d" | awk '{print $1}' | tr -d ')'
exit
fi
done
echo ">>> Targetting $TARGET_ARCH for cross generation"
(
cd aports-$TARGET_RELEASE/scripts
# this stops bootstrap from building the whole base system
sed -i 's|^msg "Cross building base system"|exit; msg "Cross building base system"|' bootstrap.sh
for arch in $TARGET_ARCH; do
echo ">>> Building cross-compilers for $arch"
./bootstrap.sh $arch
[[ $? -ne 0 ]] && { echo ">>> Cross-build failure"; exit; } || true
done
echo ">>> Building done"
)
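# Illustrative invocation (mirrors the on-cross workflow; the arch list is an example):
#   ./generate-cross.sh v3.21 "aarch64 riscv64"
# with no arguments it defaults to edge and to every arch supported upstream for that release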


@ -2,6 +2,10 @@ on:
pull_request:
types: [ assigned, opened, synchronize, reopened ]
concurrency:
group: ${{ github.head_ref || github.ref_name }}
cancel-in-progress: true
jobs:
build-aarch64:
runs-on: aarch64
@ -15,7 +19,6 @@ jobs:
steps:
- name: Environment setup
run: |
doas apk upgrade -a
doas apk add nodejs git patch curl
cd /etc/apk/keys
doas curl -JO https://ayakael.net/api/packages/forge/alpine/key
@ -24,11 +27,9 @@ jobs:
with:
fetch-depth: 500
- name: Package build
run: |
${{ github.workspace }}/.forgejo/bin/build.sh
touch packages/dummy
run: ${{ github.workspace }}/.forgejo/bin/build.sh
- name: Package upload
uses: actions/upload-artifact@v3
uses: forgejo/upload-artifact@v3
with:
name: package
path: packages
@ -40,15 +41,15 @@ jobs:
image: alpine:latest
env:
CI_ALPINE_REPO: 'https://ayakael.net/api/packages/forge/alpine'
CI_MERGE_REQUEST_TARGET_BRANCH_NAME: ${{ github.base_ref }}
FORGE_REPO_TOKEN: ${{ secrets.FORGE_REPO_TOKEN }}
FORGE_REPO_USER: ${{ vars.FORGE_REPO_USER }}
GITHUB_EVENT_NUMBER: ${{ github.event.number }}
steps:
- name: Setting up environment
run: apk add nodejs curl findutils git gawk
run: apk add nodejs curl findutils git gawk jq
- name: Repo pull
uses: actions/checkout@v4
- name: Package download
uses: actions/download-artifact@v3
uses: forgejo/download-artifact@v3
- name: Package deployment
run: ${{ github.workspace }}/.forgejo/bin/deploy.sh


@ -1,54 +0,0 @@
on:
pull_request:
types: [ assigned, opened, synchronize, reopened ]
jobs:
build-armv7:
runs-on: armv7
container:
image: alpinelinux/alpine-gitlab-ci:latest
env:
CI_PROJECT_DIR: ${{ github.workspace }}
CI_DEBUG_BUILD: ${{ runner.debug }}
CI_MERGE_REQUEST_PROJECT_URL: ${{ github.server_url }}/${{ github.repository }}
CI_MERGE_REQUEST_TARGET_BRANCH_NAME: ${{ github.base_ref }}
steps:
- name: Environment setup
run: |
doas apk upgrade -a
doas apk add nodejs git patch curl
cd /etc/apk/keys
doas curl -JO https://ayakael.net/api/packages/forge/alpine/key
- name: Repo pull
uses: actions/checkout@v4
with:
fetch-depth: 500
- name: Package build
run: |
${{ github.workspace }}/.forgejo/bin/build.sh
touch packages/dummy
- name: Package upload
uses: actions/upload-artifact@v3
with:
name: package
path: packages
deploy-armv7:
needs: [build-armv7]
runs-on: armv7
container:
image: alpine:latest
env:
CI_ALPINE_REPO: 'https://ayakael.net/api/packages/forge/alpine'
CI_MERGE_REQUEST_TARGET_BRANCH_NAME: ${{ github.base_ref }}
FORGE_REPO_TOKEN: ${{ secrets.FORGE_REPO_TOKEN }}
FORGE_REPO_USER: ${{ vars.FORGE_REPO_USER }}
steps:
- name: Setting up environment
run: apk add nodejs curl findutils git gawk
- name: Repo pull
uses: actions/checkout@v4
- name: Package download
uses: actions/download-artifact@v3
- name: Package deployment
run: ${{ github.workspace }}/.forgejo/bin/deploy.sh


@ -1,59 +0,0 @@
on:
workflow_dispatch:
inputs:
target_arch:
description: 'target arch'
required: false
type: string
jobs:
build-cross:
runs-on: x86_64
container:
image: alpinelinux/alpine-gitlab-ci:latest
env:
CI_PROJECT_DIR: ${{ github.workspace }}
CI_DEBUG_BUILD: ${{ runner.debug }}
CI_MERGE_REQUEST_PROJECT_URL: ${{ github.server_url }}/${{ github.repository }}
CI_MERGE_REQUEST_TARGET_BRANCH_NAME: ${{ github.ref_name }}
steps:
- name: Environment setup
run: |
doas apk upgrade -a
doas apk add nodejs git patch curl bash
cd /etc/apk/keys
doas curl -JO https://ayakael.net/api/packages/forge/alpine/key
- name: Repo pull
uses: actions/checkout@v4
with:
fetch-depth: 500
- name: Package build
run: |
${{ github.workspace }}/.forgejo/bin/build.sh
${{ github.workspace }}/.forgejo/bin/generate-cross.sh ${{ github.ref_name }} ${{ inputs.target_arch }}
mv -v /home/buildozer/packages/main ${{ github.workspace }}/packages/cross
- name: Package upload
uses: actions/upload-artifact@v3
with:
name: package
path: packages
deploy-cross:
needs: [build-cross]
runs-on: x86_64
container:
image: alpine:latest
env:
CI_ALPINE_REPO: 'https://ayakael.net/api/packages/forge/alpine'
CI_MERGE_REQUEST_TARGET_BRANCH_NAME: ${{ github.ref_name }}
FORGE_REPO_TOKEN: ${{ secrets.FORGE_REPO_TOKEN }}
FORGE_REPO_USER: ${{ vars.FORGE_REPO_USER }}
steps:
- name: Setting up environment
run: apk add nodejs curl findutils git gawk
- name: Repo pull
uses: actions/checkout@v4
- name: Package download
uses: actions/download-artifact@v3
- name: Package deployment
run: ${{ github.workspace }}/.forgejo/bin/deploy.sh


@ -2,6 +2,10 @@ on:
pull_request:
types: [ assigned, opened, synchronize, reopened ]
concurrency:
group: ${{ github.head_ref || github.ref_name }}
cancel-in-progress: true
jobs:
build-x86_64:
runs-on: x86_64
@ -15,7 +19,6 @@ jobs:
steps:
- name: Environment setup
run: |
doas apk upgrade -a
doas apk add nodejs git patch curl
cd /etc/apk/keys
doas curl -JO https://ayakael.net/api/packages/forge/alpine/key
@ -24,11 +27,9 @@ jobs:
with:
fetch-depth: 500
- name: Package build
run: |
${{ github.workspace }}/.forgejo/bin/build.sh
touch packages/dummy
run: ${{ github.workspace }}/.forgejo/bin/build.sh
- name: Package upload
uses: actions/upload-artifact@v3
uses: forgejo/upload-artifact@v3
with:
name: package
path: packages
@ -40,15 +41,15 @@ jobs:
image: alpine:latest
env:
CI_ALPINE_REPO: 'https://ayakael.net/api/packages/forge/alpine'
CI_MERGE_REQUEST_TARGET_BRANCH_NAME: ${{ github.base_ref }}
FORGE_REPO_TOKEN: ${{ secrets.FORGE_REPO_TOKEN }}
FORGE_REPO_USER: ${{ vars.FORGE_REPO_USER }}
GITHUB_EVENT_NUMBER: ${{ github.event.number }}
steps:
- name: Setting up environment
run: apk add nodejs curl findutils git gawk
run: apk add nodejs curl findutils git gawk jq
- name: Repo pull
uses: actions/checkout@v4
- name: Package download
uses: actions/download-artifact@v3
uses: forgejo/download-artifact@v3
- name: Package deployment
run: ${{ github.workspace }}/.forgejo/bin/deploy.sh


@ -1,28 +0,0 @@
on:
workflow_dispatch:
schedule:
- cron: '0 5 * * *'
jobs:
check-backports:
name: Check backports repo
runs-on: aarch64
container:
image: alpine:latest
env:
downstream: https://ayakael.net/api/packages/forge/alpine/v3.23/backports
ISSUE_TOKEN: ${{ secrets.issue_token }}
LABEL_NUMBER: 1
ALL_PACKAGES: true
steps:
- name: Environment setup
run: apk add grep coreutils gawk curl wget bash nodejs git jq sed
- name: Get scripts
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Check out-of-date packages
run: ${{ github.workspace }}/.forgejo/bin/check_ver.sh
- name: Create issues
run: ${{ github.workspace }}/.forgejo/bin/create_issue.sh


@ -1,116 +0,0 @@
on:
workflow_dispatch:
schedule:
- cron: '0 5 * * *'
jobs:
check-community-edge:
name: Check community(edge) repo
runs-on: aarch64
container:
image: alpine:latest
env:
downstream: https://dl-cdn.alpinelinux.org/alpine/edge/community
ISSUE_TOKEN: ${{ secrets.issue_token }}
LABEL_NUMBER: 4
steps:
- name: Environment setup
run: apk add grep coreutils gawk curl wget bash nodejs git jq sed
- name: Get scripts
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Check out-of-date packages
run: ${{ github.workspace }}/.forgejo/bin/check_ver.sh
- name: Create issues
run: ${{ github.workspace }}/.forgejo/bin/create_issue.sh
check-community-3.23:
name: Check community(3.23) repo
runs-on: aarch64
container:
image: alpine:latest
env:
downstream: https://dl-cdn.alpinelinux.org/alpine/v3.23/community
ISSUE_TOKEN: ${{ secrets.issue_token }}
LABEL_NUMBER: 13
fix_only: all -git-annex
skip_package: dotnet9-stage0 dotnet8-stage0 py3-boto3 py3-botocore dotnet10-stage0
steps:
- name: Environment setup
run: apk add grep coreutils gawk curl wget bash nodejs git jq sed
- name: Get scripts
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Check out-of-date packages
run: ${{ github.workspace }}/.forgejo/bin/check_ver.sh
- name: Create issues
run: ${{ github.workspace }}/.forgejo/bin/create_issue.sh
check-community-3.22:
name: Check community(3.22) repo
runs-on: aarch64
container:
image: alpine:latest
env:
downstream: https://dl-cdn.alpinelinux.org/alpine/v3.22/community
ISSUE_TOKEN: ${{ secrets.issue_token }}
LABEL_NUMBER: 13
skip_package: 'all -dotnet8-runtime -dotnet9-runtime'
fix_only: all
steps:
- name: Environment setup
run: apk add grep coreutils gawk curl wget bash nodejs git jq sed
- name: Get scripts
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Check out-of-date packages
run: ${{ github.workspace }}/.forgejo/bin/check_ver.sh
- name: Create issues
run: ${{ github.workspace }}/.forgejo/bin/create_issue.sh
check-community-3.21:
name: Check community(3.21) repo
runs-on: aarch64
container:
image: alpine:latest
env:
downstream: https://dl-cdn.alpinelinux.org/alpine/v3.21/community
ISSUE_TOKEN: ${{ secrets.issue_token }}
LABEL_NUMBER: 13
skip_package: 'all -dotnet8-runtime -dotnet9-runtime'
fix_only: all
steps:
- name: Environment setup
run: apk add grep coreutils gawk curl wget bash nodejs git jq sed
- name: Get scripts
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Check out-of-date packages
run: ${{ github.workspace }}/.forgejo/bin/check_ver.sh
- name: Create issues
run: ${{ github.workspace }}/.forgejo/bin/create_issue.sh
check-community-3.20:
name: Check community(3.20) repo
runs-on: aarch64
container:
image: alpine:latest
env:
downstream: https://dl-cdn.alpinelinux.org/alpine/v3.20/community
ISSUE_TOKEN: ${{ secrets.issue_token }}
LABEL_NUMBER: 13
skip_package: 'all -dotnet8-runtime'
fix_only: all
steps:
- name: Environment setup
run: apk add grep coreutils gawk curl wget bash nodejs git jq sed
- name: Get scripts
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Check out-of-date packages
run: ${{ github.workspace }}/.forgejo/bin/check_ver.sh
- name: Create issues
run: ${{ github.workspace }}/.forgejo/bin/create_issue.sh


@ -1,112 +0,0 @@
on:
workflow_dispatch:
schedule:
- cron: '0 5 * * *'
jobs:
check-main-edge:
name: Check main(edge) repo
runs-on: aarch64
container:
image: alpine:latest
env:
downstream: https://dl-cdn.alpinelinux.org/alpine/edge/main
ISSUE_TOKEN: ${{ secrets.issue_token }}
LABEL_NUMBER: 4
steps:
- name: Environment setup
run: apk add grep coreutils gawk curl wget bash nodejs git jq sed
- name: Get scripts
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Check out-of-date packages
run: ${{ github.workspace }}/.forgejo/bin/check_ver.sh
- name: Create issues
run: ${{ github.workspace }}/.forgejo/bin/create_issue.sh
check-main-3.23:
name: Check main(3.23) repo
runs-on: aarch64
container:
image: alpine:latest
env:
downstream: https://dl-cdn.alpinelinux.org/alpine/v3.23/main
ISSUE_TOKEN: ${{ secrets.issue_token }}
LABEL_NUMBER: 13
fix_only: all
steps:
- name: Environment setup
run: apk add grep coreutils gawk curl wget bash nodejs git jq sed
- name: Get scripts
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Check out-of-date packages
run: ${{ github.workspace }}/.forgejo/bin/check_ver.sh
- name: Create issues
run: ${{ github.workspace }}/.forgejo/bin/create_issue.sh
check-main-3.22:
name: Check main(3.22) repo
runs-on: aarch64
container:
image: alpine:latest
env:
downstream: https://dl-cdn.alpinelinux.org/alpine/v3.22/main
ISSUE_TOKEN: ${{ secrets.issue_token }}
LABEL_NUMBER: 13
fix_only: all
steps:
- name: Environment setup
run: apk add grep coreutils gawk curl wget bash nodejs git jq sed
- name: Get scripts
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Check out-of-date packages
run: ${{ github.workspace }}/.forgejo/bin/check_ver.sh
- name: Create issues
run: ${{ github.workspace }}/.forgejo/bin/create_issue.sh
check-main-3.21:
name: Check main(3.21) repo
runs-on: aarch64
container:
image: alpine:latest
env:
downstream: https://dl-cdn.alpinelinux.org/alpine/v3.21/main
ISSUE_TOKEN: ${{ secrets.issue_token }}
LABEL_NUMBER: 13
fix_only: all
steps:
- name: Environment setup
run: apk add grep coreutils gawk curl wget bash nodejs git jq sed
- name: Get scripts
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Check out-of-date packages
run: ${{ github.workspace }}/.forgejo/bin/check_ver.sh
- name: Create issues
run: ${{ github.workspace }}/.forgejo/bin/create_issue.sh
check-main-3.20:
name: Check main(3.20) repo
runs-on: aarch64
container:
image: alpine:latest
env:
downstream: https://dl-cdn.alpinelinux.org/alpine/v3.20/main
ISSUE_TOKEN: ${{ secrets.issue_token }}
LABEL_NUMBER: 13
fix_only: all
steps:
- name: Environment setup
run: apk add grep coreutils gawk curl wget bash nodejs git jq sed
- name: Get scripts
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Check out-of-date packages
run: ${{ github.workspace }}/.forgejo/bin/check_ver.sh
- name: Create issues
run: ${{ github.workspace }}/.forgejo/bin/create_issue.sh


@ -1,50 +0,0 @@
on:
workflow_dispatch:
schedule:
- cron: '0 5 * * *'
jobs:
check-pmos-edge:
name: Check pmos(edge) repo
runs-on: aarch64
container:
image: alpine:latest
env:
downstream: http://mirror.postmarketos.org/postmarketos/master
ISSUE_TOKEN: ${{ secrets.issue_token }}
LABEL_NUMBER: 14
skip_package: device-clockworkpi-uconsole-radxa-cm5 device-pine64-pinenote u-boot-radxa-cm5
steps:
- name: Environment setup
run: apk add grep coreutils gawk curl wget bash nodejs git jq sed
- name: Get scripts
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Check out-of-date packages
run: ${{ github.workspace }}/.forgejo/bin/check_ver.sh
- name: Create issues
run: ${{ github.workspace }}/.forgejo/bin/create_issue.sh
check-pmos-latest:
name: Check pmos(v25.12) repo
runs-on: aarch64
container:
image: alpine:latest
env:
downstream: http://mirror.postmarketos.org/postmarketos/v25.12
ISSUE_TOKEN: ${{ secrets.issue_token }}
LABEL_NUMBER: 14
fix_only: all
skip_package: device-clockworkpi-uconsole-radxa-cm5 device-pine64-pinenote u-boot-radxa-cm5
steps:
- name: Environment setup
run: apk add grep coreutils gawk curl wget bash nodejs git jq sed
- name: Get scripts
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Check out-of-date packages
run: ${{ github.workspace }}/.forgejo/bin/check_ver.sh
- name: Create issues
run: ${{ github.workspace }}/.forgejo/bin/create_issue.sh


@ -1,28 +0,0 @@
on:
workflow_dispatch:
schedule:
- cron: '0 5 * * *'
jobs:
check-testing:
name: Check testing repo
runs-on: aarch64
container:
image: alpine:latest
env:
downstream: https://dl-cdn.alpinelinux.org/alpine/edge/testing
ISSUE_TOKEN: ${{ secrets.issue_token }}
LABEL_NUMBER: 4
skip_package: dotnet6-stage0 dotnet6-build
steps:
- name: Environment setup
run: apk add grep coreutils gawk curl wget bash nodejs git jq sed
- name: Get scripts
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Check out-of-date packages
run: ${{ github.workspace }}/.forgejo/bin/check_ver.sh
- name: Create issues
run: ${{ github.workspace }}/.forgejo/bin/create_issue.sh


@ -1,27 +0,0 @@
on:
workflow_dispatch:
schedule:
- cron: '0 5 * * *'
jobs:
check-user:
name: Check user repo
runs-on: aarch64
container:
image: alpine:latest
env:
downstream: https://ayakael.net/api/packages/forge/alpine/edge/user
ISSUE_TOKEN: ${{ secrets.issue_token }}
LABEL_NUMBER: 12
steps:
- name: Environment setup
run: apk add grep coreutils gawk curl wget bash nodejs git jq sed
- name: Get scripts
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Check out-of-date packages
run: ${{ github.workspace }}/.forgejo/bin/check_ver.sh
- name: Create issues
run: ${{ github.workspace }}/.forgejo/bin/create_issue.sh


@ -1,25 +0,0 @@
on:
workflow_dispatch:
inputs:
target_repo:
description: 'target repo'
default: 'edge/user'
required: true
type: string
jobs:
clear-repo:
runs-on: x86_64
container:
image: alpine:latest
env:
TARGET_REPO: 'https://ayakael.net/api/packages/forge/alpine/${{ inputs.target_repo }}'
FORGE_REPO_TOKEN: ${{ secrets.FORGE_REPO_TOKEN }}
FORGE_REPO_USER: ${{ vars.FORGE_REPO_USER }}
steps:
- name: Setting up environment
run: apk add nodejs curl findutils git gawk
- name: Repo pull
uses: actions/checkout@v4
- name: Clear repo
run: ${{ github.workspace }}/.forgejo/bin/clear-repo.sh $TARGET_REPO


@ -14,9 +14,7 @@ jobs:
CI_MERGE_REQUEST_PROJECT_URL: ${{ github.server_url }}/${{ github.repository }}
CI_MERGE_REQUEST_TARGET_BRANCH_NAME: ${{ github.base_ref }}
steps:
- run: |
doas apk upgrade -a
doas apk add nodejs git
- run: doas apk add nodejs git
- uses: actions/checkout@v4
with:
fetch-depth: 500


@ -5,24 +5,23 @@ Upstream: https://ayakael.net/forge/ayaports
This repository contains aports that are not yet merged into the official Alpine
Linux repository or do not adhere to Alpine policies. Packages are automatically
built using CI. Once built, they are deployed to a Forgejo repository, making
built using CI. Once built, they are deployed to a git-lfs repository, making
them available to apk.
Branches are matched to Alpine releases.
## Repositories
You can browse all the repositories at https://ayakael.net/forge/ayaports
You can browse all the repositories at https://codeberg.org/ayakael/ayaports
Affixed to each repository description is the appropriate link for use in
`/etc/apk/repositories`.
#### Backports
```
https://ayakael.net/api/packages/forge/alpine/v3.21/backports
https://ayakael.net/api/packages/forge/alpine/edge/backports
```
Aports from the official Alpine repositories backported from edge. This is only
available and kept up-to-date on latest stable release.
Aports from the official Alpine repositories backported from edge.
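
As an illustration, enabling the edge backports repository on a host could look like
this (the key URL matches the one used by the CI workflows; adjust the branch to your
release):

```sh
# trust the signing key of this package registry
(cd /etc/apk/keys && doas curl -JO https://ayakael.net/api/packages/forge/alpine/key)
# add the repository and refresh the package index
echo "https://ayakael.net/api/packages/forge/alpine/edge/backports" | doas tee -a /etc/apk/repositories
doas apk update
```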
#### User
```


@ -0,0 +1,48 @@
# Contributor: Kay Thomas <kaythomas@pm.me>
# Maintainer: Kay Thomas <kaythomas@pm.me>
pkgname=airsonic-advanced
_sha=1397446f979b1cdea283eec89ce4f0eae7d63450
pkgver=11.0.0_git20230217
pkgrel=0
pkgdesc="Modern implementation of the Airsonic fork with several key performance and feature enhancements"
url="https://github.com/airsonic-advanced/airsonic-advanced"
# inconsistent test and build failures on other arches
arch="x86_64"
license="GPL-3.0-or-later"
depends="openjdk11"
makedepends="maven"
subpackages="$pkgname-openrc"
pkgusers="airsonic-advanced"
pkggroups="airsonic-advanced"
install="$pkgname.pre-install"
source="$pkgname-$pkgver.tar.gz::https://github.com/airsonic-advanced/airsonic-advanced/archive/$_sha.tar.gz
maven.patch
airsonic-advanced.initd
"
builddir="$srcdir/$pkgname-$_sha"
build() {
mvn clean package -DskipTests
}
check() {
mvn test
}
package() {
install -dm755 -o airsonic-advanced -g airsonic-advanced \
"$pkgdir"/var/airsonic
install -m755 -o airsonic-advanced -g airsonic-advanced \
"$builddir"/airsonic-main/target/airsonic.war \
"$pkgdir"/var/airsonic/airsonic.war
install -Dm755 "$srcdir"/$pkgname.initd \
"$pkgdir"/etc/init.d/$pkgname
}
sha512sums="
f415620bdbed9fb3874afbf30d9362e68b1e9e8e90dbbed4ca3206b643cad97ca0558e64ec5b4440382f0ec908c3325e321ea3631c38ff9a2109163c8f0cfe0b airsonic-advanced-11.0.0_git20230217.tar.gz
6cb52fee19815fcdf2596e55d97d3e750321b1df7a4fec36fc9bc2a57d4be979a3905a42d3aa9dbeb2bf0d4f56edbf344f13551219b8e4d2ca583abd4bb5c8f9 maven.patch
ca87e6a7199950e6ac52aeb076a03f831d60ee9d4ceed47366bbd78443765d205796d895ebb244051d8033e5b2e9ccd648d20434039c854b8b50e766cc5cd10d airsonic-advanced.initd
"


@ -0,0 +1,14 @@
#!/sbin/openrc-run
supervisor=supervise-daemon
name="airsonic-advanced"
command="/usr/lib/jvm/java-11-openjdk/jre/bin/java"
command_args="-jar airsonic.war"
command_user="airsonic-advanced:airsonic-advanced"
directory="/var/airsonic"
pidfile="/run/airsonic-advanced.pid"
depend() {
need net localmount
after firewall
}


@ -0,0 +1,6 @@
#!/bin/sh
addgroup -S airsonic-advanced 2>/dev/null
adduser -S -D -H -s /sbin/nologin -G airsonic-advanced -g airsonic-advanced airsonic-advanced 2>/dev/null
exit 0


@ -0,0 +1,8 @@
fixes maven 3.9 breaking change
https://maven.apache.org/docs/3.9.0/release-notes.html#potentially-breaking-core-changes
--- airsonic-advanced-1397446f979b1cdea283eec89ce4f0eae7d63450/.mvn/maven.config
+++ airsonic-advanced-1397446f979b1cdea283eec89ce4f0eae7d63450/.mvn/maven.config
@@ -1 +1,2 @@
---settings ./.mvn/settings.xml
+--settings
+./.mvn/settings.xml


@ -1,13 +0,0 @@
diff --git a/setup/build.py b/setup/build.py
index 956ad7504f..aa9d7ea028 100644
--- a/setup/build.py
+++ b/setup/build.py
@@ -662,7 +662,7 @@ def build_headless(self):
f.seek(0), f.truncate()
f.write(raw)
bdir = os.path.join(bdir, 'build')
- cmd = [CMAKE]
+ cmd = [CMAKE, '-GUnix Makefiles']
if is_macos_universal_build:
cmd += ['-DCMAKE_OSX_ARCHITECTURES=x86_64;arm64']
if sw and os.path.exists(os.path.join(sw, 'qt')):

View file

@ -1,46 +0,0 @@
piper uses a function in espeak-ng that has been upstreamed but is not yet in a
release.
diff --git a/setup/extensions.json b/setup/extensions.json
index b39ce6d..c105031 100644
--- a/setup/extensions.json
+++ b/setup/extensions.json
@@ -134,14 +134,6 @@
"error": "!podofo_error",
"needs_c++": "17"
},
- {
- "name": "piper",
- "sources": "calibre/utils/tts/piper.cpp",
- "needs_c++": "17",
- "libraries": "!piper_libs",
- "lib_dirs": "!piper_lib_dirs",
- "inc_dirs": "!piper_inc_dirs"
- },
{
"name": "html_as_json",
"sources": "calibre/srv/html_as_json.cpp",
diff --git a/src/calibre/constants.py b/src/calibre/constants.py
index fa4b211459..7b27768953 100644
--- a/src/calibre/constants.py
+++ b/src/calibre/constants.py
@@ -258,7 +258,6 @@ def __init__(self):
'rcc_backend',
'icu',
'speedup',
- 'piper',
'html_as_json',
'fast_css_transform',
'fast_html_entities',
diff --git a/src/calibre/utils/run_tests.py b/src/calibre/utils/run_tests.py
index ffd0f95c04..c80a35f83d 100644
--- a/src/calibre/utils/run_tests.py
+++ b/src/calibre/utils/run_tests.py
@@ -192,6 +192,7 @@ def test_import_of_all_python_modules(self):
}
if 'SKIP_SPEECH_TESTS' in os.environ:
exclude_packages.add('calibre.gui2.tts')
+ exclude_modules.add('calibre.utils.tts.piper')
if not isbsd:
exclude_modules.add('calibre.devices.usbms.hal')
d = os.path.dirname


@ -1,6 +1,6 @@
# Maintainer: Cowington Post <cowingtonpost@gmail.com>
pkgname=calibre
pkgver=8.14.0
pkgver=7.12.0
pkgrel=0
pkgdesc="Ebook management application"
# qt6-webengine
@ -41,13 +41,11 @@ depends="
qt6-qtimageformats
qt6-qtsvg
qt6-qtwebengine
qt6-qtbase-private-dev
udisks2
"
makedepends="
cmake
curl
ffmpeg-dev
hunspell-dev
hyphen-dev
libmtp-dev
@ -56,7 +54,6 @@ makedepends="
podofo-dev
py3-pyqt-builder
py3-pyqt6-sip
py3-qt6
py3-sip
python3-dev
qt6-qtbase-dev
@ -71,15 +68,12 @@ subpackages="
"
source="https://download.calibre-ebook.com/$pkgver/calibre-$pkgver.tar.xz
0001-$pkgname-no-update.patch
0002-$pkgname-use-make.patch
0003-$pkgname-disable-piper.patch
"
# net: downloads iso-codes
# !check: no tests ran
options="net !check"
export LANG="en_US.UTF-8"
export PATH="$PATH:/usr/lib/qt6/bin"
prepare() {
default_prepare
@ -106,6 +100,7 @@ package() {
python3 setup.py install \
--staging-root="$pkgdir"/usr \
--no-compile \
--system-plugins-location=/usr/share/calibre/system-plugins
cp -a man-pages/ "$pkgdir"/usr/share/man
@ -116,8 +111,6 @@ package() {
}
sha512sums="
edb32e47b083e10fbf53088e485737f3b61bb642ce6c4dd444e58a6618979c3b05b77ceffc4b8cb42e35eee7dcc2b94145abc22030ffd8b5de63e45b321fbf72 calibre-8.14.0.tar.xz
ee654260d7047f0579a659b8907439a407fb561affcef84141126840452e7b98d10bb5e0a69e0cc809d9ba68729570900a0e7251f18b2056a94b0213880f1363 calibre-7.12.0.tar.xz
eb8e7ce40ff8b8daf6e7e55a5dff8ec4dff06c45744266bb48b3194e92ab1196bc91468203e3c2ca1e5144166a7d6be90e6cf0253513e761b56a4c85be4c2c76 0001-calibre-no-update.patch
bbb7253257073ae14840b3b4697943fe129d862b49cabd9388ea24cbd0259e68a1d359870334772164897f0c781db121de55fcdf5bccc841e36c021abe56f1ec 0002-calibre-use-make.patch
0efcf35944cd0f42d6f3572839647fc5c8336562db3f71655211d3de682e155b6d6fee4d281f9576201156e0bc828b6a579a8708a27791e4e4d604d456416954 0003-calibre-disable-piper.patch
"


@ -1,2 +0,0 @@
#!/bin/sh
/usr/bin/electron "/usr/lib/caprine"

backports/coin/APKBUILD (new file, 36 lines)

@ -0,0 +1,36 @@
# Contributor: Aiden Grossman <agrossman154@yahoo.com>
# Maintainer: Aiden Grossman <agrossman154@yahoo.com>
pkgname=coin
pkgver=4.0.0
pkgrel=6
pkgdesc="OpenGL OpenInventor compatible graphics library"
url="https://github.com/coin3d/coin"
license="BSD-3-Clause"
arch="all"
makedepends="boost-dev cmake glu-dev graphviz samurai"
subpackages="$pkgname-dev"
source="https://github.com/coin3d/coin/releases/download/Coin-$pkgver/coin-$pkgver-src.tar.gz
TestSuitePatch.patch
"
builddir="$srcdir/coin"
build() {
cmake -B build -G Ninja \
-DCMAKE_BUILD_TYPE=Release \
-DCMAKE_INSTALL_PREFIX=/usr \
-DCOIN_BUILD_TESTS=ON
cmake --build build
}
check() {
cmake --build build --target test
}
package() {
DESTDIR="$pkgdir" cmake --install build
}
sha512sums="
e036276a243bfe252569cee1b67d38b8633fcf35bdf4e366a92ca67e23799d54d91fe272c23b383c451d330cee284809f28f237857493948149e0da1ebd64fae coin-4.0.0-src.tar.gz
aab464244b13371badf0878e5bfbcce859a42756cf8c7657d1480318aa291d296eac2741219c346bae056f761c5f46857f8fd1ec1c4129f86bc10236d3869deb TestSuitePatch.patch
"


@ -0,0 +1,11 @@
--- ./testsuite/TestSuiteUtils.cpp
+++ ./testsuite/TestSuiteUtils.cpp
@@ -39,7 +39,7 @@
#elif defined(_WIN32)
#define USE_WIN32
#else //_WIN32
-#error Unknown system
+#define USE_POSIX
#endif //POSIX
#include <Inventor/errors/SoDebugError.h>

backports/cura/APKBUILD (new file, 75 lines)

@ -0,0 +1,75 @@
# Contributor: Anjandev Momi <anjan@momi.ca>
# Maintainer: Anjandev Momi <anjan@momi.ca>
pkgname=cura
# uranium and curaengine packages must be updated in sync with this version number
# py3-pynest2d and fdm-materials should be checked as well, but their versions are not always in sync
pkgver=5.2.2
pkgrel=1
pkgdesc="3D printer / slicing GUI built on top of the Uranium framework"
url="https://ultimaker.com/software/ultimaker-cura"
arch="noarch !ppc64le !x86 !armhf !riscv64 !s390x !armv7" # ppc64le: no py3-keyring
# x86: no curaengine
# armhf: no uranium, qt5-qtquickcontrols, qt5-qtquickcontrols2, qt5-qtgraphicaleffects
# riscv64: no uranium
# s390x: no py3-trimesh, no py3-numpy-stl
# armv7: no py3-trimesh
license="LGPL-3.0-or-later"
# add cura-binary-data to depends when packaged
depends="
curaengine
fdm-materials
uranium
py3-arcus
py3-keyring
py3-numpy-stl
py3-pyclipper
py3-pynest2d
py3-pyserial
py3-qt6
py3-requests
py3-trimesh
py3-zeroconf
"
makedepends="samurai cmake gettext gettext-dev" # needs msginit from gettext
checkdepends="py3-pytest"
subpackages="$pkgname-lang"
source="$pkgname-$pkgver.tar.gz::https://github.com/Ultimaker/Cura/archive/refs/tags/$pkgver.tar.gz
AppDesktopData.patch
CuraVersion.patch
cmake-helpers.patch
cmake.patch"
builddir="$srcdir/Cura-$pkgver"
options="!check" # tests broken after v5.x
build() {
local pyver="$(python3 -c 'import sys; print(f"{sys.version_info.major}.{sys.version_info.minor}")')"
cmake -B build -G Ninja \
-DCURA_VERSION=$pkgver \
-DPython_VERSION=$pyver \
-DURANIUM_DIR=/usr/share/uranium \
-DCMAKE_INSTALL_PREFIX=/usr \
-DCMAKE_INSTALL_LIBDIR=lib \
-DGETTEXT_MSGINIT_EXECUTABLE=msginit \
-DCURA_BINARY_DATA_DIRECTORY=/usr/share/cura \
-DCMAKE_BUILD_TYPE=minsizerel
cmake --build build
}
package() {
DESTDIR="$pkgdir" cmake --install build
mv $pkgdir/usr/bin/cura_app.py $pkgdir/usr/bin/cura
# don't ever send any user or print info through the internet to Ultimaker
rm -rf "$pkgdir/usr/lib/cura/plugins/SliceInfoPlugin"
install -d "$pkgdir"/usr/share/locale
mv "$pkgdir"/usr/share/cura/resources/i18n/* "$pkgdir"/usr/share/locale/
}
sha512sums="
5d4e0fdc740d0c048905e2b87cc8c73eedea59b54766b74760505902007b365582d22b46b1cfdcd6914828840865c10a3beb0ef6a1f04ea181c81d44f42434bc cura-5.2.2.tar.gz
214e373f6cab7e3ccac12c96d1b5ca636d8d1e9ecdadaae84fc28fb429969c7c2d6055ce2a01b6db3ad85ab6cbc8d135cf2c26c77d7cfe13a73eb81aa5e85f11 AppDesktopData.patch
e3bb302db70ca195b2ce9831e71302c8ee2a51955fecc7264a495d7d4fc9c107cfd48811aa5865f16671e7b1ae126f95d3d7bbb6a70f367f7f91a2b32bce377b CuraVersion.patch
0db4ff97e7f82ae1a9dbc9c330d08c3e46249feeb3fb630f7c4e2de73749327337ec041680c39a07e0b5034c1b3f3656d75614ab4dc2f39861c8e27bdb2a58ef cmake-helpers.patch
05a73f892700ff6279230385b04180873a62b7413fa7f7d55ae150f1bcee57ef05eda0bd7fe444fe660ab66a044c958f42badd33b743fca81033ae8f19dd3805 cmake.patch
"


@ -0,0 +1,58 @@
--- /dev/null
+++ ./com.ultimaker.cura.appdata.xml
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Copyright 2016 Richard Hughes <richard@hughsie.com> -->
+<component type="desktop">
+ <id>com.ultimaker.cura.desktop</id>
+ <metadata_license>CC0-1.0</metadata_license>
+ <project_license>LGPL-3.0 and CC-BY-SA-4.0</project_license>
+ <name>Cura</name>
+ <summary>The world's most advanced 3d printer software</summary>
+ <description>
+ <p>
+ Cura creates a seamless integration between hardware, software and
+ materials for the best 3D printing experience around.
+ Cura supports the 3MF, OBJ and STL file formats and is available on
+ Windows, Mac and Linux.
+ </p>
+ <ul>
+ <li>Novices can start printing right away</li>
+ <li>Experts are able to customize 300 settings to achieve the best results</li>
+ <li>Optimized profiles for Ultimaker materials</li>
+ <li>Supported by a global network of Ultimaker certified service partners</li>
+ <li>Print multiple objects at once with different settings for each object</li>
+ <li>Cura supports STL, 3MF and OBJ file formats</li>
+ <li>Open source and completely free</li>
+ </ul>
+ </description>
+ <screenshots>
+ <screenshot type="default">
+ <image>https://raw.githubusercontent.com/Ultimaker/Cura/master/screenshot.png</image>
+ </screenshot>
+ </screenshots>
+ <url type="homepage">https://ultimaker.com/software/ultimaker-cura?utm_source=cura&amp;utm_medium=software&amp;utm_campaign=cura-update-linux</url>
+ <translation type="gettext">Cura</translation>
+</component>
--- /dev/null
+++ ./com.ultimaker.cura.desktop.in
@@ -0,0 +1,19 @@
+[Desktop Entry]
+Name=Ultimaker Cura
+Name[de]=Ultimaker Cura
+Name[nl]=Ultimaker Cura
+GenericName=3D Printing Software
+GenericName[de]=3D-Druck-Software
+GenericName[nl]=3D-printsoftware
+Comment=Cura converts 3D models into paths for a 3D printer. It prepares your print for maximum accuracy, minimum printing time and good reliability with many extra features that make your print come out great.
+Comment[de]=Cura wandelt 3D-Modelle in Pfade für einen 3D-Drucker um. Es bereitet Ihren Druck für maximale Genauigkeit, minimale Druckzeit und guter Zuverlässigkeit mit vielen zusätzlichen Funktionen vor, damit Ihr Druck großartig wird.
+Comment[nl]=Cura converteert 3D-modellen naar paden voor een 3D printer. Het bereidt je print voor om zeer precies, snel en betrouwbaar te kunnen printen, met veel extra functionaliteit om je print er goed uit te laten komen.
+Exec=@CMAKE_INSTALL_FULL_BINDIR@/cura %F
+TryExec=@CMAKE_INSTALL_FULL_BINDIR@/cura
+Icon=cura-icon
+Terminal=false
+Type=Application
+MimeType=model/stl;application/vnd.ms-3mfdocument;application/prs.wavefront-obj;image/bmp;image/gif;image/jpeg;image/png;text/x-gcode;application/x-amf;application/x-ply;application/x-ctm;model/vnd.collada+xml;model/gltf-binary;model/gltf+json;model/vnd.collada+xml+zip;
+Categories=Graphics;
+Keywords=3D;Printing;Slicer;
+StartupWMClass=cura.real


@ -0,0 +1,16 @@
--- /dev/null
+++ ./cura/CuraVersion.py.in
@@ -0,0 +1,13 @@
+# Copyright (c) 2020 Ultimaker B.V.
+# Cura is released under the terms of the LGPLv3 or higher.
+
+CuraAppName = "@CURA_APP_NAME@"
+CuraAppDisplayName = "@CURA_APP_DISPLAY_NAME@"
+CuraVersion = "@CURA_VERSION@"
+CuraBuildType = "@CURA_BUILDTYPE@"
+CuraDebugMode = True if "@_cura_debugmode@" == "ON" else False
+CuraCloudAPIRoot = "@CURA_CLOUD_API_ROOT@"
+CuraCloudAPIVersion = "@CURA_CLOUD_API_VERSION@"
+CuraCloudAccountAPIRoot = "@CURA_CLOUD_ACCOUNT_API_ROOT@"
+CuraMarketplaceRoot = "@CURA_MARKETPLACE_ROOT@"
+CuraDigitalFactoryURL = "@CURA_DIGITAL_FACTORY_URL@"


@ -0,0 +1,95 @@
--- /dev/null
+++ ./cmake/CuraPluginInstall.cmake
@@ -0,0 +1,92 @@
+# Copyright (c) 2022 Ultimaker B.V.
+# CuraPluginInstall.cmake is released under the terms of the LGPLv3 or higher.
+
+#
+# This module detects all plugins that need to be installed and adds them using the CMake install() command.
+# It detects all plugin folder in the path "plugins/*" where there's a "plugin.json" in it.
+#
+# Plugins can be configured to NOT BE INSTALLED via the variable "CURA_NO_INSTALL_PLUGINS" as a list of string in the
+# form of "a;b;c" or "a,b,c". By default all plugins will be installed.
+#
+
+option(PRINT_PLUGIN_LIST "Should the list of plugins that are installed be printed?" ON)
+
+# Options or configuration variables
+set(CURA_NO_INSTALL_PLUGINS "" CACHE STRING "A list of plugins that should not be installed, separated with ';' or ','.")
+
+file(GLOB_RECURSE _plugin_json_list ${CMAKE_SOURCE_DIR}/plugins/*/plugin.json)
+list(LENGTH _plugin_json_list _plugin_json_list_len)
+
+# Sort the lists alphabetically so we can handle cases like this:
+# - plugins/my_plugin/plugin.json
+# - plugins/my_plugin/my_module/plugin.json
+# In this case, only "plugins/my_plugin" should be added via install().
+set(_no_install_plugin_list ${CURA_NO_INSTALL_PLUGINS})
+# Sanitize the string so the comparison will be case-insensitive.
+string(STRIP "${_no_install_plugin_list}" _no_install_plugin_list)
+string(TOLOWER "${_no_install_plugin_list}" _no_install_plugin_list)
+
+# WORKAROUND counterpart of what's in cura-build.
+string(REPLACE "," ";" _no_install_plugin_list "${_no_install_plugin_list}")
+
+list(LENGTH _no_install_plugin_list _no_install_plugin_list_len)
+
+if(_no_install_plugin_list_len GREATER 0)
+ list(SORT _no_install_plugin_list)
+endif()
+if(_plugin_json_list_len GREATER 0)
+ list(SORT _plugin_json_list)
+endif()
+
+# Check all plugin directories and add them via install() if needed.
+set(_install_plugin_list "")
+foreach(_plugin_json_path ${_plugin_json_list})
+ get_filename_component(_plugin_dir ${_plugin_json_path} DIRECTORY)
+ file(RELATIVE_PATH _rel_plugin_dir ${CMAKE_CURRENT_SOURCE_DIR} ${_plugin_dir})
+ get_filename_component(_plugin_dir_name ${_plugin_dir} NAME)
+
+ # Make plugin name comparison case-insensitive
+ string(TOLOWER "${_plugin_dir_name}" _plugin_dir_name_lowercase)
+
+ # Check if this plugin needs to be skipped for installation
+ set(_add_plugin ON) # Indicates if this plugin should be added to the build or not.
+ set(_is_no_install_plugin OFF) # If this plugin will not be added, this indicates if it's because the plugin is
+ # specified in the NO_INSTALL_PLUGINS list.
+ if(_no_install_plugin_list)
+ if("${_plugin_dir_name_lowercase}" IN_LIST _no_install_plugin_list)
+ set(_add_plugin OFF)
+ set(_is_no_install_plugin ON)
+ endif()
+ endif()
+
+ # Make sure this is not a subdirectory in a plugin that's already in the install list
+ if(_add_plugin)
+ foreach(_known_install_plugin_dir ${_install_plugin_list})
+ if(_plugin_dir MATCHES "${_known_install_plugin_dir}.+")
+ set(_add_plugin OFF)
+ break()
+ endif()
+ endforeach()
+ endif()
+
+ if(_add_plugin)
+ if(${PRINT_PLUGIN_LIST})
+ message(STATUS "[+] PLUGIN TO INSTALL: ${_rel_plugin_dir}")
+ endif()
+ get_filename_component(_rel_plugin_parent_dir ${_rel_plugin_dir} DIRECTORY)
+ install(DIRECTORY ${_rel_plugin_dir}
+ DESTINATION lib${LIB_SUFFIX}/cura/${_rel_plugin_parent_dir}
+ PATTERN "__pycache__" EXCLUDE
+ PATTERN "*.qmlc" EXCLUDE
+ )
+ list(APPEND _install_plugin_list ${_plugin_dir})
+ elseif(_is_no_install_plugin)
+ if(${PRINT_PLUGIN_LIST})
+ message(STATUS "[-] PLUGIN TO REMOVE : ${_rel_plugin_dir}")
+ endif()
+ execute_process(COMMAND ${Python_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/cmake/mod_bundled_packages_json.py
+ -d ${CMAKE_CURRENT_SOURCE_DIR}/resources/bundled_packages
+ ${_plugin_dir_name}
+ RESULT_VARIABLE _mod_json_result)
+ endif()
+endforeach()
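For packagers, the CURA_NO_INSTALL_PLUGINS cache variable shown above is passed at configure time. A minimal sketch, not the aport's actual build recipe; "PluginA" and "PluginB" are placeholder names for whatever folders exist under plugins/ in the Cura source tree:

```sh
# configure from the Cura source directory with the patched CMakeLists.txt in place
cmake -B build -S . \
  -DCURA_NO_INSTALL_PLUGINS="PluginA,PluginB" \
  -DPRINT_PLUGIN_LIST=ON
cmake --build build
DESTDIR="$pkgdir" cmake --install build   # $pkgdir as used in an APKBUILD package() step
```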

View file

@ -0,0 +1,85 @@
--- ./CMakeLists.txt.orig
+++ ./CMakeLists.txt
@@ -1,10 +1,6 @@
# Copyright (c) 2022 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
-# NOTE: This is only being used for translation scripts.
-
-# For MSVC flags, will be ignored on non-Windows OS's and this project in general. Only needed for cura-build-environment.
-cmake_policy(SET CMP0091 NEW)
project(cura)
cmake_minimum_required(VERSION 3.18)
@@ -15,8 +11,44 @@
set(URANIUM_DIR "${CMAKE_SOURCE_DIR}/../Uranium" CACHE PATH "The location of the Uranium repository")
set(URANIUM_SCRIPTS_DIR "${URANIUM_DIR}/scripts" CACHE PATH "The location of the scripts directory of the Uranium repository")
+option(CURA_DEBUGMODE "Enable debug dialog and other debug features" OFF)
+if(CURA_DEBUGMODE)
+ set(_cura_debugmode "ON")
+endif()
+
option(GENERATE_TRANSLATIONS "Should the translations be generated?" ON)
+set(CURA_APP_NAME "cura" CACHE STRING "Short name of Cura, used for configuration folder")
+set(CURA_APP_DISPLAY_NAME "Ultimaker Cura" CACHE STRING "Display name of Cura")
+set(CURA_VERSION "master" CACHE STRING "Version name of Cura")
+set(CURA_BUILDTYPE "" CACHE STRING "Build type of Cura, eg. 'PPA'")
+set(CURA_CLOUD_API_ROOT "" CACHE STRING "Alternative Cura cloud API root")
+set(CURA_CLOUD_API_VERSION "" CACHE STRING "Alternative Cura cloud API version")
+set(CURA_CLOUD_ACCOUNT_API_ROOT "" CACHE STRING "Alternative Cura cloud account API version")
+set(CURA_MARKETPLACE_ROOT "" CACHE STRING "Alternative Marketplace location")
+set(CURA_DIGITAL_FACTORY_URL "" CACHE STRING "Alternative Digital Factory location")
+
+configure_file(${CMAKE_SOURCE_DIR}/com.ultimaker.cura.desktop.in ${CMAKE_BINARY_DIR}/com.ultimaker.cura.desktop @ONLY)
+
+configure_file(cura/CuraVersion.py.in CuraVersion.py @ONLY)
+
+if(NOT DEFINED Python_VERSION)
+ set(Python_VERSION
+ 3.11
+ CACHE STRING "Python Version" FORCE)
+ message(STATUS "Setting Python version to ${Python_VERSION}. Set Python_VERSION if you want to compile against an other version.")
+endif()
+if(APPLE)
+ set(Python_FIND_FRAMEWORK NEVER)
+endif()
+find_package(Python ${Python_VERSION} EXACT REQUIRED COMPONENTS Interpreter)
+message(STATUS "Linking and building ${project_name} against Python ${Python_VERSION}")
+if(NOT DEFINED Python_SITELIB_LOCAL)
+ set(Python_SITELIB_LOCAL
+ "${Python_SITELIB}"
+ CACHE PATH "Local alternative site-package location to install Cura" FORCE)
+endif()
+
if(NOT ${URANIUM_DIR} STREQUAL "")
set(CMAKE_MODULE_PATH "${CMAKE_MODULE_PATH};${URANIUM_DIR}/cmake")
endif()
@@ -29,4 +61,24 @@
if(${GENERATE_TRANSLATIONS})
CREATE_TRANSLATION_TARGETS()
endif()
-endif()
\ No newline at end of file
+endif()
+
+install(DIRECTORY resources DESTINATION ${CMAKE_INSTALL_DATADIR}/cura)
+
+include(CuraPluginInstall)
+
+install(FILES cura_app.py DESTINATION ${CMAKE_INSTALL_BINDIR}
+ PERMISSIONS OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ WORLD_EXECUTE)
+install(DIRECTORY cura DESTINATION "${Python_SITELIB_LOCAL}")
+install(FILES ${CMAKE_BINARY_DIR}/CuraVersion.py DESTINATION "${Python_SITELIB_LOCAL}/cura/")
+if(NOT APPLE AND NOT WIN32)
+ install(FILES ${CMAKE_BINARY_DIR}/com.ultimaker.cura.desktop
+ DESTINATION ${CMAKE_INSTALL_DATADIR}/applications)
+ install(FILES ${CMAKE_SOURCE_DIR}/resources/images/cura-icon.png
+ DESTINATION ${CMAKE_INSTALL_DATADIR}/icons/hicolor/128x128/apps/)
+ install(FILES com.ultimaker.cura.appdata.xml
+ DESTINATION ${CMAKE_INSTALL_DATADIR}/metainfo)
+ install(FILES cura.sharedmimeinfo
+ DESTINATION ${CMAKE_INSTALL_DATADIR}/mime/packages/
+ RENAME cura.xml )
+endif()

backports/dex/APKBUILD Normal file
View file

@ -0,0 +1,26 @@
# Contributor: Anjandev Momi <anjan@momi.ca>
# Maintainer: Anjandev Momi <anjan@momi.ca>
pkgname=dex
pkgver=0.9.0
pkgrel=1
pkgdesc="program to generate and execute DesktopEntry files of the Application type"
url="https://github.com/jceb/dex"
arch="all"
license="GPL-3.0-or-later"
depends="python3"
makedepends="py3-sphinx"
subpackages="$pkgname-doc"
source="$pkgname-$pkgver.tar.gz::https://github.com/jceb/dex/archive/refs/tags/v$pkgver.tar.gz"
options="!check" # no testsuite
build() {
make
}
package() {
make install PREFIX=/usr MANPREFIX=/usr/share/man DESTDIR="$pkgdir"
}
sha512sums="
d68f5482cb0948f27a724437ddfc6de9a0f502bfd0d5c60c76fb85dda3c30e4c432013e530f6a91138c9ac9ff36b3824cd5e382e9d29bb9fb2ec2b9de4133094 dex-0.9.0.tar.gz
"

View file

@ -1,7 +1,7 @@
# Contributor: lauren n. liberda <lauren@selfisekai.rocks>
# Maintainer: lauren n. liberda <lauren@selfisekai.rocks>
pkgname=electron-tasje
pkgver=0.7.3
pkgver=0.7.1
pkgrel=0
pkgdesc="Tiny replacement for electron-builder"
url="https://codeberg.org/selfisekai/electron_tasje/"
@ -31,5 +31,5 @@ package() {
}
sha512sums="
251b7eabe74acdb5c7394f9d4d735b736acf555352785a9896ddaeed37632b238e823e1bb639e1f5a44a50455957ec41e1a585a3b2a9919b5818bb40843bd877 electron_tasje-0.7.3.tar.gz
665ccbd6cb357c25d55daed4ad3b3ce008da258054951d9d069a5b12e72dd5812d534f906868e6b18e78949f058069a961c394c6f21ab3b3fab5393c330445e5 electron_tasje-0.7.1.tar.gz
"

View file

@ -1,21 +0,0 @@
From adbc495726382c023b755c35aea36c6e9cad1950 Mon Sep 17 00:00:00 2001
From: LN Liberda <lauren@selfisekai.rocks>
Date: Sat, 23 Aug 2025 03:11:09 +0200
Subject: [PATCH] hotfix: ignore a new warning in rust 1.89
---
third_party/rust/chromium_crates_io/vendor/qr_code-v2/src/lib.rs | 1 -
1 file changed, 1 deletion(-)
diff --git a/third_party/rust/chromium_crates_io/vendor/qr_code-v2/src/lib.rs b/third_party/rust/chromium_crates_io/vendor/qr_code-v2/src/lib.rs
index 1b729621c2f47..55d392c6da72f 100644
--- a/third_party/rust/chromium_crates_io/vendor/qr_code-v2/src/lib.rs
+++ b/third_party/rust/chromium_crates_io/vendor/qr_code-v2/src/lib.rs
@@ -5,7 +5,6 @@
//!
#![deny(missing_docs)]
-#![deny(warnings)]
#![allow(
clippy::must_use_candidate, // This is just annoying.
clippy::use_self, // Rust 1.33 doesn't support Self::EnumVariant, let's try again in 1.37.

View file

@ -1,32 +1,30 @@
# Contributor: lauren n. liberda <lauren@selfisekai.rocks>
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
# Maintainer: lauren n. liberda <lauren@selfisekai.rocks>
pkgname=electron
pkgver=39.2.7
_gittag=v"${pkgver/_beta/-beta.}"
pkgver=30.0.9
_semver="${pkgver/_beta/-beta.}"
pkgrel=0
_chromium=142.0.7444.235
_copium_tag=142.0
_chromium=124.0.6367.233
_copium_tag=124.5
_depot_tools=495b23b39aaba2ca3b55dd27cadc523f1cb17ee6
pkgdesc="Electron cross-platform desktop toolkit"
url="https://github.com/electron/electron"
# armv7: Segmentation fault on builder despite building in CI
arch="aarch64 x86_64" # same as chromium
license="MIT"
depends="gtk+3.0 so:libudev.so.1 xdg-utils"
_llvmver=21
makedepends="
ada-dev
alsa-lib-dev
aom-dev
base64-dev
bash
brotli-dev
bsd-compat-headers
bzip2-dev
c-ares-dev
cairo-dev
clang$_llvmver-dev
clang$_llvmver-rtlib
clang-dev
clang-extra-tools
compiler-rt
crc32c-dev
cups-dev
curl-dev
@ -45,14 +43,13 @@ makedepends="
gn
gzip
harfbuzz-dev
highway-dev
hdrhistogram-c-dev
highway-dev
hunspell-dev
http-parser-dev
hwdata-dev
java-jdk
jpeg-dev
jsoncpp-dev
krb5-dev
lcms2-dev
libarchive-tools
@ -60,6 +57,7 @@ makedepends="
libbsd-dev
libcap-dev
libdrm-dev
libevent-dev
libexif-dev
libgcrypt-dev
libjpeg-turbo-dev
@ -76,9 +74,8 @@ makedepends="
libxscrnsaver-dev
libxslt-dev
linux-headers
lld$_llvmver
llhttp-dev
llvm$_llvmver
lld
llvm
mesa-dev
minizip-dev
nghttp2-dev
@ -97,89 +94,81 @@ makedepends="
py3-setuptools
py3-six
python3
re2-dev
qt5-qtbase-dev
rsync
rust
rust-bindgen
samurai
simdutf-dev
snappy-dev
speex-dev
spirv-tools-dev
sqlite-dev
woff2-dev
xcb-proto
yarn-berry
yarn
zlib-dev
zstd-dev
"
subpackages="$pkgname-lang $pkgname-dev"
# the lower patches are specific to electron, the top ones are from the equivalent chromium version
source="
https://ayakael.net/api/packages/mirrors/generic/electron/$_gittag/electron-$_gittag-$_chromium.tar.zst
source="https://ab-sn.lnl.gay/electron-$_semver-$_chromium.tar.zst
copium-$_copium_tag.tar.gz::https://codeberg.org/selfisekai/copium/archive/$_copium_tag.tar.gz
0001-hotfix-ignore-a-new-warning-in-rust-1.89.patch
chromium-revert-drop-of-system-java.patch
compiler.patch
disable-dns_config_service.patch
disable-failing-tests.patch
fc-cache-version.patch
fix-ffmpeg-codec-list.patch
fix-missing-cstdint-include-musl.patch
fix-opus.patch
fstatat-32bit.patch
gdbinit.patch
generic-sensor-include.patch
headless-shell-no-license.patch
import-version.patch
libstdc++13.patch
mman.patch
musl-auxv.patch
musl-sandbox.patch
musl-tid-caching.patch
net-test-no-vpython.patch
net-test-pyws3-py3.12.patch
musl-v8-monotonic-pthread-cont_timedwait.patch
no-execinfo.patch
no-mallinfo.patch
no-res-ninit-nclose.patch
no-sandbox-settls.patch
partalloc-no-tagging-arm64.patch
perfetto-libstdc++.patch
pvalloc.patch
random-fixes.patch
quiche-array.patch
system-zstd.patch
temp-failure-retry.patch
yes-musl.patch
electron_icon.patch
electron_python-jinja-3.10.patch
electron_webpack-hash.patch
electron_unbundle-node.patch
electron_system-zlib-headers.patch
electron_do-not-strip-binaries.patch
electron_shell-file-dialog-drop-glibc.patch
electron_use-system-yarn.patch
icon.patch
python-jinja-3.10.patch
webpack-hash.patch
unbundle-node.patch
default.conf
electron.desktop
electron-launcher.sh
"
_copium_patches="
cr131-v8-non4k-pages.patch
cr133-ffmpeg-no-noh264parse.patch
cr133-is-musl-libcxx.patch
cr138-node-version-check.patch
cr140-musl-prctl.patch
cr142-autofill-incomplete-formfielddata.patch
cr124-iwyu-sys-select-dawn-terminal.patch
cr124-libwebp-shim-sharpyuv.patch
"
# Avoid conflicting providers
sonameprefix="$pkgname:"
# tests are todo for some base checks
options="!check net suid"
builddir="$srcdir/electron-$_gittag-$_chromium"
builddir="$srcdir/electron-$_semver-$_chromium"
export CC="/usr/lib/llvm$_llvmver/bin/clang"
export CXX="/usr/lib/llvm$_llvmver/bin/clang++"
export PATH="$PATH:/usr/lib/qt5/bin"
export CC=clang
export CXX=clang++
# required to find the tools
export NM="/usr/lib/llvm$_llvmver/bin/llvm-nm"
export AR="/usr/lib/llvm$_llvmver/bin/llvm-ar"
export LD="/usr/lib/llvm$_llvmver/bin/clang++"
export AR=llvm-ar
export NM=llvm-nm
export LD=clang++
# less log spam, reproducible
export CFLAGS="${CFLAGS/-g/} -O2 -Wno-builtin-macro-redefined -Wno-deprecated-declarations -Wno-shift-count-overflow -Wno-ignored-attributes"
export CXXFLAGS="${CXXFLAGS/-g/} -O2 -Wno-builtin-macro-redefined -Wno-deprecated-declarations -Wno-invalid-constexpr"
# _LIBCPP_HARDENING_MODE=_LIBCPP_HARDENING_MODE_EXTENSIVE is set by project
export CXXFLAGS="${CXXFLAGS/-D_LIBCPP_HARDENING_MODE=_LIBCPP_HARDENING_MODE_FAST/}"
export CPPFLAGS="${CPPFLAGS/-g/} -D__DATE__= -D__TIME__= -D__TIMESTAMP__="
case "$CARCH" in
aarch64|arm*|riscv64)
@ -192,10 +181,6 @@ esac
# breaks chromium-based stuff
export CXXFLAGS="${CXXFLAGS/-D_GLIBCXX_ASSERTIONS=1}"
# workaround to error: undefined symbol: __rustc::__rust_dealloc
# with 000*.patch patches
export RUSTC_BOOTSTRAP=1
# creates a dist tarball that does not need to git clone everything at build time.
_distbucket="sakamoto/lnl-aports-snapshots/"
snapshot() {
@ -203,7 +188,6 @@ snapshot() {
# vpython3 execs system python3 with this set
export VPYTHON_BYPASS="manually managed python not supported by chrome operations"
export CHROMIUM_BUILDTOOLS_PATH="$srcdir/src/buildtools"
export DEPOT_TOOLS_UPDATE=0
mkdir -p "$srcdir"
cd "$srcdir"
@ -221,7 +205,7 @@ snapshot() {
echo "solutions = [
{
\"name\": \"src/electron\",
\"url\": \"https://github.com/electron/electron.git@$_gittag\",
\"url\": \"https://github.com/electron/electron.git@v$_semver\",
\"deps_file\": \"DEPS\",
\"managed\": False,
\"custom_deps\": {
@ -236,8 +220,9 @@ snapshot() {
--nohooks
python3 src/build/landmines.py
python3 src/build/util/lastchange.py -m DAWN_COMMIT_HASH -s src/third_party/dawn \
--revision src/gpu/webgpu/DAWN_VERSION --header src/gpu/webgpu/dawn_commit_hash.h
python3 src/build/util/lastchange.py -o src/build/util/LASTCHANGE
python3 src/build/util/lastchange.py -s src/third_party/dawn \
--revision src/gpu/webgpu/DAWN_VERSION
python3 src/build/util/lastchange.py -m GPU_LISTS_VERSION \
--revision-id-only --header src/gpu/config/gpu_lists_version.h
python3 src/build/util/lastchange.py -m SKIA_COMMIT_HASH \
@ -249,31 +234,28 @@ snapshot() {
python3 src/electron/script/apply_all_patches.py \
src/electron/patches/config.json
mv src $pkgname-$_gittag-$_chromium
mv src $pkgname-$_semver-$_chromium
# extra binaries are most likely things we don't want, so nuke them all
for elf in $(scanelf -RA -F "%F" $pkgname-$_gittag-$_chromium); do
for elf in $(scanelf -RA -F "%F" $pkgname-$_semver-$_chromium); do
rm -f "$elf"
done
msg "generating tarball.. (this takes a while)"
tar -cf $pkgname-$_gittag-$_chromium.tar \
tar -cf $pkgname-$_semver-$_chromium.tar \
--exclude="ChangeLog*" \
--exclude="testdata/*" \
--exclude="test_data/*" \
--exclude="android_rust_toolchain/*" \
--exclude="third_party/instrumented_libs/binaries" \
--exclude-backups \
--exclude-caches-all \
--exclude-vcs \
$pkgname-$_gittag-$_chromium
$pkgname-$_semver-$_chromium
zstd --auto-threads=logical --ultra --long -22 -T"${ZSTD_LIMIT:-0}" -vv $pkgname-$_gittag-$_chromium.tar -o "$SRCDEST"/$pkgname-$_gittag-$_chromium.tar.zst
mcli cp "$SRCDEST"/$pkgname-$_gittag-$_chromium.tar.zst "$_distbucket"
zstd --auto-threads=logical --ultra --long -22 -T"${ZSTD_LIMIT:-0}" -vv $pkgname-$_semver-$_chromium.tar -o "$SRCDEST"/$pkgname-$_semver-$_chromium.tar.zst
mcli cp "$SRCDEST"/$pkgname-$_semver-$_chromium.tar.zst "$_distbucket"
}
export RUSTC_BOOTSTRAP=1
prepare() {
default_prepare
@ -285,7 +267,7 @@ prepare() {
;;
esac
done
if [ -n "$failed" ]; then
if [ ! -z "$failed" ]; then
error "The following patches failed to apply:"
for i in $failed; do
printf " %s\n" "$i" >&2
@ -309,9 +291,9 @@ prepare() {
git config commit.gpgsign false
git add LICENSE
git commit -m "init"
git tag "$_gittag"
git tag "v$_semver"
git pack-refs
yarn install --immutable --mode=skip-build
yarn install --frozen-lockfile --no-scripts
)
(
@ -319,19 +301,12 @@ prepare() {
./update_npm_deps
)
# generate dawn_commit_hash
# TODO: remove on next update as it'll be generated after
# https://ayakael.net/mirrors/electron/commit/7623f4a14ab44fa4f4343e47d9d681c9b4aa984c
python3 build/util/lastchange.py -m DAWN_COMMIT_HASH -s third_party/dawn \
--revision gpu/webgpu/DAWN_VERSION --header gpu/webgpu/dawn_commit_hash.h
# reusable system library settings
# flatbuffers - tensorflow has a few static_asserts for a specific patch version
# libavif - https://github.com/AOMediaCodec/libavif/commit/50a541469c98009016af8dcc9f83a1be79f3a7d9
# libaom - https://aomedia.googlesource.com/aom/+/706ee36dcc82%5E%21/
# but watch this space: https://aomedia-review.googlesource.com/c/aom/+/188606
# jsoncpp, re2, snappy, swiftshader-*, woff2 - requires use_custom_libcxx=false
# icu 76 does not build - https://bugs.gentoo.org/943216
# jsoncpp, re2, snappy, swiftshader, vulkan, woff2 - needs use_custom_libcxx=false
local chromium_use_system="
brotli
crc32c
@ -343,7 +318,9 @@ prepare() {
freetype
harfbuzz-ng
highway
icu
libdrm
libevent
libjpeg
libsecret
libusb
@ -352,14 +329,12 @@ prepare() {
libxslt
openh264
opus
simdutf
zlib
zstd
"
for _lib in $chromium_use_system jinja2 libjpeg_turbo unrar; do
for _lib in $chromium_use_system jinja2 libjpeg_turbo; do
msg "Removing buildscripts for system provided $_lib"
_lib="${_lib/swiftshader-/swiftshader/third_party/}"
find . -type f -path "*third_party/$_lib/*" \
\! -path "*third_party/$_lib/chromium/*" \
\! -path "*third_party/$_lib/google/*" \
@ -372,9 +347,10 @@ prepare() {
-delete
done
# llhttp - 9.x needed, 8.x in repo (2023-12-17)
# ada - needs use_custom_libcxx=false
local node_use_system="
llhttp
base64
brotli
cares
corepack
@ -411,12 +387,6 @@ prepare() {
$chromium_use_system
python3 third_party/libaddressinput/chromium/tools/update-strings.py
# flatc is used in build workflows since https://crrev.com/c/5595037,
# but the pre-generated files are still checked-in. remove to make sure
# they're not used. (if used, they will break builds on version mismatch.)
# https://github.com/tensorflow/tensorflow/issues/62298
# find third_party/tflite/ -name '*_generated.h' -delete
# prevent annoying errors when regenerating gni
sed -i 's,^update_readme$,#update_readme,' \
third_party/libvpx/generate_gni.sh
@ -429,9 +399,8 @@ prepare() {
sed -i -e 's/\<xmlMalloc\>/malloc/' -e 's/\<xmlFree\>/free/' \
third_party/blink/renderer/core/xml/*.cc \
third_party/blink/renderer/core/xml/parser/xml_document_parser.cc \
third_party/libxml/chromium/*.cc
echo "$CTARGET" >> build/rust/known-target-triples.txt
third_party/libxml/chromium/*.cc \
third_party/maldoca/src/maldoca/ole/oss_utils.h
_configure
}
@ -440,6 +409,8 @@ _configure() {
cd "$builddir"
msg "Configuring build"
local clang_ver="$(clang -dumpversion)"
case "$USE_CCACHE" in
1)
local cc_wrapper="ccache"
@ -462,13 +433,14 @@ _configure() {
chrome_pgo_phase=0
clang_base_path=\"/usr\"
clang_use_chrome_plugins=false
clang_version=\"$_llvmver\"
clang_version=\"${clang_ver%%.*}\"
custom_toolchain=\"//build/toolchain/linux/unbundle:default\"
disable_fieldtrial_testing_config=true
enable_hangout_services_extension=true
enable_nacl=false
enable_nocompile_tests=false
enable_rust=true
enable_stripping=false
enable_rust=true
enable_vr=false
fatal_linker_warnings=false
ffmpeg_branding=\"Chrome\"
@ -478,20 +450,16 @@ _configure() {
is_clang=true
is_component_ffmpeg=true
is_debug=false
is_musl=true
is_official_build=true
link_pulseaudio=true
node_version_check=false
proprietary_codecs=true
regenerate_x11_protos=true
rtc_link_pipewire=true
rtc_use_pipewire=true
rustc_version=\"yes\"
rust_bindgen_root=\"/usr\"
rust_sysroot_absolute=\"/usr\"
safe_browsing_use_unrar=false
symbol_level=$symbol_level
treat_warnings_as_errors=false
use_clang_modules=false
use_custom_libcxx=true
use_lld=true
use_pulseaudio=true
@ -505,11 +473,12 @@ _configure() {
skia_use_dawn=false
use_dawn=false
use_system_ada=false
use_system_base64=true
use_system_cares=true
use_system_histogram=true
use_system_lcms2=true
use_system_libffi=true
use_system_llhttp=true
use_system_llhttp=false
use_system_nghttp2=true
"
@ -519,13 +488,17 @@ _configure() {
}
build() {
export ELECTRON_OUT_DIR="$builddir"/out/Release/
export PATH="$PATH:/usr/lib/qt5/bin"
ninja -C out/Release \
copy_node_headers \
# ~1 GB per concurrent job is not enough with gcc
_njobs="${NJOBS:-"$(python3 -c 'import os; print(max((os.cpu_count() - (10 if os.uname().machine == "aarch64" else 8), 1)))')"}"
ninja -C out/Release -j$_njobs \
electron_dist_zip \
node_gypi_headers \
node_version_header
cp -vf out/Release/gen/node_headers/include/node/config.gypi third_party/electron_node/config.gypi
}
package() {
@ -539,7 +512,13 @@ package() {
install -Dm755 "$srcdir"/default.conf "$pkgdir"/etc/electron/default.conf
mkdir -p "$pkgdir"/usr/include/electron
cp -rv "$builddir"/out/Release/gen/node_headers "$pkgdir"/usr/include/electron
(
cd third_party/electron_node/
HEADERS_ONLY=1 python3 ./tools/install.py install "$pkgdir" "/usr/include/electron/node_headers"
)
# required overrides
install -Dm644 out/Release/gen/node_headers/include/node/* -t "$pkgdir"/usr/include/electron/node_headers/include/node
ln -sv /usr/include/electron/node_headers/include/node "$pkgdir"/usr/include/electron/node
mkdir -p "$pkgdir"/usr/include/electron/node_headers/include/nan
@ -565,37 +544,41 @@ lang() {
}
sha512sums="
6dc7161f6df396e2b7569b0a607e264b43a2d7215de65164dc2ca04c019df93ea0a67dec2490071c09c8a03f90605faaf3880f2d843f838bb5d841bba204c298 electron-v39.2.7-142.0.7444.235.tar.zst
30b298549804e7753b0b639b72417ba081e964676862b6c7d73ad73cdf806883f20e4a4b36e67a6c375eaf2dd97686cf21b90b062400d3b61fba86da4d239bfa copium-142.0.tar.gz
69b45005451ccd69c354b4c2910e92371cb801665f5e300dbecd36f8bc4ce68e77a431b5dac07c0937787debb4e93b7aadefa0a1e76c4ae334d2547ca3ca14ff 0001-hotfix-ignore-a-new-warning-in-rust-1.89.patch
dc254dd79e135aeac3e9c03eb055e3bc17980fc213f8c4d8d7921a575be7f9c26b91f110a6dcb01c0a824a7d9375c09f8a61c8858c20c11d79c03f873e2cb3f9 compiler.patch
1bee1448e409fedff635388ee6f1efa6d23c29ae3e6b6fd31452c56974adb40fcd0088c82d1e643d549154663e402942cbab9807dff5aff2d8997a09de6f5655 disable-dns_config_service.patch
0ef9168b8b1a4779bc4c8df718735e06d29e459dcfd00f8cbf9a4edaf9fade8089225219e46dead7de81de716bddc8d745dc2069db0ee7f7e5d2f64c5236e2ab disable-failing-tests.patch
0050857a9a9553c10fd502fe70606bce48269c9b48fa82ce9e111575637a0c03578e923c82fc639fcb574fc3337aeef50d8a0aea5e512ae4eab83b8c3d732cf6 fc-cache-version.patch
87f63d83139562e058f3f649eb1f62bf100dd92c2bb6ee393fdce0c8f7d7c188a7062394647aafe4e82c0a8fbbffeb613edc5c8dd9415dd9dda777827ea371c5 fix-ffmpeg-codec-list.patch
97024407a16fb41ec56fcc6df5552165ce4eea34fc51b17ecbf30a7e35406baccf8a3001a795a06d1e92d32e134d9d7a18d59fa74eda1b1bc23b59bc4947929b electron-30.0.9-124.0.6367.233.tar.zst
13c647dc2024e27ae8a4d7e8f1202037a342f4a7054477226665c332029e1b6f1d8b99004c2b2809bcf1e6c19b1359ef5e1c971552d7ced59c5b43d5a836af88 copium-124.5.tar.gz
29bb685e03356a77df5fd347cdf55194cc8b3265c421cc76e54d64edefc329dbcb052deb26b22e8f587ce68456876c071de1b7d258dd0fcc6ee66c875ec4a020 chromium-revert-drop-of-system-java.patch
54eb147c0af2ba096d1df375a289b339ee855ab1a9114e7c747753f0274a6bafb7212c1859b7885454c4529d9a5e3bd9559fc14e8e006f23ccd381895fa68d15 compiler.patch
4057cc78f10bfd64092bc35a373869abb1d68b880cdbca70422f39ffd78a929c19c7728d4d4c40709aaba25581148a93ae5343e724849fd35323062ed68753fa disable-dns_config_service.patch
111bc22fb704d97759988268a40d6b356c51b0bd7a8119a694e905ffe21850ff64e91566cd0dd0c9d62fcb46dca8acc821436c34eb0ba78be872ee4f7ec88a7b disable-failing-tests.patch
5fc5c012c1db6cf1ba82f38c6f3f4f5ca3a209e47ac708a74de379b018e0649b7694877c9571ef79002dde875ffc07b458a3355425f1c01867f362c66c2bc1bf fc-cache-version.patch
9200f78bad70e95c648a5e8392d50642190600f655c6baa366ff6467ebad52d3b3f305dad58f3610da67136f4b723557653b174ec5c25be8d8737ee04d9ee09f fix-missing-cstdint-include-musl.patch
b24563e9a738c00fce7ff2fbdee3d7c024d9125d7c74d9ab90af6bdb16f7ec8419f2c8aa78c0640f6d5d81c17dc2c673a194401d354f466749672729b48ed068 fix-opus.patch
c63dee5044353eb306a39ca1526158c0f003ab310ecb03d1c368dc2a979454590c84b8d3c15484517d5e66bb8add9b231da9abbadf2e50850abd72ac1345c4ab fstatat-32bit.patch
33ee60863cc438ef57ffef92ba4cf67a856a5ffc16138bce241bcf87e47b15154aa86918e793c26f7ec4dc62a445257ad5673ed7001daf22c4043cf6cc57da7f gdbinit.patch
36a764fa73443b47d38050b52dbe6ad2fa8d67201ff4ccdbad13b52308ef165ca046aac6f9609fe35890a6485f0f3e672e78cc41e3e44f3cdc7f145e540524e8 generic-sensor-include.patch
a94cf7a0670abf5178abba33c619cc6d41d73f2e16c7a1fd5b152152f5077df103e049d166e3b8627797c38113821d2f2e6b64cd48d132c1e90ad32d63a349f5 headless-shell-no-license.patch
51f1959bd622af26a1c3a1f4b0ad9a5bfa461057aa4cf9960c568dddf8ac47d55989c277f5d5ab5db040a04c54925a531af7a1cc767559218b408eaa6bdd7577 musl-sandbox.patch
8de65109ece27ea63bd469f2220c56b8c752ba0a50fdf390082a2d5ae74b8e010199126175569f6d5084270dd4e0571e68aec32c0bca8211a6699925b3a09124 import-version.patch
c49a1b06e061faa430d66dd5b404ef6c843e4d8a6e9012e963009a161b4726f8eb92c4da8fa710f8861f6e4daa8be5f68abee41a7d9fc02f2a0eb61ce53b5fdd libstdc++13.patch
e75f57ae34c97ca1caf15fa4b4106c6c1e79c31ed66869cf92ed9ea0c449886c9511e455047c17c1e9ad8b9a46ad4948511a4f2995a4b6030fb4d1c7ae21d038 mman.patch
99bcc7dd485b404a90c606a96addab1d900852128d44fb8cea8acc7303189ef87c89a7b0e749fd0e10c5ef5f6bf1fadeb5c16a34503cab6a59938ce2653d887e musl-auxv.patch
50c274a420bb8a7f14fcb56e40920dac8f708792a4520789b4987facea459bef88113d5a2b60fa8c57bee6e92bff3617d6b73fa305c8c44614c638971cffd440 musl-sandbox.patch
e7163ac5810ac85366cef2447412287c856e3d67c6b77f219a6e5a418b1965b98e449c409424ad0704a5bded9355dd0aec3dc4585918ce5a2ab36c079707afe2 musl-tid-caching.patch
3b7420d58d13dfc4baab5065e3017f666f51fed6de087af42a660a839d7b4444b50d1a93204322d213df36c6722eaf6b08d46d50dc374198a342da2675fafff5 net-test-no-vpython.patch
e487662b6606ea526ddd716c31e6b9ad3d61f1bee5356cd94b78a903efb3928338cbb48e3d5840b34c3b70a71e8361a228430bd50e707ad301228a7049d59e37 net-test-pyws3-py3.12.patch
a250cff50d282b02ce0f28880d0a2b4fb8e7df51bc072bfeeddc561c29a7c76453dbcbc7b17b82966a7b30a31409d2555720d1dcf963e1b3fb8a2a06a6abcf46 no-execinfo.patch
0b41aeb6b212f9c3f61aa0a8d3085c9e865a2e68f3270ceec2376aab67f337ac46eaea7da36d3fd7219e2a1cb731b7aa2d3fb619a374d2b7653976b9f4f384bb no-mallinfo.patch
92eb002718026611f5542362ad69b67f0a398ff71b3fca5c05d55cb5c6f9f29334e5e127bb4860cfaa3fba0f0d4c901e2b98808217e7dc02e254a64a5c9521aa musl-v8-monotonic-pthread-cont_timedwait.patch
73bca6c6f9873f2f11cef04f3f41f71e0ae86e7e2d77e14db4298675fec390744c5081f6fdb14052e5ee2b5885be1198c3aa6068eb2b656d1a665c0c3f36e708 no-execinfo.patch
8e17101d69e23b456a9c03dc2fe95bcd56846389707ba6f4720192a9e9168406d20d9168dbebbb3a47d921ec92e478f0e390f46e6b9bb43a34dda217c6e6448b no-mallinfo.patch
e4c4e5bc6f828f9c883dd418c0ba01887949c29c311f76206a1ec29f620b0c0ba0452949dc2778a9c46ea066405857536964a36436a68eecf7da7952736333cf no-res-ninit-nclose.patch
6dc4d8dc92e685dace62265a1ddb3aebc558aed54d20ff6d36b030be0c48d7e84662326c31363612492574d9a03c62653cdc21a60995b97dee1d75cae86a9f9b no-sandbox-settls.patch
b75908a45ee2f4f806eec8d86fca2f51fda3531b88de48ef4539c364a40d7e2897cdaf38b715682d712648e3f43aac983055e688385f85fa7b7204ffb6d617e1 partalloc-no-tagging-arm64.patch
03f829a2da633533ef3fd0f287f5ec602d936a97a98b53cd2415553c2537ae9d571f35397ca7c9fb3f4b0806c300e3b189569f8d979ca132e1a2a4dae7206396 pvalloc.patch
d4ac7f350806b4410ccb1df3b0ad7e90a7b6d724a16919761aa2d47a6f21008c7374da528b05b754ee712b85d23adfb113c7f7b9ca2ed5b47644fe3ea0cb9119 partalloc-no-tagging-arm64.patch
8e1aca983890c78d81a6f888b2cf1aa42878d1f8523e87d63b800e1e468cbfd33e5ff6a0975775ca222fe82f30c6497da95505da01b091c8776a44c98ac86f0f perfetto-libstdc++.patch
2eb434b4fc6aee77026492644cd86772a543d9845f112a75cd4c3e1f25c9435cc31f8454c1c73223451fc9be69b86e822ff68821978f67f2fc8bcba50296d8e0 pvalloc.patch
803b8117c65132f76bec42054a4b2257a078b15b07fd08645fec2dfd51aa4e0075a9015300cd579d4ae0d757d9850b9988e080cfc2eea093f6684fdf82c4722c random-fixes.patch
86f612dd2b39602984a75b1b11c2ab8bc8cc6b4e78fae998570a777a6901ae45fdcdb22e46dd006dab703a0674e64c72cf8120af2dc5b9e78004f402c7e65358 quiche-array.patch
b3beb98b539fe160fbc493ba410ae0f68540cc4b6834f1f8ce9a22c3f4f59ef5d583ad48793e10549fd02a701f833a3969791ef4524322cd1e715ca5bf226bc8 system-zstd.patch
e48693e6b7aeebf69a5acbf80d9a35defe4c23835121dfeb58b051ac7c527e758a41004f4d193274fe1b01c0bfb1dbc77b09cb6a404a3fdee507a2918afb0edb temp-failure-retry.patch
465107da7818b237e3c144a318ab80c3c9343b51ed38b8971ef204692d13346929becbe94cefad4c153788d3a200642143584d5ca070f6304e768ba2139c19ec electron_icon.patch
e05180199ee1d559e4e577cedd3e589844ecf40d98a86321bf1bea5607b02eeb5feb486deddae40e1005b644550331f6b8500177aa7e79bcb3750d3c1ceb76c3 electron_python-jinja-3.10.patch
2aa340854316f1284217c0ca17cbf44953684ad6c7da90815117df30928612eb9fb9ffb734b948dfc309cd25d1a67cd57f77aac2d052a3dd9aca07a3a58cbb30 electron_webpack-hash.patch
c7f57929943a86f9e5f333da9d5691da88038770eeb46dd0a0719962c934deb2879f0e7a1ed714e9383e38ee4d68eb754501f362c4d7cdee76cfc2e980b21272 electron_unbundle-node.patch
4d9287d4cdfe27fbfb7be3d4b26c0c40edbd6a0c3ff926d60f2093ca09c15bcb58e20c2ccc8c0606aafd66c6d25a54225bc329cb056d8c5b297db4c6d0e768e6 electron_system-zlib-headers.patch
7031ddb61a858e95d83366185a53b5a2e4be9abe0aa4957543e0621cad57175ffef31bd87b8be25255184bb4cb30ec4fbced055407c6c8c7940c9e240b25d498 electron_do-not-strip-binaries.patch
0f8f36c21cc50c80e378691265845ff10fa53953d6cd5352fe71efcba489f956e50d374d8f634dadc3569c4901a81a1f308a3e69140c0f9136e0777022b9520f electron_shell-file-dialog-drop-glibc.patch
3fd20144ed171cf9706899a1481141c7fa3e98b17d600cdc5a3a68ba39059cebd9e5ccb5534af3e262f689df381bc3cb630ac24e46dd6f6c72eac4f4b6b14b35 electron_use-system-yarn.patch
914ccf649d7771f19f209ab97f99c481aebc6f66174d68e8b539f6ad4a70bc8cb0fae2df6dadbf0415958ffb3574c420fe029079dcce45f5e5add4db2e903566 yes-musl.patch
465107da7818b237e3c144a318ab80c3c9343b51ed38b8971ef204692d13346929becbe94cefad4c153788d3a200642143584d5ca070f6304e768ba2139c19ec icon.patch
e05180199ee1d559e4e577cedd3e589844ecf40d98a86321bf1bea5607b02eeb5feb486deddae40e1005b644550331f6b8500177aa7e79bcb3750d3c1ceb76c3 python-jinja-3.10.patch
2aa340854316f1284217c0ca17cbf44953684ad6c7da90815117df30928612eb9fb9ffb734b948dfc309cd25d1a67cd57f77aac2d052a3dd9aca07a3a58cbb30 webpack-hash.patch
ebb18a0e2eba4b4606e900fa82f4b57fe91dcbdc943e17544bccb3c9a011a49b4331cdbee59629e44b80184bad4ea54ec887c0bfcd00cda8d5686060dbf365e3 unbundle-node.patch
e8ea87c547546011c4c8fc2de30e4f443b85cd4cfcff92808e2521d2f9ada03feefb8e1b0cf0f6b460919c146e56ef8d5ad4bb5e2461cc5247c30d92eb4d068e default.conf
191559fc7aa1ea0353c6fb0cc321ee1d5803a0e44848c8be941cfab96277b0de6a59962d373e2a2a1686c8f9be2bcf2d2f33706759a339a959e297d3f7fda463 electron.desktop
5f7ba5ad005f196facec1c0f26108356b64cafb1e5cfa462ff714a33b8a4c757ac00bfcb080da09eb5b65032f8eb245d9676a61ec554515d125ed63912708648 electron-launcher.sh

View file

@ -1,59 +0,0 @@
# electron
This is the `electron` package for Alpine Linux.
Please report any issues [using Gitlab](https://gitlab.alpinelinux.org/alpine/aports/-/issues/new) and tag @ayakael
## Building electron
Electron is an application framework based on `chromium`. Just like `chromium`
and any Google application, the build process is a form of
[hostile architecture](https://en.wikipedia.org/wiki/Hostile_architecture). For the
most part it is quite literally chromium with patches applied on top: the build
process applies a series of git patches against `chromium` from patch directories
with a script.
Its source code isn't available as a downloadable tarball. It is only fetchable
using Google's `gclient` available in `depot_tools` with a reimplemented
version in the `teapot` package. By executing `abuild snapshot`, the tarball
can be fetched and packaged, as long as `gclient` is in your path. For ease of
maintenance, a workflow on [Ayakael's Forge](https://ayakael.net/mirrors/electron)
automatically fetches and packages the source code on new releases and makes it
available in a [generic Forgejo repository](https://ayakael.net/mirrors/-/packages/generic/electron).
## Electron maintenance cycle
Security and bug fixes from upstream land randomly, but chromium security fixes land
basically weekly, around Tuesday in `America/Los_Angeles`. Minor releases only require
an upgrade to the `electron` packages. It is advisable to follow the weekly chromium
security fixes, although following `electron` minor releases is fine.
Major version upgrades require a more thorough approach. For one, most changes
can be backported from the `chromium` APKBUILD by diffing the `chromium` version
previously packaged with `electron` against the current one (set with the `_chromium` var).
You also need to rebuild all `electron` apps, with patches sometimes necessary when
upstream bumps to a new `nodejs` major version. Major `electron` releases happen
every two `chromium` major releases, with [dates known well ahead](https://chromiumdash.appspot.com/schedule)
and a few major releases of `electron` [officially supported at a time](https://www.electronjs.org/docs/latest/tutorial/electron-timelines).
Steps, in a nutshell (a command-line sketch follows this list):
1. Set `pkgver` to the up-to-date version
2. Optional: fetch the source code using `abuild snapshot`, making sure `gclient`
is in your path
3. Update the source checksum using `abuild checksum`
4. If it is a major update, backport changes from the `chromium` aport and bump `pkgrel`
for all electron-based applications.
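A minimal sketch of that routine, assuming the aport lives under `user/electron` and that `gclient` is already in `PATH`; the final local build is optional since CI builds it as well:

```sh
cd user/electron
# 1. edit APKBUILD: bump pkgver (and _chromium for major updates)
abuild snapshot    # 2. optional: fetch and repackage the source tree via gclient
abuild checksum    # 3. refresh sha512sums for the new tarball
abuild -r          # optional: test-build locally before CI picks it up
```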
## Why is this package still in testing
[Work is under way](https://gitlab.alpinelinux.org/alpine/aports/-/issues/15760)
to make this aport ready for `community`.
Until that happens, this package is also kept up to date against the latest
release of Alpine Linux in [Ayakael's Forge](https://ayakael.net/forge/-/packages/alpine/signal-desktop).
This is true of all of Ayakael's packages still in `testing`.

View file

@ -0,0 +1,17 @@
This was dropped for some reason in 6951c37cecd05979b232a39e5c10e6346a0f74ef
allows using /usr/bin/java instead of a downloaded one (that doesn't work on musl)
--
--- a/third_party/closure_compiler/compiler.py 2021-05-20 04:17:53.000000000 +0200
+++ b/third_party/closure_compiler/compiler.py 2021-05-20 04:17:53.000000000 +0200
@@ -13,8 +13,9 @@
_CURRENT_DIR = os.path.join(os.path.dirname(__file__))
-_JAVA_PATH = os.path.join(_CURRENT_DIR, "..", "jdk", "current", "bin", "java")
-assert os.path.isfile(_JAVA_PATH), "java only allowed in android builds"
+_JAVA_BIN = "java"
+_JDK_PATH = os.path.join(_CURRENT_DIR, "..", "jdk", "current", "bin", "java")
+_JAVA_PATH = _JDK_PATH if os.path.isfile(_JDK_PATH) else _JAVA_BIN
class Compiler(object):
"""Runs the Closure compiler on given source files to typecheck them

View file

@ -1,12 +1,14 @@
--- ./build/config/compiler/BUILD.gn.orig
+++ ./build/config/compiler/BUILD.gn
@@ -658,22 +658,6 @@
@@ -613,24 +613,6 @@
}
}
- # TODO(crbug.com/40283598): This causes binary size growth and potentially
- # TODO(crbug.com/1488374): This causes binary size growth and potentially
- # other problems.
- if (default_toolchain != "//build/toolchain/cros:target") {
- # TODO(crbug.com/1491036): This isn't supported by Cronet's mainline llvm version.
- if (default_toolchain != "//build/toolchain/cros:target" &&
- !llvm_android_mainline) {
- cflags += [
- "-mllvm",
- "-split-threshold-for-reg-with-hint=0",
@ -20,94 +22,50 @@
- }
- }
-
# TODO(crbug.com/40192287): Investigate why/if this should be needed.
# TODO(crbug.com/1235145): Investigate why/if this should be needed.
if (is_win) {
cflags += [ "/clang:-ffp-contract=off" ]
@@ -1273,8 +1257,8 @@
# simplicity we always explicitly set the architecture.
if (current_cpu == "x64") {
if (is_clang && !is_android && !is_fuchsia && !is_chromeos_device) {
- cflags += [ "--target=x86_64-unknown-linux-gnu" ]
- ldflags += [ "--target=x86_64-unknown-linux-gnu" ]
+ cflags += [ "--target=x86_64-alpine-linux-musl" ]
+ ldflags += [ "--target=x86_64-alpine-linux-musl" ]
} else {
cflags += [ "-m64" ]
ldflags += [ "-m64" ]
@@ -1282,8 +1266,8 @@
cflags += [ "-msse3" ]
} else if (current_cpu == "x86") {
if (is_clang && !is_android && !is_chromeos_device) {
- cflags += [ "--target=i386-unknown-linux-gnu" ]
- ldflags += [ "--target=i386-unknown-linux-gnu" ]
+ cflags += [ "--target=i586-alpine-linux-musl" ]
+ ldflags += [ "--target=i586-alpine-linux-musl" ]
} else {
cflags += [ "-m32" ]
ldflags += [ "-m32" ]
@@ -1294,8 +1278,8 @@
]
@@ -1005,17 +987,6 @@
# `-nodefaultlibs` from the linker invocation from Rust, which would be used
# to compile dylibs on Android, such as for constructing unit test APKs.
"-Cdefault-linker-libraries",
-
- # To make Rust .d files compatible with ninja
- "-Zdep-info-omit-d-target",
-
- # If a macro panics during compilation, show which macro and where it is
- # defined.
- "-Zmacro-backtrace",
-
- # For deterministic builds, keep the local machine's current working
- # directory from appearing in build outputs.
- "-Zremap-cwd-prefix=.",
]
if (!is_win || force_rustc_color_output) {
@@ -1182,8 +1153,8 @@
} else if (current_cpu == "arm") {
if (is_clang && !is_android && !is_chromeos_device) {
if (is_clang && !is_android && !is_nacl &&
!(is_chromeos_lacros && is_chromeos_device)) {
- cflags += [ "--target=arm-linux-gnueabihf" ]
- ldflags += [ "--target=arm-linux-gnueabihf" ]
+ cflags += [ "--target=armv7-alpine-linux-musleabihf" ]
+ ldflags += [ "--target=armv7-alpine-linux-musleabihf" ]
}
cflags += [
"-march=$arm_arch",
@@ -1306,8 +1290,8 @@
}
if (!is_nacl) {
cflags += [
@@ -1197,8 +1168,8 @@
} else if (current_cpu == "arm64") {
if (is_clang && !is_android && !is_fuchsia && !is_chromeos_device) {
if (is_clang && !is_android && !is_nacl && !is_fuchsia &&
!(is_chromeos_lacros && is_chromeos_device)) {
- cflags += [ "--target=aarch64-linux-gnu" ]
- ldflags += [ "--target=aarch64-linux-gnu" ]
+ cflags += [ "--target=aarch64-alpine-linux-musl" ]
+ ldflags += [ "--target=aarch64-alpine-linux-musl" ]
}
} else if (current_cpu == "mipsel") {
} else if (current_cpu == "mipsel" && !is_nacl) {
ldflags += [ "-Wl,--hash-style=sysv" ]
@@ -1551,22 +1535,22 @@
ldflags += [ "-maix64" ]
}
} else if (is_clang) {
- cflags += [ "--target=powerpc64le-unknown-linux-gnu" ]
- ldflags += [ "--target=powerpc64le-unknown-linux-gnu" ]
+ cflags += [ "--target=powerpc64le-alpine-linux-musl" ]
+ ldflags += [ "--target=powerpc64le-alpine-linux-musl" ]
} else {
cflags += [ "-m64" ]
ldflags += [ "-m64" ]
}
} else if (current_cpu == "riscv64") {
if (is_clang && !is_android) {
- cflags += [ "--target=riscv64-linux-gnu" ]
- ldflags += [ "--target=riscv64-linux-gnu" ]
+ cflags += [ "--target=riscv64-alpine-linux-musl" ]
+ ldflags += [ "--target=riscv64-alpine-linux-musl" ]
}
cflags += [ "-mabi=lp64d" ]
} else if (current_cpu == "loong64") {
if (is_clang) {
- cflags += [ "--target=loongarch64-linux-gnu" ]
- ldflags += [ "--target=loongarch64-linux-gnu" ]
+ cflags += [ "--target=loongarch64-alpine-linux-musl" ]
+ ldflags += [ "--target=loongarch64-alpine-linux-musl" ]
}
cflags += [
"-mabi=lp64d",
@@ -1574,8 +1558,8 @@
]
} else if (current_cpu == "s390x") {
if (is_clang) {
- cflags += [ "--target=s390x-unknown-linux-gnu" ]
- ldflags += [ "--target=s390x-unknown-linux-gnu" ]
+ cflags += [ "--target=s390x-alpine-linux-musl" ]
+ ldflags += [ "--target=s390x-alpine-linux-musl" ]
}
cflags += [ "-m64" ]
ldflags += [ "-m64" ]
@@ -2274,7 +2258,7 @@
@@ -1983,7 +1954,7 @@
defines = [ "_HAS_NODISCARD" ]
}
} else {
@ -118,7 +76,7 @@
cflags += [ "-Wextra" ]
--- ./build/config/rust.gni.orig
+++ ./build/config/rust.gni
@@ -178,11 +178,11 @@
@@ -186,11 +186,11 @@
rust_abi_target = ""
if (is_linux || is_chromeos) {
if (current_cpu == "arm64") {
@ -133,32 +91,16 @@
} else if (current_cpu == "arm") {
if (arm_float_abi == "hard") {
float_suffix = "hf"
@@ -200,25 +200,21 @@
# The thumbv7 vs. armv7 distinction is for legacy reasons and both
# targets in fact target Thumb, see:
# https://github.com/rust-lang/rust/issues/44722
- if (arm_use_neon) {
- rust_abi_target = "thumbv7neon-unknown-linux-gnueabi" + float_suffix
- } else {
- rust_abi_target = "armv7-unknown-linux-gnueabi" + float_suffix
- }
@@ -199,13 +199,13 @@
}
if (arm_arch == "armv7-a" || arm_arch == "armv7") {
# No way to inform Rust about the -a suffix.
- rust_abi_target = "armv7-unknown-linux-gnueabi" + float_suffix
+ rust_abi_target = "armv7-alpine-linux-musleabi" + float_suffix
} else {
- rust_abi_target = "arm-unknown-linux-gnueabi" + float_suffix
+ rust_abi_target = "armv6-alpine-linux-musleabi" + float_suffix
}
} else if (current_cpu == "riscv64") {
- rust_abi_target = "riscv64gc-unknown-linux-gnu"
+ rust_abi_target = "riscv64-alpine-linux-musl"
} else if (current_cpu == "ppc64") {
- rust_abi_target = "powerpc64le-unknown-linux-gnu"
+ rust_abi_target = "powerpc64le-alpine-linux-musl"
} else if (current_cpu == "s390x") {
- rust_abi_target = "s390x-unknown-linux-gnu"
+ rust_abi_target = "s390x-alpine-linux-musl"
} else if (current_cpu == "loong64") {
- rust_abi_target = "loongarch64-unknown-linux-gnu"
+ rust_abi_target = "loongarch64-alpine-linux-musl"
} else {
# Best guess for other future platforms.
- rust_abi_target = current_cpu + "-unknown-linux-gnu"
@ -168,9 +110,9 @@
import("//build/config/android/abi.gni")
--- ./build/config/clang/BUILD.gn.orig
+++ ./build/config/clang/BUILD.gn
@@ -207,22 +207,23 @@
assert(false) # Unhandled cpu type
}
@@ -128,14 +128,15 @@
} else if (is_apple) {
_dir = "darwin"
} else if (is_linux || is_chromeos) {
+ _dir = "linux"
if (current_cpu == "x64") {
@ -185,18 +127,6 @@
} else if (current_cpu == "arm64") {
- _dir = "aarch64-unknown-linux-gnu"
+ _suffix = "-aarch64"
} else if (current_cpu == "loong64") {
- _dir = "loongarch64-unknown-linux-gnu"
+ _suffix = "-loongarch64"
} else if (current_cpu == "riscv64") {
- _dir = "riscv64-unknown-linux-gnu"
+ _suffix = "-riscv64"
} else if (current_cpu == "ppc64") {
- _dir = "ppc64le-unknown-linux-gnu"
+ _suffix = "-powerpc64le"
} else if (current_cpu == "s390x") {
- _dir = "s390x-unknown-linux-gnu"
+ _suffix = "-s390x"
} else {
assert(false) # Unhandled cpu type
}

View file

@ -2,7 +2,7 @@ diff --git a/net/dns/BUILD.gn b/net/dns/BUILD.gn
index f36bf68..805d9a6 100644
--- a/net/dns/BUILD.gn
+++ b/net/dns/BUILD.gn
@@ -142,8 +142,8 @@
@@ -130,8 +130,8 @@ source_set("dns") {
]
} else if (is_linux) {
sources += [
@ -13,11 +13,3 @@ index f36bf68..805d9a6 100644
]
} else if (is_posix) {
sources += [
@@ -455,7 +455,6 @@
if (is_android) {
sources += [ "dns_config_service_android_unittest.cc" ]
} else if (is_linux) {
- sources += [ "dns_config_service_linux_unittest.cc" ]
} else if (is_posix) {
sources += [ "dns_config_service_posix_unittest.cc" ]
}

View file

@ -2,6 +2,86 @@ safesprintf emitnull:
error: conversion from 'std::nullptr_t' to 'const internal::Arg' is ambiguous
const internal::Arg arg_array[] = { args... };
flatmap incompletetype:
error: static assertion failed due to requirement 'std::__is_complete_or_unbounded(std::__type_identity<std::pair<A, A>>{})': template argument must be a complete class or an unbounded array
static_assert(std::__is_complete_or_unbounded(__type_identity<_Tp>{}),
i18n, time:
various icu failures (new icu time formatting? internal api difference?)
a ton of these fail:
Expected equality of these values:
u"Monday 16 May Saturday 28 May"
Which is: u"Monday 16 May \x2013 Saturday 28 May"
DateIntervalFormat(begin_time, end_time, DATE_FORMAT_MONTH_WEEKDAY_DAY)
Which is: u"Monday 16\x2009\x2013\x2009Saturday 28 May"
../../base/i18n/time_formatting_unittest.cc:84: Failure
Expected equality of these values:
clock12h_pm
Which is: u"3:42 PM"
TimeFormatTimeOfDay(time)
Which is: u"3:42\x202FPM"
.. and so on
fileutiltest filetofile:
../../base/files/file_util_unittest.cc:2692: Failure
Value of: stream
Actual: true
Expected: false
stacktracetest: crashes (this doesn't seem to use execinfo so probably relies on glibc internal layout for tracing here)
platformthreadtest canchangethreadtype:
../../base/threading/platform_thread_unittest.cc:445: Failure
Expected equality of these values:
PlatformThread::CanChangeThreadType(ThreadType::kBackground, ThreadType::kResourceEfficient)
Which is: true
kCanIncreasePriority
Which is: false
scopedfdownershiptrackingtest crashonunownedclose: fails due to scoped-file-no-close.patch
stackcontainer customallocator:
../../base/containers/stack_container_unittest.cc:211: Failure
Expected equality of these values:
1
Allocator::deallocated
Which is: 0
nativelibrarytest loadlibrarypreferownsymbols: crashes (probably musl dlopen does not play nice here)
spantest empty: crashes (this looks fishy)
readelfbuildid: crashes (this looks like glibc dynamic linker semantics)
nss db unittest: various nss failures: e.g.:
../../net/cert/nss_cert_database_unittest.cc:209: Failure
Expected equality of these values:
OK
Which is: 0
cert_db_->ImportFromPKCS12(GetPublicSlot(), pkcs12_data, u"12345", true, nullptr)
Which is: -702
processutiltest cloneflags: fails in CI (ulimit? too many threads?)
../../base/process/process_util_unittest.cc:1434: Failure
Value of: process.IsValid()
Actual: false
Expected: true
addresstrackerlinuxnetlinktest:
../../net/base/address_tracker_linux_unittest.cc:886: Failure
Value of: child.process.IsValid()
Actual: false
Expected: true
ToAddressDoesNotDereference: ; Expected `get_for_extraction_cnt` to be 1 but got 0;
DataCapturedManyThreads: flaky
ProcessAlternativeServicesTest.Process*: crashed ?
--- a/base/strings/safe_sprintf_unittest.cc
+++ b/base/strings/safe_sprintf_unittest.cc
@@ -740,6 +740,7 @@
@ -20,3 +100,262 @@ safesprintf emitnull:
TEST(SafeSPrintfTest, PointerSize) {
// The internal data representation is a 64bit value, independent of the
--- a/base/containers/flat_map_unittest.cc
+++ b/base/containers/flat_map_unittest.cc
@@ -52,6 +52,7 @@
} // namespace
+#if 0
TEST(FlatMap, IncompleteType) {
struct A {
using Map = flat_map<A, A>;
@@ -65,6 +66,7 @@
A a;
}
+#endif
TEST(FlatMap, RangeConstructor) {
flat_map<int, int>::value_type input_vals[] = {
--- a/base/BUILD.gn
+++ b/base/BUILD.gn
@@ -3194,21 +3194,6 @@
"hash/md5_constexpr_unittest.cc",
"hash/md5_unittest.cc",
"hash/sha1_unittest.cc",
- "i18n/break_iterator_unittest.cc",
- "i18n/case_conversion_unittest.cc",
- "i18n/char_iterator_unittest.cc",
- "i18n/character_encoding_unittest.cc",
- "i18n/file_util_icu_unittest.cc",
- "i18n/icu_string_conversions_unittest.cc",
- "i18n/icu_util_unittest.cc",
- "i18n/message_formatter_unittest.cc",
- "i18n/number_formatting_unittest.cc",
- "i18n/rtl_unittest.cc",
- "i18n/streaming_utf8_validator_unittest.cc",
- "i18n/string_search_unittest.cc",
- "i18n/time_formatting_unittest.cc",
- "i18n/timezone_unittest.cc",
- "i18n/transliterator_unittest.cc",
"immediate_crash_unittest.cc",
"json/json_parser_unittest.cc",
"json/json_reader_unittest.cc",
--- a/base/files/file_util_unittest.cc
+++ b/base/files/file_util_unittest.cc
@@ -2686,6 +2686,7 @@
}
}
+#if 0
TEST_F(FileUtilTest, FileToFILE) {
File file;
FILE* stream = FileToFILE(std::move(file), "w");
@@ -2700,6 +2701,7 @@
EXPECT_FALSE(file.IsValid());
EXPECT_TRUE(CloseFile(stream));
}
+#endif
TEST_F(FileUtilTest, FILEToFile) {
ScopedFILE stream;
--- a/base/debug/stack_trace_unittest.cc
+++ b/base/debug/stack_trace_unittest.cc
@@ -345,6 +345,7 @@
// sometimes we read fp / pc from the place that previously held
// uninitialized value.
// TODO(crbug.com/1132511): Enable this test on Fuchsia.
+#if 0
#if defined(MEMORY_SANITIZER) || BUILDFLAG(IS_FUCHSIA)
#define MAYBE_TraceStackFramePointersFromBuffer \
DISABLED_TraceStackFramePointersFromBuffer
@@ -357,6 +358,7 @@
const void* frames[kDepth];
ExpectStackFramePointers<kDepth>(frames, kDepth, /*copy_stack=*/true);
}
+#endif
#if BUILDFLAG(IS_ANDROID) || BUILDFLAG(IS_APPLE)
#define MAYBE_StackEnd StackEnd
--- a/base/threading/platform_thread_unittest.cc
+++ b/base/threading/platform_thread_unittest.cc
@@ -416,6 +416,7 @@
// platforms for all priorities. This not being the case. This test documents
// and hardcodes what we know. Please inform scheduler-dev@chromium.org if this
// proprerty changes for a given platform.
+#if 0
TEST(PlatformThreadTest, CanChangeThreadType) {
#if BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS)
// On Ubuntu, RLIMIT_NICE and RLIMIT_RTPRIO are 0 by default, so we won't be
@@ -472,6 +473,7 @@
ThreadType::kBackground));
#endif
}
+#endif
TEST(PlatformThreadTest, SetCurrentThreadTypeTest) {
TestPriorityResultingFromThreadType(ThreadType::kBackground,
--- a/base/files/scoped_file_linux_unittest.cc
+++ b/base/files/scoped_file_linux_unittest.cc
@@ -42,11 +42,13 @@
EXPECT_DEATH(ScopedFD(fd.get()), "");
}
+#if 0
TEST_F(ScopedFDOwnershipTrackingTest, CrashOnUnownedClose) {
ScopedFD fd = OpenFD();
subtle::EnableFDOwnershipEnforcement(true);
EXPECT_DEATH(close(fd.get()), "");
}
+#endif
#endif // defined(GTEST_HAS_DEATH_TEST)
--- a/base/native_library_unittest.cc
+++ b/base/native_library_unittest.cc
@@ -139,6 +139,7 @@
// Verifies that the |prefer_own_symbols| option satisfies its guarantee that
// a loaded library will always prefer local symbol resolution before
// considering global symbols.
+#if 0
TEST(NativeLibraryTest, LoadLibraryPreferOwnSymbols) {
NativeLibraryOptions options;
options.prefer_own_symbols = true;
@@ -171,6 +172,7 @@
EXPECT_EQ(2, NativeLibraryTestIncrement());
EXPECT_EQ(3, NativeLibraryTestIncrement());
}
+#endif
#endif // !BUILDFLAG(IS_ANDROID) && !defined(THREAD_SANITIZER) && \
// !defined(MEMORY_SANITIZER)
--- a/base/containers/span_unittest.cc
+++ b/base/containers/span_unittest.cc
@@ -995,6 +995,7 @@
}
}
+#if 0
TEST(SpanTest, Empty) {
{
span<int> span;
@@ -1014,6 +1015,7 @@
EXPECT_TRUE(span_of_checked_iterators.empty());
}
}
+#endif
TEST(SpanTest, OperatorAt) {
static constexpr int kArray[] = {1, 6, 1, 8, 0};
--- a/base/debug/elf_reader_unittest.cc
+++ b/base/debug/elf_reader_unittest.cc
@@ -194,6 +194,7 @@
}
}
+#if 0
TEST(ElfReaderTestWithCurrentImage, ReadElfBuildId) {
#if BUILDFLAG(IS_ANDROID)
// On Android the library loader memory maps the full so file.
@@ -229,6 +230,7 @@
UnloadNativeLibrary(library);
#endif
}
+#endif
} // namespace debug
} // namespace base
--- a/net/BUILD.gn
+++ b/net/BUILD.gn
@@ -4826,7 +4826,6 @@
sources += [
"cert/internal/system_trust_store_nss_unittest.cc",
"cert/internal/trust_store_nss_unittest.cc",
- "cert/nss_cert_database_unittest.cc",
"cert/x509_util_nss_unittest.cc",
]
if (!is_castos) {
--- a/base/process/process_util_unittest.cc
+++ b/base/process/process_util_unittest.cc
@@ -1419,7 +1419,7 @@
return kSuccess;
}
-#if defined(CLONE_NEWUSER) && defined(CLONE_NEWPID)
+#if 0 && defined(CLONE_NEWUSER) && defined(CLONE_NEWPID)
TEST_F(ProcessUtilTest, CloneFlags) {
if (!PathExists(FilePath("/proc/self/ns/user")) ||
!PathExists(FilePath("/proc/self/ns/pid"))) {
--- a/net/base/address_tracker_linux_unittest.cc
+++ b/net/base/address_tracker_linux_unittest.cc
@@ -831,6 +831,7 @@
//
// This test creates multiple concurrent `AddressTrackerLinux` instances in
// separate processes, each in their own PID namespaces.
+#if 0
TEST(AddressTrackerLinuxNetlinkTest, TestInitializeTwoTrackersInPidNamespaces) {
// This test initializes `kNumChildren` instances of `AddressTrackerLinux` in
// tracking mode, each in their own child process running in a PID namespace.
@@ -901,6 +902,7 @@
ASSERT_EQ(exit_code, 0);
}
}
+#endif
MULTIPROCESS_TEST_MAIN(ChildProcessInitializeTrackerForTesting) {
base::test::TaskEnvironment task_env(
--- a/base/trace_event/trace_event_unittest.cc
+++ b/base/trace_event/trace_event_unittest.cc
@@ -1368,6 +1368,7 @@
}
// Test that data sent from multiple threads is gathered
+#if 0
TEST_F(TraceEventTestFixture, DataCapturedManyThreads) {
BeginTrace();
@@ -1408,6 +1409,7 @@
delete task_complete_events[i];
}
}
+#endif
// Test that thread and process names show up in the trace.
// In SDK build, thread names are not tracked inside //base. Instead, there's
--- a/base/allocator/partition_allocator/src/partition_alloc/pointers/raw_ptr_unittest.cc
+++ b/base/allocator/partition_allocator/src/partition_alloc/pointers/raw_ptr_unittest.cc
@@ -1481,6 +1481,7 @@
// `base::to_address()` will use the dereference operator. This is not
// what we want; this test enforces extraction semantics for
// `to_address()`.
+#if 0
TEST_F(RawPtrTest, ToAddressDoesNotDereference) {
CountingRawPtr<int> ptr = nullptr;
int* raw = base::to_address(ptr);
@@ -1492,6 +1493,7 @@
.get_for_duplication_cnt = 0}),
CountersMatch());
}
+#endif
TEST_F(RawPtrTest, ToAddressGivesBackRawAddress) {
int* raw = nullptr;
--- a/net/http/http_stream_factory_unittest.cc
+++ b/net/http/http_stream_factory_unittest.cc
@@ -3477,6 +3477,7 @@
DefaultCTPolicyEnforcer ct_policy_enforcer_;
};
+#if 0
TEST_F(ProcessAlternativeServicesTest, ProcessEmptyAltSvc) {
session_ =
std::make_unique<HttpNetworkSession>(session_params_, session_context_);
@@ -3585,6 +3586,7 @@
alternatives[0].host_port_pair());
EXPECT_EQ(0u, alternatives[0].advertised_versions().size());
}
+#endif
} // namespace

View file

@ -1,127 +0,0 @@
diff --git a/electron/BUILD.gn.orig b/electron/BUILD.gn
index b08f434..4062428 100644
--- a/electron/BUILD.gn.orig
+++ b/electron/BUILD.gn
@@ -44,7 +44,6 @@ if (is_mac) {
if (is_linux) {
import("//build/config/linux/pkg_config.gni")
- import("//electron/build/linux/strip_binary.gni")
import("//tools/generate_stubs/rules.gni")
pkg_config("gio_unix") {
@@ -1424,18 +1423,6 @@ dist_zip("electron_dist_zip") {
":licenses",
]
if (is_linux) {
- if (is_official_build) {
- data_deps += [
- ":strip_chrome_crashpad_handler",
- ":strip_chrome_sandbox",
- ":strip_electron_binary",
- ":strip_libEGL_shlib",
- ":strip_libGLESv2_shlib",
- ":strip_libffmpeg_shlib",
- ":strip_libvk_swiftshader_shlib",
- ]
- }
-
data_deps += [ "//sandbox/linux:chrome_sandbox" ]
}
deps = data_deps
@@ -1481,16 +1468,6 @@ group("electron_mksnapshot") {
dist_zip("electron_mksnapshot_zip") {
data_deps = mksnapshot_deps
- if (is_linux && is_official_build) {
- data_deps += [
- ":strip_libEGL_shlib",
- ":strip_libGLESv2_shlib",
- ":strip_libffmpeg_shlib",
- ":strip_libvk_swiftshader_shlib",
- ":strip_mksnapshot_binary",
- ":strip_v8_context_snapshot_generator_binary",
- ]
- }
deps = data_deps
outputs = [ "$root_build_dir/mksnapshot.zip" ]
}
@@ -1637,78 +1614,3 @@ group("release_build") {
]
}
}
-
-if (is_linux && is_official_build) {
- strip_binary("strip_electron_binary") {
- binary_input = "$root_out_dir/$electron_project_name"
- symbol_output = "$root_out_dir/debug/$electron_project_name.debug"
- compress_debug_sections = true
- deps = [ ":electron_app" ]
- }
-
- strip_binary("strip_chrome_crashpad_handler") {
- binary_input = "$root_out_dir/chrome_crashpad_handler"
- symbol_output = "$root_out_dir/debug/chrome_crashpad_handler.debug"
- compress_debug_sections = true
- deps = [ "//components/crash/core/app:chrome_crashpad_handler" ]
- }
-
- strip_binary("strip_chrome_sandbox") {
- binary_input = "$root_out_dir/chrome_sandbox"
- symbol_output = "$root_out_dir/debug/chrome-sandbox.debug"
- compress_debug_sections = true
- deps = [ "//sandbox/linux:chrome_sandbox" ]
- }
-
- strip_binary("strip_libEGL_shlib") {
- binary_input = "$root_out_dir/libEGL.so"
- symbol_output = "$root_out_dir/debug/libEGL.so.debug"
- compress_debug_sections = true
- deps = [ "//third_party/angle:libEGL" ]
- }
-
- strip_binary("strip_libGLESv2_shlib") {
- binary_input = "$root_out_dir/libGLESv2.so"
- symbol_output = "$root_out_dir/debug/libGLESv2.so.debug"
- compress_debug_sections = true
- deps = [ "//third_party/angle:libGLESv2" ]
- }
-
- strip_binary("strip_libffmpeg_shlib") {
- binary_input = "$root_out_dir/libffmpeg.so"
- symbol_output = "$root_out_dir/debug/libffmpeg.so.debug"
- compress_debug_sections = true
- deps = [ "//third_party/ffmpeg" ]
- }
-
- strip_binary("strip_libvk_swiftshader_shlib") {
- binary_input = "$root_out_dir/libvk_swiftshader.so"
- symbol_output = "$root_out_dir/debug/libvk_swiftshader.so.debug"
- compress_debug_sections = true
- deps = [ "//third_party/swiftshader/src/Vulkan:swiftshader_libvulkan" ]
- }
-
- strip_binary("strip_mksnapshot_binary") {
- _binary_path = rebase_path(
- get_label_info(
- ":v8_context_snapshot_generator($v8_snapshot_toolchain)",
- "root_out_dir") + "/mksnapshot",
- root_build_dir)
- binary_input = "$root_out_dir/$_binary_path"
- symbol_output = "$root_out_dir/debug/${_binary_path}.debug"
- compress_debug_sections = true
- deps = mksnapshot_deps
- }
-
- strip_binary("strip_v8_context_snapshot_generator_binary") {
- _binary_path = rebase_path(
- get_label_info(
- ":v8_context_snapshot_generator($v8_snapshot_toolchain)",
- "root_out_dir") + "/v8_context_snapshot_generator",
- root_build_dir)
- binary_input = "$root_out_dir/$_binary_path"
- symbol_output = "$root_out_dir/debug/${_binary_path}.debug"
- compress_debug_sections = true
- deps = mksnapshot_deps
- }
-}

View file

@ -1,16 +0,0 @@
diff --git a/./electron/shell/browser/ui/file_dialog.h.orig b/./electron/shell/browser/ui/file_dialog.h
index 6cdfc7b..f7757da 100644
--- a/./electron/shell/browser/ui/file_dialog.h.orig
+++ b/./electron/shell/browser/ui/file_dialog.h
@@ -13,10 +13,6 @@
#include "base/files/file_path.h"
#include "base/memory/raw_ptr_exclusion.h"
-#if BUILDFLAG(IS_LINUX)
-#include <bits/stdint-uintn.h>
-#endif
-
namespace electron {
class NativeWindow;
}


@ -1,12 +0,0 @@
diff --git a/electron/BUILD.gn.orig b/electron/BUILD.gn
index 235c7abd3e8..088c24ac45e 100644
--- a/electron/BUILD.gn.orig
+++ b/electron/BUILD.gn
@@ -1569,7 +1569,6 @@ group("copy_node_headers") {
":generate_node_headers",
":node_gypi_headers",
":node_version_header",
- ":zlib_headers",
]
}


@ -1,143 +0,0 @@
diff --git a/electron/script/generate-config-gypi.py.orig b/electron/script/generate-config-gypi.py
index 58c973b..c215d90 100755
--- a/electron/script/generate-config-gypi.py.orig
+++ b/electron/script/generate-config-gypi.py
@@ -64,6 +64,11 @@ def main(target_file, target_cpu):
# in common.gypi
if 'clang' in v:
del v['clang']
+
+ with open(os.path.join(NODE_DIR, 'use_system.txt')) as f:
+ for dep in f.read().strip().split(' '):
+ if v.get(f'node_shared_{dep}') is not None:
+ v[f'node_shared_{dep}'] = 'true'
with open(target_file, 'w+', encoding='utf-8') as file_out:
file_out.write(pprint.pformat(config, indent=2))
diff --git a/third_party/electron_node/node.gni.orig b/third_party/electron_node/node.gni
index 73bf383..1c80d5a 100644
--- a/third_party/electron_node/node.gni.orig
+++ b/third_party/electron_node/node.gni
@@ -73,6 +73,7 @@ declare_args() {
node_use_amaro = true
# Allows downstream packagers (eg. Linux distributions) to build against system shared libraries.
+ use_system_ada = false
use_system_cares = false
use_system_nghttp2 = false
use_system_llhttp = false
diff --git a/third_party/electron_node/unofficial.gni.orig b/third_party/electron_node/unofficial.gni
index d61a9bd..8bf990e 100644
--- a/third_party/electron_node/unofficial.gni.orig
+++ b/third_party/electron_node/unofficial.gni
@@ -143,7 +143,6 @@ template("node_gn_build") {
"deps/googletest:googletest_config",
]
public_deps = [
- "deps/ada",
"deps/uv",
"//electron:electron_js2c",
"deps/simdjson",
@@ -151,10 +150,7 @@ template("node_gn_build") {
]
deps = [
":run_node_js2c",
- "deps/cares",
- "deps/histogram",
"deps/nbytes",
- "deps/nghttp2",
"deps/postject",
"deps/sqlite",
"deps/uvwasi",
@@ -182,12 +178,30 @@ template("node_gn_build") {
if (is_posix) {
configs -= [ "//build/config/gcc:symbol_visibility_hidden" ]
configs += [ "//build/config/gcc:symbol_visibility_default" ]
+ libs = []
+ include_dirs = []
}
if (use_system_llhttp) {
libs += [ "llhttp" ]
} else {
deps += [ "deps/llhttp" ]
}
+ if (use_system_cares) {
+ libs += [ "cares" ]
+ } else {
+ deps += [ "deps/cares" ]
+ }
+ if (use_system_nghttp2) {
+ libs += [ "nghttp2" ]
+ } else {
+ deps += [ "deps/nghttp2" ]
+ }
+ if (use_system_ada) {
+ libs += [ "ada" ]
+ include_dirs += [ "/usr/include/ada" ]
+ } else {
+ public_deps += [ "deps/ada" ]
+ }
if (use_system_histogram) {
libs += [ "hdr_histogram" ]
include_dirs += [ "/usr/include/hdr" ]
@@ -208,7 +222,7 @@ template("node_gn_build") {
"src/inspector:node_protocol_generated_sources",
"src/inspector:v8_inspector_compress_protocol_json",
]
- include_dirs = [
+ include_dirs += [
"$target_gen_dir/src",
"$target_gen_dir/src/inspector",
"$node_inspector_protocol_path",
@@ -222,17 +236,18 @@ template("node_gn_build") {
sources += node_inspector.node_inspector_sources +
node_inspector.node_inspector_generated_sources
}
- if (is_linux) {
- import("//build/config/linux/pkg_config.gni")
- if (use_system_cares) {
- pkg_config("cares") {
- packages = [ "libcares" ]
- }
- }
- if (use_system_nghttp2) {
- pkg_config("nghttp2") {
- packages = [ "libnghttp2" ]
- }
+ }
+
+ if (is_linux) {
+ import("//build/config/linux/pkg_config.gni")
+ if (use_system_cares) {
+ pkg_config("cares") {
+ packages = [ "libcares" ]
+ }
+ }
+ if (use_system_nghttp2) {
+ pkg_config("nghttp2") {
+ packages = [ "libnghttp2" ]
}
}
}
diff --git a/third_party/electron_node/unofficial.gni.orig b/third_party/electron_node/unofficial.gni
index 6bcc40b..7e383b2 100644
--- a/third_party/electron_node/unofficial.gni.orig
+++ b/third_party/electron_node/unofficial.gni
@@ -142,7 +142,6 @@ template("node_gn_build") {
public_configs = [
":node_external_config",
"deps/googletest:googletest_config",
- ":zstd_include_config"
]
public_deps = [
"deps/ada",
@@ -163,8 +162,6 @@ template("node_gn_build") {
"//third_party/zlib",
"//third_party/brotli:dec",
"//third_party/brotli:enc",
- "//third_party/zstd:decompress",
- "//third_party/zstd:headers",
"$node_simdutf_path",
"$node_v8_path:v8_libplatform",
]


@ -1,17 +0,0 @@
diff --git a/electron/.yarnrc.yml.orig b/electron/.yarnrc.yml
index ca0a580..a388ff9 100644
--- a/electron/.yarnrc.yml.orig
+++ b/electron/.yarnrc.yml
@@ -1,12 +1,3 @@
enableScripts: false
-nmHoistingLimits: workspaces
-
nodeLinker: node-modules
-
-npmMinimalAgeGate: 10080
-
-npmPreapprovedPackages:
- - "@electron/*"
-
-yarnPath: .yarn/releases/yarn-4.12.0.cjs


@ -1,10 +1,12 @@
--- ./third_party/test_fonts/fontconfig/generate_fontconfig_caches.cc.orig
+++ ./third_party/test_fonts/fontconfig/generate_fontconfig_caches.cc
instead of hardcoding the version, use the defined macro.
--
--- a/third_party/test_fonts/fontconfig/generate_fontconfig_caches.cc
+++ b/third_party/test_fonts/fontconfig/generate_fontconfig_caches.cc
@@ -56,7 +56,7 @@
FcFini();
// Check existence of intended fontconfig cache file.
- auto cache = fontconfig_caches + "/" + kCacheKey + "-le64.cache-11";
- auto cache = fontconfig_caches + "/" + kCacheKey + "-le64.cache-9";
+ auto cache = fontconfig_caches + "/" + kCacheKey + "-le64.cache-" + FC_CACHE_VERSION;
bool cache_exists = access(cache.c_str(), F_OK) == 0;
return !cache_exists;


@ -1,13 +0,0 @@
--- a/media/ffmpeg/ffmpeg_common.cc
+++ b/media/ffmpeg/ffmpeg_common.cc
@@ -1046,8 +1046,8 @@
// This should match the configured lists in //third_party/ffmpeg.
static constexpr std::string_view kAllowedAudioCodecs =
- "vorbis,libopus,flac,pcm_u8,pcm_s16le,pcm_s24le,pcm_s32le,pcm_f32le,"
- "mp3,pcm_s16be,pcm_s24be,pcm_mulaw,pcm_alaw" EXTRA_CODECS;
+ "vorbis,opus,libopus,flac,pcm_u8,pcm_s16le,pcm_s24le,pcm_s32le,pcm_f32le,"
+ "mp3float,mp3,pcm_s16be,pcm_s24be,pcm_mulaw,pcm_alaw" EXTRA_CODECS;
#undef EXTRA_CODECS
return kAllowedAudioCodecs.data();


@ -0,0 +1,13 @@
Patch-Source: https://github.com/void-linux/void-packages/blob/378db3cf5087877588aebaaa8ca3c9d94dfb54e0/srcpkgs/chromium/patches/fix-missing-cstdint-include-musl.patch
fixed in https://github.com/google/quiche/commit/4d4820f0a959be7fa22285d114a5b5b8676f10fe
--
--- a/net/third_party/quiche/src/quiche/http2/adapter/window_manager.h
+++ b/net/third_party/quiche/src/quiche/http2/adapter/window_manager.h
@@ -3,6 +3,7 @@
#include <stddef.h>
+#include <cstdint>
#include <functional>
#include "common/platform/api/quiche_export.h"


@ -0,0 +1,12 @@
--- a/media/filters/ffmpeg_glue.cc
+++ b/media/filters/ffmpeg_glue.cc
@@ -142,7 +142,7 @@ const char* FFmpegGlue::GetAllowedAudioDecoders() {
static const base::NoDestructor<std::string> kAllowedAudioCodecs([]() {
// This should match the configured lists in //third_party/ffmpeg.
std::string allowed_decoders(
- "vorbis,libopus,flac,pcm_u8,pcm_s16le,pcm_s24le,pcm_s32le,pcm_f32le,"
+ "vorbis,opus,libopus,flac,pcm_u8,pcm_s16le,pcm_s24le,pcm_s32le,pcm_f32le,"
"mp3,pcm_s16be,pcm_s24be,pcm_mulaw,pcm_alaw");
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
allowed_decoders += ",aac";


@ -1,27 +0,0 @@
From 6f4685cff1ab8c68de98c0731bb8afaf8a05a723 Mon Sep 17 00:00:00 2001
From: knuxify <knuxify@gmail.com>
Date: Sat, 5 Apr 2025 14:10:37 +0200
Subject: [PATCH] Do not generate license file for headless-shell build
---
headless/BUILD.gn | 4 ----
1 file changed, 4 deletions(-)
diff --git a/headless/BUILD.gn b/headless/BUILD.gn
index 798bb22..9d83f49 100644
--- a/headless/BUILD.gn
+++ b/headless/BUILD.gn
@@ -934,10 +934,6 @@ executable("headless_shell") {
deps = [ ":headless_shell_lib" ]
- if (proprietary_codecs) {
- deps += [ ":generate_headless_shell_license_file" ]
- }
-
if (!headless_use_embedded_resources) {
data = [
"$root_out_dir/headless_lib_data.pak",
--
2.49.0


@ -0,0 +1,15 @@
they fixed the import to be glibc conditional, but now nothing is imported so
all the string functions are missing.
--
--- a/chrome/browser/metrics/chrome_browser_main_extra_parts_metrics.cc
+++ b/chrome/browser/metrics/chrome_browser_main_extra_parts_metrics.cc
@@ -61,8 +61,7 @@
// TODO(crbug.com/1052397): Revisit the macro expression once build flag switch
// of lacros-chrome is complete.
-#if defined(__GLIBC__) && (BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS_LACROS))
-#include <gnu/libc-version.h>
+#if BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS_LACROS)
#include "base/linux_util.h"
#include "base/strings/string_split.h"


@ -0,0 +1,269 @@
missing libstdc++13 includes
--
--- a/third_party/openscreen/src/platform/base/error.h
+++ b/third_party/openscreen/src/platform/base/error.h
@@ -6,6 +6,7 @@
#define PLATFORM_BASE_ERROR_H_
#include <cassert>
+#include <cstdint>
#include <ostream>
#include <string>
#include <utility>
--- a/ui/base/prediction/kalman_filter.h
+++ b/ui/base/prediction/kalman_filter.h
@@ -8,6 +8,8 @@
#include "base/component_export.h"
#include "ui/gfx/geometry/matrix3_f.h"
+#include <cstdint>
+
namespace ui {
// This Kalman filter is used to predict state in one axles.
--- a/ui/events/types/scroll_types.h
+++ b/ui/events/types/scroll_types.h
@@ -5,6 +5,8 @@
#ifndef UI_EVENTS_TYPES_SCROLL_TYPES_H_
#define UI_EVENTS_TYPES_SCROLL_TYPES_H_
+#include <cstdint>
+
namespace ui {
enum class ScrollGranularity : uint8_t {
--- a/third_party/webrtc/rtc_base/system/file_wrapper.h
+++ b/third_party/webrtc/rtc_base/system/file_wrapper.h
@@ -14,6 +14,7 @@
#include <stddef.h>
#include <stdio.h>
+#include <cstdint>
#include <string>
#include "absl/strings/string_view.h"
--- a/third_party/pdfium/constants/annotation_flags.h
+++ b/third_party/pdfium/constants/annotation_flags.h
@@ -5,6 +5,8 @@
#ifndef CONSTANTS_ANNOTATION_FLAGS_H_
#define CONSTANTS_ANNOTATION_FLAGS_H_
+#include <cstdint>
+
namespace pdfium {
namespace annotation_flags {
--- a/third_party/s2cellid/src/s2/util/math/mathutil.h
+++ b/third_party/s2cellid/src/s2/util/math/mathutil.h
@@ -21,6 +21,7 @@
#ifndef S2_UTIL_MATH_MATHUTIL_H_
#define S2_UTIL_MATH_MATHUTIL_H_
+#include <cstdint>
#include <type_traits>
class MathUtil {
--- a/third_party/s2cellid/src/s2/s1angle.h
+++ b/third_party/s2cellid/src/s2/s1angle.h
@@ -24,6 +24,7 @@
#define S2_S1ANGLE_H_
#include <math.h>
+#include <cstdint>
#include <limits>
#include <ostream>
#include <type_traits>
--- a/third_party/maldoca/src/maldoca/ole/header.h
+++ b/third_party/maldoca/src/maldoca/ole/header.h
@@ -45,6 +45,8 @@
#include "absl/strings/string_view.h"
+#include <cstdint>
+
namespace maldoca {
class OLEHeader {
--- a/components/password_manager/core/browser/generation/password_generator.h
+++ b/components/password_manager/core/browser/generation/password_generator.h
@@ -5,6 +5,7 @@
#ifndef COMPONENTS_PASSWORD_MANAGER_CORE_BROWSER_GENERATION_PASSWORD_GENERATOR_H_
#define COMPONENTS_PASSWORD_MANAGER_CORE_BROWSER_GENERATION_PASSWORD_GENERATOR_H_
+#include <cstdint>
#include <string>
--- a/base/debug/profiler.h
+++ b/base/debug/profiler.h
@@ -7,6 +7,7 @@
#include <stddef.h>
+#include <cstdint>
#include <string>
#include "base/base_export.h"
--- a/components/dom_distiller/core/url_utils.h
+++ b/components/dom_distiller/core/url_utils.h
@@ -5,6 +5,7 @@
#ifndef COMPONENTS_DOM_DISTILLER_CORE_URL_UTILS_H_
#define COMPONENTS_DOM_DISTILLER_CORE_URL_UTILS_H_
+#include <cstdint>
#include <string>
#include "base/strings/string_piece_forward.h"
--- a/components/feature_engagement/internal/event_storage_validator.h
+++ b/components/feature_engagement/internal/event_storage_validator.h
@@ -5,6 +5,7 @@
#ifndef COMPONENTS_FEATURE_ENGAGEMENT_INTERNAL_EVENT_STORAGE_VALIDATOR_H_
#define COMPONENTS_FEATURE_ENGAGEMENT_INTERNAL_EVENT_STORAGE_VALIDATOR_H_
+#include <cstdint>
#include <string>
namespace feature_engagement {
--- a/chrome/test/chromedriver/chrome/web_view_impl.cc
+++ b/chrome/test/chromedriver/chrome/web_view_impl.cc
@@ -8,6 +8,7 @@
#include <algorithm>
#include <memory>
#include <queue>
+#include <string>
#include <utility>
#include <vector>
--- a/cc/trees/target_property.cc
+++ b/cc/trees/target_property.cc
@@ -2,6 +2,8 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
+#include <cstdint>
+
#include "cc/trees/target_property.h"
#include "ui/gfx/animation/keyframe/target_property.h"
--- a/gpu/config/gpu_feature_info.h
+++ b/gpu/config/gpu_feature_info.h
@@ -5,6 +5,7 @@
#ifndef GPU_CONFIG_GPU_FEATURE_INFO_H_
#define GPU_CONFIG_GPU_FEATURE_INFO_H_
+#include <cstdint>
#include <string>
#include <vector>
--- a/gpu/config/gpu_driver_bug_workarounds.h
+++ b/gpu/config/gpu_driver_bug_workarounds.h
@@ -5,6 +5,7 @@
#ifndef GPU_CONFIG_GPU_DRIVER_BUG_WORKAROUNDS_H_
#define GPU_CONFIG_GPU_DRIVER_BUG_WORKAROUNDS_H_
+#include <cstdint>
#include <vector>
#include "build/build_config.h"
--- a/third_party/blink/public/common/page_state/page_state.h
+++ b/third_party/blink/public/common/page_state/page_state.h
@@ -5,6 +5,7 @@
#ifndef THIRD_PARTY_BLINK_PUBLIC_COMMON_PAGE_STATE_PAGE_STATE_H_
#define THIRD_PARTY_BLINK_PUBLIC_COMMON_PAGE_STATE_PAGE_STATE_H_
+#include <cstdint>
#include <string>
#include <vector>
--- a/pdf/document_attachment_info.h
+++ b/pdf/document_attachment_info.h
@@ -5,6 +5,7 @@
#ifndef PDF_DOCUMENT_ATTACHMENT_INFO_H_
#define PDF_DOCUMENT_ATTACHMENT_INFO_H_
+#include <cstdint>
#include <string>
--- a/components/payments/content/utility/fingerprint_parser.h
+++ b/components/payments/content/utility/fingerprint_parser.h
@@ -7,6 +7,7 @@
#include <stddef.h>
+#include <cstdint>
#include <string>
#include <vector>
--- a/media/base/video_transformation.h
+++ b/media/base/video_transformation.h
@@ -5,6 +5,7 @@
#ifndef MEDIA_BASE_VIDEO_TRANSFORMATION_H_
#define MEDIA_BASE_VIDEO_TRANSFORMATION_H_
+#include <cstdint>
#include <string>
#include "base/numerics/math_constants.h"
--- a/components/omnibox/browser/on_device_head_model.h
+++ b/components/omnibox/browser/on_device_head_model.h
@@ -5,6 +5,7 @@
#ifndef COMPONENTS_OMNIBOX_BROWSER_ON_DEVICE_HEAD_MODEL_H_
#define COMPONENTS_OMNIBOX_BROWSER_ON_DEVICE_HEAD_MODEL_H_
+#include <cstdint>
#include <string>
#include <utility>
#include <vector>
--- a/components/autofill/core/browser/autofill_ablation_study.h
+++ b/components/autofill/core/browser/autofill_ablation_study.h
@@ -5,6 +5,7 @@
#ifndef COMPONENTS_AUTOFILL_CORE_BROWSER_AUTOFILL_ABLATION_STUDY_H_
#define COMPONENTS_AUTOFILL_CORE_BROWSER_AUTOFILL_ABLATION_STUDY_H_
+#include <cstdint>
#include <string>
class GURL;
--- a/components/autofill/core/browser/strike_databases/strike_database_base.h
+++ b/components/autofill/core/browser/strike_databases/strike_database_base.h
@@ -5,6 +5,7 @@
#ifndef COMPONENTS_AUTOFILL_CORE_BROWSER_STRIKE_DATABASES_STRIKE_DATABASE_BASE_H_
#define COMPONENTS_AUTOFILL_CORE_BROWSER_STRIKE_DATABASES_STRIKE_DATABASE_BASE_H_
+#include <cstdint>
#include <map>
#include <string>
#include <vector>
--- a/chrome/browser/resource_coordinator/decision_details.h
+++ b/chrome/browser/resource_coordinator/decision_details.h
@@ -5,6 +5,7 @@
#ifndef CHROME_BROWSER_RESOURCE_COORDINATOR_DECISION_DETAILS_H_
#define CHROME_BROWSER_RESOURCE_COORDINATOR_DECISION_DETAILS_H_
+#include <cstdint>
#include <string>
#include <vector>
--- a/net/third_party/quiche/src/quiche/quic/core/qpack/qpack_stream_sender_delegate.h
+++ b/net/third_party/quiche/src/quiche/quic/core/qpack/qpack_stream_sender_delegate.h
@@ -8,6 +8,8 @@
#include "absl/strings/string_view.h"
#include "quiche/quic/platform/api/quic_export.h"
+#include <cstdint>
+
namespace quic {
// This interface writes encoder/decoder data to peer.
--- a/third_party/perfetto/include/perfetto/base/export.h
+++ b/third_party/perfetto/include/perfetto/base/export.h
@@ -17,6 +17,8 @@
#ifndef INCLUDE_PERFETTO_BASE_EXPORT_H_
#define INCLUDE_PERFETTO_BASE_EXPORT_H_
+#include <cstdint>
+
#include "perfetto/base/build_config.h"
#include "perfetto/public/abi/export.h"


@ -0,0 +1,13 @@
needed for PKEY_DISABLE_WRITE. these are documented as also being from sys/
mman.h with GNU_SOURCE, but musl doesn't do that, so these are strictly from
kernel headers
--- a/base/allocator/partition_allocator/src/partition_alloc/partition_alloc_unittest.cc
+++ b/base/allocator/partition_allocator/src/partition_alloc/partition_alloc_unittest.cc
@@ -60,6 +60,7 @@
#include <sys/mman.h>
#include <sys/resource.h>
#include <sys/time.h>
+#include <linux/mman.h>
#endif // BUILDFLAG(IS_POSIX)
#if BUILDFLAG(ENABLE_PARTITION_ALLOC_AS_MALLOC_SUPPORT) && BUILDFLAG(IS_MAC)
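Written in isolation, the same guard looks roughly like the sketch below; the patch above simply adds the UAPI include unconditionally for the POSIX build, so this is only an illustration of the idea:

```
#include <sys/mman.h>      // glibc defines PKEY_DISABLE_WRITE here under _GNU_SOURCE
#ifndef PKEY_DISABLE_WRITE
#include <linux/mman.h>    // musl: the flag only comes from the kernel UAPI header
#endif
// PKEY_DISABLE_WRITE is now usable regardless of libc.
```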


@ -0,0 +1,11 @@
--- ./v8/src/base/cpu.cc.orig
+++ ./v8/src/base/cpu.cc
@@ -14,7 +14,7 @@
#if V8_OS_LINUX
#include <linux/auxvec.h> // AT_HWCAP
#endif
-#if V8_GLIBC_PREREQ(2, 16) || V8_OS_ANDROID
+#if 1
#include <sys/auxv.h> // getauxval()
#endif
#if V8_OS_QNX
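This hunk ships without a description, but the intent can be read off the diff: V8 only includes `<sys/auxv.h>` when it detects glibc ≥ 2.16 or Android, yet musl also provides `getauxval()`, so the guard is simply forced on. Roughly, the call that becomes available is (sketch, not V8 code):

```
#include <sys/auxv.h>   // present on both glibc >= 2.16 and musl

unsigned long hwcaps() {
  return getauxval(AT_HWCAP);   // same behaviour on either libc
}
```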


@ -45,7 +45,7 @@ diff --git a/sandbox/linux/seccomp-bpf-helpers/syscall_sets.cc ./sandbox/linux/s
index d9d1882..0567557 100644
--- a/sandbox/linux/seccomp-bpf-helpers/syscall_sets.cc
+++ ./sandbox/linux/seccomp-bpf-helpers/syscall_sets.cc
@@ -438,6 +438,7 @@
@@ -392,6 +392,7 @@ bool SyscallSets::IsAllowedProcessStartOrDeath(int sysno) {
#if defined(__i386__)
case __NR_waitpid:
#endif
@ -53,7 +53,7 @@ index d9d1882..0567557 100644
return true;
case __NR_clone: // Should be parameter-restricted.
case __NR_setns: // Privileged.
@@ -450,7 +451,6 @@
@@ -404,7 +405,6 @@ bool SyscallSets::IsAllowedProcessStartOrDeath(int sysno) {
#if defined(__i386__) || defined(__x86_64__) || defined(__mips__)
case __NR_set_thread_area:
#endif
@ -61,16 +61,16 @@ index d9d1882..0567557 100644
case __NR_unshare:
#if !defined(__mips__) && !defined(__aarch64__)
case __NR_vfork:
@@ -549,6 +549,8 @@
@@ -514,6 +514,8 @@ bool SyscallSets::IsAllowedAddressSpaceAccess(int sysno) {
case __NR_mlock:
case __NR_munlock:
case __NR_munmap:
case __NR_mseal:
+ case __NR_mremap:
+ case __NR_membarrier:
return true;
case __NR_madvise:
case __NR_mincore:
@@ -566,7 +568,6 @@
@@ -531,7 +533,6 @@ bool SyscallSets::IsAllowedAddressSpaceAccess(int sysno) {
case __NR_modify_ldt:
#endif
case __NR_mprotect:


@ -0,0 +1,23 @@
use monotonic clock for pthread_cond_timedwait with musl too, since it supports
it
--
--- a/v8/src/base/platform/condition-variable.cc
+++ b/v8/src/base/platform/condition-variable.cc
@@ -16,7 +16,7 @@
ConditionVariable::ConditionVariable() {
#if (V8_OS_FREEBSD || V8_OS_NETBSD || V8_OS_OPENBSD || \
- (V8_OS_LINUX && V8_LIBC_GLIBC))
+ V8_OS_LINUX)
// On Free/Net/OpenBSD and Linux with glibc we can change the time
// source for pthread_cond_timedwait() to use the monotonic clock.
pthread_condattr_t attr;
@@ -92,7 +92,7 @@
&native_handle_, &mutex->native_handle(), &ts);
#else
#if (V8_OS_FREEBSD || V8_OS_NETBSD || V8_OS_OPENBSD || \
- (V8_OS_LINUX && V8_LIBC_GLIBC))
+ V8_OS_LINUX)
// On Free/Net/OpenBSD and Linux with glibc we can change the time
// source for pthread_cond_timedwait() to use the monotonic clock.
result = clock_gettime(CLOCK_MONOTONIC, &ts);
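The POSIX calls behind this are not glibc extensions, which is what makes relaxing the guard safe on musl. A self-contained sketch of the technique (illustrative only):

```
#include <pthread.h>
#include <time.h>

static pthread_cond_t cv;
static pthread_mutex_t mu = PTHREAD_MUTEX_INITIALIZER;

static void init_cv(void) {
  pthread_condattr_t attr;
  pthread_condattr_init(&attr);
  pthread_condattr_setclock(&attr, CLOCK_MONOTONIC);  // supported by musl too
  pthread_cond_init(&cv, &attr);
  pthread_condattr_destroy(&attr);
}

// Caller must hold mu; the deadline must use the clock chosen above.
static int wait_ms(long ms) {
  struct timespec ts;
  clock_gettime(CLOCK_MONOTONIC, &ts);
  ts.tv_sec += ms / 1000;
  ts.tv_nsec += (ms % 1000) * 1000000L;
  if (ts.tv_nsec >= 1000000000L) { ts.tv_sec++; ts.tv_nsec -= 1000000000L; }
  return pthread_cond_timedwait(&cv, &mu, &ts);
}
```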


@ -1,22 +0,0 @@
From 4b41417068045f11db9e7edead1447e93adb9073 Mon Sep 17 00:00:00 2001
From: LN Liberda <lauren@selfisekai.rocks>
Date: Sat, 28 Jun 2025 18:13:59 +0200
Subject: [PATCH] Test net without vendored python
---
net/test/python_utils.cc | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/net/test/python_utils.cc b/net/test/python_utils.cc
index 2cdc07dad9948..0b2d42a5bf126 100644
--- a/net/test/python_utils.cc
+++ b/net/test/python_utils.cc
@@ -47,7 +47,7 @@ bool GetPython3Command(base::CommandLine* python_cmd) {
#if BUILDFLAG(IS_WIN)
python_cmd->SetProgram(base::FilePath(FILE_PATH_LITERAL("vpython3.bat")));
#else
- python_cmd->SetProgram(base::FilePath(FILE_PATH_LITERAL("vpython3")));
+ python_cmd->SetProgram(base::FilePath(FILE_PATH_LITERAL("python3")));
#endif
#if BUILDFLAG(IS_MAC)


@ -1,39 +0,0 @@
ssl.wrap_socket() was removed in Python 3.12, needed for net_unittests.
Patch-Source: https://github.com/GoogleChromeLabs/pywebsocket3/pull/39
Modified (changed path) -lnl
From bc50ae9d451ca705edd6101d987b839e1a09d45e Mon Sep 17 00:00:00 2001
From: Sven Diederichs <22592421+zaurask@users.noreply.github.com>
Date: Thu, 28 Mar 2024 18:55:28 +0100
Subject: [PATCH] use ssl.SSLContext.wrap_socket rather than the deprecated
ssl.wrap_socket
---
pywebsocket3/websocket_server.py | 14 ++++++++------
1 file changed, 8 insertions(+), 6 deletions(-)
diff --git a/third_party/pywebsocket3/src/mod_pywebsocket/websocket_server.py b/third_party/pywebsocket3/src/mod_pywebsocket/websocket_server.py
index e7485ec..93ad6f1 100644
--- a/third_party/pywebsocket3/src/mod_pywebsocket/websocket_server.py
+++ b/third_party/pywebsocket3/src/mod_pywebsocket/websocket_server.py
@@ -157,12 +157,14 @@ class WebSocketServer(socketserver.ThreadingMixIn, BaseHTTPServer.HTTPServer):
client_cert_ = ssl.CERT_REQUIRED
else:
client_cert_ = ssl.CERT_NONE
- socket_ = ssl.wrap_socket(
- socket_,
- keyfile=server_options.private_key,
- certfile=server_options.certificate,
- ca_certs=server_options.tls_client_ca,
- cert_reqs=client_cert_)
+
+ ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS)
+ if server_options.certificate:
+ ssl_context.load_cert_chain(certfile=server_options.certificate, keyfile=server_options.private_key)
+ if server_options.tls_client_ca:
+ ssl_context.load_verify_locations(server_options.tls_client_ca)
+ ssl_context.verify_mode =client_cert_
+ socket_ = ssl_context.wrap_socket(socket_)
self._sockets.append((socket_, addrinfo))
def server_bind(self):


@ -37,16 +37,16 @@ for discussion about this, see https://www.openwall.com/lists/musl/2021/07/16/1
#define HAVE_FCNTL_H 1
--- a/base/debug/stack_trace.cc
+++ b/base/debug/stack_trace.cc
@@ -311,7 +311,7 @@
std::string StackTrace::ToStringWithPrefix(cstring_view prefix_string) const {
@@ -291,7 +291,7 @@
}
std::string StackTrace::ToStringWithPrefix(const char* prefix_string) const {
std::stringstream stream;
-#if !defined(__UCLIBC__) && !defined(_AIX)
+#if defined(__GLIBC__) && !defined(_AIX)
OutputToStreamWithPrefix(&stream, prefix_string);
#endif
return stream.str();
@@ -335,7 +335,7 @@
if (ShouldSuppressOutput()) {
return "Backtrace suppressed.";
}
@@ -301,7 +301,7 @@
}
std::ostream& operator<<(std::ostream& os, const StackTrace& s) {


@ -102,23 +102,23 @@ musl does not implement mallinfo()/mallinfo2()
#define HAVE_MALLOC_H 1
--- a/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_partition_alloc.cc
+++ b/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_partition_alloc.cc
@@ -660,7 +660,7 @@
@@ -717,7 +717,7 @@
#endif // !PA_BUILDFLAG(IS_APPLE) && !PA_BUILDFLAG(IS_ANDROID)
#endif // !BUILDFLAG(IS_APPLE) && !BUILDFLAG(IS_ANDROID)
-#if PA_BUILDFLAG(IS_LINUX) || PA_BUILDFLAG(IS_CHROMEOS)
-#if BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS)
+#if 0
SHIM_ALWAYS_EXPORT struct mallinfo mallinfo(void) __THROW {
partition_alloc::SimplePartitionStatsDumper allocator_dumper;
base::SimplePartitionStatsDumper allocator_dumper;
Allocator()->DumpStats("malloc", true, &allocator_dumper);
--- a/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_partition_alloc_unittest.cc
+++ b/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_partition_alloc_unittest.cc
@@ -29,7 +29,7 @@
#if PA_BUILDFLAG(USE_PARTITION_ALLOC_AS_MALLOC)
@@ -24,7 +24,7 @@
#if BUILDFLAG(USE_PARTITION_ALLOC_AS_MALLOC)
// Platforms on which we override weak libc symbols.
-#if PA_BUILDFLAG(IS_LINUX) || PA_BUILDFLAG(IS_CHROMEOS)
+#if (PA_BUILDFLAG(IS_LINUX) && defined(__GLIBC__)) || PA_BUILDFLAG(IS_CHROMEOS)
-#if BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS)
+#if (BUILDFLAG(IS_LINUX) && defined(__GLIBC__)) || BUILDFLAG(IS_CHROMEOS)
PA_NOINLINE void FreeForTest(void* data) {
NOINLINE void FreeForTest(void* data) {
free(data);
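`mallinfo()`/`mallinfo2()` are glibc-specific, so the shim's override is compiled out entirely on musl and callers have to degrade gracefully. A hedged sketch of what that looks like outside Chromium:

```
#include <cstddef>
#if defined(__GLIBC__)
#include <malloc.h>   // struct mallinfo lives here; mallinfo2() exists since glibc 2.33
#endif

size_t heap_in_use_hint() {
#if defined(__GLIBC__)
  struct mallinfo mi = mallinfo();          // deprecated but widely available
  return static_cast<size_t>(mi.uordblks);  // bytes currently allocated
#else
  return 0;   // musl: no mallinfo()/mallinfo2(); report "unknown"
#endif
}
```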


@ -3,24 +3,12 @@ missing some required interface headers for it, and it's not clear how
to make the partalloc support code for it work.
--- ./base/allocator/partition_allocator/partition_alloc.gni.orig
+++ ./base/allocator/partition_allocator/partition_alloc.gni
@@ -89,8 +89,7 @@
# TODO(crbug.com/329199197): Clean this up when experiments are complete.
use_large_empty_slot_span_ring = true
@@ -30,7 +30,7 @@
}
-has_memory_tagging = current_cpu == "arm64" && is_clang && !is_asan &&
- !is_hwasan && (is_linux || is_android)
+has_memory_tagging = false
has_memory_tagging =
- current_cpu == "arm64" && is_clang && !is_asan && (is_linux || is_android)
+ false
declare_args() {
# Debug configuration.
--- ./base/allocator/partition_allocator/src/partition_alloc/aarch64_support.h.orig
+++ ./base/allocator/partition_allocator/src/partition_alloc/aarch64_support.h
@@ -10,7 +10,7 @@
#include "partition_alloc/build_config.h"
#include "partition_alloc/partition_alloc_buildflags.h"
-#if PA_BUILDFLAG(IS_ANDROID) || PA_BUILDFLAG(IS_LINUX)
+#if PA_BUILDFLAG(IS_ANDROID) || (PA_BUILDFLAG(IS_LINUX) && defined(__GLIBC__))
#define HAS_HW_CAPS
#endif
# Causes all the allocations to be routed via allocator_shim.cc. Usually,


@ -0,0 +1,20 @@
--- a/third_party/perfetto/src/trace_processor/perfetto_sql/engine/created_function.cc
+++ b/third_party/perfetto/src/trace_processor/perfetto_sql/engine/created_function.cc
@@ -107,7 +107,7 @@
// the destructors run correctly for non-trivial members of the
// union.
using Data =
- std::variant<int64_t, double, OwnedString, OwnedBytes, nullptr_t>;
+ std::variant<int64_t, double, OwnedString, OwnedBytes, std::nullptr_t>;
StoredSqlValue(SqlValue value) {
switch (value.type) {
@@ -132,7 +132,7 @@
}
SqlValue AsSqlValue() {
- if (std::holds_alternative<nullptr_t>(data)) {
+ if (std::holds_alternative<std::nullptr_t>(data)) {
return SqlValue();
} else if (std::holds_alternative<int64_t>(data)) {
return SqlValue::Long(std::get<int64_t>(data));


@ -2,24 +2,24 @@ the pvalloc/valloc symbols are obsolete and not implemented in musl
--
--- a/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_unittest.cc
+++ b/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_unittest.cc
@@ -410,7 +410,7 @@
@@ -375,7 +375,7 @@
ASSERT_GE(aligned_allocs_intercepted_by_alignment[128], 1u);
ASSERT_GE(aligned_allocs_intercepted_by_size[53], 1u);
-#if PA_BUILDFLAG(IS_POSIX) && !PA_BUILDFLAG(IS_ANDROID)
+#if (PA_BUILDFLAG(IS_POSIX) && defined(__GLIBC__)) && !PA_BUILDFLAG(IS_ANDROID)
-#if BUILDFLAG(IS_POSIX) && !BUILDFLAG(IS_ANDROID)
+#if BUILDFLAG(IS_POSIX) && !BUILDFLAG(IS_ANDROID) && defined(__GLIBC__)
void* pvalloc_ptr = pvalloc(67);
ASSERT_NE(nullptr, pvalloc_ptr);
ASSERT_EQ(0u, reinterpret_cast<uintptr_t>(pvalloc_ptr) % kPageSize);
@@ -449,7 +449,7 @@
@@ -414,7 +414,7 @@
free(memalign_ptr);
ASSERT_GE(frees_intercepted_by_addr[Hash(memalign_ptr)], 1u);
-#if PA_BUILDFLAG(IS_POSIX) && !PA_BUILDFLAG(IS_ANDROID)
+#if (PA_BUILDFLAG(IS_POSIX) && defined(__GLIBC__)) && !PA_BUILDFLAG(IS_ANDROID)
-#if BUILDFLAG(IS_POSIX) && !BUILDFLAG(IS_ANDROID)
+#if BUILDFLAG(IS_POSIX) && !BUILDFLAG(IS_ANDROID) && defined(__GLIBC__)
free(pvalloc_ptr);
ASSERT_GE(frees_intercepted_by_addr[Hash(pvalloc_ptr)], 1u);
#endif // PA_BUILDFLAG(IS_POSIX) && !PA_BUILDFLAG(IS_ANDROID)
#endif // BUILDFLAG(IS_POSIX) && !BUILDFLAG(IS_ANDROID)
--- a/base/process/memory_unittest.cc
+++ b/base/process/memory_unittest.cc
@@ -359,7 +359,7 @@


@ -0,0 +1,12 @@
needed for push_back on array
--
--- a/net/third_party/quiche/src/quiche/common/quiche_endian.h
+++ b/net/third_party/quiche/src/quiche/common/quiche_endian.h
@@ -6,6 +6,7 @@
#define QUICHE_COMMON_QUICHE_ENDIAN_H_
#include <algorithm>
+#include <array>
#include <cstdint>
#include <type_traits>


@ -0,0 +1,94 @@
Patch-Source: https://gitlab.archlinux.org/archlinux/packaging/packages/chromium/-/blob/c073b0c20935d7eb452732e0f3b2860a96c3db21/random-build-fixes.patch
--
diff --git a/chrome/browser/download/bubble/download_bubble_update_service.cc b/chrome/browser/download/bubble/download_bubble_update_service.cc
index 41b647f7b44..8940c6bb7fc 100644
--- a/chrome/browser/download/bubble/download_bubble_update_service.cc
+++ b/chrome/browser/download/bubble/download_bubble_update_service.cc
@@ -91,7 +91,7 @@ ItemSortKey GetSortKey(const Item& item) {
// Helper to get an iterator to the last element in the cache. The cache
// must not be empty.
template <typename Item>
-SortedItems<Item>::const_iterator GetLastIter(const SortedItems<Item>& cache) {
+typename SortedItems<Item>::const_iterator GetLastIter(const SortedItems<Item>& cache) {
CHECK(!cache.empty());
auto it = cache.end();
return std::prev(it);
@@ -967,9 +967,9 @@ bool DownloadBubbleUpdateService::CacheManager::RemoveItemFromCacheImpl(
}
template <typename Id, typename Item>
-SortedItems<Item>::iterator
+typename SortedItems<Item>::iterator
DownloadBubbleUpdateService::CacheManager::RemoveItemFromCacheByIter(
- SortedItems<Item>::iterator iter,
+ typename SortedItems<Item>::iterator iter,
SortedItems<Item>& cache,
IterMap<Id, Item>& iter_map) {
CHECK(iter != cache.end());
diff --git a/chrome/test/chromedriver/capabilities.cc b/chrome/test/chromedriver/capabilities.cc
index c0708681ebd..98b8494d170 100644
--- a/chrome/test/chromedriver/capabilities.cc
+++ b/chrome/test/chromedriver/capabilities.cc
@@ -355,7 +355,7 @@ Status ParseMobileEmulation(const base::Value& option,
"'version' field of type string");
}
- brands.emplace_back(*brand, *version);
+ brands.emplace_back(BrandVersion{*brand, *version});
}
client_hints.brands = std::move(brands);
@@ -392,7 +392,7 @@ Status ParseMobileEmulation(const base::Value& option,
"a 'version' field of type string");
}
- full_version_list.emplace_back(*brand, *version);
+ full_version_list.emplace_back(BrandVersion{*brand, *version});
}
client_hints.full_version_list = std::move(full_version_list);
diff --git a/components/optimization_guide/core/tflite_model_executor.h b/components/optimization_guide/core/tflite_model_executor.h
index c4f750f4684..b5635f4108b 100644
--- a/components/optimization_guide/core/tflite_model_executor.h
+++ b/components/optimization_guide/core/tflite_model_executor.h
@@ -189,7 +189,7 @@ class TFLiteModelExecutor : public ModelExecutor<OutputType, InputType> {
void SendForBatchExecution(
BatchExecutionCallback callback_on_complete,
base::TimeTicks start_time,
- ModelExecutor<OutputType, InputType>::ConstRefInputVector inputs)
+ typename ModelExecutor<OutputType, InputType>::ConstRefInputVector inputs)
override {
DCHECK(execution_task_runner_->RunsTasksInCurrentSequence());
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
diff --git a/third_party/blink/renderer/core/html/parser/html_document_parser_fastpath.cc b/third_party/blink/renderer/core/html/parser/html_document_parser_fastpath.cc
index 2dc0b304092..a82f255090b 100644
--- a/third_party/blink/renderer/core/html/parser/html_document_parser_fastpath.cc
+++ b/third_party/blink/renderer/core/html/parser/html_document_parser_fastpath.cc
@@ -169,7 +169,7 @@ class HTMLFastPathParser {
using Span = base::span<const Char>;
using USpan = base::span<const UChar>;
// 32 matches that used by HTMLToken::Attribute.
- typedef std::conditional<std::is_same_v<Char, UChar>,
+ typedef typename std::conditional<std::is_same_v<Char, UChar>,
UCharLiteralBuffer<32>,
LCharLiteralBuffer<32>>::type LiteralBufferType;
typedef UCharLiteralBuffer<32> UCharLiteralBufferType;
diff --git a/third_party/blink/renderer/modules/canvas/canvas2d/canvas_style.cc b/third_party/blink/renderer/modules/canvas/canvas2d/canvas_style.cc
index f0b49139147..a308fb67982 100644
--- a/third_party/blink/renderer/modules/canvas/canvas2d/canvas_style.cc
+++ b/third_party/blink/renderer/modules/canvas/canvas2d/canvas_style.cc
@@ -91,12 +91,12 @@ void CanvasStyle::ApplyToFlags(cc::PaintFlags& flags,
case kGradient:
GetCanvasGradient()->GetGradient()->ApplyToFlags(flags, SkMatrix::I(),
ImageDrawOptions());
- flags.setColor(SkColor4f(0.0f, 0.0f, 0.0f, global_alpha));
+ flags.setColor(SkColor4f{0.0f, 0.0f, 0.0f, global_alpha});
break;
case kImagePattern:
GetCanvasPattern()->GetPattern()->ApplyToFlags(
flags, AffineTransformToSkMatrix(GetCanvasPattern()->GetTransform()));
- flags.setColor(SkColor4f(0.0f, 0.0f, 0.0f, global_alpha));
+ flags.setColor(SkColor4f{0.0f, 0.0f, 0.0f, global_alpha});
break;
default:
NOTREACHED();


@ -0,0 +1,46 @@
From ae3ae3711784865bdc38bf119a6182a7b8dae91c Mon Sep 17 00:00:00 2001
From: Matt Jolly <Matt.Jolly@footclan.ninja>
Date: Sun, 17 Sep 2023 16:51:42 +1000
Subject: [PATCH] Add system-zstd
--- a/build/linux/unbundle/replace_gn_files.py
+++ b/build/linux/unbundle/replace_gn_files.py
@@ -74,6 +74,7 @@ REPLACEMENTS = {
#
'woff2': 'third_party/woff2/BUILD.gn',
'zlib': 'third_party/zlib/BUILD.gn',
+ 'zstd': 'third_party/zstd/BUILD.gn',
}
--- /dev/null
+++ b/build/linux/unbundle/zstd.gn
@@ -0,0 +1,25 @@
+import("//build/config/linux/pkg_config.gni")
+import("//build/shim_headers.gni")
+
+pkg_config("system_zstd") {
+ packages = [ "libzstd" ]
+}
+
+shim_headers("zstd_shim") {
+ root_path = "src/lib"
+ headers = [
+ "zdict.h",
+ "zstd.h",
+ "zstd_errors.h",
+ ]
+}
+
+source_set("zstd") {
+ deps = [ ":zstd_shim" ]
+ public_configs = [ ":system_zstd" ]
+}
+
+source_set("decompress") {
+ deps = [ ":zstd_shim" ]
+ public_configs = [ ":system_zstd" ]
+}
--
2.42.0


@ -0,0 +1,68 @@
--- ./third_party/electron_node/BUILD.gn.orig
+++ ./third_party/electron_node/BUILD.gn
@@ -40,6 +40,8 @@
node_release_urlbase = ""
# Allows downstream packagers (eg. Linux distributions) to build Electron against system shared libraries.
+ use_system_ada = false
+ use_system_base64 = false
use_system_cares = false
use_system_nghttp2 = false
use_system_llhttp = false
@@ -48,6 +50,16 @@
if (is_linux) {
import("//build/config/linux/pkg_config.gni")
+ if (use_system_ada) {
+ config("ada") {
+ libs = [ "ada" ]
+ }
+ }
+ if (use_system_base64) {
+ pkg_config("base64") {
+ packages = [ "base64" ]
+ }
+ }
if (use_system_cares) {
pkg_config("cares") {
packages = [ "libcares" ]
@@ -258,8 +270,6 @@
deps = [
":node_js2c_exec",
"deps/googletest:gtest",
- "deps/ada",
- "deps/base64",
"deps/simdutf",
"deps/uvwasi",
"//third_party/zlib",
@@ -267,6 +277,16 @@
"//third_party/brotli:enc",
"//v8:v8_libplatform",
]
+ if (use_system_ada) {
+ configs += [ ":ada" ]
+ } else {
+ deps += [ "deps/ada" ]
+ }
+ if (use_system_base64) {
+ configs += [ ":base64" ]
+ } else {
+ deps += [ "deps/base64" ]
+ }
if (use_system_cares) {
configs += [ ":cares" ]
} else {
--- ./electron/script/generate-config-gypi.py.orig
+++ ./electron/script/generate-config-gypi.py
@@ -62,6 +62,11 @@
# Used by certain versions of node-gyp.
v['build_v8_with_gn'] = 'false'
+ with open(os.path.join(NODE_DIR, 'use_system.txt')) as f:
+ for dep in f.read().strip().split(' '):
+ if v.get(f'node_shared_{dep}') is not None:
+ v[f'node_shared_{dep}'] = 'true'
+
with open(target_file, 'w+') as f:
f.write(pprint.pformat(config, indent=2))


@ -0,0 +1,11 @@
--- ./buildtools/third_party/libc++/__config_site.orig
+++ ./buildtools/third_party/libc++/__config_site
@@ -18,7 +18,7 @@
/* #undef _LIBCPP_ABI_FORCE_MICROSOFT */
/* #undef _LIBCPP_HAS_NO_THREADS */
/* #undef _LIBCPP_HAS_NO_MONOTONIC_CLOCK */
-/* #undef _LIBCPP_HAS_MUSL_LIBC */
+#define _LIBCPP_HAS_MUSL_LIBC 1
/* #undef _LIBCPP_HAS_THREAD_API_PTHREAD */
/* #undef _LIBCPP_HAS_THREAD_API_EXTERNAL */
/* #undef _LIBCPP_HAS_THREAD_API_WIN32 */


@ -1,172 +0,0 @@
# Contributor: lauren n. liberda <lauren@selfisekai.rocks>
maintainer="lauren n. liberda <lauren@selfisekai.rocks>"
pkgname=element-desktop
pkgver=1.12.3
pkgrel=1
pkgdesc="Secure and independent communication, connected via Matrix"
url="https://element.io/"
arch="aarch64 x86_64" # same as electron
license="GPL-3.0-only"
_electronver=39
depends="
electron~$_electronver
font-inconsolata
font-inter
font-nunito
font-opensans
font-twemoji
"
makedepends="
cargo
electron-dev~$_electronver
electron-tasje
jq
libsecret-dev
nodejs
npm
python3
py3-setuptools
sqlcipher-dev
swc
yarn
"
source="
https://github.com/vector-im/element-desktop/archive/refs/tags/v$pkgver/element-desktop-$pkgver.tar.gz
https://github.com/vector-im/element-web/archive/refs/tags/v$pkgver/element-web-$pkgver.tar.gz
add-alpine-targets.patch
use-system-headers.patch
tasje-fixes.patch
no-source-maps.patch.web
use-system-fonts.patch.web
element-desktop
"
options="net !check" # broken
# Avoid conflicting providers
sonameprefix="$pkgname:"
# secfixes:
# 1.11.30-r0:
# - CVE-2023-30609
# 1.11.26-r0:
# - CVE-2023-28103
# - CVE-2023-28427
# 1.11.7-r0:
# - CVE-2022-39249
# - CVE-2022-39250
# - CVE-2022-39251
# - CVE-2022-39236
# 1.11.4-r0:
# - CVE-2022-36059
# - CVE-2022-36060
# used by buildscripts (at least web's webpack)
export VERSION=$pkgver
export CARGO_PROFILE_RELEASE_OPT_LEVEL=2
export CARGO_PROFILE_RELEASE_STRIP="symbols"
export NODE_OPTIONS="--openssl-legacy-provider"
prepare() {
default_prepare
msg "Applying more patches"
for x in $source; do
case "$x" in
*.patch.web)
msg "$x"
patch -p1 -i "$srcdir"/$x -d "$srcdir"/element-web-$pkgver
;;
esac
done
rm -rf res/fonts
(
cd "$srcdir"/element-web-$pkgver
msg "Fetch element-web dependencies"
yarn install --frozen-lockfile --ignore-scripts --ignore-engines
jq '.show_labs_settings = true' < config.sample.json > config.json
)
ln -s "$srcdir"/element-web-$pkgver/webapp webapp
msg "Fetch element-desktop dependencies"
yarn install --frozen-lockfile --ignore-scripts
patch -p1 -i patches/@types+auto-launch+5.0.5.patch
}
build() {
(
cd "$srcdir"/element-web-$pkgver
msg "Build element-web"
NODE_ENV=production yarn build
)
msg "Build element-desktop"
yarn asar-webapp
# add "optional" native dependencies
# hak stands for hack
yarn run hak --target "$(uname -m)-alpine-linux-musl"
yarn build:ts
yarn build:res
# we need it as js to be of any use for tasje.
# fails with `yarn tsc`. https://github.com/electron-userland/electron-builder/issues/7961
swc compile electron-builder.ts --out-file electron-builder.mjs
yarn install --frozen-lockfile --ignore-scripts --production
npm rebuild keytar-forked --nodedir=/usr/include/electron/node_headers --build-from-source
find node_modules/keytar-forked/build/ -type f \
\! -path node_modules/keytar-forked/build/Release/keytar.node \
-delete
# stripping in build because it gets into asar
strip node_modules/keytar-forked/build/Release/keytar.node
tasje -c electron-builder.mjs pack
}
check() {
(
cd "$srcdir"/element-web-$pkgver
yarn test
)
}
package() {
local resources="dist/resources"
install -Dm644 $resources/app.asar "$pkgdir"/usr/lib/element-desktop/app.asar
install -Dm644 webapp.asar "$pkgdir"/usr/lib/element-desktop/webapp.asar
cp -r $resources/app.asar.unpacked "$pkgdir"/usr/lib/element-desktop/app.asar.unpacked
install -Dm644 $resources/build/icon.png "$pkgdir"/usr/lib/element-desktop/build/icon.png
install -Dm755 "$srcdir"/$pkgname "$pkgdir"/usr/bin/$pkgname
install -Dm644 dist/$pkgname.desktop "$pkgdir"/usr/share/applications/$pkgname.desktop
while read -r size; do
install -Dm644 dist/icons/$size.png "$pkgdir"/usr/share/icons/hicolor/$size/apps/$pkgname.png
done < dist/icons/size-list
}
sha512sums="
f302907165a35f4a4f069f5aec6bc28edeba3d09c75f483c818e3930ceb4e838e5bb91ad9d42019a11a661d6e656da3c1ff25507cbb281c69183aac7d499e882 element-desktop-1.12.3.tar.gz
b845ff71ca39d7ae4dca9bb55e821bfdf911b12de5d012ba55d598f3287046fb2b525bce608925a9fa8fa7d39a4ceed9b4213d5d1c1d0c9e6b9b72154c9a35a5 element-web-1.12.3.tar.gz
4747893ed3e43d3074e9afe1cdd668a6be0de073d439205fe8c38c5e0f4091cc76e3cd15d98818bea5139add29501d8d07e83c58e9da230a4ce5bb538d388f80 add-alpine-targets.patch
755b17f7b828eb6920c06a6950ad4e14c32c99d22e9c05fcef7a081b5d2034adb03db3958aa5209c99fb7201f4d888c2383fc9864c5e743dd33f8b5c4925acd7 use-system-headers.patch
a5d90dd1ec7aec0dc18b73eb3a6fd51ac1223e381c492d24e7dc0fd2ade955ac727cebbaff6ffa27c7e18d9acf712c709de3f886ee2ddf87ab3b028d3eb461c6 tasje-fixes.patch
ec635fde026f7fce8e8cc57960b5b9dcec4418416d4867ed47711422d48f068bb58a3c9ceb7715efc9c177beca3788da6b0babc9b689ea8c0724a0395f2b85f8 no-source-maps.patch.web
aaf46476bac403aa5204aa265fcf0654fad4c149fd74d0ec4273c051a5549943384cae3cdd62c5b78fdedfed55c11ecceb898b886e44165cbe7e30953a095cf9 use-system-fonts.patch.web
afc588311dc3b566a754e3e7fe6b37b99a06d47b8bbce0ed9acca8ef308fdab0bd1d41b406199e5cbdd86bdce695ff847cd8668857a235cbdc292ad8b899c063 element-desktop
"


@ -1,52 +0,0 @@
--- a/scripts/hak/target.ts
+++ b/scripts/hak/target.ts
@@ -29,8 +29,10 @@
| "i686-unknown-linux-gnu"
| "x86_64-unknown-linux-musl"
| "x86_64-unknown-linux-gnu"
+ | "x86_64-alpine-linux-musl"
| "aarch64-unknown-linux-musl"
| "aarch64-unknown-linux-gnu"
+ | "aarch64-alpine-linux-musl"
| "powerpc64le-unknown-linux-musl"
| "powerpc64le-unknown-linux-gnu";
@@ -112,6 +114,13 @@
libC: MUSL,
};
+const x8664AlpineLinuxMusl: LinuxTarget = {
+ id: "x86_64-alpine-linux-musl",
+ platform: "linux",
+ arch: "x64",
+ libC: MUSL,
+};
+
const i686UnknownLinuxGnu: LinuxTarget = {
id: "i686-unknown-linux-gnu",
platform: "linux",
@@ -140,6 +149,13 @@
libC: MUSL,
};
+const aarch64AlpineLinuxMusl: LinuxTarget = {
+ id: "aarch64-alpine-linux-musl",
+ platform: "linux",
+ arch: "arm64",
+ libC: MUSL,
+};
+
const powerpc64leUnknownLinuxGnu: LinuxTarget = {
id: "powerpc64le-unknown-linux-gnu",
platform: "linux",
@@ -167,8 +183,10 @@
"i686-unknown-linux-gnu": i686UnknownLinuxGnu,
"x86_64-unknown-linux-musl": x8664UnknownLinuxMusl,
"x86_64-unknown-linux-gnu": x8664UnknownLinuxGnu,
+ "x86_64-alpine-linux-musl": x8664AlpineLinuxMusl,
"aarch64-unknown-linux-musl": aarch64UnknownLinuxMusl,
"aarch64-unknown-linux-gnu": aarch64UnknownLinuxGnu,
+ "aarch64-alpine-linux-musl": aarch64AlpineLinuxMusl,
"powerpc64le-unknown-linux-musl": powerpc64leUnknownLinuxMusl,
"powerpc64le-unknown-linux-gnu": powerpc64leUnknownLinuxGnu,
};


@ -1,3 +0,0 @@
#!/bin/sh
exec electron /usr/lib/element-desktop/app.asar "$@"


@ -1,18 +0,0 @@
--- ./webpack.config.js.orig
+++ ./webpack.config.js
@@ -102,15 +102,6 @@
}
const development = {};
- if (devMode) {
- // Embedded source maps for dev builds, can't use eval-source-map due to CSP
- development["devtool"] = "inline-source-map";
- } else {
- // High quality source maps in separate .map files which include the source. This doesn't bulk up the .js
- // payload file size, which is nice for performance but also necessary to get the bundle to a small enough
- // size that sentry will accept the upload.
- development["devtool"] = "source-map";
- }
// Resolve the directories for the js-sdk for later use. We resolve these early, so we
// don't have to call them over and over. We also resolve to the package.json instead of the src


@ -1,33 +0,0 @@
directories in .hak/hakModules are already symlinked inside node_modules,
and as such are already being copied by default. this makes tasje fail with:
```
thread 'main' panicked at 'called `Result::unwrap()` on an `Err` value:
FileAlreadyWritten("/node_modules/keytar/package.json")', src/main.rs:200:18
```
console.log interferes with tasje, which reads config from node stdout
--- ./electron-builder.ts.orig
+++ ./electron-builder.ts
@@ -72,10 +72,6 @@
console.warn(`No VARIANT_PATH specified, using default variant configuration '${DEFAULT_VARIANT}':`);
}
-for (const key in variant) {
- console.log(`${key}: ${variant[key]}`);
-}
-
interface Configuration extends BaseConfiguration {
extraMetadata: Partial<Pick<Pkg, "version">> & ExtraMetadata;
linux: BaseConfiguration["linux"];
@@ -112,10 +108,6 @@
},
files: [
"package.json",
- {
- from: ".hak/hakModules",
- to: "node_modules",
- },
"lib/**",
],
extraResources: ["build/icon.*", "webapp.asar"],


@ -1,79 +0,0 @@
--- a/src/vector/jitsi/index.pcss
+++ b/src/vector/jitsi/index.pcss
@@ -14,7 +14,7 @@
font-family: "Nunito";
font-style: normal;
font-weight: 400;
- src: url("$(res)/fonts/Nunito/Nunito-Regular.ttf") format("truetype");
+ src: local("Nunito Regular");
}
$dark-fg: #edf3ff;
--- a/res/themes/light/css/_fonts.pcss
+++ b/res/themes/light/css/_fonts.pcss
@@ -5,16 +5,16 @@
@font-face {
font-family: "Twemoji";
font-weight: 400;
- src: url("$(res)/fonts/Twemoji_Mozilla/TwemojiMozilla-colr.woff2") format("woff2");
+ src: local("Twemoji");
}
/* For at least Chrome on Windows 10, we have to explictly add extra weights for the emoji to appear in bold messages, etc. */
@font-face {
font-family: "Twemoji";
font-weight: 600;
- src: url("$(res)/fonts/Twemoji_Mozilla/TwemojiMozilla-colr.woff2") format("woff2");
+ src: local("Twemoji");
}
@font-face {
font-family: "Twemoji";
font-weight: 700;
- src: url("$(res)/fonts/Twemoji_Mozilla/TwemojiMozilla-colr.woff2") format("woff2");
+ src: local("Twemoji");
}
--- a/res/themes/legacy-light/css/_fonts.pcss
+++ b/res/themes/legacy-light/css/_fonts.pcss
@@ -23,17 +23,17 @@
font-family: "Nunito";
font-style: normal;
font-weight: 400;
- src: url("$(res)/fonts/Nunito/Nunito-Regular.ttf") format("truetype");
+ src: local("Nunito Regular");
}
@font-face {
font-family: "Nunito";
font-style: normal;
font-weight: 600;
- src: url("$(res)/fonts/Nunito/Nunito-SemiBold.ttf") format("truetype");
+ src: local("Nunito SemiBold");
}
@font-face {
font-family: "Nunito";
font-style: normal;
font-weight: 700;
- src: url("$(res)/fonts/Nunito/Nunito-Bold.ttf") format("truetype");
+ src: local("Nunito Bold");
}
--- ./src/theme.ts.orig
+++ ./src/theme.ts
@@ -7,20 +7,6 @@
Please see LICENSE files in the repository root for full details.
*/
-import "@fontsource/inter/400.css";
-import "@fontsource/inter/400-italic.css";
-import "@fontsource/inter/500.css";
-import "@fontsource/inter/500-italic.css";
-import "@fontsource/inter/600.css";
-import "@fontsource/inter/600-italic.css";
-import "@fontsource/inter/700.css";
-import "@fontsource/inter/700-italic.css";
-
-import "@fontsource/inconsolata/latin-ext-400.css";
-import "@fontsource/inconsolata/latin-400.css";
-import "@fontsource/inconsolata/latin-ext-700.css";
-import "@fontsource/inconsolata/latin-700.css";
-
import { logger } from "matrix-js-sdk/src/logger";
import { _t } from "./languageHandler";


@ -1,15 +0,0 @@
--- a/scripts/hak/hakEnv.ts
+++ b/scripts/hak/hakEnv.ts
@@ -101,11 +101,10 @@
...process.env,
npm_config_arch: this.target.arch,
npm_config_target_arch: this.target.arch,
- npm_config_disturl: "https://electronjs.org/headers",
+ npm_config_nodedir: "/usr/include/electron/node_headers",
npm_config_runtime: this.runtime,
npm_config_target: this.runtimeVersion,
npm_config_build_from_source: "true",
- npm_config_devdir: path.join(os.homedir(), ".electron-gyp"),
};
}


@ -0,0 +1,27 @@
# Contributor: Aiden Grossman <agrossman154@yahoo.com>
# Maintainer: Aiden Grossman <agrossman154@yahoo.com>
pkgname=fdm-materials
pkgver=5.2.2
pkgrel=0
pkgdesc="FDM Material Database"
url="https://github.com/Ultimaker/fdm_materials"
arch="noarch"
license="CC0-1.0"
makedepends="cmake samurai"
options="!check" # no checks provided
source="$pkgname-$pkgver.tar.gz::https://github.com/Ultimaker/fdm_materials/archive/refs/tags/$pkgver.tar.gz"
builddir="$srcdir/fdm_materials-$pkgver"
build() {
cmake -B build -G Ninja \
-DCMAKE_INSTALL_PREFIX=/usr
cmake --build build
}
package() {
DESTDIR="$pkgdir" cmake --install build
}
sha512sums="
73eefec8b7b88af73afc578ffba583480bda30309945b1720d7a1a075bd7ab3279599d53fe83f4c96695f294a5a3e11297abc334ca6cc9db163d4eb0fbdaf0f9 fdm-materials-5.2.2.tar.gz
"

backports/freecad/APKBUILD (new file, 108 lines)

@ -0,0 +1,108 @@
# Contributor: Aiden Grossman <agrossman154@yahoo.com>
# Maintainer: Aiden Grossman <agrossman154@yahoo.com>
pkgname=freecad
pkgver=0.20.2
pkgrel=5
pkgdesc="Free and open source 3D parametric modeler"
url="https://freecadweb.org/"
license="LGPL-2.0-or-later"
arch="" # removed dependency py3-pyside2
#arch="x86_64" # dependency OpenCascade is only x86_64
depends="
graphviz
hdf5
opencascade
py3-matplotlib
py3-numpy
py3-pivy
py3-ply
py3-pyside2
py3-six
py3-yaml
python3
"
makedepends="
boost-dev
cmake
coin-dev
doxygen
eigen-dev
freeimage-dev
glu-dev
hdf5-dev
libmedc-dev
libshiboken2-dev
onetbb-dev
opencascade-dev
py3-pyside2-dev
python3-dev
shiboken2
qt5-qtsvg-dev
qt5-qtwebengine-dev
qt5-qtxmlpatterns-dev
samurai
swig
vtk-dev
xerces-c-dev
"
checkdepends="xvfb-run mesa mesa-dri-gallium font-opensans"
source="https://github.com/FreeCAD/FreeCAD/archive/$pkgver/freecad-$pkgver.tar.gz
$pkgname-python3.11-1.patch::https://github.com/FreeCAD/FreeCAD/commit/fe02d63c8c9b1280978be841d04e68a0a55cceb9.patch
numpy-1.20.patch
no-execinfo.patch
no-workaround-spnav.patch
resourceDirectory.patch
tests.patch
opencascade-7.8.0.patch
missing-include-cstdint.patch
"
builddir="$srcdir/FreeCAD-$pkgver"
build() {
cmake -B build -G Ninja \
-DCMAKE_BUILD_TYPE=Release \
-DCMAKE_C_FLAGS="$CFLAGS -fPIC -w" \
-DCMAKE_CXX_FLAGS="$CXXFLAGS -fPIC -w" \
-DCMAKE_INSTALL_PREFIX=/usr/lib/freecad \
-DCMAKE_INSTALL_DATADIR=/usr/share/freecad \
-DCMAKE_INSTALL_DATAROOTDIR=/usr/share \
-DCMAKE_INSTALL_DOCDIR=/usr/share/freecad/doc \
-DBUILD_ENABLE_CXX_STD=C++17 \
-DBUILD_QT5=ON \
-DFREECAD_USE_EXTERNAL_PIVY=ON \
-DFREECAD_USE_OCC_VARIANT="Official Version" \
-DFREECAD_USE_QT_FILEDIALOG=ON \
-DPYTHON_EXECUTABLE=/usr/bin/python3
cmake --build build
}
check() {
DESTDIR=test_install cmake --install build
mkdir -p test_install/usr/bin
ln -s ../lib/freecad/bin/FreeCAD test_install/usr/bin/FreeCAD
ln -s ../lib/freecad/bin/FreeCADCmd test_install/usr/bin/FreeCADCmd
LD_LIBRARY_PATH="$PWD"/test_install/usr/lib/freecad/lib \
xvfb-run "$PWD"/test_install/usr/bin/FreeCAD -t 0
}
package() {
DESTDIR="$pkgdir" cmake --install build
# FreeCAD does not initialize correctly when binaries
# are located under /usr/bin; thus, symlinks are necessary.
install -d "$pkgdir"/usr/bin
ln -s /usr/lib/freecad/bin/FreeCAD "$pkgdir"/usr/bin/FreeCAD
ln -s /usr/lib/freecad/bin/FreeCADCmd "$pkgdir"/usr/bin/FreeCADCmd
}
sha512sums="
c3acd77dd2bb9a2a23ac354da3b6102effb89c95d675e91421d65486414dfe8cc0188a7212245e0deb63f17b9c5df76133017be09e4cd14b833be8cbec52a08d freecad-0.20.2.tar.gz
75a237f7ed7a89a98c0e5bdb3d3f0788749602daf718089aa0814e05f93ced1e15ad5867c7c87f170b48c5984f9ace1bbc95c4f386ce72bfb8d616323b47f1e5 freecad-python3.11-1.patch
80b08b031810fce7b6d698c662f64fa4f8a904f283f46b478b1d718529164c0ee61ce190f633abf04e03212720480f3f0603b0c1e160af79d7b6bb82da3bd0e4 numpy-1.20.patch
73aaba7015dce7048eb7d2456131b5b5ba4673cc980503331987be54d99daed5f61db015ca33d7d2ef0f02bd3192da8ce122c103c3b93f9959927deb4f0b933e no-execinfo.patch
15696bdaaf77482f1b5d3806535a8004c8cec7d598d62092d9f0394b4ca9e2ad6cedd77c4b86a83a06324d16678c1c6bbf3a390b807729717a2f513e858afd50 no-workaround-spnav.patch
8ba13b17bad66316757d180c1b9e9e72a24382627eac7c43a2264b3b5101e6e8f701775f2b805ed733f500fbcd8b0e8e422ec58a9ab3d948d613b666157d4c52 resourceDirectory.patch
5db19e0aa2ca1fd21f4c56afc9db54390a799262aaa0a741704c2c304b0068fd6ca1dcc086465e12e9c0cfe06aac750aaf9b8f5f4db324539af4dd3394803ff9 tests.patch
f933680dea8744e147f38abce389cb7fd0ec3fb3566454fdd5e6ea07b2faaac5fe61aabe1df3bda9f0d7b4fca16055aa2ad700e9cce10d2604ae37b761b68ade opencascade-7.8.0.patch
fec515cc63830f0e715527c7890173705b24e7d99d225821ec4300104cf3affdee49243bbd4d0a331a902cf04db756a1b8f18f0a17cc71f5757f8b5c73c78ede missing-include-cstdint.patch
"


@ -0,0 +1,11 @@
diff --color -rupN a/src/3rdParty/libE57Format/include/E57Format.h b/src/3rdParty/libE57Format/include/E57Format.h
--- a/src/3rdParty/libE57Format/include/E57Format.h 2022-12-07 03:35:37.000000000 +0100
+++ b/src/3rdParty/libE57Format/include/E57Format.h 2024-02-27 14:06:29.308892531 +0100
@@ -32,6 +32,7 @@
//! @file E57Format.h header file for the E57 API
#include <cfloat>
+#include <cstdint>
#include <memory>
#include <vector>


@ -0,0 +1,46 @@
diff --git a/src/3rdParty/salomesmesh/src/DriverSTL/Basics_Utils.cpp b/src/3rdParty/salomesmesh/src/DriverSTL/Basics_Utils.cpp
index 23c5083..54c7ecf 100644
--- a/src/3rdParty/salomesmesh/src/DriverSTL/Basics_Utils.cpp
+++ b/src/3rdParty/salomesmesh/src/DriverSTL/Basics_Utils.cpp
@@ -29,7 +29,6 @@
#ifndef WIN32
#include <unistd.h>
#include <sys/stat.h>
-#include <execinfo.h>
#endif
@@ -109,20 +108,7 @@ namespace Kernel_Utils
#ifndef WIN32
void print_traceback()
{
- void *array[50];
- size_t size;
- char **strings;
- size_t i;
-
- size = backtrace (array, 40);
- strings = backtrace_symbols (array, size);
-
- for (i = 0; i < size; i++)
- {
- std::cerr << strings[i] << std::endl;
- }
-
- free (strings);
+ std::cerr << "there is no backtrace." << std::endl;
}
#else
#if (_MSC_VER >= 1400) // Visual Studio 2005
diff --git a/src/App/Application.cpp b/src/App/Application.cpp
index 3081623..ba3525f 100644
--- a/src/App/Application.cpp
+++ b/src/App/Application.cpp
@@ -1732,7 +1732,6 @@ static void freecadNewHandler ()
#endif
#if defined(FC_OS_LINUX)
-#include <execinfo.h>
#include <dlfcn.h>
#include <cxxabi.h>


@ -0,0 +1,69 @@
Patch-Source: https://github.com/FreeCAD/FreeCAD/commit/7b377a216b9185960e4cee980a6504dc1a755f50
fixes stderr reassignment
--
From 7b377a216b9185960e4cee980a6504dc1a755f50 Mon Sep 17 00:00:00 2001
From: wmayer <wmayer@users.sourceforge.net>
Date: Wed, 29 Jun 2022 15:19:18 +0200
Subject: [PATCH] Gui: remove workaround for spnav 0.23 due to build failure
with musl libc
---
src/Gui/3Dconnexion/GuiNativeEventLinux.cpp | 27 ---------------------
1 file changed, 27 deletions(-)
diff --git a/src/Gui/3Dconnexion/GuiNativeEventLinux.cpp b/src/Gui/3Dconnexion/GuiNativeEventLinux.cpp
index 7f0ddd75d262..455ece0b36bd 100644
--- a/src/Gui/3Dconnexion/GuiNativeEventLinux.cpp
+++ b/src/Gui/3Dconnexion/GuiNativeEventLinux.cpp
@@ -21,42 +21,17 @@
***************************************************************************/
#include <FCConfig.h>
-#include <cstdio>
#include "GuiNativeEventLinux.h"
#include "GuiApplicationNativeEventAware.h"
#include <Base/Console.h>
-#include <Base/FileInfo.h>
#include <QMainWindow>
#include <QSocketNotifier>
#include <spnav.h>
-namespace {
-class RedirectStdErr
-{
-public:
- RedirectStdErr()
- : fi(Base::FileInfo::getTempFileName())
- , file(stderr)
- {
- stderr = fopen(fi.filePath().c_str(), "w");
- }
- ~RedirectStdErr()
- {
- fclose(stderr);
- fi.deleteFile();
- stderr = file;
- }
-
-private:
- Base::FileInfo fi;
- FILE* file;
-};
-}
-
Gui::GuiNativeEvent::GuiNativeEvent(Gui::GUIApplicationNativeEventAware *app)
: GuiAbstractNativeEvent(app)
{
@@ -72,8 +47,6 @@ Gui::GuiNativeEvent::~GuiNativeEvent()
void Gui::GuiNativeEvent::initSpaceball(QMainWindow *window)
{
- // tmp. redirect stderr to a file to suppress an error message from spnav_open()
- RedirectStdErr err;
Q_UNUSED(window)
if (spnav_open() == -1) {
Base::Console().Log("Couldn't connect to spacenav daemon. Please ignore if you don't have a spacemouse.\n");


@ -0,0 +1,25 @@
Patch-Source: https://github.com/FreeCAD/FreeCAD/commit/8b0df1dc936b544091f6a2d68df1c1a14ae3de5b
--
From 8b0df1dc936b544091f6a2d68df1c1a14ae3de5b Mon Sep 17 00:00:00 2001
From: lorenz <looooo@users.noreply.github.com>
Date: Tue, 27 Dec 2022 04:59:35 +0100
Subject: [PATCH] FEM: femmesh: fix AttributeError: module 'numpy' has no
attribute 'int'
---
src/Mod/Fem/femmesh/meshtools.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/Mod/Fem/femmesh/meshtools.py b/src/Mod/Fem/femmesh/meshtools.py
index dd8671b..d2e2b0a 100644
--- a/src/Mod/Fem/femmesh/meshtools.py
+++ b/src/Mod/Fem/femmesh/meshtools.py
@@ -485,7 +485,7 @@ def get_femelement_sets(
# fem_objects = FreeCAD FEM document objects
# get femelements for reference shapes of each obj.References
count_femelements = 0
- referenced_femelements = np.zeros((max(femelement_table.keys())+1,),dtype=np.int)
+ referenced_femelements = np.zeros((max(femelement_table.keys())+1,),dtype=int)
has_remaining_femelements = None
for fem_object_i, fem_object in enumerate(fem_objects):
obj = fem_object["Object"]


@ -0,0 +1,877 @@
Fix compilation with opencascade 7.8.0
Based on https://github.com/FreeCAD/FreeCAD/pull/11909
diff --color -rupN a/cMake/FindOCC.cmake b/cMake/FindOCC.cmake
--- a/cMake/FindOCC.cmake 2022-12-07 03:35:37.000000000 +0100
+++ b/cMake/FindOCC.cmake 2024-02-27 15:00:48.248873883 +0100
@@ -127,8 +127,6 @@ if(OCC_FOUND)
TKG2d
TKG3d
TKMath
- TKIGES
- TKSTL
TKShHealing
TKXSBase
TKBool
@@ -139,10 +137,6 @@ if(OCC_FOUND)
TKGeomBase
TKOffset
TKPrim
- TKSTEPBase
- TKSTEPAttr
- TKSTEP209
- TKSTEP
TKHLR
TKFeat
)
@@ -154,17 +148,19 @@ if(OCC_FOUND)
TKLCAF
TKVCAF
TKCDF
- TKXDESTEP
- TKXDEIGES
TKMeshVS
TKService
TKV3d
)
- if(OCC_VERSION_STRING VERSION_LESS 6.7.3)
- list(APPEND OCC_OCAF_LIBRARIES TKAdvTools)
- elseif(NOT OCC_VERSION_STRING VERSION_LESS 7.5.0)
+ if(NOT OCC_VERSION_STRING VERSION_LESS 7.5.0)
list(APPEND OCC_OCAF_LIBRARIES TKRWMesh)
- endif(OCC_VERSION_STRING VERSION_LESS 6.7.3)
+ endif(NOT OCC_VERSION_STRING VERSION_LESS 7.5.0)
+ if(OCC_VERSION_STRING VERSION_LESS 7.8.0)
+ list(APPEND OCC_LIBRARIES TKIGES TKSTL TKSTEPBase TKSTEPAttr TKSTEP209 TKSTEP)
+ list(APPEND OCC_OCAF_LIBRARIES TKXDESTEP TKXDEIGES)
+ else(OCC_VERSION_STRING VERSION_LESS 7.8.0)
+ list(APPEND OCC_LIBRARIES TKDESTEP TKDEIGES TKDEGLTF TKDESTL)
+ endif(OCC_VERSION_STRING VERSION_LESS 7.8.0)
message(STATUS "-- Found OCE/OpenCASCADE version: ${OCC_VERSION_STRING}")
message(STATUS "-- OCE/OpenCASCADE include directory: ${OCC_INCLUDE_DIR}")
message(STATUS "-- OCE/OpenCASCADE shared libraries directory: ${OCC_LIBRARY_DIR}")
diff --color -rupN a/src/3rdParty/salomesmesh/inc/SMESHDS_DataMapOfShape.hxx b/src/3rdParty/salomesmesh/inc/SMESHDS_DataMapOfShape.hxx
--- a/src/3rdParty/salomesmesh/inc/SMESHDS_DataMapOfShape.hxx 2022-12-07 03:35:37.000000000 +0100
+++ b/src/3rdParty/salomesmesh/inc/SMESHDS_DataMapOfShape.hxx 2024-02-27 15:00:48.248873883 +0100
@@ -29,22 +29,35 @@
#include <TopoDS_Shape.hxx>
+#include <Standard_Version.hxx>
+
/*
* This method needed for instance NCollection_DataMap with TopoDS_Shape as key
*/
+#if OCC_VERSION_HEX >= 0x070800
struct SMESHDS_Hasher
{
- static inline Standard_Boolean IsEqual(const TopoDS_Shape& S1,
- const TopoDS_Shape& S2)
- {
- return S1.IsSame(S2);
+ size_t operator()(const TopoDS_Shape& S) const noexcept {
+ return std::hash<TopoDS_Shape>{}(S);
}
- static inline Standard_Integer HashCode(const TopoDS_Shape& S,
- const Standard_Integer Upper)
- {
- return ::HashCode( S, Upper);
+ size_t operator()(const TopoDS_Shape& S1, const TopoDS_Shape& S2) const noexcept {
+ return S1.IsSame(S2);
}
};
-
+#else
+struct SMESHDS_Hasher
+{
+static inline Standard_Boolean IsEqual(const TopoDS_Shape& S1,
+ const TopoDS_Shape& S2)
+{
+ return S1.IsSame(S2);
+}
+static inline Standard_Integer HashCode(const TopoDS_Shape& S,
+ const Standard_Integer Upper)
+{
+ return ::HashCode( S, Upper);
+}
+};
+#endif
#endif
diff --color -rupN a/src/3rdParty/salomesmesh/inc/SMESH_MeshVSLink.hxx b/src/3rdParty/salomesmesh/inc/SMESH_MeshVSLink.hxx
--- a/src/3rdParty/salomesmesh/inc/SMESH_MeshVSLink.hxx 2022-12-07 03:35:37.000000000 +0100
+++ b/src/3rdParty/salomesmesh/inc/SMESH_MeshVSLink.hxx 2024-02-27 15:00:48.248873883 +0100
@@ -57,9 +57,6 @@
#ifndef _MeshVS_EntityType_HeaderFile
#include <MeshVS_EntityType.hxx>
#endif
-#ifndef _Standard_Address_HeaderFile
-#include <Standard_Address.hxx>
-#endif
#ifndef _TColStd_HArray1OfInteger_HeaderFile
#include <TColStd_HArray1OfInteger.hxx>
#endif
diff --color -rupN a/src/3rdParty/salomesmesh/inc/SMESH_SMESH.hxx b/src/3rdParty/salomesmesh/inc/SMESH_SMESH.hxx
--- a/src/3rdParty/salomesmesh/inc/SMESH_SMESH.hxx 2022-12-07 03:35:37.000000000 +0100
+++ b/src/3rdParty/salomesmesh/inc/SMESH_SMESH.hxx 2024-02-27 15:00:48.248873883 +0100
@@ -37,4 +37,5 @@
#define SMESH_EXPORT
#endif
+#include <Standard_Version.hxx>
#endif
diff --color -rupN a/src/3rdParty/salomesmesh/inc/SMESH_SequenceOfNode.hxx b/src/3rdParty/salomesmesh/inc/SMESH_SequenceOfNode.hxx
--- a/src/3rdParty/salomesmesh/inc/SMESH_SequenceOfNode.hxx 2022-12-07 03:35:37.000000000 +0100
+++ b/src/3rdParty/salomesmesh/inc/SMESH_SequenceOfNode.hxx 2024-02-27 15:00:48.252207183 +0100
@@ -28,10 +28,11 @@
#include "SMESH_SMESH.hxx"
-#include <NCollection_DefineSequence.hxx>
#if OCC_VERSION_HEX >= 0x060703
#include <NCollection_IncAllocator.hxx>
#include <NCollection_Sequence.hxx>
+#else
+#include <NCollection_DefineSequence.hxx>
#endif
typedef const SMDS_MeshNode* SMDS_MeshNodePtr;
diff --color -rupN a/src/3rdParty/salomesmesh/inc/SMESH_TypeDefs.hxx b/src/3rdParty/salomesmesh/inc/SMESH_TypeDefs.hxx
--- a/src/3rdParty/salomesmesh/inc/SMESH_TypeDefs.hxx 2022-12-07 03:35:37.000000000 +0100
+++ b/src/3rdParty/salomesmesh/inc/SMESH_TypeDefs.hxx 2024-02-27 15:00:48.252207183 +0100
@@ -185,11 +185,18 @@ typedef std::vector< UVPtStruct > UVPtSt
// --------------------------------------------------------------------------------
// class SMESH_SequenceOfElemPtr
+#include <Standard_Version.hxx>
+#if OCC_VERSION_HEX >= 0x060703
+#include <NCollection_Sequence.hxx>
+#else
#include <NCollection_DefineSequence.hxx>
+#endif
class SMDS_MeshElement;
typedef const SMDS_MeshElement* SMDS_MeshElementPtr;
+#define DEFINE_SEQUENCE(_ClassName_, _BaseCollection_, TheItemType) \
+typedef NCollection_Sequence<TheItemType > _ClassName_;
DEFINE_SEQUENCE (SMESH_SequenceOfElemPtr, SMESH_BaseCollectionElemPtr, SMDS_MeshElementPtr)
diff --color -rupN a/src/3rdParty/salomesmesh/src/DriverSTL/DriverSTL_R_SMDS_Mesh.cpp b/src/3rdParty/salomesmesh/src/DriverSTL/DriverSTL_R_SMDS_Mesh.cpp
--- a/src/3rdParty/salomesmesh/src/DriverSTL/DriverSTL_R_SMDS_Mesh.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/3rdParty/salomesmesh/src/DriverSTL/DriverSTL_R_SMDS_Mesh.cpp 2024-02-27 15:00:48.252207183 +0100
@@ -33,10 +33,33 @@
#include "SMDS_MeshNode.hxx"
#include "SMESH_File.hxx"
+#include <Standard_Version.hxx>
+
namespace
{
struct Hasher
{
+#if OCC_VERSION_HEX >= 0x070800
+ size_t operator()(const gp_Pnt& point) const noexcept
+ {
+ union
+ {
+ Standard_Real R[3];
+ Standard_Integer I[6];
+ } U;
+
+ point.Coord( U.R[0], U.R[1], U.R[2] );
+ return std::hash<Standard_Integer>{}(U.I[0]/23+U.I[1]/19+U.I[2]/17+U.I[3]/13+U.I[4]/11+U.I[5]/7);
+ }
+
+ size_t operator()(const gp_Pnt& point1, const gp_Pnt& point2) const noexcept
+ {
+ static Standard_Real tab1[3], tab2[3];
+ point1.Coord(tab1[0],tab1[1],tab1[2]);
+ point2.Coord(tab2[0],tab2[1],tab2[2]);
+ return (memcmp(tab1,tab2,sizeof(tab1)) == 0);
+ }
+#else
//=======================================================================
//function : HashCode
//purpose :
@@ -51,9 +74,9 @@ namespace
} U;
point.Coord( U.R[0], U.R[1], U.R[2] );
-
- return ::HashCode(U.I[0]/23+U.I[1]/19+U.I[2]/17+U.I[3]/13+U.I[4]/11+U.I[5]/7,Upper);
+ return std::hash<Standard_Integer>{}(U.I[0]/23+U.I[1]/19+U.I[2]/17+U.I[3]/13+U.I[4]/11+U.I[5]/7);
}
+
//=======================================================================
//function : IsEqual
//purpose :
@@ -66,7 +89,9 @@ namespace
point2.Coord(tab2[0],tab2[1],tab2[2]);
return (memcmp(tab1,tab2,sizeof(tab1)) == 0);
}
+#endif
};
+
typedef NCollection_DataMap<gp_Pnt,SMDS_MeshNode*,Hasher> TDataMapOfPntNodePtr;
const int HEADER_SIZE = 84;
diff --color -rupN a/src/3rdParty/salomesmesh/src/StdMeshers/StdMeshers_Quadrangle_2D.cpp b/src/3rdParty/salomesmesh/src/StdMeshers/StdMeshers_Quadrangle_2D.cpp
--- a/src/3rdParty/salomesmesh/src/StdMeshers/StdMeshers_Quadrangle_2D.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/3rdParty/salomesmesh/src/StdMeshers/StdMeshers_Quadrangle_2D.cpp 2024-02-27 15:00:48.252207183 +0100
@@ -48,7 +48,6 @@
#include <Bnd_Box.hxx>
#include <GeomAPI_ProjectPointOnSurf.hxx>
#include <Geom_Surface.hxx>
-#include <NCollection_DefineArray2.hxx>
#include <Precision.hxx>
#include <Standard_Real.hxx>
#include <TColStd_SequenceOfInteger.hxx>
diff --color -rupN a/src/Mod/Drawing/App/PreCompiled.h b/src/Mod/Drawing/App/PreCompiled.h
--- a/src/Mod/Drawing/App/PreCompiled.h 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Drawing/App/PreCompiled.h 2024-02-27 15:00:48.252207183 +0100
@@ -70,7 +70,6 @@
#include <Standard_LicenseError.hxx>
#include <Standard_LicenseNotFound.hxx>
#include <Standard_Macro.hxx>
-#include <Standard_math.hxx>
#include <Standard_MultiplyDefined.hxx>
#include <Standard_NegativeValue.hxx>
#include <Standard_NoMoreObject.hxx>
diff --color -rupN a/src/Mod/Drawing/Gui/TaskDialog.cpp b/src/Mod/Drawing/Gui/TaskDialog.cpp
--- a/src/Mod/Drawing/Gui/TaskDialog.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Drawing/Gui/TaskDialog.cpp 2024-02-27 15:00:48.252207183 +0100
@@ -29,7 +29,6 @@
#endif
-#include <Standard_math.hxx>
#include "TaskDialog.h"
#include <Gui/Application.h>
#include <Gui/Command.h>
diff --color -rupN a/src/Mod/Fem/Gui/PreCompiled.h b/src/Mod/Fem/Gui/PreCompiled.h
--- a/src/Mod/Fem/Gui/PreCompiled.h 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Fem/Gui/PreCompiled.h 2024-02-27 15:00:48.252207183 +0100
@@ -63,7 +63,6 @@
#endif
// OCC
-#include <Standard_math.hxx>
#include <Precision.hxx>
#include <TopoDS.hxx>
#include <BRepAdaptor_Surface.hxx>
diff --color -rupN a/src/Mod/Fem/Gui/TaskCreateNodeSet.cpp b/src/Mod/Fem/Gui/TaskCreateNodeSet.cpp
--- a/src/Mod/Fem/Gui/TaskCreateNodeSet.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Fem/Gui/TaskCreateNodeSet.cpp 2024-02-27 15:00:48.252207183 +0100
@@ -28,7 +28,6 @@
# include <SMESH_Mesh.hxx>
# include <SMESHDS_Mesh.hxx>
-# include <Standard_math.hxx>
#endif
#include <Base/Console.h>
diff --color -rupN a/src/Mod/Import/App/ImportOCAF.cpp b/src/Mod/Import/App/ImportOCAF.cpp
--- a/src/Mod/Import/App/ImportOCAF.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Import/App/ImportOCAF.cpp 2024-02-27 15:00:48.252207183 +0100
@@ -165,7 +165,11 @@ void ImportOCAF::loadShapes(const TDF_La
std::vector<App::DocumentObject *> localValue;
if (aShapeTool->GetShape(label,aShape)) {
+#if OCC_VERSION_HEX >= 0x070800
+ hash = std::hash<TopoDS_Shape>{}(aShape);
+#else
hash = aShape.HashCode(HashUpper);
+#endif
}
Handle(TDataStd_Name) name;
@@ -235,7 +239,11 @@ void ImportOCAF::loadShapes(const TDF_La
if (isRef || myRefShapes.find(hash) == myRefShapes.end()) {
TopoDS_Shape aShape;
if (isRef && aShapeTool->GetShape(label, aShape))
+#if OCC_VERSION_HEX >= 0x070800
+ myRefShapes.insert(std::hash<TopoDS_Shape>{}(aShape));
+#else
myRefShapes.insert(aShape.HashCode(HashUpper));
+#endif
if (aShapeTool->IsSimpleShape(label) && (isRef || aShapeTool->IsFree(label))) {
if (!asm_name.empty())
@@ -565,7 +573,11 @@ void ImportXCAF::createShape(const TopoD
part->Label.setValue(default_name);
part->Shape.setValue(shape);
std::map<Standard_Integer, Quantity_ColorRGBA>::const_iterator jt;
+#if OCC_VERSION_HEX >= 0x070800
+ jt = myColorMap.find(std::hash<TopoDS_Shape>{}(shape));
+#else
jt = myColorMap.find(shape.HashCode(INT_MAX));
+#endif
App::Color partColor(0.8f,0.8f,0.8f);
#if 0//TODO
@@ -586,7 +598,11 @@ void ImportXCAF::createShape(const TopoD
// set label name if defined
if (setname && !myNameMap.empty()) {
std::map<Standard_Integer, std::string>::const_iterator jt;
+#if OCC_VERSION_HEX >= 0x070800
+ jt = myNameMap.find(std::hash<TopoDS_Shape>{}(shape));
+#else
jt = myNameMap.find(shape.HashCode(INT_MAX));
+#endif
if (jt != myNameMap.end()) {
part->Label.setValue(jt->second);
}
@@ -606,7 +622,11 @@ void ImportXCAF::createShape(const TopoD
faceColors.resize(faces.Extent(), partColor);
xp.Init(shape,TopAbs_FACE);
while (xp.More()) {
+#if OCC_VERSION_HEX >= 0x070800
+ jt = myColorMap.find(std::hash<TopoDS_Shape>{}(xp.Current()));
+#else
jt = myColorMap.find(xp.Current().HashCode(INT_MAX));
+#endif
if (jt != myColorMap.end()) {
int index = faces.FindIndex(xp.Current());
faceColors[index-1] = convertColor(jt->second);
@@ -641,23 +661,51 @@ void ImportXCAF::loadShapes(const TDF_La
// add the shapes
TopExp_Explorer xp;
for (xp.Init(aShape, TopAbs_SOLID); xp.More(); xp.Next(), ctSolids++)
+#if OCC_VERSION_HEX >= 0x070800
+ this->mySolids[std::hash<TopoDS_Shape>{}(xp.Current())] = (xp.Current());
+#else
this->mySolids[xp.Current().HashCode(INT_MAX)] = (xp.Current());
+#endif
for (xp.Init(aShape, TopAbs_SHELL, TopAbs_SOLID); xp.More(); xp.Next(), ctShells++)
+#if OCC_VERSION_HEX >= 0x070800
+ this->myShells[std::hash<TopoDS_Shape>{}(xp.Current())] = (xp.Current());
+#else
this->myShells[xp.Current().HashCode(INT_MAX)] = (xp.Current());
+#endif
// if no solids and no shells were found then go for compounds
if (ctSolids == 0 && ctShells == 0) {
for (xp.Init(aShape, TopAbs_COMPOUND); xp.More(); xp.Next(), ctComps++)
+#if OCC_VERSION_HEX >= 0x070800
+ this->myCompds[std::hash<TopoDS_Shape>{}(xp.Current())] = (xp.Current());
+#else
this->myCompds[xp.Current().HashCode(INT_MAX)] = (xp.Current());
+#endif
}
if (ctComps == 0) {
for (xp.Init(aShape, TopAbs_FACE, TopAbs_SHELL); xp.More(); xp.Next())
+#if OCC_VERSION_HEX >= 0x070800
+ this->myShapes[std::hash<TopoDS_Shape>{}(xp.Current())] = (xp.Current());
+#else
this->myShapes[xp.Current().HashCode(INT_MAX)] = (xp.Current());
+#endif
for (xp.Init(aShape, TopAbs_WIRE, TopAbs_FACE); xp.More(); xp.Next())
+#if OCC_VERSION_HEX >= 0x070800
+ this->myShapes[std::hash<TopoDS_Shape>{}(xp.Current())] = (xp.Current());
+#else
this->myShapes[xp.Current().HashCode(INT_MAX)] = (xp.Current());
+#endif
for (xp.Init(aShape, TopAbs_EDGE, TopAbs_WIRE); xp.More(); xp.Next())
+#if OCC_VERSION_HEX >= 0x070800
+ this->myShapes[std::hash<TopoDS_Shape>{}(xp.Current())] = (xp.Current());
+#else
this->myShapes[xp.Current().HashCode(INT_MAX)] = (xp.Current());
+#endif
for (xp.Init(aShape, TopAbs_VERTEX, TopAbs_EDGE); xp.More(); xp.Next())
+#if OCC_VERSION_HEX >= 0x070800
+ this->myShapes[std::hash<TopoDS_Shape>{}(xp.Current())] = (xp.Current());
+#else
this->myShapes[xp.Current().HashCode(INT_MAX)] = (xp.Current());
+#endif
}
}
@@ -667,7 +715,11 @@ void ImportXCAF::loadShapes(const TDF_La
hColors->GetColor(label, XCAFDoc_ColorSurf, col) ||
hColors->GetColor(label, XCAFDoc_ColorCurv, col)) {
// add defined color
+#if OCC_VERSION_HEX >= 0x070800
+ myColorMap[std::hash<TopoDS_Shape>{}(aShape)] = col;
+#else
myColorMap[aShape.HashCode(INT_MAX)] = col;
+#endif
}
else {
// http://www.opencascade.org/org/forum/thread_17107/
@@ -677,7 +729,11 @@ void ImportXCAF::loadShapes(const TDF_La
hColors->GetColor(it.Value(), XCAFDoc_ColorSurf, col) ||
hColors->GetColor(it.Value(), XCAFDoc_ColorCurv, col)) {
// add defined color
+#if OCC_VERSION_HEX >= 0x070800
+ myColorMap[std::hash<TopoDS_Shape>{}(it.Value())] = col;
+#else
myColorMap[it.Value().HashCode(INT_MAX)] = col;
+#endif
}
}
}
@@ -690,7 +746,11 @@ void ImportXCAF::loadShapes(const TDF_La
extstr.ToUTF8CString(str);
std::string labelName(str);
if (!labelName.empty())
+#if OCC_VERSION_HEX >= 0x070800
+ myNameMap[std::hash<TopoDS_Shape>{}(aShape)] = labelName;
+#else
myNameMap[aShape.HashCode(INT_MAX)] = labelName;
+#endif
delete [] str;
}
diff --color -rupN a/src/Mod/Import/App/ImportOCAF.h b/src/Mod/Import/App/ImportOCAF.h
--- a/src/Mod/Import/App/ImportOCAF.h 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Import/App/ImportOCAF.h 2024-02-27 15:00:48.252207183 +0100
@@ -29,7 +29,6 @@
#include <XCAFDoc_ShapeTool.hxx>
#include <Quantity_ColorRGBA.hxx>
#include <TopoDS_Shape.hxx>
-#include <TDF_LabelMapHasher.hxx>
#include <climits>
#include <string>
#include <set>
diff --color -rupN a/src/Mod/Import/App/ImportOCAF2.h b/src/Mod/Import/App/ImportOCAF2.h
--- a/src/Mod/Import/App/ImportOCAF2.h 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Import/App/ImportOCAF2.h 2024-02-27 15:06:12.358890276 +0100
@@ -27,7 +27,6 @@
#include <XCAFDoc_ColorTool.hxx>
#include <XCAFDoc_ShapeTool.hxx>
#include <TopoDS_Shape.hxx>
-#include <TDF_LabelMapHasher.hxx>
#include <climits>
#include <string>
#include <set>
@@ -57,13 +56,21 @@ namespace Import {
struct ShapeHasher {
std::size_t operator()(const TopoDS_Shape &s) const {
+#if OCC_VERSION_HEX >= 0x070800
+ return std::hash<TopoDS_Shape>{}(s);
+#else
return s.HashCode(INT_MAX);
+#endif
}
};
struct LabelHasher {
std::size_t operator()(const TDF_Label &l) const {
+#if OCC_VERSION_HEX >= 0x070800
+ return std::hash<TDF_Label> {}(l);
+#else
return TDF_LabelMapHasher::HashCode(l,INT_MAX);
+#endif
}
};
diff --color -rupN a/src/Mod/MeshPart/App/CurveProjector.h b/src/Mod/MeshPart/App/CurveProjector.h
--- a/src/Mod/MeshPart/App/CurveProjector.h 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/MeshPart/App/CurveProjector.h 2024-02-27 15:00:48.252207183 +0100
@@ -20,6 +20,7 @@
* *
***************************************************************************/
+#include <Standard_Version.hxx>
#ifndef _CurveProjector_h_
#define _CurveProjector_h_
@@ -64,8 +65,13 @@ public:
template<class T>
struct TopoDSLess {
- bool operator()(const T& x, const T& y) const {
- return x.HashCode(INT_MAX-1) < y.HashCode(INT_MAX-1);
+ bool operator()(const T& x, const T& y) const {
+#if OCC_VERSION_HEX >= 0x070800
+ std::hash<T> hasher;
+ return hasher(x) < hasher(y);
+#else
+ return x.HashCode(INT_MAX-1) < y.HashCode(INT_MAX-1);
+#endif
}
};
diff --color -rupN a/src/Mod/MeshPart/App/PreCompiled.h b/src/Mod/MeshPart/App/PreCompiled.h
--- a/src/Mod/MeshPart/App/PreCompiled.h 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/MeshPart/App/PreCompiled.h 2024-02-27 15:00:48.252207183 +0100
@@ -76,7 +76,6 @@
#include <Standard_LicenseError.hxx>
#include <Standard_LicenseNotFound.hxx>
#include <Standard_Macro.hxx>
-#include <Standard_math.hxx>
#include <Standard_MultiplyDefined.hxx>
#include <Standard_NegativeValue.hxx>
#include <Standard_NoMoreObject.hxx>
diff --color -rupN a/src/Mod/MeshPart/Gui/CrossSections.cpp b/src/Mod/MeshPart/Gui/CrossSections.cpp
--- a/src/Mod/MeshPart/Gui/CrossSections.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/MeshPart/Gui/CrossSections.cpp 2024-02-27 15:00:48.252207183 +0100
@@ -24,7 +24,6 @@
#include "PreCompiled.h"
#ifndef _PreComp_
# include <sstream>
-# include <Standard_math.hxx>
# include <BRep_Builder.hxx>
# include <BRepBuilderAPI_MakePolygon.hxx>
# include <TopoDS.hxx>
diff --color -rupN a/src/Mod/Part/App/ImportStep.cpp b/src/Mod/Part/App/ImportStep.cpp
--- a/src/Mod/Part/App/ImportStep.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Part/App/ImportStep.cpp 2024-02-27 15:00:48.255540482 +0100
@@ -167,7 +167,12 @@ int Part::ImportStepParts(App::Document
// This is a trick to access the GUI via Python and set the color property
// of the associated view provider. If no GUI is up an exception is thrown
// and cleared immediately
+#if OCC_VERSION_HEX >= 0x070800
+ std::hash<TopoDS_Solid> hasher;
+ std::map<int, Quantity_Color>::iterator it = hash_col.find(hasher(aSolid));
+#else
std::map<int, Quantity_Color>::iterator it = hash_col.find(aSolid.HashCode(INT_MAX));
+#endif
if (it != hash_col.end()) {
try {
Py::Object obj(pcFeature->getPyObject(), true);
diff --color -rupN a/src/Mod/Part/App/OCCError.h b/src/Mod/Part/App/OCCError.h
--- a/src/Mod/Part/App/OCCError.h 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Part/App/OCCError.h 2024-02-27 15:00:48.255540482 +0100
@@ -50,7 +50,6 @@
# include <Standard_Overflow.hxx>
# include <Standard_ProgramError.hxx>
# include <Standard_RangeError.hxx>
-# include <Standard_TooManyUsers.hxx>
# include <Standard_TypeMismatch.hxx>
# include <Standard_Underflow.hxx>
diff --color -rupN a/src/Mod/Part/App/OpenCascadeAll.h b/src/Mod/Part/App/OpenCascadeAll.h
--- a/src/Mod/Part/App/OpenCascadeAll.h 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Part/App/OpenCascadeAll.h 2024-02-27 15:00:48.255540482 +0100
@@ -49,7 +49,6 @@
#include <Standard_LicenseError.hxx>
#include <Standard_LicenseNotFound.hxx>
#include <Standard_Macro.hxx>
-#include <Standard_math.hxx>
#include <Standard_MultiplyDefined.hxx>
#include <Standard_NegativeValue.hxx>
#include <Standard_NoMoreObject.hxx>
diff --color -rupN a/src/Mod/Part/App/TopoShapePyImp.cpp b/src/Mod/Part/App/TopoShapePyImp.cpp
--- a/src/Mod/Part/App/TopoShapePyImp.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Part/App/TopoShapePyImp.cpp 2024-02-27 15:00:48.255540482 +0100
@@ -1321,7 +1321,11 @@ PyObject* TopoShapePy::ancestorsOfType(
TopTools_ListIteratorOfListOfShape it(ancestors);
for (; it.More(); it.Next()) {
// make sure to avoid duplicates
+#if OCC_VERSION_HEX >= 0x070800
+ const size_t code = std::hash<TopoDS_Shape>{}(static_cast<TopoDS_Shape>(it.Value()));
+#else
Standard_Integer code = it.Value().HashCode(INT_MAX);
+#endif
if (hashes.find(code) == hashes.end()) {
list.append(shape2pyshape(it.Value()));
hashes.insert(code);
@@ -1943,7 +1947,11 @@ PyObject* TopoShapePy::hashCode(PyObject
if (!PyArg_ParseTuple(args, "|i",&upper))
return nullptr;
+#if OCC_VERSION_HEX >= 0x070800
+ int hc = std::hash<TopoDS_Shape>{}(getTopoShapePtr()->getShape());
+#else
int hc = getTopoShapePtr()->getShape().HashCode(upper);
+#endif
return Py_BuildValue("i", hc);
}
diff --color -rupN a/src/Mod/Part/Gui/AppPartGui.cpp b/src/Mod/Part/Gui/AppPartGui.cpp
--- a/src/Mod/Part/Gui/AppPartGui.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Part/Gui/AppPartGui.cpp 2024-02-27 15:00:48.255540482 +0100
@@ -11,9 +11,6 @@
#include "PreCompiled.h"
-#ifndef _PreComp_
-# include <Standard_math.hxx>
-#endif
#include <Base/Console.h>
#include <Base/Interpreter.h>
diff --color -rupN a/src/Mod/Part/Gui/Command.cpp b/src/Mod/Part/Gui/Command.cpp
--- a/src/Mod/Part/Gui/Command.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Part/Gui/Command.cpp 2024-02-27 15:00:48.255540482 +0100
@@ -26,7 +26,6 @@
# include <QFileInfo>
# include <QPointer>
# include <QString>
-# include <Standard_math.hxx>
# include <Standard_Version.hxx>
# include <TopExp_Explorer.hxx>
# include <TopoDS_Shape.hxx>
diff --color -rupN a/src/Mod/Part/Gui/CommandSimple.cpp b/src/Mod/Part/Gui/CommandSimple.cpp
--- a/src/Mod/Part/Gui/CommandSimple.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Part/Gui/CommandSimple.cpp 2024-02-27 15:00:48.255540482 +0100
@@ -22,9 +22,6 @@
#include "PreCompiled.h"
-#ifndef _PreComp_
-# include <Standard_math.hxx>
-#endif
#include <App/Document.h>
#include <App/DocumentObject.h>
diff --color -rupN a/src/Mod/Part/Gui/CrossSections.cpp b/src/Mod/Part/Gui/CrossSections.cpp
--- a/src/Mod/Part/Gui/CrossSections.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Part/Gui/CrossSections.cpp 2024-02-27 15:00:48.255540482 +0100
@@ -23,7 +23,6 @@
#include "PreCompiled.h"
#ifndef _PreComp_
-# include <Standard_math.hxx>
# include <BRep_Builder.hxx>
# include <BRepAlgoAPI_Section.hxx>
# include <BRepBuilderAPI_MakeWire.hxx>
diff --color -rupN a/src/Mod/Part/Gui/ViewProvider2DObject.cpp b/src/Mod/Part/Gui/ViewProvider2DObject.cpp
--- a/src/Mod/Part/Gui/ViewProvider2DObject.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Part/Gui/ViewProvider2DObject.cpp 2024-02-27 15:00:48.255540482 +0100
@@ -26,8 +26,6 @@
#ifndef _PreComp_
# include <cfloat>
-# include <Standard_math.hxx>
-
# include <Inventor/nodes/SoAnnotation.h>
# include <Inventor/nodes/SoBaseColor.h>
# include <Inventor/nodes/SoDepthBuffer.h>
diff --color -rupN a/src/Mod/Part/Gui/ViewProviderExt.cpp b/src/Mod/Part/Gui/ViewProviderExt.cpp
--- a/src/Mod/Part/Gui/ViewProviderExt.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Part/Gui/ViewProviderExt.cpp 2024-02-27 15:00:48.255540482 +0100
@@ -978,7 +978,11 @@ void ViewProviderPartExt::updateVisual()
TopExp_Explorer xp;
for (xp.Init(faceMap(i),TopAbs_EDGE);xp.More();xp.Next())
+#if OCC_VERSION_HEX >= 0x070800
+ faceEdges.insert(std::hash<TopoDS_Shape>{}(xp.Current()));
+#else
faceEdges.insert(xp.Current().HashCode(INT_MAX));
+#endif
numFaces++;
}
@@ -1006,7 +1010,11 @@ void ViewProviderPartExt::updateVisual()
// So, we have to store the hashes of the edges associated to a face.
// If the hash of a given edge is not in this list we know it's really
// a free edge.
+#if OCC_VERSION_HEX >= 0x070800
+ int hash = std::hash<TopoDS_Shape>{}(aEdge);
+#else
int hash = aEdge.HashCode(INT_MAX);
+#endif
if (faceEdges.find(hash) == faceEdges.end()) {
Handle(Poly_Polygon3D) aPoly = Part::Tools::polygonOfEdge(aEdge, aLoc);
if (!aPoly.IsNull()) {
@@ -1205,7 +1213,11 @@ void ViewProviderPartExt::updateVisual()
TopLoc_Location aLoc;
// handling of the free edge that are not associated to a face
+#if OCC_VERSION_HEX >= 0x070800
+ int hash = std::hash<TopoDS_Shape>{}(aEdge);
+#else
int hash = aEdge.HashCode(INT_MAX);
+#endif
if (faceEdges.find(hash) == faceEdges.end()) {
Handle(Poly_Polygon3D) aPoly = Part::Tools::polygonOfEdge(aEdge, aLoc);
if (!aPoly.IsNull()) {
diff --color -rupN a/src/Mod/Part/Gui/ViewProviderExt.h b/src/Mod/Part/Gui/ViewProviderExt.h
--- a/src/Mod/Part/Gui/ViewProviderExt.h 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Part/Gui/ViewProviderExt.h 2024-02-27 15:00:48.255540482 +0100
@@ -24,7 +24,6 @@
#ifndef PARTGUI_VIEWPROVIDERPARTEXT_H
#define PARTGUI_VIEWPROVIDERPARTEXT_H
-#include <Standard_math.hxx>
#include <Standard_Boolean.hxx>
#include <TopoDS_Shape.hxx>
#include <TopoDS_Face.hxx>
diff --color -rupN a/src/Mod/Part/Gui/ViewProviderMirror.cpp b/src/Mod/Part/Gui/ViewProviderMirror.cpp
--- a/src/Mod/Part/Gui/ViewProviderMirror.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Part/Gui/ViewProviderMirror.cpp 2024-02-27 15:00:48.255540482 +0100
@@ -27,7 +27,6 @@
# include <QAction>
# include <QMenu>
# include <QTimer>
-# include <Standard_math.hxx>
# include <TopExp.hxx>
# include <TopTools_IndexedMapOfShape.hxx>
# include <TopTools_ListOfShape.hxx>
diff --color -rupN a/src/Mod/Part/Gui/ViewProviderPython.cpp b/src/Mod/Part/Gui/ViewProviderPython.cpp
--- a/src/Mod/Part/Gui/ViewProviderPython.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Part/Gui/ViewProviderPython.cpp 2024-02-27 15:00:48.255540482 +0100
@@ -23,7 +23,6 @@
#include "PreCompiled.h"
-#include <Standard_math.hxx>
#ifndef _PreComp_
# include <Inventor/nodes/SoSeparator.h>
#endif
diff --color -rupN a/src/Mod/Part/Gui/ViewProviderReference.h b/src/Mod/Part/Gui/ViewProviderReference.h
--- a/src/Mod/Part/Gui/ViewProviderReference.h 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Part/Gui/ViewProviderReference.h 2024-02-27 15:00:48.255540482 +0100
@@ -24,7 +24,6 @@
#ifndef PARTGUI_ViewProviderPartReference_H
#define PARTGUI_ViewProviderPartReference_H
-#include <Standard_math.hxx>
#include <Standard_Boolean.hxx>
#include <TopoDS_Shape.hxx>
#include <Gui/ViewProviderGeometryObject.h>
diff --color -rupN a/src/Mod/PartDesign/Gui/PreCompiled.h b/src/Mod/PartDesign/Gui/PreCompiled.h
--- a/src/Mod/PartDesign/Gui/PreCompiled.h 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/PartDesign/Gui/PreCompiled.h 2024-02-27 15:00:48.255540482 +0100
@@ -44,7 +44,6 @@
#include <boost/bind/bind.hpp>
// OCC
-#include <Standard_math.hxx>
#include <Standard_Version.hxx>
#include <Bnd_Box.hxx>
#include <BRepBndLib.hxx>
diff --color -rupN a/src/Mod/Path/App/Voronoi.cpp b/src/Mod/Path/App/Voronoi.cpp
--- a/src/Mod/Path/App/Voronoi.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Path/App/Voronoi.cpp 2024-02-27 15:00:48.255540482 +0100
@@ -23,10 +23,6 @@
#include "PreCompiled.h"
-#ifndef _PreComp_
-# include <Standard_math.hxx>
-#endif
-
#include <Base/Vector3D.h>
#include "Voronoi.h"
diff --color -rupN a/src/Mod/Path/Gui/PreCompiled.h b/src/Mod/Path/Gui/PreCompiled.h
--- a/src/Mod/Path/Gui/PreCompiled.h 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Path/Gui/PreCompiled.h 2024-02-27 15:00:48.258873781 +0100
@@ -38,8 +38,6 @@
# define PathGuiExport
#endif
-#include <Standard_math.hxx>
-
#ifdef _MSC_VER
# pragma warning( disable : 4273 )
#endif
diff --color -rupN a/src/Mod/Robot/Gui/PreCompiled.h b/src/Mod/Robot/Gui/PreCompiled.h
--- a/src/Mod/Robot/Gui/PreCompiled.h 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Robot/Gui/PreCompiled.h 2024-02-27 15:00:48.258873781 +0100
@@ -39,8 +39,6 @@
# define RobotGuiExport
#endif
-#include <Standard_math.hxx>
-
#ifdef _MSC_VER
# pragma warning(disable : 4005)
# pragma warning(disable : 4273)
diff --color -rupN a/src/Mod/Sandbox/Gui/AppSandboxGui.cpp b/src/Mod/Sandbox/Gui/AppSandboxGui.cpp
--- a/src/Mod/Sandbox/Gui/AppSandboxGui.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Sandbox/Gui/AppSandboxGui.cpp 2024-02-27 15:00:48.258873781 +0100
@@ -24,7 +24,6 @@
#include "PreCompiled.h"
#ifndef _PreComp_
# include <Python.h>
-# include <Standard_math.hxx>
# include <Inventor/nodes/SoLineSet.h>
# include <Inventor/nodes/SoBaseColor.h>
# include <Inventor/nodes/SoSeparator.h>
diff --color -rupN a/src/Mod/Sketcher/App/SketchAnalysis.cpp b/src/Mod/Sketcher/App/SketchAnalysis.cpp
--- a/src/Mod/Sketcher/App/SketchAnalysis.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Sketcher/App/SketchAnalysis.cpp 2024-02-27 15:00:48.258873781 +0100
@@ -25,7 +25,6 @@
#include "PreCompiled.h"
#ifndef _PreComp_
-# include <Standard_math.hxx>
# include <BRep_Tool.hxx>
# include <gp_Pnt.hxx>
# include <Precision.hxx>
diff --color -rupN a/src/Mod/Sketcher/Gui/DrawSketchHandler.cpp b/src/Mod/Sketcher/Gui/DrawSketchHandler.cpp
--- a/src/Mod/Sketcher/Gui/DrawSketchHandler.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Sketcher/Gui/DrawSketchHandler.cpp 2024-02-27 15:00:48.258873781 +0100
@@ -24,7 +24,6 @@
#include "PreCompiled.h"
#ifndef _PreComp_
-# include <Standard_math.hxx>
# include <Inventor/nodes/SoTranslation.h>
# include <Inventor/nodes/SoText2.h>
# include <Inventor/nodes/SoFont.h>
diff --color -rupN a/src/Mod/Sketcher/Gui/EditDatumDialog.cpp b/src/Mod/Sketcher/Gui/EditDatumDialog.cpp
--- a/src/Mod/Sketcher/Gui/EditDatumDialog.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Sketcher/Gui/EditDatumDialog.cpp 2024-02-27 15:00:48.258873781 +0100
@@ -23,7 +23,6 @@
#include "PreCompiled.h"
#ifndef _PreComp_
-# include <Standard_math.hxx>
/// Qt Include Files
# include <QApplication>
# include <QDialog>
diff --color -rupN a/src/Mod/Sketcher/Gui/TaskSketcherValidation.cpp b/src/Mod/Sketcher/Gui/TaskSketcherValidation.cpp
--- a/src/Mod/Sketcher/Gui/TaskSketcherValidation.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Sketcher/Gui/TaskSketcherValidation.cpp 2024-02-27 15:00:48.258873781 +0100
@@ -24,7 +24,6 @@
#include "PreCompiled.h"
#ifndef _PreComp_
-# include <Standard_math.hxx>
# include <QDoubleValidator>
# include <QLocale>
# include <QMessageBox>
diff --color -rupN a/src/Mod/Sketcher/Gui/ViewProviderPython.cpp b/src/Mod/Sketcher/Gui/ViewProviderPython.cpp
--- a/src/Mod/Sketcher/Gui/ViewProviderPython.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Sketcher/Gui/ViewProviderPython.cpp 2024-02-27 15:00:48.258873781 +0100
@@ -23,7 +23,6 @@
#include "PreCompiled.h"
-#include <Standard_math.hxx>
#ifndef _PreComp_
# include <Inventor/nodes/SoSeparator.h>
#endif
diff --color -rupN a/src/Mod/Sketcher/Gui/ViewProviderSketch.cpp b/src/Mod/Sketcher/Gui/ViewProviderSketch.cpp
--- a/src/Mod/Sketcher/Gui/ViewProviderSketch.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Sketcher/Gui/ViewProviderSketch.cpp 2024-02-27 15:00:48.258873781 +0100
@@ -24,7 +24,6 @@
#include "PreCompiled.h"
#ifndef _PreComp_
-# include <Standard_math.hxx>
# include <Inventor/actions/SoGetBoundingBoxAction.h>
# include <Inventor/SbBox3f.h>
diff --color -rupN a/src/Mod/Surface/Gui/Command.cpp b/src/Mod/Surface/Gui/Command.cpp
--- a/src/Mod/Surface/Gui/Command.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Surface/Gui/Command.cpp 2024-02-27 15:00:48.258873781 +0100
@@ -31,7 +31,6 @@
#include <QLineEdit>
#include <QMessageBox>
#include <QPointer>
-#include <Standard_math.hxx>
#include <TopoDS_Shape.hxx>
#include <TopoDS_Edge.hxx>
#include <Geom_BezierCurve.hxx>
diff --color -rupN a/src/Mod/TechDraw/Gui/TaskProjection.cpp b/src/Mod/TechDraw/Gui/TaskProjection.cpp
--- a/src/Mod/TechDraw/Gui/TaskProjection.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/TechDraw/Gui/TaskProjection.cpp 2024-02-27 15:00:48.258873781 +0100
@@ -29,9 +29,6 @@
# include <QMessageBox>
#endif
-
-#include <Standard_math.hxx>
-
#include <Gui/Application.h>
#include <Gui/BitmapFactory.h>
#include <Gui/Command.h>
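
Nearly every hunk in the patch above applies the same two-sided guard: OpenCASCADE 7.8 drops the old HashCode(Standard_Integer) members (and the static HashCode()/IsEqual() hasher pairs), so call sites switch to the std::hash specializations newer OCCT provides, while older releases keep the legacy calls. A minimal sketch of that pattern, assuming an OCCT development environment is available; shapeHash and ShapeHasherSketch are illustrative names, not part of FreeCAD or OCCT:

// Sketch of the version guard used throughout the patch above
// (assumes OCCT headers; names are illustrative only).
#include <climits>
#include <cstddef>
#include <functional>

#include <Standard_Version.hxx>
#include <TopoDS_Shape.hxx>

// Call-site form: std::hash on OCCT >= 7.8, member HashCode() before.
inline std::size_t shapeHash(const TopoDS_Shape& shape)
{
#if OCC_VERSION_HEX >= 0x070800
    return std::hash<TopoDS_Shape>{}(shape);
#else
    return shape.HashCode(INT_MAX);
#endif
}

// Hasher-struct form: OCCT 7.8 collections expect functor operator()s
// for hashing and equality instead of static HashCode()/IsEqual().
struct ShapeHasherSketch
{
#if OCC_VERSION_HEX >= 0x070800
    std::size_t operator()(const TopoDS_Shape& s) const noexcept
    { return std::hash<TopoDS_Shape>{}(s); }
    bool operator()(const TopoDS_Shape& a, const TopoDS_Shape& b) const noexcept
    { return a.IsSame(b); }
#else
    static Standard_Integer HashCode(const TopoDS_Shape& s, Standard_Integer upper)
    { return ::HashCode(s, upper); }
    static Standard_Boolean IsEqual(const TopoDS_Shape& a, const TopoDS_Shape& b)
    { return a.IsSame(b); }
#endif
};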

View file

@ -0,0 +1,11 @@
--- ./cMake/FreeCAD_Helpers/ConfigureCMakeVariables.cmake.orig
+++ ./cMake/FreeCAD_Helpers/ConfigureCMakeVariables.cmake
@@ -23,7 +23,7 @@
"Path to the directory containing PyCXX's cxxextensions.c source file")
# used as compiler defines
- set(RESOURCEDIR "${CMAKE_INSTALL_DATADIR}")
+ set(RESOURCEDIR "../../share/freecad")
set(LIBRARYDIR "${CMAKE_INSTALL_LIBDIR}")
set(DOCDIR "${CMAKE_INSTALL_DOCDIR}")

View file

@ -0,0 +1,33 @@
--- ./src/Mod/Mesh/App/MeshTestsApp.py.orig
+++ ./src/Mod/Mesh/App/MeshTestsApp.py
@@ -423,30 +423,6 @@
self.planarMesh = []
FreeCAD.newDocument("MeshTest")
- def testRayPick(self):
- if not FreeCAD.GuiUp:
- return
- self.planarMesh.append( [-16.097176,-29.891157,15.987688] )
- self.planarMesh.append( [-16.176304,-29.859991,15.947966] )
- self.planarMesh.append( [-16.071451,-29.900553,15.912505] )
- self.planarMesh.append( [-16.092241,-29.893408,16.020439] )
- self.planarMesh.append( [-16.007210,-29.926180,15.967641] )
- self.planarMesh.append( [-16.064457,-29.904951,16.090832] )
- planarMeshObject = Mesh.Mesh(self.planarMesh)
-
- from pivy import coin; import FreeCADGui
- Mesh.show(planarMeshObject)
- view=FreeCADGui.ActiveDocument.ActiveView.getViewer()
- rp=coin.SoRayPickAction(view.getSoRenderManager().getViewportRegion())
- rp.setRay(coin.SbVec3f(-16.05,16.0,16.0),coin.SbVec3f(0,-1,0))
- rp.apply(view.getSoRenderManager().getSceneGraph())
- pp=rp.getPickedPoint()
- self.assertTrue(pp != None)
- det=pp.getDetail()
- self.assertTrue(det.getTypeId() == coin.SoFaceDetail.getClassTypeId())
- det=coin.cast(det, det.getTypeId().getName().getString())
- self.assertTrue(det.getFaceIndex() == 1)
-
def testPrimitiveCount(self):
if not FreeCAD.GuiUp:
return

View file

@ -1,8 +1,8 @@
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=freetube
pkgver=0.23.12
pkgrel=1
pkgver=0.21.3
pkgrel=0
pkgdesc="An open source desktop YouTube player built with privacy in mind."
arch="x86_64 aarch64" # blocked by electron
license="AGPL-3.0-only"
@ -50,7 +50,7 @@ package() {
}
sha512sums="
e19c7e8de0c6c5bbddcd3da73cd1907cae7157e8f44f550c4a34965b3b4f3c1a180c111a8c497d74a556d6d8e74e9fdd1ed6e064d4fc899f80712a1f187395ae freetube-0.23.12.tar.gz
22e5ab677cd442d50237b2d62534698d8ad73a37e1731003dc23c4ea3da992b3cae936f0bb3a0a86cd4b7fba731c9fa53276cb0a6cd5bab213ff2a6c9006cb05 freetube-0.21.3.tar.gz
2ce2effc794bb663789cefe968b5899122127983dbfa1b240aa33a2be383720b18204e6d01b4a550df72956f02b6636b79c93a58f470a970b09b770f5b8f2fc4 freetube.sh
d27cb896b65a7e8d52ffe86e5f74eed72b6cf976b28e1a13012d34c7eceba5ff6f20298017738dfa93c0336ffa52b8ee4da7e06b02747062898db7e678819526 tasje-dotdash.patch
"

View file

@ -0,0 +1,43 @@
# Contributor: Aiden Grossman <agrossman154@yahoo.com>
# Maintainer: Aiden Grossman <agrossman154@yahoo.com>
pkgname=libmedc
pkgver=4.1.1
pkgrel=3
pkgdesc="Open source library for numerical simulation"
url="https://www.salome-platform.org/"
arch="all"
license="GPL-3.0-or-later"
makedepends="cmake hdf5-dev swig python3-dev samurai"
options="!check" #test suite is nonfunctional with python bindings
subpackages="$pkgname-dev $pkgname-doc $pkgname-python-pyc $pkgname-python:_py"
source="
https://files.salome-platform.org/Salome/medfile/med-$pkgver.tar.gz
hdf5.patch
cmake-config-dir.patch
"
builddir="$srcdir/med-$pkgver"
build() {
cmake -B build -G Ninja \
-DCMAKE_BUILD_TYPE=None \
-DCMAKE_INSTALL_PREFIX=/usr \
-DMEDFILE_BUILD_TESTS=OFF \
-DMEDFILE_BUILD_PYTHON=ON
cmake --build build
}
package() {
DESTDIR="$pkgdir" cmake --install build
}
_py() {
pkgdesc="Python bindings for libmedc"
depends="python3"
amove usr/lib/python3*
}
sha512sums="
f211fa82750a7cc935baa3a50a55d16e40117a0f2254b482492ba8396d82781ca84960995da7a16b2b5be0b93ce76368bf4b311bb8af0e5f0243e7051c9c554c med-4.1.1.tar.gz
68d9291e73a68d674081314028c0fce7bbd4a7b78b93b7e5078117ce62f2d07318bc33ec95091ce677148ec3926c1ce653d0760c34e74b29257a7be59210f040 hdf5.patch
8d0f58cd67d205fbacaff0e6da76e2ee5473457b478ede13a551ebe5853c0716c7406b74c3792e1ace33a34d352fccca8dd2940f063a7c060a12529d060a991a cmake-config-dir.patch
"

View file

@ -0,0 +1,11 @@
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -101,7 +101,7 @@
IF(WIN32 AND NOT CYGWIN)
SET(INSTALL_CMAKE_CONFIG_DIR cmake)
ELSE()
- SET(INSTALL_CMAKE_CONFIG_DIR share/cmake/medfile-${MED_STR_VERSION})
+ SET(INSTALL_CMAKE_CONFIG_DIR lib${LIB_SUFFIX}/cmake/medfile-${MED_STR_VERSION})
ENDIF()
SET(INSTALL_INCLUDE_DIR include)

View file

@ -0,0 +1,94 @@
Originally from https://gist.github.com/jedbrown/527ef81ff59a0dccf833da40fdd15a47
diff -rupN med-4.1.0/config/cmake_files/medMacros.cmake med-4.1.0-new/config/cmake_files/medMacros.cmake
--- med-4.1.0/config/cmake_files/medMacros.cmake 2021-12-03 09:35:30.675827163 +0100
+++ med-4.1.0-new/config/cmake_files/medMacros.cmake 2021-12-03 09:32:31.894994147 +0100
@@ -447,7 +447,7 @@ MACRO(MED_FIND_HDF5)
##
## Requires 1.10.x version
##
- IF (NOT HDF_VERSION_MAJOR_REF EQUAL 1 OR NOT HDF_VERSION_MINOR_REF EQUAL 10 OR NOT HDF_VERSION_RELEASE_REF GREATER 1)
+ IF (HDF5_VERSION VERSION_LESS 1.10.2)
MESSAGE(FATAL_ERROR "HDF5 version is ${HDF_VERSION_REF}. Only versions >= 1.10.2 are supported.")
ENDIF()
##
diff -rupN med-4.1.0/src/ci/MEDfileCompatibility.c med-4.1.0-new/src/ci/MEDfileCompatibility.c
--- med-4.1.0/src/ci/MEDfileCompatibility.c 2021-12-03 09:35:30.676827162 +0100
+++ med-4.1.0-new/src/ci/MEDfileCompatibility.c 2021-12-03 09:33:26.292942149 +0100
@@ -71,7 +71,7 @@ MEDfileCompatibility(const char* const f
_hversionMMR=10000*_hmajeur+100*_hmineur+_hrelease;
/* ISCRUTE(_hversionMMR); */
/* ISCRUTE(HDF_VERSION_NUM_REF); */
- if ( (_hversionMMR >= HDF_VERSION_NUM_REF) && (_hmineur == HDF_VERSION_MINOR_REF) ) *hdfok = MED_TRUE;
+ if (_hversionMMR >= HDF_VERSION_NUM_REF) *hdfok = MED_TRUE;
/* TODO : Vérifier si la version mineure HDF du fichier est supérieure
à la version mineure de la bibliothèque HDF utilisée :
@@ -113,7 +113,7 @@ MEDfileCompatibility(const char* const f
#if MED_NUM_MAJEUR != 4
#error "Don't forget to update the test version here when you change the major version of the library !"
#endif
-#if H5_VERS_MINOR > 10
+#if H5_VERS_MINOR > 14
#error "Don't forget to check the compatibility version of the library, depending on the internal hdf model choice !"
#error "Cf. _MEDfileCreate ..."
#endif
diff -rupN med-4.1.0/src/hdfi/_MEDfileCreate.c med-4.1.0-new/src/hdfi/_MEDfileCreate.c
--- med-4.1.0/src/hdfi/_MEDfileCreate.c 2021-12-03 09:35:30.677827161 +0100
+++ med-4.1.0-new/src/hdfi/_MEDfileCreate.c 2021-12-03 09:32:31.894994147 +0100
@@ -159,7 +159,7 @@ med_idt _MEDfileCreate(const char * cons
* En HDF5-1.10.0p1 cela n'a aucun effet !
* Un test autoconf permet de fixer un intervalle de version HDF à MED.
*/
-#if H5_VERS_MINOR > 10
+#if H5_VERS_MINOR > 14
#error "Don't forget to change the compatibility version of the library !"
#endif
diff -rupN med-4.1.0/src/hdfi/_MEDfileOpen.c med-4.1.0-new/src/hdfi/_MEDfileOpen.c
--- med-4.1.0/src/hdfi/_MEDfileOpen.c 2021-12-03 09:35:30.677827161 +0100
+++ med-4.1.0-new/src/hdfi/_MEDfileOpen.c 2021-12-03 09:32:31.894994147 +0100
@@ -72,7 +72,7 @@ med_idt _MEDfileOpen(const char * const
• The creation order tracking property, H5P_CRT_ORDER_TRACKED, has been set in the group creation property list (see H5Pset_link_creation_order).
*/
-#if H5_VERS_MINOR > 10
+#if H5_VERS_MINOR > 14
#error "Don't forget to change the compatibility version of the library !"
#endif
/* L'avantage de bloquer le modèle interne HDF5
diff -rupN med-4.1.0/src/hdfi/_MEDmemFileOpen.c med-4.1.0-new/src/hdfi/_MEDmemFileOpen.c
--- med-4.1.0/src/hdfi/_MEDmemFileOpen.c 2021-12-03 09:35:30.678827160 +0100
+++ med-4.1.0-new/src/hdfi/_MEDmemFileOpen.c 2021-12-03 09:32:31.894994147 +0100
@@ -434,7 +434,7 @@ med_idt _MEDmemFileOpen(const char * con
goto ERROR;
}
-#if H5_VERS_MINOR > 10
+#if H5_VERS_MINOR > 14
#error "Don't forget to change the compatibility version of the library !"
#endif
if ( H5Pset_libver_bounds( _fapl, H5F_LIBVER_18, H5F_LIBVER_18) ) {
diff -rupN med-4.1.0/src/hdfi/_MEDparFileCreate.c med-4.1.0-new/src/hdfi/_MEDparFileCreate.c
--- med-4.1.0/src/hdfi/_MEDparFileCreate.c 2021-12-03 09:35:30.678827160 +0100
+++ med-4.1.0-new/src/hdfi/_MEDparFileCreate.c 2021-12-03 09:32:31.894994147 +0100
@@ -64,7 +64,7 @@ med_idt _MEDparFileCreate(const char * c
* En HDF5-1.10.0p1 cela n'a aucun effet !
* Un test autoconf permet de fixer un intervalle de version HDF à MED.
*/
-#if H5_VERS_MINOR > 10
+#if H5_VERS_MINOR > 14
#error "Don't forget to change the compatibility version of the library !"
#endif
diff -rupN med-4.1.0/src/hdfi/_MEDparFileOpen.c med-4.1.0-new/src/hdfi/_MEDparFileOpen.c
--- med-4.1.0/src/hdfi/_MEDparFileOpen.c 2021-12-03 09:35:30.679827159 +0100
+++ med-4.1.0-new/src/hdfi/_MEDparFileOpen.c 2021-12-03 09:32:31.894994147 +0100
@@ -55,7 +55,7 @@ med_idt _MEDparFileOpen(const char * con
MED_ERR_(_fid,MED_ERR_INIT,MED_ERR_PROPERTY,MED_ERR_PARALLEL_MSG);
goto ERROR;
}
-#if H5_VERS_MINOR > 10
+#if H5_VERS_MINOR > 14
#error "Don't forget to change the compatibility version of the library !"
#endif
if ( H5Pset_libver_bounds( _fapl, H5F_LIBVER_18, H5F_LIBVER_18 ) ) {

View file

@ -0,0 +1,40 @@
# Contributor: Anjandev Momi <anjan@momi.ca>
# Maintainer: Anjandev Momi <anjan@momi.ca>
pkgname=libnest2d
pkgver=0.4
pkgrel=6
pkgdesc="2D irregular bin packaging and nesting library written in modern C++"
url="https://github.com/tamasmeszaros/libnest2d"
arch="noarch"
license="LGPL-3.0-only"
makedepends="samurai cmake clipper-dev boost-dev nlopt-dev"
subpackages="$pkgname-dev"
source="$pkgname-$pkgver.tar.gz::https://github.com/tamasmeszaros/libnest2d/archive/refs/tags/$pkgver.tar.gz
allow-disallowed-area.patch"
build() {
if [ "$CBUILD" != "$CHOST" ]; then
CMAKE_CROSSOPTS="-DCMAKE_SYSTEM_NAME=Linux -DCMAKE_HOST_SYSTEM_NAME=Linux"
fi
cmake -B build -G Ninja \
-DCMAKE_INSTALL_PREFIX=/usr \
-DCMAKE_INSTALL_LIBDIR=lib \
-DBUILD_SHARED_LIBS=True \
-DCMAKE_BUILD_TYPE=minsizerel \
$CMAKE_CROSSOPTS .
cmake --build build
}
check() {
cd build
CTEST_OUTPUT_ON_FAILURE=TRUE ctest
}
package() {
DESTDIR="$pkgdir" cmake --install build
}
sha512sums="
fadce18986b844eed13a581f84055df909a17407a0980deb6c7c24248a969a537a8840650bcfc673e61973810ce9a008acb599e3b8e00c9bff6b566ca41cd62c libnest2d-0.4.tar.gz
2e8cd3343c72c576ecb54960d7ad9f4f2322f822b19ac41850b3b28da95e97c2cefe7c67de6c97627df08cd5cdc1660ce4dfa95fe51f88e0ff5c066c8d785458 allow-disallowed-area.patch
"

View file

@ -0,0 +1,124 @@
From 2e91be2679b5efa0773292d9d0a2ae72255bb271 Mon Sep 17 00:00:00 2001
From: Ghostkeeper <rubend@tutanota.com>
Date: Tue, 6 Oct 2020 16:13:15 +0200
Subject: [PATCH 1/3] Allow for an item to be a disallowed area
url: https://github.com/tamasmeszaros/libnest2d/pull/18
Disallowed areas have slightly different behaviour from fixed items: Other items won't get packed closely around them. Implementation of that pending.
Contributes to issue CURA-7754.
---
include/libnest2d/nester.hpp | 16 ++++++++++++++++
1 file changed, 16 insertions(+)
diff --git a/include/libnest2d/nester.hpp b/include/libnest2d/nester.hpp
index 2f207d5..932a060 100644
--- a/include/libnest2d/nester.hpp
+++ b/include/libnest2d/nester.hpp
@@ -71,6 +71,15 @@ class _Item {
int binid_{BIN_ID_UNSET}, priority_{0};
bool fixed_{false};
+ /**
+ * \brief If this is a fixed area, indicates whether it is a disallowed area
+ * or a previously placed item.
+ *
+ * If this is a disallowed area, other objects will not get packed close
+ * together with this item. It only blocks other items in its area.
+ */
+ bool disallowed_{false};
+
public:
/// The type of the shape which was handed over as the template argument.
@@ -129,11 +138,18 @@ class _Item {
sh_(sl::create<RawShape>(std::move(contour), std::move(holes))) {}
inline bool isFixed() const noexcept { return fixed_; }
+ inline bool isDisallowedArea() const noexcept { return disallowed_; }
inline void markAsFixedInBin(int binid)
{
fixed_ = binid >= 0;
binid_ = binid;
}
+ inline void markAsDisallowedAreaInBin(int binid)
+ {
+ fixed_ = binid >= 0;
+ binid_ = binid;
+ disallowed_ = true;
+ }
inline void binId(int idx) { binid_ = idx; }
inline int binId() const noexcept { return binid_; }
From ff61049e59d3151462bca7ff2e2268c2b32731e7 Mon Sep 17 00:00:00 2001
From: Ghostkeeper <rubend@tutanota.com>
Date: Tue, 6 Oct 2020 16:14:36 +0200
Subject: [PATCH 2/3] Allow unsetting of being a disallowed area
If you set the bin to -1 or set the item to be a simple fixed item afterwards, it'll no longer be a disallowed area.
Contributes to issue CURA-7754.
---
include/libnest2d/nester.hpp | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/include/libnest2d/nester.hpp b/include/libnest2d/nester.hpp
index 932a060..54761a6 100644
--- a/include/libnest2d/nester.hpp
+++ b/include/libnest2d/nester.hpp
@@ -143,12 +143,13 @@ class _Item {
{
fixed_ = binid >= 0;
binid_ = binid;
+ disallowed_ = false;
}
inline void markAsDisallowedAreaInBin(int binid)
{
fixed_ = binid >= 0;
binid_ = binid;
- disallowed_ = true;
+ disallowed_ = fixed_;
}
inline void binId(int idx) { binid_ = idx; }
From 31391fd173249ad9b906390058e13b09238fadc8 Mon Sep 17 00:00:00 2001
From: Ghostkeeper <rubend@tutanota.com>
Date: Thu, 8 Oct 2020 11:06:58 +0200
Subject: [PATCH 3/3] Align items to their starting position if all placed
items are disallowed
We shouldn't align items to disallowed areas. So place them in the starting position according to the alignment property.
Lot of work to investigate. But very little code changes!
Contributes to issue CURA-7754.
---
include/libnest2d/placers/nfpplacer.hpp | 5 +++--
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/include/libnest2d/placers/nfpplacer.hpp b/include/libnest2d/placers/nfpplacer.hpp
index 96a8cff..b0ebb15 100644
--- a/include/libnest2d/placers/nfpplacer.hpp
+++ b/include/libnest2d/placers/nfpplacer.hpp
@@ -101,7 +101,7 @@ struct NfpPConfig {
* alignment with the candidate item or do anything else.
*
* \param remaining A container with the remaining items waiting to be
- * placed. You can use some features about the remaining items to alter to
+ * placed. You can use some features about the remaining items to alter the
* score of the current placement. If you know that you have to leave place
* for other items as well, that might influence your decision about where
* the current candidate should be placed. E.g. imagine three big circles
@@ -735,7 +735,8 @@ class _NofitPolyPlacer: public PlacerBoilerplate<_NofitPolyPlacer<RawShape, TBin
remlist.insert(remlist.end(), remaining.from, remaining.to);
}
- if(items_.empty()) {
+ if(std::all_of(items_.begin(), items_.end(),
+ [](const Item& item) { return item.isDisallowedArea(); })) {
setInitialPosition(item);
best_overfit = overfit(item.transformedShape(), bin_);
can_pack = best_overfit <= 0;
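
The last hunk above replaces if(items_.empty()) with a std::all_of test over isDisallowedArea(). Because std::all_of returns true for an empty range, the empty-bin case behaves exactly as before, and a bin that so far holds only disallowed areas now takes the same fallback: the candidate item is dropped at its configured starting position instead of being aligned to the disallowed regions. A self-contained sketch of that predicate (plain standard C++; PlacedItem is a stand-in, not the real libnest2d item type):

// Standalone illustration of the std::all_of check added above.
// PlacedItem is a stand-in for libnest2d's _Item, not the real type.
#include <algorithm>
#include <iostream>
#include <vector>

struct PlacedItem {
    bool disallowed;
    bool isDisallowedArea() const { return disallowed; }
};

// True when nothing "real" has been placed yet: either the bin is empty
// (std::all_of over an empty range is true) or it only holds disallowed areas.
bool onlyDisallowedSoFar(const std::vector<PlacedItem>& placed)
{
    return std::all_of(placed.begin(), placed.end(),
                       [](const PlacedItem& it) { return it.isDisallowedArea(); });
}

int main()
{
    std::vector<PlacedItem> placed;                    // empty bin
    std::cout << onlyDisallowedSoFar(placed) << '\n';  // 1 -> use starting position
    placed.push_back(PlacedItem{true});                // a disallowed area
    std::cout << onlyDisallowedSoFar(placed) << '\n';  // 1 -> still use starting position
    placed.push_back(PlacedItem{false});               // an actually placed item
    std::cout << onlyDisallowedSoFar(placed) << '\n';  // 0 -> normal NFP placement
}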

View file

@ -0,0 +1,33 @@
# Contributor: Alex Yam <alex@alexyam.com>
# Maintainer: Alex Yam <alex@alexyam.com>
pkgname=libspatialindex
pkgver=0_git20210205
_commit=8ee223632f95c81f49f5eb2d547ad973475c4601
pkgrel=1
pkgdesc="extensible framework for robust spatial indexing methods"
url="https://libspatialindex.org/"
arch="all"
license="MIT"
makedepends="cmake"
subpackages="$pkgname-dev"
source="$pkgname-$_commit.tar.gz::https://github.com/libspatialindex/libspatialindex/archive/$_commit.tar.gz"
builddir="$srcdir/$pkgname-$_commit"
build() {
cmake -B build \
-DCMAKE_BUILD_TYPE=MinSizeRel \
-DCMAKE_PREFIX_PATH=/usr \
-DCMAKE_INSTALL_PREFIX=/usr \
-DBUILD_TESTING=ON
cmake --build build
}
check() {
cd build && ctest
}
package() {
DESTDIR="$pkgdir" cmake --build build --target install
}
sha512sums="caf91aac77b75445e4fc4d0baedcd10c619b2097dfd841b00339d9ddd4b73db05b99de1d84be88f1083f4713a936cf110d5851523491f5a74c6f96e1d5795dbb libspatialindex-8ee223632f95c81f49f5eb2d547ad973475c4601.tar.gz"

View file

@ -0,0 +1,93 @@
# Contributor: Rogério da Silva Yokomizo <me@ro.ger.io>
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
# Maintainer: Rogério da Silva Yokomizo <me@ro.ger.io>
pkgname=looking-glass
_gittag=b7_git20240607
pkgver=7b_git20240607
pkgrel=0
pkgdesc="Allows the use of a KVM configured for VGA PCI Pass-through without an attached physical monitor, keyboard or mouse"
url="https://looking-glass.io/"
arch="x86_64"
license="GPL-2.0-or-later"
makedepends="
cmake
fontconfig-dev
libsamplerate-dev
libx11-dev
libxcursor-dev
libxfixes-dev
libxi-dev
libxinerama-dev
libxkbcommon-dev
libxpresent-dev
libxscrnsaver-dev
nettle-dev
obs-studio-dev
pipewire-dev
pulseaudio-dev
samurai
spice-dev
wayland-dev
wayland-protocols
"
source="$pkgname-$_gittag.tar.gz::https://lab.ilot.io/mirrors/looking-glass/-/releases/$_gittag/downloads/tarball/looking-glass-$_gittag.tar.gz
missing-includes.patch
obs-plugins-lib.patch
werror.patch
"
subpackages="$pkgname-obs $pkgname-module"
builddir="$srcdir/$pkgname-$_gittag"
options="!check" # There are no tests nor --version.
build() {
cmake -S client -B build-client -G Ninja \
-DENABLE_BACKTRACE=OFF \
-DOPTIMIZE_FOR_NATIVE=OFF \
-DCMAKE_INSTALL_PREFIX=/usr
cmake -S obs -B build-obs -G Ninja \
-DENABLE_BACKTRACE=OFF \
-DOPTIMIZE_FOR_NATIVE=OFF \
-DCMAKE_INSTALL_PREFIX=/usr
cmake --build build-client
cmake --build build-obs
}
package() {
DESTDIR="$pkgdir" cmake --install build-client
DESTDIR="$pkgdir" cmake --install build-obs
}
module() {
pkgdesc="Looking Glass kernel module (AKMS)"
depends="akms"
install_if="looking-glass=$pkgver-r$pkgrel"
_modver=$(awk -F "=" '{if($1=="PACKAGE_VERSION"){print $2}}' src/looking-glass-B6/module/dkms.conf | tr -d '"')
install -Dm644 "$builddir"/module/Makefile "$subpkgdir"/usr/src/looking-glass/Makefile
install -Dm644 "$builddir"/module/kvmfr* "$subpkgdir"/usr/src/looking-glass/.
cat ->> "$subpkgdir"/usr/src/looking-glass/AKMBUILD <<EOF
modname=kvmfr
modver=$pkgver
built_modules='kvmfr.ko'
EOF
chmod -R u=rwX,go=rX-w "$subpkgdir"/usr/src/looking-glass
mkdir -p "$subpkgdir"/etc/udev/rules.d
echo 'SUBSYSTEM=="kvmfr", OWNER="root", GROUP="kvm", MODE="0660"' > "$subpkgdir"/etc/udev/rules.d/99-kvmfr.rules
}
obs() {
pkgdesc="$pkgdesc (obs plugin)"
amove usr/lib/obs-plugins
}
sha512sums="
959f49c91dc7bb06dfae890547bfbd1c02bd4154f4ba1c898a12d15a3579658d65fcb9fc4b951c04180e17fc9151e551858e0fb60f20e3f1a72d19b86c7dc3db looking-glass-b7_git20240607.tar.gz
6d2449764a8316dd3c1b5cc0aa552671068f89ed2f95297f3c5256af8529b93e5ec7af8f979bd2e744fd09b11063e8a93f3ed26284f0e49294e467ca10f6e772 missing-includes.patch
33c5463412a16691f47d7833ebf81d7cf20c560a077dca141dcc9f02a5d6dfb676e483835f39a06012b114be9f509dda4614fe253bb1c72a0142e82dc265a5ca obs-plugins-lib.patch
b952d1fd284aed15bcfe7990f160dec3a4565fb5833ce339920f62de6bb46fbc09265a0a79fe80d212eecc6a1813614e1e193a8846c37e2afd18431dc3a89ca4 werror.patch
"

View file

@ -0,0 +1,2 @@
#!/bin/sh
exec /usr/sbin/akms uninstall kvmfr

View file

@ -0,0 +1,92 @@
--- a/repos/PureSpice/src/agent.c
+++ b/repos/PureSpice/src/agent.c
@@ -31,6 +31,7 @@ Place, Suite 330, Boston, MA 02111-1307
#include <unistd.h>
#include <stdio.h>
#include <stdlib.h>
+#include <string.h>
#include <assert.h>
#include <sys/ioctl.h>
--- a/repos/PureSpice/src/channel_cursor.c
+++ b/repos/PureSpice/src/channel_cursor.c
@@ -25,6 +25,7 @@ Place, Suite 330, Boston, MA 02111-1307
#include "channel_cursor.h"
#include <stdlib.h>
+#include <string.h>
#include "messages.h"
--- a/repos/PureSpice/src/channel_display.c
+++ b/repos/PureSpice/src/channel_display.c
@@ -19,6 +19,7 @@ Place, Suite 330, Boston, MA 02111-1307
#include "purespice.h"
#include <stdlib.h>
+#include <string.h>
#include "ps.h"
#include "log.h"
--- a/repos/PureSpice/src/channel_inputs.c
+++ b/repos/PureSpice/src/channel_inputs.c
@@ -25,6 +25,7 @@ Place, Suite 330, Boston, MA 02111-1307
#include "messages.h"
#include <stdlib.h>
+#include <string.h>
const SpiceLinkHeader * channelInputs_getConnectPacket(void)
{
--- a/repos/PureSpice/src/channel_main.c
+++ b/repos/PureSpice/src/channel_main.c
@@ -24,6 +24,7 @@ Place, Suite 330, Boston, MA 02111-1307
#include "messages.h"
#include <stdlib.h>
+#include <string.h>
struct ChannelMain
{
--- a/repos/PureSpice/src/channel_playback.c
+++ b/repos/PureSpice/src/channel_playback.c
@@ -26,6 +26,8 @@ Place, Suite 330, Boston, MA 02111-1307
#include "messages.h"
+#include <string.h>
+
const SpiceLinkHeader * channelPlayback_getConnectPacket(void)
{
typedef struct
--- a/repos/PureSpice/src/channel_record.c
+++ b/repos/PureSpice/src/channel_record.c
@@ -26,6 +26,8 @@ Place, Suite 330, Boston, MA 02111-1307
#include "messages.h"
+#include <string.h>
+
const SpiceLinkHeader * channelRecord_getConnectPacket(void)
{
typedef struct
--- a/repos/PureSpice/src/log.c
+++ b/repos/PureSpice/src/log.c
@@ -25,6 +25,7 @@ Place, Suite 330, Boston, MA 02111-1307
#include <stdarg.h>
#include <stdio.h>
+#include <string.h>
static void log_stdout(const char * file, unsigned int line,
const char * function, const char * format, ...)
--- a/repos/PureSpice/src/ps.c
+++ b/repos/PureSpice/src/ps.c
@@ -37,6 +37,7 @@ Place, Suite 330, Boston, MA 02111-1307
#include <unistd.h>
#include <stdio.h>
#include <stdlib.h>
+#include <string.h>
#include <assert.h>
#include <errno.h>

Some files were not shown because too many files have changed in this diff