Compare commits

2 commits

SHA1        Message                                  Date
eb25e86ee2  zotero: upgrade to 20240131              2024-01-31 14:54:14 -05:00
4ad3ef905b  user/firefox-esr: upgrade to 102.15.1    2024-01-31 14:44:27 -05:00

376 changed files with 20891 additions and 3980 deletions

View file

@ -1,41 +0,0 @@
#!/bin/bash
# expects the following env variables:
# downstream: downstream repo
repo=${downstream/*\/}
curl --silent $downstream/x86_64/APKINDEX.tar.gz | tar -O -zx APKINDEX > APKINDEX
if [ "$ALL_PACKAGES" == "true" ]; then
owned_by_you=$(awk -F ':' '{if($1=="o"){print $2}}' APKINDEX | sort | uniq)
echo "Found $(printf '%s\n' $owned_by_you | wc -l ) packages"
else
owned_by_you=$(awk -v RS= -v ORS="\n\n" '/m:Antoine Martin \(ayakael\) <dev@ayakael.net>/' APKINDEX | awk -F ':' '{if($1=="o"){print $2}}' | sort | uniq)
echo "Found $(printf '%s\n' $owned_by_you | wc -l ) packages owned by you"
fi
rm -f out_of_date not_in_anitya
for pkg in $owned_by_you; do
upstream_version=$(curl --fail -X GET -sS -H 'Content-Type: application/json' "https://release-monitoring.org/api/v2/packages/?name=$pkg&distribution=Alpine" | jq -r '.items.[].stable_version')
downstream_version=$(sed -n "/^P:$pkg$/,/^$/p" APKINDEX | awk -F ':' '{if($1=="V"){print $2}}' | sort -V | tail -n 1)
downstream_version=${downstream_version/-*}
# special cases
case $pkg in
freetube) upstream_version=$(curl --fail -X GET -sS -H 'Content-Type: application/json' "https://release-monitoring.org/api/v2/packages/?name=$pkg&distribution=Alpine" | jq -r '.items.[].version' | sed "s|-beta||");;
dotnet9-sdk|dotnet9-stage0) upstream_version=${upstream_version/-*};;
esac
if [ -z "$upstream_version" ]; then
echo "$pkg not in anitya"
echo "$pkg" >> not_in_anitya
elif [ "$downstream_version" != "$(printf '%s\n' $upstream_version $downstream_version | sort -V | head -n 1)" ]; then
echo "$pkg higher downstream"
continue
elif [ "$upstream_version" != "$downstream_version" ]; then
echo "$pkg upstream version $upstream_version does not match downstream version $downstream_version"
echo "$pkg $downstream_version $upstream_version $repo" >> out_of_date
fi
done
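For orientation, a hedged sketch of how this checker could be run by hand with the same environment the scheduled workflows below provide (the script path and repository URL are taken from those workflow definitions):

```shell
# Hypothetical manual run of the version checker against the user repo.
# "downstream" selects the apk repository to audit; ALL_PACKAGES=true checks
# every origin package in the APKINDEX instead of only ayakael's packages.
export downstream=https://ayakael.net/api/packages/forge/alpine/edge/user
export ALL_PACKAGES=true
./.forgejo/bin/check_ver.sh
# Results are written to ./out_of_date and ./not_in_anitya for create_issue.sh
```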

View file

@ -1,17 +0,0 @@
#!/bin/sh
TARGET_REPO=$1
ARCH=$2
curl --silent $TARGET_REPO/$ARCH/APKINDEX.tar.gz | tar -O -zx APKINDEX > APKINDEX
pkgs=$(awk -F ':' '{if($1=="o"){print $2}}' APKINDEX | sort | uniq)
for pkg in $pkgs; do
pkgvers=$(sed -n "/^P:$pkg$/,/^$/p" APKINDEX | awk -F ':' '{if($1=="V"){print $2}}')
for pkgver in $pkgvers; do
echo "Deleting $pkg-$pkgver of arch $ARCH from $TARGET_REPO"
curl -s --user $FORGE_REPO_USER:$FORGE_REPO_TOKEN -X DELETE $TARGET_REPO/$ARCH/$pkg-$pkgver.apk
done
done
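A hedged usage sketch for this cleanup helper (its filename is not visible in this view, so the path below is a placeholder); it deletes every version of every package for one architecture from the target repository:

```shell
# Hypothetical invocation; credentials are Forgejo package-registry secrets.
export FORGE_REPO_USER='<user>'
export FORGE_REPO_TOKEN='<personal access token>'
./.forgejo/bin/clean.sh https://ayakael.net/api/packages/forge/alpine/edge/user x86_64
```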

View file

@ -1,165 +0,0 @@
#!/bin/bash
# expects:
# env variable FORGEJO_TOKEN
# file out_of_date
IFS='
'
repo=${downstream/*\/}
does_it_exist() {
name=$1
downstream_version=$2
upstream_version=$3
repo=$4
query="$repo/$name: upgrade to $upstream_version"
query="$(echo $query | sed 's| |%20|g' | sed 's|:|%3A|g' | sed 's|/|%2F|g' )"
result="$(curl --silent -X 'GET' \
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues?state=open&q=$query&type=issues" \
-H 'accept: application/json' \
-H "authorization: Basic $FORGEJO_TOKEN"
)"
if [ "$result" == "[]" ]; then
return 1
fi
}
is_it_old() {
name=$1
downstream_version=$2
upstream_version=$3
repo=$4
query="$repo/$name: upgrade to"
query="$(echo $query | sed 's| |%20|g' | sed 's|:|%3A|g' | sed 's|/|%2F|g' )"
result="$(curl --silent -X 'GET' \
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues?state=open&q=$query&type=issues" \
-H 'accept: application/json' \
-H "authorization: Basic $FORGEJO_TOKEN"
)"
result_title="$(echo $result | jq -r '.[].title' )"
result_id="$(echo $result | jq -r '.[].number' )"
result_upstream_version="$(echo $result_title | awk '{print $4}')"
if [ "$upstream_version" != "$result_upstream_version" ]; then
echo $result_id
else
echo 0
fi
}
update_title() {
name=$1
downstream_version=$2
upstream_version=$3
repo=$4
id=$5
result=$(curl --silent -X 'PATCH' \
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues/$id" \
-H 'accept: application/json' \
-H "authorization: Basic $FORGEJO_TOKEN" \
-H 'Content-Type: application/json' \
-d "{
\"title\": \"$repo/$name: upgrade to $upstream_version\"
}"
)
return 0
}
create_issue() {
name=$1
downstream_version=$2
upstream_version=$3
repo=$4
result=$(curl --silent -X 'POST' \
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues" \
-H 'accept: application/json' \
-H "authorization: Basic $FORGEJO_TOKEN" \
-H 'Content-Type: application/json' \
-d "{
\"title\": \"$repo/$name: upgrade to $upstream_version\",
\"labels\": [
$LABEL_NUMBER
]
}")
return 0
}
if [ -f out_of_date ]; then
out_of_date="$(cat out_of_date)"
echo "Detected $(wc -l out_of_date) out-of-date packages, creating issues"
for pkg in $out_of_date; do
name="$(echo $pkg | awk '{print $1}')"
downstream_version="$(echo $pkg | awk '{print $2}')"
upstream_version="$(echo $pkg | awk '{print $3}')"
repo="$(echo $pkg | awk '{print $4}')"
if does_it_exist $name $downstream_version $upstream_version $repo; then
echo "Issue for $repo/$name already exists"
continue
fi
id=$(is_it_old $name $downstream_version $upstream_version $repo)
if [ "$id" != "0" ] && [ -n "$id" ]; then
echo "Issue for $repo/$name needs updating"
update_title $name $downstream_version $upstream_version $repo $id
continue
fi
echo "Creating issue for $repo/$name"
create_issue $name $downstream_version $upstream_version $repo
done
fi
if [ -f not_in_anitya ]; then
query="Add missing $repo packages to anitya"
query="$(echo $query | sed 's| |%20|g')"
result="$(curl --silent -X 'GET' \
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues?state=open&q=$query&type=issues" \
-H 'accept: application/json' \
-H "authorization: Basic $FORGEJO_TOKEN"
)"
if [ "$result" == "[]" ]; then
echo "Creating anitya issue"
result=$(curl --silent -X 'POST' \
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues" \
-H 'accept: application/json' \
-H "authorization: Basic $FORGEJO_TOKEN" \
-H 'Content-Type: application/json' \
-d "{
\"title\": \"Add missing $repo packages to anitya\",
\"body\": \"- [ ] $(sed '{:q;N;s/\n/\\n- [ ] /g;t q}' not_in_anitya)\",
\"labels\": [
$LABEL_NUMBER
]
}")
else
echo "Updating anitya issue"
result_id="$(echo $result | jq -r '.[].number' )"
result=$(curl --silent -X 'PATCH' \
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues/$result_id" \
-H 'accept: application/json' \
-H "authorization: Basic $FORGEJO_TOKEN" \
-H 'Content-Type: application/json' \
-d "{
\"body\": \"- [ ] $(sed '{:q;N;s/\n/\\n- [ ] /g;t q}' not_in_anitya)\"
}"
)
fi
fi
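A hedged sketch of running the issue creator locally, assuming `out_of_date` / `not_in_anitya` were just produced by the checker above. The repository slug is a placeholder, and since the script sends the token in a `Basic` authorization header, `FORGEJO_TOKEN` is presumably a base64-encoded `user:token` pair:

```shell
# Hypothetical local run of the issue creator.
export GITHUB_SERVER_URL=https://ayakael.net
export GITHUB_REPOSITORY='<owner>/<repo>'          # placeholder slug
export FORGEJO_TOKEN='<base64 user:token>'
export LABEL_NUMBER=4                               # label id used by the check workflows
export downstream=https://dl-cdn.alpinelinux.org/alpine/edge/community
./.forgejo/bin/create_issue.sh
```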

View file

@ -1,26 +0,0 @@
#!/bin/sh
# shellcheck disable=SC3040
set -eu -o pipefail
readonly REPOS="backports user"
readonly BASEBRANCH=$GITHUB_BASE_REF
readonly TARGET_REPO=$CI_ALPINE_REPO
apkgs=$(find package -type f -name "*.apk")
for apk in $apkgs; do
branch=$(echo $apk | awk -F '/' '{print $2}')
arch=$(echo $apk | awk -F '/' '{print $3}')
name=$(echo $apk | awk -F '/' '{print $4}')
echo "Sending $name of arch $arch to $TARGET_REPO/$BASEBRANCH/$branch"
return=$(curl -s --user $FORGE_REPO_USER:$FORGE_REPO_TOKEN --upload-file $apk $TARGET_REPO/$BASEBRANCH/$branch 2>&1)
echo $return
if [ "$return" == "package file already exists" ]; then
echo "Package already exists, refreshing..."
curl -s --user $FORGE_REPO_USER:$FORGE_REPO_TOKEN -X DELETE $TARGET_REPO/$BASEBRANCH/$branch/$arch/$name
curl -s --user $FORGE_REPO_USER:$FORGE_REPO_TOKEN --upload-file $apk $TARGET_REPO/$BASEBRANCH/$branch
fi
done
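A hedged sketch of what the deploy step amounts to when run by hand, assuming the workspace contains the downloaded artifacts laid out as `package/<branch>/<arch>/<name>.apk`, as the workflows below produce:

```shell
# Hypothetical manual run of the deploy script.
export CI_ALPINE_REPO=https://ayakael.net/api/packages/forge/alpine
export GITHUB_BASE_REF=edge            # release branch the PR targets
export FORGE_REPO_USER='<user>'
export FORGE_REPO_TOKEN='<token>'
./.forgejo/bin/deploy.sh
```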

View file

@ -1,52 +0,0 @@
on:
pull_request:
types: [ assigned, opened, synchronize, reopened ]
jobs:
build-aarch64:
runs-on: aarch64
container:
image: alpinelinux/alpine-gitlab-ci:latest
env:
CI_PROJECT_DIR: ${{ github.workspace }}
CI_DEBUG_BUILD: ${{ runner.debug }}
CI_MERGE_REQUEST_PROJECT_URL: ${{ github.server_url }}/${{ github.repository }}
CI_MERGE_REQUEST_TARGET_BRANCH_NAME: ${{ github.base_ref }}
steps:
- name: Environment setup
run: |
doas apk add nodejs git patch curl
cd /etc/apk/keys
doas curl -JO https://ayakael.net/api/packages/forge/alpine/key
- name: Repo pull
uses: actions/checkout@v4
with:
fetch-depth: 500
- name: Package build
run: |
${{ github.workspace }}/.forgejo/bin/build.sh
touch packages/dummy
- name: Package upload
uses: forgejo/upload-artifact@v3
with:
name: package
path: packages
deploy-aarch64:
needs: [build-aarch64]
runs-on: aarch64
container:
image: alpine:latest
env:
CI_ALPINE_REPO: 'https://ayakael.net/api/packages/forge/alpine'
FORGE_REPO_TOKEN: ${{ secrets.FORGE_REPO_TOKEN }}
FORGE_REPO_USER: ${{ vars.FORGE_REPO_USER }}
steps:
- name: Setting up environment
run: apk add nodejs curl findutils git gawk
- name: Repo pull
uses: actions/checkout@v4
- name: Package download
uses: forgejo/download-artifact@v3
- name: Package deployment
run: ${{ github.workspace }}/.forgejo/bin/deploy.sh

View file

@ -1,52 +0,0 @@
on:
pull_request:
types: [ assigned, opened, synchronize, reopened ]
jobs:
build-x86_64:
runs-on: x86_64
container:
image: alpinelinux/alpine-gitlab-ci:latest
env:
CI_PROJECT_DIR: ${{ github.workspace }}
CI_DEBUG_BUILD: ${{ runner.debug }}
CI_MERGE_REQUEST_PROJECT_URL: ${{ github.server_url }}/${{ github.repository }}
CI_MERGE_REQUEST_TARGET_BRANCH_NAME: ${{ github.base_ref }}
steps:
- name: Environment setup
run: |
doas apk add nodejs git patch curl
cd /etc/apk/keys
doas curl -JO https://ayakael.net/api/packages/forge/alpine/key
- name: Repo pull
uses: actions/checkout@v4
with:
fetch-depth: 500
- name: Package build
run: |
${{ github.workspace }}/.forgejo/bin/build.sh
touch packages/dummy
- name: Package upload
uses: forgejo/upload-artifact@v3
with:
name: package
path: packages
deploy-x86_64:
needs: [build-x86_64]
runs-on: x86_64
container:
image: alpine:latest
env:
CI_ALPINE_REPO: 'https://ayakael.net/api/packages/forge/alpine'
FORGE_REPO_TOKEN: ${{ secrets.FORGE_REPO_TOKEN }}
FORGE_REPO_USER: ${{ vars.FORGE_REPO_USER }}
steps:
- name: Setting up environment
run: apk add nodejs curl findutils git gawk
- name: Repo pull
uses: actions/checkout@v4
- name: Package download
uses: forgejo/download-artifact@v3
- name: Package deployment
run: ${{ github.workspace }}/.forgejo/bin/deploy.sh

View file

@ -1,28 +0,0 @@
on:
workflow_dispatch:
schedule:
- cron: '0 5 * * *'
jobs:
check-backports:
name: Check backports repo
runs-on: x86_64
container:
image: alpine:latest
env:
downstream: https://ayakael.net/api/packages/forge/alpine/v3.21/backports
FORGEJO_TOKEN: ${{ secrets.forgejo_token }}
LABEL_NUMBER: 1
ALL_PACKAGES: true
steps:
- name: Environment setup
run: apk add grep coreutils gawk curl wget bash nodejs git jq sed
- name: Get scripts
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Check out-of-date packages
run: ${{ github.workspace }}/.forgejo/bin/check_ver.sh
- name: Create issues
run: ${{ github.workspace }}/.forgejo/bin/create_issue.sh

View file

@ -1,27 +0,0 @@
on:
workflow_dispatch:
schedule:
- cron: '0 5 * * *'
jobs:
check-community:
name: Check community repo
runs-on: x86_64
container:
image: alpine:latest
env:
downstream: https://dl-cdn.alpinelinux.org/alpine/edge/community
FORGEJO_TOKEN: ${{ secrets.forgejo_token }}
LABEL_NUMBER: 4
steps:
- name: Environment setup
run: apk add grep coreutils gawk curl wget bash nodejs git jq sed
- name: Get scripts
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Check out-of-date packages
run: ${{ github.workspace }}/.forgejo/bin/check_ver.sh
- name: Create issues
run: ${{ github.workspace }}/.forgejo/bin/create_issue.sh

View file

@ -1,27 +0,0 @@
on:
workflow_dispatch:
schedule:
- cron: '0 5 * * *'
jobs:
check-community:
name: Check testing repo
runs-on: x86_64
container:
image: alpine:latest
env:
downstream: https://dl-cdn.alpinelinux.org/alpine/edge/testing
FORGEJO_TOKEN: ${{ secrets.forgejo_token }}
LABEL_NUMBER: 4
steps:
- name: Environment setup
run: apk add grep coreutils gawk curl wget bash nodejs git jq sed
- name: Get scripts
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Check out-of-date packages
run: ${{ github.workspace }}/.forgejo/bin/check_ver.sh
- name: Create issues
run: ${{ github.workspace }}/.forgejo/bin/create_issue.sh

View file

@ -1,27 +0,0 @@
on:
workflow_dispatch:
schedule:
- cron: '0 5 * * *'
jobs:
check-user:
name: Check user repo
runs-on: x86_64
container:
image: alpine:latest
env:
downstream: https://ayakael.net/api/packages/forge/alpine/edge/user
FORGEJO_TOKEN: ${{ secrets.forgejo_token }}
LABEL_NUMBER: 4
steps:
- name: Environment setup
run: apk add grep coreutils gawk curl wget bash nodejs git jq sed
- name: Get scripts
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Check out-of-date packages
run: ${{ github.workspace }}/.forgejo/bin/check_ver.sh
- name: Create issues
run: ${{ github.workspace }}/.forgejo/bin/create_issue.sh

View file

@ -1,21 +0,0 @@
on:
pull_request:
types: [ assigned, opened, synchronize, reopened ]
jobs:
lint:
run-name: lint
runs-on: x86_64
container:
image: alpinelinux/apkbuild-lint-tools:latest
env:
CI_PROJECT_DIR: ${{ github.workspace }}
CI_DEBUG_BUILD: ${{ runner.debug }}
CI_MERGE_REQUEST_PROJECT_URL: ${{ github.server_url }}/${{ github.repository }}
CI_MERGE_REQUEST_TARGET_BRANCH_NAME: ${{ github.base_ref }}
steps:
- run: doas apk add nodejs git
- uses: actions/checkout@v4
with:
fetch-depth: 500
- run: lint

109
.gitlab-ci.yml Normal file
View file

@ -0,0 +1,109 @@
stages:
- verify
- build
- deploy
variables:
GIT_STRATEGY: clone
GIT_DEPTH: "500"
lint:
stage: verify
interruptible: true
script:
- |
sudo apk add shellcheck atools sudo abuild
export PATH="$PATH:$CI_PROJECT_DIR/.gitlab/bin"
lint
allow_failure: true
only:
- merge_requests
tags:
- apk-$CI_MERGE_REQUEST_TARGET_BRANCH_NAME-x86_64
.build:
stage: build
interruptible: true
script:
- |
sudo apk add alpine-sdk lua-aports sudo
sudo addgroup $USER abuild
export PATH="$PATH:$CI_PROJECT_DIR/.gitlab/bin"
sudo -Eu $USER build.sh
artifacts:
paths:
- packages/
- keys/
- logs/
expire_in: 7 days
when: always
only:
- merge_requests
.cross:
stage: build
interruptible: true
script:
- |
sudo apk add alpine-sdk lua-aports sudo gzip xz qemu-$CI_QEMU_TARGET_ARCH
sudo addgroup $USER abuild
export PATH="$PATH:$CI_PROJECT_DIR/.gitlab/bin"
build-rootfs.sh alpine${CI_MERGE_REQUEST_TARGET_BRANCH_NAME/v} $CI_ALPINE_TARGET_ARCH --rootfsdir $HOME/sysroot-$CI_ALPINE_TARGET_ARCH
cp /etc/apk/repositories $HOME/sysroot-$CI_ALPINE_TARGET_ARCH/etc/apk/.
sudo -Eu $USER CHOST=$CI_ALPINE_TARGET_ARCH build.sh
artifacts:
paths:
- packages/
- keys/
- logs/
expire_in: 7 days
when: always
only:
- merge_requests
build-x86_64:
extends: .build
when: always
tags:
- apk-$CI_MERGE_REQUEST_TARGET_BRANCH_NAME-x86_64
build-aarch64:
extends: .build
when: always
tags:
- apk-$CI_MERGE_REQUEST_TARGET_BRANCH_NAME-aarch64
build-ppc64le:
extends: .build
when: manual
tags:
- apk-$CI_MERGE_REQUEST_TARGET_BRANCH_NAME-ppc64le
build-s390x:
extends: .build
when: manual
tags:
- apk-$CI_MERGE_REQUEST_TARGET_BRANCH_NAME-s390x
build-armv7:
extends: .cross
when: manual
tags:
- apk-$CI_MERGE_REQUEST_TARGET_BRANCH_NAME-x86_64
variables:
CI_ALPINE_TARGET_ARCH: armv7
CI_QEMU_TARGET_ARCH: arm
push:
interruptible: true
stage: deploy
script:
- |
sudo apk add abuild git-lfs findutils
export PATH="$PATH:$CI_PROJECT_DIR/.gitlab/bin"
push.sh
rules:
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
when: manual
tags:
- repo
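As a concrete illustration of the `.cross` template, here is roughly what the `build-armv7` job's script expands to for a merge request targeting `v3.21` (the branch used by the backports workflow in this change set); the final `CHOST` assignment assumes the `CI_ALPINE_TARGET_ARCH` spelling used elsewhere in this file:

```shell
# Hypothetical expansion of the .cross job script for build-armv7 on v3.21.
sudo apk add alpine-sdk lua-aports sudo gzip xz qemu-arm
sudo addgroup "$USER" abuild
export PATH="$PATH:$CI_PROJECT_DIR/.gitlab/bin"
build-rootfs.sh alpine3.21 armv7 --rootfsdir "$HOME/sysroot-armv7"
cp /etc/apk/repositories "$HOME/sysroot-armv7/etc/apk/."
sudo -Eu "$USER" CHOST=armv7 build.sh
```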

111
.gitlab/bin/APKBUILD_SHIM Executable file
View file

@ -0,0 +1,111 @@
#!/bin/sh
set -e
arch=
builddir=
checkdepends=
depends=
depends_dev=
depends_doc=
depends_libs=
depends_openrc=
depends_static=
install=
install_if=
langdir=
ldpath=
license=
makedepends=
makedepends_build=
makedepends_host=
md5sums=
options=
patch_args=
pkgbasedir=
pkgdesc=
pkgdir=
pkgname=
pkgrel=
pkgver=
pkggroups=
pkgusers=
provides=
provider_priority=
replaces=
sha256sums=
sha512sums=
sonameprefix=
source=
srcdir=
startdir=
subpackages=
subpkgdir=
subpkgname=
triggers=
url=
# abuild.conf
CFLAGS=
CXXFLAGS=
CPPFLAGS=
LDFLAGS=
JOBS=
MAKEFLAGS=
CMAKE_CROSSOPTS=
. ./APKBUILD
: "$arch"
: "$builddir"
: "$checkdepends"
: "$depends"
: "$depends_dev"
: "$depends_doc"
: "$depends_libs"
: "$depends_openrc"
: "$depends_static"
: "$install"
: "$install_if"
: "$langdir"
: "$ldpath"
: "$license"
: "$makedepends"
: "$makedepends_build"
: "$makedepends_host"
: "$md5sums"
: "$options"
: "$patch_args"
: "$pkgbasedir"
: "$pkgdesc"
: "$pkgdir"
: "$pkgname"
: "$pkgrel"
: "$pkgver"
: "$pkggroups"
: "$pkgusers"
: "$provides"
: "$provider_priority"
: "$replaces"
: "$sha256sums"
: "$sha512sums"
: "$sonameprefix"
: "$source"
: "$srcdir"
: "$startdir"
: "$subpackages"
: "$subpkgdir"
: "$subpkgname"
: "$triggers"
: "$url"
# abuild.conf
: "$CFLAGS"
: "$CXXFLAGS"
: "$CPPFLAGS"
: "$LDFLAGS"
: "$JOBS"
: "$MAKEFLAGS"
: "$CMAKE_CROSSOPTS"

16
.gitlab/bin/apkbuild-shellcheck Executable file
View file

@ -0,0 +1,16 @@
#!/bin/sh
shellcheck -s ash \
-e SC3043 \
-e SC3057 \
-e SC3060 \
-e SC2016 \
-e SC2086 \
-e SC2169 \
-e SC2155 \
-e SC2100 \
-e SC2209 \
-e SC2030 \
-e SC2031 \
-e SC1090 \
-xa $CI_PROJECT_DIR/.gitlab/bin/APKBUILD_SHIM
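Combined with the shim above, this makes it possible to shellcheck a single APKBUILD outside CI; a hedged example, run from the aport's directory since `APKBUILD_SHIM` sources `./APKBUILD` relative to the current directory:

```shell
# Hypothetical manual shellcheck of one aport.
export CI_PROJECT_DIR=/path/to/this/checkout
cd "$CI_PROJECT_DIR"/user/somepackage       # "somepackage" is a placeholder
"$CI_PROJECT_DIR"/.gitlab/bin/apkbuild-shellcheck
```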

556
.gitlab/bin/build-rootfs.sh Executable file
View file

@ -0,0 +1,556 @@
#!/usr/bin/env bash
# Available here: https://lab.ilot.io/dotnet/arcade/-/blob/7f6d9796cc7f594772f798358dbdd8c69b6a97af/eng/common/cross/build-rootfs.sh
# Only modification: qemu-$arch-static becomes qemu-$arch
set -e
usage()
{
echo "Usage: $0 [BuildArch] [CodeName] [lldbx.y] [llvmx[.y]] [--skipunmount] --rootfsdir <directory>]"
echo "BuildArch can be: arm(default), arm64, armel, armv6, ppc64le, riscv64, s390x, x64, x86"
echo "CodeName - optional, Code name for Linux, can be: xenial(default), zesty, bionic, alpine"
echo " for alpine can be specified with version: alpineX.YY or alpineedge"
echo " for FreeBSD can be: freebsd12, freebsd13"
echo " for illumos can be: illumos"
echo " for Haiku can be: haiku."
echo "lldbx.y - optional, LLDB version, can be: lldb3.9(default), lldb4.0, lldb5.0, lldb6.0 no-lldb. Ignored for alpine and FreeBSD"
echo "llvmx[.y] - optional, LLVM version for LLVM related packages."
echo "--skipunmount - optional, will skip the unmount of rootfs folder."
echo "--use-mirror - optional, use mirror URL to fetch resources, when available."
echo "--jobs N - optional, restrict to N jobs."
exit 1
}
__CodeName=xenial
__CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
__BuildArch=arm
__AlpineArch=armv7
__FreeBSDArch=arm
__FreeBSDMachineArch=armv7
__IllumosArch=arm7
__QEMUArch=arm
__UbuntuArch=armhf
__UbuntuRepo="http://ports.ubuntu.com/"
__LLDB_Package="liblldb-3.9-dev"
__SkipUnmount=0
# base development support
__UbuntuPackages="build-essential"
__AlpinePackages="alpine-base"
__AlpinePackages+=" build-base"
# symlinks fixer
__UbuntuPackages+=" symlinks"
# runtime dependencies
__UbuntuPackages+=" libicu-dev"
__UbuntuPackages+=" liblttng-ust-dev"
__UbuntuPackages+=" libunwind8-dev"
__UbuntuPackages+=" libnuma-dev"
# runtime libraries' dependencies
__UbuntuPackages+=" libcurl4-openssl-dev"
__UbuntuPackages+=" libkrb5-dev"
__UbuntuPackages+=" libssl-dev"
__UbuntuPackages+=" zlib1g-dev"
__FreeBSDBase="12.3-RELEASE"
__FreeBSDPkg="1.17.0"
__FreeBSDABI="12"
__FreeBSDPackages="libunwind"
__FreeBSDPackages+=" icu"
__FreeBSDPackages+=" libinotify"
__FreeBSDPackages+=" openssl"
__FreeBSDPackages+=" krb5"
__FreeBSDPackages+=" terminfo-db"
__IllumosPackages="icu"
__IllumosPackages+=" mit-krb5"
__IllumosPackages+=" openssl"
__IllumosPackages+=" zlib"
__HaikuPackages="gmp"
__HaikuPackages+=" gmp_devel"
__HaikuPackages+=" krb5"
__HaikuPackages+=" krb5_devel"
__HaikuPackages+=" libiconv"
__HaikuPackages+=" libiconv_devel"
__HaikuPackages+=" llvm12_libunwind"
__HaikuPackages+=" llvm12_libunwind_devel"
__HaikuPackages+=" mpfr"
__HaikuPackages+=" mpfr_devel"
# ML.NET dependencies
__UbuntuPackages+=" libomp5"
__UbuntuPackages+=" libomp-dev"
__Keyring=
__UseMirror=0
__UnprocessedBuildArgs=
while :; do
if [[ "$#" -le 0 ]]; then
break
fi
lowerI="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
case $lowerI in
-\?|-h|--help)
usage
exit 1
;;
arm)
__BuildArch=arm
__UbuntuArch=armhf
__AlpineArch=armv7
__QEMUArch=arm
;;
arm64)
__BuildArch=arm64
__UbuntuArch=arm64
__AlpineArch=aarch64
__QEMUArch=aarch64
__FreeBSDArch=arm64
__FreeBSDMachineArch=aarch64
;;
armel)
__BuildArch=armel
__UbuntuArch=armel
__UbuntuRepo="http://ftp.debian.org/debian/"
__CodeName=jessie
;;
armv6)
__BuildArch=armv6
__UbuntuArch=armhf
__QEMUArch=arm
__UbuntuRepo="http://raspbian.raspberrypi.org/raspbian/"
__CodeName=buster
__LLDB_Package="liblldb-6.0-dev"
if [[ -e "/usr/share/keyrings/raspbian-archive-keyring.gpg" ]]; then
__Keyring="--keyring /usr/share/keyrings/raspbian-archive-keyring.gpg"
fi
;;
riscv64)
__BuildArch=riscv64
__AlpineArch=riscv64
__QEMUArch=riscv64
__UbuntuArch=riscv64
__UbuntuRepo="http://deb.debian.org/debian-ports"
__UbuntuPackages="${__UbuntuPackages// libunwind8-dev/}"
unset __LLDB_Package
if [[ -e "/usr/share/keyrings/debian-ports-archive-keyring.gpg" ]]; then
__Keyring="--keyring /usr/share/keyrings/debian-ports-archive-keyring.gpg --include=debian-ports-archive-keyring"
fi
;;
ppc64le)
__BuildArch=ppc64le
__AlpineArch=ppc64le
__QEMUArch=ppc64le
__UbuntuArch=ppc64el
__UbuntuRepo="http://ports.ubuntu.com/ubuntu-ports/"
__UbuntuPackages="${__UbuntuPackages// libunwind8-dev/}"
__UbuntuPackages="${__UbuntuPackages// libomp-dev/}"
__UbuntuPackages="${__UbuntuPackages// libomp5/}"
unset __LLDB_Package
;;
s390x)
__BuildArch=s390x
__AlpineArch=s390x
__QEMUArch=s390x
__UbuntuArch=s390x
__UbuntuRepo="http://ports.ubuntu.com/ubuntu-ports/"
__UbuntuPackages="${__UbuntuPackages// libunwind8-dev/}"
__UbuntuPackages="${__UbuntuPackages// libomp-dev/}"
__UbuntuPackages="${__UbuntuPackages// libomp5/}"
unset __LLDB_Package
;;
x64)
__BuildArch=x64
__AlpineArch=x86_64
__QEMUArch=x86_64
__UbuntuArch=amd64
__FreeBSDArch=amd64
__FreeBSDMachineArch=amd64
__illumosArch=x86_64
__UbuntuRepo=
;;
x86)
__BuildArch=x86
__AlpineArch=i386
__QEMUArch=i386
__UbuntuArch=i386
__AlpineArch=x86
__UbuntuRepo="http://archive.ubuntu.com/ubuntu/"
;;
lldb*)
version="${lowerI/lldb/}"
parts=(${version//./ })
# for versions > 6.0, lldb has dropped the minor version
if [[ "${parts[0]}" -gt 6 ]]; then
version="${parts[0]}"
fi
__LLDB_Package="liblldb-${version}-dev"
;;
no-lldb)
unset __LLDB_Package
;;
llvm*)
version="${lowerI/llvm/}"
parts=(${version//./ })
__LLVM_MajorVersion="${parts[0]}"
__LLVM_MinorVersion="${parts[1]}"
# for versions > 6.0, llvm has dropped the minor version
if [[ -z "$__LLVM_MinorVersion" && "$__LLVM_MajorVersion" -le 6 ]]; then
__LLVM_MinorVersion=0;
fi
;;
xenial) # Ubuntu 16.04
if [[ "$__CodeName" != "jessie" ]]; then
__CodeName=xenial
fi
;;
zesty) # Ubuntu 17.04
if [[ "$__CodeName" != "jessie" ]]; then
__CodeName=zesty
fi
;;
bionic) # Ubuntu 18.04
if [[ "$__CodeName" != "jessie" ]]; then
__CodeName=bionic
fi
;;
focal) # Ubuntu 20.04
if [[ "$__CodeName" != "jessie" ]]; then
__CodeName=focal
fi
;;
jammy) # Ubuntu 22.04
if [[ "$__CodeName" != "jessie" ]]; then
__CodeName=jammy
fi
;;
jessie) # Debian 8
__CodeName=jessie
if [[ -z "$__UbuntuRepo" ]]; then
__UbuntuRepo="http://ftp.debian.org/debian/"
fi
;;
stretch) # Debian 9
__CodeName=stretch
__LLDB_Package="liblldb-6.0-dev"
if [[ -z "$__UbuntuRepo" ]]; then
__UbuntuRepo="http://ftp.debian.org/debian/"
fi
;;
buster) # Debian 10
__CodeName=buster
__LLDB_Package="liblldb-6.0-dev"
if [[ -z "$__UbuntuRepo" ]]; then
__UbuntuRepo="http://ftp.debian.org/debian/"
fi
;;
bullseye) # Debian 11
__CodeName=bullseye
if [[ -z "$__UbuntuRepo" ]]; then
__UbuntuRepo="http://ftp.debian.org/debian/"
fi
;;
sid) # Debian sid
__CodeName=sid
if [[ -z "$__UbuntuRepo" ]]; then
__UbuntuRepo="http://ftp.debian.org/debian/"
fi
;;
tizen)
__CodeName=
__UbuntuRepo=
__Tizen=tizen
;;
alpine*)
__CodeName=alpine
__UbuntuRepo=
version="${lowerI/alpine/}"
if [[ "$version" == "edge" ]]; then
__AlpineVersion=edge
else
parts=(${version//./ })
__AlpineMajorVersion="${parts[0]}"
__AlpineMinoVersion="${parts[1]}"
__AlpineVersion="$__AlpineMajorVersion.$__AlpineMinoVersion"
fi
;;
freebsd12)
__CodeName=freebsd
__SkipUnmount=1
;;
freebsd13)
__CodeName=freebsd
__FreeBSDBase="13.0-RELEASE"
__FreeBSDABI="13"
__SkipUnmount=1
;;
illumos)
__CodeName=illumos
__SkipUnmount=1
;;
haiku)
__CodeName=haiku
__BuildArch=x64
__SkipUnmount=1
;;
--skipunmount)
__SkipUnmount=1
;;
--rootfsdir|-rootfsdir)
shift
__RootfsDir="$1"
;;
--use-mirror)
__UseMirror=1
;;
--use-jobs)
shift
MAXJOBS=$1
;;
*)
__UnprocessedBuildArgs="$__UnprocessedBuildArgs $1"
;;
esac
shift
done
if [[ "$__BuildArch" == "armel" ]]; then
__LLDB_Package="lldb-3.5-dev"
fi
__UbuntuPackages+=" ${__LLDB_Package:-}"
if [[ -n "$__LLVM_MajorVersion" ]]; then
__UbuntuPackages+=" libclang-common-${__LLVM_MajorVersion}${__LLVM_MinorVersion:+.$__LLVM_MinorVersion}-dev"
fi
if [[ -z "$__RootfsDir" && -n "$ROOTFS_DIR" ]]; then
__RootfsDir="$ROOTFS_DIR"
fi
if [[ -z "$__RootfsDir" ]]; then
__RootfsDir="$__CrossDir/../../../.tools/rootfs/$__BuildArch"
fi
if [[ -d "$__RootfsDir" ]]; then
if [[ "$__SkipUnmount" == "0" ]]; then
umount "$__RootfsDir"/* || true
fi
rm -rf "$__RootfsDir"
fi
mkdir -p "$__RootfsDir"
__RootfsDir="$( cd "$__RootfsDir" && pwd )"
if [[ "$__CodeName" == "alpine" ]]; then
__ApkToolsVersion=2.12.11
__ApkToolsDir="$(mktemp -d)"
wget "https://gitlab.alpinelinux.org/api/v4/projects/5/packages/generic//v$__ApkToolsVersion/x86_64/apk.static" -P "$__ApkToolsDir"
chmod +x "$__ApkToolsDir/apk.static"
mkdir -p "$__RootfsDir"/usr/bin
cp -v "/usr/bin/qemu-$__QEMUArch" "$__RootfsDir/usr/bin"
if [[ "$__AlpineVersion" == "edge" ]]; then
version=edge
else
version="v$__AlpineVersion"
fi
# initialize DB
"$__ApkToolsDir/apk.static" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \
-U --allow-untrusted --root "$__RootfsDir" --arch "$__AlpineArch" --initdb add
if [[ "$__AlpineLlvmLibsLookup" == 1 ]]; then
__AlpinePackages+=" $("$__ApkToolsDir/apk.static" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \
-U --allow-untrusted --root "$__RootfsDir" --arch "$__AlpineArch" \
search 'llvm*-libs' | sort | tail -1 | sed 's/-[^-]*//2g')"
fi
# install all packages in one go
"$__ApkToolsDir/apk.static" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \
-U --allow-untrusted --no-scripts --root "$__RootfsDir" --arch "$__AlpineArch" \
add $__AlpinePackages
rm -r "$__ApkToolsDir"
elif [[ "$__CodeName" == "freebsd" ]]; then
mkdir -p "$__RootfsDir"/usr/local/etc
JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"}
wget -O - "https://download.freebsd.org/ftp/releases/${__FreeBSDArch}/${__FreeBSDMachineArch}/${__FreeBSDBase}/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version
echo "ABI = \"FreeBSD:${__FreeBSDABI}:${__FreeBSDMachineArch}\"; FINGERPRINTS = \"${__RootfsDir}/usr/share/keys\"; REPOS_DIR = [\"${__RootfsDir}/etc/pkg\"]; REPO_AUTOUPDATE = NO; RUN_SCRIPTS = NO;" > "${__RootfsDir}"/usr/local/etc/pkg.conf
echo "FreeBSD: { url: \"pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly\", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"${__RootfsDir}/usr/share/keys/pkg\", enabled: yes }" > "${__RootfsDir}"/etc/pkg/FreeBSD.conf
mkdir -p "$__RootfsDir"/tmp
# get and build package manager
wget -O - "https://github.com/freebsd/pkg/archive/${__FreeBSDPkg}.tar.gz" | tar -C "$__RootfsDir"/tmp -zxf -
cd "$__RootfsDir/tmp/pkg-${__FreeBSDPkg}"
# needed for install to succeed
mkdir -p "$__RootfsDir"/host/etc
./autogen.sh && ./configure --prefix="$__RootfsDir"/host && make -j "$JOBS" && make install
rm -rf "$__RootfsDir/tmp/pkg-${__FreeBSDPkg}"
# install packages we need.
INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf update
INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf install --yes $__FreeBSDPackages
elif [[ "$__CodeName" == "illumos" ]]; then
mkdir "$__RootfsDir/tmp"
pushd "$__RootfsDir/tmp"
JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"}
echo "Downloading sysroot."
wget -O - https://github.com/illumos/sysroot/releases/download/20181213-de6af22ae73b-v1/illumos-sysroot-i386-20181213-de6af22ae73b-v1.tar.gz | tar -C "$__RootfsDir" -xzf -
echo "Building binutils. Please wait.."
wget -O - https://ftp.gnu.org/gnu/binutils/binutils-2.33.1.tar.bz2 | tar -xjf -
mkdir build-binutils && cd build-binutils
../binutils-2.33.1/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.10" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir"
make -j "$JOBS" && make install && cd ..
echo "Building gcc. Please wait.."
wget -O - https://ftp.gnu.org/gnu/gcc/gcc-8.4.0/gcc-8.4.0.tar.xz | tar -xJf -
CFLAGS="-fPIC"
CXXFLAGS="-fPIC"
CXXFLAGS_FOR_TARGET="-fPIC"
CFLAGS_FOR_TARGET="-fPIC"
export CFLAGS CXXFLAGS CXXFLAGS_FOR_TARGET CFLAGS_FOR_TARGET
mkdir build-gcc && cd build-gcc
../gcc-8.4.0/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.10" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir" --with-gnu-as \
--with-gnu-ld --disable-nls --disable-libgomp --disable-libquadmath --disable-libssp --disable-libvtv --disable-libcilkrts --disable-libada --disable-libsanitizer \
--disable-libquadmath-support --disable-shared --enable-tls
make -j "$JOBS" && make install && cd ..
BaseUrl=https://pkgsrc.smartos.org
if [[ "$__UseMirror" == 1 ]]; then
BaseUrl=https://pkgsrc.smartos.skylime.net
fi
BaseUrl="$BaseUrl/packages/SmartOS/trunk/${__illumosArch}/All"
echo "Downloading manifest"
wget "$BaseUrl"
echo "Downloading dependencies."
read -ra array <<<"$__IllumosPackages"
for package in "${array[@]}"; do
echo "Installing '$package'"
# find last occurrence of package in listing and extract its name
package="$(sed -En '/.*href="('"$package"'-[0-9].*).tgz".*/h;$!d;g;s//\1/p' All)"
echo "Resolved name '$package'"
wget "$BaseUrl"/"$package".tgz
ar -x "$package".tgz
tar --skip-old-files -xzf "$package".tmp.tg* -C "$__RootfsDir" 2>/dev/null
done
echo "Cleaning up temporary files."
popd
rm -rf "$__RootfsDir"/{tmp,+*}
mkdir -p "$__RootfsDir"/usr/include/net
mkdir -p "$__RootfsDir"/usr/include/netpacket
wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/bpf.h
wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/dlt.h
wget -P "$__RootfsDir"/usr/include/netpacket https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/inet/sockmods/netpacket/packet.h
wget -P "$__RootfsDir"/usr/include/sys https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/sys/sdt.h
elif [[ "$__CodeName" == "haiku" ]]; then
JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"}
echo "Building Haiku sysroot for x86_64"
mkdir -p "$__RootfsDir/tmp"
cd "$__RootfsDir/tmp"
git clone -b hrev56235 https://review.haiku-os.org/haiku
git clone -b btrev43195 https://review.haiku-os.org/buildtools
cd "$__RootfsDir/tmp/buildtools" && git checkout 7487388f5110021d400b9f3b88e1a7f310dc066d
# Fetch some unmerged patches
cd "$__RootfsDir/tmp/haiku"
## Add development build profile (slimmer than nightly)
git fetch origin refs/changes/64/4164/1 && git -c commit.gpgsign=false cherry-pick FETCH_HEAD
# Build jam
cd "$__RootfsDir/tmp/buildtools/jam"
make
# Configure cross tools
echo "Building cross-compiler"
mkdir -p "$__RootfsDir/generated"
cd "$__RootfsDir/generated"
"$__RootfsDir/tmp/haiku/configure" -j"$JOBS" --sysroot "$__RootfsDir" --cross-tools-source "$__RootfsDir/tmp/buildtools" --build-cross-tools x86_64
# Build Haiku packages
echo "Building Haiku"
echo 'HAIKU_BUILD_PROFILE = "development-raw" ;' > UserProfileConfig
"$__RootfsDir/tmp/buildtools/jam/jam0" -j"$JOBS" -q '<build>package' '<repository>Haiku'
BaseUrl="https://depot.haiku-os.org/__api/v2/pkg/get-pkg"
# Download additional packages
echo "Downloading additional required packages"
read -ra array <<<"$__HaikuPackages"
for package in "${array[@]}"; do
echo "Downloading $package..."
# API documented here: https://github.com/haiku/haikudepotserver/blob/master/haikudepotserver-api2/src/main/resources/api2/pkg.yaml#L60
# The schema here: https://github.com/haiku/haikudepotserver/blob/master/haikudepotserver-api2/src/main/resources/api2/pkg.yaml#L598
hpkgDownloadUrl="$(wget -qO- --post-data='{"name":"'"$package"'","repositorySourceCode":"haikuports_x86_64","versionType":"LATEST","naturalLanguageCode":"en"}' \
--header='Content-Type:application/json' "$BaseUrl" | jq -r '.result.versions[].hpkgDownloadURL')"
wget -P "$__RootfsDir/generated/download" "$hpkgDownloadUrl"
done
# Setup the sysroot
echo "Setting up sysroot and extracting needed packages"
mkdir -p "$__RootfsDir/boot/system"
for file in "$__RootfsDir/generated/objects/haiku/x86_64/packaging/packages/"*.hpkg; do
"$__RootfsDir/generated/objects/linux/x86_64/release/tools/package/package" extract -C "$__RootfsDir/boot/system" "$file"
done
for file in "$__RootfsDir/generated/download/"*.hpkg; do
"$__RootfsDir/generated/objects/linux/x86_64/release/tools/package/package" extract -C "$__RootfsDir/boot/system" "$file"
done
# Cleaning up temporary files
echo "Cleaning up temporary files"
rm -rf "$__RootfsDir/tmp"
for name in "$__RootfsDir/generated/"*; do
if [[ "$name" =~ "cross-tools-" ]]; then
: # Keep the cross-compiler
else
rm -rf "$name"
fi
done
elif [[ -n "$__CodeName" ]]; then
qemu-debootstrap $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo"
cp "$__CrossDir/$__BuildArch/sources.list.$__CodeName" "$__RootfsDir/etc/apt/sources.list"
chroot "$__RootfsDir" apt-get update
chroot "$__RootfsDir" apt-get -f -y install
chroot "$__RootfsDir" apt-get -y install $__UbuntuPackages
chroot "$__RootfsDir" symlinks -cr /usr
chroot "$__RootfsDir" apt-get clean
if [[ "$__SkipUnmount" == "0" ]]; then
umount "$__RootfsDir"/* || true
fi
if [[ "$__BuildArch" == "armel" && "$__CodeName" == "jessie" ]]; then
pushd "$__RootfsDir"
patch -p1 < "$__CrossDir/$__BuildArch/armel.jessie.patch"
popd
fi
elif [[ "$__Tizen" == "tizen" ]]; then
ROOTFS_DIR="$__RootfsDir" "$__CrossDir/tizen-build-rootfs.sh" "$__BuildArch"
else
echo "Unsupported target platform."
usage;
exit 1
fi

View file

@ -1,26 +1,27 @@
#!/bin/sh #!/bin/sh
# shellcheck disable=SC3043 # shellcheck disable=SC3043
. /usr/local/lib/functions.sh . $CI_PROJECT_DIR/.gitlab/bin/functions.sh
# shellcheck disable=SC3040 # shellcheck disable=SC3040
set -eu -o pipefail set -eu -o pipefail
readonly APORTSDIR=$CI_PROJECT_DIR readonly APORTSDIR=$CI_PROJECT_DIR
readonly REPOS="backports user" readonly REPOS="cross backports user testing community"
readonly ALPINE_REPOS="main community testing" readonly ALPINE_REPOS="main community"
readonly ARCH=$(apk --print-arch) readonly ARCH=$(apk --print-arch)
# gitlab variables # gitlab variables
readonly BASEBRANCH=$CI_MERGE_REQUEST_TARGET_BRANCH_NAME readonly BASEBRANCH=$CI_MERGE_REQUEST_TARGET_BRANCH_NAME
: "${REPODEST:=$HOME/packages}" : "${REPODEST:=$HOME/packages}"
: "${MIRROR:=https://ayakael.net/api/packages/forge/alpine}" : "${MIRROR:=https://lab.ilot.io/ayakael/repo-apk/-/raw}"
: "${ALPINE_MIRROR:=http://dl-cdn.alpinelinux.org/alpine}" : "${ALPINE_MIRROR:=http://dl-cdn.alpinelinux.org/alpine}"
: "${MAX_ARTIFACT_SIZE:=300000000}" #300M : "${MAX_ARTIFACT_SIZE:=300000000}" #300M
: "${CI_DEBUG_BUILD:=}" : "${CI_DEBUG_BUILD:=}"
: "${CI_ALPINE_BUILD_OFFSET:=0}" : "${CI_ALPINE_BUILD_OFFSET:=0}"
: "${CI_ALPINE_BUILD_LIMIT:=9999}" : "${CI_ALPINE_BUILD_LIMIT:=9999}"
: "${CI_ALPINE_TARGET_ARCH:=$(uname -m)}"
msg() { msg() {
local color=${2:-green} local color=${2:-green}
@ -70,7 +71,7 @@ report() {
get_release() { get_release() {
case $BASEBRANCH in case $BASEBRANCH in
v*) echo "$BASEBRANCH";; v*) echo "${BASEBRANCH%-*}";;
edge) echo edge;; edge) echo edge;;
*) die "Branch \"$BASEBRANCH\" not supported!" *) die "Branch \"$BASEBRANCH\" not supported!"
esac esac
@ -79,8 +80,9 @@ get_release() {
build_aport() { build_aport() {
local repo="$1" aport="$2" local repo="$1" aport="$2"
cd "$APORTSDIR/$repo/$aport" cd "$APORTSDIR/$repo/$aport"
export CHOST=$CI_ALPINE_TARGET_ARCH
if abuild -r 2>&1 | report "build-$aport"; then if abuild -r 2>&1 | report "build-$aport"; then
checkapk 2>&1 | report "checkapk-$aport" || true checkapk | report "checkapk-$aport" || true
aport_ok="$aport_ok $repo/$aport" aport_ok="$aport_ok $repo/$aport"
else else
aport_ng="$aport_ng $repo/$aport" aport_ng="$aport_ng $repo/$aport"
@ -90,6 +92,12 @@ build_aport() {
check_aport() { check_aport() {
local repo="$1" aport="$2" local repo="$1" aport="$2"
cd "$APORTSDIR/$repo/$aport" cd "$APORTSDIR/$repo/$aport"
export CHOST=$CI_ALPINE_TARGET_ARCH
# TODO: this enables crossbuild only on user, this should be cleaner
if [ "$repo" != "user" ] && [ "$repo" != "backports" ] && [ "$CI_ALPINE_TARGET_ARCH" != "$ARCH" ]; then
aport_na="$aport_na $repo/$aport"
return 1
fi
if ! abuild check_arch 2>/dev/null; then if ! abuild check_arch 2>/dev/null; then
aport_na="$aport_na $repo/$aport" aport_na="$aport_na $repo/$aport"
return 1 return 1
@ -102,13 +110,16 @@ set_repositories_for() {
release=$(get_release) release=$(get_release)
for repo in $REPOS; do for repo in $REPOS; do
[ "$repo" = "non-free" ] && continue
[ "$release" == "edge" ] && [ "$repo" == "backports" ] && continue
repos="$repos $MIRROR/$release/$repo $REPODEST/$repo" repos="$repos $MIRROR/$release/$repo $REPODEST/$repo"
[ "$repo" = "$target_repo" ] && break [ "$repo" = "$target_repo" ] && break
done done
doas sh -c "printf '%s\n' $repos >> /etc/apk/repositories" sudo sh -c "printf '%s\n' $repos >> /etc/apk/repositories"
doas apk update sudo apk update || true
if [ "$CI_ALPINE_TARGET_ARCH" != "$ARCH" ]; then
sudo sh -c "printf '%s\n' $repos >> $HOME/sysroot-$CI_ALPINE_TARGET_ARCH/etc/apk/repositories"
sudo cp -R /etc/apk/keys/* $HOME/sysroot-$CI_ALPINE_TARGET_ARCH/etc/apk/keys/.
sudo apk --root=$HOME/sysroot-$CI_ALPINE_TARGET_ARCH update || true
fi
} }
apply_offset_limit() { apply_offset_limit() {
@ -128,10 +139,22 @@ setup_system() {
[ "$release" != "edge" ] && [ "$repo" == "testing" ] && continue [ "$release" != "edge" ] && [ "$repo" == "testing" ] && continue
repos="$repos $ALPINE_MIRROR/$release/$repo" repos="$repos $ALPINE_MIRROR/$release/$repo"
done done
doas sh -c "printf '%s\n' $repos > /etc/apk/repositories" repos="$repos $MIRROR/$release/cross"
doas apk -U upgrade -a || apk fix || die "Failed to up/downgrade system" sudo sh -c "printf '%s\n' $repos > /etc/apk/repositories"
abuild-keygen -ain sudo apk -U upgrade -a || sudo apk fix || die "Failed to up/downgrade system"
doas sed -i -E 's/export JOBS=[0-9]+$/export JOBS=$(nproc)/' /etc/abuild.conf if [ "$CI_ALPINE_TARGET_ARCH" != "$ARCH" ]; then
sudo apk add gcc-$CI_ALPINE_TARGET_ARCH
fi
gitlab_key_to_rsa $ABUILD_KEY rsa-private $HOME/.abuild/$ABUILD_KEY_NAME.rsa
gitlab_key_to_rsa $ABUILD_KEY_PUB rsa-public $HOME/.abuild/$ABUILD_KEY_NAME.rsa.pub
chmod 700 $HOME/.abuild/$ABUILD_KEY_NAME.rsa
echo "PACKAGER_PRIVKEY=$HOME/.abuild/$ABUILD_KEY_NAME.rsa" >> $HOME/.abuild/abuild.conf
sudo cp $HOME/.abuild/$ABUILD_KEY_NAME.rsa.pub /etc/apk/keys/$ABUILD_KEY_NAME.rsa.pub
# patch abuild for crosscompiling
sudo patch -p1 -d / -i $CI_PROJECT_DIR/.gitlab/patches/abuild-cross.patch
sudo sed -i -E 's/export JOBS=[0-9]+$/export JOBS=$(nproc)/' /etc/abuild.conf
( . /etc/abuild.conf && echo "Building with $JOBS jobs" ) ( . /etc/abuild.conf && echo "Building with $JOBS jobs" )
mkdir -p "$REPODEST" mkdir -p "$REPODEST"
git config --global init.defaultBranch master git config --global init.defaultBranch master
@ -180,7 +203,7 @@ sysinfo || true
setup_system || die "Failed to setup system" setup_system || die "Failed to setup system"
# git no longer allows to execute in repositories owned by different users # git no longer allows to execute in repositories owned by different users
doas chown -R buildozer: . sudo chown -R $USER: .
fetch_flags="-qn" fetch_flags="-qn"
debugging && fetch_flags="-v" debugging && fetch_flags="-v"
@ -203,6 +226,7 @@ build_start=$CI_ALPINE_BUILD_OFFSET
build_limit=$CI_ALPINE_BUILD_LIMIT build_limit=$CI_ALPINE_BUILD_LIMIT
for repo in $(changed_repos); do for repo in $(changed_repos); do
mkdir -p "$APORTSDIR"/logs "$APORTSDIR"/packages "$APORTSDIR"/keys
set_repositories_for "$repo" set_repositories_for "$repo"
built_aports=0 built_aports=0
changed_aports_in_repo=$(changed_aports "$repo") changed_aports_in_repo=$(changed_aports "$repo")
@ -243,7 +267,7 @@ for ok in $aport_ok; do
done done
for na in $aport_na; do for na in $aport_na; do
msg "$na: disabled for $ARCH" yellow msg "$na: disabled for $CI_ALPINE_TARGET_ARCH" yellow
done done
for ng in $aport_ng; do for ng in $aport_ng; do
@ -257,4 +281,3 @@ if [ "$failed" = true ]; then
elif [ -z "$aport_ok" ]; then elif [ -z "$aport_ok" ]; then
msg "No packages found to be built." yellow msg "No packages found to be built." yellow
fi fi

20
.gitlab/bin/changed-aports Executable file
View file

@ -0,0 +1,20 @@
#!/bin/sh
if [ $# -lt 1 ]; then
echo "Usage: $0 <basebranch>"
exit 1
fi
if ! git rev-parse --is-inside-work-tree >/dev/null 2>&1; then
echo "Fatal: not inside a git repository"
exit 2
fi
basebranch=$1
if ! git rev-parse --verify --quiet $basebranch >/dev/null; then
# The base branch does not exist, probably due to a shallow clone
git fetch -v $CI_MERGE_REQUEST_PROJECT_URL.git +refs/heads/$basebranch:refs/heads/$basebranch
fi
git --no-pager diff --diff-filter=ACMR --name-only $basebranch...HEAD -- "*/APKBUILD" | xargs -r -n1 dirname
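A hedged example of invoking this helper directly; the clone URL is only needed when the base branch is missing from a shallow checkout, and the output shown is illustrative:

```shell
# Hypothetical run against the v3.21 base branch.
export CI_MERGE_REQUEST_PROJECT_URL='<project clone URL>'
.gitlab/bin/changed-aports v3.21
# Example output, one directory per modified APKBUILD:
#   user/calibre
```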

74
.gitlab/bin/functions.sh Executable file
View file

@ -0,0 +1,74 @@
# shellcheck disable=SC3043
:
# shellcheck disable=SC3040
set -eu -o pipefail
changed_repos() {
: "${APORTSDIR?APORTSDIR missing}"
: "${BASEBRANCH?BASEBRANCH missing}"
cd "$APORTSDIR"
for repo in $REPOS; do
git diff --diff-filter=ACMR --exit-code "$BASEBRANCH"...HEAD -- "$repo" >/dev/null \
|| echo "$repo"
done
}
changed_aports() {
: "${APORTSDIR?APORTSDIR missing}"
: "${BASEBRANCH?BASEBRANCH missing}"
cd "$APORTSDIR"
local repo="$1"
local aports
aports=$(git diff --name-only --diff-filter=ACMR --relative="$repo" \
"$BASEBRANCH"...HEAD -- "*/APKBUILD" | xargs -rn1 dirname)
# shellcheck disable=2086
ap builddirs -d "$APORTSDIR/$repo" $aports 2>/dev/null | xargs -rn1 basename
}
section_start() {
name=${1?arg 1 name missing}
header=${2?arg 2 header missing}
collapsed=${3:-}
timestamp=$(date +%s)
options=""
case $collapsed in
yes|on|collapsed|true) options="[collapsed=true]";;
esac
printf "\e[0Ksection_start:%d:%s%s\r\e[0K%s\n" "$timestamp" "$name" "$options" "$header"
}
section_end() {
name=$1
timestamp=$(date +%s)
printf "\e[0Ksection_end:%d:%s\r\e[0K" "$timestamp" "$name"
}
gitlab_key_to_rsa() {
KEY=$1
TYPE=$2
TGT=$3
TGT_DIR=${TGT%/*}
if [ "$TGT" == "$TGT_DIR" ]; then
TGT_DIR="./"
fi
if [ ! -d "$TGT_DIR" ]; then
mkdir -p "$TGT_DIR"
fi
case $TYPE in
rsa-public) local type="PUBLIC";;
rsa-private) local type="RSA PRIVATE";;
esac
echo "-----BEGIN $type KEY-----" > "$TGT"
echo $1 | sed 's/.\{64\}/&\
/g' >> "$TGT"
echo "-----END $type KEY-----" >> "$TGT"
}
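A short usage sketch for these helpers, assuming `ABUILD_KEY` and `ABUILD_KEY_NAME` are CI variables as in the build and push scripts of this change set:

```shell
# Illustrative use of functions.sh from a CI step.
. "$CI_PROJECT_DIR"/.gitlab/bin/functions.sh

# Re-wrap the base64 key body stored in a CI variable into a PEM file.
gitlab_key_to_rsa "$ABUILD_KEY" rsa-private "$HOME/.abuild/$ABUILD_KEY_NAME.rsa"

# Collapsible sections in the GitLab job log.
section_start setup "Preparing build environment" collapsed
sudo apk add alpine-sdk
section_end setup
```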

96
.gitlab/bin/lint Executable file
View file

@ -0,0 +1,96 @@
#!/bin/sh
BLUE="\e[34m"
MAGENTA="\e[35m"
RESET="\e[0m"
readonly BASEBRANCH=$CI_MERGE_REQUEST_TARGET_BRANCH_NAME
verbose() {
echo "> " "$@"
# shellcheck disable=SC2068
$@
}
debugging() {
[ -n "$CI_DEBUG_BUILD" ]
}
debug() {
if debugging; then
verbose "$@"
fi
}
# git no longer allows to execute in repositories owned by different users
sudo chown -R gitlab-runner: .
fetch_flags="-qn"
debugging && fetch_flags="-v"
git fetch $fetch_flags "$CI_MERGE_REQUEST_PROJECT_URL" \
"+refs/heads/$BASEBRANCH:refs/heads/$BASEBRANCH"
if debugging; then
merge_base=$(git merge-base "$BASEBRANCH" HEAD)
echo "$merge_base"
git --version
git config -l
git tag merge-base "$merge_base" || { echo "Could not determine merge-base"; exit 50; }
git log --oneline --graph --decorate --all
fi
has_problems=0
for PKG in $(changed-aports "$BASEBRANCH"); do
printf "$BLUE==>$RESET Linting $PKG\n"
(
cd "$PKG"
repo=$(basename $(dirname $PKG));
if [ "$repo" == "backports" ]; then
echo "Skipping $PKG as backports (we don't care)"
exit 0 # leave this package's subshell; "continue" cannot reach the outer loop from here
fi
printf "\n\n"
printf "$BLUE"
printf '======================================================\n'
printf " parse APKBUILD:\n"
printf '======================================================'
printf "$RESET\n\n"
( . ./APKBUILD ) || has_problems=1
printf "\n\n"
printf "$BLUE"
printf '======================================================\n'
printf " abuild sanitycheck:\n"
printf '======================================================'
printf "$RESET\n\n"
abuild sanitycheck || has_problems=1
printf "\n\n"
printf "$BLUE"
printf '======================================================\n'
printf " apkbuild-shellcheck:\n"
printf '======================================================'
printf "$RESET\n"
apkbuild-shellcheck || has_problems=1
printf "\n\n"
printf "$BLUE"
printf '======================================================\n'
printf " apkbuild-lint:\n"
printf '======================================================'
printf "$RESET\n\n"
apkbuild-lint APKBUILD || has_problems=1
exit $has_problems
) || has_problems=1
echo
done
exit $has_problems

56
.gitlab/bin/push.sh Executable file
View file

@ -0,0 +1,56 @@
#!/bin/sh
# shellcheck disable=SC3043
. $CI_PROJECT_DIR/.gitlab/bin/functions.sh
# shellcheck disable=SC3040
set -eu -o pipefail
readonly APORTSDIR=$CI_PROJECT_DIR
readonly REPOS="backports user"
readonly BASEBRANCH=$CI_MERGE_REQUEST_TARGET_BRANCH_NAME
export GIT_SSH_COMMAND="ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
gitlab_key_to_rsa $ABUILD_KEY rsa-private $HOME/.abuild/$ABUILD_KEY_NAME.rsa
gitlab_key_to_rsa $ABUILD_KEY_PUB rsa-public $HOME/.abuild/$ABUILD_KEY_NAME.rsa.pub
gitlab_key_to_rsa $SSH_KEY rsa-private $HOME/.ssh/id_rsa
chmod 700 "$HOME"/.ssh/id_rsa
chmod 700 "$HOME"/.abuild/$ABUILD_KEY_NAME.rsa
echo "PACKAGER_PRIVKEY=$HOME/.abuild/$ABUILD_KEY_NAME.rsa" > $HOME/.abuild/abuild.conf
echo "REPODEST=$HOME/repo-apk" >> $HOME/.abuild/abuild.conf
sudo cp $HOME/.abuild/$ABUILD_KEY_NAME.rsa.pub /etc/apk/keys/.
if [ -d $HOME/repo-apk ]; then
git -C $HOME/repo-apk fetch
git -C $HOME/repo-apk checkout $BASEBRANCH
git -C $HOME/repo-apk pull --rebase
else
git clone git@lab.ilot.io:ayakael/repo-apk -b $BASEBRANCH $HOME/repo-apk
fi
for i in $(find packages -type f -name "*.apk"); do
install -vDm644 $i ${i/packages/$HOME\/repo-apk}
done
fetch_flags="-qn"
git fetch $fetch_flags "$CI_MERGE_REQUEST_PROJECT_URL" \
"+refs/heads/$BASEBRANCH:refs/heads/$BASEBRANCH"
for repo in $(changed_repos); do
rm $HOME/repo-apk/$repo/*/APKINDEX.tar.gz || true
mkdir -p $repo/DUMMY
echo "pkgname=DUMMY" > $repo/DUMMY/APKBUILD
cd $repo/DUMMY
for i in $(find $HOME/repo-apk/$repo -maxdepth 1 -mindepth 1 -printf '%P '); do
CHOST=$i abuild index
done
cd "$CI_PROJECT_DIR"
rm -R $repo/DUMMY
done
git -C $HOME/repo-apk add .
git -C $HOME/repo-apk commit -m "Update from $CI_MERGE_REQUEST_IID - $CI_MERGE_REQUEST_TITLE"
git -C $HOME/repo-apk push
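The DUMMY-package loop above is a workaround to make `abuild index` regenerate and sign an `APKINDEX.tar.gz` for every architecture directory of a changed repo (abuild derives the repo from the aport's parent directory, the arch from `CHOST`, and writes under `REPODEST`, which points at the git-lfs checkout here). Roughly, one iteration amounts to:

```shell
# Roughly what one pass of the DUMMY loop performs for backports/x86_64.
mkdir -p backports/DUMMY
echo "pkgname=DUMMY" > backports/DUMMY/APKBUILD
( cd backports/DUMMY && CHOST=x86_64 abuild index )   # refreshes $REPODEST/backports/x86_64/APKINDEX.tar.gz
rm -r backports/DUMMY
```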

View file

@ -0,0 +1,17 @@
diff --git a/usr/bin/abuild.orig b/usr/bin/abuild
index 71e0681..d4ae3dd 100755
--- a/usr/bin/abuild.orig
+++ b/usr/bin/abuild
@@ -2231,7 +2231,11 @@ calcdeps() {
list_has $i $builddeps && continue
subpackages_has ${i%%[<>=]*} || builddeps="$builddeps $i"
done
- hostdeps="$EXTRADEPENDS_TARGET"
+ for i in $EXTRADEPENDS_HOST $EXTRADEPENDS_TARGET $depends $makedepends; do
+ [ "$pkgname" = "${i%%[<>=]*}" ] && continue
+ list_has $i $hostdeps && continue
+ subpackages_has ${i%%[<>=]*} || hostdeps="$hostdeps $i"
+ done
fi
}
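With this patch, abuild's calcdeps also folds `$EXTRADEPENDS_HOST`, `$depends`, and `$makedepends` into the host dependency set (upstream only uses `$EXTRADEPENDS_TARGET` there), which is what lets the sysroot-based cross builds above inject target-arch dependencies. A hedged sketch of how it could be exercised:

```shell
# Hypothetical cross build of one aport with the patched abuild.
cd user/somepackage                           # placeholder aport
EXTRADEPENDS_HOST="openssl-dev" CHOST=armv7 abuild -r
```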

View file

@ -1,32 +1,32 @@
# ayaports # user-aports
Upstream: https://ayakael.net/forge/ayaports Upstream: https://lab.ilot.io/ayakael/user-aports
## Description ## Description
This repository contains aports that are not yet merged in the official Alpine This repository contains aports that are not yet merged in the official Alpine
Linux repository or don't adhere to Alpine policies. Packages are automatically Linux repository or don't adhere to Alpine policies. Packages are automatically
built using CI. Once built, they are deployed to a git-lfs repository, making built using GitLab CI on my own GitLab instance. Once built, they are deployed
them available to apk. to a git-lfs repository, making them available to apk.
Branches are matched to Alpine releases. Branches are matched to Alpine releases.
## Repositories ## Repositories
You can browse all the repositories at https://codeberg.org/ayakael/ayaports You can browse all the repositories at https://lab.ilot.io/ayakael/repo-apk.
Affixed to each repository description is the appropriate link for use in Affixed to each repository description is the appropriate link for use in
`/etc/apk/repositories`. `/etc/apk/repositories`.
#### Backports #### Backports
``` ```
https://ayakael.net/api/packages/forge/alpine/v3.21/backports https://lab.ilot.io/ayakael/repo-apk/-/raw/edge/backports
``` ```
Aports from the official Alpine repositories backported from edge. This is only Aports from the official Alpine repositories backported from edge.
available and kept up-to-date on latest stable release.
#### User #### User
``` ```
https://ayakael.net/api/packages/forge/alpine/edge/user https://lab.ilot.io/ayakael/repo-apk/-/raw/edge/user
``` ```
Aports that have yet to be (or may never be) upstreamed to the official Aports that have yet to be (or may never be) upstreamed to the official
@ -34,11 +34,11 @@ aports.
## How to use ## How to use
Add security key of the apk repository to your /etc/apk/keys: Add security key of the repo-apk repository to your /etc/apk/keys:
```shell ```shell
cd /etc/apk/keys cd /etc/apk/keys
curl -JO https://ayakael.net/api/packages/forge/alpine/key wget https://lab.ilot.io/ayakael/repo-apk/-/raw/edge/antoine.martin@protonmail.com-5b3109ad.rsa.pub
``` ```
Add repositories that you want to use (see above) to `/etc/apk/repositories`. Add repositories that you want to use (see above) to `/etc/apk/repositories`.
@ -52,10 +52,10 @@ they will work for you.
## Contribution & bug reports ## Contribution & bug reports
If you wish to contribute to this aports collection, or wish to report a bug, If you wish to contribute to this aports collection, or wish to report a bug,
you can do so on Codeberg here: you can do so on Alpine's GitLab instance here:
https://codeberg.org/ayakael/ayaports/issues https://gitlab.alpinelinux.org/ayakael/user-aports
For packages that are in backports, bug reports and merge requests For packages that are in testing/community, bug reports and merge requests
should be done on Alpine's aports repo instance: should be done on Alpine's aports repo instance:
https://gitlab.alpinelinux.org/alpine/aports https://gitlab.alpinelinux.org/alpine/aports

View file

@ -1,25 +0,0 @@
diff --color -Nur calibre-6.17.0.orig/src/calibre/gui2/update.py calibre-6.17.0/src/calibre/gui2/update.py
--- calibre-6.17.0.orig/src/calibre/gui2/update.py 2023-05-06 11:36:35.678461036 -0700
+++ calibre-6.17.0/src/calibre/gui2/update.py 2023-05-06 11:39:10.365134930 -0700
@@ -82,20 +82,6 @@
while not self.shutdown_event.is_set():
calibre_update_version = NO_CALIBRE_UPDATE
plugins_update_found = 0
- try:
- version = get_newest_version()
- if version[:2] > numeric_version[:2]:
- calibre_update_version = version
- except Exception as e:
- prints('Failed to check for calibre update:', as_unicode(e))
- try:
- update_plugins = get_plugin_updates_available(raise_error=True)
- if update_plugins is not None:
- plugins_update_found = len(update_plugins)
- except Exception as e:
- prints('Failed to check for plugin update:', as_unicode(e))
- if calibre_update_version != NO_CALIBRE_UPDATE or plugins_update_found > 0:
- self.signal.update_found.emit(calibre_update_version, plugins_update_found)
self.shutdown_event.wait(self.INTERVAL)
def shutdown(self):

View file

@ -1,116 +0,0 @@
# Maintainer: Cowington Post <cowingtonpost@gmail.com>
pkgname=calibre
pkgver=7.21.0
pkgrel=0
pkgdesc="Ebook management application"
# qt6-webengine
arch="x86_64 aarch64"
url="https://calibre-ebook.com"
license="GPL-3.0-or-later"
depends="
font-liberation
libwmf
mtdev
optipng
poppler
py3-apsw
py3-beautifulsoup4
py3-css-parser
py3-cssselect
py3-dateutil
py3-dnspython
py3-feedparser
py3-fonttools
py3-html2text
py3-html5-parser
py3-html5lib
py3-jeepney
py3-lxml
py3-markdown
py3-mechanize
py3-msgpack
py3-netifaces
py3-pillow
py3-psutil
py3-pycryptodome
py3-pygments
py3-pyqt6-webengine
py3-regex
py3-xxhash
py3-zeroconf
qt6-qtimageformats
qt6-qtsvg
qt6-qtwebengine
udisks2
"
makedepends="
cmake
curl
hunspell-dev
hyphen-dev
libmtp-dev
libstemmer-dev
libusb-dev
podofo-dev
py3-pyqt-builder
py3-pyqt6-sip
py3-sip
python3-dev
qt6-qtbase-dev
uchardet-dev
xdg-utils
ffmpeg-dev
"
subpackages="
$pkgname-pyc
$pkgname-doc
$pkgname-bash-completion
$pkgname-zsh-completion
"
source="https://download.calibre-ebook.com/$pkgver/calibre-$pkgver.tar.xz
0001-$pkgname-no-update.patch
"
# net: downloads iso-codes
# !check: no tests ran
options="net !check"
export LANG="en_US.UTF-8"
prepare() {
default_prepare
rm -f resources/calibre-portable.*
}
build() {
python3 setup.py build
python3 setup.py iso639
python3 setup.py iso3166
python3 setup.py liberation_fonts --system-liberation_fonts --path-to-liberation_fonts /usr/share/fonts/liberation
python3 setup.py mathjax
python3 setup.py gui
}
check() {
python3 -m unittest discover
}
package() {
# needed for zsh
mkdir -p "$pkgdir"/usr/share/zsh/site-functions
python3 setup.py install \
--staging-root="$pkgdir"/usr \
--system-plugins-location=/usr/share/calibre/system-plugins
cp -a man-pages/ "$pkgdir"/usr/share/man
rm -r "$pkgdir"/usr/share/calibre/rapydscript/
python3 -m compileall -fq "$pkgdir"/usr
}
sha512sums="
0c2ee610833df83219c0c33b09e1374a8262f1630ccd48e3c4725c92922a3ac5d102ad83fc213457fb9de3efa4f5a2c98ff6dff039828e1661085a1054d7f631 calibre-7.21.0.tar.xz
eb8e7ce40ff8b8daf6e7e55a5dff8ec4dff06c45744266bb48b3194e92ab1196bc91468203e3c2ca1e5144166a7d6be90e6cf0253513e761b56a4c85be4c2c76 0001-calibre-no-update.patch
"

View file

@ -1,2 +0,0 @@
#!/bin/sh
/usr/bin/electron "/usr/lib/caprine"

View file

@ -1,8 +1,8 @@
# Contributor: Aiden Grossman <agrossman154@yahoo.com> # Contributor: Aiden Grossman <agrossman154@yahoo.com>
# Maintainer: # Maintainer: Aiden Grossman <agrossman154@yahoo.com>
pkgname=coin pkgname=coin
pkgver=4.0.0 pkgver=4.0.0
pkgrel=7 pkgrel=5
pkgdesc="OpenGL OpenInventor compatible graphics library" pkgdesc="OpenGL OpenInventor compatible graphics library"
url="https://github.com/coin3d/coin" url="https://github.com/coin3d/coin"
license="BSD-3-Clause" license="BSD-3-Clause"

View file

@ -4,16 +4,15 @@ pkgname=cura
# uranium and curaengine packages must be updated in sync with this verion number # uranium and curaengine packages must be updated in sync with this verion number
# py3-pynest2d and fdm-materials should be checked as well, but their versions are not always in sync # py3-pynest2d and fdm-materials should be checked as well, but their versions are not always in sync
pkgver=5.2.2 pkgver=5.2.2
pkgrel=1 pkgrel=0
pkgdesc="3D printer / slicing GUI built on top of the Uranium framework" pkgdesc="3D printer / slicing GUI built on top of the Uranium framework"
url="https://ultimaker.com/software/ultimaker-cura" url="https://ultimaker.com/software/ultimaker-cura"
# ppc64le: no py3-keyring arch="noarch !ppc64le !x86 !armhf !riscv64 !s390x !armv7" # ppc64le: no py3-keyring
# x86: no curaengine # x86: no curaengine
# armhf: no uranium, qt5-qtquickcontrols, qt5-qtquickcontrols2, qt5-qtgraphicaleffects # armhf: no uranium, qt5-qtquickcontrols, qt5-qtquickcontrols2, qt5-qtgraphicaleffects
# riscv64: no uranium # riscv64: no uranium
# s390x: no py3-trimesh, no py3-numpy-stl # s390x: no py3-trimesh, no py3-numpy-stl
# armv7: no py3-trimesh # armv7: no py3-trimesh
arch="noarch !ppc64le !x86 !armhf !riscv64 !s390x !armv7"
license="LGPL-3.0-or-later" license="LGPL-3.0-or-later"
# add cura-binary-data to depends when packaged # add cura-binary-data to depends when packaged
depends=" depends="
@ -43,10 +42,8 @@ builddir="$srcdir/Cura-$pkgver"
options="!check" # tests broken after v5.x options="!check" # tests broken after v5.x
build() { build() {
local pyver="$(python3 -c 'import sys; print(f"{sys.version_info.major}.{sys.version_info.minor}")')"
cmake -B build -G Ninja \ cmake -B build -G Ninja \
-DCURA_VERSION=$pkgver \ -DCURA_VERSION=$pkgver \
-DPython_VERSION=$pyver \
-DURANIUM_DIR=/usr/share/uranium \ -DURANIUM_DIR=/usr/share/uranium \
-DCMAKE_INSTALL_PREFIX=/usr \ -DCMAKE_INSTALL_PREFIX=/usr \
-DCMAKE_INSTALL_LIBDIR=lib \ -DCMAKE_INSTALL_LIBDIR=lib \

View file

@ -2,7 +2,7 @@
 # Maintainer: Anjandev Momi <anjan@momi.ca>
 pkgname=dex
 pkgver=0.9.0
-pkgrel=1
+pkgrel=0
 pkgdesc="program to generate and execute DesktopEntry files of the Application type"
 url="https://github.com/jceb/dex"
 arch="all"
@ -18,7 +18,7 @@ build() {
 }
 package() {
-	make install PREFIX=/usr MANPREFIX=/usr/share/man DESTDIR="$pkgdir"
+	make install PREFIX=/usr DESTDIR="$pkgdir"
 }
 sha512sums="

View file

@ -1,7 +1,7 @@
 # Contributor: lauren n. liberda <lauren@selfisekai.rocks>
 # Maintainer: lauren n. liberda <lauren@selfisekai.rocks>
 pkgname=electron-tasje
-pkgver=0.7.3
+pkgver=0.7.0
 pkgrel=0
 pkgdesc="Tiny replacement for electron-builder"
 url="https://codeberg.org/selfisekai/electron_tasje/"
@ -31,5 +31,5 @@ package() {
 }
 sha512sums="
-251b7eabe74acdb5c7394f9d4d735b736acf555352785a9896ddaeed37632b238e823e1bb639e1f5a44a50455957ec41e1a585a3b2a9919b5818bb40843bd877 electron_tasje-0.7.3.tar.gz
+1f77dc8a5639c5e61952172b1b30c84cce41b5763d5ce96fa8fb2a02f8f8197a87dd3d9c047a90951d8ddc6f1542d8b0f8c33a3dad233d0868b82f19a765731d electron_tasje-0.7.0.tar.gz
 "

View file

@ -1,43 +1,36 @@
# Contributor: lauren n. liberda <lauren@selfisekai.rocks> # Maintainer: lauren n. liberda <lauren@selfisekai.rocks>
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=electron pkgname=electron
pkgver=33.2.1 pkgver=27.1.2
_gittag=v"${pkgver/_beta/-beta.}" _semver="${pkgver/_beta/-beta.}"
pkgrel=0 pkgrel=0
_chromium=130.0.6723.127 _chromium=118.0.5993.162
_copium_tag=129.1 _depot_tools=b5509953468edd0906f2dc297886939abbd2bed5
_depot_tools=495b23b39aaba2ca3b55dd27cadc523f1cb17ee6 _extra_patches=118.0.5993.11
pkgdesc="Electron cross-platform desktop toolkit" pkgdesc="Electron cross-platform desktop toolkit"
url="https://github.com/electron/electron" url="https://github.com/electron/electron"
arch="aarch64 x86_64" # same as chromium arch="aarch64 x86_64" # same as chromium
license="MIT" license="MIT"
depends="gtk+3.0 so:libudev.so.1 xdg-utils" depends="gtk+3.0 so:libudev.so.1 xdg-utils"
_llvmver=19
makedepends=" makedepends="
ada-dev
alsa-lib-dev alsa-lib-dev
aom-dev aom-dev
base64-dev
bash bash
brotli-dev brotli-dev
bsd-compat-headers bsd-compat-headers
bzip2-dev bzip2-dev
c-ares-dev c-ares-dev
cairo-dev cairo-dev
clang$_llvmver-dev clang16-dev
clang-extra-tools clang-extra-tools
compiler-rt compiler-rt
crc32c-dev
cups-dev cups-dev
curl-dev curl-dev
dav1d-dev dav1d-dev
dbus-glib-dev dbus-glib-dev
double-conversion-dev
eudev-dev eudev-dev
ffmpeg-dev ffmpeg-dev
findutils findutils
flac-dev flac-dev
flatbuffers-dev
flex flex
freetype-dev freetype-dev
gperf gperf
@ -45,7 +38,6 @@ makedepends="
gn gn
gzip gzip
harfbuzz-dev harfbuzz-dev
hdrhistogram-c-dev
hunspell-dev hunspell-dev
http-parser-dev http-parser-dev
hwdata-dev hwdata-dev
@ -58,13 +50,11 @@ makedepends="
libavif-dev libavif-dev
libbsd-dev libbsd-dev
libcap-dev libcap-dev
libdrm-dev
libevent-dev libevent-dev
libexif-dev libexif-dev
libgcrypt-dev libgcrypt-dev
libjpeg-turbo-dev libjpeg-turbo-dev
libnotify-dev libnotify-dev
libsecret-dev
libusb-dev libusb-dev
libva-dev libva-dev
libwebp-dev libwebp-dev
@ -77,34 +67,29 @@ makedepends="
libxslt-dev libxslt-dev
linux-headers linux-headers
lld lld
llvm$_llvmver llvm16
mesa-dev mesa-dev
minizip-dev minizip-dev
nghttp2-dev nghttp2-dev
nodejs nodejs
npm npm
nss-dev nss-dev
openh264-dev
opus-dev opus-dev
pciutils-dev pciutils-dev
perl perl
pipewire-dev pipewire-dev
pulseaudio-dev pulseaudio-dev
py3-httplib2 py3-httplib2
py3-jinja2
py3-parsing py3-parsing
py3-setuptools
py3-six py3-six
python3 python3
qt5-qtbase-dev qt5-qtbase-dev
re2-dev re2-dev
rsync rsync
rust rust
rust-bindgen
samurai samurai
snappy-dev snappy-dev
speex-dev speex-dev
spirv-tools-dev
sqlite-dev sqlite-dev
woff2-dev woff2-dev
xcb-proto xcb-proto
@ -114,86 +99,74 @@ makedepends="
" "
subpackages="$pkgname-lang $pkgname-dev" subpackages="$pkgname-lang $pkgname-dev"
# the lower patches are specific to electron, the top ones are from the equivalent chromium version # the lower patches are specific to electron, the top ones are from the equivalent chromium version
source=" source="https://s3.sakamoto.pl/lnl-aports-snapshots/electron-$_semver-$_chromium.tar.zst
https://ayakael.net/api/packages/mirrors/generic/electron/$_gittag/electron-$_gittag-$_chromium.tar.zst https://gitlab.com/Matt.Jolly/chromium-patches/-/archive/$_extra_patches/chromium-patches-$_extra_patches.tar.gz
copium-$_copium_tag.tar.gz::https://codeberg.org/selfisekai/copium/archive/$_copium_tag.tar.gz
chromium-revert-drop-of-system-java.patch chromium-revert-drop-of-system-java.patch
compiler.patch chromium-use-alpine-target.patch
disable-dns_config_service.patch fix-missing-cstdint-include-musl.patch
disable-failing-tests.patch
fc-cache-version.patch
fix-opus.patch
fstatat-32bit.patch
gdbinit.patch gdbinit.patch
generic-sensor-include.patch import-version.patch
musl-auxv.patch libstdc++13.patch
mman.patch
musl-sandbox.patch musl-sandbox.patch
musl-tid-caching.patch musl-tid-caching.patch
musl-v8-monotonic-pthread-cont_timedwait.patch musl-v8-monotonic-pthread-cont_timedwait.patch
no-execinfo.patch no-execinfo.patch
no-mallinfo.patch no-mallinfo.patch
no-mte.patch
no-res-ninit-nclose.patch no-res-ninit-nclose.patch
no-sandbox-settls.patch no-sandbox-settls.patch
partalloc-no-tagging-arm64.patch
pvalloc.patch
temp-failure-retry.patch temp-failure-retry.patch
yes-musl.patch
electron_icon.patch icon.patch
electron_python-jinja-3.10.patch python-jinja-3.10.patch
electron_webpack-hash.patch vector-const.patch
electron_unbundle-node.patch webpack-hash.patch
electron_system-zlib-headers.patch chromium-icu-74.patch
default.conf default.conf
electron.desktop electron.desktop
electron-launcher.sh electron-launcher.sh
" "
_copium_patches="
cr124-iwyu-sys-select-dawn-terminal.patch
cr126-aarch64-musl-unsupported-relocation.patch
cr129-ffmpeg-no-noh264parse.patch
cr129-musl-metricts-imports.patch
"
# tests are todo for some base checks # tests are todo for some base checks
options="!check net suid" options="!check net suid"
builddir="$srcdir/electron-$_gittag-$_chromium" builddir="$srcdir/electron-$_semver-$_chromium"
export PATH="$PATH:/usr/lib/qt5/bin" export PATH="$PATH:/usr/lib/qt5/bin"
export CC=clang-$_llvmver # clang uses much less memory (and this doesn't support gcc)
export CXX=clang++-$_llvmver export CC=clang-16
export CXX=clang++-16
# required to find the tools # required to find the tools
export AR=llvm-ar export AR=llvm16-ar
export NM=llvm-nm export NM=llvm16-nm
export LD=clang++-$_llvmver export LD=clang++-16
# less log spam, reproducible # less log spam, reproducible
export CFLAGS="${CFLAGS/-g/} -O2 -Wno-builtin-macro-redefined -Wno-deprecated-declarations -Wno-shift-count-overflow -Wno-ignored-attributes" export CFLAGS="${CFLAGS/-g/} -O2 -Wno-unknown-warning-option -Wno-builtin-macro-redefined -Wno-deprecated-declarations"
export CXXFLAGS="${CXXFLAGS/-g/} -O2 -Wno-builtin-macro-redefined -Wno-deprecated-declarations -Wno-invalid-constexpr" export CXXFLAGS="${CXXFLAGS/-g/} -O2 -Wno-unknown-warning-option -Wno-builtin-macro-redefined -Wno-deprecated-declarations"
export CPPFLAGS="${CPPFLAGS/-g/} -D__DATE__= -D__TIME__= -D__TIMESTAMP__=" export CPPFLAGS="${CPPFLAGS/-g/} -D__DATE__= -D__TIME__= -D__TIMESTAMP__="
case "$CARCH" in
aarch64|arm*|riscv64)
# not supported by clang here
export CFLAGS="${CFLAGS/-fstack-clash-protection}"
export CXXFLAGS="${CXXFLAGS/-fstack-clash-protection}"
;;
esac
# breaks chromium-based stuff # breaks chromium-based stuff
export CXXFLAGS="${CXXFLAGS/-D_GLIBCXX_ASSERTIONS=1}" export CXXFLAGS="${CXXFLAGS/-D_GLIBCXX_ASSERTIONS=1}"
# creates a dist tarball that does not need to git clone everything at build time. # creates a dist tarball that does not need to git clone everything at build time.
_distbucket="sakamoto/lnl-aports-snapshots/"
snapshot() { snapshot() {
deps deps
# vpython3 execs system python3 with this set # vpython3 execs system python3 with this set
export VPYTHON_BYPASS="manually managed python not supported by chrome operations" export VPYTHON_BYPASS="manually managed python not supported by chrome operations"
export CHROMIUM_BUILDTOOLS_PATH="$srcdir/src/buildtools" export CHROMIUM_BUILDTOOLS_PATH="$srcdir/src/buildtools"
export DEPOT_TOOLS_UPDATE=0
mkdir -p "$srcdir" mkdir -p "$srcdir"
cd "$srcdir" cd "$srcdir"
if ! [ -d src ]; then
git clone --branch=$_chromium --depth=1 \
https://chromium.googlesource.com/chromium/src.git
fi
if ! [ -d electron ]; then
git clone https://github.com/electron/electron.git
fi
if ! [ -d depot_tools ]; then if ! [ -d depot_tools ]; then
( (
@ -209,75 +182,83 @@ snapshot() {
echo "solutions = [ echo "solutions = [
{ {
\"name\": \"src/electron\", \"name\": \"src/electron\",
\"url\": \"https://github.com/electron/electron.git@$_gittag\", \"url\": \"file://$srcdir/electron@v$_semver\",
\"deps_file\": \"DEPS\", \"deps_file\": \"DEPS\",
\"managed\": False, \"managed\": False,
\"custom_deps\": { \"custom_deps\": {
\"src\": \"https://chromium.googlesource.com/chromium/src.git@$_chromium\", \"src\": None,
}, },
\"custom_vars\": {}, \"custom_vars\": {},
}, },
]" > .gclient ]" > .gclient
python3 depot_tools/gclient.py sync \ python3 depot_tools/gclient.py sync \
--no-history \ --with_branch_heads \
--with_tags \
--nohooks --nohooks
python3 src/build/landmines.py python3 src/build/landmines.py
python3 src/build/util/lastchange.py -o src/build/util/LASTCHANGE \ python3 src/build/util/lastchange.py -o src/build/util/LASTCHANGE
python3 src/build/util/lastchange.py -s src/third_party/dawn \
--revision src/gpu/webgpu/DAWN_VERSION --revision src/gpu/webgpu/DAWN_VERSION
python3 src/build/util/lastchange.py -m GPU_LISTS_VERSION \ python3 src/build/util/lastchange.py -m GPU_LISTS_VERSION \
--revision-id-only --header src/gpu/config/gpu_lists_version.h --revision-id-only --header src/gpu/config/gpu_lists_version.h
python3 src/build/util/lastchange.py -m SKIA_COMMIT_HASH \ python3 src/build/util/lastchange.py -m SKIA_COMMIT_HASH \
-s src/third_party/skia --header src/skia/ext/skia_commit_hash.h -s src/third_party/skia --header src/skia/ext/skia_commit_hash.h
# rolled newer chromium with it included # why?
sed -i '/reland_mojom_ts_generator_handle_empty_module_path_identically_to.patch/d' src/electron/patches/chromium/.patches cp -r electron/patches/ffmpeg src/electron/patches/
python3 src/electron/script/apply_all_patches.py \ python3 electron/script/apply_all_patches.py \
src/electron/patches/config.json electron/patches/config.json
mv src $pkgname-$_gittag-$_chromium python3 src/tools/update_pgo_profiles.py \
--target=linux \
update \
--gs-url-base=chromium-optimization-profiles/pgo_profiles
python3 src/tools/download_optimization_profile.py \
--newest_state=src/chrome/android/profiles/newest.txt \
--local_state=src/chrome/android/profiles/local.txt \
--output_name=src/chrome/android/profiles/afdo.prof \
--gs_url_base=chromeos-prebuilt/afdo-job/llvm
mv src $pkgname-$_semver-$_chromium
# extra binaries are most likely things we don't want, so nuke them all # extra binaries are most likely things we don't want, so nuke them all
for elf in $(scanelf -RA -F "%F" $pkgname-$_gittag-$_chromium); do for elf in $(scanelf -RA -F "%F" $pkgname-$_semver-$_chromium); do
rm -f "$elf" rm -f "$elf"
done done
msg "generating tarball.. (this takes a while)" msg "generating tarball.. (this takes a while)"
tar -cf $pkgname-$_gittag-$_chromium.tar \ tar -cf $pkgname-$_semver-$_chromium.tar \
--exclude="ChangeLog*" \ --exclude="ChangeLog*" \
--exclude="testdata/*" \ --exclude="testdata/*" \
--exclude="test_data/*" \ --exclude="test_data/*" \
--exclude="android_rust_toolchain/*" \ --exclude="android_rust_toolchain/*" \
--exclude="third_party/instrumented_libs/binaries" \
--exclude-backups \ --exclude-backups \
--exclude-caches-all \ --exclude-caches-all \
--exclude-vcs \ --exclude-vcs \
$pkgname-$_gittag-$_chromium $pkgname-$_semver-$_chromium
zstd --auto-threads=logical --ultra --long -22 -T"${ZSTD_LIMIT:-0}" -vv $pkgname-$_gittag-$_chromium.tar -o "$SRCDEST"/$pkgname-$_gittag-$_chromium.tar.zst zstd --auto-threads=logical --ultra --long -22 -T"${ZSTD_LIMIT:-0}" -vv $pkgname-$_semver-$_chromium.tar
mcli cp "$SRCDEST"/$pkgname-$_gittag-$_chromium.tar.zst "$_distbucket" }
_extra_patch() {
msg chromium-"$1".patch
patch -Np1 < "$srcdir"/chromium-patches-"$_extra_patches"/chromium-"$1".patch
} }
prepare() { prepare() {
dos2unix third_party/vulkan_memory_allocator/include/vk_mem_alloc.h
default_prepare default_prepare
for i in $_copium_patches; do _extra_patch 118-SensorReadingField-include
case "$i" in _extra_patch 117-material-color-include
*.patch) _extra_patch 118-system-freetype
msg "${i%::*}" _extra_patch 117-system-zstd
patch -p1 -i "$srcdir/copium/$i" || failed="$failed $i" _extra_patch 118-compiler
;;
esac
done
if [ -n "$failed" ]; then
error "The following patches failed to apply:"
for i in $failed; do
printf " %s\n" "$i" >&2
done
exit 1
fi
git init -q . git init -q .
@ -295,9 +276,9 @@ prepare() {
git config commit.gpgsign false git config commit.gpgsign false
git add LICENSE git add LICENSE
git commit -m "init" git commit -m "init"
git tag "$_gittag" git tag "v$_semver"
git pack-refs git pack-refs
yarn install --frozen-lockfile --ignore-scripts yarn install --frozen-lockfile --no-scripts
) )
( (
@ -305,41 +286,33 @@ prepare() {
./update_npm_deps ./update_npm_deps
) )
# reusable system library settings
# flatbuffers - tensorflow has a few static_asserts for a specific patch version
# highway - requires highway>=1.1.0 (arm failures)
# libavif - https://github.com/AOMediaCodec/libavif/commit/50a541469c98009016af8dcc9f83a1be79f3a7d9
# libaom - https://aomedia.googlesource.com/aom/+/706ee36dcc82%5E%21/ # libaom - https://aomedia.googlesource.com/aom/+/706ee36dcc82%5E%21/
# but watch this space: https://aomedia-review.googlesource.com/c/aom/+/188606 local use_system="
# jsoncpp, re2, snappy, swiftshader-*, woff2 - requires use_custom_libcxx=false
local chromium_use_system="
brotli
crc32c
dav1d dav1d
double-conversion
ffmpeg ffmpeg
flac flac
fontconfig fontconfig
freetype freetype
harfbuzz-ng harfbuzz-ng
icu icu
jsoncpp
libavif
libdrm libdrm
libevent libevent
libjpeg libjpeg
libsecret
libusb
libwebp libwebp
libxml libxml
libxslt libxslt
openh264
opus opus
re2
snappy
woff2
zlib zlib
zstd zstd
" "
for _lib in $chromium_use_system jinja2 libjpeg_turbo unrar; do for _lib in $use_system libjpeg_turbo; do
msg "Removing buildscripts for system provided $_lib" msg "Removing buildscripts for system provided $_lib"
_lib="${_lib/swiftshader-/swiftshader/third_party/}"
find . -type f -path "*third_party/$_lib/*" \ find . -type f -path "*third_party/$_lib/*" \
\! -path "*third_party/$_lib/chromium/*" \ \! -path "*third_party/$_lib/chromium/*" \
\! -path "*third_party/$_lib/google/*" \ \! -path "*third_party/$_lib/google/*" \
@ -348,39 +321,10 @@ prepare() {
\! -path './third_party/pdfium/third_party/freetype/include/pstables.h' \ \! -path './third_party/pdfium/third_party/freetype/include/pstables.h' \
\! -path './third_party/harfbuzz-ng/utils/hb_scoped.h' \ \! -path './third_party/harfbuzz-ng/utils/hb_scoped.h' \
\! -path './third_party/crashpad/crashpad/third_party/zlib/zlib_crashpad.h' \ \! -path './third_party/crashpad/crashpad/third_party/zlib/zlib_crashpad.h' \
\! -regex '.*\.\(gn\|gni\|gyp\|gypi\|isolate\|py\)' \ \! -regex '.*\.\(gn\|gni\|isolate\|py\)' \
-delete -delete
done done
# llhttp - 9.x needed, 8.x in repo (2023-12-17)
# ada - needs use_custom_libcxx=false
local node_use_system="
base64
brotli
cares
corepack
histogram
nghttp2
nghttp3
ngtcp2
zlib
"
# some of these are provided by system, e.g. brotli. some are from chromium,
# e.g. boringssl (as openssl). some are not in use at all (corepack)
for _lib in $node_use_system openssl; do
msg "Removing buildscripts for $_lib"
find . -type f -path "*third_party/electron_node/deps/$_lib/*" \
\! -path "*third_party/electron_node/deps/$_lib/chromium/*" \
\! -path "*third_party/electron_node/deps/$_lib/google/*" \
\! -regex '.*\.\(gn\|gni\|gyp\|gypi\|isolate\|py\)' \
-delete
done
# XXX: hack. unbundle-node.patch uses this list to switch things
# in config.gypi. https://github.com/electron/electron/issues/40836
echo $node_use_system > third_party/electron_node/use_system.txt
rm -rf third_party/electron_node/tools/inspector_protocol/jinja2
# https://groups.google.com/a/chromium.org/d/topic/chromium-packagers/9JX1N2nf4PU/discussion # https://groups.google.com/a/chromium.org/d/topic/chromium-packagers/9JX1N2nf4PU/discussion
touch chrome/test/data/webui/i18n_process_css_test.html touch chrome/test/data/webui/i18n_process_css_test.html
# Use the file at run time instead of effectively compiling it in # Use the file at run time instead of effectively compiling it in
@ -389,15 +333,9 @@ prepare() {
msg "Running debundle script" msg "Running debundle script"
python3 build/linux/unbundle/replace_gn_files.py --system-libraries \ python3 build/linux/unbundle/replace_gn_files.py --system-libraries \
$chromium_use_system $use_system
python3 third_party/libaddressinput/chromium/tools/update-strings.py python3 third_party/libaddressinput/chromium/tools/update-strings.py
# flatc is used in build workflows since https://crrev.com/c/5595037,
# but the pre-generated files are still checked-in. remove to make sure
# they're not used. (if used, they will break builds on version mismatch.)
# https://github.com/tensorflow/tensorflow/issues/62298
# find third_party/tflite/ -name '*_generated.h' -delete
# prevent annoying errors when regenerating gni # prevent annoying errors when regenerating gni
sed -i 's,^update_readme$,#update_readme,' \ sed -i 's,^update_readme$,#update_readme,' \
third_party/libvpx/generate_gni.sh third_party/libvpx/generate_gni.sh
@ -410,7 +348,8 @@ prepare() {
sed -i -e 's/\<xmlMalloc\>/malloc/' -e 's/\<xmlFree\>/free/' \ sed -i -e 's/\<xmlMalloc\>/malloc/' -e 's/\<xmlFree\>/free/' \
third_party/blink/renderer/core/xml/*.cc \ third_party/blink/renderer/core/xml/*.cc \
third_party/blink/renderer/core/xml/parser/xml_document_parser.cc \ third_party/blink/renderer/core/xml/parser/xml_document_parser.cc \
third_party/libxml/chromium/*.cc third_party/libxml/chromium/*.cc \
third_party/maldoca/src/maldoca/ole/oss_utils.h
_configure _configure
} }
@ -419,76 +358,63 @@ _configure() {
cd "$builddir" cd "$builddir"
msg "Configuring build" msg "Configuring build"
case "$USE_CCACHE" in
1)
local cc_wrapper="ccache"
;;
*)
local cc_wrapper=""
;;
esac
local maglev=true
local symbol_level=0
local vaapi=true
# shellcheck disable=2089 # shellcheck disable=2089
local gn_config=" local gn_config="
clang_base_path=\"/usr\"
custom_toolchain=\"//build/toolchain/linux/unbundle:default\"
host_toolchain=\"//build/toolchain/linux/unbundle:default\"
import(\"//electron/build/args/release.gn\") import(\"//electron/build/args/release.gn\")
blink_enable_generated_code_formatting=false blink_enable_generated_code_formatting=false
cc_wrapper=\"$cc_wrapper\"
chrome_pgo_phase=0 chrome_pgo_phase=0
clang_base_path=\"/usr\"
clang_use_chrome_plugins=false clang_use_chrome_plugins=false
clang_version=\"$_llvmver\"
custom_toolchain=\"//build/toolchain/linux/unbundle:default\"
disable_fieldtrial_testing_config=true
enable_hangout_services_extension=true
enable_nacl=false
enable_nocompile_tests=false
enable_stripping=false
enable_rust=true
enable_vr=false
fatal_linker_warnings=false fatal_linker_warnings=false
ffmpeg_branding=\"Chrome\" ffmpeg_branding=\"Chrome\"
host_toolchain=\"//build/toolchain/linux/unbundle:default\" icu_use_data_file=true
icu_use_data_file=false
is_cfi=false is_cfi=false
is_clang=true is_clang=true
is_component_ffmpeg=true is_component_ffmpeg=true
is_debug=false is_debug=false
is_official_build=true is_official_build=true
symbol_level=0
treat_warnings_as_errors=false
angle_enable_gl_null=false
build_tflite_with_xnnpack=false
build_with_tflite_lib=true
disable_fieldtrial_testing_config=true
enable_hangout_services_extension=true
enable_library_cdms=false
enable_media_remoting=false
enable_nacl=false
enable_paint_preview=false
enable_reading_list=false
enable_remoting=false
enable_reporting=false
enable_rust=false
enable_screen_ai_service=false
enable_service_discovery=false
enable_stripping=false
enable_vr=false
ozone_platform_headless=false
link_pulseaudio=true link_pulseaudio=true
proprietary_codecs=true proprietary_codecs=true
rtc_link_pipewire=true rtc_link_pipewire=true
rtc_use_pipewire=true rtc_use_pipewire=true
rustc_version=\"yes\" use_custom_libcxx=false
rust_bindgen_root=\"/usr\"
rust_sysroot_absolute=\"/usr\"
safe_browsing_use_unrar=false
symbol_level=$symbol_level
treat_warnings_as_errors=false
use_custom_libcxx=true
use_lld=true
use_pulseaudio=true
use_safe_libstdcxx=false
use_system_libffi=true
use_sysroot=false
use_thin_lto=false
use_vaapi=$vaapi
v8_enable_maglev=$maglev
skia_use_dawn=false
use_dawn=false use_dawn=false
use_system_ada=false use_pulseaudio=true
use_system_base64=true use_sysroot=false
use_system_cares=true use_system_cares=true
use_system_histogram=true use_system_freetype=true
use_system_harfbuzz=true
use_system_lcms2=true use_system_lcms2=true
use_system_libdrm=true
use_system_libffi=true use_system_libffi=true
use_system_llhttp=false use_system_libjpeg=true
use_system_nghttp2=true use_system_nghttp2=true
use_vaapi=true
" "
# shellcheck disable=2086,2090,2116 # shellcheck disable=2086,2090,2116
@ -500,11 +426,10 @@ build() {
export PATH="$PATH:/usr/lib/qt5/bin" export PATH="$PATH:/usr/lib/qt5/bin"
ninja -C out/Release \ ninja -C out/Release \
copy_node_headers \
electron_dist_zip \ electron_dist_zip \
node_gypi_headers \ node_gypi_headers \
node_version_header node_version_header \
tar_headers
} }
package() { package() {
@ -518,7 +443,8 @@ package() {
install -Dm755 "$srcdir"/default.conf "$pkgdir"/etc/electron/default.conf install -Dm755 "$srcdir"/default.conf "$pkgdir"/etc/electron/default.conf
mkdir -p "$pkgdir"/usr/include/electron mkdir -p "$pkgdir"/usr/include/electron
cp -rv "$builddir"/out/Release/gen/node_headers "$pkgdir"/usr/include/electron
mv -v "$builddir"/out/Release/gen/node_headers "$pkgdir"/usr/include/electron
ln -sv /usr/include/electron/node_headers/include/node "$pkgdir"/usr/include/electron/node ln -sv /usr/include/electron/node_headers/include/node "$pkgdir"/usr/include/electron/node
mkdir -p "$pkgdir"/usr/include/electron/node_headers/include/nan mkdir -p "$pkgdir"/usr/include/electron/node_headers/include/nan
@ -544,35 +470,30 @@ lang() {
} }
sha512sums=" sha512sums="
e2df4454f4178af859c13aadee4ea04a5b6aa202972cad625e54bc68f5b8c25e098e50d428ec9c1886c37ccf49aaaedb4c5f02fc8bdd498314ba216901932185 electron-v33.2.1-130.0.6723.127.tar.zst 07f653f24e7fe1ef96a0ff676fa3c987fd0826980b5e9611705cb7fc44f00182fa62e67e67c6df5a3b1b4c063d99a3266054a83aa67fb44b11ffffcd8e23c3eb electron-27.1.2-118.0.5993.162.tar.zst
6138b3dbf3903c78f4ca1ed5a6c3c3c485471ded31976010484ce8893d03953df2b8f066a4fe84bbde5ae7ef9bbff664ef917e247b2e95dd471de40f2774d7d0 copium-129.1.tar.gz 194c3a7a0fa03a85df6fe52ece3d53d4d15b9d0cb440b56a2ccb1b5c0d3f6481b6f7287aa705c596ceea92d475677ddaf58926f3b31c03a3c20e80ad7e481ce7 chromium-patches-118.0.5993.11.tar.gz
29bb685e03356a77df5fd347cdf55194cc8b3265c421cc76e54d64edefc329dbcb052deb26b22e8f587ce68456876c071de1b7d258dd0fcc6ee66c875ec4a020 chromium-revert-drop-of-system-java.patch 29bb685e03356a77df5fd347cdf55194cc8b3265c421cc76e54d64edefc329dbcb052deb26b22e8f587ce68456876c071de1b7d258dd0fcc6ee66c875ec4a020 chromium-revert-drop-of-system-java.patch
53b7cdee8f7bfb4c9371cb385c473e34ed3d8ac7efaa43c0af061107560be30d8747b07fb0b16c01079b8c770f2c721bb5a8081313b7c126856ea4078a74da2a compiler.patch fa291e941076146d0edd5b96c088240a44a6e0aca3dfc744929655607182d2dc47e6c35ecb419f7c623fcf7f26dc3c4dd924dbf5ed10c3b986283f5ef2f72573 chromium-use-alpine-target.patch
4057cc78f10bfd64092bc35a373869abb1d68b880cdbca70422f39ffd78a929c19c7728d4d4c40709aaba25581148a93ae5343e724849fd35323062ed68753fa disable-dns_config_service.patch 9200f78bad70e95c648a5e8392d50642190600f655c6baa366ff6467ebad52d3b3f305dad58f3610da67136f4b723557653b174ec5c25be8d8737ee04d9ee09f fix-missing-cstdint-include-musl.patch
2470904846e3adde2c9506f9e78220daca0932320b628dd3d427bf2b7c17a8f7880cb97e787b046c28de7aca642e1a8d30824d6049905976da77e7473baa64da disable-failing-tests.patch
5fc5c012c1db6cf1ba82f38c6f3f4f5ca3a209e47ac708a74de379b018e0649b7694877c9571ef79002dde875ffc07b458a3355425f1c01867f362c66c2bc1bf fc-cache-version.patch
b24563e9a738c00fce7ff2fbdee3d7c024d9125d7c74d9ab90af6bdb16f7ec8419f2c8aa78c0640f6d5d81c17dc2c673a194401d354f466749672729b48ed068 fix-opus.patch
c63dee5044353eb306a39ca1526158c0f003ab310ecb03d1c368dc2a979454590c84b8d3c15484517d5e66bb8add9b231da9abbadf2e50850abd72ac1345c4ab fstatat-32bit.patch
33ee60863cc438ef57ffef92ba4cf67a856a5ffc16138bce241bcf87e47b15154aa86918e793c26f7ec4dc62a445257ad5673ed7001daf22c4043cf6cc57da7f gdbinit.patch 33ee60863cc438ef57ffef92ba4cf67a856a5ffc16138bce241bcf87e47b15154aa86918e793c26f7ec4dc62a445257ad5673ed7001daf22c4043cf6cc57da7f gdbinit.patch
36a764fa73443b47d38050b52dbe6ad2fa8d67201ff4ccdbad13b52308ef165ca046aac6f9609fe35890a6485f0f3e672e78cc41e3e44f3cdc7f145e540524e8 generic-sensor-include.patch 8de65109ece27ea63bd469f2220c56b8c752ba0a50fdf390082a2d5ae74b8e010199126175569f6d5084270dd4e0571e68aec32c0bca8211a6699925b3a09124 import-version.patch
99bcc7dd485b404a90c606a96addab1d900852128d44fb8cea8acc7303189ef87c89a7b0e749fd0e10c5ef5f6bf1fadeb5c16a34503cab6a59938ce2653d887e musl-auxv.patch 49851d42ce8ccd533d01d1bb2477930802b0bcebab8dd52f2da292088378c6ed9b74146e7dad55edfe096281fc84b2c55abaf832744fd4553a97c38ed891df3a libstdc++13.patch
51f1959bd622af26a1c3a1f4b0ad9a5bfa461057aa4cf9960c568dddf8ac47d55989c277f5d5ab5db040a04c54925a531af7a1cc767559218b408eaa6bdd7577 musl-sandbox.patch 0e991842e23a4b9133898125eeb39e45e3f86f886eef5d2f0d9a72ee143a3e124b3b4f60be94edd57ce4185bcd69704edb51f76d08fdb6207f5559a08dd41ab0 mman.patch
50c274a420bb8a7f14fcb56e40920dac8f708792a4520789b4987facea459bef88113d5a2b60fa8c57bee6e92bff3617d6b73fa305c8c44614c638971cffd440 musl-sandbox.patch
e7163ac5810ac85366cef2447412287c856e3d67c6b77f219a6e5a418b1965b98e449c409424ad0704a5bded9355dd0aec3dc4585918ce5a2ab36c079707afe2 musl-tid-caching.patch e7163ac5810ac85366cef2447412287c856e3d67c6b77f219a6e5a418b1965b98e449c409424ad0704a5bded9355dd0aec3dc4585918ce5a2ab36c079707afe2 musl-tid-caching.patch
92eb002718026611f5542362ad69b67f0a398ff71b3fca5c05d55cb5c6f9f29334e5e127bb4860cfaa3fba0f0d4c901e2b98808217e7dc02e254a64a5c9521aa musl-v8-monotonic-pthread-cont_timedwait.patch 92eb002718026611f5542362ad69b67f0a398ff71b3fca5c05d55cb5c6f9f29334e5e127bb4860cfaa3fba0f0d4c901e2b98808217e7dc02e254a64a5c9521aa musl-v8-monotonic-pthread-cont_timedwait.patch
a250cff50d282b02ce0f28880d0a2b4fb8e7df51bc072bfeeddc561c29a7c76453dbcbc7b17b82966a7b30a31409d2555720d1dcf963e1b3fb8a2a06a6abcf46 no-execinfo.patch 8cc774e8d84e434960222c0497ad8193ae35c0732f98d3282d5fd4b4930f914809eec97832c199517ca89ca6b9d1d011db5ce533c40c68ce5fa464609d131a23 no-execinfo.patch
0b41aeb6b212f9c3f61aa0a8d3085c9e865a2e68f3270ceec2376aab67f337ac46eaea7da36d3fd7219e2a1cb731b7aa2d3fb619a374d2b7653976b9f4f384bb no-mallinfo.patch b5479874d125ee95a311295f227f8881a83023ec34fded7a6160b3ae32ea3ba0f2b833a9fb264c57f3d22746b6d8b00bdc8eb2ff86c43c412d6d3b55ae15b16b no-mallinfo.patch
8a52ff52201a5e20344f5497ee2ffef0520f7b2d934be92227e49c3f2c12a94c33650eefc88a0e451a6b81d44ce197db421aaec7388e6bb1cb525a43628779d3 no-mte.patch
e4c4e5bc6f828f9c883dd418c0ba01887949c29c311f76206a1ec29f620b0c0ba0452949dc2778a9c46ea066405857536964a36436a68eecf7da7952736333cf no-res-ninit-nclose.patch e4c4e5bc6f828f9c883dd418c0ba01887949c29c311f76206a1ec29f620b0c0ba0452949dc2778a9c46ea066405857536964a36436a68eecf7da7952736333cf no-res-ninit-nclose.patch
6dc4d8dc92e685dace62265a1ddb3aebc558aed54d20ff6d36b030be0c48d7e84662326c31363612492574d9a03c62653cdc21a60995b97dee1d75cae86a9f9b no-sandbox-settls.patch 6dc4d8dc92e685dace62265a1ddb3aebc558aed54d20ff6d36b030be0c48d7e84662326c31363612492574d9a03c62653cdc21a60995b97dee1d75cae86a9f9b no-sandbox-settls.patch
f2b08538ff57c50b3772a07ca91845f9d45f4a5112f608b6192d4fb5d7be48f478c0c36194d95ab7bbf933e0278e5c6d578619d8643895cdc40386eebc5b975f partalloc-no-tagging-arm64.patch
03f829a2da633533ef3fd0f287f5ec602d936a97a98b53cd2415553c2537ae9d571f35397ca7c9fb3f4b0806c300e3b189569f8d979ca132e1a2a4dae7206396 pvalloc.patch
e48693e6b7aeebf69a5acbf80d9a35defe4c23835121dfeb58b051ac7c527e758a41004f4d193274fe1b01c0bfb1dbc77b09cb6a404a3fdee507a2918afb0edb temp-failure-retry.patch e48693e6b7aeebf69a5acbf80d9a35defe4c23835121dfeb58b051ac7c527e758a41004f4d193274fe1b01c0bfb1dbc77b09cb6a404a3fdee507a2918afb0edb temp-failure-retry.patch
914ccf649d7771f19f209ab97f99c481aebc6f66174d68e8b539f6ad4a70bc8cb0fae2df6dadbf0415958ffb3574c420fe029079dcce45f5e5add4db2e903566 yes-musl.patch 905565c10f5e5600e7d4db965c892cc45009a258e9995da958974d838ace469e1db1019195307e8807860d5b55ba6bfeea478b1f39a9b99e82c619b2816a1a22 icon.patch
465107da7818b237e3c144a318ab80c3c9343b51ed38b8971ef204692d13346929becbe94cefad4c153788d3a200642143584d5ca070f6304e768ba2139c19ec electron_icon.patch e05180199ee1d559e4e577cedd3e589844ecf40d98a86321bf1bea5607b02eeb5feb486deddae40e1005b644550331f6b8500177aa7e79bcb3750d3c1ceb76c3 python-jinja-3.10.patch
e05180199ee1d559e4e577cedd3e589844ecf40d98a86321bf1bea5607b02eeb5feb486deddae40e1005b644550331f6b8500177aa7e79bcb3750d3c1ceb76c3 electron_python-jinja-3.10.patch 71571b15cf8bd6259b7fd22bea0e46b64890f3db776365de33fe539f26ce9ef99459e05c3dde9434c3657225bc67160abc915acd93033cb487c770c6a2a5975f vector-const.patch
2aa340854316f1284217c0ca17cbf44953684ad6c7da90815117df30928612eb9fb9ffb734b948dfc309cd25d1a67cd57f77aac2d052a3dd9aca07a3a58cbb30 electron_webpack-hash.patch 2aa340854316f1284217c0ca17cbf44953684ad6c7da90815117df30928612eb9fb9ffb734b948dfc309cd25d1a67cd57f77aac2d052a3dd9aca07a3a58cbb30 webpack-hash.patch
57aa81d46b9cc931092d9d9b3cb4a9859f86c183a236bc5cca6abbaeca86b82bf1b537dd9cb3412114fa4e86087c0022ee3f7e88de974d29b309e9d1714df7a5 electron_unbundle-node.patch 4c540972fa12acd9f0aafb8dc7e9987c3d6e4f28ff679dde522ebcec2dc5ae1a62d9d255bed0a30b9c79ae3b90ab0f5b9ae1ef5b7bf338612e28d9ef70250ca3 chromium-icu-74.patch
1b35edcf0b41e39e20c4d64dbb978bcaab8036f2fe839930709b269c50cb1321458a15b4d0013246f9e03f58f250a1e3a57ea910db1aa0adbd602a6a11ad33b9 electron_system-zlib-headers.patch 07e9203b05402f81c0ded5871a845e37bdc4c09b7bb2839312396f298a9ce8196e2c24508675e3d6f695f1e2b0ff1c2c64f4e9dfff3ff5359a87cb7b9b972393 default.conf
e8ea87c547546011c4c8fc2de30e4f443b85cd4cfcff92808e2521d2f9ada03feefb8e1b0cf0f6b460919c146e56ef8d5ad4bb5e2461cc5247c30d92eb4d068e default.conf
191559fc7aa1ea0353c6fb0cc321ee1d5803a0e44848c8be941cfab96277b0de6a59962d373e2a2a1686c8f9be2bcf2d2f33706759a339a959e297d3f7fda463 electron.desktop 191559fc7aa1ea0353c6fb0cc321ee1d5803a0e44848c8be941cfab96277b0de6a59962d373e2a2a1686c8f9be2bcf2d2f33706759a339a959e297d3f7fda463 electron.desktop
5f7ba5ad005f196facec1c0f26108356b64cafb1e5cfa462ff714a33b8a4c757ac00bfcb080da09eb5b65032f8eb245d9676a61ec554515d125ed63912708648 electron-launcher.sh ff1844036c8ae0a0a57a16211a816bc0ad550ccf6ea1cf718e228b8c95b9c4f5c9772d1a1a23638c0e140703a7b52874371e27a0d9d54a7b9468e5c384759be5 electron-launcher.sh
" "

View file

@ -0,0 +1,20 @@
See ICU change https://github.com/unicode-org/icu/commit/2e45e6ec0e84a1c01812015a254ea31b286316fb
Similar has happened in the past. See:
https://chromium.googlesource.com/chromium/src/+/e60b571faa3f14dd9119a6792dccf12f8bf80192
diff --git a/third_party/blink/renderer/platform/text/text_break_iterator.cc b/third_party/blink/renderer/platform/text/text_break_iterator.cc
index ddfbd51..247da06 100644
--- a/third_party/blink/renderer/platform/text/text_break_iterator.cc
+++ b/third_party/blink/renderer/platform/text/text_break_iterator.cc
@@ -161,7 +161,9 @@ static const unsigned char kAsciiLineBreakTable[][(kAsciiLineBreakTableLastChar
};
// clang-format on
-#if U_ICU_VERSION_MAJOR_NUM >= 58
+#if U_ICU_VERSION_MAJOR_NUM >= 74
+#define BA_LB_COUNT (U_LB_COUNT - 8)
+#elif U_ICU_VERSION_MAJOR_NUM >= 58
#define BA_LB_COUNT (U_LB_COUNT - 3)
#else
#define BA_LB_COUNT U_LB_COUNT

View file

@ -0,0 +1,30 @@
building for arm targets by default passes --target to clang, because it
assumes it's cross compiling (so passes --target as if the host is different,
instead of assuming default)
probably also works: removing this entirely. but to be safe, pass the alpine clang host triple
--
--- a/build/config/compiler/BUILD.gn
+++ b/build/config/compiler/BUILD.gn
@@ -915,8 +915,8 @@ config("compiler_cpu_abi") {
} else if (current_cpu == "arm") {
if (is_clang && !is_android && !is_nacl &&
!(is_chromeos_lacros && is_chromeos_device)) {
- cflags += [ "--target=arm-linux-gnueabihf" ]
- ldflags += [ "--target=arm-linux-gnueabihf" ]
+ cflags += [ "--target=armv7-alpine-linux-musleabihf" ]
+ ldflags += [ "--target=armv7-alpine-linux-musleabihf" ]
}
if (!is_nacl) {
cflags += [
@@ -930,8 +930,8 @@ config("compiler_cpu_abi") {
} else if (current_cpu == "arm64") {
if (is_clang && !is_android && !is_nacl && !is_fuchsia &&
!(is_chromeos_lacros && is_chromeos_device)) {
- cflags += [ "--target=aarch64-linux-gnu" ]
- ldflags += [ "--target=aarch64-linux-gnu" ]
+ cflags += [ "--target=aarch64-alpine-linux-musl" ]
+ ldflags += [ "--target=aarch64-alpine-linux-musl" ]
}
if (is_android) {
# Outline atomics crash on Exynos 9810. http://crbug.com/1272795
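The hunks above swap the hard-coded *-linux-gnu target triples that Chromium passes to clang on arm and arm64 for Alpine's musl triples, so the build is no longer treated as a glibc cross-compile. A quick way to confirm the triple an Alpine toolchain actually defaults to (output is an example, not taken from this diff):

    clang -dumpmachine   # e.g. aarch64-alpine-linux-musl
    cc -dumpmachine      # the distro gcc reports the matching musl triple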

View file

@ -1,135 +0,0 @@
--- ./build/config/compiler/BUILD.gn.orig
+++ ./build/config/compiler/BUILD.gn
@@ -568,24 +568,6 @@
}
}
- # TODO(crbug.com/40283598): This causes binary size growth and potentially
- # other problems.
- # TODO(crbug.com/40284925): This isn't supported by Cronet's mainline llvm version.
- if (default_toolchain != "//build/toolchain/cros:target" &&
- !llvm_android_mainline) {
- cflags += [
- "-mllvm",
- "-split-threshold-for-reg-with-hint=0",
- ]
- if (use_thin_lto && is_a_target_toolchain) {
- if (is_win) {
- ldflags += [ "-mllvm:-split-threshold-for-reg-with-hint=0" ]
- } else {
- ldflags += [ "-Wl,-mllvm,-split-threshold-for-reg-with-hint=0" ]
- }
- }
- }
-
# TODO(crbug.com/40192287): Investigate why/if this should be needed.
if (is_win) {
cflags += [ "/clang:-ffp-contract=off" ]
@@ -998,17 +980,6 @@
# `-nodefaultlibs` from the linker invocation from Rust, which would be used
# to compile dylibs on Android, such as for constructing unit test APKs.
"-Cdefault-linker-libraries",
-
- # To make Rust .d files compatible with ninja
- "-Zdep-info-omit-d-target",
-
- # If a macro panics during compilation, show which macro and where it is
- # defined.
- "-Zmacro-backtrace",
-
- # For deterministic builds, keep the local machine's current working
- # directory from appearing in build outputs.
- "-Zremap-cwd-prefix=.",
]
if (!is_win || force_rustc_color_output) {
@@ -1175,8 +1146,8 @@
} else if (current_cpu == "arm") {
if (is_clang && !is_android && !is_nacl &&
!(is_chromeos_lacros && is_chromeos_device)) {
- cflags += [ "--target=arm-linux-gnueabihf" ]
- ldflags += [ "--target=arm-linux-gnueabihf" ]
+ cflags += [ "--target=armv7-alpine-linux-musleabihf" ]
+ ldflags += [ "--target=armv7-alpine-linux-musleabihf" ]
}
if (!is_nacl) {
cflags += [
@@ -1190,8 +1161,8 @@
} else if (current_cpu == "arm64") {
if (is_clang && !is_android && !is_nacl && !is_fuchsia &&
!(is_chromeos_lacros && is_chromeos_device)) {
- cflags += [ "--target=aarch64-linux-gnu" ]
- ldflags += [ "--target=aarch64-linux-gnu" ]
+ cflags += [ "--target=aarch64-alpine-linux-musl" ]
+ ldflags += [ "--target=aarch64-alpine-linux-musl" ]
}
} else if (current_cpu == "mipsel" && !is_nacl) {
ldflags += [ "-Wl,--hash-style=sysv" ]
@@ -1982,7 +1953,7 @@
defines = [ "_HAS_NODISCARD" ]
}
} else {
- cflags = [ "-Wall" ]
+ cflags = []
if (is_clang) {
# Enable extra warnings for chromium_code when we control the compiler.
cflags += [ "-Wextra" ]
--- ./build/config/rust.gni.orig
+++ ./build/config/rust.gni
@@ -185,11 +185,11 @@
rust_abi_target = ""
if (is_linux || is_chromeos) {
if (current_cpu == "arm64") {
- rust_abi_target = "aarch64-unknown-linux-gnu"
+ rust_abi_target = "aarch64-alpine-linux-musl"
} else if (current_cpu == "x86") {
- rust_abi_target = "i686-unknown-linux-gnu"
+ rust_abi_target = "i586-alpine-linux-musl"
} else if (current_cpu == "x64") {
- rust_abi_target = "x86_64-unknown-linux-gnu"
+ rust_abi_target = "x86_64-alpine-linux-musl"
} else if (current_cpu == "arm") {
if (arm_float_abi == "hard") {
float_suffix = "hf"
@@ -198,15 +198,15 @@
}
if (arm_arch == "armv7-a" || arm_arch == "armv7") {
# No way to inform Rust about the -a suffix.
- rust_abi_target = "armv7-unknown-linux-gnueabi" + float_suffix
+ rust_abi_target = "armv7-alpine-linux-musleabi" + float_suffix
} else {
- rust_abi_target = "arm-unknown-linux-gnueabi" + float_suffix
+ rust_abi_target = "armv6-alpine-linux-musleabi" + float_suffix
}
} else if (current_cpu == "riscv64") {
- rust_abi_target = "riscv64gc-unknown-linux-gnu"
+ rust_abi_target = "riscv64-alpine-linux-musl"
} else {
# Best guess for other future platforms.
- rust_abi_target = current_cpu + "-unknown-linux-gnu"
+ rust_abi_target = current_cpu + "-alpine-linux-musl"
}
} else if (is_android) {
import("//build/config/android/abi.gni")
--- ./build/config/clang/BUILD.gn.orig
+++ ./build/config/clang/BUILD.gn
@@ -128,14 +128,15 @@
} else if (is_apple) {
_dir = "darwin"
} else if (is_linux || is_chromeos) {
+ _dir = "linux"
if (current_cpu == "x64") {
- _dir = "x86_64-unknown-linux-gnu"
+ _suffix = "-x86_64"
} else if (current_cpu == "x86") {
- _dir = "i386-unknown-linux-gnu"
+ _suffix = "-i386"
} else if (current_cpu == "arm") {
- _dir = "armv7-unknown-linux-gnueabihf"
+ _suffix = "-armhf"
} else if (current_cpu == "arm64") {
- _dir = "aarch64-unknown-linux-gnu"
+ _suffix = "-aarch64"
} else {
assert(false) # Unhandled cpu type
}
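Besides dropping the -split-threshold-for-reg-with-hint and rustc -Z flags, the patch above points rust_abi_target and the clang runtime directory lookup at Alpine's musl triples. A small sanity check of what the packaged Rust toolchain reports (example output; Alpine's rustc is patched to know the *-alpine-linux-musl triples):

    rustc -vV | grep '^host:'               # e.g. host: x86_64-alpine-linux-musl
    rustc --print target-list | grep musl   # musl targets known to this rustc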

View file

@ -0,0 +1,372 @@
From d0c1f5ee1f56c165bdf550c9e3be0d7313587b80 Mon Sep 17 00:00:00 2001
From: Elly Fong-Jones <ellyjones@chromium.org>
Date: Wed, 18 Jan 2023 22:33:11 +0000
Subject: [PATCH] media: untangle MediaRouterUI lifetimes
Currently, MediaRouterUI is owned by MediaItemUIDeviceSelectorView.
There is an observer method named "OnControllerInvalidated" which
MediaItemUIDeviceSelectorView reacts to by deleting the MediaRouterUI it
owns. However, OnControllerInvalidated can actually be called in two
different situations:
* From MediaRouterUI::TakeMediaRouteStarter(), in which case the
MediaRouterUI object is *not* being destroyed, but should be, because
it can't be safely used after TakeMediaRouteStarter() ends;
* From MediaRouterUI::~MediaRouterUI(), in which case the MediaRouterUI
object *is* being destroyed already and should not be.
In the second case, only the fact that libc++ nulls out unique_ptr
before destroying the pointed-to object saves us from a use-after-free;
under libstdc++, we UaF immediately by re-entering the destructor. Even
under libc++ though this is still very dangerous, because any observers
that happened to be registered after MediaItemUIDeviceSelectorView will
be invoked after the destruction of the object they're observing. Right
now there are no such other observers, but the fact remains that this
interface is basically a UaF timebomb.
This change separates "this object is about to be destroyed" (an
observable state) from "please destroy this object, it is no longer
useful" (a callback that is made to the object's owner) by:
1. Renaming OnControllerInvalidated to OnControllerDestroying, to make
it very clear what is happening to the object, and
2. Adding a RegisterDestructor method to CastDialogController, which
allows MediaItemUIDeviceSelectorView to pass a callback into
MediaRouterUI which MediaRouterUI can use to arrange for its own
destruction.
This is still a bit tangled and ungainly, but it's safe. A fuller
writeup is on the linked bug.
Fixed: 1407202
Change-Id: Id9410de1fbf2cb42f13957dde316b7c9259f192f
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/4165967
Reviewed-by: Peter Kasting <pkasting@chromium.org>
Reviewed-by: Takumi Fujimoto <takumif@chromium.org>
Commit-Queue: Elly Fong-Jones <ellyjones@chromium.org>
Cr-Commit-Position: refs/heads/main@{#1094110}
---
diff --git a/chrome/browser/ui/media_router/cast_dialog_controller.h b/chrome/browser/ui/media_router/cast_dialog_controller.h
index 2a8de976..c3c0553 100644
--- a/chrome/browser/ui/media_router/cast_dialog_controller.h
+++ b/chrome/browser/ui/media_router/cast_dialog_controller.h
@@ -24,10 +24,12 @@
public:
virtual ~Observer() = default;
- virtual void OnModelUpdated(const CastDialogModel& model) = 0;
+ virtual void OnModelUpdated(const CastDialogModel& model) {}
- // Observer should drop its reference to the controller when this is called.
- virtual void OnControllerInvalidated() = 0;
+ // Notifies observers that the observed object is being destroyed. Observers
+ // MUST NOT try to destroy the observed object in response - to manage the
+ // lifetime of a CastDialogController, use RegisterDestructor() below.
+ virtual void OnControllerDestroying() {}
};
virtual ~CastDialogController() = default;
@@ -55,6 +57,16 @@
// intended that this API should only be used to transfer ownership to some
// new component that will want to start casting on this dialog box's behalf.
virtual std::unique_ptr<MediaRouteStarter> TakeMediaRouteStarter() = 0;
+
+ // Registers a callback for when the CastDialogController has given up
+ // ownership of its MediaRouteStarter and is no longer safe to use. The
+ // provided closure must destroy |this| or otherwise ensure it is never used
+ // again. This method can only be called once.
+ //
+ // TODO(https://crbug.com/1408494): It's awkward that CastDialogController has
+ // a state where it exists but is unsafe to use, and doubly awkward that we
+ // have to paper over that with this callback. Can that be fixed?
+ virtual void RegisterDestructor(base::OnceClosure destructor) = 0;
};
} // namespace media_router
diff --git a/chrome/browser/ui/media_router/media_router_ui.cc b/chrome/browser/ui/media_router/media_router_ui.cc
index 1865115f..644d131 100644
--- a/chrome/browser/ui/media_router/media_router_ui.cc
+++ b/chrome/browser/ui/media_router/media_router_ui.cc
@@ -83,6 +83,9 @@
MediaRouterUI::~MediaRouterUI() {
if (media_route_starter_)
DetachFromMediaRouteStarter();
+ for (CastDialogController::Observer& observer : observers_) {
+ observer.OnControllerDestroying();
+ }
}
// static
@@ -145,9 +148,6 @@
}
void MediaRouterUI::DetachFromMediaRouteStarter() {
- for (CastDialogController::Observer& observer : observers_)
- observer.OnControllerInvalidated();
-
media_route_starter()->RemovePresentationRequestSourceObserver(this);
media_route_starter()->RemoveMediaSinkWithCastModesObserver(this);
}
@@ -181,8 +181,16 @@
std::unique_ptr<MediaRouteStarter> MediaRouterUI::TakeMediaRouteStarter() {
DCHECK(media_route_starter_) << "MediaRouteStarter already taken!";
- DetachFromMediaRouteStarter();
- return std::move(media_route_starter_);
+ auto starter = std::move(media_route_starter_);
+ if (destructor_) {
+ std::move(destructor_).Run(); // May destroy `this`.
+ }
+ return starter;
+}
+
+void MediaRouterUI::RegisterDestructor(base::OnceClosure destructor) {
+ DCHECK(!destructor_);
+ destructor_ = std::move(destructor);
}
bool MediaRouterUI::CreateRoute(const MediaSink::Id& sink_id,
diff --git a/chrome/browser/ui/media_router/media_router_ui.h b/chrome/browser/ui/media_router/media_router_ui.h
index 5c2f14e..7afe775 100644
--- a/chrome/browser/ui/media_router/media_router_ui.h
+++ b/chrome/browser/ui/media_router/media_router_ui.h
@@ -100,8 +100,10 @@
void StopCasting(const std::string& route_id) override;
void ClearIssue(const Issue::Id& issue_id) override;
// Note that |MediaRouterUI| should not be used after |TakeMediaRouteStarter|
- // is called.
+ // is called. To enforce that, |TakeMediaRouteStarter| calls the destructor
+ // callback given to |RegisterDestructor| to destroy itself.
std::unique_ptr<MediaRouteStarter> TakeMediaRouteStarter() override;
+ void RegisterDestructor(base::OnceClosure destructor) override;
// Requests a route be created from the source mapped to
// |cast_mode|, to the sink given by |sink_id|.
@@ -337,6 +339,8 @@
raw_ptr<MediaRouter> router_;
raw_ptr<LoggerImpl> logger_;
+ base::OnceClosure destructor_;
+
// NOTE: Weak pointers must be invalidated before all other member variables.
// Therefore |weak_factory_| must be placed at the end.
base::WeakPtrFactory<MediaRouterUI> weak_factory_{this};
diff --git a/chrome/browser/ui/media_router/media_router_ui_unittest.cc b/chrome/browser/ui/media_router/media_router_ui_unittest.cc
index 2cc243d1..c33437b 100644
--- a/chrome/browser/ui/media_router/media_router_ui_unittest.cc
+++ b/chrome/browser/ui/media_router/media_router_ui_unittest.cc
@@ -80,11 +80,11 @@
}
MOCK_METHOD1(OnModelUpdated, void(const CastDialogModel& model));
- void OnControllerInvalidated() override {
+ void OnControllerDestroying() override {
controller_ = nullptr;
- OnControllerInvalidatedInternal();
+ OnControllerDestroyingInternal();
}
- MOCK_METHOD0(OnControllerInvalidatedInternal, void());
+ MOCK_METHOD0(OnControllerDestroyingInternal, void());
private:
raw_ptr<CastDialogController> controller_ = nullptr;
@@ -295,7 +295,7 @@
})));
NotifyUiOnRoutesUpdated({route});
- EXPECT_CALL(observer, OnControllerInvalidatedInternal());
+ EXPECT_CALL(observer, OnControllerDestroyingInternal());
ui_.reset();
}
diff --git a/chrome/browser/ui/views/global_media_controls/media_item_ui_device_selector_view.cc b/chrome/browser/ui/views/global_media_controls/media_item_ui_device_selector_view.cc
index 34dad46..d843bba 100644
--- a/chrome/browser/ui/views/global_media_controls/media_item_ui_device_selector_view.cc
+++ b/chrome/browser/ui/views/global_media_controls/media_item_ui_device_selector_view.cc
@@ -222,6 +222,11 @@
if (cast_controller) {
cast_controller_ = std::move(cast_controller);
cast_controller_->AddObserver(this);
+ cast_controller_->RegisterDestructor(
+ base::BindOnce(&MediaItemUIDeviceSelectorView::DestroyCastController,
+ // Unretained is safe: this callback is held by
+ // cast_controller_, which is owned by this object.
+ base::Unretained(this)));
}
}
@@ -499,10 +504,6 @@
observer.OnMediaItemUIDeviceSelectorUpdated(device_entry_ui_map_);
}
-void MediaItemUIDeviceSelectorView::OnControllerInvalidated() {
- cast_controller_.reset();
-}
-
void MediaItemUIDeviceSelectorView::OnDeviceSelected(int tag) {
auto it = device_entry_ui_map_.find(tag);
DCHECK(it != device_entry_ui_map_.end());
@@ -658,5 +659,9 @@
weak_ptr_factory_.GetWeakPtr()));
}
+void MediaItemUIDeviceSelectorView::DestroyCastController() {
+ cast_controller_.reset();
+}
+
BEGIN_METADATA(MediaItemUIDeviceSelectorView, views::View)
END_METADATA
diff --git a/chrome/browser/ui/views/global_media_controls/media_item_ui_device_selector_view.h b/chrome/browser/ui/views/global_media_controls/media_item_ui_device_selector_view.h
index e950565..222fc20 100644
--- a/chrome/browser/ui/views/global_media_controls/media_item_ui_device_selector_view.h
+++ b/chrome/browser/ui/views/global_media_controls/media_item_ui_device_selector_view.h
@@ -81,7 +81,6 @@
// media_router::CastDialogController::Observer
void OnModelUpdated(const media_router::CastDialogModel& model) override;
- void OnControllerInvalidated() override;
// MediaItemUIFooterView::Delegate
void OnDeviceSelected(int tag) override;
@@ -121,6 +120,7 @@
void RecordCastDeviceCount();
DeviceEntryUI* GetDeviceEntryUI(views::View* view) const;
void RegisterAudioDeviceCallbacks();
+ void DestroyCastController();
bool has_expand_button_been_shown_ = false;
bool have_devices_been_shown_ = false;
diff --git a/chrome/browser/ui/views/global_media_controls/media_item_ui_device_selector_view_unittest.cc b/chrome/browser/ui/views/global_media_controls/media_item_ui_device_selector_view_unittest.cc
index c3bcc6cc..6ae3dde8 100644
--- a/chrome/browser/ui/views/global_media_controls/media_item_ui_device_selector_view_unittest.cc
+++ b/chrome/browser/ui/views/global_media_controls/media_item_ui_device_selector_view_unittest.cc
@@ -156,6 +156,7 @@
MOCK_METHOD1(ClearIssue, void(const media_router::Issue::Id& issue_id));
MOCK_METHOD0(TakeMediaRouteStarter,
std::unique_ptr<media_router::MediaRouteStarter>());
+ MOCK_METHOD1(RegisterDestructor, void(base::OnceClosure));
};
} // anonymous namespace
diff --git a/chrome/browser/ui/views/media_router/cast_dialog_coordinator_unittest.cc b/chrome/browser/ui/views/media_router/cast_dialog_coordinator_unittest.cc
index f6c80d6a..2dedc7e 100644
--- a/chrome/browser/ui/views/media_router/cast_dialog_coordinator_unittest.cc
+++ b/chrome/browser/ui/views/media_router/cast_dialog_coordinator_unittest.cc
@@ -40,6 +40,7 @@
MOCK_METHOD(void, StopCasting, (const std::string& route_id));
MOCK_METHOD(void, ClearIssue, (const Issue::Id& issue_id));
MOCK_METHOD(std::unique_ptr<MediaRouteStarter>, TakeMediaRouteStarter, ());
+ MOCK_METHOD(void, RegisterDestructor, (base::OnceClosure));
};
class CastDialogCoordinatorTest : public TestWithBrowserView {
diff --git a/chrome/browser/ui/views/media_router/cast_dialog_view.cc b/chrome/browser/ui/views/media_router/cast_dialog_view.cc
index e3c7dadb..711d081 100644
--- a/chrome/browser/ui/views/media_router/cast_dialog_view.cc
+++ b/chrome/browser/ui/views/media_router/cast_dialog_view.cc
@@ -125,9 +125,9 @@
observer.OnDialogModelUpdated(this);
}
-void CastDialogView::OnControllerInvalidated() {
+void CastDialogView::OnControllerDestroying() {
controller_ = nullptr;
- // We don't destroy the dialog here because if the invalidation was caused by
+ // We don't destroy the dialog here because if the destruction was caused by
// activating the toolbar icon in order to close the dialog, then it would
// cause the dialog to immediately open again.
}
diff --git a/chrome/browser/ui/views/media_router/cast_dialog_view.h b/chrome/browser/ui/views/media_router/cast_dialog_view.h
index d87fdda..d44d4e0 100644
--- a/chrome/browser/ui/views/media_router/cast_dialog_view.h
+++ b/chrome/browser/ui/views/media_router/cast_dialog_view.h
@@ -66,7 +66,7 @@
// CastDialogController::Observer:
void OnModelUpdated(const CastDialogModel& model) override;
- void OnControllerInvalidated() override;
+ void OnControllerDestroying() override;
// views::BubbleDialogDelegateView:
void OnPaint(gfx::Canvas* canvas) override;
diff --git a/chrome/browser/ui/views/media_router/cast_dialog_view_browsertest.cc b/chrome/browser/ui/views/media_router/cast_dialog_view_browsertest.cc
index 1c584120..a7af3c8 100644
--- a/chrome/browser/ui/views/media_router/cast_dialog_view_browsertest.cc
+++ b/chrome/browser/ui/views/media_router/cast_dialog_view_browsertest.cc
@@ -70,6 +70,7 @@
override {
return nullptr;
}
+ void RegisterDestructor(base::OnceClosure destructor) override {}
};
} // namespace
diff --git a/chrome/browser/ui/views/media_router/cast_dialog_view_unittest.cc b/chrome/browser/ui/views/media_router/cast_dialog_view_unittest.cc
index 5326467..988cb07a 100644
--- a/chrome/browser/ui/views/media_router/cast_dialog_view_unittest.cc
+++ b/chrome/browser/ui/views/media_router/cast_dialog_view_unittest.cc
@@ -91,6 +91,7 @@
MOCK_METHOD1(StopCasting, void(const std::string& route_id));
MOCK_METHOD1(ClearIssue, void(const Issue::Id& issue_id));
MOCK_METHOD0(TakeMediaRouteStarter, std::unique_ptr<MediaRouteStarter>());
+ MOCK_METHOD1(RegisterDestructor, void(base::OnceClosure));
};
class CastDialogViewTest : public ChromeViewsTestBase {
diff --git a/chrome/browser/ui/views/media_router/media_router_dialog_controller_views.cc b/chrome/browser/ui/views/media_router/media_router_dialog_controller_views.cc
index ad379b2..244d523 100644
--- a/chrome/browser/ui/views/media_router/media_router_dialog_controller_views.cc
+++ b/chrome/browser/ui/views/media_router/media_router_dialog_controller_views.cc
@@ -51,7 +51,7 @@
std::move(context));
}
- ShowGlobalMeidaControlsDialog(std::move(context));
+ ShowGlobalMediaControlsDialog(std::move(context));
return true;
}
@@ -155,9 +155,20 @@
initiator(), std::move(start_presentation_context_))
: MediaRouterUI::CreateWithDefaultMediaSourceAndMirroring(
initiator());
+ ui_->RegisterDestructor(
+ base::BindOnce(&MediaRouterDialogControllerViews::DestroyMediaRouterUI,
+ // Safe to use base::Unretained here: the callback being
+ // bound is held by the MediaRouterUI we are creating and
+ // owning, and ownership of |ui_| is never transferred
+ // away from this object.
+ base::Unretained(this)));
}
-void MediaRouterDialogControllerViews::ShowGlobalMeidaControlsDialog(
+void MediaRouterDialogControllerViews::DestroyMediaRouterUI() {
+ ui_.reset();
+}
+
+void MediaRouterDialogControllerViews::ShowGlobalMediaControlsDialog(
std::unique_ptr<StartPresentationContext> context) {
// Show the WebContents requesting a dialog.
initiator()->GetDelegate()->ActivateContents(initiator());
diff --git a/chrome/browser/ui/views/media_router/media_router_dialog_controller_views.h b/chrome/browser/ui/views/media_router/media_router_dialog_controller_views.h
index 0a5fdb1..7c97211 100644
--- a/chrome/browser/ui/views/media_router/media_router_dialog_controller_views.h
+++ b/chrome/browser/ui/views/media_router/media_router_dialog_controller_views.h
@@ -69,13 +69,14 @@
// MediaRouterUIService::Observer:
void OnServiceDisabled() override;
- // Initializes |ui_|.
+ // Initializes and destroys |ui_| respectively.
void InitializeMediaRouterUI();
+ void DestroyMediaRouterUI();
// If there exists a media button, show the GMC dialog anchored to the media
// button. Otherwise, show the dialog anchored to the top center of the web
// contents.
- void ShowGlobalMeidaControlsDialog(
+ void ShowGlobalMediaControlsDialog(
std::unique_ptr<StartPresentationContext> context);
// Returns the media button from the browser that initiates the request to

View file

@ -2,8 +2,4 @@
 # the electron launcher.
 # Options to pass to electron.
-ELECTRON_FLAGS="--enable-features=WebRTCPipeWireCapturer"
+ELECTRON_FLAGS="--ozone-platform-hint=auto --enable-features=WebRTCPipeWireCapturer"
-# This can be 'x11', 'wayland', or 'auto'. Overriding default to 'auto',
-# but respecting the variable content if any
-ELECTRON_OZONE_PLATFORM_HINT="${ELECTRON_OZONE_PLATFORM_HINT:-auto}"
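In the removed lines, platform selection is kept out of ELECTRON_FLAGS and ELECTRON_OZONE_PLATFORM_HINT is defaulted with ${VAR:-auto}, so a value exported by the user wins over the 'auto' fallback; the replacement line instead bakes --ozone-platform-hint=auto into ELECTRON_FLAGS. A short usage sketch (the caprine path and the extra feature flag are only illustrative):

    # force X11 for a single run; the :-auto default only applies when the variable is unset
    ELECTRON_OZONE_PLATFORM_HINT=x11 electron /usr/lib/caprine

    # system-wide additions belong in /etc/electron/default.conf, e.g.:
    # ELECTRON_FLAGS="$ELECTRON_FLAGS --enable-features=WaylandWindowDecorations"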

View file

@ -1,15 +0,0 @@
diff --git a/net/dns/BUILD.gn b/net/dns/BUILD.gn
index f36bf68..805d9a6 100644
--- a/net/dns/BUILD.gn
+++ b/net/dns/BUILD.gn
@@ -130,8 +130,8 @@ source_set("dns") {
]
} else if (is_linux) {
sources += [
- "dns_config_service_linux.cc",
- "dns_config_service_linux.h",
+ "dns_config_service_fuchsia.cc",
+ "dns_config_service_fuchsia.h",
]
} else if (is_posix) {
sources += [

View file

@ -1,343 +0,0 @@
safesprintf emitnull:
error: conversion from 'std::nullptr_t' to 'const internal::Arg' is ambiguous
const internal::Arg arg_array[] = { args... };
flatmap incompletetype:
error: static assertion failed due to requirement 'std::__is_complete_or_unbounded(std::__type_identity<std::pair<A, A>>{})': template argument must be a complete class or an unbounded array
static_assert(std::__is_complete_or_unbounded(__type_identity<_Tp>{}),
i18n, time:
various icu failures (new icu time formatting? internal api difference?)
a ton of these fail:
Expected equality of these values:
u"Monday 16 May Saturday 28 May"
Which is: u"Monday 16 May \x2013 Saturday 28 May"
DateIntervalFormat(begin_time, end_time, DATE_FORMAT_MONTH_WEEKDAY_DAY)
Which is: u"Monday 16\x2009\x2013\x2009Saturday 28 May"
../../base/i18n/time_formatting_unittest.cc:84: Failure
Expected equality of these values:
clock12h_pm
Which is: u"3:42 PM"
TimeFormatTimeOfDay(time)
Which is: u"3:42\x202FPM"
.. and so on
fileutiltest filetofile:
../../base/files/file_util_unittest.cc:2692: Failure
Value of: stream
Actual: true
Expected: false
stacktracetest: crashes (this doesn't seem to use execinfo so probably relies on glibc internal layout for tracing here)
platformthreadtest canchangethreadtype:
../../base/threading/platform_thread_unittest.cc:445: Failure
Expected equality of these values:
PlatformThread::CanChangeThreadType(ThreadType::kBackground, ThreadType::kResourceEfficient)
Which is: true
kCanIncreasePriority
Which is: false
scopedfdownershiptrackingtest crashonunownedclose: fails due to scoped-file-no-close.patch
stackcontainer customallocator:
../../base/containers/stack_container_unittest.cc:211: Failure
Expected equality of these values:
1
Allocator::deallocated
Which is: 0
nativelibrarytest loadlibrarypreferownsymbols: crashes (probably musl dlopen does not play nice here)
spantest empty: crashes (this looks fishy)
readelfbuildid: crashes (this looks like glibc dynamic linker semantics)
nss db unittest: various nss failures: e.g.:
../../net/cert/nss_cert_database_unittest.cc:209: Failure
Expected equality of these values:
OK
Which is: 0
cert_db_->ImportFromPKCS12(GetPublicSlot(), pkcs12_data, u"12345", true, nullptr)
Which is: -702
processutiltest cloneflags: fails in CI (ulimit? too many threads?)
../../base/process/process_util_unittest.cc:1434: Failure
Value of: process.IsValid()
Actual: false
Expected: true
addresstrackerlinuxnetlinktest:
../../net/base/address_tracker_linux_unittest.cc:886: Failure
Value of: child.process.IsValid()
Actual: false
Expected: true
ToAddressDoesNotDereference: ; Expected `get_for_extraction_cnt` to be 1 but got 0;
DataCapturedManyThreads: flaky
ProcessAlternativeServicesTest.Process*: crashed ?
--- a/base/strings/safe_sprintf_unittest.cc
+++ b/base/strings/safe_sprintf_unittest.cc
@@ -740,6 +740,7 @@
#endif
}
+#if 0
TEST(SafeSPrintfTest, EmitNULL) {
char buf[40];
#if defined(__GNUC__)
@@ -756,6 +757,7 @@
#pragma GCC diagnostic pop
#endif
}
+#endif
TEST(SafeSPrintfTest, PointerSize) {
// The internal data representation is a 64bit value, independent of the
--- a/base/containers/flat_map_unittest.cc
+++ b/base/containers/flat_map_unittest.cc
@@ -52,6 +52,7 @@
} // namespace
+#if 0
TEST(FlatMap, IncompleteType) {
struct A {
using Map = flat_map<A, A>;
@@ -65,6 +66,7 @@
A a;
}
+#endif
TEST(FlatMap, RangeConstructor) {
flat_map<int, int>::value_type input_vals[] = {
--- a/base/BUILD.gn
+++ b/base/BUILD.gn
@@ -3194,21 +3194,6 @@
"hash/md5_constexpr_unittest.cc",
"hash/md5_unittest.cc",
"hash/sha1_unittest.cc",
- "i18n/break_iterator_unittest.cc",
- "i18n/case_conversion_unittest.cc",
- "i18n/char_iterator_unittest.cc",
- "i18n/character_encoding_unittest.cc",
- "i18n/file_util_icu_unittest.cc",
- "i18n/icu_string_conversions_unittest.cc",
- "i18n/icu_util_unittest.cc",
- "i18n/message_formatter_unittest.cc",
- "i18n/number_formatting_unittest.cc",
- "i18n/rtl_unittest.cc",
- "i18n/streaming_utf8_validator_unittest.cc",
- "i18n/string_search_unittest.cc",
- "i18n/time_formatting_unittest.cc",
- "i18n/timezone_unittest.cc",
- "i18n/transliterator_unittest.cc",
"immediate_crash_unittest.cc",
"json/json_parser_unittest.cc",
"json/json_reader_unittest.cc",
--- a/base/files/file_util_unittest.cc
+++ b/base/files/file_util_unittest.cc
@@ -2686,6 +2686,7 @@
}
}
+#if 0
TEST_F(FileUtilTest, FileToFILE) {
File file;
FILE* stream = FileToFILE(std::move(file), "w");
@@ -2700,6 +2701,7 @@
EXPECT_FALSE(file.IsValid());
EXPECT_TRUE(CloseFile(stream));
}
+#endif
TEST_F(FileUtilTest, FILEToFile) {
ScopedFILE stream;
--- a/base/threading/platform_thread_unittest.cc
+++ b/base/threading/platform_thread_unittest.cc
@@ -416,6 +416,7 @@
// platforms for all priorities. This not being the case. This test documents
// and hardcodes what we know. Please inform scheduler-dev@chromium.org if this
// proprerty changes for a given platform.
+#if 0
TEST(PlatformThreadTest, CanChangeThreadType) {
#if BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS)
// On Ubuntu, RLIMIT_NICE and RLIMIT_RTPRIO are 0 by default, so we won't be
@@ -472,6 +473,7 @@
ThreadType::kBackground));
#endif
}
+#endif
TEST(PlatformThreadTest, SetCurrentThreadTypeTest) {
TestPriorityResultingFromThreadType(ThreadType::kBackground,
--- a/base/files/scoped_file_linux_unittest.cc
+++ b/base/files/scoped_file_linux_unittest.cc
@@ -42,11 +42,13 @@
EXPECT_DEATH(ScopedFD(fd.get()), "");
}
+#if 0
TEST_F(ScopedFDOwnershipTrackingTest, CrashOnUnownedClose) {
ScopedFD fd = OpenFD();
subtle::EnableFDOwnershipEnforcement(true);
EXPECT_DEATH(close(fd.get()), "");
}
+#endif
#endif // defined(GTEST_HAS_DEATH_TEST)
--- a/base/native_library_unittest.cc
+++ b/base/native_library_unittest.cc
@@ -139,6 +139,7 @@
// Verifies that the |prefer_own_symbols| option satisfies its guarantee that
// a loaded library will always prefer local symbol resolution before
// considering global symbols.
+#if 0
TEST(NativeLibraryTest, LoadLibraryPreferOwnSymbols) {
NativeLibraryOptions options;
options.prefer_own_symbols = true;
@@ -171,6 +172,7 @@
EXPECT_EQ(2, NativeLibraryTestIncrement());
EXPECT_EQ(3, NativeLibraryTestIncrement());
}
+#endif
#endif // !BUILDFLAG(IS_ANDROID) && !defined(THREAD_SANITIZER) && \
// !defined(MEMORY_SANITIZER)
--- a/base/containers/span_unittest.cc
+++ b/base/containers/span_unittest.cc
@@ -995,6 +995,7 @@
}
}
+#if 0
TEST(SpanTest, Empty) {
{
span<int> span;
@@ -1014,6 +1015,7 @@
EXPECT_TRUE(span_of_checked_iterators.empty());
}
}
+#endif
TEST(SpanTest, OperatorAt) {
static constexpr int kArray[] = {1, 6, 1, 8, 0};
--- a/base/debug/elf_reader_unittest.cc
+++ b/base/debug/elf_reader_unittest.cc
@@ -194,6 +194,7 @@
}
}
+#if 0
TEST(ElfReaderTestWithCurrentImage, ReadElfBuildId) {
#if BUILDFLAG(IS_ANDROID)
// On Android the library loader memory maps the full so file.
@@ -229,6 +230,7 @@
UnloadNativeLibrary(library);
#endif
}
+#endif
} // namespace debug
} // namespace base
--- a/net/BUILD.gn
+++ b/net/BUILD.gn
@@ -4826,7 +4826,6 @@
sources += [
"cert/internal/system_trust_store_nss_unittest.cc",
"cert/internal/trust_store_nss_unittest.cc",
- "cert/nss_cert_database_unittest.cc",
"cert/x509_util_nss_unittest.cc",
]
if (!is_castos) {
--- a/base/process/process_util_unittest.cc
+++ b/base/process/process_util_unittest.cc
@@ -1419,7 +1419,7 @@
return kSuccess;
}
-#if defined(CLONE_NEWUSER) && defined(CLONE_NEWPID)
+#if 0 && defined(CLONE_NEWUSER) && defined(CLONE_NEWPID)
TEST_F(ProcessUtilTest, CloneFlags) {
if (!PathExists(FilePath("/proc/self/ns/user")) ||
!PathExists(FilePath("/proc/self/ns/pid"))) {
--- a/net/base/address_tracker_linux_unittest.cc
+++ b/net/base/address_tracker_linux_unittest.cc
@@ -831,6 +831,7 @@
//
// This test creates multiple concurrent `AddressTrackerLinux` instances in
// separate processes, each in their own PID namespaces.
+#if 0
TEST(AddressTrackerLinuxNetlinkTest, TestInitializeTwoTrackersInPidNamespaces) {
// This test initializes `kNumChildren` instances of `AddressTrackerLinux` in
// tracking mode, each in their own child process running in a PID namespace.
@@ -901,6 +902,7 @@
ASSERT_EQ(exit_code, 0);
}
}
+#endif
MULTIPROCESS_TEST_MAIN(ChildProcessInitializeTrackerForTesting) {
base::test::TaskEnvironment task_env(
--- a/base/trace_event/trace_event_unittest.cc
+++ b/base/trace_event/trace_event_unittest.cc
@@ -1368,6 +1368,7 @@
}
// Test that data sent from multiple threads is gathered
+#if 0
TEST_F(TraceEventTestFixture, DataCapturedManyThreads) {
BeginTrace();
@@ -1408,6 +1409,7 @@
delete task_complete_events[i];
}
}
+#endif
// Test that thread and process names show up in the trace.
// In SDK build, thread names are not tracked inside //base. Instead, there's
--- a/base/allocator/partition_allocator/src/partition_alloc/pointers/raw_ptr_unittest.cc
+++ b/base/allocator/partition_allocator/src/partition_alloc/pointers/raw_ptr_unittest.cc
@@ -1481,6 +1481,7 @@
// `base::to_address()` will use the dereference operator. This is not
// what we want; this test enforces extraction semantics for
// `to_address()`.
+#if 0
TEST_F(RawPtrTest, ToAddressDoesNotDereference) {
CountingRawPtr<int> ptr = nullptr;
int* raw = base::to_address(ptr);
@@ -1492,6 +1493,7 @@
.get_for_duplication_cnt = 0}),
CountersMatch());
}
+#endif
TEST_F(RawPtrTest, ToAddressGivesBackRawAddress) {
int* raw = nullptr;
--- a/net/http/http_stream_factory_unittest.cc
+++ b/net/http/http_stream_factory_unittest.cc
@@ -3477,6 +3477,7 @@
DefaultCTPolicyEnforcer ct_policy_enforcer_;
};
+#if 0
TEST_F(ProcessAlternativeServicesTest, ProcessEmptyAltSvc) {
session_ =
std::make_unique<HttpNetworkSession>(session_params_, session_context_);
@@ -3585,6 +3586,7 @@
alternatives[0].host_port_pair());
EXPECT_EQ(0u, alternatives[0].advertised_versions().size());
}
+#endif
} // namespace

View file

@ -10,8 +10,6 @@ done
# Prefer user defined ELECTRON_USER_FLAGS (from env) over system
# default ELECTRON_FLAGS (from /etc/electron/default.conf).
export ELECTRON_FLAGS="$ELECTRON_FLAGS ${ELECTRON_USER_FLAGS:-"$ELECTRON_USER_FLAGS"}"
-# Re-export, for it to be accessible by the process
-export ELECTRON_OZONE_PLATFORM_HINT="${ELECTRON_OZONE_PLATFORM_HINT}"
if [ "$ELECTRON_RUN_AS_NODE" == "1" ] && [ "$ELECTRON_STILL_PASS_THE_DEFAULT_FLAGS" != "1" ]; then
exec "/usr/lib/electron/electron" "$@"

View file

@ -1,10 +0,0 @@
--- ./electron/BUILD.gn.orig
+++ ./electron/BUILD.gn
@@ -1565,7 +1565,6 @@
public_deps = header_groups + [
":node_gypi_headers",
":node_version_header",
- ":zlib_headers",
]
}

View file

@ -1,69 +0,0 @@
--- ./third_party/electron_node/BUILD.gn.orig
+++ ./third_party/electron_node/BUILD.gn
@@ -40,6 +40,8 @@
node_release_urlbase = ""
# Allows downstream packagers (eg. Linux distributions) to build Electron against system shared libraries.
+ use_system_ada = false
+ use_system_base64 = false
use_system_cares = false
use_system_nghttp2 = false
use_system_llhttp = false
@@ -48,6 +50,16 @@
if (is_linux) {
import("//build/config/linux/pkg_config.gni")
+ if (use_system_ada) {
+ config("ada") {
+ libs = [ "ada" ]
+ }
+ }
+ if (use_system_base64) {
+ pkg_config("base64") {
+ packages = [ "base64" ]
+ }
+ }
if (use_system_cares) {
pkg_config("cares") {
packages = [ "libcares" ]
@@ -258,8 +270,6 @@
deps = [
":node_js2c_exec",
"deps/googletest:gtest",
- "deps/ada",
- "deps/base64",
"deps/simdutf",
"deps/uvwasi",
"//third_party/zlib",
@@ -267,6 +277,16 @@
"//third_party/brotli:enc",
"//v8:v8_libplatform",
]
+ if (use_system_ada) {
+ configs += [ ":ada" ]
+ } else {
+ deps += [ "deps/ada" ]
+ }
+ if (use_system_base64) {
+ configs += [ ":base64" ]
+ } else {
+ deps += [ "deps/base64" ]
+ }
if (use_system_cares) {
configs += [ ":cares" ]
} else {
diff --git a/./electron/script/generate-config-gypi.py.orig b/./electron/script/generate-config-gypi.py
index b41cd7eb450..bc4098debb5 100755
--- a/./electron/script/generate-config-gypi.py.orig
+++ b/./electron/script/generate-config-gypi.py
@@ -62,6 +62,11 @@ def main(target_file, target_cpu):
# Used by certain versions of node-gyp.
v['build_v8_with_gn'] = 'false'
+ with open(os.path.join(NODE_DIR, 'use_system.txt')) as f:
+ for dep in f.read().strip().split(' '):
+ if v.get(f'node_shared_{dep}') is not None:
+ v[f'node_shared_{dep}'] = 'true'
+
with open(target_file, 'w+', encoding='utf-8') as file_out:
file_out.write(pprint.pformat(config, indent=2))

View file

@ -1,13 +0,0 @@
instead of hardcoding the version, use the defined macro.
--
--- a/third_party/test_fonts/fontconfig/generate_fontconfig_caches.cc
+++ b/third_party/test_fonts/fontconfig/generate_fontconfig_caches.cc
@@ -56,7 +56,7 @@
FcFini();
// Check existence of intended fontconfig cache file.
- auto cache = fontconfig_caches + "/" + kCacheKey + "-le64.cache-9";
+ auto cache = fontconfig_caches + "/" + kCacheKey + "-le64.cache-" + FC_CACHE_VERSION;
bool cache_exists = access(cache.c_str(), F_OK) == 0;
return !cache_exists;
}
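The note above is terse, so here is a hedged sketch of the idea (illustrative only, not the Chromium test helper): FC_CACHE_VERSION is a string macro exported by fontconfig's public header, so the expected cache file name can be assembled from it instead of hardcoding the "-le64.cache-9" suffix.

// Hedged sketch: derive the cache suffix from fontconfig's own macro.
// Assumes the fontconfig development headers are installed.
#include <fontconfig/fontconfig.h>
#include <cstdio>
#include <string>

int main() {
  const std::string kCacheKey = "0123456789abcdef";  // placeholder cache key
  std::string cache = kCacheKey + "-le64.cache-" + FC_CACHE_VERSION;
  std::printf("%s\n", cache.c_str());  // e.g. ...-le64.cache-9, tracking the installed fontconfig
  return 0;
}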

View file

@ -0,0 +1,13 @@
Patch-Source: https://github.com/void-linux/void-packages/blob/378db3cf5087877588aebaaa8ca3c9d94dfb54e0/srcpkgs/chromium/patches/fix-missing-cstdint-include-musl.patch
fixed in https://github.com/google/quiche/commit/4d4820f0a959be7fa22285d114a5b5b8676f10fe
--
--- a/net/third_party/quiche/src/quiche/http2/adapter/window_manager.h
+++ b/net/third_party/quiche/src/quiche/http2/adapter/window_manager.h
@@ -3,6 +3,7 @@
#include <stddef.h>
+#include <cstdint>
#include <functional>
#include "common/platform/api/quiche_export.h"

View file

@ -1,12 +0,0 @@
--- a/media/filters/ffmpeg_glue.cc
+++ b/media/filters/ffmpeg_glue.cc
@@ -142,7 +142,7 @@ const char* FFmpegGlue::GetAllowedAudioDecoders() {
static const base::NoDestructor<std::string> kAllowedAudioCodecs([]() {
// This should match the configured lists in //third_party/ffmpeg.
std::string allowed_decoders(
- "vorbis,libopus,flac,pcm_u8,pcm_s16le,pcm_s24le,pcm_s32le,pcm_f32le,"
+ "vorbis,opus,libopus,flac,pcm_u8,pcm_s16le,pcm_s24le,pcm_s32le,pcm_f32le,"
"mp3,pcm_s16be,pcm_s24be,pcm_mulaw,pcm_alaw");
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
allowed_decoders += ",aac";

View file

@ -1,17 +0,0 @@
fstatat64 is macroed to fstatat in sys/stat.h on musl, but that fstatat is then
used in the _syscall4 macro, which maps it to __NR_$name; __NR_fstatat is not
defined anywhere here, since the 64-suffixed name is the one that exists.
So just let it keep its name by undefining the stat.h macro; the macro
expansion below then evaluates correctly.
--- a/third_party/lss/linux_syscall_support.h
+++ b/third_party/lss/linux_syscall_support.h
@@ -4947,7 +4947,8 @@
# endif
#endif
#if defined(__NR_fstatat64)
+ #undef fstatat64
LSS_INLINE _syscall4(int, fstatat64, int, d,
const char *, p,
struct kernel_stat64 *, b, int, f)
#endif
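A minimal, hypothetical preprocessor demo of the problem described above (the macro names below are invented; only the shape of _syscall4's nested __NR_ paste is mimicked): because the name argument is macro-expanded one level before it reaches the ## paste, the 64 suffix is lost.

#include <cstdio>

#define fstatat64 fstatat                  // what musl's sys/stat.h effectively does
#define STR(x) #x
#define PASTE_NR(name) STR(__NR_##name)    // inner level: 'name' is pasted, not expanded
#define SYSCALL_NAME(name) PASTE_NR(name)  // outer level: 'name' IS expanded first

int main() {
  // Prints "__NR_fstatat": the 64 suffix is gone by the time the paste happens,
  // so the real header ends up referencing a syscall number that does not exist.
  std::printf("%s\n", SYSCALL_NAME(fstatat64));
  return 0;
}

With the #undef fstatat64 from the patch in place, the same expansion keeps the suffix and resolves to __NR_fstatat64 as intended.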

View file

@ -1,11 +0,0 @@
--- a/services/device/public/cpp/generic_sensor/sensor_reading.h
+++ b/services/device/public/cpp/generic_sensor/sensor_reading.h
@@ -5,6 +5,8 @@
#ifndef SERVICES_DEVICE_PUBLIC_CPP_GENERIC_SENSOR_SENSOR_READING_H_
#define SERVICES_DEVICE_PUBLIC_CPP_GENERIC_SENSOR_SENSOR_READING_H_
+#include <cstddef>
+#include <cstdint>
#include <type_traits>
namespace device {

View file

@ -1,11 +1,11 @@
--- a/electron/default_app/default_app.ts --- a/electron/default_app/default_app.ts
+++ b/electron/default_app/default_app.ts +++ b/electron/default_app/default_app.ts
@@ -61,7 +61,7 @@ @@ -60,7 +60,7 @@
}; };
if (process.platform === 'linux') { if (process.platform === 'linux') {
- options.icon = url.fileURLToPath(new URL('icon.png', import.meta.url)); - options.icon = path.join(__dirname, 'icon.png');
+ options.icon = 'file:///usr/share/icons/hicolor/1024x1024/apps/electron.png'; + options.icon = '/usr/share/icons/hicolor/1024x1024/apps/electron.png';
} }
mainWindow = new BrowserWindow(options); mainWindow = new BrowserWindow(options);

View file

@ -0,0 +1,15 @@
Upstream made the include block glibc-conditional, but now nothing is included on musl at all,
so the string functions used below are missing.
--
--- a/chrome/browser/metrics/chrome_browser_main_extra_parts_metrics.cc
+++ b/chrome/browser/metrics/chrome_browser_main_extra_parts_metrics.cc
@@ -61,8 +61,7 @@
// TODO(crbug.com/1052397): Revisit the macro expression once build flag switch
// of lacros-chrome is complete.
-#if defined(__GLIBC__) && (BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS_LACROS))
-#include <gnu/libc-version.h>
+#if BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS_LACROS)
#include "base/linux_util.h"
#include "base/strings/string_split.h"

View file

@ -0,0 +1,39 @@
Patch-Source: https://github.com/archlinux/svntogit-packages/blob/bf2401407df5bcc938382eb03748fbef41e41c89/trunk/unbundle-jsoncpp-avoid-CFI-faults-with-is_cfi-true.patch
From ed8d931e35f81d8566835a579caf7d61368f85b7 Mon Sep 17 00:00:00 2001
From: Evangelos Foutras <evangelos@foutrelis.com>
Date: Tue, 27 Sep 2022 22:20:41 +0000
Subject: [PATCH] unbundle/jsoncpp: avoid CFI faults with is_cfi=true
Ensure jsoncpp symbols have public visibility and are thus excluded from
CFI checks and whole-program optimization. This is achieved by defining
JSON_DLL_BUILD which in turn causes json/config.h to define JSON_API as
__attribute__((visibility("default"))). The latter macro is used to tag
jsoncpp classes and namespace functions throughout jsoncpp's headers.
BUG=1365218
Change-Id: I56277737b7d9ecaeb5e17c8d21a2e55f3d5d5bc9
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/3919652
Reviewed-by: Thomas Anderson <thomasanderson@chromium.org>
Commit-Queue: Thomas Anderson <thomasanderson@chromium.org>
Cr-Commit-Position: refs/heads/main@{#1052077}
---
build/linux/unbundle/jsoncpp.gn | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/build/linux/unbundle/jsoncpp.gn b/build/linux/unbundle/jsoncpp.gn
index 544f9d13c9..e84a0ef27a 100644
--- a/build/linux/unbundle/jsoncpp.gn
+++ b/build/linux/unbundle/jsoncpp.gn
@@ -3,6 +3,11 @@ import("//build/shim_headers.gni")
pkg_config("jsoncpp_config") {
packages = [ "jsoncpp" ]
+
+ # Defining JSON_DLL_BUILD applies public visibility to jsoncpp classes
+ # thus deactivating CFI checks for them. This avoids CFI violations in
+ # virtual calls to system jsoncpp library (https://crbug.com/1365218).
+ defines = [ "JSON_DLL_BUILD" ]
}
shim_headers("jsoncpp_shim") {

View file

@ -0,0 +1,319 @@
missing libstdc++13 includes
--
--- a/third_party/ruy/src/ruy/profiler/instrumentation.h
+++ b/third_party/ruy/src/ruy/profiler/instrumentation.h
@@ -19,6 +19,7 @@
#ifdef RUY_PROFILER
#include <cstdio>
#include <mutex>
+#include <string>
#include <vector>
#endif
--- a/third_party/openscreen/src/platform/base/error.h
+++ b/third_party/openscreen/src/platform/base/error.h
@@ -6,6 +6,7 @@
#define PLATFORM_BASE_ERROR_H_
#include <cassert>
+#include <cstdint>
#include <ostream>
#include <string>
#include <utility>
--- a/third_party/vulkan_memory_allocator/include/vk_mem_alloc.h
+++ b/third_party/vulkan_memory_allocator/include/vk_mem_alloc.h
@@ -2389,6 +2389,7 @@
#undef VMA_IMPLEMENTATION
#include <cstdint>
+#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <utility>
--- a/third_party/vulkan-deps/vulkan-validation-layers/src/layers/external/vma/vk_mem_alloc.h
+++ b/third_party/vulkan-deps/vulkan-validation-layers/src/layers/external/vma/vk_mem_alloc.h
@@ -2389,6 +2389,7 @@
#undef VMA_IMPLEMENTATION
#include <cstdint>
+#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <utility>
--- a/ui/base/prediction/kalman_filter.h
+++ b/ui/base/prediction/kalman_filter.h
@@ -8,6 +8,8 @@
#include "base/component_export.h"
#include "ui/gfx/geometry/matrix3_f.h"
+#include <cstdint>
+
namespace ui {
// This Kalman filter is used to predict state in one axles.
--- a/ui/events/types/scroll_types.h
+++ b/ui/events/types/scroll_types.h
@@ -5,6 +5,8 @@
#ifndef UI_EVENTS_TYPES_SCROLL_TYPES_H_
#define UI_EVENTS_TYPES_SCROLL_TYPES_H_
+#include <cstdint>
+
namespace ui {
enum class ScrollGranularity : uint8_t {
--- a/third_party/webrtc/rtc_base/system/file_wrapper.h
+++ b/third_party/webrtc/rtc_base/system/file_wrapper.h
@@ -14,6 +14,7 @@
#include <stddef.h>
#include <stdio.h>
+#include <cstdint>
#include <string>
#include "absl/strings/string_view.h"
--- a/third_party/pdfium/constants/annotation_flags.h
+++ b/third_party/pdfium/constants/annotation_flags.h
@@ -5,6 +5,8 @@
#ifndef CONSTANTS_ANNOTATION_FLAGS_H_
#define CONSTANTS_ANNOTATION_FLAGS_H_
+#include <cstdint>
+
namespace pdfium {
namespace annotation_flags {
--- a/third_party/s2cellid/src/s2/util/math/mathutil.h
+++ b/third_party/s2cellid/src/s2/util/math/mathutil.h
@@ -21,6 +21,7 @@
#ifndef S2_UTIL_MATH_MATHUTIL_H_
#define S2_UTIL_MATH_MATHUTIL_H_
+#include <cstdint>
#include <type_traits>
class MathUtil {
--- a/third_party/s2cellid/src/s2/s1angle.h
+++ b/third_party/s2cellid/src/s2/s1angle.h
@@ -24,6 +24,7 @@
#define S2_S1ANGLE_H_
#include <math.h>
+#include <cstdint>
#include <limits>
#include <ostream>
#include <type_traits>
--- a/third_party/maldoca/src/maldoca/ole/header.h
+++ b/third_party/maldoca/src/maldoca/ole/header.h
@@ -45,6 +45,8 @@
#include "absl/strings/string_view.h"
+#include <cstdint>
+
namespace maldoca {
class OLEHeader {
--- a/components/password_manager/core/browser/generation/password_generator.h
+++ b/components/password_manager/core/browser/generation/password_generator.h
@@ -5,6 +5,7 @@
#ifndef COMPONENTS_PASSWORD_MANAGER_CORE_BROWSER_GENERATION_PASSWORD_GENERATOR_H_
#define COMPONENTS_PASSWORD_MANAGER_CORE_BROWSER_GENERATION_PASSWORD_GENERATOR_H_
+#include <cstdint>
#include <string>
--- a/base/debug/profiler.h
+++ b/base/debug/profiler.h
@@ -7,6 +7,7 @@
#include <stddef.h>
+#include <cstdint>
#include <string>
#include "base/base_export.h"
--- a/components/dom_distiller/core/url_utils.h
+++ b/components/dom_distiller/core/url_utils.h
@@ -5,6 +5,7 @@
#ifndef COMPONENTS_DOM_DISTILLER_CORE_URL_UTILS_H_
#define COMPONENTS_DOM_DISTILLER_CORE_URL_UTILS_H_
+#include <cstdint>
#include <string>
#include "base/strings/string_piece_forward.h"
--- a/components/feature_engagement/internal/event_storage_validator.h
+++ b/components/feature_engagement/internal/event_storage_validator.h
@@ -5,6 +5,7 @@
#ifndef COMPONENTS_FEATURE_ENGAGEMENT_INTERNAL_EVENT_STORAGE_VALIDATOR_H_
#define COMPONENTS_FEATURE_ENGAGEMENT_INTERNAL_EVENT_STORAGE_VALIDATOR_H_
+#include <cstdint>
#include <string>
namespace feature_engagement {
--- a/chrome/test/chromedriver/chrome/web_view_impl.cc
+++ b/chrome/test/chromedriver/chrome/web_view_impl.cc
@@ -8,6 +8,7 @@
#include <algorithm>
#include <memory>
#include <queue>
+#include <string>
#include <utility>
#include <vector>
--- a/cc/trees/target_property.cc
+++ b/cc/trees/target_property.cc
@@ -2,6 +2,8 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
+#include <cstdint>
+
#include "cc/trees/target_property.h"
#include "ui/gfx/animation/keyframe/target_property.h"
--- a/gpu/config/gpu_feature_info.h
+++ b/gpu/config/gpu_feature_info.h
@@ -5,6 +5,7 @@
#ifndef GPU_CONFIG_GPU_FEATURE_INFO_H_
#define GPU_CONFIG_GPU_FEATURE_INFO_H_
+#include <cstdint>
#include <string>
#include <vector>
--- a/gpu/config/gpu_driver_bug_workarounds.h
+++ b/gpu/config/gpu_driver_bug_workarounds.h
@@ -5,6 +5,7 @@
#ifndef GPU_CONFIG_GPU_DRIVER_BUG_WORKAROUNDS_H_
#define GPU_CONFIG_GPU_DRIVER_BUG_WORKAROUNDS_H_
+#include <cstdint>
#include <vector>
#include "build/build_config.h"
--- a/third_party/blink/public/common/page_state/page_state.h
+++ b/third_party/blink/public/common/page_state/page_state.h
@@ -5,6 +5,7 @@
#ifndef THIRD_PARTY_BLINK_PUBLIC_COMMON_PAGE_STATE_PAGE_STATE_H_
#define THIRD_PARTY_BLINK_PUBLIC_COMMON_PAGE_STATE_PAGE_STATE_H_
+#include <cstdint>
#include <string>
#include <vector>
--- a/pdf/document_attachment_info.h
+++ b/pdf/document_attachment_info.h
@@ -5,6 +5,7 @@
#ifndef PDF_DOCUMENT_ATTACHMENT_INFO_H_
#define PDF_DOCUMENT_ATTACHMENT_INFO_H_
+#include <cstdint>
#include <string>
--- a/components/payments/content/utility/fingerprint_parser.h
+++ b/components/payments/content/utility/fingerprint_parser.h
@@ -7,6 +7,7 @@
#include <stddef.h>
+#include <cstdint>
#include <string>
#include <vector>
--- a/media/base/video_transformation.h
+++ b/media/base/video_transformation.h
@@ -5,6 +5,7 @@
#ifndef MEDIA_BASE_VIDEO_TRANSFORMATION_H_
#define MEDIA_BASE_VIDEO_TRANSFORMATION_H_
+#include <cstdint>
#include <string>
#include "base/numerics/math_constants.h"
--- a/components/omnibox/browser/on_device_head_model.h
+++ b/components/omnibox/browser/on_device_head_model.h
@@ -5,6 +5,7 @@
#ifndef COMPONENTS_OMNIBOX_BROWSER_ON_DEVICE_HEAD_MODEL_H_
#define COMPONENTS_OMNIBOX_BROWSER_ON_DEVICE_HEAD_MODEL_H_
+#include <cstdint>
#include <string>
#include <utility>
#include <vector>
--- a/components/autofill/core/browser/autofill_ablation_study.h
+++ b/components/autofill/core/browser/autofill_ablation_study.h
@@ -5,6 +5,7 @@
#ifndef COMPONENTS_AUTOFILL_CORE_BROWSER_AUTOFILL_ABLATION_STUDY_H_
#define COMPONENTS_AUTOFILL_CORE_BROWSER_AUTOFILL_ABLATION_STUDY_H_
+#include <cstdint>
#include <string>
class GURL;
--- a/components/autofill/core/browser/strike_databases/strike_database_base.h
+++ b/components/autofill/core/browser/strike_databases/strike_database_base.h
@@ -5,6 +5,7 @@
#ifndef COMPONENTS_AUTOFILL_CORE_BROWSER_STRIKE_DATABASES_STRIKE_DATABASE_BASE_H_
#define COMPONENTS_AUTOFILL_CORE_BROWSER_STRIKE_DATABASES_STRIKE_DATABASE_BASE_H_
+#include <cstdint>
#include <map>
#include <string>
#include <vector>
--- a/chrome/browser/privacy_budget/encountered_surface_tracker.h
+++ b/chrome/browser/privacy_budget/encountered_surface_tracker.h
@@ -5,6 +5,7 @@
#ifndef CHROME_BROWSER_PRIVACY_BUDGET_ENCOUNTERED_SURFACE_TRACKER_H_
#define CHROME_BROWSER_PRIVACY_BUDGET_ENCOUNTERED_SURFACE_TRACKER_H_
+#include <cstdint>
#include <map>
#include "base/containers/flat_set.h"
--- a/chrome/browser/resource_coordinator/decision_details.h
+++ b/chrome/browser/resource_coordinator/decision_details.h
@@ -5,6 +5,7 @@
#ifndef CHROME_BROWSER_RESOURCE_COORDINATOR_DECISION_DETAILS_H_
#define CHROME_BROWSER_RESOURCE_COORDINATOR_DECISION_DETAILS_H_
+#include <cstdint>
#include <string>
#include <vector>
--- a/net/third_party/quiche/src/quiche/quic/core/qpack/qpack_stream_sender_delegate.h
+++ b/net/third_party/quiche/src/quiche/quic/core/qpack/qpack_stream_sender_delegate.h
@@ -8,6 +8,8 @@
#include "absl/strings/string_view.h"
#include "quiche/quic/platform/api/quic_export.h"
+#include <cstdint>
+
namespace quic {
// This interface writes encoder/decoder data to peer.
--- a/third_party/perfetto/src/trace_processor/sqlite/query_constraints.h
+++ b/third_party/perfetto/src/trace_processor/sqlite/query_constraints.h
@@ -17,6 +17,7 @@
#ifndef SRC_TRACE_PROCESSOR_SQLITE_QUERY_CONSTRAINTS_H_
#define SRC_TRACE_PROCESSOR_SQLITE_QUERY_CONSTRAINTS_H_
+#include <cstdint>
#include <limits>
#include <vector>
--- a/third_party/perfetto/include/perfetto/base/export.h
+++ b/third_party/perfetto/include/perfetto/base/export.h
@@ -17,6 +17,8 @@
#ifndef INCLUDE_PERFETTO_BASE_EXPORT_H_
#define INCLUDE_PERFETTO_BASE_EXPORT_H_
+#include <cstdint>
+
#include "perfetto/base/build_config.h"
#include "perfetto/public/abi/export.h"

View file

@ -0,0 +1,13 @@
Needed for PKEY_DISABLE_WRITE. These flags are documented as also being available
from sys/mman.h with _GNU_SOURCE, but musl doesn't provide them there, so they
come strictly from the kernel headers.
--- a/base/allocator/partition_allocator/partition_alloc_unittest.cc
+++ b/base/allocator/partition_allocator/partition_alloc_unittest.cc
@@ -60,6 +60,7 @@
#include <sys/mman.h>
#include <sys/resource.h>
#include <sys/time.h>
+#include <linux/mman.h>
#endif // BUILDFLAG(IS_POSIX)
#if BUILDFLAG(ENABLE_PARTITION_ALLOC_AS_MALLOC_SUPPORT) && BUILDFLAG(IS_MAC)
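For context, a hedged example of what the added include provides on musl (assuming the Linux UAPI headers are installed): the PKEY_* flags come from <linux/mman.h>, whereas glibc would also expose them via <sys/mman.h> under _GNU_SOURCE.

// Hedged sketch: on musl the PKEY_* flags come from the kernel UAPI header.
// (On glibc, <sys/mman.h> with _GNU_SOURCE would define them as well.)
#include <cstdio>
#include <linux/mman.h>

int main() {
  std::printf("PKEY_DISABLE_WRITE = 0x%x\n", PKEY_DISABLE_WRITE);
  return 0;
}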

View file

@ -1,11 +0,0 @@
--- ./v8/src/base/cpu.cc.orig
+++ ./v8/src/base/cpu.cc
@@ -14,7 +14,7 @@
#if V8_OS_LINUX
#include <linux/auxvec.h> // AT_HWCAP
#endif
-#if V8_GLIBC_PREREQ(2, 16) || V8_OS_ANDROID
+#if 1
#include <sys/auxv.h> // getauxval()
#endif
#if V8_OS_QNX

View file

@ -45,7 +45,7 @@ diff --git a/sandbox/linux/seccomp-bpf-helpers/syscall_sets.cc ./sandbox/linux/s
index d9d1882..0567557 100644 index d9d1882..0567557 100644
--- a/sandbox/linux/seccomp-bpf-helpers/syscall_sets.cc --- a/sandbox/linux/seccomp-bpf-helpers/syscall_sets.cc
+++ ./sandbox/linux/seccomp-bpf-helpers/syscall_sets.cc +++ ./sandbox/linux/seccomp-bpf-helpers/syscall_sets.cc
@@ -438,6 +438,7 @@ @@ -392,6 +392,7 @@ bool SyscallSets::IsAllowedProcessStartOrDeath(int sysno) {
#if defined(__i386__) #if defined(__i386__)
case __NR_waitpid: case __NR_waitpid:
#endif #endif
@ -53,7 +53,7 @@ index d9d1882..0567557 100644
return true; return true;
case __NR_clone: // Should be parameter-restricted. case __NR_clone: // Should be parameter-restricted.
case __NR_setns: // Privileged. case __NR_setns: // Privileged.
@@ -450,7 +451,6 @@ @@ -404,7 +405,6 @@ bool SyscallSets::IsAllowedProcessStartOrDeath(int sysno) {
#if defined(__i386__) || defined(__x86_64__) || defined(__mips__) #if defined(__i386__) || defined(__x86_64__) || defined(__mips__)
case __NR_set_thread_area: case __NR_set_thread_area:
#endif #endif
@ -61,16 +61,16 @@ index d9d1882..0567557 100644
case __NR_unshare: case __NR_unshare:
#if !defined(__mips__) && !defined(__aarch64__) #if !defined(__mips__) && !defined(__aarch64__)
case __NR_vfork: case __NR_vfork:
@@ -549,6 +549,8 @@ @@ -514,6 +514,8 @@ bool SyscallSets::IsAllowedAddressSpaceAccess(int sysno) {
case __NR_mlock:
case __NR_munlock: case __NR_munlock:
case __NR_munmap: case __NR_munmap:
case __NR_mseal:
+ case __NR_mremap: + case __NR_mremap:
+ case __NR_membarrier: + case __NR_membarrier:
return true; return true;
case __NR_madvise: case __NR_madvise:
case __NR_mincore: case __NR_mincore:
@@ -566,7 +568,6 @@ @@ -531,7 +533,6 @@ bool SyscallSets::IsAllowedAddressSpaceAccess(int sysno) {
case __NR_modify_ldt: case __NR_modify_ldt:
#endif #endif
case __NR_mprotect: case __NR_mprotect:

View file

@ -37,24 +37,25 @@ for discussion about this, see https://www.openwall.com/lists/musl/2021/07/16/1
#define HAVE_FCNTL_H 1 #define HAVE_FCNTL_H 1
--- a/base/debug/stack_trace.cc --- a/base/debug/stack_trace.cc
+++ b/base/debug/stack_trace.cc +++ b/base/debug/stack_trace.cc
@@ -311,7 +311,7 @@ @@ -251,7 +253,9 @@
}
std::string StackTrace::ToStringWithPrefix(cstring_view prefix_string) const { void StackTrace::OutputToStream(std::ostream* os) const {
+#if defined(__GLIBC__) && !defined(_AIX)
OutputToStreamWithPrefix(os, nullptr);
+#endif
}
std::string StackTrace::ToString() const {
@@ -281,7 +281,7 @@
}
std::string StackTrace::ToStringWithPrefix(const char* prefix_string) const {
std::stringstream stream; std::stringstream stream;
-#if !defined(__UCLIBC__) && !defined(_AIX) -#if !defined(__UCLIBC__) && !defined(_AIX)
+#if defined(__GLIBC__) && !defined(_AIX) +#if defined(__GLIBC__) && !defined(_AIX)
OutputToStreamWithPrefix(&stream, prefix_string); OutputToStreamWithPrefix(&stream, prefix_string);
#endif #endif
return stream.str(); return stream.str();
@@ -335,7 +335,7 @@
}
std::ostream& operator<<(std::ostream& os, const StackTrace& s) {
-#if !defined(__UCLIBC__) && !defined(_AIX)
+#if defined(__GLIBC__) && !defined(_AIX)
s.OutputToStream(&os);
#else
os << "StackTrace::OutputToStream not implemented.";
--- a/base/debug/stack_trace_unittest.cc --- a/base/debug/stack_trace_unittest.cc
+++ b/base/debug/stack_trace_unittest.cc +++ b/base/debug/stack_trace_unittest.cc
@@ -33,7 +33,7 @@ @@ -33,7 +33,7 @@
@ -66,3 +67,12 @@ for discussion about this, see https://www.openwall.com/lists/musl/2021/07/16/1
// StackTrace::OutputToStream() is not implemented under uclibc, nor AIX. // StackTrace::OutputToStream() is not implemented under uclibc, nor AIX.
// See https://crbug.com/706728 // See https://crbug.com/706728
@@ -156,7 +156,7 @@
#endif // !defined(__UCLIBC__) && !defined(_AIX)
-#if BUILDFLAG(IS_POSIX) && !BUILDFLAG(IS_ANDROID)
+#if (BUILDFLAG(IS_POSIX) && defined(__GLIBC__)) && !BUILDFLAG(IS_ANDROID)
#if !BUILDFLAG(IS_IOS)
static char* newArray() {
// Clang warns about the mismatched new[]/delete if they occur in the same

View file

@ -100,25 +100,25 @@ musl does not implement mallinfo()/mallinfo2()
/* Define to 1 if you have the <malloc.h> header file. */ /* Define to 1 if you have the <malloc.h> header file. */
#define HAVE_MALLOC_H 1 #define HAVE_MALLOC_H 1
--- a/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_partition_alloc.cc --- a/base/allocator/partition_allocator/shim/allocator_shim_default_dispatch_to_partition_alloc.cc
+++ b/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_partition_alloc.cc +++ b/base/allocator/partition_allocator/shim/allocator_shim_default_dispatch_to_partition_alloc.cc
@@ -660,7 +660,7 @@ @@ -717,7 +717,7 @@
#endif // !PA_BUILDFLAG(IS_APPLE) && !PA_BUILDFLAG(IS_ANDROID) #endif // !BUILDFLAG(IS_APPLE) && !BUILDFLAG(IS_ANDROID)
-#if PA_BUILDFLAG(IS_LINUX) || PA_BUILDFLAG(IS_CHROMEOS) -#if BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS)
+#if 0 +#if 0
SHIM_ALWAYS_EXPORT struct mallinfo mallinfo(void) __THROW { SHIM_ALWAYS_EXPORT struct mallinfo mallinfo(void) __THROW {
partition_alloc::SimplePartitionStatsDumper allocator_dumper; base::SimplePartitionStatsDumper allocator_dumper;
Allocator()->DumpStats("malloc", true, &allocator_dumper); Allocator()->DumpStats("malloc", true, &allocator_dumper);
--- a/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_partition_alloc_unittest.cc --- a/base/allocator/partition_allocator/shim/allocator_shim_default_dispatch_to_partition_alloc_unittest.cc
+++ b/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_partition_alloc_unittest.cc +++ b/base/allocator/partition_allocator/shim/allocator_shim_default_dispatch_to_partition_alloc_unittest.cc
@@ -29,7 +29,7 @@ @@ -24,7 +24,7 @@
#if PA_BUILDFLAG(USE_PARTITION_ALLOC_AS_MALLOC) #if BUILDFLAG(USE_PARTITION_ALLOC_AS_MALLOC)
// Platforms on which we override weak libc symbols. // Platforms on which we override weak libc symbols.
-#if PA_BUILDFLAG(IS_LINUX) || PA_BUILDFLAG(IS_CHROMEOS) -#if BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS)
+#if (PA_BUILDFLAG(IS_LINUX) && defined(__GLIBC__)) || PA_BUILDFLAG(IS_CHROMEOS) +#if (BUILDFLAG(IS_LINUX) && defined(__GLIBC__)) || BUILDFLAG(IS_CHROMEOS)
PA_NOINLINE void FreeForTest(void* data) { NOINLINE void FreeForTest(void* data) {
free(data); free(data);
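To make the gating concrete, a hedged sketch (not Chromium code): mallinfo() and mallinfo2() are glibc extensions declared in <malloc.h>, so any caller has to be compiled out when the libc is musl, which is what the #if 0 above does for the shim's override.

// Hedged sketch: glibc-only allocator statistics, gated the same way as above.
#include <cstdio>
#if defined(__GLIBC__)
#include <malloc.h>   // declares mallinfo()/mallinfo2() on glibc
#endif

int main() {
#if defined(__GLIBC__)
  struct mallinfo mi = mallinfo();       // deprecated in newer glibc, but present
  std::printf("bytes in use: %d\n", mi.uordblks);
#else
  std::printf("mallinfo() is not provided by this libc (e.g. musl)\n");
#endif
  return 0;
}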

View file

@ -0,0 +1,13 @@
M115 needs ifuncs for this
--
--- a/base/allocator/partition_allocator/partition_alloc_config.h
+++ b/base/allocator/partition_allocator/partition_alloc_config.h
@@ -155,7 +155,7 @@
#define PA_CONFIG_HAS_MEMORY_TAGGING() \
(defined(ARCH_CPU_ARM64) && defined(__clang__) && \
!defined(ADDRESS_SANITIZER) && \
- (BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_ANDROID)))
+ ((BUILDFLAG(IS_LINUX) && defined(__GLIBC__)) || BUILDFLAG(IS_ANDROID)))
#if PA_CONFIG(HAS_MEMORY_TAGGING)
static_assert(sizeof(void*) == 8);

View file

@ -1,25 +0,0 @@
Hard-disable memory tagging on ARM64 - it does exist there but musl is
missing some required interface headers for it, and it's not clear how
to make the partalloc support code for it work.
--- ./base/allocator/partition_allocator/partition_alloc.gni.orig
+++ ./base/allocator/partition_allocator/partition_alloc.gni
@@ -30,7 +30,7 @@
}
has_memory_tagging =
- current_cpu == "arm64" && is_clang && !is_asan && (is_linux || is_android)
+ false
declare_args() {
# Causes all the allocations to be routed via allocator_shim.cc. Usually,
--- ./base/allocator/partition_allocator/src/partition_alloc/aarch64_support.h.orig
+++ ./base/allocator/partition_allocator/src/partition_alloc/aarch64_support.h
@@ -10,7 +10,7 @@
#include "partition_alloc/build_config.h"
#include "partition_alloc/partition_alloc_buildflags.h"
-#if PA_BUILDFLAG(IS_ANDROID) || PA_BUILDFLAG(IS_LINUX)
+#if PA_BUILDFLAG(IS_ANDROID) || (PA_BUILDFLAG(IS_LINUX) && defined(__GLIBC__))
#define HAS_HW_CAPS
#endif

View file

@ -1,33 +0,0 @@
the pvalloc/valloc symbols are obsolete and not implemented in musl
--
--- a/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_unittest.cc
+++ b/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_unittest.cc
@@ -410,7 +410,7 @@
ASSERT_GE(aligned_allocs_intercepted_by_alignment[128], 1u);
ASSERT_GE(aligned_allocs_intercepted_by_size[53], 1u);
-#if PA_BUILDFLAG(IS_POSIX) && !PA_BUILDFLAG(IS_ANDROID)
+#if (PA_BUILDFLAG(IS_POSIX) && defined(__GLIBC__)) && !PA_BUILDFLAG(IS_ANDROID)
void* pvalloc_ptr = pvalloc(67);
ASSERT_NE(nullptr, pvalloc_ptr);
ASSERT_EQ(0u, reinterpret_cast<uintptr_t>(pvalloc_ptr) % kPageSize);
@@ -449,7 +449,7 @@
free(memalign_ptr);
ASSERT_GE(frees_intercepted_by_addr[Hash(memalign_ptr)], 1u);
-#if PA_BUILDFLAG(IS_POSIX) && !PA_BUILDFLAG(IS_ANDROID)
+#if (PA_BUILDFLAG(IS_POSIX) && defined(__GLIBC__)) && !PA_BUILDFLAG(IS_ANDROID)
free(pvalloc_ptr);
ASSERT_GE(frees_intercepted_by_addr[Hash(pvalloc_ptr)], 1u);
#endif // PA_BUILDFLAG(IS_POSIX) && !PA_BUILDFLAG(IS_ANDROID)
--- a/base/process/memory_unittest.cc
+++ b/base/process/memory_unittest.cc
@@ -359,7 +359,7 @@
#endif // BUILDFLAG(IS_WIN)
#endif // !BUILDFLAG(IS_MAC)
-#if BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS)
+#if (BUILDFLAG(IS_LINUX) && defined(__GLIBC__)) || BUILDFLAG(IS_CHROMEOS)
TEST_F(OutOfMemoryDeathTest, Valloc) {
ASSERT_OOM_DEATH({

View file

@ -0,0 +1,10 @@
--- a/third_party/node/update_npm_deps
+++ b/third_party/node/update_npm_deps
@@ -20,7 +20,6 @@
patch -d node_modules/@types/d3/ -p1 < chromium_d3_types_index.patch
patch -d node_modules/html-minifier/ -p1 < html_minifier.patch
-patch -p1 < typescript.patch
rsync -c --delete -r -q --include-from="npm_include.txt" --exclude-from="npm_exclude.txt" \
--prune-empty-dirs "node_modules/" "node_modules_filtered/"

View file

@ -0,0 +1,113 @@
--- a/chrome/browser/process_singleton_posix.cc
+++ b/chrome/browser/process_singleton_posix.cc
@@ -607,7 +607,7 @@
// |reader| is for sending back ACK message.
void HandleMessage(const std::string& current_dir,
const std::vector<std::string>& argv,
- const std::vector<const uint8_t> additional_data,
+ const std::vector<uint8_t> additional_data,
SocketReader* reader);
private:
@@ -664,7 +664,7 @@
void ProcessSingleton::LinuxWatcher::HandleMessage(
const std::string& current_dir,
const std::vector<std::string>& argv,
- const std::vector<const uint8_t> additional_data,
+ const std::vector<uint8_t> additional_data,
SocketReader* reader) {
DCHECK(ui_task_runner_->BelongsToCurrentThread());
DCHECK(reader);
@@ -754,7 +754,7 @@
base::StringToSizeT(tokens[0], &num_args);
std::vector<std::string> command_line(tokens.begin() + 1, tokens.begin() + 1 + num_args);
- std::vector<const uint8_t> additional_data;
+ std::vector<uint8_t> additional_data;
if (tokens.size() >= 3 + num_args) {
size_t additional_data_size;
base::StringToSizeT(tokens[1 + num_args], &additional_data_size);
@@ -763,7 +763,7 @@
std::string(1, kTokenDelimiter));
const uint8_t* additional_data_bits =
reinterpret_cast<const uint8_t*>(remaining_args.c_str());
- additional_data = std::vector<const uint8_t>(
+ additional_data = std::vector<uint8_t>(
additional_data_bits, additional_data_bits + additional_data_size);
}
--- a/chrome/browser/process_singleton.h
+++ b/chrome/browser/process_singleton.h
@@ -102,7 +102,7 @@
using NotificationCallback =
base::RepeatingCallback<bool(const base::CommandLine& command_line,
const base::FilePath& current_directory,
- const std::vector<const uint8_t> additional_data)>;
+ const std::vector<uint8_t> additional_data)>;
#if BUILDFLAG(IS_WIN)
ProcessSingleton(const std::string& program_name,
--- a/chrome/browser/process_singleton_win.cc
+++ b/chrome/browser/process_singleton_win.cc
@@ -81,7 +81,7 @@
bool ParseCommandLine(const COPYDATASTRUCT* cds,
base::CommandLine* parsed_command_line,
base::FilePath* current_directory,
- std::vector<const uint8_t>* parsed_additional_data) {
+ std::vector<uint8_t>* parsed_additional_data) {
// We should have enough room for the shortest command (min_message_size)
// and also be a multiple of wchar_t bytes. The shortest command
// possible is L"START\0\0" (empty command line, current directory,
@@ -163,7 +163,7 @@
msg.substr(fourth_null + 1, fifth_null - fourth_null);
const uint8_t* additional_data_bytes =
reinterpret_cast<const uint8_t*>(additional_data.c_str());
- *parsed_additional_data = std::vector<const uint8_t>(additional_data_bytes,
+ *parsed_additional_data = std::vector<uint8_t>(additional_data_bytes,
additional_data_bytes + additional_data_length);
return true;
@@ -187,7 +187,7 @@
base::CommandLine parsed_command_line(base::CommandLine::NO_PROGRAM);
base::FilePath current_directory;
- std::vector<const uint8_t> additional_data;
+ std::vector<uint8_t> additional_data;
if (!ParseCommandLine(cds, &parsed_command_line, &current_directory, &additional_data)) {
*result = TRUE;
return true;
--- a/electron/shell/browser/api/electron_api_app.cc
+++ b/electron/shell/browser/api/electron_api_app.cc
@@ -519,10 +519,10 @@
const base::RepeatingCallback<
void(const base::CommandLine& command_line,
const base::FilePath& current_directory,
- const std::vector<const uint8_t> additional_data)>& callback,
+ const std::vector<uint8_t> additional_data)>& callback,
const base::CommandLine& cmd,
const base::FilePath& cwd,
- const std::vector<const uint8_t> additional_data) {
+ const std::vector<uint8_t> additional_data) {
// Make sure the callback is called after app gets ready.
if (Browser::Get()->is_ready()) {
callback.Run(cmd, cwd, std::move(additional_data));
@@ -1082,7 +1082,7 @@
void App::OnSecondInstance(const base::CommandLine& cmd,
const base::FilePath& cwd,
- const std::vector<const uint8_t> additional_data) {
+ const std::vector<uint8_t> additional_data) {
v8::Isolate* isolate = JavascriptEnvironment::GetIsolate();
v8::Locker locker(isolate);
v8::HandleScope handle_scope(isolate);
--- a/electron/shell/browser/api/electron_api_app.h
+++ b/electron/shell/browser/api/electron_api_app.h
@@ -195,7 +195,7 @@
std::string GetLocaleCountryCode();
void OnSecondInstance(const base::CommandLine& cmd,
const base::FilePath& cwd,
- const std::vector<const uint8_t> additional_data);
+ const std::vector<uint8_t> additional_data);
bool HasSingleInstanceLock() const;
bool RequestSingleInstanceLock(gin::Arguments* args);
void ReleaseSingleInstanceLock();

View file

@ -1,11 +0,0 @@
--- ./buildtools/third_party/libc++/__config_site.orig
+++ ./buildtools/third_party/libc++/__config_site
@@ -18,7 +18,7 @@
/* #undef _LIBCPP_ABI_FORCE_MICROSOFT */
/* #undef _LIBCPP_HAS_NO_THREADS */
/* #undef _LIBCPP_HAS_NO_MONOTONIC_CLOCK */
-/* #undef _LIBCPP_HAS_MUSL_LIBC */
+#define _LIBCPP_HAS_MUSL_LIBC 1
/* #undef _LIBCPP_HAS_THREAD_API_PTHREAD */
/* #undef _LIBCPP_HAS_THREAD_API_EXTERNAL */
/* #undef _LIBCPP_HAS_THREAD_API_WIN32 */

View file

@ -1,8 +1,8 @@
# Contributor: Aiden Grossman <agrossman154@yahoo.com>
-# Maintainer:
+# Maintainer: Aiden Grossman <agrossman154@yahoo.com>
pkgname=fdm-materials
pkgver=5.2.2
-pkgrel=1
+pkgrel=0
pkgdesc="FDM Material Database"
url="https://github.com/Ultimaker/fdm_materials"
arch="noarch"

View file

@ -1,13 +1,12 @@
# Contributor: Aiden Grossman <agrossman154@yahoo.com>
# Maintainer: Aiden Grossman <agrossman154@yahoo.com>
pkgname=freecad
-pkgver=0.20.2
+pkgver=0.21.0
-pkgrel=5
+pkgrel=0
pkgdesc="Free and open source 3D parametric modeler"
url="https://freecadweb.org/"
license="LGPL-2.0-or-later"
-arch="" # removed dependency py3-pyside2
-#arch="x86_64" # dependency OpenCascade is only x86_64
+arch="x86_64" # dependency OpenCascade is only x86_64
depends="
graphviz
hdf5
@ -45,16 +44,11 @@ makedepends="
vtk-dev
xerces-c-dev
"
+options="!check" # Failing test units upstream as well
checkdepends="xvfb-run mesa mesa-dri-gallium font-opensans"
source="https://github.com/FreeCAD/FreeCAD/archive/$pkgver/freecad-$pkgver.tar.gz
-$pkgname-python3.11-1.patch::https://github.com/FreeCAD/FreeCAD/commit/fe02d63c8c9b1280978be841d04e68a0a55cceb9.patch
+$pkgname-cstdint.patch::https://github.com/FreeCAD/FreeCAD/commit/f8f02f089537497a1bf46bc1057f01659b6636c1.patch
-numpy-1.20.patch
no-execinfo.patch
-no-workaround-spnav.patch
-resourceDirectory.patch
-tests.patch
-opencascade-7.8.0.patch
-missing-include-cstdint.patch
"
builddir="$srcdir/FreeCAD-$pkgver"
@ -96,13 +90,7 @@ package() {
}
sha512sums="
-c3acd77dd2bb9a2a23ac354da3b6102effb89c95d675e91421d65486414dfe8cc0188a7212245e0deb63f17b9c5df76133017be09e4cd14b833be8cbec52a08d freecad-0.20.2.tar.gz
+f5b4428217d9abd18aed1dcd8a5c77132b90173d07627ff9164919a87f04d242f9d616e9655751ec06ff57fc3e242090cc4d7e1d7045fdae9bc2e281e7855281 freecad-0.21.0.tar.gz
-75a237f7ed7a89a98c0e5bdb3d3f0788749602daf718089aa0814e05f93ced1e15ad5867c7c87f170b48c5984f9ace1bbc95c4f386ce72bfb8d616323b47f1e5 freecad-python3.11-1.patch
+27dffdbb124096e7fcbec3589cdcfc587ea09bf039c1c1108edb68a9a4fd1278f66a39642636f98b8759a44da92485fd748f8b3d615b0625bc37fc717130e237 freecad-cstdint.patch
-80b08b031810fce7b6d698c662f64fa4f8a904f283f46b478b1d718529164c0ee61ce190f633abf04e03212720480f3f0603b0c1e160af79d7b6bb82da3bd0e4 numpy-1.20.patch
73aaba7015dce7048eb7d2456131b5b5ba4673cc980503331987be54d99daed5f61db015ca33d7d2ef0f02bd3192da8ce122c103c3b93f9959927deb4f0b933e no-execinfo.patch
-15696bdaaf77482f1b5d3806535a8004c8cec7d598d62092d9f0394b4ca9e2ad6cedd77c4b86a83a06324d16678c1c6bbf3a390b807729717a2f513e858afd50 no-workaround-spnav.patch
-8ba13b17bad66316757d180c1b9e9e72a24382627eac7c43a2264b3b5101e6e8f701775f2b805ed733f500fbcd8b0e8e422ec58a9ab3d948d613b666157d4c52 resourceDirectory.patch
-5db19e0aa2ca1fd21f4c56afc9db54390a799262aaa0a741704c2c304b0068fd6ca1dcc086465e12e9c0cfe06aac750aaf9b8f5f4db324539af4dd3394803ff9 tests.patch
-f933680dea8744e147f38abce389cb7fd0ec3fb3566454fdd5e6ea07b2faaac5fe61aabe1df3bda9f0d7b4fca16055aa2ad700e9cce10d2604ae37b761b68ade opencascade-7.8.0.patch
-fec515cc63830f0e715527c7890173705b24e7d99d225821ec4300104cf3affdee49243bbd4d0a331a902cf04db756a1b8f18f0a17cc71f5757f8b5c73c78ede missing-include-cstdint.patch
"

View file

@ -1,8 +1,8 @@
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=freetube
-pkgver=0.21.3
+pkgver=0.19.1
-pkgrel=2
+pkgrel=0
pkgdesc="An open source desktop YouTube player built with privacy in mind."
arch="x86_64 aarch64" # blocked by electron
license="AGPL-3.0-only"
@ -11,46 +11,42 @@ depends="
font-roboto
"
makedepends="
-electron-tasje
-nodejs
+npm
yarn
"
url="https://freetubeapp.io"
source="
$pkgname-$pkgver.tar.gz::https://github.com/FreeTubeApp/FreeTube/archive/v$pkgver-beta.tar.gz
freetube.sh
-tasje-dotdash.patch
-electron27.patch
+freetube.desktop
+package-only-necessary.patch
+disable-asar.patch
"
options="net !check" # No testsuite
builddir="$srcdir"/FreeTube-$pkgver-beta
-export npm_config_nodedir=/usr/include/electron/node_headers
-export npm_config_build_from_source=true
-prepare() {
-default_prepare
-yarn --cache-folder "$srcdir"/yarn-cache install --frozen-lockfile --ignore-scripts
-}
build() {
-yarn --cache-folder "$srcdir"/yarn-cache run pack
-tasje --config _scripts/ebuilder.config.js pack
+yarn --cache-folder "$srcdir"/yarn-cache install
+yarn --cache-folder "$srcdir"/yarn-cache --frozen-lockfile build
}
package() {
# directory creation + program copy
-install -Dm755 build/resources/app.asar -t "$pkgdir"/usr/lib/$pkgname/
+install -dm755 "$pkgdir"/usr/lib
+cp -r "$builddir"/build/linux-unpacked/resources/app "$pkgdir"/usr/lib/$pkgname
# link binaries + other items
install -Dm755 "$srcdir"/freetube.sh "$pkgdir"/usr/bin/$pkgname
install -Dm644 "$builddir"/_icons/icon.svg "$pkgdir"/usr/share/icons/hicolor/scalable/$pkgname.svg
-install -Dm644 build/freetube.desktop "$pkgdir"/usr/share/applications/$pkgname.desktop
+install -Dm644 "$srcdir"/freetube.desktop "$pkgdir"/usr/share/applications/$pkgname.desktop
}
sha512sums="
-22e5ab677cd442d50237b2d62534698d8ad73a37e1731003dc23c4ea3da992b3cae936f0bb3a0a86cd4b7fba731c9fa53276cb0a6cd5bab213ff2a6c9006cb05 freetube-0.21.3.tar.gz
+2b607754092af7239e7afcc441d86009dba1295bf6f836ab0f2fe9ceffb3045c7da1e4d175a25aa72179241ea62d4905185c6990641213e8e66573d4daa74bfe freetube-0.19.1.tar.gz
-2ce2effc794bb663789cefe968b5899122127983dbfa1b240aa33a2be383720b18204e6d01b4a550df72956f02b6636b79c93a58f470a970b09b770f5b8f2fc4 freetube.sh
+8508af3d983b305b9e3c8ffb478c723feb4f8301a69cd1ad588854ce13e9b51bab1b68599c2f7f8a6005e9aa8949d36d057a3660d70e4acf2f4d125e5f8ef136 freetube.sh
-d27cb896b65a7e8d52ffe86e5f74eed72b6cf976b28e1a13012d34c7eceba5ff6f20298017738dfa93c0336ffa52b8ee4da7e06b02747062898db7e678819526 tasje-dotdash.patch
+0b74e115852e71f665730a09e4d06a07fd3ae68fbc9bf8aaf280a32b70298910c692c99535cd2294df95394ad9b3b9f4a127bb6c9d50824946895dbb54649872 freetube.desktop
-f12c19969981fe22c05d14007d6d9900e854757119ab16b5af8185d94850f3d96b0e22ef03bddc776e47088bc3de88d86bcd631e5e15fb74616f26b021b2a93f electron27.patch
+0869c3de3b93b92b8964d3baffa3dc3cf584cd1185af29104cad7bd928c39d9bc30209ed6113053afe96ca624f72fd3cc44de241f96f1cb757f16908b037ad0f package-only-necessary.patch
+67107797825cbb98cc3a46e288fe92e06e0cc91b1a44a10edcf68045d06c9679c587bd2706051ce0b109091295247b5e8ec572297cd0c72532afd529cf56449d disable-asar.patch
"

View file

@ -0,0 +1,26 @@
From 913f1a251bee06fc8e8380b9c5e5ac3ceadcb876 Mon Sep 17 00:00:00 2001
From: Antoine Martin <dev@ayakael.net>
Date: Fri, 12 Aug 2022 22:34:57 -0400
Subject: [PATCH 1/1] disable-asar
---
package.json | 3 +++
1 file changed, 3 insertions(+)
diff --git a/package.json b/package.json
index 842bdda8..a82a1ec3 100644
--- a/package.json
+++ b/package.json
@@ -18,6 +18,9 @@
"bugs": {
"url": "https://github.com/FreeTubeApp/FreeTube/issues"
},
+ "build": {
+ "asar" : false
+ },
"scripts": {
"build": "run-s rebuild:electron pack build-release",
"build:arm64": "run-s rebuild:electron pack build-release:arm64",
--
2.36.2

File diff suppressed because it is too large

View file

@ -0,0 +1,10 @@
[Desktop Entry]
Name=FreeTube
GenericName=YouTube Player
Comment=An open source desktop YouTube player built with privacy in mind.
Exec=freetube %U
Terminal=false
Type=Application
Icon=freetube
MimeType=x-scheme-handler/freetube;
Categories=Network;

View file

@ -1,3 +1,3 @@
#!/bin/sh
-exec electron /usr/lib/freetube/app.asar "$@"
+exec electron /usr/lib/freetube "$@"

View file

@ -0,0 +1,28 @@
diff --git a/_scripts/build.js.orig b/_scripts/build.js
index 035f986..75991d9 100644
--- a/_scripts/build.js.orig
+++ b/_scripts/build.js
@@ -36,12 +36,13 @@ if (platform === 'darwin') {
arch = Arch.armv7l
}
- targets = Platform.LINUX.createTarget(['deb', 'zip', '7z', 'apk', 'rpm', 'AppImage', 'pacman'], arch)
+ targets = Platform.LINUX.createTarget(['dir'], arch)
}
const config = {
appId: `io.freetubeapp.${name}`,
copyright: 'Copyleft © 2020-2023 freetubeapp@protonmail.com',
+ electronDist: '/usr/lib/electron',
// asar: false,
// compression: 'store',
productName,
@@ -85,7 +86,7 @@ const config = {
linux: {
category: 'Network',
icon: '_icons/icon.svg',
- target: ['deb', 'zip', '7z', 'apk', 'rpm', 'AppImage', 'pacman'],
+ target: ['dir'],
},
// See the following issues for more information
// https://github.com/jordansissel/fpm/issues/1503

View file

@ -1,12 +0,0 @@
https://codeberg.org/selfisekai/electron_tasje/issues/27
--- ./_scripts/ebuilder.config.js.orig
+++ ./_scripts/ebuilder.config.js
@@ -20,7 +20,7 @@
files: [
'_icons/iconColor.*',
'icon.svg',
- './dist/**/*',
+ 'dist/**/*',
'!dist/web/*',
'!node_modules/**/*',
],

View file

@ -0,0 +1,43 @@
# Contributor: Aiden Grossman <agrossman154@yahoo.com>
# Maintainer: Aiden Grossman <agrossman154@yahoo.com>
pkgname=libmedc
pkgver=4.1.1
pkgrel=0
pkgdesc="Open source library for numerical simulation"
url="https://www.salome-platform.org/"
arch="all"
license="GPL-3.0-or-later"
makedepends="cmake hdf5-dev swig python3-dev samurai"
options="!check" #test suite is nonfunctional with python bindings
subpackages="$pkgname-dev $pkgname-doc $pkgname-python-pyc $pkgname-python:_py"
source="
https://ftp.kaist.ac.kr/macports/distfiles/libmed/med-$pkgver.tar.gz
hdf5.patch
cmake-config-dir.patch
"
builddir="$srcdir/med-${pkgver}_SRC"
build() {
cmake -B build -G Ninja \
-DCMAKE_BUILD_TYPE=None \
-DCMAKE_INSTALL_PREFIX=/usr \
-DMEDFILE_BUILD_TESTS=OFF \
-DMEDFILE_BUILD_PYTHON=ON
cmake --build build
}
package() {
DESTDIR="$pkgdir" cmake --install build
}
_py() {
pkgdesc="Python bindings for libmedc"
depends="python3"
amove usr/lib/python3*
}
sha512sums="
8917e7ecfe30e1259b0927c8e1c3d6efd86ed2386813f6d90217bd95589199478e587f0815031ab65cacf7901a30b77a6307414f9073caffe6e7f013e710d768 med-4.1.1.tar.gz
68d9291e73a68d674081314028c0fce7bbd4a7b78b93b7e5078117ce62f2d07318bc33ec95091ce677148ec3926c1ce653d0760c34e74b29257a7be59210f040 hdf5.patch
8d0f58cd67d205fbacaff0e6da76e2ee5473457b478ede13a551ebe5853c0716c7406b74c3792e1ace33a34d352fccca8dd2940f063a7c060a12529d060a991a cmake-config-dir.patch
"

View file

@ -0,0 +1,11 @@
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -101,7 +101,7 @@
IF(WIN32 AND NOT CYGWIN)
SET(INSTALL_CMAKE_CONFIG_DIR cmake)
ELSE()
- SET(INSTALL_CMAKE_CONFIG_DIR share/cmake/medfile-${MED_STR_VERSION})
+ SET(INSTALL_CMAKE_CONFIG_DIR lib${LIB_SUFFIX}/cmake/medfile-${MED_STR_VERSION})
ENDIF()
SET(INSTALL_INCLUDE_DIR include)

View file

@ -0,0 +1,94 @@
Originally from https://gist.github.com/jedbrown/527ef81ff59a0dccf833da40fdd15a47
diff -rupN med-4.1.0/config/cmake_files/medMacros.cmake med-4.1.0-new/config/cmake_files/medMacros.cmake
--- med-4.1.0/config/cmake_files/medMacros.cmake 2021-12-03 09:35:30.675827163 +0100
+++ med-4.1.0-new/config/cmake_files/medMacros.cmake 2021-12-03 09:32:31.894994147 +0100
@@ -447,7 +447,7 @@ MACRO(MED_FIND_HDF5)
##
## Requires 1.10.x version
##
- IF (NOT HDF_VERSION_MAJOR_REF EQUAL 1 OR NOT HDF_VERSION_MINOR_REF EQUAL 10 OR NOT HDF_VERSION_RELEASE_REF GREATER 1)
+ IF (HDF5_VERSION VERSION_LESS 1.10.2)
MESSAGE(FATAL_ERROR "HDF5 version is ${HDF_VERSION_REF}. Only versions >= 1.10.2 are supported.")
ENDIF()
##
diff -rupN med-4.1.0/src/ci/MEDfileCompatibility.c med-4.1.0-new/src/ci/MEDfileCompatibility.c
--- med-4.1.0/src/ci/MEDfileCompatibility.c 2021-12-03 09:35:30.676827162 +0100
+++ med-4.1.0-new/src/ci/MEDfileCompatibility.c 2021-12-03 09:33:26.292942149 +0100
@@ -71,7 +71,7 @@ MEDfileCompatibility(const char* const f
_hversionMMR=10000*_hmajeur+100*_hmineur+_hrelease;
/* ISCRUTE(_hversionMMR); */
/* ISCRUTE(HDF_VERSION_NUM_REF); */
- if ( (_hversionMMR >= HDF_VERSION_NUM_REF) && (_hmineur == HDF_VERSION_MINOR_REF) ) *hdfok = MED_TRUE;
+ if (_hversionMMR >= HDF_VERSION_NUM_REF) *hdfok = MED_TRUE;
/* TODO : Vérifier si la version mineure HDF du fichier est supérieure
à la version mineure de la bibliothèque HDF utilisée :
@@ -113,7 +113,7 @@ MEDfileCompatibility(const char* const f
#if MED_NUM_MAJEUR != 4
#error "Don't forget to update the test version here when you change the major version of the library !"
#endif
-#if H5_VERS_MINOR > 10
+#if H5_VERS_MINOR > 14
#error "Don't forget to check the compatibility version of the library, depending on the internal hdf model choice !"
#error "Cf. _MEDfileCreate ..."
#endif
diff -rupN med-4.1.0/src/hdfi/_MEDfileCreate.c med-4.1.0-new/src/hdfi/_MEDfileCreate.c
--- med-4.1.0/src/hdfi/_MEDfileCreate.c 2021-12-03 09:35:30.677827161 +0100
+++ med-4.1.0-new/src/hdfi/_MEDfileCreate.c 2021-12-03 09:32:31.894994147 +0100
@@ -159,7 +159,7 @@ med_idt _MEDfileCreate(const char * cons
* En HDF5-1.10.0p1 cela n'a aucun effet !
* Un test autoconf permet de fixer un intervalle de version HDF à MED.
*/
-#if H5_VERS_MINOR > 10
+#if H5_VERS_MINOR > 14
#error "Don't forget to change the compatibility version of the library !"
#endif
diff -rupN med-4.1.0/src/hdfi/_MEDfileOpen.c med-4.1.0-new/src/hdfi/_MEDfileOpen.c
--- med-4.1.0/src/hdfi/_MEDfileOpen.c 2021-12-03 09:35:30.677827161 +0100
+++ med-4.1.0-new/src/hdfi/_MEDfileOpen.c 2021-12-03 09:32:31.894994147 +0100
@@ -72,7 +72,7 @@ med_idt _MEDfileOpen(const char * const
• The creation order tracking property, H5P_CRT_ORDER_TRACKED, has been set in the group creation property list (see H5Pset_link_creation_order).
*/
-#if H5_VERS_MINOR > 10
+#if H5_VERS_MINOR > 14
#error "Don't forget to change the compatibility version of the library !"
#endif
/* L'avantage de bloquer le modèle interne HDF5
diff -rupN med-4.1.0/src/hdfi/_MEDmemFileOpen.c med-4.1.0-new/src/hdfi/_MEDmemFileOpen.c
--- med-4.1.0/src/hdfi/_MEDmemFileOpen.c 2021-12-03 09:35:30.678827160 +0100
+++ med-4.1.0-new/src/hdfi/_MEDmemFileOpen.c 2021-12-03 09:32:31.894994147 +0100
@@ -434,7 +434,7 @@ med_idt _MEDmemFileOpen(const char * con
goto ERROR;
}
-#if H5_VERS_MINOR > 10
+#if H5_VERS_MINOR > 14
#error "Don't forget to change the compatibility version of the library !"
#endif
if ( H5Pset_libver_bounds( _fapl, H5F_LIBVER_18, H5F_LIBVER_18) ) {
diff -rupN med-4.1.0/src/hdfi/_MEDparFileCreate.c med-4.1.0-new/src/hdfi/_MEDparFileCreate.c
--- med-4.1.0/src/hdfi/_MEDparFileCreate.c 2021-12-03 09:35:30.678827160 +0100
+++ med-4.1.0-new/src/hdfi/_MEDparFileCreate.c 2021-12-03 09:32:31.894994147 +0100
@@ -64,7 +64,7 @@ med_idt _MEDparFileCreate(const char * c
* En HDF5-1.10.0p1 cela n'a aucun effet !
* Un test autoconf permet de fixer un intervalle de version HDF à MED.
*/
-#if H5_VERS_MINOR > 10
+#if H5_VERS_MINOR > 14
#error "Don't forget to change the compatibility version of the library !"
#endif
diff -rupN med-4.1.0/src/hdfi/_MEDparFileOpen.c med-4.1.0-new/src/hdfi/_MEDparFileOpen.c
--- med-4.1.0/src/hdfi/_MEDparFileOpen.c 2021-12-03 09:35:30.679827159 +0100
+++ med-4.1.0-new/src/hdfi/_MEDparFileOpen.c 2021-12-03 09:32:31.894994147 +0100
@@ -55,7 +55,7 @@ med_idt _MEDparFileOpen(const char * con
MED_ERR_(_fid,MED_ERR_INIT,MED_ERR_PROPERTY,MED_ERR_PARALLEL_MSG);
goto ERROR;
}
-#if H5_VERS_MINOR > 10
+#if H5_VERS_MINOR > 14
#error "Don't forget to change the compatibility version of the library !"
#endif
if ( H5Pset_libver_bounds( _fapl, H5F_LIBVER_18, H5F_LIBVER_18 ) ) {

View file

@ -2,7 +2,7 @@
# Maintainer: Anjandev Momi <anjan@momi.ca>
pkgname=libnest2d
pkgver=0.4
- pkgrel=6
+ pkgrel=5
pkgdesc="2D irregular bin packaging and nesting library written in modern C++"
url="https://github.com/tamasmeszaros/libnest2d"
arch="noarch"

View file

@ -0,0 +1,44 @@
# Contributor: guddaff <guddaff@protonmail.com>
# Maintainer: guddaff <guddaff@protonmail.com>
pkgname=libspnav
pkgver=1.1
pkgrel=0
pkgdesc="library for communicating with spacenavd or 3dxsrv"
url="https://spacenav.sourceforge.net/"
arch="all"
license="BSD-3-Clause"
options="!check" #no checks
makedepends="libx11-dev"
subpackages="$pkgname-static $pkgname-dev"
source="https://github.com/FreeSpacenav/libspnav/releases/download/v$pkgver/libspnav-$pkgver.tar.gz
configure.patch
"
prepare() {
default_prepare
sed -i "s/@PKGVERSION@/$pkgver/" "$builddir"/configure
}
build() {
./configure \
--build=$CBUILD \
--host=$CHOST \
--prefix=/usr \
--sysconfdir=/etc \
--mandir=/usr/share/man \
--localstatedir=/var
make
}
check() {
make check
}
package() {
make DESTDIR="$pkgdir" install
}
sha512sums="
94770d9449dd02ade041d3589bcae7664fa990c4a4feca7b2b1e6542b65aa7073305595310b9e639f10716cf15aaad913e57496fb79bdd4dba5bf703ec8299ab libspnav-1.1.tar.gz
1536a172843459c1f26806ad774194afddf93baca0421d0803337eb90b6dd4bcc9dc9b6ebcb1a4e7de0c6f52b16da51538d302a07d57f0625a3a3311a14327f9 configure.patch
"

View file

@ -0,0 +1,22 @@
--- a/configure
+++ b/configure
@@ -6,18 +6,8 @@
OPT=yes
DBG=yes
X11=yes
-VER=`git describe --tags 2>/dev/null`
+VER="@PKGVERSION@"
-if [ -z "$VER" ]; then
- VER=`git rev-parse --short HEAD 2>/dev/null`
- if [ -z "$VER" ]; then
- VER=v`pwd | grep 'libspnav-[0-9]\+\.' | sed 's/.*libspnav-\(\([0-9]\+\.\)\+[0-9]\+\).*$/\1/'`
- if [ $VER = v ]; then
- VER='<unknown version>'
- fi
- fi
-fi
-
echo "configuring libspnav - $VER"
srcdir="`dirname "$0"`"
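
Taken together with the sed call in prepare() above, this patch lets the APKBUILD pin the reported version instead of querying git. A minimal sketch of that substitution, assuming pkgver=1.1 and a copy of the patched configure in the working directory:

pkgver=1.1
# same substitution prepare() performs in place, shown on stdout here
sed "s/@PKGVERSION@/$pkgver/" configure | grep '^VER='
# prints: VER="1.1"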

View file

@ -1,7 +1,7 @@
# Contributor: Quillith <tag.quill@protonmail.com>
# Maintainer: Quillith <tag.quill@protonmail.com>
pkgname=nb
- pkgver=7.12.1
+ pkgver=7.9.1
pkgrel=0
pkgdesc="Command line note-taking, bookmarking, archiving, and knowledge base application"
url="https://github.com/xwmx/nb"
@ -41,5 +41,5 @@ full() {
}
sha512sums="
- ed3d41a809e39a19711c6c97c38216f17f144b8b474eb94aec4134f9756da03440073f3f6557acf8f7959d3d9fba6392d1d5f59e8b94d5269b7336b11353457e nb-7.12.1.tar.gz
+ cb7aee4f7fb6fa3a0e5e47fdbc87911375a431e5ad821e748d84769cb3384bc7098368b0611c70d4548a12d884cb161ac5286fc28da6a40514736cc3668fc751 nb-7.9.1.tar.gz
"

View file

@ -1,71 +1,38 @@
# Contributor: Anjandev Momi <anjan@momi.ca>
- # Maintainer: Celeste <cielesti@protonmail.com>
- maintainer="Celeste <cielesti@protonmail.com>"
+ # Maintainer: Anjandev Momi <anjan@momi.ca>
pkgname=nlopt
- pkgver=2.8.0
+ pkgver=2.7.1
pkgrel=0
- pkgdesc="Library for nonlinear optimization"
- url="https://github.com/stevengj/nlopt"
+ pkgdesc="library for nonlinear optimization, wrapping many algorithms for global and local, constrained or unconstrained, optimization"
+ url="https://github.com/stevengj/nlopt/"
arch="all"
license="LGPL-2.1-or-later"
- makedepends="
- cmake
- guile-dev
- python3-dev
- samurai
- swig
- "
- subpackages="
- $pkgname-dev
- $pkgname-doc
- $pkgname-guile
- "
+ makedepends="samurai cmake"
+ subpackages="$pkgname-dev $pkgname-doc"
source="$pkgname-$pkgver.tar.gz::https://github.com/stevengj/nlopt/archive/refs/tags/v$pkgver.tar.gz"
- case "$CARCH" in
- # octave unavailable on these 3 archs
- s390x|riscv64|ppc64le) ;;
- *)
- makedepends="$makedepends octave-dev"
- subpackages="$subpackages $pkgname-octave"
- ;;
- esac
build() {
if [ "$CBUILD" != "$CHOST" ]; then
- local crossopts="-DCMAKE_SYSTEM_NAME=Linux -DCMAKE_HOST_SYSTEM_NAME=Linux"
+ CMAKE_CROSSOPTS="-DCMAKE_SYSTEM_NAME=Linux -DCMAKE_HOST_SYSTEM_NAME=Linux"
fi
cmake -B build -G Ninja \
-DCMAKE_INSTALL_PREFIX=/usr \
-DCMAKE_INSTALL_LIBDIR=lib \
- -DBUILD_SHARED_LIBS=ON \
- -DCMAKE_BUILD_TYPE=MinSizeRel \
- $crossopts
+ -DBUILD_SHARED_LIBS=True \
+ -DCMAKE_BUILD_TYPE=minsizerel \
+ $CMAKE_CROSSOPTS .
cmake --build build
}
check() {
- ctest --test-dir build --output-on-failure
+ cd build
+ CTEST_OUTPUT_ON_FAILURE=TRUE ctest
}
package() {
DESTDIR="$pkgdir" cmake --install build
}
- guile() {
- pkgdesc="$pkgdesc (Guile bindings)"
- depends="$pkgname=$pkgver-r$pkgrel guile"
- amove usr/lib/guile usr/share/guile
- }
- octave() {
- pkgdesc="$pkgdesc (Octave bindings)"
- depends="$pkgname=$pkgver-r$pkgrel octave"
- amove usr/lib/octave usr/share/octave
- }
sha512sums="
- cb294caa5532e11ae0d22ed849705920bbae79f712144c840a5ca865ef8e6a15c6c9540c81ced0c3c05b9f44c360d50f74e235e69d893be34b7e1c5599f07c71 nlopt-2.8.0.tar.gz
+ e23cb522fc696010574c14b72be85acc0f8ccf0bf208bf2b8789c57d6c5a6e6d419ee10330581518b1c1567018ae909b626ce7761d4fbd5bf112916871e420e2 nlopt-2.7.1.tar.gz
"

View file

@ -0,0 +1,76 @@
From 00a4692989c4e2f191525f73f24ad8727bacdf41 Mon Sep 17 00:00:00 2001
From: Torsten Paul <Torsten.Paul@gmx.de>
Date: Sat, 5 Feb 2022 18:38:31 +0100
Subject: [PATCH] CVE-2022-0496 Out-of-bounds memory access in DXF loader.
Public issue:
https://github.com/openscad/openscad/issues/4037
Fix in master branch:
https://github.com/openscad/openscad/pull/4090
---
src/dxfdata.cc | 27 +++++++++++++++++++++++----
1 file changed, 23 insertions(+), 4 deletions(-)
diff --git a/src/dxfdata.cc b/src/dxfdata.cc
index 2bb7236746..aa6b6f3976 100644
--- a/src/dxfdata.cc
+++ b/src/dxfdata.cc
@@ -441,6 +441,11 @@ DxfData::DxfData(double fn, double fs, double fa,
auto lv = grid.data(this->points[lines[idx].idx[j]][0], this->points[lines[idx].idx[j]][1]);
for (size_t ki = 0; ki < lv.size(); ++ki) {
int k = lv.at(ki);
+ if (k < 0 || k >= lines.size()) {
+ LOG(message_group::Warning,Location::NONE,"",
+ "Bad DXF line index in %1$s.",QuotedString(boostfs_uncomplete(filename, fs::current_path()).generic_string()));
+ continue;
+ }
if (k == idx || lines[k].disabled) continue;
goto next_open_path_j;
}
@@ -466,13 +471,20 @@ DxfData::DxfData(double fn, double fs, double fa,
auto lv = grid.data(ref_point[0], ref_point[1]);
for (size_t ki = 0; ki < lv.size(); ++ki) {
int k = lv.at(ki);
+ if (k < 0 || k >= lines.size()) {
+ LOG(message_group::Warning,Location::NONE,"",
+ "Bad DXF line index in %1$s.",QuotedString(boostfs_uncomplete(filename, fs::current_path()).generic_string()));
+ continue;
+ }
if (lines[k].disabled) continue;
- if (grid.eq(ref_point[0], ref_point[1], this->points[lines[k].idx[0]][0], this->points[lines[k].idx[0]][1])) {
+ auto idk0 = lines[k].idx[0]; // make it easier to read and debug
+ auto idk1 = lines[k].idx[1];
+ if (grid.eq(ref_point[0], ref_point[1], this->points[idk0][0], this->points[idk0][1])) {
current_line = k;
current_point = 0;
goto found_next_line_in_open_path;
}
- if (grid.eq(ref_point[0], ref_point[1], this->points[lines[k].idx[1]][0], this->points[lines[k].idx[1]][1])) {
+ if (grid.eq(ref_point[0], ref_point[1], this->points[idk1][0], this->points[idk1][1])) {
current_line = k;
current_point = 1;
goto found_next_line_in_open_path;
@@ -501,13 +513,20 @@ DxfData::DxfData(double fn, double fs, double fa,
auto lv = grid.data(ref_point[0], ref_point[1]);
for (size_t ki = 0; ki < lv.size(); ++ki) {
int k = lv.at(ki);
+ if (k < 0 || k >= lines.size()) {
+ LOG(message_group::Warning,Location::NONE,"",
+ "Bad DXF line index in %1$s.",QuotedString(boostfs_uncomplete(filename, fs::current_path()).generic_string()));
+ continue;
+ }
if (lines[k].disabled) continue;
- if (grid.eq(ref_point[0], ref_point[1], this->points[lines[k].idx[0]][0], this->points[lines[k].idx[0]][1])) {
+ auto idk0 = lines[k].idx[0]; // make it easier to read and debug
+ auto idk1 = lines[k].idx[1];
+ if (grid.eq(ref_point[0], ref_point[1], this->points[idk0][0], this->points[idk0][1])) {
current_line = k;
current_point = 0;
goto found_next_line_in_closed_path;
}
- if (grid.eq(ref_point[0], ref_point[1], this->points[lines[k].idx[1]][0], this->points[lines[k].idx[1]][1])) {
+ if (grid.eq(ref_point[0], ref_point[1], this->points[idk1][0], this->points[idk1][1])) {
current_line = k;
current_point = 1;
goto found_next_line_in_closed_path;

View file

@ -0,0 +1,27 @@
From 84addf3c1efbd51d8ff424b7da276400bbfa1a4b Mon Sep 17 00:00:00 2001
From: Torsten Paul <Torsten.Paul@gmx.de>
Date: Sat, 5 Feb 2022 18:45:29 +0100
Subject: [PATCH] CVE-2022-0497 Out-of-bounds memory access in comment parser.
Public issue:
https://github.com/openscad/openscad/issues/4043
Fix in master branch:
https://github.com/openscad/openscad/pull/4044
---
src/comment.cpp | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/comment.cpp b/src/comment.cpp
index f02ad2c5f6..1ce3ab547b 100644
--- a/src/comment.cpp
+++ b/src/comment.cpp
@@ -92,7 +92,7 @@ static std::string getComment(const std::string &fulltext, int line)
}
int end = start + 1;
- while (fulltext[end] != '\n') end++;
+ while (end < fulltext.size() && fulltext[end] != '\n') end++;
std::string comment = fulltext.substr(start, end - start);

View file

@ -0,0 +1,47 @@
From 9aa0d7e9f2914fe5f547bdde69202161d1c6064d Mon Sep 17 00:00:00 2001
From: Jordan Brown <github@jordan.maileater.net>
Date: Sun, 18 Jul 2021 21:01:46 -0700
Subject: [PATCH] Fix build failure with "generic_print_polyhedron" on
CGAL-5.3.
---
src/cgalutils-polyhedron.cc | 13 -------------
src/cgalutils.h | 1 -
2 files changed, 14 deletions(-)
diff --git a/src/cgalutils-polyhedron.cc b/src/cgalutils-polyhedron.cc
index 9f4f98f623..41cca4b3f2 100644
--- a/src/cgalutils-polyhedron.cc
+++ b/src/cgalutils-polyhedron.cc
@@ -337,19 +337,6 @@ namespace CGALUtils {
}
};
- template <typename Polyhedron>
- std::string printPolyhedron(const Polyhedron &p) {
- std::ostringstream sstream;
- sstream.precision(20);
-
- Polyhedron_writer writer;
- generic_print_polyhedron(sstream, p, writer);
-
- return sstream.str();
- }
-
- template std::string printPolyhedron(const CGAL_Polyhedron &p);
-
}; // namespace CGALUtils
#endif /* ENABLE_CGAL */
diff --git a/src/cgalutils.h b/src/cgalutils.h
index 21a01cdba6..c08a3aa7df 100644
--- a/src/cgalutils.h
+++ b/src/cgalutils.h
@@ -45,7 +45,6 @@ namespace CGALUtils {
bool is_approximately_convex(const PolySet &ps);
Geometry const* applyMinkowski(const Geometry::Geometries &children);
- template <typename Polyhedron> std::string printPolyhedron(const Polyhedron &p);
template <typename Polyhedron> bool createPolySetFromPolyhedron(const Polyhedron &p, PolySet &ps);
template <typename Polyhedron> bool createPolyhedronFromPolySet(const PolySet &ps, Polyhedron &p);
template <class Polyhedron_A, class Polyhedron_B>

View file

@ -0,0 +1,38 @@
From abfebc651343909b534ef337aacc7604c99cf0ea Mon Sep 17 00:00:00 2001
From: Torsten Paul <Torsten.Paul@gmx.de>
Date: Wed, 2 Feb 2022 02:30:59 +0100
Subject: [PATCH] CGAL build fix, v5.4 renames projection traits header files
and classes.
---
src/cgalutils-tess.cc | 9 +++++----
1 file changed, 5 insertions(+), 4 deletions(-)
diff --git a/src/cgalutils-tess.cc b/src/cgalutils-tess.cc
index ec1cc1eeb3..37f8cf08b1 100644
--- a/src/cgalutils-tess.cc
+++ b/src/cgalutils-tess.cc
@@ -6,10 +6,12 @@
#pragma push_macro("NDEBUG")
#undef NDEBUG
#include <CGAL/Constrained_Delaunay_triangulation_2.h>
-#if CGAL_VERSION_NR >= CGAL_VERSION_NUMBER(4,11,0)
- #include <CGAL/Triangulation_2_projection_traits_3.h>
+#if CGAL_VERSION_NR < 1050401000
+#include <CGAL/Triangulation_2_projection_traits_3.h>
+typedef CGAL::Triangulation_2_filtered_projection_traits_3<K> Projection;
#else
- #include <CGAL/Triangulation_2_filtered_projection_traits_3.h>
+#include <CGAL/Projection_traits_3.h>
+typedef CGAL::Filtered_projection_traits_3<K> Projection;
#endif
#include <CGAL/Triangulation_face_base_with_info_2.h>
#pragma pop_macro("NDEBUG")
@@ -19,7 +21,6 @@ struct FaceInfo {
bool in_domain() { return nesting_level%2 == 1; }
};
-typedef CGAL::Triangulation_2_filtered_projection_traits_3<K> Projection;
typedef CGAL::Triangulation_face_base_with_info_2<FaceInfo, K> Fbb;
typedef CGAL::Triangulation_data_structure_2<
CGAL::Triangulation_vertex_base_2<Projection>,

View file

@ -0,0 +1,62 @@
From 08bf69b4115c989fc5671254e0d05735d01bcca5 Mon Sep 17 00:00:00 2001
From: Torsten Paul <Torsten.Paul@gmx.de>
Date: Wed, 2 Feb 2022 00:50:43 +0100
Subject: [PATCH] Fix build issue with overloaded join().
---
src/openscad.cc | 11 +++++------
1 file changed, 5 insertions(+), 6 deletions(-)
diff --git a/src/openscad.cc b/src/openscad.cc
index a6f648d2a2..3c46cda2af 100644
--- a/src/openscad.cc
+++ b/src/openscad.cc
@@ -65,7 +65,6 @@
#include <chrono>
#include <boost/algorithm/string.hpp>
#include <boost/algorithm/string/split.hpp>
-#include <boost/algorithm/string/join.hpp>
#include <boost/range/adaptor/transformed.hpp>
#include <boost/program_options.hpp>
#include <boost/filesystem.hpp>
@@ -307,7 +306,7 @@ void set_render_color_scheme(const std::string color_scheme, const bool exit_if_
}
if (exit_if_not_found) {
- LOG(message_group::None,Location::NONE,"",(boost::join(ColorMap::inst()->colorSchemeNames(), "\n")));
+ LOG(message_group::None,Location::NONE,"",(boost::algorithm::join(ColorMap::inst()->colorSchemeNames(), "\n")));
exit(1);
} else {
@@ -885,7 +884,7 @@ struct CommaSeparatedVector
};
template <class Seq, typename ToString>
-std::string join(const Seq &seq, const std::string &sep, const ToString &toString)
+std::string str_join(const Seq &seq, const std::string &sep, const ToString &toString)
{
return boost::algorithm::join(boost::adaptors::transform(seq, toString), sep);
}
@@ -947,7 +946,7 @@ int main(int argc, char **argv)
("P,P", po::value<string>(), "customizer parameter set")
#ifdef ENABLE_EXPERIMENTAL
("enable", po::value<vector<string>>(), ("enable experimental features: " +
- join(boost::make_iterator_range(Feature::begin(), Feature::end()), " | ",
+ str_join(boost::make_iterator_range(Feature::begin(), Feature::end()), " | ",
[](const Feature *feature) {
return feature->get_name();
}) +
@@ -964,11 +963,11 @@ int main(int argc, char **argv)
("render", po::value<string>()->implicit_value(""), "for full geometry evaluation when exporting png")
("preview", po::value<string>()->implicit_value(""), "[=throwntogether] -for ThrownTogether preview png")
("animate", po::value<unsigned>(), "export N animated frames")
- ("view", po::value<CommaSeparatedVector>(), ("=view options: " + boost::join(viewOptions.names(), " | ")).c_str())
+ ("view", po::value<CommaSeparatedVector>(), ("=view options: " + boost::algorithm::join(viewOptions.names(), " | ")).c_str())
("projection", po::value<string>(), "=(o)rtho or (p)erspective when exporting png")
("csglimit", po::value<unsigned int>(), "=n -stop rendering at n CSG elements when exporting png")
("colorscheme", po::value<string>(), ("=colorscheme: " +
- join(ColorMap::inst()->colorSchemeNames(), " | ",
+ str_join(ColorMap::inst()->colorSchemeNames(), " | ",
[](const std::string& colorScheme) {
return (colorScheme == ColorMap::inst()->defaultColorSchemeName() ? "*" : "") + colorScheme;
}) +

143
backports/openscad/APKBUILD Normal file
View file

@ -0,0 +1,143 @@
# Maintainer: Marian Buschsieweke <marian.buschsieweke@ovgu.de>
pkgname=openscad
pkgver=2021.01
pkgrel=11
pkgdesc="The programmers solid 3D CAD modeller"
url="http://openscad.org/"
# Glew and GLES on ARM don't play well with each other
arch="all !armv7 !aarch64 !armhf"
license="GPL-2.0-only"
subpackages="$pkgname-doc"
makedepends="
bison
boost-dev
cairo-dev
cgal-dev
cmake
double-conversion-dev
eigen-dev
flex
glew-dev
gmp-dev
harfbuzz-dev
libxml2-dev
libzip-dev
libspnav-dev
mpfr-dev
opencsg-dev
qscintilla-dev
qt5-qtbase-dev
qt5-qtmultimedia-dev
"
checkdepends="
imagemagick
mesa
mesa-dri-gallium
python3
xvfb-run
"
source="
https://files.openscad.org/openscad-$pkgver.src.tar.gz
0001-fix-CVE-2022-0496.patch
0002-fix-CVE-2022-0497.patch
0003-fix-build-with-cgal-5.3.patch
0004-fix-build-with-cgal-5.4.patch
0005-fix-boost-join.patch
"
# Handle tests failing only on some arch's, or building against libs only
# provided by some arch's.
case "$CARCH" in
x86_64)
makedepends="$makedepends lib3mf-dev"
export LIB3MF_INCLUDEPATH="/usr/include/lib3mf/Bindings/Cpp"
;;
x86)
makedepends="$makedepends lib3mf-dev"
export LIB3MF_INCLUDEPATH="/usr/include/lib3mf/Bindings/Cpp"
_arch_failing_tests="
3mfpngtest_cube10
"
;;
s390x)
_arch_failing_tests="
3mfpngtest_cube10
cgalbinstlcgalpngtest_bad-stl-pcbvicebar
cgalbinstlcgalpngtest_bad-stl-tardis
cgalbinstlcgalpngtest_fn_bug
cgalbinstlcgalpngtest_issue1225
cgalpngtest_import_3mf-tests
csgpngtest_import_3mf-tests
opencsgtest_import_3mf-tests
throwntogethertest_import_3mf-tests
"
;;
*)
# lib3mf is not available on all other archs
_arch_failing_tests="
3mfpngtest_cube10
cgalpngtest_import_3mf-tests
csgpngtest_import_3mf-tests
opencsgtest_import_3mf-tests
throwntogethertest_import_3mf-tests
"
;;
esac
# secfixes:
# 2021.01-r2:
# - CVE-2022-0496
# - CVE-2022-0497
build() {
qmake-qt5 PREFIX="/usr"
make
}
check() {
cd tests
# collect failing tests in a way that creates friendly git diffs
local failing_tests
failing_tests="
3mfexport_3mf-export
astdumptest_allexpressions
cgalbinstlcgalpngtest_polyhedron-nonplanar-tests
cgalpngtest_nef3_broken
cgalpngtest_polyhedron-nonplanar-tests
cgalstlcgalpngtest_polyhedron-nonplanar-tests
csgpngtest_nef3_broken
csgpngtest_polyhedron-nonplanar-tests
echotest_allexpressions
echotest_function-literal-.*
lazyunion-.*
monotonepngtest_polyhedron-nonplanar-tests
opencsgtest_nef3_broken
pdfexporttest_centered
pdfexporttest_simple-pdf
throwntogethertest_nef3_broken
$_arch_failing_tests
"
# trim whitespace before and after list and separate expressions by | (regex or)
failing_tests="$(echo "$failing_tests" | tr -d '\n' | sed -e 's/^\s*//' -e 's/\s*$//' -e 's/\s\+/|/g')"
cmake \
-DEXPERIMENTAL=OFF \
.
make
xvfb-run ctest -E "$failing_tests"
}
package() {
make INSTALL_ROOT="$pkgdir" install
}
sha512sums="
8deaa26bf4c295c12da38f323d2b2e6f827851337f5bc1cc9c79afc083c9f913c19a263086e6e853bf2c8434c1ccc705ea22ddb02dc99d39bb1e5e03fc58d128 openscad-2021.01.src.tar.gz
369353f13bb6d2bf123af1e25b5f264cd757efa5d0cd8c226a3803a41a0fdb74aff7411ddf91cb394f9cd295e10e05159487fb6e9c2d661280bf060e391bc3da 0001-fix-CVE-2022-0496.patch
708a491c5a6166bdf92419bce68dbcb4eccbb97ce1880c5a3c919a90ae5af19aa9092e46b81fe8705e8470189d43c4ecad19dece9f1e405681a186c0bd8f5ca4 0002-fix-CVE-2022-0497.patch
090b8a8e53cc7560695d4065db891a3602923d893d752e3d007a42f2cd32c5f6f444d735d716658fb6f3d979e8cf2489cbff3a58a9070621ab5448a6dbf3372b 0003-fix-build-with-cgal-5.3.patch
7d3067bcadf5a56b3b21b80f36e8d1fd99047029b86db6803b71760d47a8b199d3485c6486767707273aa57f9a83aaafcb763fe7a1ee295b01a776f7e5db038f 0004-fix-build-with-cgal-5.4.patch
f0538eb98543a2a15534794f91c35f10d21fe04a8696639a4f08cd98daf63e6efc855b102f41ecb05a1e1bffeecc7c445fcfad7c3a90f32ec28c5a1646863466 0005-fix-boost-join.patch
"

View file

@ -3,7 +3,7 @@
pkgname=openssl1.1-compat
pkgver=1.1.1w
_abiver=${pkgver%.*}
- pkgrel=1
+ pkgrel=0
pkgdesc="toolkit for transport layer security (TLS) - version 1.1"
url="https://www.openssl.org/"
arch="all"
@ -79,6 +79,9 @@ build() {
case "$CARCH" in
aarch64*) _target="linux-aarch64" ;;
arm*) _target="linux-armv4" ;;
+ mips64*) _target="linux64-mips64" ;;
+ # explicit _optflags is needed to prevent automatic -mips3 addition
+ mips*) _target="linux-mips32"; _optflags="-mips32" ;;
ppc) _target="linux-ppc" ;;
ppc64) _target="linux-ppc64" ;;
ppc64le) _target="linux-ppc64le" ;;
@ -86,7 +89,6 @@ build() {
x86_64) _target="linux-x86_64"; _optflags="enable-ec_nistp_64_gcc_128" ;;
s390x) _target="linux64-s390x";;
riscv64) _target="linux-generic64";;
- loongarch64) _target="linux-generic64";;
*) msg "Unable to determine architecture from (CARCH=$CARCH)" ; return 1 ;;
esac
@ -104,7 +106,7 @@
perl ./Configure \
$_target \
--prefix=/usr \
- --libdir=/usr/lib \
+ --libdir=lib \
--openssldir=/etc/ssl1.1 \
shared \
no-zlib \
@ -145,16 +147,23 @@ package() {
_libcrypto() {
pkgdesc="Crypto library from openssl"
replaces="libressl2.7-libcrypto"
- amove etc
- amove usr/lib/libcrypto*
- amove usr/lib/engines-$_abiver
+ mkdir -p "$subpkgdir"/lib "$subpkgdir"/usr/lib
+ mv "$pkgdir"/etc "$subpkgdir"/
+ for i in "$pkgdir"/usr/lib/libcrypto*; do
+ mv $i "$subpkgdir"/lib/
+ ln -s ../../lib/${i##*/} "$subpkgdir"/usr/lib/${i##*/}
+ done
+ mv "$pkgdir"/usr/lib/engines-$_abiver "$subpkgdir"/usr/lib/
}
_libssl() {
pkgdesc="SSL shared libraries"
- amove usr/lib/libssl*
+ mkdir -p "$subpkgdir"/lib "$subpkgdir"/usr/lib
+ for i in "$pkgdir"/usr/lib/libssl*; do
+ mv $i "$subpkgdir"/lib/
+ ln -s ../../lib/${i##*/} "$subpkgdir"/usr/lib/${i##*/}
+ done
}
_static() { _static() {
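
The removed (-) side of the last hunk relies on abuild's amove helper instead of the manual mkdir/mv/ln loop, which also means the libraries stay under usr/lib in the subpackages rather than being moved to /lib with compatibility symlinks. As a rough sketch only (an assumption about amove's behaviour, not its actual implementation), "amove usr/lib/libssl*" acts roughly like:

# simplified approximation of amove: relocate matching paths from $pkgdir
# into $subpkgdir while preserving their relative directory layout
for path in "$pkgdir"/usr/lib/libssl*; do
	rel="${path#"$pkgdir"/}"
	mkdir -p "$subpkgdir/${rel%/*}"
	mv "$path" "$subpkgdir/$rel"
done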

View file

@ -1,39 +1,47 @@
- # Automatically generated by apkbuild-cpan, template 4
+ # Automatically generated by apkbuild-cpan, template 2
# Contributor: Timo Teräs <timo.teras@iki.fi>
- # Maintainer: Celeste <cielesti@protonmail.com>
- maintainer="Celeste <cielesti@protonmail.com>"
+ # Maintainer: Timo Teräs <timo.teras@iki.fi>
pkgname=perl-math-random-isaac-xs
- pkgver=1.004
- pkgrel=8
- #_pkgreal is used by apkbuild-cpan to find modules at MetaCpan
_pkgreal=Math-Random-ISAAC-XS
+ pkgver=1.004
+ pkgrel=6
pkgdesc="C implementation of the ISAAC PRNG algorithm"
url="https://metacpan.org/release/Math-Random-ISAAC-XS/"
arch="all"
- license="Public-Domain"
- depends="perl"
- makedepends="perl-dev perl-module-build"
- checkdepends="perl-test-nowarnings"
+ license="unrestricted"
+ cpandepends=""
+ cpanmakedepends="perl-test-nowarnings"
+ cpancheckdepends=""
+ depends="$cpandepends"
+ makedepends="perl-dev perl-module-build $cpanmakedepends"
+ checkdepends="$cpancheckdepends"
subpackages="$pkgname-doc"
- source="https://cpan.metacpan.org/authors/id/J/JA/JAWNSY/Math-Random-ISAAC-XS-$pkgver.tar.gz"
+ source="https://cpan.metacpan.org/authors/id/J/JA/JAWNSY/$_pkgreal-$pkgver.tar.gz"
builddir="$srcdir/$_pkgreal-$pkgver"
+ prepare() {
+ default_prepare || return 1
+ cd "$builddir"
+ export CFLAGS=$(perl -MConfig -E 'say $Config{ccflags}')
+ perl Build.PL installdirs=vendor || return 1
+ }
build() {
+ cd "$builddir"
export CFLAGS=$(perl -MConfig -E 'say $Config{ccflags}')
- perl Build.PL \
- --installdirs=vendor \
- --create_packlist=0
./Build
}
+ package() {
+ cd "$builddir"
+ ./Build install destdir="$pkgdir" || return 1
+ find "$pkgdir" \( -name perllocal.pod -o -name .packlist \) -delete
+ }
check() {
+ cd "$builddir"
./Build test
}
- package() {
- ./Build install --destdir="$pkgdir"
- }
- sha512sums="
- 40c46b5f247f585a407ef9f36b5874d9cf03ec05963a9d92d988ebd63daf1e37b1b51308845d4596f47b5ad7203953bcb7fbb421c905b526dbe99b246ccb4d87 Math-Random-ISAAC-XS-1.004.tar.gz
- "
+ sha512sums="40c46b5f247f585a407ef9f36b5874d9cf03ec05963a9d92d988ebd63daf1e37b1b51308845d4596f47b5ad7203953bcb7fbb421c905b526dbe99b246ccb4d87 Math-Random-ISAAC-XS-1.004.tar.gz"

View file

@ -0,0 +1,39 @@
# Automatically generated by apkbuild-cpan, template 1
# Contributor: Francesco Colista <fcolista@alpinelinux.org>
# Maintainer: Francesco Colista <fcolista@alpinelinux.org>
pkgname=perl-module-find
_pkgreal=Module-Find
pkgver=0.16
pkgrel=0
pkgdesc="Find and use installed modules in a (sub)category"
url="https://metacpan.org/release/Module-Find"
arch="noarch"
license="GPL-1.0-or-later OR Artistic-1.0-Perl"
depends="perl"
subpackages="$pkgname-doc"
source="https://cpan.metacpan.org/authors/id/C/CR/CRENZ/Module-Find-$pkgver.tar.gz"
builddir="$srcdir/$_pkgreal-$pkgver"
prepare() {
export CFLAGS=$(perl -MConfig -E 'say $Config{ccflags}')
PERL_MM_USE_DEFAULT=1 perl Makefile.PL INSTALLDIRS=vendor
}
build() {
export CFLAGS=$(perl -MConfig -E 'say $Config{ccflags}')
make
}
check() {
make test
}
package() {
make DESTDIR="$pkgdir" install
find "$pkgdir" \( -name perllocal.pod -o -name .packlist \) -delete
}
sha512sums="
a0c935fd229320ce74052180571c0da9667dc87a717e039bec27120a8ac1552988352038efd1805d62ac40fcaf7985c44b6d9c56648b379f1ca0f03727e550bd Module-Find-0.16.tar.gz
"

View file

@ -0,0 +1,39 @@
# Maintainer: Andy Postnikov <apostnikov@gmail.com>
pkgname=php81-pecl-xmlrpc
_extname=xmlrpc
pkgver=1.0.0_rc3
_pkgver=${pkgver/_rc/RC}
pkgrel=1
pkgdesc="PHP 8.1 extension to write XML-RPC servers and clients - PECL"
url="https://pecl.php.net/package/xmlrpc"
arch="all"
license="PHP-3.01"
depends="php81-xml"
makedepends="php81-dev libxml2-dev"
source="php-pecl-$_extname-$pkgver.tar.gz::https://pecl.php.net/get/$_extname-$_pkgver.tgz"
builddir="$srcdir"/$_extname-$_pkgver
build() {
phpize81
./configure --prefix=/usr --with-php-config=/usr/bin/php-config81
make
}
check() {
make NO_INTERACTION=1 REPORT_EXIT_STATUS=1 test TESTS=--show-diff \
PHP_TEST_SHARED_EXTENSIONS=" \
-d extension=/usr/lib/php81/modules/xml.so \
-d extension=modules/$_extname.so"
php81 -d extension=modules/$_extname.so --ri $_extname
}
package() {
make INSTALL_ROOT="$pkgdir" install
local _confdir="$pkgdir"/etc/php81/conf.d
install -d $_confdir
echo "extension=$_extname" > $_confdir/$_extname.ini
}
sha512sums="
aa8e24d5a732b175adb9870357991d48ead046faed89e579eb63afe06c7ef20af9de29e2d70e71e54e5a15ee37265f69a7d52b39ef2465ced93ba818c1f7e9c1 php-pecl-xmlrpc-1.0.0_rc3.tar.gz
"

View file

@ -0,0 +1,40 @@
# Maintainer: Andy Postnikov <apostnikov@gmail.com>
pkgname=php82-pecl-xmlrpc
_extname=xmlrpc
pkgver=1.0.0_rc3
_pkgver=${pkgver/_rc/RC}
pkgrel=1
pkgdesc="PHP 8.2 extension to write XML-RPC servers and clients - PECL"
url="https://pecl.php.net/package/xmlrpc"
arch="all"
license="PHP-3.01"
depends="php82-xml"
options="!check" # Failure
makedepends="php82-dev libxml2-dev"
source="php-pecl-$_extname-$pkgver.tar.gz::https://pecl.php.net/get/$_extname-$_pkgver.tgz"
builddir="$srcdir"/$_extname-$_pkgver
build() {
phpize82
./configure --prefix=/usr --with-php-config=/usr/bin/php-config82
make
}
check() {
make NO_INTERACTION=1 REPORT_EXIT_STATUS=1 test TESTS=--show-diff \
PHP_TEST_SHARED_EXTENSIONS=" \
-d extension=/usr/lib/php82/modules/xml.so \
-d extension=modules/$_extname.so"
php82 -d extension=modules/$_extname.so --ri $_extname
}
package() {
make INSTALL_ROOT="$pkgdir" install
local _confdir="$pkgdir"/etc/php82/conf.d
install -d $_confdir
echo "extension=$_extname" > $_confdir/$_extname.ini
}
sha512sums="
aa8e24d5a732b175adb9870357991d48ead046faed89e579eb63afe06c7ef20af9de29e2d70e71e54e5a15ee37265f69a7d52b39ef2465ced93ba818c1f7e9c1 php-pecl-xmlrpc-1.0.0_rc3.tar.gz
"

View file

@ -0,0 +1,40 @@
# Maintainer: Patrycja Rosa <alpine@ptrcnull.me>
pkgname=py3-a2wsgi
pkgver=1.8.0
pkgrel=0
pkgdesc="Convert WSGI app to ASGI app or ASGI app to WSGI app"
url="https://github.com/abersheeran/a2wsgi"
arch="noarch"
license="Apache-2.0"
depends="python3"
makedepends="py3-gpep517 py3-pdm-backend py3-wheel"
checkdepends="py3-pytest py3-httpx py3-pytest-asyncio"
subpackages="$pkgname-pyc"
source="https://github.com/abersheeran/a2wsgi/archive/refs/tags/v$pkgver/a2wsgi-$pkgver.tar.gz"
builddir="$srcdir/a2wsgi-$pkgver"
case "$CARCH" in
# test suite blocked by py3-httpx
armhf|ppc64le) options="!check" ;;
esac
build() {
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
check() {
python3 -m venv --clear --without-pip --system-site-packages .testenv
.testenv/bin/python3 -m installer .dist/*.whl
.testenv/bin/python3 -m pytest
}
package() {
python3 -m installer -d "$pkgdir" \
.dist/*.whl
}
sha512sums="
0c3a45ea279e2c104a52a7b70780de7a055312e857b712b534020a2c9f43b62b5d60639b5ce721c5e3717f0315c58496134813772613135b22111905df10d5ad a2wsgi-1.8.0.tar.gz
"

View file

@ -0,0 +1,12 @@
diff --git a/pyproject.toml b/pyproject.toml
index 2e67514..c7f3b40 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -35,5 +35,5 @@ benchmark = [
includes = ["a2wsgi"]
[build-system]
-build-backend = "pdm.pep517.api"
-requires = ["pdm-pep517>=1.0.0"]
+build-backend = "pdm.backend"
+requires = ["pdm-backend>=1.0.0"]

View file

@ -1,46 +0,0 @@
# Contributor: Francesco Colista <fcolista@alpinelinux.org>
# Maintainer: Francesco Colista <fcolista@alpinelinux.org>
pkgname=py3-apsw
_pkgname=apsw
pkgver=3.46.1.0
pkgrel=0
pkgdesc="Another Python SQLite Wrapper"
url="https://github.com/rogerbinns/apsw"
arch="all"
license="Zlib"
depends="python3"
makedepends="
python3-dev
py3-gpep517
py3-setuptools
py3-wheel
sqlite-dev
"
subpackages="$pkgname-pyc"
source="$pkgname-$pkgver.zip::https://github.com/rogerbinns/apsw/releases/download/$pkgver/apsw-$pkgver.zip
detect-sqlite-config.patch
"
builddir="$srcdir/$_pkgname-$pkgver"
build() {
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
check() {
python3 -m venv --clear --without-pip --system-site-packages .testenv
.testenv/bin/python3 -m installer .dist/*.whl
.testenv/bin/python3 setup.py build_test_extension test
}
package() {
python3 -m installer -d "$pkgdir" \
.dist/*.whl
}
sha512sums="
8d24825c8346b05a99b8959ce1fd45ae5162c95b020ecc63bd3491bfd1579370a0e6b1a962f7f64a7e7e415846007e64d90b28e2065ae047e228d60b12b9cb02 py3-apsw-3.46.1.0.zip
8f3957bd6fecb5660a7cab367043e4ccdacd87d8963bbe41cc3d525265de28f08aa207099658d785be29c5c90b818c1418f766995cd780d02b8e36252a389758 detect-sqlite-config.patch
"

View file

@ -1,8 +0,0 @@
diff --git a/setup.apsw b/setup.apsw
index 68dedb9..3ceb10b 100644
--- a/setup.apsw
+++ b/setup.apsw
@@ -1 +1,3 @@
# You can put ini format directives here in addition to command line flags
+[build_ext]
+use_system_sqlite_config = True

View file

@ -1,9 +1,9 @@
# Contributor: Aiden Grossman <agrossman154@yahoo.com>
- # Maintainer:
+ # Maintainer: Aiden Grossman <agrossman154@yahoo.com>
pkgname=py3-arcus
# Needs to be upgraded in sync with libarcus
- pkgver=5.3.0
- pkgrel=1
+ pkgver=5.2.2
+ pkgrel=3
pkgdesc="Python bindings for libarcus"
url="https://github.com/Ultimaker/pyArcus"
arch="all"
@ -35,7 +35,7 @@ package() {
}
sha512sums="
- d4a114994fa3e3156eae95dde58df13237b8bb0571a1219d6dee6b6338fd65f911f27887d6ab32b7a3cb32bc45ca6c25147e7c2d246cb0707326b88246abfbcd py3-arcus-5.3.0.tar.gz
+ b24cbb9a5d7aa917b1b21e46f935b1293645e731d4e475a5abe364237f35708e8e96a1c6f06fe78e2e0e381737f4a27a6db87e42a5f4de9d48091d9de096f9b0 py3-arcus-5.2.2.tar.gz
f14e55cd31c13051981f26364e34da8c94e8eb5227b1cfd6fe44b9f97b5a4dcf6142a1751fa62eb0514a47583e6ec2d51dc253f23cf72c3fe6a1cb5dca136f21 cmake.patch
de75b985607feae0a9c511742915814e9c3d4bc467183f010ccc334ce4d0d952b6ff86020360b78558c4738cc03cf62c386b44ed76bcec12075c4a93dd03eeb7 cmake-helpers.patch
ef593230d5c78da8ba0fc6ea83225c4543857de1837d3151c45e59ffd7c98063b8f97f25d01c15b6a8f90c26c919206f9f7fa26c9650117f4ce7be49ebca876f pyproject.patch

View file

@ -0,0 +1,49 @@
# Contributor: Marian Buschsieweke <marian.buschsieweke@ovgu.de>
# Maintainer: Marian Buschsieweke <marian.buschsieweke@ovgu.de>
pkgname=py3-cbor2
pkgver=5.5.0
pkgrel=0
pkgdesc="encoding and decoding for CBOR (RFC 8949)"
url="https://github.com/agronholm/cbor2"
arch="all"
license="MIT"
depends="
python3
"
makedepends="
py3-gpep517
py3-setuptools
py3-setuptools_scm
py3-wheel
python3-dev
"
checkdepends="
py3-hypothesis
py3-pytest
py3-pytest-cov
py3-tomli
"
subpackages="$pkgname-pyc"
source="$pkgname-$pkgver.tar.gz::https://files.pythonhosted.org/packages/source/c/cbor2/cbor2-$pkgver.tar.gz"
builddir="$srcdir/"cbor2-$pkgver
build() {
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
check() {
python3 -m venv --clear --without-pip --system-site-packages .testenv
.testenv/bin/python3 -m installer .dist/*.whl
.testenv/bin/python3 -m pytest
}
package() {
python3 -m installer -d "$pkgdir" \
.dist/*.whl
}
sha512sums="
64b9ef0e358ac02113f85a2e633ed52900c4d5d6c89f889d9322f181e29fbdf9462663b5eaec7a255d5e6d47c82c4902cb7ff56999251c5ec070c7b610e615b3 py3-cbor2-5.5.0.tar.gz
"

View file

@ -0,0 +1,45 @@
# Contributor: Marian Buschsieweke <marian.buschsieweke@ovgu.de>
# Maintainer: Marian Buschsieweke <marian.buschsieweke@ovgu.de>
pkgname=py3-codespell
_pkgname=${pkgname#py3-}
pkgver=2.2.6
pkgrel=0
pkgdesc="Fix common misspellings in text files, primarily source code"
url="https://github.com/codespell-project/codespell/"
arch="noarch"
license="GPL-2.0-only"
makedepends="
py3-gpep517
py3-setuptools
py3-setuptools_scm
py3-wheel
"
checkdepends="py3-pytest-cov"
depends="py3-chardet"
subpackages="$pkgname-pyc"
source="$pkgname-$pkgver.tar.gz::https://github.com/codespell-project/codespell/archive/refs/tags/v$pkgver.tar.gz"
builddir="$srcdir"/$_pkgname-$pkgver
build() {
SETUPTOOLS_SCM_PRETEND_VERSION=$pkgver \
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
check() {
python3 -m venv --clear --without-pip --system-site-packages .testenv
.testenv/bin/python3 -m installer .dist/*.whl
# tests want to run bin/codespell
PATH="$builddir/.testenv/bin:$PATH" .testenv/bin/python3 -m pytest
}
package() {
python3 -m installer -d "$pkgdir" \
.dist/*.whl
}
sha512sums="
37a6fbcf196447e8204a973499c6b3f7efda67e3caaaa707897da438c602834f91ca49b95020d2c03dcb95528c03451a2d9cfe145b2a24e88615c4a5418a4d2b py3-codespell-2.2.6.tar.gz
"

View file

@ -2,13 +2,13 @@
pkgname=py3-colored
_pyname=${pkgname/py3-/}
pkgver=1.4.4
- pkgrel=3
+ pkgrel=1
pkgdesc="Simple Python library for color and formatting in terminal"
url="https://gitlab.com/dslackw/colored"
arch="noarch"
license="MIT"
depends="python3"
- makedepends="py3-setuptools py3-gpep517"
+ makedepends="py3-setuptools"
checkdepends="py3-pytest"
subpackages="$pkgname-pyc"
source="https://gitlab.com/dslackw/colored/-/archive/$pkgver/colored-$pkgver.tar.gz"
@ -16,14 +16,11 @@ builddir="$srcdir/$_pyname-$pkgver"
options="!check" # No testsuite
build() {
- gpep517 build-wheel \
- --wheel-dir .dist \
- --output-fd 3 3>&1 >&2
+ python3 setup.py build
}
package() {
- gpep517 install-wheel --destdir "$pkgdir" \
- .dist/*.whl
+ python3 setup.py install --prefix=/usr --root="$pkgdir"
}
sha512sums="

Some files were not shown because too many files have changed in this diff.