Compare commits

...

35 commits
edge ... v3.16

Author SHA1 Message Date
d9e1d9a75a
forgejo-ci: use new forge repo 2024-08-21 10:56:44 -04:00
49fc7c5a6e
README: update name 2024-08-12 12:56:44 -04:00
91385431d0
README: update upstream 2024-08-12 12:55:49 -04:00
8239ff460a
README: update to use forge repo 2024-08-12 12:54:03 -04:00
af703bd6b3
gitlab-ci: drop in favor of forgejo actions 2024-08-12 12:46:11 -04:00
b2d72c2376
forgejo: initial implementation 2024-08-12 12:45:43 -04:00
5ad13b5468
README.md: new repo location 2024-08-10 16:36:17 -04:00
1c3bc7ed25
README: update for codeberg 2024-08-09 22:49:40 -04:00
0ade28bd8d
backports/postgresql15: new aport 2023-07-12 17:24:54 -04:00
31a53678e1
user/mastodon: upgrade to 4.1.4 2023-07-12 17:24:50 -04:00
37d1ce98d5
user/gitaly: fix bundler config issues 2023-03-14 16:19:42 -04:00
2e2b6cea68
user/gitlab-foss: upgrade to 15.9.3 2023-03-14 15:35:51 -04:00
f74aa56727
user/gitlab-foss: add missing patch 2023-03-08 13:06:53 -05:00
455d41278e
user/gitlab-foss: use gitlab-shell 14.17 2023-03-03 01:40:15 -05:00
0daa0f5273
user/gitlab-shell: upgrade to 14.17.0 2023-03-03 01:40:01 -05:00
d47baab861
gitlab-ci: fix public RSA key creation and APKINDEX signing 2023-03-03 01:39:38 -05:00
46754752e6
user/gitlab-foss: upgrade to 15.9.2 2023-03-03 00:47:45 -05:00
bf6ae6587a
gitlab-ci: fix APKINDEX signing on push 2023-02-21 22:15:29 -05:00
45ee55f106 Merge branch 'ci-initial-316' into 'v3.16'
gitlab-ci: initial v3.16

See merge request ayakael/user-aports!2
2023-02-10 05:37:41 +00:00
304a1fef18 gitlab-ci: initial v3.16 2023-02-10 05:37:40 +00:00
fcbaddcfd3
gitlab-ci: unhardcode pulled repo-apk branch 2023-02-09 18:03:58 -05:00
669a372fe7
gitlab-ci: Initial verify-build-push pipeline 2023-02-09 18:03:53 -05:00
build@apk-groulx
de3d580722 user/gitlab-foss: upgrade to 15.8.1 2023-02-03 08:49:49 +00:00
build@apk-groulx
26dfacb896 user/gitlab-shell: upgrade to 4.15.0 2023-02-03 08:20:18 +00:00
build@apk-groulx
00faf34c75 user/gitlab-foss: upgrade to 15.8.0 2023-02-03 08:19:55 +00:00
build@apk-groulx
6d3b99ced7 user/git-mirror: new aport 2023-02-03 08:18:18 +00:00
build@apk-groulx
6a9a5e3d15 backports/py3-sphinxcontrib-autoprogram: backported from edge/testing 2023-02-03 08:17:48 +00:00
build@apk-groulx
14f32da4e4 backports/py3-gitlab: backported from edge/testing 2023-02-03 08:17:28 +00:00
build@apk-groulx
9b43f1ac3c community/gitlab-foss: upgrade to 15.8.0 2023-01-30 19:18:15 +00:00
eb114fea0c
user/gitlab-foss: fix shell secret generator 2023-01-14 10:09:11 -05:00
dfeca8184d
user/gitlab-shell: keep check 2023-01-14 09:38:21 -05:00
e0e123d878
user/gitlab-foss: depend on gitlab-shell 14.14 2023-01-13 15:29:03 -05:00
207645bae5
user/gitlab-shell: downgrade to 14.14.0 2023-01-13 15:28:31 -05:00
a3be6f96b6
user/gitlab-shell: upgrade to 14.15.0 2023-01-13 14:52:27 -05:00
build@apk-groulx
787729f3ad Added missing v3.16 backports 2023-01-12 18:10:32 +00:00
158 changed files with 21828 additions and 3613 deletions

26
.forgejo/bin/deploy.sh Executable file
View file

@ -0,0 +1,26 @@
#!/bin/sh
# shellcheck disable=SC3040
set -eu -o pipefail
readonly REPOS="backports user"
readonly BASEBRANCH=$GITHUB_BASE_REF
readonly TARGET_REPO=$CI_ALPINE_REPO
apkgs=$(find package -type f -name "*.apk")
for apk in $apkgs; do
branch=$(echo $apk | awk -F '/' '{print $2}')
arch=$(echo $apk | awk -F '/' '{print $3}')
name=$(echo $apk | awk -F '/' '{print $4}')
echo "Sending $name of arch $arch to $TARGET_REPO/$BASEBRANCH/$branch"
return=$(curl -s --user $FORGE_REPO_USER:$FORGE_REPO_TOKEN --upload-file $apk $TARGET_REPO/$BASEBRANCH/$branch 2>&1)
echo $return
if [ "$return" == "package file already exists" ]; then
echo "Package already exists, refreshing..."
curl -s --user $FORGE_REPO_USER:$FORGE_REPO_TOKEN -X DELETE $TARGET_REPO/$BASEBRANCH/$branch/$arch/$name
curl -s --user $FORGE_REPO_USER:$FORGE_REPO_TOKEN --upload-file $apk $TARGET_REPO/$BASEBRANCH/$branch
fi
done

View file

@ -0,0 +1,66 @@
diff --git a/usr/local/bin/build.sh.orig b/usr/local/bin/build.sh
old mode 100644
new mode 100755
index c3b8f7a..f609018
--- a/usr/local/bin/build.sh.orig
+++ b/usr/local/bin/build.sh
@@ -7,13 +7,15 @@
set -eu -o pipefail
readonly APORTSDIR=$CI_PROJECT_DIR
-readonly REPOS="main community testing non-free"
+readonly REPOS="backports user"
+readonly ALPINE_REPOS="main community testing"
readonly ARCH=$(apk --print-arch)
# gitlab variables
readonly BASEBRANCH=$CI_MERGE_REQUEST_TARGET_BRANCH_NAME
: "${REPODEST:=$HOME/packages}"
-: "${MIRROR:=https://dl-cdn.alpinelinux.org/alpine}"
+: "${MIRROR:=https://ayakael.net/api/packages/forge/alpine}"
+: "${ALPINE_MIRROR:=http://dl-cdn.alpinelinux.org/alpine}"
: "${MAX_ARTIFACT_SIZE:=300000000}" #300M
: "${CI_DEBUG_BUILD:=}"
@@ -68,8 +70,8 @@ report() {
get_release() {
case $BASEBRANCH in
- *-stable) echo v"${BASEBRANCH%-*}";;
- master) echo edge;;
+ v*) echo v"${BASEBRANCH%-*}";;
+ edge) echo edge;;
*) die "Branch \"$BASEBRANCH\" not supported!"
esac
}
@@ -101,11 +103,11 @@ set_repositories_for() {
release=$(get_release)
for repo in $REPOS; do
[ "$repo" = "non-free" ] && continue
- [ "$release" != "edge" ] && [ "$repo" == "testing" ] && continue
+ [ "$release" == "edge" ] && [ "$repo" == "backports" ] && continue
repos="$repos $MIRROR/$release/$repo $REPODEST/$repo"
[ "$repo" = "$target_repo" ] && break
done
- doas sh -c "printf '%s\n' $repos > /etc/apk/repositories"
+ doas sh -c "printf '%s\n' $repos >> /etc/apk/repositories"
doas apk update
}
@@ -118,7 +120,15 @@ apply_offset_limit() {
}
setup_system() {
- doas sh -c "echo $MIRROR/$(get_release)/main > /etc/apk/repositories"
+ local repos='' repo=''
+ local release
+
+ release=$(get_release)
+ for repo in $ALPINE_REPOS; do
+ [ "$release" != "edge" ] && [ "$repo" == "testing" ] && continue
+ repos="$repos $ALPINE_MIRROR/$release/$repo"
+ done
+ doas sh -c "printf '%s\n' $repos > /etc/apk/repositories"
doas apk -U upgrade -a || apk fix || die "Failed to up/downgrade system"
abuild-keygen -ain
doas sed -i -E 's/export JOBS=[0-9]+$/export JOBS=$(nproc)/' /etc/abuild.conf

View file

@ -0,0 +1,52 @@
on:
pull_request:
types: [ assigned, opened, synchronize, reopened ]
jobs:
build-aarch64:
runs-on: aarch64
container:
image: alpinelinux/alpine-gitlab-ci:latest
env:
CI_PROJECT_DIR: ${{ github.workspace }}
CI_DEBUG_BUILD: ${{ runner.debug }}
CI_MERGE_REQUEST_PROJECT_URL: ${{ github.server_url }}/${{ github.repository }}
CI_MERGE_REQUEST_TARGET_BRANCH_NAME: ${{ github.base_ref }}
steps:
- name: Environment setup
run: |
doas apk add nodejs git patch curl
cd /etc/apk/keys
doas curl -JO https://ayakael.net/api/packages/forge/alpine/key
- name: Repo pull
uses: actions/checkout@v4
with:
fetch-depth: 500
- name: Package build
run: |
doas patch -d / -p1 -i ${{ github.workspace }}/.forgejo/patches/build.patch
build.sh
- name: Package upload
uses: forgejo/upload-artifact@v3
with:
name: package
path: packages
deploy-aarch64:
needs: [build-aarch64]
runs-on: aarch64
container:
image: alpine:latest
env:
CI_ALPINE_REPO: 'https://ayakael.net/api/packages/forge/alpine'
FORGE_REPO_TOKEN: ${{ secrets.FORGE_REPO_TOKEN }}
FORGE_REPO_USER: ${{ vars.FORGE_REPO_USER }}
steps:
- name: Setting up environment
run: apk add nodejs curl findutils git gawk
- name: Repo pull
uses: actions/checkout@v4
- name: Package download
uses: forgejo/download-artifact@v3
- name: Package deployment
run: ${{ github.workspace }}/.forgejo/bin/deploy.sh

View file

@ -0,0 +1,52 @@
on:
pull_request:
types: [ assigned, opened, synchronize, reopened ]
jobs:
build-x86_64:
runs-on: x86_64
container:
image: alpinelinux/alpine-gitlab-ci:latest
env:
CI_PROJECT_DIR: ${{ github.workspace }}
CI_DEBUG_BUILD: ${{ runner.debug }}
CI_MERGE_REQUEST_PROJECT_URL: ${{ github.server_url }}/${{ github.repository }}
CI_MERGE_REQUEST_TARGET_BRANCH_NAME: ${{ github.base_ref }}
steps:
- name: Environment setup
run: |
doas apk add nodejs git patch curl
cd /etc/apk/keys
doas curl -JO https://ayakael.net/api/packages/forge/alpine/key
- name: Repo pull
uses: actions/checkout@v4
with:
fetch-depth: 500
- name: Package build
run: |
doas patch -d / -p1 -i ${{ github.workspace }}/.forgejo/patches/build.patch
build.sh
- name: Package upload
uses: forgejo/upload-artifact@v3
with:
name: package
path: packages
deploy-x86_64:
needs: [build-x86_64]
runs-on: x86_64
container:
image: alpine:latest
env:
CI_ALPINE_REPO: 'https://ayakael.net/api/packages/forge/alpine'
FORGE_REPO_TOKEN: ${{ secrets.FORGE_REPO_TOKEN }}
FORGE_REPO_USER: ${{ vars.FORGE_REPO_USER }}
steps:
- name: Setting up environment
run: apk add nodejs curl findutils git gawk
- name: Repo pull
uses: actions/checkout@v4
- name: Package download
uses: forgejo/download-artifact@v3
- name: Package deployment
run: ${{ github.workspace }}/.forgejo/bin/deploy.sh

View file

@ -0,0 +1,21 @@
on:
pull_request:
types: [ assigned, opened, synchronize, reopened ]
jobs:
lint:
run-name: lint
runs-on: x86_64
container:
image: alpinelinux/apkbuild-lint-tools:latest
env:
CI_PROJECT_DIR: ${{ github.workspace }}
CI_DEBUG_BUILD: ${{ runner.debug }}
CI_MERGE_REQUEST_PROJECT_URL: ${{ github.server_url }}/${{ github.repository }}
CI_MERGE_REQUEST_TARGET_BRANCH_NAME: ${{ github.base_ref }}
steps:
- run: doas apk add nodejs git
- uses: actions/checkout@v4
with:
fetch-depth: 500
- run: lint

View file

@ -1,118 +0,0 @@
stages:
- verify
variables:
GIT_STRATEGY: clone
GIT_DEPTH: "500"
lint:
stage: verify
image: alpinelinux/apkbuild-lint-tools:latest
interruptible: true
script:
- lint
allow_failure: true
only:
- merge_requests
tags:
- docker-alpine
- x86_64
.build:
stage: verify
image: alpinelinux/alpine-gitlab-ci:latest
interruptible: true
script:
- build.sh
artifacts:
paths:
- packages/
- keys/
- logs/
expire_in: 1 day
when: always
only:
- merge_requests
build-x86_64:
extends: .build
artifacts:
name: MR${CI_MERGE_REQUEST_ID}_x86_64
tags:
- docker-alpine
- ci-build
- x86_64
build-x86:
extends: .build
image:
name: alpinelinux/alpine-gitlab-ci:latest-x86
entrypoint: ["linux32", "sh", "-c"]
artifacts:
name: MR${CI_MERGE_REQUEST_ID}_x86
tags:
- docker-alpine
- ci-build
- x86
build-s390x:
extends: .build
artifacts:
name: MR${CI_MERGE_REQUEST_ID}_s390x
tags:
- docker-alpine
- ci-build
- s390x
build-ppc64le:
extends: .build
artifacts:
name: MR${CI_MERGE_REQUEST_ID}_ppc64le
tags:
- docker-alpine
- ci-build
- ppc64le
build-aarch64:
extends: .build
artifacts:
name: MR${CI_MERGE_REQUEST_ID}_aarch64
tags:
- docker-alpine
- ci-build
- aarch64
build-armv7:
extends: .build
image:
name: alpinelinux/alpine-gitlab-ci:latest-armv7
entrypoint: ["linux32", "sh", "-c"]
artifacts:
name: MR${CI_MERGE_REQUEST_ID}_armv7
tags:
- docker-alpine
- ci-build
- armv7
build-armhf:
extends: .build
image:
name: alpinelinux/alpine-gitlab-ci:latest-armhf
entrypoint: ["linux32", "sh", "-c"]
artifacts:
name: MR${CI_MERGE_REQUEST_ID}_armhf
tags:
- docker-alpine
- ci-build
- armhf
build-riscv64-emulated:
extends: .build
when: manual
artifacts:
name: MR${CI_MERGE_REQUEST_ID}_riscv64
tags:
- docker-alpine
- ci-build
- riscv64

View file

@ -1,5 +1,62 @@
Repository with various custom APKBUILDs. # ayaports
Upstream: https://ayakael.net/forge/ayaports
Use at your own risk. While they will likely work, as I built them for my own use, I cannot guarantee that they'll stay up to date or that they won't burn your house down. ## Description
One of these days I'll PR them to Alpine's package repository. This repository contains aports that are not yet merged in the official Alpine
Linux repository or dont adhere to Alpine polices. Packages are automatically
built using CI. Once built, they are deployed to a git-lfs repository, making
them available to apk.
Branches are matched to Alpine releases.
## Repositories
You can browse all the repositories at https://codeberg.org/ayakael/ayaports
Affixed to each repository description is the appropriate link for use in
`/etc/apk/repositories`.
#### Backports
```
https://ayakael.net/api/packages/forge/alpine/v3.16/backports
```
Aports from the official Alpine repositories backported from edge.
#### User
```
https://ayakael.net/api/packages/forge/alpine/v3.16/user
```
Aports that have yet to be (or may never be) upstreamed to the official
aports.
## How to use
Add security key of the repo-apk repository to your /etc/apk/keys:
```shell
cd /etc/apk/keys
wget https://ayakael.net/api/packages/forge/alpine/v3.16/antoine.martin@protonmail.com-5b3109ad.rsa.pub
```
Add repositories that you want to use (see above) to `/etc/apk/repositories`.
## Support
Generally, only the latest branch is kept up-to-date. That said, if an aport
is broken on the latest release due to a dependency incompatibility, it will be
kept up-to-date on the release it last works on.
As these aports are built for my own application, I make no guarantees that
they will work for you.
## Contribution & bug reports
If you wish to contribute to this aports collection, or wish to report a bug,
you can do so on Codeberg here:
https://codeberg.org/ayakael/ayaports/issues
For packages that are in backports, bug reports and merge requests
should be done on Alpine's aports repo instance:
https://gitlab.alpinelinux.org/alpine/aports
## License
This readme, abuilds and support scripts are licensed under MIT License.

View file

@ -1,462 +0,0 @@
# Maintainer: psykose <alice@ayaya.dev>
pkgname=electron
pkgver=21.3.3
pkgrel=3
_chromium=106.0.5249.199
_depot_tools=6fde0fbe9226ae3fc9f5c709adb93249924e5c49
pkgdesc="Electron cross-platform desktop toolkit"
url="https://github.com/electron/electron"
arch="aarch64 x86_64" # same as chromium
license="MIT"
depends="gtk+3.0 so:libudev.so.1 xdg-utils"
makedepends="
alsa-lib-dev
aom-dev
bash
brotli-dev
bsd-compat-headers
bzip2-dev
c-ares-dev
cairo-dev
clang-dev
clang-extra-tools
compiler-rt
cups-dev
curl-dev
dav1d-dev
dbus-glib-dev
eudev-dev
ffmpeg-dev
findutils
flac-dev
flex
freetype-dev
gperf
gtk+3.0-dev
gn
gzip
harfbuzz-dev
hunspell-dev
http-parser-dev
hwids-usb
java-jdk
jpeg-dev
jsoncpp-dev
krb5-dev
lcms2-dev
libarchive-tools
libavif-dev
libbsd-dev
libcap-dev
libevent-dev
libexif-dev
libgcrypt-dev
libjpeg-turbo-dev
libnotify-dev
libusb-dev
libva-dev
libwebp-dev
libxcomposite-dev
libxcursor-dev
libxinerama-dev
libxml2-dev
libxrandr-dev
libxscrnsaver-dev
libxslt-dev
linux-headers
lld
llvm
mesa-dev
minizip-dev
nghttp2-dev
nodejs
npm
nss-dev
opus-dev
pciutils-dev
perl
pipewire-dev
pulseaudio-dev
py3-httplib2
py3-parsing
py3-six
python3
re2-dev
samurai
snappy-dev
speex-dev
sqlite-dev
woff2-dev
xcb-proto
yarn
zlib-dev
"
subpackages="$pkgname-lang $pkgname-dev"
# the lower patches are specific to electron, the top ones are from the equivalent chromium version
source="https://dev.alpinelinux.org/archive/electron/electron-$pkgver.tar.xz
angle-wayland-include.patch
canonicalize-file-name.patch
chromium-VirtualCursor-standard-layout.patch
chromium-revert-drop-of-system-java.patch
chromium-use-alpine-target.patch
credentials-sys-types-header.patch
default-pthread-stacksize.patch
dns-resolver.patch
fix-crashpad.patch
fix-missing-cstdint-include-musl.patch
fix-narrowing-cast.patch
gdbinit.patch
jsoncpp.patch
memory-tagging-arm64.patch
musl-sandbox.patch
musl-tid-caching.patch
musl-v8-monotonic-pthread-cont_timedwait.patch
no-execinfo.patch
no-glibc-version.patch
no-mallinfo.patch
no-res-ninit-nclose.patch
no-stat-redefine.patch
nullptr-t.patch
partition-atfork.patch
py3.11.patch
quiche-arena-size.patch
roll-src-third_party-ffmpeg-102.patch
roll-src-third_party-ffmpeg-106.patch
scoped-file-no-close.patch
temp-failure-retry.patch
wtf-stacksize.patch
icon.patch
python-jinja-3.10.patch
system-node.patch
vector-const.patch
webpack-hash.patch
default.conf
electron.desktop
electron-launcher.sh
"
options="!check suid"
# clang uses much less memory (and this doesn't support gcc)
export CC=clang
export CXX=clang++
# required to find the tools
export AR=llvm-ar
export NM=llvm-nm
export LD=clang++
# less log spam, reproducible
export CFLAGS="${CFLAGS/-g/} -O2 -Wno-unknown-warning-option -Wno-builtin-macro-redefined"
export CXXFLAGS="${CXXFLAGS/-g/} -O2 -Wno-unknown-warning-option -Wno-builtin-macro-redefined"
export CPPFLAGS="$CPPFLAGS -D__DATE__= -D__TIME__= -D__TIMESTAMP__="
_gn_flags() {
echo "$@"
}
# creates a dist tarball that does not need to git clone everything at build time.
snapshot() {
export CHROMIUM_BUILDTOOLS_PATH="$srcdir/src/buildtools"
mkdir -p "$srcdir"
cd "$srcdir"
git clone --branch=$_chromium --depth=1 \
https://chromium.googlesource.com/chromium/src.git
git clone https://github.com/electron/electron.git
(
git clone --depth 1 -b main https://chromium.googlesource.com/chromium/tools/depot_tools.git
cd depot_tools
git fetch --depth 1 origin $_depot_tools
git checkout $_depot_tools
)
export PATH="$PATH:$srcdir/depot_tools"
echo "solutions = [
{
\"name\": \"src/electron\",
\"url\": \"file://$srcdir/electron@v$pkgver\",
\"deps_file\": \"DEPS\",
\"managed\": False,
\"custom_deps\": {
\"src\": None,
},
\"custom_vars\": {},
},
]" > .gclient
python3 depot_tools/gclient.py sync \
--with_branch_heads \
--with_tags \
--nohooks
python3 src/build/landmines.py
python3 src/build/util/lastchange.py -o src/build/util/LASTCHANGE
python3 src/build/util/lastchange.py -s src/third_party/dawn \
--revision src/gpu/webgpu/DAWN_VERSION
python3 src/build/util/lastchange.py -m GPU_LISTS_VERSION \
--revision-id-only --header src/gpu/config/gpu_lists_version.h
python3 src/build/util/lastchange.py -m SKIA_COMMIT_HASH \
-s src/third_party/skia --header src/skia/ext/skia_commit_hash.h
# why?
cp -r electron/patches/ffmpeg src/electron/patches/
python3 electron/script/apply_all_patches.py \
electron/patches/config.json
python3 src/tools/download_optimization_profile.py \
--newest_state=src/chrome/android/profiles/newest.txt \
--local_state=src/chrome/android/profiles/local.txt \
--output_name=src/chrome/android/profiles/afdo.prof \
--gs_url_base=chromeos-prebuilt/afdo-job/llvm
mv src $pkgname-$pkgver
# extra binaries are most likely things we don't want, so nuke them all
for elf in $(scanelf -RA -F "%F" $pkgname-$pkgver); do
rm -f "$elf"
done
msg "generating tarball.. (this takes a while)"
tar -cf $pkgname-$pkgver.tar \
--exclude="ChangeLog*" \
--exclude="testdata/" \
--exclude="test_data/" \
--exclude="android_rust_toolchain/toolchain/" \
--exclude-backups \
--exclude-caches-all \
--exclude-vcs \
$pkgname-$pkgver
xz -T0 -e -9 -vv -k $pkgname-$pkgver.tar
}
prepare() {
default_prepare
git init .
# link to system tools
ln -sfv /usr/bin/clang-format buildtools/linux64/clang-format
mkdir -p third_party/node/linux/node-linux-x64/bin
ln -sfv /usr/bin/node third_party/node/linux/node-linux-x64/bin/node
ln -sfv /usr/bin/java third_party/jdk/current/bin/java
(
cd electron
git init .
git config user.email "example@example.com"
git config user.name "example"
git add LICENSE
git commit -m "init"
git tag "v$pkgver"
# jesus christ what the fuck is wrong with you?
touch .git/packed-refs
yarn install --frozen-lockfile
)
(
cd third_party/node
npm ci
)
local use_system="
brotli
dav1d
ffmpeg
flac
fontconfig
freetype
harfbuzz-ng
icu
jsoncpp
libaom
libavif
libdrm
libevent
libjpeg
libwebp
libxml
libxslt
opus
re2
snappy
woff2
zlib
"
for _lib in $use_system libjpeg_turbo; do
msg "Removing buildscripts for system provided $_lib"
find . -type f -path "*third_party/$_lib/*" \
\! -path "*third_party/$_lib/chromium/*" \
\! -path "*third_party/$_lib/google/*" \
\! -path './base/third_party/icu/*' \
\! -path './third_party/libxml/*' \
\! -path './third_party/pdfium/third_party/freetype/include/pstables.h' \
\! -path './third_party/harfbuzz-ng/utils/hb_scoped.h' \
\! -path './third_party/crashpad/crashpad/third_party/zlib/zlib_crashpad.h' \
\! -regex '.*\.\(gn\|gni\|isolate\|py\)' \
-delete
done
# https://groups.google.com/a/chromium.org/d/topic/chromium-packagers/9JX1N2nf4PU/discussion
touch chrome/test/data/webui/i18n_process_css_test.html
# Use the file at run time instead of effectively compiling it in
sed 's|//third_party/usb_ids/usb.ids|/usr/share/hwdata/usb.ids|g' \
-i services/device/public/cpp/usb/BUILD.gn
python3 build/linux/unbundle/replace_gn_files.py --system-libraries \
$use_system
python3 third_party/libaddressinput/chromium/tools/update-strings.py
# prevent annoying errors when regenerating gni
sed -i 's,^update_readme$,#update_readme,' \
third_party/libvpx/generate_gni.sh
# allow system dependencies in "official builds"
sed -i 's/OFFICIAL_BUILD/GOOGLE_CHROME_BUILD/' \
tools/generate_shim_headers/generate_shim_headers.py
# https://crbug.com/893950
sed -i -e 's/\<xmlMalloc\>/malloc/' -e 's/\<xmlFree\>/free/' \
third_party/blink/renderer/core/xml/*.cc \
third_party/blink/renderer/core/xml/parser/xml_document_parser.cc \
third_party/libxml/chromium/*.cc \
third_party/maldoca/src/maldoca/ole/oss_utils.h
msg "Configuring build"
local gn_config="
import(\"//electron/build/args/release.gn\")
blink_enable_generated_code_formatting=false
chrome_pgo_phase=0
clang_use_chrome_plugins=false
custom_toolchain=\"//build/toolchain/linux/unbundle:default\"
disable_fieldtrial_testing_config=true
enable_hangout_services_extension=true
enable_nacl=false
enable_stripping=false
enable_vr=false
fatal_linker_warnings=false
ffmpeg_branding=\"Chrome\"
host_toolchain=\"//build/toolchain/linux/unbundle:default\"
icu_use_data_file=true
is_cfi=false
is_component_ffmpeg=true
is_debug=false
is_official_build=true
link_pulseaudio=true
proprietary_codecs=true
rtc_link_pipewire=true
rtc_use_pipewire=true
symbol_level=0
treat_warnings_as_errors=false
use_custom_libcxx=false
use_gnome_keyring=false
use_pulseaudio=true
use_sysroot=false
use_system_freetype=true
use_system_harfbuzz=true
use_system_lcms2=true
use_system_libdrm=true
use_system_libjpeg=true
use_system_wayland_scanner=true
use_system_zlib=true
use_vaapi=true
"
gn gen out/Release \
--args="$(echo $gn_config)" \
--export-compile-commands
}
build() {
ninja -C out/Release \
electron_dist_zip \
node_gypi_headers \
node_version_header \
tar_headers
}
package() {
mkdir -p "$pkgdir"/usr/lib/electron "$pkgdir"/usr/bin
bsdtar -xf out/Release/dist.zip -C "$pkgdir"/usr/lib/electron
chmod u+s "$pkgdir"/usr/lib/electron/chrome-sandbox
install -Dm755 "$srcdir"/electron-launcher.sh "$pkgdir"/usr/bin/electron
install -Dm755 "$srcdir"/default.conf "$pkgdir"/etc/electron/default.conf
mkdir -p "$pkgdir"/usr/include/electron
mv -v "$builddir"/out/Release/gen/node_headers "$pkgdir"/usr/include/electron
ln -sv /usr/include/electron/node_headers/include/node "$pkgdir"/usr/include/electron/node
mkdir -p "$pkgdir"/usr/include/electron/node_headers/include/nan
cp -v "$builddir"/third_party/nan/*.h "$pkgdir"/usr/include/electron/node_headers/include/nan
ln -sv /usr/include/electron/node_headers/include/nan "$pkgdir"/usr/include/electron/nan
install -Dm644 electron/default_app/icon.png \
"$pkgdir"/usr/share/icons/hicolor/1024x1024/apps/electron.png
install -Dm644 "$srcdir"/electron.desktop \
-t "$pkgdir"/usr/share/applications/
}
lang() {
pkgdesc="$pkgdesc (translations)"
install_if="$pkgname=$pkgver-r$pkgrel lang"
mkdir -p "$subpkgdir"/usr/lib/electron/locales
mv "$pkgdir"/usr/lib/electron/locales/*.pak \
"$subpkgdir"/usr/lib/electron/locales
mv "$subpkgdir"/usr/lib/electron/locales/en-US.pak \
"$pkgdir"/usr/lib/electron/locales
}
sha512sums="
1d21e74875ade836625c28d8d9351b41d2776def248193e9c82d4cd50375e9e9b2f7c40026673fe2a191a936f05c3fe639b0423964356ad678f41545aceede3c electron-21.3.3.tar.xz
f19ba0c0f542115e6f53019659df256471e811a23d2f37569c9d4dfa265c0c1ace3e62c74d7507f82e6b7b4152c704e651810a00616f8f531592b14bb2af01d9 angle-wayland-include.patch
252b37a2ecc5e7a25385943045f426dc2e30991b28d206ceaff1be7fd8ffeeb024310a8fca6b3e69a4b1c57db535d51c570935351053525f393682d5ecd0f9a9 canonicalize-file-name.patch
ac0a80174f95d733f33ddc06fc88cdcf7db0973378c28d8544dc9c19e2dabeac47f91c99b3e7384f650b3405554a9e222543f0860b6acc407c078a8c9180d727 chromium-VirtualCursor-standard-layout.patch
c4654d5b23c6f5d9502507e534fe1951d6749c62251e49b6adfe10d1569431e7f7a5a6fa5ff09ec30984415ced27a5e20985df8c91295de34af3c84557fa5b91 chromium-revert-drop-of-system-java.patch
d2b5b0396173367fcf1804aaee8e7fbefce9c63ac2a91c3a1ede759cb85e567317a57e4d4f82d3ca021682fb7856e15c074276a03eda946d9b28e1cb706b07ad chromium-use-alpine-target.patch
ec04bf43278a19d4bb090eddd636ad7093c7e757cb2ffa1875971e0709761174790e109b9be03a8e299d4077a87fbd8dabd301b8754bb6fe53c055396e8af556 credentials-sys-types-header.patch
4ab8261bf95547b10df44e4d528c06a64c33602c10a1e09d531190dc8947ba6ef9e69145405b801cd364707570f791fee6d93e3bf5d57831f5a85212ddf01729 default-pthread-stacksize.patch
f011f66e5aae5a6f0d440492ee9400878b47b66214c1bc8dc1477fdd07ad0a508cdbb74415e55e27085c4e61e112e7c2ae2edfa217f2fb387e13097b24cb17b1 dns-resolver.patch
9d1edb1e0624ee61825e3af23fbb8c5dbc09d2b92d7769d19f8ca618edae8de8a3e051fedf4ad92c230e1373dc8495922c46971aef93a580c04ad80bc33516c0 fix-crashpad.patch
8bebf4a9d5225c6e47edc5b07c4b97be24a45cc221f49632836915ceeb4ecb69b7f79a31ea7f82171cde3443f45fec541f409892542cf1014e81aa6acd01566d fix-missing-cstdint-include-musl.patch
efe97b9dd2ec5965fa0cdf1b2a3c01253835c2df710da7ea105c4ce008c11f9caaf8b4321736a2b91f06d8d61972c08e225b16509dc05176a2c39337688ad5b9 fix-narrowing-cast.patch
cf73cbe5bf19d6a22157fb7aafb43f326885e852fc6292728f4ed1cd145d07ba5af51b6ec808095136cd406478aaa427ee1b9611c855fbd87976e1a91e1609bd gdbinit.patch
fa2637f92f851614347e296fbab744af2c5d7edcbb444aeb4a5d3182a8ec2549593d75e717d7e78e9b2a7257e693b48fc88c149c1591052d7ae802f4fda0a775 jsoncpp.patch
e0afb7066c2cb41aa461feb9e45e571517229deab9d06186490b527783a7ba826a4d67d3a14a33a164eea64fa561eb5b93a1d4dfd0acc2e7a9eb038e6ee273db memory-tagging-arm64.patch
fdf8ba7badbd5b61d415ad9d49c66b6ef0a6a40ec95a47e13af48711fe1bd3a5574e987929a3c486cdd02c239863b8517e7f834cecd30f156479e43a9441a18e musl-sandbox.patch
85c2842a251a3f8aa59c701ca5f2ce2f5d49b5c7e4773d5387dc597447fb47c9d876f5fb308576686c9a8abc7e35cfc172b6cdfb9f5f1dc1509329e3590b38d8 musl-tid-caching.patch
d2825aa9525fcbb53791f0ef2502c0f444a9d668f09db6ae4987b94dc4d0e6f1cf58a6e9e772ab11c896a469ca32242feb3ad9c9dbb20df9316cd74151ab0ec9 musl-v8-monotonic-pthread-cont_timedwait.patch
ebd5cf28277853dc5e984961c370ab4a6331488ae7cff45083fea0470262f56486664bc9bd7947fdd796c8635e479633c4d08cfd89270c0310f3ec21cc6642d2 no-execinfo.patch
f0bf97a80e663041e33cb0468fd8c47c5f351d6de61059ce1f359a813c40db8e247eaef294c3f562c0a8204e4f1992a918f1d879b1da9891027500e21f482b79 no-glibc-version.patch
10ae0f74a4c1db899b571508100af63e5af8d0f7c41a37fc9b7987cbf9f27f4c55894c02d6820957d7522a528929059f562f96c2f05fd6509f60c6c71d9d8256 no-mallinfo.patch
a5aa82c30402773903db1d3876208132fdef175f56ebc0ce1ee4c104a98d498d709c5565c4381736c04c238203b3c8a9cd7a5b5e69876f6afb65d7fc48df23d0 no-res-ninit-nclose.patch
2c0d7239728d98c0564ad7b81d6d243e58e56de58a21357fa30c39e333fc29c1aa98529c6e1b6fa7694169b513391ca27fa542f69b483bbde644cc2ed739bbdd no-stat-redefine.patch
5e9f6279698195467e3b506cea9be0d97ec2d970672b1b12d3d7880eec4f6f53b8f92942dc3fc6738b02889382534ce0f4310a1f94b33e21f8fbc70b85640b81 nullptr-t.patch
ad563e29ac7d83c203f5af966c4ed3ebdeb5c997835a45fb28ecde08dde5231d0a775fb413f44867af28724504c42316b27d5a6aaa602057642dcbdf7ec20a7a partition-atfork.patch
65aa0c7c9909a803e59b88ecb6d79c4db491079f3324f7bd02ee485a7bb7a81674b8f0591dab766c97070a401116db7f629fee36af7416a0fefc38f4ae0ad13d py3.11.patch
083ed731e0a3788f2cb04a3035022fbb3e4db99eba01516ea233ea3229f7d898943d8115463a48655ac83eb3cc7a48aceb8bf17c68930a5a1d83b1af95dfade8 quiche-arena-size.patch
128ec0fd14349e065c8bb0910d53cbea7423182a06e06f7b7765f3cba1e5ba5e7a8bccbcdab079335b5235abb7bf0d46dee21ecc8c221be7e1c5c6d9795f958e roll-src-third_party-ffmpeg-102.patch
1063e68f477645914ddf5641eabdd3eaf744e569635d645ea860c3546f67a7ef91eded99331378ee75071b67ecfe9bd4be307bd3da7c7ad93509e2bd634d1ec3 roll-src-third_party-ffmpeg-106.patch
3cf36b269e9fcfa74975d267bbf31bef68b533a51672e5ed81ae511a70f28a45206168af370961a3dab5695ddaff41cb8839c8c2fa53f22a9f3c88d207cb2996 scoped-file-no-close.patch
f2f7673f9e793dfbf4456ff8c2be785ea551c36bd512572245d04bf44da08b0133e98d85a1ffd51158009754c83121cad48d755cbc153735df2d2e73233856c0 temp-failure-retry.patch
1d4e8c6e65205e6b72af47b9a2fa6f96aaada9b7d5a74f4e11a345a885df3078b523d02aaf8e9dac3aa30d72bbbd07cd6dc7edcf44fb9ae57a7f81251c398f65 wtf-stacksize.patch
905565c10f5e5600e7d4db965c892cc45009a258e9995da958974d838ace469e1db1019195307e8807860d5b55ba6bfeea478b1f39a9b99e82c619b2816a1a22 icon.patch
e05180199ee1d559e4e577cedd3e589844ecf40d98a86321bf1bea5607b02eeb5feb486deddae40e1005b644550331f6b8500177aa7e79bcb3750d3c1ceb76c3 python-jinja-3.10.patch
26a8e4040e69f335a7104f42d012b9d933a40985b33a7be02add27a801c097c5a2be4c6e69faf9175ce8945210ae4c5592ecad2123ccff2beee5473194a765e3 system-node.patch
71571b15cf8bd6259b7fd22bea0e46b64890f3db776365de33fe539f26ce9ef99459e05c3dde9434c3657225bc67160abc915acd93033cb487c770c6a2a5975f vector-const.patch
2aa340854316f1284217c0ca17cbf44953684ad6c7da90815117df30928612eb9fb9ffb734b948dfc309cd25d1a67cd57f77aac2d052a3dd9aca07a3a58cbb30 webpack-hash.patch
08460b15037511a4e5469ceac6ae5dd4db4c8cb87c129aaaf40ba58b16c60b8a307ffdd85805efead235758abed09ec31db1ef4cf9159f7b9acdcee3031bc96c default.conf
191559fc7aa1ea0353c6fb0cc321ee1d5803a0e44848c8be941cfab96277b0de6a59962d373e2a2a1686c8f9be2bcf2d2f33706759a339a959e297d3f7fda463 electron.desktop
03750694e5e0b66f084c6e43135e60be15abb059e23486346ee4352dcc236984f2f35467b47f2b2ad46c98c22091cc2b978de8e73680febadba169d960f13f9f electron-launcher.sh
"

View file

@ -1,39 +0,0 @@
Patch-Source: https://github.com/archlinux/svntogit-packages/blob/a353833a5a731abfaa465b658f61894a516aa49b/trunk/angle-wayland-include-protocol.patch
diff -upr third_party/angle.orig/BUILD.gn third_party/angle/BUILD.gn
--- a/third_party/angle.orig/BUILD.gn 2022-08-17 19:38:11.000000000 +0000
+++ b/third_party/angle/BUILD.gn 2022-08-18 11:04:09.061751111 +0000
@@ -489,6 +489,12 @@ config("angle_vulkan_wayland_config") {
if (angle_enable_vulkan && angle_use_wayland &&
defined(vulkan_wayland_include_dirs)) {
include_dirs = vulkan_wayland_include_dirs
+ } else if (angle_enable_vulkan && angle_use_wayland) {
+ include_dirs = [
+ "$wayland_gn_dir/src/src",
+ "$wayland_gn_dir/include/src",
+ "$wayland_gn_dir/include/protocol",
+ ]
}
}
@@ -1073,6 +1079,7 @@ if (angle_use_wayland) {
include_dirs = [
"$wayland_dir/egl",
"$wayland_dir/src",
+ "$wayland_gn_dir/include/protocol",
]
}
diff -upr third_party/angle.orig/src/third_party/volk/BUILD.gn third_party/angle/src/third_party/volk/BUILD.gn
--- a/third_party/angle.orig/src/third_party/volk/BUILD.gn 2022-08-17 19:38:12.000000000 +0000
+++ b/third_party/angle/src/third_party/volk/BUILD.gn 2022-08-18 11:04:36.499828006 +0000
@@ -21,6 +21,9 @@ source_set("volk") {
configs += [ "$angle_root:angle_no_cfi_icall" ]
public_deps = [ "$angle_vulkan_headers_dir:vulkan_headers" ]
if (angle_use_wayland) {
- include_dirs = [ "$wayland_dir/src" ]
+ include_dirs = [
+ "$wayland_dir/src",
+ "$wayland_gn_dir/include/protocol",
+ ]
}
}

View file

@ -1,13 +0,0 @@
no canonicalize_file_name on musl. funnily, the file using this says this is
not portable, but avoids the nonportability of realpath(path, NULL);
--- a/third_party/nasm/config/config-linux.h
+++ b/third_party/nasm/config/config-linux.h
@@ -139,7 +139,7 @@
#define HAVE_ACCESS 1
/* Define to 1 if you have the `canonicalize_file_name' function. */
-#define HAVE_CANONICALIZE_FILE_NAME 1
+/* #define HAVE_CANONICALIZE_FILE_NAME 1 */
/* Define to 1 if you have the `cpu_to_le16' intrinsic function. */
/* #undef HAVE_CPU_TO_LE16 */

View file

@ -1,217 +0,0 @@
needed for libstdc++11 + clang only
diff --git a/sql/recover_module/btree.cc b/sql/recover_module/btree.cc
index 9ecaafe..839318a 100644
--- a/sql/recover_module/btree.cc
+++ b/sql/recover_module/btree.cc
@@ -135,16 +135,25 @@
"Move the destructor to the .cc file if it's non-trival");
#endif // !DCHECK_IS_ON()
-LeafPageDecoder::LeafPageDecoder(DatabasePageReader* db_reader) noexcept
- : page_id_(db_reader->page_id()),
- db_reader_(db_reader),
- cell_count_(ComputeCellCount(db_reader)),
- next_read_index_(0),
- last_record_size_(0) {
+void LeafPageDecoder::Initialize(DatabasePageReader* db_reader) {
+ DCHECK(db_reader);
DCHECK(IsOnValidPage(db_reader));
+ page_id_ = db_reader->page_id();
+ db_reader_ = db_reader;
+ cell_count_ = ComputeCellCount(db_reader);
+ next_read_index_ = 0;
+ last_record_size_ = 0;
DCHECK(DatabasePageReader::IsValidPageId(page_id_));
}
+void LeafPageDecoder::Reset() {
+ db_reader_ = nullptr;
+ page_id_ = 0;
+ cell_count_ = 0;
+ next_read_index_ = 0;
+ last_record_size_ = 0;
+}
+
bool LeafPageDecoder::TryAdvance() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(CanAdvance());
diff --git a/sql/recover_module/btree.h b/sql/recover_module/btree.h
index d76d076..33114b0 100644
--- a/sql/recover_module/btree.h
+++ b/sql/recover_module/btree.h
@@ -102,7 +102,7 @@
//
// |db_reader| must have been used to read an inner page of a table B-tree.
// |db_reader| must outlive this instance.
- explicit LeafPageDecoder(DatabasePageReader* db_reader) noexcept;
+ explicit LeafPageDecoder() noexcept = default;
~LeafPageDecoder() noexcept = default;
LeafPageDecoder(const LeafPageDecoder&) = delete;
@@ -150,6 +150,15 @@
// read as long as CanAdvance() returns true.
bool TryAdvance();
+ // Initialize with DatabasePageReader
+ void Initialize(DatabasePageReader* db_reader);
+
+ // Reset internal DatabasePageReader
+ void Reset();
+
+ // True if DatabasePageReader is valid
+ bool IsValid() { return (db_reader_ != nullptr); }
+
// True if the given reader may point to an inner page in a table B-tree.
//
// The last ReadPage() call on |db_reader| must have succeeded.
@@ -163,14 +172,14 @@
static int ComputeCellCount(DatabasePageReader* db_reader);
// The number of the B-tree page this reader is reading.
- const int64_t page_id_;
+ int64_t page_id_;
// Used to read the tree page.
//
// Raw pointer usage is acceptable because this instance's owner is expected
// to ensure that the DatabasePageReader outlives this.
- DatabasePageReader* const db_reader_;
+ DatabasePageReader* db_reader_;
// Caches the ComputeCellCount() value for this reader's page.
- const int cell_count_ = ComputeCellCount(db_reader_);
+ int cell_count_;
// The reader's cursor state.
//
diff --git a/sql/recover_module/cursor.cc b/sql/recover_module/cursor.cc
index 0029ff9..42548bc 100644
--- a/sql/recover_module/cursor.cc
+++ b/sql/recover_module/cursor.cc
@@ -26,7 +26,7 @@
int VirtualCursor::First() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
inner_decoders_.clear();
- leaf_decoder_ = nullptr;
+ leaf_decoder_.Reset();
AppendPageDecoder(table_->root_page_id());
return Next();
@@ -36,18 +36,18 @@
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
record_reader_.Reset();
- while (!inner_decoders_.empty() || leaf_decoder_.get()) {
- if (leaf_decoder_.get()) {
- if (!leaf_decoder_->CanAdvance()) {
+ while (!inner_decoders_.empty() || leaf_decoder_.IsValid()) {
+ if (leaf_decoder_.IsValid()) {
+ if (!leaf_decoder_.CanAdvance()) {
// The leaf has been exhausted. Remove it from the DFS stack.
- leaf_decoder_ = nullptr;
+ leaf_decoder_.Reset();
continue;
}
- if (!leaf_decoder_->TryAdvance())
+ if (!leaf_decoder_.TryAdvance())
continue;
- if (!payload_reader_.Initialize(leaf_decoder_->last_record_size(),
- leaf_decoder_->last_record_offset())) {
+ if (!payload_reader_.Initialize(leaf_decoder_.last_record_size(),
+ leaf_decoder_.last_record_offset())) {
continue;
}
if (!record_reader_.Initialize())
@@ -99,13 +99,13 @@
int64_t VirtualCursor::RowId() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(record_reader_.IsInitialized());
- DCHECK(leaf_decoder_.get());
- return leaf_decoder_->last_record_rowid();
+ DCHECK(leaf_decoder_.IsValid());
+ return leaf_decoder_.last_record_rowid();
}
void VirtualCursor::AppendPageDecoder(int page_id) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
- DCHECK(leaf_decoder_.get() == nullptr)
+ DCHECK(!leaf_decoder_.IsValid())
<< __func__
<< " must only be called when the current path has no leaf decoder";
@@ -113,7 +113,7 @@
return;
if (LeafPageDecoder::IsOnValidPage(&db_reader_)) {
- leaf_decoder_ = std::make_unique<LeafPageDecoder>(&db_reader_);
+ leaf_decoder_.Initialize(&db_reader_);
return;
}
diff --git a/sql/recover_module/cursor.h b/sql/recover_module/cursor.h
index afcd690..b15c31d 100644
--- a/sql/recover_module/cursor.h
+++ b/sql/recover_module/cursor.h
@@ -129,7 +129,7 @@
std::vector<std::unique_ptr<InnerPageDecoder>> inner_decoders_;
// Decodes the leaf page containing records.
- std::unique_ptr<LeafPageDecoder> leaf_decoder_;
+ LeafPageDecoder leaf_decoder_;
SEQUENCE_CHECKER(sequence_checker_);
};
diff --git a/sql/recover_module/pager.cc b/sql/recover_module/pager.cc
index 58e75de..5fe9620 100644
--- a/sql/recover_module/pager.cc
+++ b/sql/recover_module/pager.cc
@@ -23,8 +23,7 @@
"ints are not appropriate for representing page IDs");
DatabasePageReader::DatabasePageReader(VirtualTable* table)
- : page_data_(std::make_unique<uint8_t[]>(table->page_size())),
- table_(table) {
+ : page_data_(), table_(table) {
DCHECK(table != nullptr);
DCHECK(IsValidPageSize(table->page_size()));
}
@@ -57,8 +56,8 @@
std::numeric_limits<int64_t>::max(),
"The |read_offset| computation above may overflow");
- int sqlite_status =
- RawRead(sqlite_file, read_size, read_offset, page_data_.get());
+ int sqlite_status = RawRead(sqlite_file, read_size, read_offset,
+ const_cast<uint8_t*>(page_data_.data()));
// |page_id_| needs to be set to kInvalidPageId if the read failed.
// Otherwise, future ReadPage() calls with the previous |page_id_| value
diff --git a/sql/recover_module/pager.h b/sql/recover_module/pager.h
index 0e388ddc..99314e3 100644
--- a/sql/recover_module/pager.h
+++ b/sql/recover_module/pager.h
@@ -5,6 +5,7 @@
#ifndef SQL_RECOVER_MODULE_PAGER_H_
#define SQL_RECOVER_MODULE_PAGER_H_
+#include <array>
#include <cstdint>
#include <memory>
@@ -70,7 +71,7 @@
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK_NE(page_id_, kInvalidPageId)
<< "Successful ReadPage() required before accessing pager state";
- return page_data_.get();
+ return page_data_.data();
}
// The number of bytes in the page read by the last ReadPage() call.
@@ -137,7 +138,7 @@
int page_id_ = kInvalidPageId;
// Stores the bytes of the last page successfully read by ReadPage().
// The content is undefined if the last call to ReadPage() did not succeed.
- const std::unique_ptr<uint8_t[]> page_data_;
+ const std::array<uint8_t, kMaxPageSize> page_data_;
// Raw pointer usage is acceptable because this instance's owner is expected
// to ensure that the VirtualTable outlives this.
VirtualTable* const table_;

View file

@ -1,15 +0,0 @@
This was dropped for some reason in 6951c37cecd05979b232a39e5c10e6346a0f74ef
--- a/third_party/closure_compiler/compiler.py 2021-05-20 04:17:53.000000000 +0200
+++ b/third_party/closure_compiler/compiler.py 2021-05-20 04:17:53.000000000 +0200
@@ -13,8 +13,9 @@
_CURRENT_DIR = os.path.join(os.path.dirname(__file__))
-_JAVA_PATH = os.path.join(_CURRENT_DIR, "..", "jdk", "current", "bin", "java")
-assert os.path.isfile(_JAVA_PATH), "java only allowed in android builds"
+_JAVA_BIN = "java"
+_JDK_PATH = os.path.join(_CURRENT_DIR, "..", "jdk", "current", "bin", "java")
+_JAVA_PATH = _JDK_PATH if os.path.isfile(_JDK_PATH) else _JAVA_BIN
class Compiler(object):
"""Runs the Closure compiler on given source files to typecheck them

View file

@ -1,13 +0,0 @@
--- a/build/config/compiler/BUILD.gn
+++ b/build/config/compiler/BUILD.gn
@@ -917,8 +917,8 @@
} else if (current_cpu == "arm64") {
if (is_clang && !is_android && !is_nacl && !is_fuchsia &&
!(is_chromeos_lacros && is_chromeos_device)) {
- cflags += [ "--target=aarch64-linux-gnu" ]
- ldflags += [ "--target=aarch64-linux-gnu" ]
+ cflags += [ "--target=aarch64-alpine-linux-musl" ]
+ ldflags += [ "--target=aarch64-alpine-linux-musl" ]
}
if (is_android) {
# Outline atomics crash on Exynos 9810. http://crbug.com/1272795

View file

@ -1,11 +0,0 @@
--- a/sandbox/linux/services/credentials.h
+++ b/sandbox/linux/services/credentials.h
@@ -14,6 +14,8 @@
#include <string>
#include <vector>
+#include <sys/types.h>
+
#include "sandbox/linux/system_headers/capability.h"
#include "sandbox/sandbox_export.h"

View file

@ -1,45 +0,0 @@
--- a/base/threading/platform_thread_linux.cc
+++ b/base/threading/platform_thread_linux.cc
@@ -186,7 +186,8 @@
size_t GetDefaultThreadStackSize(const pthread_attr_t& attributes) {
#if !defined(THREAD_SANITIZER)
- return 0;
+ // use 2mb to avoid running out of space. This is what android uses
+ return 2 * (1 << 20);
#else
// ThreadSanitizer bloats the stack heavily. Evidence has been that the
// default stack size isn't enough for some browser tests.
--- a/base/threading/platform_thread_unittest.cc
+++ b/base/threading/platform_thread_unittest.cc
@@ -420,7 +420,7 @@
((BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS)) && \
!defined(THREAD_SANITIZER)) || \
(BUILDFLAG(IS_ANDROID) && !defined(ADDRESS_SANITIZER))
- EXPECT_EQ(0u, stack_size);
+ EXPECT_EQ(2u << 20, stack_size);
#else
EXPECT_GT(stack_size, 0u);
EXPECT_LT(stack_size, 20u * (1 << 20));
--- a/chrome/browser/shutdown_signal_handlers_posix.cc
+++ b/chrome/browser/shutdown_signal_handlers_posix.cc
@@ -187,11 +187,19 @@
g_shutdown_pipe_read_fd = pipefd[0];
g_shutdown_pipe_write_fd = pipefd[1];
#if !defined(ADDRESS_SANITIZER)
+# if defined(__GLIBC__)
const size_t kShutdownDetectorThreadStackSize = PTHREAD_STACK_MIN * 2;
+# else
+ const size_t kShutdownDetectorThreadStackSize = PTHREAD_STACK_MIN * 2 * 8; // match up musls 2k PTHREAD_STACK_MIN with glibcs 16k
+# endif
#else
+# if defined(__GLIBC__)
// ASan instrumentation bloats the stack frames, so we need to increase the
// stack size to avoid hitting the guard page.
const size_t kShutdownDetectorThreadStackSize = PTHREAD_STACK_MIN * 4;
+# else
+ const size_t kShutdownDetectorThreadStackSize = PTHREAD_STACK_MIN * 4 * 8; // match up musls 2k PTHREAD_STACK_MIN with glibcs 16k
+# endif
#endif
ShutdownDetector* detector = new ShutdownDetector(
g_shutdown_pipe_read_fd, std::move(shutdown_callback), task_runner);

View file

@ -1,5 +0,0 @@
# Default settings for electron. This file is sourced by /bin/sh from
# the electron launcher.
# Options to pass to electron.
ELECTRON_FLAGS="--ozone-platform-hint=auto"

View file

@ -1,36 +0,0 @@
--- a/net/dns/host_resolver_manager.cc
+++ b/net/dns/host_resolver_manager.cc
@@ -3014,8 +3014,7 @@
NetworkChangeNotifier::AddConnectionTypeObserver(this);
if (system_dns_config_notifier_)
system_dns_config_notifier_->AddObserver(this);
-#if BUILDFLAG(IS_POSIX) && !BUILDFLAG(IS_APPLE) && !BUILDFLAG(IS_OPENBSD) && \
- !BUILDFLAG(IS_ANDROID)
+#if defined(__GLIBC__)
EnsureDnsReloaderInit();
#endif
--- a/net/dns/dns_reloader.cc
+++ b/net/dns/dns_reloader.cc
@@ -6,8 +6,7 @@
#include "build/build_config.h"
-#if BUILDFLAG(IS_POSIX) && !BUILDFLAG(IS_APPLE) && !BUILDFLAG(IS_OPENBSD) && \
- !BUILDFLAG(IS_ANDROID) && !BUILDFLAG(IS_FUCHSIA)
+#if defined(__GLIBC__)
#include <resolv.h>
--- a/net/dns/host_resolver_proc.cc
+++ b/net/dns/host_resolver_proc.cc
@@ -176,8 +176,7 @@
base::ScopedBlockingCall scoped_blocking_call(FROM_HERE,
base::BlockingType::WILL_BLOCK);
-#if BUILDFLAG(IS_POSIX) && \
- !(BUILDFLAG(IS_APPLE) || BUILDFLAG(IS_OPENBSD) || BUILDFLAG(IS_ANDROID))
+#if defined(__GLIBC__)
DnsReloaderMaybeReload();
#endif
absl::optional<AddressInfo> ai;

View file

@ -1,14 +0,0 @@
#!/bin/sh
# Allow the user to override command-line flags
# This is based on Debian's chromium-browser package, and is intended
# to be consistent with Debian.
for f in /etc/electron/*.conf; do
[ -f ${f} ] && . "${f}"
done
# Prefer user defined ELECTRON_USER_FLAGS (from env) over system
# default ELECTRON_FLAGS (from /etc/electron/default.conf).
ELECTRON_FLAGS=${ELECTRON_USER_FLAGS:-"$ELECTRON_FLAGS"}
exec "/usr/lib/electron/electron" "$@" ${ELECTRON_FLAGS}

View file

@ -1,8 +0,0 @@
[Desktop Entry]
Type=Application
Name=electron21
Icon=electron
Exec=electron %u
Categories=Development;GTK;
StartupNotify=true
StartupWMClass=electron

View file

@ -1,31 +0,0 @@
--- a/third_party/crashpad/crashpad/client/BUILD.gn
+++ b/third_party/crashpad/crashpad/client/BUILD.gn
@@ -81,6 +81,7 @@
deps = [
":common",
"$mini_chromium_source_parent:chromeos_buildflags",
+ "../util",
]
if (crashpad_is_win) {
--- a/third_party/crashpad/crashpad/util/linux/ptracer.cc
+++ b/third_party/crashpad/crashpad/util/linux/ptracer.cc
@@ -26,6 +26,7 @@
#if defined(ARCH_CPU_X86_FAMILY)
#include <asm/ldt.h>
+#include <asm/ptrace-abi.h>
#endif
namespace crashpad {
--- a/third_party/crashpad/crashpad/util/linux/thread_info.h
+++ b/third_party/crashpad/crashpad/util/linux/thread_info.h
@@ -273,7 +273,7 @@ union FloatContext {
"Size mismatch");
#elif defined(ARCH_CPU_ARMEL)
static_assert(sizeof(f32_t::fpregs) == sizeof(user_fpregs), "Size mismatch");
-#if !defined(__GLIBC__)
+#if defined(OS_ANDROID)
static_assert(sizeof(f32_t::vfp) == sizeof(user_vfp), "Size mismatch");
#endif
#elif defined(ARCH_CPU_ARM64)

View file

@ -1,11 +0,0 @@
Patch-Source: https://github.com/void-linux/void-packages/blob/378db3cf5087877588aebaaa8ca3c9d94dfb54e0/srcpkgs/chromium/patches/fix-missing-cstdint-include-musl.patch
--- a/net/third_party/quiche/src/quiche/http2/adapter/window_manager.h
+++ b/net/third_party/quiche/src/quiche/http2/adapter/window_manager.h
@@ -3,6 +3,7 @@
#include <stddef.h>
+#include <cstdint>
#include <functional>
#include "common/platform/api/quiche_export.h"

View file

@ -1,44 +0,0 @@
--- a/base/files/file_util_linux.cc
+++ b/base/files/file_util_linux.cc
@@ -30,7 +30,7 @@
case EXT2_SUPER_MAGIC: // Also ext3 and ext4
case MSDOS_SUPER_MAGIC:
case REISERFS_SUPER_MAGIC:
- case static_cast<int>(BTRFS_SUPER_MAGIC):
+ case BTRFS_SUPER_MAGIC:
case 0x5346544E: // NTFS
case 0x58465342: // XFS
case 0x3153464A: // JFS
@@ -40,14 +40,14 @@
*type = FILE_SYSTEM_NFS;
break;
case SMB_SUPER_MAGIC:
- case static_cast<int>(0xFF534D42): // CIFS
+ case 0xFF534D42: // CIFS
*type = FILE_SYSTEM_SMB;
break;
case CODA_SUPER_MAGIC:
*type = FILE_SYSTEM_CODA;
break;
- case static_cast<int>(HUGETLBFS_MAGIC):
- case static_cast<int>(RAMFS_MAGIC):
+ case HUGETLBFS_MAGIC:
+ case RAMFS_MAGIC:
case TMPFS_MAGIC:
*type = FILE_SYSTEM_MEMORY;
break;
--- a/base/system/sys_info_posix.cc
+++ b/base/system/sys_info_posix.cc
@@ -100,10 +100,10 @@
if (HANDLE_EINTR(statfs(path.value().c_str(), &stats)) != 0)
return false;
switch (stats.f_type) {
case TMPFS_MAGIC:
- case static_cast<int>(HUGETLBFS_MAGIC):
- case static_cast<int>(RAMFS_MAGIC):
+ case HUGETLBFS_MAGIC:
+ case RAMFS_MAGIC:
return true;
}
return false;

View file

@ -1,21 +0,0 @@
--- a/tools/gdb/gdbinit
+++ b/tools/gdb/gdbinit
@@ -50,17 +50,7 @@
def set_src_dir(compile_dir):
global src_dir
- git = subprocess.Popen(
- ['git', '-C', compile_dir, 'rev-parse', '--show-toplevel'],
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- src_dir, _ = git.communicate()
- if git.returncode:
- return
- if isinstance(src_dir, str):
- src_dir = src_dir.rstrip()
- else:
- src_dir = src_dir.decode('utf-8').rstrip()
+ src_dir = os.path.abspath(os.getcwd())
load_libcxx_pretty_printers(src_dir)

View file

@ -1,21 +0,0 @@
--- a/electron/default_app/default_app.ts
+++ b/electron/default_app/default_app.ts
@@ -60,7 +60,7 @@
};
if (process.platform === 'linux') {
- options.icon = path.join(__dirname, 'icon.png');
+ options.icon = '/usr/share/icons/hicolor/1024x1024/apps/electron.png';
}
mainWindow = new BrowserWindow(options);
--- a/electron/filenames.gni
+++ b/electron/filenames.gni
@@ -6,7 +6,6 @@
]
default_app_static_sources = [
- "default_app/icon.png",
"default_app/index.html",
"default_app/package.json",
"default_app/styles.css",

View file

@ -1,39 +0,0 @@
Patch-Source: https://github.com/archlinux/svntogit-packages/blob/bf2401407df5bcc938382eb03748fbef41e41c89/trunk/unbundle-jsoncpp-avoid-CFI-faults-with-is_cfi-true.patch
From ed8d931e35f81d8566835a579caf7d61368f85b7 Mon Sep 17 00:00:00 2001
From: Evangelos Foutras <evangelos@foutrelis.com>
Date: Tue, 27 Sep 2022 22:20:41 +0000
Subject: [PATCH] unbundle/jsoncpp: avoid CFI faults with is_cfi=true
Ensure jsoncpp symbols have public visibility and are thus excluded from
CFI checks and whole-program optimization. This is achieved by defining
JSON_DLL_BUILD which in turn causes json/config.h to define JSON_API as
__attribute__((visibility("default"))). The latter macro is used to tag
jsoncpp classes and namespace functions throughout jsoncpp's headers.
BUG=1365218
Change-Id: I56277737b7d9ecaeb5e17c8d21a2e55f3d5d5bc9
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/3919652
Reviewed-by: Thomas Anderson <thomasanderson@chromium.org>
Commit-Queue: Thomas Anderson <thomasanderson@chromium.org>
Cr-Commit-Position: refs/heads/main@{#1052077}
---
build/linux/unbundle/jsoncpp.gn | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/build/linux/unbundle/jsoncpp.gn b/build/linux/unbundle/jsoncpp.gn
index 544f9d13c9..e84a0ef27a 100644
--- a/build/linux/unbundle/jsoncpp.gn
+++ b/build/linux/unbundle/jsoncpp.gn
@@ -3,6 +3,11 @@ import("//build/shim_headers.gni")
pkg_config("jsoncpp_config") {
packages = [ "jsoncpp" ]
+
+ # Defining JSON_DLL_BUILD applies public visibility to jsoncpp classes
+ # thus deactivating CFI checks for them. This avoids CFI violations in
+ # virtual calls to system jsoncpp library (https://crbug.com/1365218).
+ defines = [ "JSON_DLL_BUILD" ]
}
shim_headers("jsoncpp_shim") {

View file

@ -1,18 +0,0 @@
--- a/base/allocator/partition_allocator/tagging.cc
+++ b/base/allocator/partition_allocator/tagging.cc
@@ -19,15 +19,6 @@
#define PR_GET_TAGGED_ADDR_CTRL 56
#define PR_TAGGED_ADDR_ENABLE (1UL << 0)
-#if BUILDFLAG(IS_LINUX)
-#include <linux/version.h>
-
-// Linux headers already provide these since v5.10.
-#if LINUX_VERSION_CODE >= KERNEL_VERSION(5, 10, 0)
-#define HAS_PR_MTE_MACROS
-#endif
-#endif
-
#ifndef HAS_PR_MTE_MACROS
#define PR_MTE_TCF_SHIFT 1
#define PR_MTE_TCF_NONE (0UL << PR_MTE_TCF_SHIFT)

View file

@ -1,107 +0,0 @@
diff --git a/sandbox/linux/seccomp-bpf-helpers/syscall_parameters_restrictions.cc ./sandbox/linux/seccomp-bpf-helpers/syscall_parameters_restrictions.cc
index ff5a1c0..da56b9b 100644
--- a/sandbox/linux/seccomp-bpf-helpers/syscall_parameters_restrictions.cc
+++ b/sandbox/linux/seccomp-bpf-helpers/syscall_parameters_restrictions.cc
@@ -139,21 +139,11 @@ namespace sandbox {
// present (as in newer versions of posix_spawn).
ResultExpr RestrictCloneToThreadsAndEPERMFork() {
const Arg<unsigned long> flags(0);
-
- // TODO(mdempsky): Extend DSL to support (flags & ~mask1) == mask2.
- const uint64_t kAndroidCloneMask = CLONE_VM | CLONE_FS | CLONE_FILES |
- CLONE_SIGHAND | CLONE_THREAD |
- CLONE_SYSVSEM;
- const uint64_t kObsoleteAndroidCloneMask = kAndroidCloneMask | CLONE_DETACHED;
-
- const uint64_t kGlibcPthreadFlags =
- CLONE_VM | CLONE_FS | CLONE_FILES | CLONE_SIGHAND | CLONE_THREAD |
- CLONE_SYSVSEM | CLONE_SETTLS | CLONE_PARENT_SETTID | CLONE_CHILD_CLEARTID;
- const BoolExpr glibc_test = flags == kGlibcPthreadFlags;
-
- const BoolExpr android_test =
- AnyOf(flags == kAndroidCloneMask, flags == kObsoleteAndroidCloneMask,
- flags == kGlibcPthreadFlags);
+ const int required = CLONE_VM | CLONE_FS | CLONE_FILES | CLONE_SIGHAND |
+ CLONE_THREAD | CLONE_SYSVSEM;
+ const int safe = CLONE_SETTLS | CLONE_PARENT_SETTID | CLONE_CHILD_CLEARTID |
+ CLONE_DETACHED;
+ const BoolExpr thread_clone_ok = (flags&~safe)==required;
// The following two flags are the two important flags in any vfork-emulating
// clone call. EPERM any clone call that contains both of them.
@@ -163,7 +153,7 @@ ResultExpr RestrictCloneToThreadsAndEPERMFork() {
AnyOf((flags & (CLONE_VM | CLONE_THREAD)) == 0,
(flags & kImportantCloneVforkFlags) == kImportantCloneVforkFlags);
- return If(IsAndroid() ? android_test : glibc_test, Allow())
+ return If(thread_clone_ok, Allow())
.ElseIf(is_fork_or_clone_vfork, Error(EPERM))
.Else(CrashSIGSYSClone());
}
diff --git a/sandbox/linux/seccomp-bpf-helpers/syscall_sets.cc ./sandbox/linux/seccomp-bpf-helpers/syscall_sets.cc
index d9d1882..0567557 100644
--- a/sandbox/linux/seccomp-bpf-helpers/syscall_sets.cc
+++ b/sandbox/linux/seccomp-bpf-helpers/syscall_sets.cc
@@ -392,6 +392,7 @@ bool SyscallSets::IsAllowedProcessStartOrDeath(int sysno) {
#if defined(__i386__)
case __NR_waitpid:
#endif
+ case __NR_set_tid_address:
return true;
case __NR_clone: // Should be parameter-restricted.
case __NR_setns: // Privileged.
@@ -404,7 +405,6 @@ bool SyscallSets::IsAllowedProcessStartOrDeath(int sysno) {
#if defined(__i386__) || defined(__x86_64__) || defined(__mips__)
case __NR_set_thread_area:
#endif
- case __NR_set_tid_address:
case __NR_unshare:
#if !defined(__mips__) && !defined(__aarch64__)
case __NR_vfork:
@@ -514,6 +514,8 @@ bool SyscallSets::IsAllowedAddressSpaceAccess(int sysno) {
case __NR_mlock:
case __NR_munlock:
case __NR_munmap:
+ case __NR_mremap:
+ case __NR_membarrier:
return true;
case __NR_madvise:
case __NR_mincore:
@@ -531,7 +533,6 @@ bool SyscallSets::IsAllowedAddressSpaceAccess(int sysno) {
case __NR_modify_ldt:
#endif
case __NR_mprotect:
- case __NR_mremap:
case __NR_msync:
case __NR_munlockall:
case __NR_readahead:
diff --git a/sandbox/linux/system_headers/linux_syscalls.h ./sandbox/linux/system_headers/linux_syscalls.h
index 2b78a0c..b6fedb5 100644
--- a/sandbox/linux/system_headers/linux_syscalls.h
+++ b/sandbox/linux/system_headers/linux_syscalls.h
@@ -10,6 +10,7 @@
#define SANDBOX_LINUX_SYSTEM_HEADERS_LINUX_SYSCALLS_H_
#include "build/build_config.h"
+#include <sys/syscall.h>
#if defined(__x86_64__)
#include "sandbox/linux/system_headers/x86_64_linux_syscalls.h"
diff --git a/services/service_manager/sandbox/linux/bpf_renderer_policy_linux.cc ./services/service_manager/sandbox/linux/bpf_renderer_policy_linux.cc
index a85c0ea..715aa1e 100644
--- a/sandbox/policy/linux/bpf_renderer_policy_linux.cc
+++ b/sandbox/policy/linux/bpf_renderer_policy_linux.cc
@@ -102,11 +102,11 @@
#if defined(__arm__) || defined(__aarch64__)
case __NR_getcpu:
#endif
- return Allow();
- case __NR_sched_getaffinity:
case __NR_sched_getparam:
case __NR_sched_getscheduler:
case __NR_sched_setscheduler:
+ return Allow();
+ case __NR_sched_getaffinity:
return RestrictSchedTarget(GetPolicyPid(), sysno);
case __NR_prlimit64:
// See crbug.com/662450 and setrlimit comment above.

View file

@ -1,81 +0,0 @@
--- a/sandbox/linux/services/namespace_sandbox.cc
+++ b/sandbox/linux/services/namespace_sandbox.cc
@@ -209,6 +209,70 @@
return base::LaunchProcess(argv, launch_options_copy);
}
+#if defined(__aarch64__)
+#define TLS_ABOVE_TP
+#endif
+
+struct musl_pthread
+{
+ /* Part 1 -- these fields may be external or
+ * internal (accessed via asm) ABI. Do not change. */
+ struct pthread *self;
+#ifndef TLS_ABOVE_TP
+ uintptr_t *dtv;
+#endif
+ struct pthread *prev, *next; /* non-ABI */
+ uintptr_t sysinfo;
+#ifndef TLS_ABOVE_TP
+#ifdef CANARY_PAD
+ uintptr_t canary_pad;
+#endif
+ uintptr_t canary;
+#endif
+
+/* Part 2 -- implementation details, non-ABI. */
+ int tid;
+ int errno_val;
+ volatile int detach_state;
+ volatile int cancel;
+ volatile unsigned char canceldisable, cancelasync;
+ unsigned char tsd_used:1;
+ unsigned char dlerror_flag:1;
+ unsigned char *map_base;
+ size_t map_size;
+ void *stack;
+ size_t stack_size;
+ size_t guard_size;
+ void *result;
+ struct __ptcb *cancelbuf;
+ void **tsd;
+ struct {
+ volatile void *volatile head;
+ long off;
+ volatile void *volatile pending;
+ } robust_list;
+ int h_errno_val;
+ volatile int timer_id;
+ locale_t locale;
+ volatile int killlock[1];
+ char *dlerror_buf;
+ void *stdio_locks;
+
+ /* Part 3 -- the positions of these fields relative to
+ * the end of the structure is external and internal ABI. */
+#ifdef TLS_ABOVE_TP
+ uintptr_t canary;
+ uintptr_t *dtv;
+#endif
+};
+
+void MaybeUpdateMuslTidCache()
+{
+ pid_t real_tid = sys_gettid();
+ pid_t* cached_tid_location = &reinterpret_cast<struct musl_pthread*>(pthread_self())->tid;
+ *cached_tid_location = real_tid;
+}
+
// static
pid_t NamespaceSandbox::ForkInNewPidNamespace(bool drop_capabilities_in_child) {
const pid_t pid =
@@ -226,6 +290,7 @@
#if defined(LIBC_GLIBC)
MaybeUpdateGlibcTidCache();
#endif
+ MaybeUpdateMuslTidCache();
return 0;
}

View file

@ -1,22 +0,0 @@
Use monotonic clock for pthread_cond_timedwait with musl too.
--- a/v8/src/base/platform/condition-variable.cc
+++ b/v8/src/base/platform/condition-variable.cc
@@ -16,7 +16,7 @@
ConditionVariable::ConditionVariable() {
#if (V8_OS_FREEBSD || V8_OS_NETBSD || V8_OS_OPENBSD || \
- (V8_OS_LINUX && V8_LIBC_GLIBC))
+ V8_OS_LINUX)
// On Free/Net/OpenBSD and Linux with glibc we can change the time
// source for pthread_cond_timedwait() to use the monotonic clock.
pthread_condattr_t attr;
@@ -92,7 +92,7 @@
&native_handle_, &mutex->native_handle(), &ts);
#else
#if (V8_OS_FREEBSD || V8_OS_NETBSD || V8_OS_OPENBSD || \
- (V8_OS_LINUX && V8_LIBC_GLIBC))
+ V8_OS_LINUX)
// On Free/Net/OpenBSD and Linux with glibc we can change the time
// source for pthread_cond_timedwait() to use the monotonic clock.
result = clock_gettime(CLOCK_MONOTONIC, &ts);

View file

@ -1,107 +0,0 @@
--- a/base/debug/stack_trace_posix.cc
+++ b/base/debug/stack_trace_posix.cc
@@ -27,7 +27,7 @@
#if !defined(USE_SYMBOLIZE)
#include <cxxabi.h>
#endif
-#if !defined(__UCLIBC__) && !defined(_AIX)
+#if defined(__GLIBC__) && !defined(_AIX)
#include <execinfo.h>
#endif
@@ -89,7 +89,7 @@
// Note: code in this function is NOT async-signal safe (std::string uses
// malloc internally).
-#if !defined(__UCLIBC__) && !defined(_AIX)
+#if defined(__GLIBC__) && !defined(_AIX)
std::string::size_type search_from = 0;
while (search_from < text->size()) {
// Look for the start of a mangled symbol, from search_from.
@@ -136,7 +136,7 @@
virtual ~BacktraceOutputHandler() = default;
};
-#if !defined(__UCLIBC__) && !defined(_AIX)
+#if defined(__GLIBC__) && !defined(_AIX)
void OutputPointer(void* pointer, BacktraceOutputHandler* handler) {
// This should be more than enough to store a 64-bit number in hex:
// 16 hex digits + 1 for null-terminator.
@@ -839,7 +839,7 @@
// If we do not have unwind tables, then try tracing using frame pointers.
return base::debug::TraceStackFramePointers(const_cast<const void**>(trace),
count, 0);
-#elif !defined(__UCLIBC__) && !defined(_AIX)
+#elif defined(__GLIBC__) && !defined(_AIX)
// Though the backtrace API man page does not list any possible negative
// return values, we take no chance.
return base::saturated_cast<size_t>(backtrace(trace, count));
@@ -852,13 +852,13 @@
// NOTE: This code MUST be async-signal safe (it's used by in-process
// stack dumping signal handler). NO malloc or stdio is allowed here.
-#if !defined(__UCLIBC__) && !defined(_AIX)
+#if defined(__GLIBC__) && !defined(_AIX)
PrintBacktraceOutputHandler handler;
ProcessBacktrace(trace_, count_, prefix_string, &handler);
#endif
}
-#if !defined(__UCLIBC__) && !defined(_AIX)
+#if defined(__GLIBC__) && !defined(_AIX)
void StackTrace::OutputToStreamWithPrefix(std::ostream* os,
const char* prefix_string) const {
StreamBacktraceOutputHandler handler(os);
--- a/v8/src/codegen/external-reference-table.cc
+++ b/v8/src/codegen/external-reference-table.cc
@@ -11,7 +11,9 @@
#if defined(DEBUG) && defined(V8_OS_LINUX) && !defined(V8_OS_ANDROID)
#define SYMBOLIZE_FUNCTION
+#if defined(__GLIBC__)
#include <execinfo.h>
+#endif
#include <vector>
@@ -96,7 +98,7 @@
}
const char* ExternalReferenceTable::ResolveSymbol(void* address) {
-#ifdef SYMBOLIZE_FUNCTION
+#if defined(SYMBOLIZE_FUNCTION) && defined(__GLIBC__)
char** names = backtrace_symbols(&address, 1);
const char* name = names[0];
// The array of names is malloc'ed. However, each name string is static
--- a/third_party/swiftshader/third_party/llvm-subzero/build/Linux/include/llvm/Config/config.h
+++ b/third_party/swiftshader/third_party/llvm-subzero/build/Linux/include/llvm/Config/config.h
@@ -58,7 +58,7 @@
#define HAVE_ERRNO_H 1
/* Define to 1 if you have the <execinfo.h> header file. */
-#define HAVE_EXECINFO_H 1
+/* #define HAVE_EXECINFO_H 1 */
/* Define to 1 if you have the <fcntl.h> header file. */
#define HAVE_FCNTL_H 1
--- a/base/debug/stack_trace.cc
+++ b/base/debug/stack_trace.cc
@@ -251,7 +253,9 @@
}
void StackTrace::OutputToStream(std::ostream* os) const {
+#if defined(__GLIBC__) && !defined(_AIX)
OutputToStreamWithPrefix(os, nullptr);
+#endif
}
std::string StackTrace::ToString() const {
@@ -281,7 +281,7 @@
}
std::string StackTrace::ToStringWithPrefix(const char* prefix_string) const {
std::stringstream stream;
-#if !defined(__UCLIBC__) && !defined(_AIX)
+#if defined(__GLIBC__) && !defined(_AIX)
OutputToStreamWithPrefix(&stream, prefix_string);
#endif
return stream.str();

View file

@ -1,19 +0,0 @@
--- a/chrome/browser/metrics/chrome_browser_main_extra_parts_metrics.cc
+++ b/chrome/browser/metrics/chrome_browser_main_extra_parts_metrics.cc
@@ -61,7 +61,6 @@
// TODO(crbug.com/1052397): Revisit the macro expression once build flag switch
// of lacros-chrome is complete.
#if BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS_LACROS)
-#include <gnu/libc-version.h>
#include "base/linux_util.h"
#include "base/strings/string_split.h"
@@ -324,7 +323,7 @@
void RecordLinuxGlibcVersion() {
// TODO(crbug.com/1052397): Revisit the macro expression once build flag switch
// of lacros-chrome is complete.
-#if BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS_LACROS)
+#if defined(__GLIBC__) || BUILDFLAG(IS_CHROMEOS_LACROS)
base::Version version(gnu_get_libc_version());
UMALinuxGlibcVersion glibc_version_result = UMA_LINUX_GLIBC_NOT_PARSEABLE;

View file

@ -1,110 +0,0 @@
--- a/base/trace_event/malloc_dump_provider.cc
+++ b/base/trace_event/malloc_dump_provider.cc
@@ -185,7 +185,6 @@
#define MALLINFO2_FOUND_IN_LIBC
struct mallinfo2 info = mallinfo2();
#endif
-#endif // defined(__GLIBC__) && defined(__GLIBC_PREREQ)
#if !defined(MALLINFO2_FOUND_IN_LIBC)
struct mallinfo info = mallinfo();
#endif
@@ -205,6 +204,7 @@
sys_alloc_dump->AddScalar(MemoryAllocatorDump::kNameSize,
MemoryAllocatorDump::kUnitsBytes, info.uordblks);
}
+#endif // defined(__GLIBC__) && defined(__GLIBC_PREREQ)
}
#endif
@@ -339,7 +340,7 @@
&allocated_objects_count);
#elif BUILDFLAG(IS_FUCHSIA)
// TODO(fuchsia): Port, see https://crbug.com/706592.
-#else
+#elif defined(__GLIBC__)
ReportMallinfoStats(/*pmd=*/nullptr, &total_virtual_size, &resident_size,
&allocated_objects_size, &allocated_objects_count);
#endif
--- a/base/process/process_metrics_posix.cc
+++ b/base/process/process_metrics_posix.cc
@@ -105,7 +105,7 @@
#endif // !BUILDFLAG(IS_FUCHSIA)
-#if BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS) || BUILDFLAG(IS_ANDROID)
+#if (BUILDFLAG(IS_LINUX) && defined(__GLIBC__)) || BUILDFLAG(IS_CHROMEOS) || BUILDFLAG(IS_ANDROID)
namespace {
size_t GetMallocUsageMallinfo() {
@@ -123,7 +123,7 @@
}
} // namespace
-#endif // BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS) ||
+#endif // (BUILDFLAG(IS_LINUX) && defined(__GLIBC__)) || BUILDFLAG(IS_CHROMEOS) ||
// BUILDFLAG(IS_ANDROID)
size_t ProcessMetrics::GetMallocUsage() {
@@ -131,9 +131,9 @@
malloc_statistics_t stats = {0};
malloc_zone_statistics(nullptr, &stats);
return stats.size_in_use;
-#elif BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS) || BUILDFLAG(IS_ANDROID)
+#elif (BUILDFLAG(IS_LINUX) && defined(__GLIBC__)) || BUILDFLAG(IS_CHROMEOS) || BUILDFLAG(IS_ANDROID)
return GetMallocUsageMallinfo();
-#elif BUILDFLAG(IS_FUCHSIA)
+#else
// TODO(fuchsia): Not currently exposed. https://crbug.com/735087.
return 0;
#endif
--- a/third_party/tflite/src/tensorflow/lite/profiling/memory_info.cc
+++ b/third_party/tflite/src/tensorflow/lite/profiling/memory_info.cc
@@ -35,7 +35,7 @@
MemoryUsage GetMemoryUsage() {
MemoryUsage result;
-#ifdef __linux__
+#if defined(__linux__) && defined(__GLIBC__)
rusage res;
if (getrusage(RUSAGE_SELF, &res) == 0) {
result.max_rss_kb = res.ru_maxrss;
--- a/third_party/swiftshader/third_party/llvm-subzero/lib/Support/Unix/Process.inc
+++ b/third_party/swiftshader/third_party/llvm-subzero/lib/Support/Unix/Process.inc
@@ -86,11 +86,11 @@
}
size_t Process::GetMallocUsage() {
-#if defined(HAVE_MALLINFO2)
+#if defined(HAVE_MALLINFO2) && defined(__GLIBC__)
struct mallinfo2 mi;
mi = ::mallinfo2();
return mi.uordblks;
-#elif defined(HAVE_MALLINFO)
+#elif defined(HAVE_MALLINFO) && defined(__GLIBC__)
struct mallinfo mi;
mi = ::mallinfo();
return mi.uordblks;
--- a/third_party/swiftshader/third_party/llvm-10.0/configs/linux/include/llvm/Config/config.h
+++ b/third_party/swiftshader/third_party/llvm-10.0/configs/linux/include/llvm/Config/config.h
@@ -122,7 +122,9 @@
/* #undef HAVE_MALLCTL */
/* Define to 1 if you have the `mallinfo' function. */
+#if defined(__GLIBC__)
#define HAVE_MALLINFO 1
+#endif
/* Define to 1 if you have the <malloc.h> header file. */
#define HAVE_MALLOC_H 1
--- a/base/allocator/allocator_shim_default_dispatch_to_partition_alloc.cc
+++ b/base/allocator/allocator_shim_default_dispatch_to_partition_alloc.cc
@@ -717,7 +717,7 @@
#endif // !BUILDFLAG(IS_APPLE) && !BUILDFLAG(IS_ANDROID)
-#if BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS)
+#if 0
SHIM_ALWAYS_EXPORT struct mallinfo mallinfo(void) __THROW {
base::SimplePartitionStatsDumper allocator_dumper;
Allocator()->DumpStats("malloc", true, &allocator_dumper);

View file

@ -1,30 +0,0 @@
--- a/net/dns/public/scoped_res_state.cc
+++ b/net/dns/public/scoped_res_state.cc
@@ -13,7 +13,7 @@
namespace net {
ScopedResState::ScopedResState() {
-#if BUILDFLAG(IS_OPENBSD) || BUILDFLAG(IS_FUCHSIA)
+#if BUILDFLAG(IS_OPENBSD) || BUILDFLAG(IS_FUCHSIA) || defined(_GNU_SOURCE)
// Note: res_ninit in glibc always returns 0 and sets RES_INIT.
// res_init behaves the same way.
memset(&_res, 0, sizeof(_res));
@@ -25,16 +25,8 @@
}
ScopedResState::~ScopedResState() {
-#if !BUILDFLAG(IS_OPENBSD) && !BUILDFLAG(IS_FUCHSIA)
-
- // Prefer res_ndestroy where available.
-#if BUILDFLAG(IS_APPLE) || BUILDFLAG(IS_FREEBSD)
- res_ndestroy(&res_);
-#else
- res_nclose(&res_);
-#endif // BUILDFLAG(IS_APPLE) || BUILDFLAG(IS_FREEBSD)
-
-#endif // !BUILDFLAG(IS_OPENBSD) && !BUILDFLAG(IS_FUCHSIA)
+ // musl res_init() doesn't actually do anything
+ // no destruction is necessary as no memory has been allocated
}
bool ScopedResState::IsValid() const {

View file

@ -1,12 +0,0 @@
--- a/base/files/file.h
+++ b/base/files/file.h
@@ -19,7 +19,8 @@
#include "build/build_config.h"
#if BUILDFLAG(IS_BSD) || BUILDFLAG(IS_APPLE) || BUILDFLAG(IS_NACL) || \
- BUILDFLAG(IS_FUCHSIA) || (BUILDFLAG(IS_ANDROID) && __ANDROID_API__ < 21)
+ BUILDFLAG(IS_FUCHSIA) || (BUILDFLAG(IS_ANDROID) && __ANDROID_API__ < 21) || \
+ (defined(OS_LINUX) && !defined(__GLIBC__))
struct stat;
namespace base {
typedef struct stat stat_wrapper_t;

View file

@ -1,11 +0,0 @@
--- a/chrome/browser/ui/autofill/autofill_popup_controller_impl.h
+++ b/chrome/browser/ui/autofill/autofill_popup_controller_impl.h
@@ -178,7 +178,7 @@
class AutofillPopupViewPtr {
public:
AutofillPopupViewPtr() = default;
- AutofillPopupViewPtr(nullptr_t) : ptr_(nullptr) {}
+ AutofillPopupViewPtr(std::nullptr_t) : ptr_(nullptr) {}
AutofillPopupViewPtr(AutofillPopupView* ptr) : ptr_(ptr) {}
explicit operator bool() const { return ptr_; }

View file

@ -1,15 +0,0 @@
--- a/base/allocator/partition_allocator/partition_root.cc
+++ b/base/allocator/partition_allocator/partition_root.cc
@@ -248,9 +248,9 @@
// However, no perfect solution really exists to make threads + fork()
// cooperate, but deadlocks are real (and fork() is used in DEATH_TEST()s),
// and other malloc() implementations use the same techniques.
- int err =
- pthread_atfork(BeforeForkInParent, AfterForkInParent, AfterForkInChild);
- PA_CHECK(err == 0);
+ //int err =
+ // pthread_atfork(BeforeForkInParent, AfterForkInParent, AfterForkInChild);
+ //PA_CHECK(err == 0);
#endif // BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS)
}

View file

@ -1,11 +0,0 @@
--- a/tools/grit/grit/util.py
+++ b/tools/grit/grit/util.py
@@ -209,7 +209,7 @@
mode = 'rb'
encoding = None
else:
- mode = 'rU'
+ mode = 'r'
with io.open(filename, mode, encoding=encoding) as f:
return f.read()

View file

@ -1,22 +0,0 @@
--- a/third_party/electron_node/tools/inspector_protocol/jinja2/runtime.py
+++ b/third_party/electron_node/tools/inspector_protocol/jinja2/runtime.py
@@ -315,7 +315,7 @@ class Context(with_metaclass(ContextMeta
# register the context as mapping if possible
try:
- from collections import Mapping
+ from collections.abc import Mapping
Mapping.register(Context)
except ImportError:
pass
--- a/third_party/electron_node/tools/inspector_protocol/jinja2/sandbox.py
+++ b/third_party/electron_node/tools/inspector_protocol/jinja2/sandbox.py
@@ -14,7 +14,7 @@
"""
import types
import operator
-from collections import Mapping
+from collections.abc import Mapping
from jinja2.environment import Environment
from jinja2.exceptions import SecurityError
from jinja2._compat import string_types, PY2

View file

@ -1,11 +0,0 @@
--- a/net/third_party/quiche/src/quiche/quic/core/quic_one_block_arena.h
+++ b/net/third_party/quiche/src/quiche/quic/core/quic_one_block_arena.h
@@ -69,7 +69,7 @@
// QuicConnections currently use around 1KB of polymorphic types which would
// ordinarily be on the heap. Instead, store them inline in an arena.
-using QuicConnectionArena = QuicOneBlockArena<1280>;
+using QuicConnectionArena = QuicOneBlockArena<1504>;
} // namespace quic

View file

@ -1,287 +0,0 @@
--- a/media/cdm/library_cdm/clear_key_cdm/ffmpeg_cdm_audio_decoder.cc
+++ b/media/cdm/library_cdm/clear_key_cdm/ffmpeg_cdm_audio_decoder.cc
@@ -74,7 +74,7 @@
codec_context->sample_fmt = AV_SAMPLE_FMT_NONE;
}
- codec_context->ch_layout.nb_channels = config.channel_count;
+ codec_context->channels = config.channel_count;
codec_context->sample_rate = config.samples_per_second;
if (config.extra_data) {
@@ -124,8 +124,8 @@
case cdm::kAudioFormatPlanarS16:
case cdm::kAudioFormatPlanarF32: {
const int decoded_size_per_channel =
- decoded_audio_size / av_frame.ch_layout.nb_channels;
- for (int i = 0; i < av_frame.ch_layout.nb_channels; ++i) {
+ decoded_audio_size / av_frame.channels;
+ for (int i = 0; i < av_frame.channels; ++i) {
memcpy(output_buffer, av_frame.extended_data[i],
decoded_size_per_channel);
output_buffer += decoded_size_per_channel;
@@ -185,14 +185,13 @@
// Success!
decoding_loop_ = std::make_unique<FFmpegDecodingLoop>(codec_context_.get());
samples_per_second_ = config.samples_per_second;
- bytes_per_frame_ =
- codec_context_->ch_layout.nb_channels * config.bits_per_channel / 8;
+ bytes_per_frame_ = codec_context_->channels * config.bits_per_channel / 8;
output_timestamp_helper_ =
std::make_unique<AudioTimestampHelper>(config.samples_per_second);
is_initialized_ = true;
// Store initial values to guard against midstream configuration changes.
- channels_ = codec_context_->ch_layout.nb_channels;
+ channels_ = codec_context_->channels;
av_sample_format_ = codec_context_->sample_fmt;
return true;
@@ -292,19 +291,17 @@
for (auto& frame : audio_frames) {
int decoded_audio_size = 0;
if (frame->sample_rate != samples_per_second_ ||
- frame->ch_layout.nb_channels != channels_ ||
- frame->format != av_sample_format_) {
+ frame->channels != channels_ || frame->format != av_sample_format_) {
DLOG(ERROR) << "Unsupported midstream configuration change!"
<< " Sample Rate: " << frame->sample_rate << " vs "
- << samples_per_second_
- << ", Channels: " << frame->ch_layout.nb_channels << " vs "
- << channels_ << ", Sample Format: " << frame->format << " vs "
- << av_sample_format_;
+ << samples_per_second_ << ", Channels: " << frame->channels
+ << " vs " << channels_ << ", Sample Format: " << frame->format
+ << " vs " << av_sample_format_;
return cdm::kDecodeError;
}
decoded_audio_size = av_samples_get_buffer_size(
- nullptr, codec_context_->ch_layout.nb_channels, frame->nb_samples,
+ nullptr, codec_context_->channels, frame->nb_samples,
codec_context_->sample_fmt, 1);
if (!decoded_audio_size)
continue;
@@ -323,9 +320,9 @@
size_t* total_size,
std::vector<std::unique_ptr<AVFrame, ScopedPtrAVFreeFrame>>* audio_frames,
AVFrame* frame) {
- *total_size += av_samples_get_buffer_size(
- nullptr, codec_context_->ch_layout.nb_channels, frame->nb_samples,
- codec_context_->sample_fmt, 1);
+ *total_size += av_samples_get_buffer_size(nullptr, codec_context_->channels,
+ frame->nb_samples,
+ codec_context_->sample_fmt, 1);
audio_frames->emplace_back(av_frame_clone(frame));
return true;
}
--- a/media/ffmpeg/ffmpeg_common.cc
+++ b/media/ffmpeg/ffmpeg_common.cc
@@ -345,11 +345,10 @@
codec_context->sample_fmt, codec_context->codec_id);
ChannelLayout channel_layout =
- codec_context->ch_layout.nb_channels > 8
+ codec_context->channels > 8
? CHANNEL_LAYOUT_DISCRETE
- : ChannelLayoutToChromeChannelLayout(
- codec_context->ch_layout.u.mask,
- codec_context->ch_layout.nb_channels);
+ : ChannelLayoutToChromeChannelLayout(codec_context->channel_layout,
+ codec_context->channels);
int sample_rate = codec_context->sample_rate;
switch (codec) {
@@ -402,7 +401,7 @@
extra_data, encryption_scheme, seek_preroll,
codec_context->delay);
if (channel_layout == CHANNEL_LAYOUT_DISCRETE)
- config->SetChannelsForDiscrete(codec_context->ch_layout.nb_channels);
+ config->SetChannelsForDiscrete(codec_context->channels);
#if BUILDFLAG(ENABLE_PLATFORM_AC3_EAC3_AUDIO)
// These are bitstream formats unknown to ffmpeg, so they don't have
@@ -471,7 +470,7 @@
// TODO(scherkus): should we set |channel_layout|? I'm not sure if FFmpeg uses
// said information to decode.
- codec_context->ch_layout.nb_channels = config.channels();
+ codec_context->channels = config.channels();
codec_context->sample_rate = config.samples_per_second();
if (config.extra_data().empty()) {
--- a/media/filters/audio_file_reader.cc
+++ b/media/filters/audio_file_reader.cc
@@ -113,15 +113,14 @@
// Verify the channel layout is supported by Chrome. Acts as a sanity check
// against invalid files. See http://crbug.com/171962
- if (ChannelLayoutToChromeChannelLayout(
- codec_context_->ch_layout.u.mask,
- codec_context_->ch_layout.nb_channels) ==
+ if (ChannelLayoutToChromeChannelLayout(codec_context_->channel_layout,
+ codec_context_->channels) ==
CHANNEL_LAYOUT_UNSUPPORTED) {
return false;
}
// Store initial values to guard against midstream configuration changes.
- channels_ = codec_context_->ch_layout.nb_channels;
+ channels_ = codec_context_->channels;
audio_codec_ = CodecIDToAudioCodec(codec_context_->codec_id);
sample_rate_ = codec_context_->sample_rate;
av_sample_format_ = codec_context_->sample_fmt;
@@ -224,7 +223,7 @@
if (frames_read < 0)
return false;
- const int channels = frame->ch_layout.nb_channels;
+ const int channels = frame->channels;
if (frame->sample_rate != sample_rate_ || channels != channels_ ||
frame->format != av_sample_format_) {
DLOG(ERROR) << "Unsupported midstream configuration change!"
--- a/media/filters/audio_file_reader_unittest.cc
+++ b/media/filters/audio_file_reader_unittest.cc
@@ -121,11 +121,11 @@
EXPECT_FALSE(reader_->Open());
}
- void RunTestFailingDecode(const char* fn, int expect_read = 0) {
+ void RunTestFailingDecode(const char* fn) {
Initialize(fn);
EXPECT_TRUE(reader_->Open());
std::vector<std::unique_ptr<AudioBus>> decoded_audio_packets;
- EXPECT_EQ(reader_->Read(&decoded_audio_packets), expect_read);
+ EXPECT_EQ(reader_->Read(&decoded_audio_packets), 0);
}
void RunTestPartialDecode(const char* fn) {
@@ -219,7 +219,7 @@
}
TEST_F(AudioFileReaderTest, MidStreamConfigChangesFail) {
- RunTestFailingDecode("midstream_config_change.mp3", 42624);
+ RunTestFailingDecode("midstream_config_change.mp3");
}
#endif
--- a/media/filters/audio_video_metadata_extractor.cc
+++ b/media/filters/audio_video_metadata_extractor.cc
@@ -113,15 +113,6 @@
if (!stream)
continue;
- void* display_matrix =
- av_stream_get_side_data(stream, AV_PKT_DATA_DISPLAYMATRIX, nullptr);
- if (display_matrix) {
- rotation_ = VideoTransformation::FromFFmpegDisplayMatrix(
- static_cast<int32_t*>(display_matrix))
- .rotation;
- info.tags["rotate"] = base::NumberToString(rotation_);
- }
-
// Extract dictionary from streams also. Needed for containers that attach
// metadata to contained streams instead the container itself, like OGG.
ExtractDictionary(stream->metadata, &info.tags);
@@ -264,6 +255,8 @@
if (raw_tags->find(tag->key) == raw_tags->end())
(*raw_tags)[tag->key] = tag->value;
+ if (ExtractInt(tag, "rotate", &rotation_))
+ continue;
if (ExtractString(tag, "album", &album_))
continue;
if (ExtractString(tag, "artist", &artist_))
--- a/media/filters/ffmpeg_aac_bitstream_converter.cc
+++ b/media/filters/ffmpeg_aac_bitstream_converter.cc
@@ -195,15 +195,14 @@
if (!header_generated_ || codec_ != stream_codec_parameters_->codec_id ||
audio_profile_ != stream_codec_parameters_->profile ||
sample_rate_index_ != sample_rate_index ||
- channel_configuration_ !=
- stream_codec_parameters_->ch_layout.nb_channels ||
+ channel_configuration_ != stream_codec_parameters_->channels ||
frame_length_ != header_plus_packet_size) {
header_generated_ =
GenerateAdtsHeader(stream_codec_parameters_->codec_id,
0, // layer
stream_codec_parameters_->profile, sample_rate_index,
0, // private stream
- stream_codec_parameters_->ch_layout.nb_channels,
+ stream_codec_parameters_->channels,
0, // originality
0, // home
0, // copyrighted_stream
@@ -215,7 +214,7 @@
codec_ = stream_codec_parameters_->codec_id;
audio_profile_ = stream_codec_parameters_->profile;
sample_rate_index_ = sample_rate_index;
- channel_configuration_ = stream_codec_parameters_->ch_layout.nb_channels;
+ channel_configuration_ = stream_codec_parameters_->channels;
frame_length_ = header_plus_packet_size;
}
--- a/media/filters/ffmpeg_aac_bitstream_converter_unittest.cc
+++ b/media/filters/ffmpeg_aac_bitstream_converter_unittest.cc
@@ -34,7 +34,7 @@
memset(&test_parameters_, 0, sizeof(AVCodecParameters));
test_parameters_.codec_id = AV_CODEC_ID_AAC;
test_parameters_.profile = FF_PROFILE_AAC_MAIN;
- test_parameters_.ch_layout.nb_channels = 2;
+ test_parameters_.channels = 2;
test_parameters_.extradata = extradata_header_;
test_parameters_.extradata_size = sizeof(extradata_header_);
}
--- a/media/filters/ffmpeg_audio_decoder.cc
+++ b/media/filters/ffmpeg_audio_decoder.cc
@@ -28,7 +28,7 @@
// Return the number of channels from the data in |frame|.
static inline int DetermineChannels(AVFrame* frame) {
- return frame->ch_layout.nb_channels;
+ return frame->channels;
}
// Called by FFmpeg's allocation routine to allocate a buffer. Uses
@@ -231,7 +231,7 @@
// Translate unsupported into discrete layouts for discrete configurations;
// ffmpeg does not have a labeled discrete configuration internally.
ChannelLayout channel_layout = ChannelLayoutToChromeChannelLayout(
- codec_context_->ch_layout.u.mask, codec_context_->ch_layout.nb_channels);
+ codec_context_->channel_layout, codec_context_->channels);
if (channel_layout == CHANNEL_LAYOUT_UNSUPPORTED &&
config_.channel_layout() == CHANNEL_LAYOUT_DISCRETE) {
channel_layout = CHANNEL_LAYOUT_DISCRETE;
@@ -348,11 +348,11 @@
// Success!
av_sample_format_ = codec_context_->sample_fmt;
- if (codec_context_->ch_layout.nb_channels != config.channels()) {
+ if (codec_context_->channels != config.channels()) {
MEDIA_LOG(ERROR, media_log_)
<< "Audio configuration specified " << config.channels()
<< " channels, but FFmpeg thinks the file contains "
- << codec_context_->ch_layout.nb_channels << " channels";
+ << codec_context_->channels << " channels";
ReleaseFFmpegResources();
state_ = DecoderState::kUninitialized;
return false;
@@ -403,7 +403,7 @@
if (frame->nb_samples <= 0)
return AVERROR(EINVAL);
- if (s->ch_layout.nb_channels != channels) {
+ if (s->channels != channels) {
DLOG(ERROR) << "AVCodecContext and AVFrame disagree on channel count.";
return AVERROR(EINVAL);
}
@@ -436,8 +436,7 @@
ChannelLayout channel_layout =
config_.channel_layout() == CHANNEL_LAYOUT_DISCRETE
? CHANNEL_LAYOUT_DISCRETE
- : ChannelLayoutToChromeChannelLayout(s->ch_layout.u.mask,
- s->ch_layout.nb_channels);
+ : ChannelLayoutToChromeChannelLayout(s->channel_layout, s->channels);
if (channel_layout == CHANNEL_LAYOUT_UNSUPPORTED) {
DLOG(ERROR) << "Unsupported channel layout.";

View file

@ -1,15 +0,0 @@
--- a/media/filters/audio_file_reader.cc
+++ b/media/filters/audio_file_reader.cc
@@ -243,10 +243,10 @@
// silence from being output. In the case where we are also discarding some
// portion of the packet (as indicated by a negative pts), we further want to
// adjust the duration downward by however much exists before zero.
- if (audio_codec_ == AudioCodec::kAAC && frame->duration) {
+ if (audio_codec_ == AudioCodec::kAAC && frame->pkt_duration) {
const base::TimeDelta pkt_duration = ConvertFromTimeBase(
glue_->format_context()->streams[stream_index_]->time_base,
- frame->duration + std::min(static_cast<int64_t>(0), frame->pts));
+ frame->pkt_duration + std::min(static_cast<int64_t>(0), frame->pts));
const base::TimeDelta frame_duration =
base::Seconds(frames_read / static_cast<double>(sample_rate_));

View file

@ -1,22 +0,0 @@
for some reason this breaks and the fd returned after close() after a few
cycles is still in the lock array
so, just don't enforce or wrap anything.
--- a/base/files/scoped_file_linux.cc
+++ b/base/files/scoped_file_linux.cc
@@ -77,15 +77,3 @@
}
} // namespace base
-
-extern "C" {
-
-int __close(int);
-
-__attribute__((visibility("default"), noinline)) int close(int fd) {
- if (base::IsFDOwned(fd) && g_is_ownership_enforced)
- CrashOnFdOwnershipViolation();
- return __close(fd);
-}
-
-} // extern "C"

View file

@ -1,53 +0,0 @@
--- a/third_party/electron_node/BUILD.gn
+++ b/third_party/electron_node/BUILD.gn
@@ -42,6 +42,18 @@
node_module_version = ""
}
+if (is_linux) {
+ import("//build/config/linux/pkg_config.gni")
+
+ pkg_config("cares") {
+ packages = [ "libcares" ]
+ }
+
+ pkg_config("nghttp2") {
+ packages = [ "libnghttp2" ]
+ }
+}
+
assert(!node_use_dtrace, "node_use_dtrace not supported in GN")
assert(!node_use_etw, "node_use_etw not supported in GN")
@@ -182,11 +194,9 @@
component("node_lib") {
deps = [
":node_js2c",
- "deps/cares",
"deps/histogram",
"deps/googletest:gtest",
"deps/llhttp",
- "deps/nghttp2",
"deps/uvwasi",
"//third_party/zlib",
"//third_party/brotli:dec",
@@ -202,6 +212,19 @@
public_configs = [ ":node_lib_config" ]
include_dirs = [ "src" ]
libs = []
+ if (is_linux) {
+ configs += [
+ ":cares",
+ ":nghttp2",
+ ]
+ libs += [ "http_parser" ]
+ } else {
+ deps += [
+ "deps/cares",
+ "deps/http_parser",
+ "deps/nghttp2",
+ ]
+ }
frameworks = []
cflags_cc = [
"-Wno-deprecated-declarations",

View file

@ -1,18 +0,0 @@
random glibc macro
--- a/sandbox/linux/suid/process_util.h
+++ b/sandbox/linux/suid/process_util.h
@@ -11,6 +11,14 @@
#include <stdbool.h>
#include <sys/types.h>
+// Some additional functions
+# define TEMP_FAILURE_RETRY(expression) \
+ (__extension__ \
+ ({ long int __result; \
+ do __result = (long int) (expression); \
+ while (__result == -1L && errno == EINTR); \
+ __result; }))
+
// This adjusts /proc/process/oom_score_adj so the Linux OOM killer
// will prefer certain process types over others. The range for the
// adjustment is [-1000, 1000], with [0, 1000] being user accessible.

View file

@ -1,113 +0,0 @@
--- a/chrome/browser/process_singleton_posix.cc
+++ b/chrome/browser/process_singleton_posix.cc
@@ -607,7 +607,7 @@
// |reader| is for sending back ACK message.
void HandleMessage(const std::string& current_dir,
const std::vector<std::string>& argv,
- const std::vector<const uint8_t> additional_data,
+ const std::vector<uint8_t> additional_data,
SocketReader* reader);
private:
@@ -664,7 +664,7 @@
void ProcessSingleton::LinuxWatcher::HandleMessage(
const std::string& current_dir,
const std::vector<std::string>& argv,
- const std::vector<const uint8_t> additional_data,
+ const std::vector<uint8_t> additional_data,
SocketReader* reader) {
DCHECK(ui_task_runner_->BelongsToCurrentThread());
DCHECK(reader);
@@ -754,7 +754,7 @@
base::StringToSizeT(tokens[0], &num_args);
std::vector<std::string> command_line(tokens.begin() + 1, tokens.begin() + 1 + num_args);
- std::vector<const uint8_t> additional_data;
+ std::vector<uint8_t> additional_data;
if (tokens.size() >= 3 + num_args) {
size_t additional_data_size;
base::StringToSizeT(tokens[1 + num_args], &additional_data_size);
@@ -763,7 +763,7 @@
std::string(1, kTokenDelimiter));
const uint8_t* additional_data_bits =
reinterpret_cast<const uint8_t*>(remaining_args.c_str());
- additional_data = std::vector<const uint8_t>(
+ additional_data = std::vector<uint8_t>(
additional_data_bits, additional_data_bits + additional_data_size);
}
--- a/chrome/browser/process_singleton.h
+++ b/chrome/browser/process_singleton.h
@@ -102,7 +102,7 @@
using NotificationCallback =
base::RepeatingCallback<bool(const base::CommandLine& command_line,
const base::FilePath& current_directory,
- const std::vector<const uint8_t> additional_data)>;
+ const std::vector<uint8_t> additional_data)>;
#if BUILDFLAG(IS_WIN)
ProcessSingleton(const std::string& program_name,
--- a/chrome/browser/process_singleton_win.cc
+++ b/chrome/browser/process_singleton_win.cc
@@ -81,7 +81,7 @@
bool ParseCommandLine(const COPYDATASTRUCT* cds,
base::CommandLine* parsed_command_line,
base::FilePath* current_directory,
- std::vector<const uint8_t>* parsed_additional_data) {
+ std::vector<uint8_t>* parsed_additional_data) {
// We should have enough room for the shortest command (min_message_size)
// and also be a multiple of wchar_t bytes. The shortest command
// possible is L"START\0\0" (empty command line, current directory,
@@ -163,7 +163,7 @@
msg.substr(fourth_null + 1, fifth_null - fourth_null);
const uint8_t* additional_data_bytes =
reinterpret_cast<const uint8_t*>(additional_data.c_str());
- *parsed_additional_data = std::vector<const uint8_t>(additional_data_bytes,
+ *parsed_additional_data = std::vector<uint8_t>(additional_data_bytes,
additional_data_bytes + additional_data_length);
return true;
@@ -187,7 +187,7 @@
base::CommandLine parsed_command_line(base::CommandLine::NO_PROGRAM);
base::FilePath current_directory;
- std::vector<const uint8_t> additional_data;
+ std::vector<uint8_t> additional_data;
if (!ParseCommandLine(cds, &parsed_command_line, &current_directory, &additional_data)) {
*result = TRUE;
return true;
--- a/electron/shell/browser/api/electron_api_app.cc
+++ b/electron/shell/browser/api/electron_api_app.cc
@@ -519,10 +519,10 @@
const base::RepeatingCallback<
void(const base::CommandLine& command_line,
const base::FilePath& current_directory,
- const std::vector<const uint8_t> additional_data)>& callback,
+ const std::vector<uint8_t> additional_data)>& callback,
const base::CommandLine& cmd,
const base::FilePath& cwd,
- const std::vector<const uint8_t> additional_data) {
+ const std::vector<uint8_t> additional_data) {
// Make sure the callback is called after app gets ready.
if (Browser::Get()->is_ready()) {
callback.Run(cmd, cwd, std::move(additional_data));
@@ -1082,7 +1082,7 @@
void App::OnSecondInstance(const base::CommandLine& cmd,
const base::FilePath& cwd,
- const std::vector<const uint8_t> additional_data) {
+ const std::vector<uint8_t> additional_data) {
v8::Isolate* isolate = JavascriptEnvironment::GetIsolate();
v8::Locker locker(isolate);
v8::HandleScope handle_scope(isolate);
--- a/electron/shell/browser/api/electron_api_app.h
+++ b/electron/shell/browser/api/electron_api_app.h
@@ -195,7 +195,7 @@
std::string GetLocaleCountryCode();
void OnSecondInstance(const base::CommandLine& cmd,
const base::FilePath& cwd,
- const std::vector<const uint8_t> additional_data);
+ const std::vector<uint8_t> additional_data);
bool HasSingleInstanceLock() const;
bool RequestSingleInstanceLock(gin::Arguments* args);
void ReleaseSingleInstanceLock();

View file

@ -1,12 +0,0 @@
--- a/electron/build/webpack/webpack.config.base.js
+++ b/electron/build/webpack/webpack.config.base.js
@@ -117,7 +117,8 @@
entry,
target: alwaysHasNode ? 'node' : 'web',
output: {
- filename: outputFilename
+ filename: outputFilename,
+ hashFunction: 'sha256'
},
resolve: {
alias: {

View file

@ -1,20 +0,0 @@
--- a/third_party/blink/renderer/platform/wtf/stack_util.cc
+++ b/third_party/blink/renderer/platform/wtf/stack_util.cc
@@ -29,7 +29,7 @@
// FIXME: On Mac OSX and Linux, this method cannot estimate stack size
// correctly for the main thread.
-#elif defined(__GLIBC__) || BUILDFLAG(IS_ANDROID) || BUILDFLAG(IS_FREEBSD) || \
+#elif BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_ANDROID) || BUILDFLAG(IS_FREEBSD) || \
BUILDFLAG(IS_FUCHSIA)
// pthread_getattr_np() can fail if the thread is not invoked by
// pthread_create() (e.g., the main thread of blink_unittests).
@@ -97,7 +97,7 @@
}
void* GetStackStart() {
-#if defined(__GLIBC__) || BUILDFLAG(IS_ANDROID) || BUILDFLAG(IS_FREEBSD) || \
+#if BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_ANDROID) || BUILDFLAG(IS_FREEBSD) || \
BUILDFLAG(IS_FUCHSIA)
pthread_attr_t attr;
int error;

82
backports/pandoc/APKBUILD Normal file
View file

@ -0,0 +1,82 @@
# Contributor: Jean-Louis Fuchs <jean-louis.fuchs@adfinis-sygroup.ch>
# Maintainer: Jean-Louis Fuchs <jean-louis.fuchs@adfinis-sygroup.ch>
pkgname=pandoc
pkgver=2.19.2
pkgrel=0
pkgdesc="universal markup converter"
url="https://pandoc.org/"
# limited by ghc
arch="aarch64 x86_64"
license="GPL-2.0-or-later"
makedepends="ghc cabal zlib-dev libffi-dev"
subpackages="$pkgname-doc"
source="https://hackage.haskell.org/package/pandoc-$pkgver/pandoc-$pkgver.tar.gz
texmath-0.12.5.4.patch
cabal.config
"
options="net"
# Cabal seems to be built without sandbox, moving the cabal-dir into src
export CABAL_DIR="$srcdir/.cabal"
cabal_update() {
msg "Freezing $pkgname dependencies"
# Resolve deps and generate fresh cabal.config with version constraints.
cabal update
(
cd "$builddir"
cabal v1-freeze --shadow-installed-packages
# Add version tag at the first line.
sed -i "1i--$pkgver" "cabal.config"
mv "cabal.config" "$startdir/"
)
if ! abuild checksum; then
die "Failed to update checksum, run 'abuild checksum' manually"
fi
}
prepare() {
default_prepare
if [ "$(head -n 1 "$srcdir/cabal.config")" != "--$pkgver" ]; then
die "Requirements file is outdated, run 'abuild cabal_update'"
fi
ln -sf "$srcdir/cabal.config" "$builddir/cabal.project.freeze"
}
build() {
export PATH="$PATH:/usr/lib/llvm14/bin"
cabal update
cabal install --only-dependencies
cabal configure \
--prefix='/usr' \
--enable-tests \
--enable-split-sections \
--ghc-option="-split-sections" \
--flags="+embed_data_files -trypandoc +static"
cabal build --jobs=${JOBS:-1}
}
check() {
cabal test --jobs=${JOBS:-1}
}
package() {
_bindir="$pkgdir/usr/bin"
mkdir -p "$_bindir"
cabal install \
--installdir="$_bindir" \
--install-method=copy
install -Dm644 man/pandoc.1 "$pkgdir"/usr/share/man/man1/pandoc.1
}
sha512sums="
3628a9193d5138294bae562726bcd94567eec10fa0053d43739af04d4eba0a53bd49c2c000a5360afcac08153960a9bf2ee4be3c419cec7e5c13273e718edc80 pandoc-2.19.2.tar.gz
172f8f57c18cc08c976b3c4853be54918fab57aaead2c272685be2183de2e8db9163c26e5f4477ed5059de08b1ed100b6508b0b1ea98c0a20cb6ef9ae6eb52cd texmath-0.12.5.4.patch
02013589a1acd53ffb9ef50bf76ad31b569823f8ef382a783372d0941f462aa53830e506e5b64a1755899eb25111cc912b69628c5ef1c889aec999d4d7883b5d cabal.config
"

View file

@ -0,0 +1,228 @@
--2.19.2
constraints: Cabal ==3.4.0.0,
Glob ==0.10.2,
HUnit ==1.6.2.0,
HsYAML ==0.2.1.1,
JuicyPixels ==3.3.8,
OneTuple ==0.3.1,
QuickCheck ==2.14.2,
SHA ==1.6.4.4,
StateVar ==1.2.2,
aeson ==2.1.1.0,
aeson-pretty ==0.8.9,
ansi-terminal ==0.11.4,
ansi-wl-pprint ==0.6.9,
appar ==0.1.8,
array ==0.5.4.0,
asn1-encoding ==0.9.6,
asn1-parse ==0.9.5,
asn1-types ==0.3.4,
assoc ==1.0.2,
async ==2.2.4,
attoparsec ==0.14.4,
attoparsec-iso8601 ==1.1.0.0,
auto-update ==0.1.6,
base ==4.15.0.0,
base-compat ==0.12.2,
base-compat-batteries ==0.12.2,
base-orphans ==0.8.7,
base16-bytestring ==1.0.2.0,
base64 ==0.4.2.4,
base64-bytestring ==1.2.1.0,
basement ==0.0.15,
bifunctors ==5.5.14,
binary ==0.8.8.0,
bitvec ==1.1.3.0,
blaze-builder ==0.4.2.2,
blaze-html ==0.9.1.2,
blaze-markup ==0.8.2.8,
boring ==0.2,
bsb-http-chunked ==0.0.0.4,
byteorder ==1.0.4,
bytestring ==0.10.12.1,
cabal-doctest ==1.0.9,
call-stack ==0.4.0,
case-insensitive ==1.2.1.0,
cereal ==0.5.8.3,
citeproc ==0.8.0.2,
cmdargs ==0.10.21,
colour ==2.3.6,
commonmark ==0.2.2,
commonmark-extensions ==0.2.3.3,
commonmark-pandoc ==0.2.1.2,
comonad ==5.0.8,
conduit ==1.3.4.3,
conduit-extra ==1.3.6,
connection ==0.3.1,
constraints ==0.13.4,
containers ==0.6.4.1,
contravariant ==1.5.5,
cookie ==0.4.6,
cryptonite ==0.30,
data-array-byte ==0.1.0.1,
data-default ==0.7.1.1,
data-default-class ==0.1.2.0,
data-default-instances-containers ==0.0.1,
data-default-instances-dlist ==0.0.1,
data-default-instances-old-locale ==0.0.1,
data-fix ==0.3.2,
dec ==0.0.5,
deepseq ==1.4.5.0,
digest ==0.0.1.4,
directory ==1.3.6.1,
distributive ==0.6.2.1,
dlist ==1.0,
doclayout ==0.4,
doctemplates ==0.10.0.2,
easy-file ==0.2.2,
emojis ==0.1.2,
exceptions ==0.10.4,
fast-logger ==3.1.1,
file-embed ==0.0.15.0,
filepath ==1.4.2.1,
generically ==0.1,
ghc-bignum ==1.0,
ghc-bignum-orphans ==0.1.1,
ghc-boot-th ==9.0.1,
ghc-prim ==0.7.0,
gridtables ==0.0.3.0,
haddock-library ==1.11.0,
happy ==1.20.0,
hashable ==1.4.2.0,
haskell-lexer ==1.1.1,
hourglass ==0.2.12,
hsc2hs ==0.68.8,
hslua ==2.2.1,
hslua-aeson ==2.2.1,
hslua-classes ==2.2.0,
hslua-core ==2.2.1,
hslua-marshalling ==2.2.1,
hslua-module-doclayout ==1.0.4,
hslua-module-path ==1.0.3,
hslua-module-system ==1.0.2,
hslua-module-text ==1.0.3.1,
hslua-module-version ==1.0.3,
hslua-objectorientation ==2.2.1,
hslua-packaging ==2.2.1,
http-api-data ==0.5,
http-client ==0.7.13.1,
http-client-tls ==0.3.6.1,
http-date ==0.0.11,
http-media ==0.8.0.0,
http-types ==0.12.3,
http2 ==3.0.3,
indexed-traversable ==0.1.2,
indexed-traversable-instances ==0.1.1.1,
integer-gmp ==1.1,
integer-logarithms ==1.0.3.1,
iproute ==1.7.12,
ipynb ==0.2,
jira-wiki-markup ==1.4.0,
libyaml ==0.1.2,
lpeg ==1.0.3,
lua ==2.2.1,
memory ==0.18.0,
mime-types ==0.1.1.0,
mmorph ==1.2.0,
monad-control ==1.0.3.1,
mono-traversable ==1.0.15.3,
mtl ==2.2.2,
network ==3.1.2.7,
network-byte-order ==0.1.6,
network-uri ==2.6.4.2,
old-locale ==1.0.0.7,
old-time ==1.1.0.3,
optparse-applicative ==0.17.0.0,
pandoc-lua-marshal ==0.1.7,
pandoc-types ==1.22.2.1,
parsec ==3.1.14.0,
pem ==0.2.4,
pretty ==1.1.3.6,
pretty-show ==1.10,
primitive ==0.7.4.0,
process ==1.6.11.0,
psqueues ==0.2.7.3,
random ==1.2.1.1,
recv ==0.0.0,
resourcet ==1.2.6,
rts ==1.0,
safe ==0.3.19,
safe-exceptions ==0.1.7.3,
scientific ==0.3.7.0,
semialign ==1.2.0.1,
semigroupoids ==5.3.7,
servant ==0.19.1,
servant-server ==0.19.2,
simple-sendfile ==0.2.30,
singleton-bool ==0.1.6,
skylighting ==0.13.2,
skylighting-core ==0.13.2,
skylighting-format-ansi ==0.1,
skylighting-format-blaze-html ==0.1.1,
skylighting-format-context ==0.1.0.1,
skylighting-format-latex ==0.1,
socks ==0.6.1,
some ==1.0.4.1,
sop-core ==0.5.0.2,
split ==0.2.3.5,
splitmix ==0.1.0.4,
stm ==2.5.0.0,
streaming-commons ==0.2.2.5,
strict ==0.4.0.1,
string-conversions ==0.4.0.1,
syb ==0.7.2.2,
tagged ==0.8.6.1,
tagsoup ==0.14.8,
template-haskell ==2.17.0.0,
temporary ==1.3,
texmath ==0.12.5.4,
text ==1.2.4.1,
text-conversions ==0.3.1.1,
text-short ==0.1.5,
th-abstraction ==0.4.5.0,
th-compat ==0.1.4,
th-lift ==0.8.2,
th-lift-instances ==0.1.20,
these ==1.1.1.1,
time ==1.9.3,
time-compat ==1.9.6.1,
time-manager ==0.0.0,
tls ==1.6.0,
transformers ==0.5.6.2,
transformers-base ==0.4.6,
transformers-compat ==0.7.2,
type-equality ==1,
typed-process ==0.2.10.1,
unicode-collation ==0.1.3.3,
unicode-data ==0.4.0.1,
unicode-transforms ==0.4.0.1,
uniplate ==1.6.13,
unix ==2.7.2.2,
unix-compat ==0.6,
unix-time ==0.4.8,
unliftio ==0.2.23.0,
unliftio-core ==0.2.0.1,
unordered-containers ==0.2.19.1,
utf8-string ==1.0.2,
uuid-types ==1.0.5,
vault ==0.3.1.5,
vector ==0.13.0.0,
vector-algorithms ==0.9.0.1,
vector-stream ==0.1.0.0,
wai ==3.2.3,
wai-app-static ==3.1.7.4,
wai-extra ==3.1.13.0,
wai-logger ==2.4.0,
warp ==3.3.23,
witherable ==0.4.2,
word8 ==0.1.3,
x509 ==1.7.7,
x509-store ==1.6.9,
x509-system ==1.6.7,
x509-validation ==1.6.12,
xml ==1.3.14,
xml-conduit ==1.9.1.1,
xml-types ==0.3.8,
yaml ==0.11.8.0,
zip-archive ==0.4.2.2,
zlib ==0.6.3.0

View file

@ -0,0 +1,13 @@
diff --git a/test/writer.ms b/test/writer.ms
index 9df9083..836d7a2 100644
--- a/test/writer.ms
+++ b/test/writer.ms
@@ -700,7 +700,7 @@ LaTeX
.IP \[bu] 3
Here\[cq]s some display math:
.EQ
-d over {d x} f left ( x right ) = lim sub {h -> 0} {f left ( x + h right ) \[u2212] f left ( x right )} over h
+d over {d x} f left ( x right ) = lim sub {h -> 0} {f left ( x + h right ) - f left ( x right )} over h
.EN
.IP \[bu] 3
Here\[cq]s one that has a line break in it: @alpha + omega times x sup 2@.

View file

@ -0,0 +1,551 @@
# Maintainer: Jakub Jirutka <jakub@jirutka.cz>
# Contributor: G.J.R. Timmer <gjr.timmer@gmail.com>
# Contributor: Jakub Jirutka <jakub@jirutka.cz>
_pkgname=postgresql
pkgver=15.3
pkgrel=1
_majorver=${pkgver%%[_.]*}
# Should this aport provide libpq* and libecpg*? true/false
# Exactly one postgresql aport must be the default one!
_default_ver=true
pkgname=$_pkgname$_majorver
pkgdesc="A sophisticated object-relational DBMS, version $_majorver"
url="https://www.postgresql.org/"
arch="all"
license="PostgreSQL"
_llvmver=13
depends="$pkgname-client postgresql-common tzdata"
depends_dev="
libpq-dev
libecpg-dev
clang
icu-dev
llvm$_llvmver
lz4-dev
openssl-dev
zstd-dev
"
checkdepends="
diffutils
icu-data-full
perl-ipc-run
"
makedepends="$depends_dev
bison
flex
libxml2-dev
linux-headers
llvm$_llvmver-dev
openldap-dev
perl-dev
python3-dev
readline-dev
tcl-dev
util-linux-dev
zlib-dev
"
pkgusers="postgres"
pkggroups="postgres"
install="$pkgname.post-install $pkgname.pre-deinstall"
provider_priority=$_majorver
provides="postgresql"
replaces="postgresql" # for backward compatibility
$_default_ver && subpackages="
libpq
libpq-dev:libpq_dev
libecpg
libecpg-dev:libecpg_dev
"
subpackages="
$subpackages
$pkgname-client
$pkgname-jit
$pkgname-contrib
$pkgname-plperl
$pkgname-plperl-contrib:plperl_contrib
$pkgname-plpython3
$pkgname-plpython3-contrib:plpython3_contrib
$pkgname-pltcl
$pkgname-contrib-jit:contrib_jit
$pkgname-dev
$pkgname-doc
$pkgname-openrc
"
source="https://ftp.postgresql.org/pub/source/v$pkgver/postgresql-$pkgver.tar.bz2
initdb.patch
perl-rpath.patch
per-version-dirs.patch
unix_socket_directories.patch
disable-html-docs.patch
remove-libecpg_compat.patch
czech-snowball-stemmer.patch
make-split-headers.patch
jit-datalayout-mismatch-on-s390x-and-x86.patch
pg_config-add-major-version.patch
dont-use-locale-a-on-musl.patch
icu-collations-hack.patch
libpgport-pkglibdir.patch.txt
external-libpq.patch.txt
pltcl_create_tables.sql
"
builddir="$srcdir/$_pkgname-$pkgver"
# FIXME: https://gitlab.alpinelinux.org/alpine/aports/-/issues/14359
options="net !check"
# secfixes:
# 15.3-r0:
# - CVE-2023-2454
# - CVE-2023-2455
# 15.2-r0:
# - CVE-2022-41862
# 14.5-r0:
# - CVE-2022-2625
# 14.3-r0:
# - CVE-2022-1552
# 14.1-r0:
# - CVE-2021-23214
# - CVE-2021-23222
# 13.4-r0:
# - CVE-2021-3677
# 13.3-r0:
# - CVE-2021-32027
# - CVE-2021-32028
# - CVE-2021-32029
# 13.2-r0:
# - CVE-2021-3393
# - CVE-2021-20229
# 12.5-r0:
# - CVE-2020-25694
# - CVE-2020-25695
# - CVE-2020-25696
# 12.4-r0:
# - CVE-2020-14349
# - CVE-2020-14350
# 12.2-r0:
# - CVE-2020-1720
# 11.5-r0:
# - CVE-2019-10208
# - CVE-2019-10209
# 11.4-r0:
# - CVE-2019-10164
# 11.3-r0:
# - CVE-2019-10129
# - CVE-2019-10130
# 11.1-r0:
# - CVE-2018-16850
# 10.5-r0:
# - CVE-2018-10915
# - CVE-2018-10925
# 10.4-r0:
# - CVE-2018-1115
# 10.3-r0:
# - CVE-2018-1058
# 10.2-r0:
# - CVE-2018-1052
# - CVE-2018-1053
# 10.1-r0:
# - CVE-2017-15098
# - CVE-2017-15099
# 9.6.4-r0:
# - CVE-2017-7546
# - CVE-2017-7547
# - CVE-2017-7548
# 9.6.3-r0:
# - CVE-2017-7484
# - CVE-2017-7485
# - CVE-2017-7486
_bindir=usr/libexec/$pkgname
_datadir=usr/share/$pkgname
_docdir=usr/share/doc/$pkgname
_mandir=$_datadir/man
_includedir=usr/include/postgresql
# Directory for server-related libraries. This is hard-coded in
# per-version-dirs.patch.
_srvlibdir=usr/lib/$pkgname
# Programs to be included in the -client subpackage.
# TODO: This was probably originally copied from Debian and I have no idea
# why these are considered as front-end (client) programs and the rest of
# the programs are not. So it should be reviewed.
_client_cmds="
clusterdb
createdb
createuser
dropdb
dropuser
pg_amcheck
pg_basebackup
pg_dump
pg_dumpall
pg_isready
pg_receivewal
pg_recvlogical
pg_restore
pg_verifybackup
pgbench
psql
reindexdb
vacuumdb
"
prepare() {
default_prepare
if $_default_ver; then
cp -rl "$builddir" "$builddir-ifaces"
else
msg 'external-libpq.patch'
patch -p1 < "$srcdir"/external-libpq.patch.txt
fi
# Note: This must be applied after clonning $builddir-ifaces.
patch -p1 < "$srcdir"/libpgport-pkglibdir.patch.txt
}
build() {
export LLVM_CONFIG="/usr/lib/llvm$_llvmver/bin/llvm-config"
export PYTHON=/usr/bin/python3
export CFLAGS="${CFLAGS/-Os/-O2}"
export CPPFLAGS="${CPPFLAGS/-Os/-O2}"
# older clang versions don't have a 'clang' exe anymore.
export CLANG=clang-$_llvmver
_configure --with-ldap
make world
if $_default_ver; then
cd "$builddir-ifaces"
_configure --without-ldap
local dir; for dir in include common port interfaces bin/pg_config; do
make -C src/$dir
done
fi
}
_configure() {
local _extra_opts
# When disable-spinlocks is no longer required - check postgresql-bdr package.
case "$CARCH" in
riscv64) _extra_opts='--disable-spinlocks';;
esac
want_check && _extra_opts="$_extra_opts --enable-tap-tests"
./configure \
--build=$CBUILD \
--host=$CHOST \
--prefix=/usr \
--bindir=/$_bindir \
--datarootdir=/usr/share \
--datadir=/$_datadir \
--docdir=/$_docdir \
--includedir=/$_includedir \
--libdir=/usr/lib \
--mandir=/$_mandir \
--sysconfdir=/etc/postgresql \
--disable-rpath \
--with-system-tzdata=/usr/share/zoneinfo \
--with-libxml \
--with-openssl \
--with-uuid=e2fs \
--with-llvm \
--with-icu \
--with-perl \
--with-python \
--with-tcl \
--with-lz4 \
--with-zstd \
$_extra_opts \
"$@"
}
check() {
_run_tests src/test
_run_tests src/pl
_run_tests contrib
}
package() {
make DESTDIR="$pkgdir" install install-docs
if $_default_ver; then
cd "$builddir-ifaces"
# Override libpq and libecpg files with the build without LDAP support.
local dir; for dir in common port interfaces bin/pg_config; do
make -C src/$dir DESTDIR="$pkgdir" bindir=/usr/bin install
done
make -C src/include DESTDIR="$pkgdir" install-interfaces
fi
cd "$pkgdir"
# Duplicate of usr/bin/ecpg.
rm -f ./$_bindir/ecpg
mkdir -p ./usr/bin
ln -s /$_bindir/postgres ./usr/bin/postgres$_majorver
# This file is used by pg_versions and init script.
echo "$_majorver" > ./$_bindir/PG_VERSION
install -d -m750 -o postgres -g postgres \
./etc/postgresql$_majorver \
./var/lib/postgresql \
./var/log/postgresql
local server_cmds=$(_setdiff "$(ls -1 $_bindir)" "$_client_cmds pg_config ecpg PG_VERSION")
[ "$server_cmds" ] || die 'package: variable server_cmds is empty'
# These commands are symlinked to /usr/bin by pg_versions script after
# installation.
provides="$provides $(echo "$server_cmds" | sed 's/^/cmd:&/')"
}
libpq() {
pkgdesc="PostgreSQL client library"
depends=""
replaces=""
amove usr/lib/libpq.so.*
}
libpq_dev() {
pkgdesc="PostgreSQL client library (development files)"
depends=""
replaces=""
amove usr/bin/pg_config
amove $_includedir/internal/*
amove $_includedir/libpq-*.h
amove $_includedir/libpq/*
amove $_includedir/pg_config*.h
amove $_includedir/postgres_ext.h
amove usr/lib/libpq.*
amove usr/lib/libpgcommon*.a
amove usr/lib/libpgport*.a
amove usr/lib/pkgconfig/libpq.pc
}
libecpg() {
pkgdesc="ECPG - Embedded SQL in C"
depends=""
provides="postgresql-libs" # for backward compatibility (Alpine <3.15)
replaces="$provides" # for backward compatibility (Alpine <3.15)
amove usr/lib/libecpg.so.*
amove usr/lib/libpgtypes.so.*
}
libecpg_dev() {
pkgdesc="ECPG - Embedded SQL in C (development files)"
depends="libpq-dev=$pkgver-r$pkgrel"
replaces=""
amove usr/bin/ecpg
amove $_includedir/ecpg*.h
amove $_includedir/informix/*
amove $_includedir/pgtypes*.h
amove $_includedir/sql3types.h
amove $_includedir/sqlca.h
amove $_includedir/sqlda*.h
amove usr/lib/libecpg.*
amove usr/lib/libpgtypes.*
amove usr/lib/pkgconfig/libecpg.pc
amove usr/lib/pkgconfig/libpgtypes.pc
}
client() {
pkgdesc="PostgreSQL client"
depends="postgresql-common"
_subpkg_common
local cmd; for cmd in $_client_cmds; do
amove $_bindir/$cmd
# These commands are symlinked to /usr/bin by pg_versions script after
# installation.
provides="$provides cmd:$cmd"
done
amove $_bindir/PG_VERSION
}
jit() {
pkgdesc="Just-in-time compilation support for PostgreSQL"
depends="$pkgname=$pkgver-r$pkgrel"
_subpkg_common
amove $_srvlibdir/bitcode/*
amove $_srvlibdir/llvmjit.so
amove $_srvlibdir/llvmjit_types.bc
}
contrib() {
pkgdesc="Extension modules distributed with PostgreSQL"
depends="$pkgname=$pkgver-r$pkgrel"
_subpkg_common
cd "$builddir"
# Avoid installing plperl and plpython extensions, these will be
# installed into separate subpackages.
sed -Ei -e 's/(.*_plperl)/#\1/' \
-e 's/(.*_plpython)/#\1/' \
contrib/Makefile
make -C contrib DESTDIR="$subpkgdir" install
_contrib_common
provides="$provides $(ls -1 "$subpkgdir"/$_bindir | sed 's/^/cmd:&/')"
}
pltcl() {
pkgdesc="PL/Tcl procedural language for PostgreSQL"
depends="$pkgname=$pkgver-r$pkgrel pgtcl"
_subpkg_common
amove $_srvlibdir/pltcl.so
amove $_datadir/extension/pltcl*
install -m 644 "$srcdir"/pltcl_create_tables.sql -t "$subpkgdir"/$_datadir/
}
plperl() {
pkgdesc="PL/Perl procedural language for PostgreSQL"
depends="$pkgname=$pkgver-r$pkgrel"
_subpkg_common
amove $_srvlibdir/plperl.so
amove $_datadir/extension/plperl*
}
plperl_contrib() {
_plcontrib plperl "PL/Perl"
cd "$builddir"
make -C contrib/hstore_plperl DESTDIR="$subpkgdir" install
_contrib_common
}
plpython3() {
pkgdesc="PL/Python3 procedural language for PostgreSQL"
depends="$pkgname=$pkgver-r$pkgrel python3"
_subpkg_common
amove $_srvlibdir/plpython3.so
amove $_datadir/extension/plpython*
}
plpython3_contrib() {
_plcontrib plpython3 "PL/Python 3"
cd "$builddir"
make -C contrib/hstore_plpython DESTDIR="$subpkgdir" install
make -C contrib/ltree_plpython DESTDIR="$subpkgdir" install
_contrib_common
}
contrib_jit() {
pkgdesc="Extension modules distributed with PostgreSQL (JIT support)"
depends="$pkgname-contrib=$pkgver-r$pkgrel"
install_if="$pkgname-jit $pkgname-contrib=$pkgver-r$pkgrel"
_subpkg_common
amove $_srvlibdir/bitcode/*
}
dev() {
default_dev
_subpkg_common
replaces=""
amove $_srvlibdir/pgxs/*
}
doc() {
default_doc
_subpkg_common
amove $_mandir
}
openrc() {
default_openrc
depends="postgresql-common-openrc"
mkdir -p "$subpkgdir"
}
_plcontrib() {
local subname="$1"
pkgdesc="$2 extension modules distributed with PostgreSQL"
depends="$pkgname-$subname=$pkgver-r$pkgrel"
install_if="$pkgname-$subname=$pkgver-r$pkgrel $pkgname-contrib=$pkgver-r$pkgrel"
_subpkg_common
}
_subpkg_common() {
provides="postgresql${subpkgname#$pkgname}"
replaces="$provides" # for backward compatibility
}
_contrib_common() {
# Move headers, bitcode and docs from subpackage back to pkgdir, so it
# can be catched by subsequent split functions.
local dir; for dir in $_includedir $_srvlibdir/bitcode $_docdir; do
[ -d "$subpkgdir"/$dir ] || continue
mkdir -p "$pkgdir"/$dir
cp -rf "$subpkgdir"/$dir/* "$pkgdir"/$dir/
rm -rf "$subpkgdir"/$dir/*
rmdir -p "$subpkgdir"/$dir || true
done
}
_run_tests() {
local path="$1"; shift
msg "Running test suite at $path..."
# Note: some tests fail when running in parallel.
make -k -j 1 -C "$path" "$@" check MAX_CONNECTIONS=5 || {
printf "\n%s\n\n" "Trying to find all regression.diffs files in build directory..." >&2
find "$path" -name regression.diffs | while read -r file; do
echo "=== test failure: $file ===" >&2
cat "$file" >&2
done
return 1
}
}
# $1: whitespace-separated items of set A
# $2: whitespace-separated items of set B
# stdout: newline-separated items of A - B
_setdiff() {
python3 -c 'import sys;print("\n".join(set(sys.argv[1].split()).difference(set(sys.argv[2].split()))))' "$@"
}
sha512sums="
cac97edeb40df1e8f2162f401b465751132929d7249495ef001e950645a2db46343bd732e7bd6504a7f795e25aea66724f2f4ab0065e3d9331b36db4b3a3bec6 postgresql-15.3.tar.bz2
73080f61ae274a214966d2d010de49c8c90eb3180e4f56d86a9e23063eaddbe7f8d2c40ed414591c16bc9cfe88577866af2a965c2b6effb54965d19ef96a3fa9 initdb.patch
27e00b58fe5c3899c66fc0dde51846c14701bcfedd132b106d676783ba603e8cbdc6e620f29b52dc892bdaa9302052788cf5e575a1659f61c017a12e0d2ee4d0 perl-rpath.patch
413e979745f81a5b95b7ad9ea44452217d095e3ee6b4a82b636a7b3c89093fefbe7b8d210e4f6bfdab1d6f81309133719e90b7eab93ba60ef2bb78308c52b3b9 per-version-dirs.patch
d0040599d7646b709af4e3413bf0c228090f256395e806f3fee230ccc6037ed9c0df1cd160d01b6a73b25a742fdd3fb070733a14a78f53d297fbdbbb44f6f878 unix_socket_directories.patch
2e33e1ae38d60e0daf3ed18e6eaa9ddf6762b4b1bb7e51f1d2690e3df9d602aa1a700e603ba3ee69314a75a963131c7dc67c1b1f8b7eb5564e9c4253e81a4db4 disable-html-docs.patch
d8eb4274a54b94bed4a2ded7ae775c5a95ca0f051b831b859ccf78bf6d2ea6fe89a9a0611771f6ad85573995a7e3af1fdf5859e20cae3267a52239f12e1b61c3 remove-libecpg_compat.patch
673707b331cd09c105d18f39e83ec39371886d5d8cf706b6ebbd30ace294fa3377c391bea1ab409c001644c90ac718e3250cbc4acd9224bc0e7ac1f86c832424 czech-snowball-stemmer.patch
5262f4944844bccc839c4441570b9eb2e0792390234ebfdb8ebb7b83380ce5f5de84b038cb03045526da202a185af9c3972c2ae1b9e0e743a95c6e84f4621cf9 make-split-headers.patch
c24986becfd73546a062612335e53b4da34829e42a0c30e85ed7481924f13b1fd0a0e5079d680905a52afea1e99f2210cc3692c529a34a74cd6961c88cce9163 jit-datalayout-mismatch-on-s390x-and-x86.patch
7790e4e4374f7bdc6b4484ba87a5fa709d30d3cbdce61ee7bf9c5dfce40cb51c7bd54ab42f4050fb48eede08ef573624d819128e57cc8c976e01202854740308 pg_config-add-major-version.patch
b0688d66fdd7d612c24d9aa69bdd80d30787d2d6409b4524c79b41797144fc743213460e6de9c536bfb72da089f92cf89731f15137b1407fd04ca97fd393bfd2 dont-use-locale-a-on-musl.patch
a98e5ecdd421f68f11b55bf07bf60280cf80508f89c5d21a084c7e7aa3be0a22c88b8e2ce611a13dd5c11efdd052eb9018271b549040c30663b9fd9b7c4fc611 icu-collations-hack.patch
f8ed2b7b96fd22cd87c982151e659d82bcae10033a97f403f7847fce6daa8fc580e998cfb3813af9cb59a12f0c6bcc276397c28b1fc48321eed8c7ba5f3f92ed libpgport-pkglibdir.patch.txt
6078defb3da67e7df96665cc130d32b69eebfcaf49d92eef368ea8eea8bb311fab56064c104bc97f53da9cd925301bef696b506af33e0b66d65bc6cd41ec7499 external-libpq.patch.txt
5c9bfd9e295dcf678298bf0aa974347a7c311d6e7c2aa76a6920fcb751d01fd1ab77abbec11f3c672f927ad9deaa88e04e370c0b5cd1b60087554c474b748731 pltcl_create_tables.sql
"

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,38 @@
From: Jakub Jirutka <jakub@jirutka.cz>
Date: Thu, 28 Oct 2021 01:27:53 +0200
Subject: [PATCH] Disable HTML docs, install only man pages
HTML docs are big and they are available only anyway.
--- a/doc/src/sgml/Makefile
+++ b/doc/src/sgml/Makefile
@@ -26,9 +26,9 @@
include $(top_builddir)/src/Makefile.global
-all: html man
+all: man
-distprep: html distprep-man
+distprep: distprep-man
ifndef DBTOEPUB
@@ -220,14 +220,14 @@
## Install
##
-install: install-html install-man
+install: install-man
installdirs:
- $(MKDIR_P) '$(DESTDIR)$(htmldir)'/html $(addprefix '$(DESTDIR)$(mandir)'/man, 1 3 $(sqlmansectnum))
+ $(MKDIR_P) $(addprefix '$(DESTDIR)$(mandir)'/man, 1 3 $(sqlmansectnum))
# If the install used a man directory shared with other applications, this will remove all files.
uninstall:
- rm -f '$(DESTDIR)$(htmldir)/html/'* $(addprefix '$(DESTDIR)$(mandir)'/man, 1/* 3/* $(sqlmansectnum)/*)
+ rm -f $(addprefix '$(DESTDIR)$(mandir)'/man, 1/* 3/* $(sqlmansectnum)/*)
## Install html

View file

@ -0,0 +1,31 @@
From: Jakub Jirutka <jakub@jirutka.cz>
Date: Wed, 03 Aug 2022 20:40:33 +0200
Subject: [PATCH] Don't generate collations based on locale(1)
When the PostgreSQL cluster is initialized (using initdb(1)) or the
DB administrator calls `pg_import_system_collations()` directly, this
function creates COLLATIONs in the system catalog (pg_collations).
If the locale(1) command is available, this function creates COLLATIONs
based on the `locale -a` output.
The locale(1) command is normally not available on Alpine Linux, so it
does nothing and only the default and ICU-based COLLATIONs are created.
However, there's a musl-locales package that provides locale(1), but it
doesn't implement any collations. This package just provides locale
translations. So if the user happens to have locale(1) installed and they
initialize the cluster or call `pg_import_system_collations()`, they end
up with dozens of libc-based COLLATIONs in the system catalog that
actually do not work! They will all behave like "C", because musl libc
doesn't implement locales.
--- a/src/backend/commands/collationcmds.c
+++ b/src/backend/commands/collationcmds.c
@@ -401,7 +401,7 @@
/* will we use "locale -a" in pg_import_system_collations? */
-#if defined(HAVE_LOCALE_T) && !defined(WIN32)
+#if defined(HAVE_LOCALE_T) && !defined(WIN32) && defined(__GLIBC__) // XXX-Patched
#define READ_LOCALE_A_OUTPUT
#endif

View file

@ -0,0 +1,41 @@
Patch-Source: https://src.fedoraproject.org/rpms/postgresql/blob/f35/f/postgresql-external-libpq.patch
This patch is applied only when building non-default postgresql aport, i.e.
we want to link against libpq provided by the default postgresql aport.
diff --git a/src/Makefile b/src/Makefile
index bcdbd95..4bea236 100644
--- a/src/Makefile
+++ b/src/Makefile
@@ -20,7 +20,6 @@ SUBDIRS = \
backend/utils/mb/conversion_procs \
backend/snowball \
include \
- interfaces \
backend/replication/libpqwalreceiver \
backend/replication/pgoutput \
fe_utils \
diff --git a/src/Makefile.global.in b/src/Makefile.global.in
index b9d86ac..29df69f 100644
--- a/src/Makefile.global.in
+++ b/src/Makefile.global.in
@@ -549,7 +549,7 @@ endif
# How to link to libpq. (This macro may be used as-is by backend extensions.
# Client-side code should go through libpq_pgport or libpq_pgport_shlib,
# instead.)
-libpq = -L$(libpq_builddir) -lpq
+libpq = -lpq
# libpq_pgport is for use by client executables (not libraries) that use libpq.
# We force clients to pull symbols from the non-shared libraries libpgport
@@ -579,7 +579,6 @@ endif
# Commonly used submake targets
submake-libpq: | submake-generated-headers
- $(MAKE) -C $(libpq_builddir) all
submake-libpgport: | submake-generated-headers
$(MAKE) -C $(top_builddir)/src/port all
--
2.21.0

View file

@ -0,0 +1,893 @@
From: Jakub Jirutka <jakub@jirutka.cz>
Date: Wed, 03 Aug 2022 20:40:33 +0200
Subject: [PATCH] Hack to generate usable ICU-based collations with
icu-data-en
This is a downstream patch for Alpine Linux, it should never be
upstreamed in this form!
When the PostgreSQL cluster is initialized (using initdb(1)) or the
DB administrator calls `pg_import_system_collations()` directly, this
function creates COLLATIONs in the system catalog (pg_collations).
There are two types: libc-based and ICU-based. The latter are created
based on *locales* (not collations) known to ICU, i.e. based on the ICU
data installed at the time.
collationcmds.c includes the following comment:
> We use uloc_countAvailable()/uloc_getAvailable() rather than
> ucol_countAvailable()/ucol_getAvailable(). The former returns a full
> set of language+region combinations, whereas the latter only returns
> language+region combinations if they are distinct from the language's
> base collation. So there might not be a de-DE or en-GB, which would be
> confusing.
There's a problem with this approach: locales and collations are two
different things. ICU data may include collation algorithms and data for
all or some languages, but not locales (language + country/region).
The collation data is small compared to locales. There are ~800 locales
(combinations of language, country and variants), but only 98 collations.
There's a mapping between collations and locales hidden somewhere in ICU
data.
Since full ICU data is very big (30 MiB), we have created a stripped down
variant with only English locale (package icu-data-en, 2.6 MiB). It also
includes a subset of 18 collations that cover hundreds of languages.
When the cluster is initialized or `pg_import_system_collations()` is
called directly and only icu-data-en (default) is installed, the user
ends up with only und, en and en_GB ICU-based COLLATIONs. The user can
create missing COLLATIONs manually, but this a) is not expected nor
reasonable behaviour, b) it's not easy to find out for which locales
there's a collation available for.
I couldn't find any way how to list all language+country variants for the
given collation. It can be constructed when we iterate over all locales,
but this approach is useless when we don't have the locale data
available... I should also note that the reverse lookup (locale ->
collation) is not a problem for ICU when full locale data is stripped.
So I ended up with a very ugly workaround: pre-generating a list of
collation -> locale mapping and embedding it in the collationcmds.c
source. Then we replace `uloc_countAvailable()`/`uloc_getAvailable()`
with `ucol_countAvailable()` / `ucol_getAvailable()` to iterate over
the collations instead of locales and lookup the locales in the
pre-generated list.
This data is quite stable, there's a very low risk of getting outdated in
a way that would be a problem.
`icu_coll_locales` has been generated using the following code:
#include <stdio.h>
#include <string.h>
#include <unicode/ucol.h>
// Copy-pasted from collationcmds.c.
static char *get_icu_language_tag(const char *localename) {
char buf[ULOC_FULLNAME_CAPACITY];
UErrorCode status = U_ZERO_ERROR;
uloc_toLanguageTag(localename, buf, sizeof(buf), true, &status);
if (U_FAILURE(status)) {
fprintf(stderr, "could not convert locale name \"%s\" to language tag: %s\n",
localename, u_errorName(status));
return strdup(localename);
}
return strdup(buf);
}
int main() {
UErrorCode status = U_ZERO_ERROR;
for (int i = 0; i < uloc_countAvailable(); i++) {
const char *locale = uloc_getAvailable(i);
UCollator *collator = ucol_open(locale, &status);
const char *actual_locale = ucol_getLocaleByType(collator, ULOC_ACTUAL_LOCALE, &status);
// Strip @.*
char *ptr = strchr(actual_locale, '@');
if (ptr != NULL) {
*ptr = '\0';
}
if (strcmp(actual_locale, "root") == 0) {
actual_locale = "";
}
if (strcmp(actual_locale, locale) != 0) {
printf("\"%s\", \"%s\",\n", actual_locale, get_icu_language_tag(locale));
}
ucol_close(collator);
}
return 0;
}
compiled and executed using:
gcc -o main main.c $(pkg-config --libs icu-uc icu-io) && ./main | sort | uniq
--- a/src/backend/commands/collationcmds.c
+++ b/src/backend/commands/collationcmds.c
@@ -572,6 +572,715 @@
return result;
}
+
+/*
+ * XXX-Patched: Added a static mapping: collation name (parent) to locale (children)
+ * I'm gonna burn in hell for this...
+ */
+static char* icu_coll_locales[] = {
+ "", "agq",
+ "", "agq-CM",
+ "", "ak",
+ "", "ak-GH",
+ "", "asa",
+ "", "asa-TZ",
+ "", "ast",
+ "", "ast-ES",
+ "", "bas",
+ "", "bas-CM",
+ "", "bem",
+ "", "bem-ZM",
+ "", "bez",
+ "", "bez-TZ",
+ "", "bm",
+ "", "bm-ML",
+ "", "brx",
+ "", "brx-IN",
+ "", "ca",
+ "", "ca-AD",
+ "", "ca-ES",
+ "", "ca-FR",
+ "", "ca-IT",
+ "", "ccp",
+ "", "ccp-BD",
+ "", "ccp-IN",
+ "", "ce",
+ "", "ce-RU",
+ "", "cgg",
+ "", "cgg-UG",
+ "", "ckb",
+ "", "ckb-IQ",
+ "", "ckb-IR",
+ "", "dav",
+ "", "dav-KE",
+ "", "de",
+ "", "de-AT",
+ "", "de-BE",
+ "", "de-CH",
+ "", "de-DE",
+ "", "de-IT",
+ "", "de-LI",
+ "", "de-LU",
+ "", "dje",
+ "", "dje-NE",
+ "", "doi",
+ "", "doi-IN",
+ "", "dua",
+ "", "dua-CM",
+ "", "dyo",
+ "", "dyo-SN",
+ "", "dz",
+ "", "dz-BT",
+ "", "ebu",
+ "", "ebu-KE",
+ "", "en",
+ "", "en-001",
+ "", "en-150",
+ "", "en-AE",
+ "", "en-AG",
+ "", "en-AI",
+ "", "en-AS",
+ "", "en-AT",
+ "", "en-AU",
+ "", "en-BB",
+ "", "en-BE",
+ "", "en-BI",
+ "", "en-BM",
+ "", "en-BS",
+ "", "en-BW",
+ "", "en-BZ",
+ "", "en-CA",
+ "", "en-CC",
+ "", "en-CH",
+ "", "en-CK",
+ "", "en-CM",
+ "", "en-CX",
+ "", "en-CY",
+ "", "en-DE",
+ "", "en-DG",
+ "", "en-DK",
+ "", "en-DM",
+ "", "en-ER",
+ "", "en-FI",
+ "", "en-FJ",
+ "", "en-FK",
+ "", "en-FM",
+ "", "en-GB",
+ "", "en-GD",
+ "", "en-GG",
+ "", "en-GH",
+ "", "en-GI",
+ "", "en-GM",
+ "", "en-GU",
+ "", "en-GY",
+ "", "en-HK",
+ "", "en-IE",
+ "", "en-IL",
+ "", "en-IM",
+ "", "en-IN",
+ "", "en-IO",
+ "", "en-JE",
+ "", "en-JM",
+ "", "en-KE",
+ "", "en-KI",
+ "", "en-KN",
+ "", "en-KY",
+ "", "en-LC",
+ "", "en-LR",
+ "", "en-LS",
+ "", "en-MG",
+ "", "en-MH",
+ "", "en-MO",
+ "", "en-MP",
+ "", "en-MS",
+ "", "en-MT",
+ "", "en-MU",
+ "", "en-MV",
+ "", "en-MW",
+ "", "en-MY",
+ "", "en-NA",
+ "", "en-NF",
+ "", "en-NG",
+ "", "en-NL",
+ "", "en-NR",
+ "", "en-NU",
+ "", "en-NZ",
+ "", "en-PG",
+ "", "en-PH",
+ "", "en-PK",
+ "", "en-PN",
+ "", "en-PR",
+ "", "en-PW",
+ "", "en-RW",
+ "", "en-SB",
+ "", "en-SC",
+ "", "en-SD",
+ "", "en-SE",
+ "", "en-SG",
+ "", "en-SH",
+ "", "en-SI",
+ "", "en-SL",
+ "", "en-SS",
+ "", "en-SX",
+ "", "en-SZ",
+ "", "en-TC",
+ "", "en-TK",
+ "", "en-TO",
+ "", "en-TT",
+ "", "en-TV",
+ "", "en-TZ",
+ "", "en-UG",
+ "", "en-UM",
+ "", "en-US",
+ "", "en-VC",
+ "", "en-VG",
+ "", "en-VI",
+ "", "en-VU",
+ "", "en-WS",
+ "", "en-ZA",
+ "", "en-ZM",
+ "", "en-ZW",
+ "", "eu",
+ "", "eu-ES",
+ "", "ewo",
+ "", "ewo-CM",
+ "", "ff",
+ "", "ff-Latn",
+ "", "ff-Latn-BF",
+ "", "ff-Latn-CM",
+ "", "ff-Latn-GH",
+ "", "ff-Latn-GM",
+ "", "ff-Latn-GN",
+ "", "ff-Latn-GW",
+ "", "ff-Latn-LR",
+ "", "ff-Latn-MR",
+ "", "ff-Latn-NE",
+ "", "ff-Latn-NG",
+ "", "ff-Latn-SL",
+ "", "ff-Latn-SN",
+ "", "fr",
+ "", "fr-BE",
+ "", "fr-BF",
+ "", "fr-BI",
+ "", "fr-BJ",
+ "", "fr-BL",
+ "", "fr-CD",
+ "", "fr-CF",
+ "", "fr-CG",
+ "", "fr-CH",
+ "", "fr-CI",
+ "", "fr-CM",
+ "", "fr-DJ",
+ "", "fr-DZ",
+ "", "fr-FR",
+ "", "fr-GA",
+ "", "fr-GF",
+ "", "fr-GN",
+ "", "fr-GP",
+ "", "fr-GQ",
+ "", "fr-HT",
+ "", "fr-KM",
+ "", "fr-LU",
+ "", "fr-MA",
+ "", "fr-MC",
+ "", "fr-MF",
+ "", "fr-MG",
+ "", "fr-ML",
+ "", "fr-MQ",
+ "", "fr-MR",
+ "", "fr-MU",
+ "", "fr-NC",
+ "", "fr-NE",
+ "", "fr-PF",
+ "", "fr-PM",
+ "", "fr-RE",
+ "", "fr-RW",
+ "", "fr-SC",
+ "", "fr-SN",
+ "", "fr-SY",
+ "", "fr-TD",
+ "", "fr-TG",
+ "", "fr-TN",
+ "", "fr-VU",
+ "", "fr-WF",
+ "", "fr-YT",
+ "", "fur",
+ "", "fur-IT",
+ "", "fy",
+ "", "fy-NL",
+ "", "ga",
+ "", "ga-GB",
+ "", "ga-IE",
+ "", "gd",
+ "", "gd-GB",
+ "", "gsw",
+ "", "gsw-CH",
+ "", "gsw-FR",
+ "", "gsw-LI",
+ "", "guz",
+ "", "guz-KE",
+ "", "gv",
+ "", "gv-IM",
+ "", "ia",
+ "", "ia-001",
+ "", "id",
+ "", "id-ID",
+ "", "ii",
+ "", "ii-CN",
+ "", "it",
+ "", "it-CH",
+ "", "it-IT",
+ "", "it-SM",
+ "", "it-VA",
+ "", "jgo",
+ "", "jgo-CM",
+ "", "jmc",
+ "", "jmc-TZ",
+ "", "jv",
+ "", "jv-ID",
+ "", "kab",
+ "", "kab-DZ",
+ "", "kam",
+ "", "kam-KE",
+ "", "kde",
+ "", "kde-TZ",
+ "", "kea",
+ "", "kea-CV",
+ "", "kgp",
+ "", "kgp-BR",
+ "", "khq",
+ "", "khq-ML",
+ "", "ki",
+ "", "ki-KE",
+ "", "kkj",
+ "", "kkj-CM",
+ "", "kln",
+ "", "kln-KE",
+ "", "ks",
+ "", "ks-Arab",
+ "", "ks-Arab-IN",
+ "", "ks-Deva",
+ "", "ks-Deva-IN",
+ "", "ksb",
+ "", "ksb-TZ",
+ "", "ksf",
+ "", "ksf-CM",
+ "", "ksh",
+ "", "ksh-DE",
+ "", "kw",
+ "", "kw-GB",
+ "", "lag",
+ "", "lag-TZ",
+ "", "lb",
+ "", "lb-LU",
+ "", "lg",
+ "", "lg-UG",
+ "", "lrc",
+ "", "lrc-IQ",
+ "", "lrc-IR",
+ "", "lu",
+ "", "lu-CD",
+ "", "luo",
+ "", "luo-KE",
+ "", "luy",
+ "", "luy-KE",
+ "", "mai",
+ "", "mai-IN",
+ "", "mas",
+ "", "mas-KE",
+ "", "mas-TZ",
+ "", "mer",
+ "", "mer-KE",
+ "", "mfe",
+ "", "mfe-MU",
+ "", "mg",
+ "", "mg-MG",
+ "", "mgh",
+ "", "mgh-MZ",
+ "", "mgo",
+ "", "mgo-CM",
+ "", "mi",
+ "", "mi-NZ",
+ "", "mni",
+ "", "mni-Beng",
+ "", "mni-Beng-IN",
+ "", "ms",
+ "", "ms-BN",
+ "", "ms-ID",
+ "", "ms-MY",
+ "", "ms-SG",
+ "", "mua",
+ "", "mua-CM",
+ "", "mzn",
+ "", "mzn-IR",
+ "", "naq",
+ "", "naq-NA",
+ "", "nd",
+ "", "nd-ZW",
+ "", "nl",
+ "", "nl-AW",
+ "", "nl-BE",
+ "", "nl-BQ",
+ "", "nl-CW",
+ "", "nl-NL",
+ "", "nl-SR",
+ "", "nl-SX",
+ "", "nmg",
+ "", "nmg-CM",
+ "", "nnh",
+ "", "nnh-CM",
+ "", "nus",
+ "", "nus-SS",
+ "", "nyn",
+ "", "nyn-UG",
+ "", "os",
+ "", "os-GE",
+ "", "os-RU",
+ "", "pcm",
+ "", "pcm-NG",
+ "", "pt",
+ "", "pt-AO",
+ "", "pt-BR",
+ "", "pt-CH",
+ "", "pt-CV",
+ "", "pt-GQ",
+ "", "pt-GW",
+ "", "pt-LU",
+ "", "pt-MO",
+ "", "pt-MZ",
+ "", "pt-PT",
+ "", "pt-ST",
+ "", "pt-TL",
+ "", "qu",
+ "", "qu-BO",
+ "", "qu-EC",
+ "", "qu-PE",
+ "", "rm",
+ "", "rm-CH",
+ "", "rn",
+ "", "rn-BI",
+ "", "rof",
+ "", "rof-TZ",
+ "", "rw",
+ "", "rw-RW",
+ "", "rwk",
+ "", "rwk-TZ",
+ "", "sa",
+ "", "sa-IN",
+ "", "sah",
+ "", "sah-RU",
+ "", "saq",
+ "", "saq-KE",
+ "", "sat",
+ "", "sat-Olck",
+ "", "sat-Olck-IN",
+ "", "sbp",
+ "", "sbp-TZ",
+ "", "sc",
+ "", "sc-IT",
+ "", "sd",
+ "", "sd-Arab",
+ "", "sd-Arab-PK",
+ "", "sd-Deva",
+ "", "sd-Deva-IN",
+ "", "seh",
+ "", "seh-MZ",
+ "", "ses",
+ "", "ses-ML",
+ "", "sg",
+ "", "sg-CF",
+ "", "shi",
+ "", "shi-Latn",
+ "", "shi-Latn-MA",
+ "", "shi-Tfng",
+ "", "shi-Tfng-MA",
+ "", "sn",
+ "", "sn-ZW",
+ "", "so",
+ "", "so-DJ",
+ "", "so-ET",
+ "", "so-KE",
+ "", "so-SO",
+ "", "su",
+ "", "su-Latn",
+ "", "su-Latn-ID",
+ "", "sw",
+ "", "sw-CD",
+ "", "sw-KE",
+ "", "sw-TZ",
+ "", "sw-UG",
+ "", "teo",
+ "", "teo-KE",
+ "", "teo-UG",
+ "", "tg",
+ "", "tg-TJ",
+ "", "ti",
+ "", "ti-ER",
+ "", "ti-ET",
+ "", "tt",
+ "", "tt-RU",
+ "", "twq",
+ "", "twq-NE",
+ "", "tzm",
+ "", "tzm-MA",
+ "", "vai",
+ "", "vai-Latn",
+ "", "vai-Latn-LR",
+ "", "vai-Vaii",
+ "", "vai-Vaii-LR",
+ "", "vun",
+ "", "vun-TZ",
+ "", "wae",
+ "", "wae-CH",
+ "", "xh",
+ "", "xh-ZA",
+ "", "xog",
+ "", "xog-UG",
+ "", "yav",
+ "", "yav-CM",
+ "", "yrl",
+ "", "yrl-BR",
+ "", "yrl-CO",
+ "", "yrl-VE",
+ "", "zgh",
+ "", "zgh-MA",
+ "", "zu",
+ "", "zu-ZA",
+ "af", "af-NA",
+ "af", "af-ZA",
+ "am", "am-ET",
+ "ar", "ar-001",
+ "ar", "ar-AE",
+ "ar", "ar-BH",
+ "ar", "ar-DJ",
+ "ar", "ar-DZ",
+ "ar", "ar-EG",
+ "ar", "ar-EH",
+ "ar", "ar-ER",
+ "ar", "ar-IL",
+ "ar", "ar-IQ",
+ "ar", "ar-JO",
+ "ar", "ar-KM",
+ "ar", "ar-KW",
+ "ar", "ar-LB",
+ "ar", "ar-LY",
+ "ar", "ar-MA",
+ "ar", "ar-MR",
+ "ar", "ar-OM",
+ "ar", "ar-PS",
+ "ar", "ar-QA",
+ "ar", "ar-SA",
+ "ar", "ar-SD",
+ "ar", "ar-SO",
+ "ar", "ar-SS",
+ "ar", "ar-SY",
+ "ar", "ar-TD",
+ "ar", "ar-TN",
+ "ar", "ar-YE",
+ "as", "as-IN",
+ "az", "az-Cyrl",
+ "az", "az-Cyrl-AZ",
+ "az", "az-Latn",
+ "az", "az-Latn-AZ",
+ "be", "be-BY",
+ "bg", "bg-BG",
+ "bn", "bn-BD",
+ "bn", "bn-IN",
+ "bo", "bo-CN",
+ "bo", "bo-IN",
+ "br", "br-FR",
+ "bs", "bs-Latn",
+ "bs", "bs-Latn-BA",
+ "bs_Cyrl", "bs-Cyrl-BA",
+ "ceb", "ceb-PH",
+ "chr", "chr-US",
+ "cs", "cs-CZ",
+ "cy", "cy-GB",
+ "da", "da-DK",
+ "da", "da-GL",
+ "dsb", "dsb-DE",
+ "ee", "ee-GH",
+ "ee", "ee-TG",
+ "el", "el-CY",
+ "el", "el-GR",
+ "eo", "eo-001",
+ "es", "es-419",
+ "es", "es-AR",
+ "es", "es-BO",
+ "es", "es-BR",
+ "es", "es-BZ",
+ "es", "es-CL",
+ "es", "es-CO",
+ "es", "es-CR",
+ "es", "es-CU",
+ "es", "es-DO",
+ "es", "es-EA",
+ "es", "es-EC",
+ "es", "es-ES",
+ "es", "es-GQ",
+ "es", "es-GT",
+ "es", "es-HN",
+ "es", "es-IC",
+ "es", "es-MX",
+ "es", "es-NI",
+ "es", "es-PA",
+ "es", "es-PE",
+ "es", "es-PH",
+ "es", "es-PR",
+ "es", "es-PY",
+ "es", "es-SV",
+ "es", "es-US",
+ "es", "es-UY",
+ "es", "es-VE",
+ "et", "et-EE",
+ "fa", "fa-IR",
+ "ff_Adlm", "ff-Adlm-BF",
+ "ff_Adlm", "ff-Adlm-CM",
+ "ff_Adlm", "ff-Adlm-GH",
+ "ff_Adlm", "ff-Adlm-GM",
+ "ff_Adlm", "ff-Adlm-GN",
+ "ff_Adlm", "ff-Adlm-GW",
+ "ff_Adlm", "ff-Adlm-LR",
+ "ff_Adlm", "ff-Adlm-MR",
+ "ff_Adlm", "ff-Adlm-NE",
+ "ff_Adlm", "ff-Adlm-NG",
+ "ff_Adlm", "ff-Adlm-SL",
+ "ff_Adlm", "ff-Adlm-SN",
+ "fi", "fi-FI",
+ "fil", "fil-PH",
+ "fo", "fo-DK",
+ "fo", "fo-FO",
+ "gl", "gl-ES",
+ "gu", "gu-IN",
+ "ha", "ha-GH",
+ "ha", "ha-NE",
+ "ha", "ha-NG",
+ "haw", "haw-US",
+ "he", "he-IL",
+ "hi", "hi-IN",
+ "hi", "hi-Latn",
+ "hi", "hi-Latn-IN",
+ "hr", "hr-BA",
+ "hr", "hr-HR",
+ "hsb", "hsb-DE",
+ "hu", "hu-HU",
+ "hy", "hy-AM",
+ "ig", "ig-NG",
+ "is", "is-IS",
+ "ja", "ja-JP",
+ "ka", "ka-GE",
+ "kk", "kk-KZ",
+ "kl", "kl-GL",
+ "km", "km-KH",
+ "kn", "kn-IN",
+ "ko", "ko-KP",
+ "ko", "ko-KR",
+ "kok", "kok-IN",
+ "ku", "ku-TR",
+ "ky", "ky-KG",
+ "lkt", "lkt-US",
+ "ln", "ln-AO",
+ "ln", "ln-CD",
+ "ln", "ln-CF",
+ "ln", "ln-CG",
+ "lo", "lo-LA",
+ "lt", "lt-LT",
+ "lv", "lv-LV",
+ "mk", "mk-MK",
+ "ml", "ml-IN",
+ "mn", "mn-MN",
+ "mr", "mr-IN",
+ "mt", "mt-MT",
+ "my", "my-MM",
+ "ne", "ne-IN",
+ "ne", "ne-NP",
+ "no", "nb",
+ "no", "nb-NO",
+ "no", "nb-SJ",
+ "no", "nn",
+ "no", "nn-NO",
+ "om", "om-ET",
+ "om", "om-KE",
+ "or", "or-IN",
+ "pa", "pa-Arab",
+ "pa", "pa-Arab-PK",
+ "pa", "pa-Guru",
+ "pa", "pa-Guru-IN",
+ "pl", "pl-PL",
+ "ps", "ps-AF",
+ "ps", "ps-PK",
+ "ro", "ro-MD",
+ "ro", "ro-RO",
+ "ru", "ru-BY",
+ "ru", "ru-KG",
+ "ru", "ru-KZ",
+ "ru", "ru-MD",
+ "ru", "ru-RU",
+ "ru", "ru-UA",
+ "se", "se-FI",
+ "se", "se-NO",
+ "se", "se-SE",
+ "si", "si-LK",
+ "sk", "sk-SK",
+ "sl", "sl-SI",
+ "smn", "smn-FI",
+ "sq", "sq-AL",
+ "sq", "sq-MK",
+ "sq", "sq-XK",
+ "sr", "sr-Cyrl",
+ "sr", "sr-Cyrl-BA",
+ "sr", "sr-Cyrl-ME",
+ "sr", "sr-Cyrl-RS",
+ "sr", "sr-Cyrl-XK",
+ "sr_Latn", "sr-Latn-BA",
+ "sr_Latn", "sr-Latn-ME",
+ "sr_Latn", "sr-Latn-RS",
+ "sr_Latn", "sr-Latn-XK",
+ "sv", "sv-AX",
+ "sv", "sv-FI",
+ "sv", "sv-SE",
+ "ta", "ta-IN",
+ "ta", "ta-LK",
+ "ta", "ta-MY",
+ "ta", "ta-SG",
+ "te", "te-IN",
+ "th", "th-TH",
+ "tk", "tk-TM",
+ "to", "to-TO",
+ "tr", "tr-CY",
+ "tr", "tr-TR",
+ "ug", "ug-CN",
+ "uk", "uk-UA",
+ "ur", "ur-IN",
+ "ur", "ur-PK",
+ "uz", "uz-Arab",
+ "uz", "uz-Arab-AF",
+ "uz", "uz-Cyrl",
+ "uz", "uz-Cyrl-UZ",
+ "uz", "uz-Latn",
+ "uz", "uz-Latn-UZ",
+ "vi", "vi-VN",
+ "wo", "wo-SN",
+ "yi", "yi-001",
+ "yo", "yo-BJ",
+ "yo", "yo-NG",
+ "zh", "yue",
+ "zh", "yue-Hans",
+ "zh", "yue-Hans-CN",
+ "zh", "yue-Hant",
+ "zh", "yue-Hant-HK",
+ "zh", "zh-Hans",
+ "zh", "zh-Hans-CN",
+ "zh", "zh-Hans-HK",
+ "zh", "zh-Hans-MO",
+ "zh", "zh-Hans-SG",
+ "zh", "zh-Hant",
+ "zh", "zh-Hant-HK",
+ "zh", "zh-Hant-MO",
+ "zh", "zh-Hant-TW",
+ NULL, NULL,
+};
+
#endif /* USE_ICU */
@@ -772,18 +1481,19 @@
* Start the loop at -1 to sneak in the root locale without too much
* code duplication.
*/
- for (i = -1; i < uloc_countAvailable(); i++)
+ for (i = -1; i < ucol_countAvailable(); i++) /* XXX-Patched: changed from uloc_countAvailable() */
{
const char *name;
char *langtag;
char *icucomment;
const char *iculocstr;
Oid collid;
+ char **ptr; /* XXX-Patched: added */
if (i == -1)
name = ""; /* ICU root locale */
else
- name = uloc_getAvailable(i);
+ name = ucol_getAvailable(i); /* XXX-Patched: changed from uloc_getAvailable() */
langtag = get_icu_language_tag(name);
iculocstr = U_ICU_VERSION_MAJOR_NUM >= 54 ? langtag : name;
@@ -812,6 +1523,44 @@
CreateComments(collid, CollationRelationId, 0,
icucomment);
}
+
+ /*
+ * XXX-Patched: The following block is added to create collations also for derived
+ * locales (combination of language+country/region).
+ * It's terribly inefficient, but in the big picture, it doesn't matter that much
+ * (it's typically called only once in the life of the cluster).
+ */
+ for (ptr = icu_coll_locales; *ptr != NULL; ptr++)
+ {
+ /*
+ * icu_coll_locales is a 1D array of pairs: collation name and locale (langtag).
+ * ptr++ moves pointer to the second string of the pair and it's a post-increment,
+ * so after the comparison with name is evaluated.
+ */
+ if (strcmp(*ptr++, name) == 0) {
+ const char *langtag;
+
+ langtag = pstrdup(*ptr);
+ collid = CollationCreate(psprintf("%s-x-icu", langtag),
+ nspid, GetUserId(),
+ COLLPROVIDER_ICU, true, -1,
+ NULL, NULL, langtag,
+ get_collation_actual_version(COLLPROVIDER_ICU, langtag),
+ true, true);
+
+ if (OidIsValid(collid))
+ {
+ ncreated++;
+
+ CommandCounterIncrement();
+
+ icucomment = get_icu_locale_comment(langtag);
+ if (icucomment)
+ CreateComments(collid, CollationRelationId, 0,
+ icucomment);
+ }
+ }
+ }
}
}
#endif /* USE_ICU */

View file

@ -0,0 +1,14 @@
diff --git a/src/bin/initdb/initdb.c b/src/bin/initdb/initdb.c
--- a/src/bin/initdb/initdb.c
+++ b/src/bin/initdb/initdb.c
@@ -3259,9 +3259,7 @@
/* translator: This is a placeholder in a shell command. */
appendPQExpBuffer(start_db_cmd, " -l %s start", _("logfile"));
- printf(_("\nSuccess. You can now start the database server using:\n\n"
- " %s\n\n"),
- start_db_cmd->data);
+ printf(_("\nSuccess.\n\n"));
destroyPQExpBuffer(start_db_cmd);
}

View file

@ -0,0 +1,111 @@
From: Tom Stellard <tstellar@redhat.com>
From: Jakub Jirutka <jakub@jirutka.cz>
Date: Sat, 18 Dec 2021 23:09:03 +0100
Subject: [PATCH] jit: Workaround datalayout mismatch on s390x and x86
This patch is based on https://src.fedoraproject.org/rpms/postgresql/blob/f35/f/postgresql-datalayout-mismatch-on-s390.patch.
Original description:
> LLVM's s390x target uses a different datalayout for z13 and newer processors.
> If llvmjit_types.bc is compiled to target a processor older than z13, and
> then the JIT runs on a z13 or newer processor, then there will be a mismatch
> in datalayouts between llvmjit_types.bc and the JIT engine. This mismatch
> causes the JIT to fail at runtime.
We encountered an analogous problem even on x86 (legacy 32bit arch).
However, I didn't wanna waste my time researching what exact CPU features
are problematic on this dead architecture, so I just disabled usage of any
host specific CPU features when creating the JIT on x86. And while I was on
it, I also conditioned the s390x workaround for s390x only. -jirutka
diff --git a/src/backend/jit/llvm/llvmjit.c b/src/backend/jit/llvm/llvmjit.c
index 98a27f08bf..05b6438ba8 100644
--- a/src/backend/jit/llvm/llvmjit.c
+++ b/src/backend/jit/llvm/llvmjit.c
@@ -776,7 +776,38 @@
errhidecontext(true)));
}
+#if defined(__s390__) || defined(__s390x__)
/*
+ * For the systemz target, LLVM uses a different datalayout for z13 and newer
+ * CPUs than it does for older CPUs. This can cause a mismatch in datalayouts
+ * in the case where the llvm_types_module is compiled with a pre-z13 CPU
+ * and the JIT is running on z13 or newer.
+ * See computeDataLayout() function in
+ * llvm/lib/Target/SystemZ/SystemZTargetMachine.cpp for information on the
+ * datalayout differences.
+ */
+static bool
+needs_systemz_workaround(void)
+{
+ bool ret = false;
+ LLVMContextRef llvm_context;
+ LLVMTypeRef vec_type;
+ LLVMTargetDataRef llvm_layoutref;
+ if (strncmp(LLVMGetTargetName(llvm_targetref), "systemz", strlen("systemz")))
+ {
+ return false;
+ }
+
+ llvm_context = LLVMGetModuleContext(llvm_types_module);
+ vec_type = LLVMVectorType(LLVMIntTypeInContext(llvm_context, 32), 4);
+ llvm_layoutref = LLVMCreateTargetData(llvm_layout);
+ ret = (LLVMABIAlignmentOfType(llvm_layoutref, vec_type) == 16);
+ LLVMDisposeTargetData(llvm_layoutref);
+ return ret;
+}
+#endif
+
+/*
* Per session initialization.
*/
static void
@@ -785,6 +816,7 @@
MemoryContext oldcontext;
char *error = NULL;
char *cpu = NULL;
+ char *host_features = NULL;
char *features = NULL;
LLVMTargetMachineRef opt0_tm;
LLVMTargetMachineRef opt3_tm;
@@ -815,11 +847,22 @@
* latter is needed because some CPU architectures default to enabling
* features not all CPUs have (weird, huh).
*/
+#if !defined(__i386__) && !defined(__i386) // XXX: quick workaround for 32-bit x86
cpu = LLVMGetHostCPUName();
- features = LLVMGetHostCPUFeatures();
+ features = host_features = LLVMGetHostCPUFeatures();
elog(DEBUG2, "LLVMJIT detected CPU \"%s\", with features \"%s\"",
cpu, features);
+#endif
+#if defined(__s390__) || defined(__s390x__)
+ if (needs_systemz_workaround())
+ {
+ const char *no_vector =",-vector";
+ features = malloc(sizeof(char) * (strlen(host_features) + strlen(no_vector) + 1));
+ sprintf(features, "%s%s", host_features, no_vector);
+ }
+#endif
+
opt0_tm =
LLVMCreateTargetMachine(llvm_targetref, llvm_triple, cpu, features,
LLVMCodeGenLevelNone,
@@ -833,8 +876,13 @@
LLVMDisposeMessage(cpu);
cpu = NULL;
- LLVMDisposeMessage(features);
+ if (features != host_features)
+ {
+ free(features);
+ }
features = NULL;
+ LLVMDisposeMessage(host_features);
+ host_features = NULL;
/* force symbols in main binary to be loaded */
LLVMLoadLibraryPermanently(NULL);

View file

@ -0,0 +1,84 @@
Patch-Source: https://sources.debian.org/src/postgresql-14/14.0-1/debian/patches/libpgport-pkglibdir
Author: Christoph Berg <myon@debian.org>
Description: Move libpgport/libpgcommon/libpgfeutils from libdir to pkglibdir
This allows client applications to link to version-specific libraries.
Used by pg-checksums.
--- a/src/common/Makefile
+++ b/src/common/Makefile
@@ -68,15 +68,15 @@ all: libpgcommon.a libpgcommon_shlib.a l
# libpgcommon is needed by some contrib
install: all installdirs
- $(INSTALL_STLIB) libpgcommon.a '$(DESTDIR)$(libdir)/libpgcommon.a'
- $(INSTALL_STLIB) libpgcommon_shlib.a '$(DESTDIR)$(libdir)/libpgcommon_shlib.a'
+ $(INSTALL_STLIB) libpgcommon.a '$(DESTDIR)$(pkglibdir)/libpgcommon.a'
+ $(INSTALL_STLIB) libpgcommon_shlib.a '$(DESTDIR)$(pkglibdir)/libpgcommon_shlib.a'
installdirs:
- $(MKDIR_P) '$(DESTDIR)$(libdir)'
+ $(MKDIR_P) '$(DESTDIR)$(pkglibdir)'
uninstall:
- rm -f '$(DESTDIR)$(libdir)/libpgcommon.a'
- rm -f '$(DESTDIR)$(libdir)/libpgcommon_shlib.a'
+ rm -f '$(DESTDIR)$(pkglibdir)/libpgcommon.a'
+ rm -f '$(DESTDIR)$(pkglibdir)/libpgcommon_shlib.a'
libpgcommon.a: $(OBJS_FRONTEND)
rm -f $@
--- a/src/fe_utils/Makefile
+++ b/src/fe_utils/Makefile
@@ -35,13 +35,13 @@ distprep: psqlscan.c
# libpgfeutils could be useful to contrib, so install it
install: all installdirs
- $(INSTALL_STLIB) libpgfeutils.a '$(DESTDIR)$(libdir)/libpgfeutils.a'
+ $(INSTALL_STLIB) libpgfeutils.a '$(DESTDIR)$(pkglibdir)/libpgfeutils.a'
installdirs:
- $(MKDIR_P) '$(DESTDIR)$(libdir)'
+ $(MKDIR_P) '$(DESTDIR)$(pkglibdir)'
uninstall:
- rm -f '$(DESTDIR)$(libdir)/libpgfeutils.a'
+ rm -f '$(DESTDIR)$(pkglibdir)/libpgfeutils.a'
clean distclean:
rm -f libpgfeutils.a $(OBJS) lex.backup
--- a/src/port/Makefile
+++ b/src/port/Makefile
@@ -54,15 +54,15 @@ all: libpgport.a libpgport_shlib.a libpg
# libpgport is needed by some contrib
install: all installdirs
- $(INSTALL_STLIB) libpgport.a '$(DESTDIR)$(libdir)/libpgport.a'
- $(INSTALL_STLIB) libpgport_shlib.a '$(DESTDIR)$(libdir)/libpgport_shlib.a'
+ $(INSTALL_STLIB) libpgport.a '$(DESTDIR)$(pkglibdir)/libpgport.a'
+ $(INSTALL_STLIB) libpgport_shlib.a '$(DESTDIR)$(pkglibdir)/libpgport_shlib.a'
installdirs:
- $(MKDIR_P) '$(DESTDIR)$(libdir)'
+ $(MKDIR_P) '$(DESTDIR)$(pkglibdir)'
uninstall:
- rm -f '$(DESTDIR)$(libdir)/libpgport.a'
- rm -f '$(DESTDIR)$(libdir)/libpgport_shlib.a'
+ rm -f '$(DESTDIR)$(pkglibdir)/libpgport.a'
+ rm -f '$(DESTDIR)$(pkglibdir)/libpgport_shlib.a'
libpgport.a: $(OBJS)
rm -f $@
--- a/src/Makefile.global.in
+++ b/src/Makefile.global.in
@@ -549,8 +549,8 @@ libpq = -L$(libpq_builddir) -lpq
# on client link lines, since that also appears in $(LIBS).
# libpq_pgport_shlib is the same idea, but for use in client shared libraries.
ifdef PGXS
-libpq_pgport = -L$(libdir) -lpgcommon -lpgport $(libpq)
-libpq_pgport_shlib = -L$(libdir) -lpgcommon_shlib -lpgport_shlib $(libpq)
+libpq_pgport = -L$(pkglibdir) -lpgcommon -lpgport $(libpq)
+libpq_pgport_shlib = -L$(pkglibdir) -lpgcommon_shlib -lpgport_shlib $(libpq)
else
libpq_pgport = -L$(top_builddir)/src/common -lpgcommon -L$(top_builddir)/src/port -lpgport $(libpq)
libpq_pgport_shlib = -L$(top_builddir)/src/common -lpgcommon_shlib -L$(top_builddir)/src/port -lpgport_shlib $(libpq)

View file

@ -0,0 +1,50 @@
From: Jakub Jirutka <jakub@jirutka.cz>
Date: Mon, 08 Nov 2021 14:56:33 +0100
Subject: [PATCH] Allow to install interfaces headers and server headers separately
Don't install headers of the interfaces (libpq and libecpg) by the install
target, allow to install them separately from the headers needed for server.
We need this for two reasons:
a. Allow building PostgreSQL server with LDAP support, but libpq without
dependency on LDAP.
b. Allow building/installing PostgreSQL server without the interfaces.
This is used for non-default postgresql aports (older versions of
PostgreSQL) - we want to provide just one version of libpq and libecpg
library provided by the default (newest) postgresql aport.
--- a/src/include/Makefile
+++ b/src/include/Makefile
@@ -26,8 +26,10 @@
port/win32_msvc/sys port/win32/arpa port/win32/netinet \
port/win32/sys portability
-# Install all headers
-install: all installdirs
+# Install server headers
+install: install-server
+install-interfaces: all
+ $(MKDIR_P) '$(DESTDIR)$(includedir)/libpq' '$(DESTDIR)$(includedir_internal)/libpq'
# These headers are needed by the public headers of the interfaces.
$(INSTALL_DATA) $(srcdir)/postgres_ext.h '$(DESTDIR)$(includedir)'
$(INSTALL_DATA) $(srcdir)/libpq/libpq-fs.h '$(DESTDIR)$(includedir)/libpq'
@@ -40,6 +42,8 @@
$(INSTALL_DATA) $(srcdir)/port.h '$(DESTDIR)$(includedir_internal)'
$(INSTALL_DATA) $(srcdir)/postgres_fe.h '$(DESTDIR)$(includedir_internal)'
$(INSTALL_DATA) $(srcdir)/libpq/pqcomm.h '$(DESTDIR)$(includedir_internal)/libpq'
+install-server: all
+ $(MKDIR_P) $(addprefix '$(DESTDIR)$(includedir_server)'/, $(SUBDIRS))
# These headers are needed for server-side development
$(INSTALL_DATA) pg_config.h '$(DESTDIR)$(includedir_server)'
$(INSTALL_DATA) pg_config_ext.h '$(DESTDIR)$(includedir_server)'
@@ -64,10 +68,6 @@
chmod $(INSTALL_DATA_MODE) *.h || exit; \
done
-installdirs:
- $(MKDIR_P) '$(DESTDIR)$(includedir)/libpq' '$(DESTDIR)$(includedir_internal)/libpq'
- $(MKDIR_P) $(addprefix '$(DESTDIR)$(includedir_server)'/, $(SUBDIRS))
-
uninstall:
rm -f $(addprefix '$(DESTDIR)$(includedir)'/, pg_config.h pg_config_ext.h pg_config_os.h pg_config_manual.h postgres_ext.h libpq/libpq-fs.h)

View file

@ -0,0 +1,69 @@
Author: Martin Pitt <mpitt@debian.org>
Description: Use version specific installation directories so that several major versions can be installed in parallel.
* Install server lib files into /usr/lib/postgresql<version>/
* Install server related header files into /usr/include/postgresql/<version>/server/
* Disable PostgreSQL's automagic path mangling and fix libdir for pg_config,
so that pg_config in /usr/bin and /usr/libexec/postgresql<version> behave
identically.
Bug-Debian: http://bugs.debian.org/462037
Patch-Source: https://sources.debian.org/src/postgresql-14/14.0-1/debian/patches/50-per-version-dirs.patch
--- a/src/Makefile.global.in
+++ b/src/Makefile.global.in
@@ -119,7 +119,7 @@ libdir := @libdir@
pkglibdir = $(libdir)
ifeq "$(findstring pgsql, $(pkglibdir))" ""
ifeq "$(findstring postgres, $(pkglibdir))" ""
-override pkglibdir := $(pkglibdir)/postgresql
+override pkglibdir := /usr/lib/postgresql@PG_MAJORVERSION@
endif
endif
@@ -167,7 +167,7 @@ endif # PGXS
# These derived path variables aren't separately configurable.
-includedir_server = $(pkgincludedir)/server
+includedir_server = $(pkgincludedir)/@PG_MAJORVERSION@/server
includedir_internal = $(pkgincludedir)/internal
pgxsdir = $(pkglibdir)/pgxs
bitcodedir = $(pkglibdir)/bitcode
--- a/src/bin/pg_config/pg_config.c
+++ b/src/bin/pg_config/pg_config.c
@@ -27,6 +27,8 @@
#include "common/config_info.h"
#include "port.h"
+#include "../port/pg_config_paths.h"
+
static const char *progname;
/*
@@ -149,11 +151,7 @@ main(int argc, char **argv)
}
}
- if (find_my_exec(argv[0], my_exec_path) < 0)
- {
- fprintf(stderr, _("%s: could not find own program executable\n"), progname);
- exit(1);
- }
+ snprintf(my_exec_path, sizeof(my_exec_path), "%s/%s", PGBINDIR, progname);
configdata = get_configdata(my_exec_path, &configdata_len);
/* no arguments -> print everything */
--- a/src/test/perl/PostgreSQL/Test/Utils.pm
+++ b/src/test/perl/PostgreSQL/Test/Utils.pm
@@ -688,6 +688,10 @@
chomp($stdout);
$stdout =~ s/\r$//;
+ # Alpine's pg_config is not relocatable, manually check for correct location
+ if (-d "../../../build/tmp_install/usr/include/postgresql") {
+ $stdout = "../../../build/tmp_install/usr/include/postgresql";
+ }
open my $pg_config_h, '<', "$stdout/pg_config.h" or die "$!";
my $match = (grep { /^$regexp/ } <$pg_config_h>);
close $pg_config_h;

View file

@ -0,0 +1,22 @@
We configure Postgres with --disable-rpath because for the most part we
want to leave it to ldconfig to determine where libraries are. However,
for some reason the Perl package puts libperl.so in a nonstandard place
and doesn't add that place to the ldconfig search path. I think this
is a Perl packaging bug, myself, but apparently it's not going to change.
So work around it by adding an rpath spec to plperl.so (only).
Alpine notes:
This patch is copied from Fedora.
--- a/src/pl/plperl/GNUmakefile
+++ b/src/pl/plperl/GNUmakefile
@@ -55,6 +55,9 @@
SHLIB_LINK = $(perl_embed_ldflags)
+# Force rpath to be used even though we disable it everywhere else
+SHLIB_LINK += $(rpath)
+
REGRESS_OPTS = --dbname=$(PL_TESTDB)
REGRESS = plperl_setup plperl plperl_lc plperl_trigger plperl_shared \
plperl_elog plperl_util plperl_init plperlu plperl_array \

View file

@ -0,0 +1,49 @@
From: Jakub Jirutka <jakub@jirutka.cz>
Date: Tue, 09 Nov 2021 00:33:22 +0100
Subject: [PATCH] pg_config: Add new option --major-version
We use this option in aports for PostgreSQL extensions to easily get major
version of the default postgresql.
--- a/src/bin/pg_config/pg_config.c
+++ b/src/bin/pg_config/pg_config.c
@@ -65,6 +65,7 @@
{"--ldflags_ex", "LDFLAGS_EX"},
{"--ldflags_sl", "LDFLAGS_SL"},
{"--libs", "LIBS"},
+ {"--major-version", "MAJOR-VERSION"},
{"--version", "VERSION"},
{NULL, NULL}
};
@@ -101,6 +102,8 @@
printf(_(" --ldflags_ex show LDFLAGS_EX value used when PostgreSQL was built\n"));
printf(_(" --ldflags_sl show LDFLAGS_SL value used when PostgreSQL was built\n"));
printf(_(" --libs show LIBS value used when PostgreSQL was built\n"));
+ printf(_(" --major-version show the PostgreSQL major version number\n"
+ " (Alpine Linux specific option)\n"));
printf(_(" --version show the PostgreSQL version\n"));
printf(_(" -?, --help show this help, then exit\n"));
printf(_("\nWith no arguments, all known items are shown.\n\n"));
--- a/src/common/config_info.c
+++ b/src/common/config_info.c
@@ -38,7 +38,7 @@
int i = 0;
/* Adjust this to match the number of items filled below */
- *configdata_len = 23;
+ *configdata_len = 24;
configdata = (ConfigData *) palloc(*configdata_len * sizeof(ConfigData));
configdata[i].name = pstrdup("BINDIR");
@@ -193,6 +193,11 @@
configdata[i].name = pstrdup("VERSION");
configdata[i].setting = pstrdup("PostgreSQL " PG_VERSION);
+ i++;
+
+ // XXX-Patched: Alpine Linux specific, used in extension aports.
+ configdata[i].name = pstrdup("MAJOR-VERSION");
+ configdata[i].setting = pstrdup(PG_MAJORVERSION);
i++;
Assert(i == *configdata_len);

View file

@ -0,0 +1,13 @@
-- Create tables needed for PL/Tcl autoloading. This script should be run by
-- the database administrator only.
--
-- Statements in this script are extracted from pltcl_loadmod script.
--
-- Author: G.J.R. Timmer
-- Date: 2017-01-28
create table pltcl_modules (modname name, modseq int2, modsrc text);
create index pltcl_modules_i on pltcl_modules using btree (modname name_ops);
create table pltcl_modfuncs (funcname name, modname name);
create index pltcl_modfuncs_i on pltcl_modfuncs using hash (funcname name_ops);

View file

@ -0,0 +1,12 @@
#!/bin/sh
majorver=${1%%.*}
cat >&2 <<EOF
*
* If you want to use JIT in PostgreSQL, install postgresql$majorver-jit or
* postgresql-jit (if you didn't install specific major version of postgresql).
*
EOF
exit 0

View file

@ -0,0 +1,35 @@
#!/bin/sh
pkgver=$1
pkgver_major=${pkgver%%.*}
default_ver=$(pg_versions get-default 2>/dev/null) || true
# If this package is not set as the default PostgreSQL version (see
# pg_versions), let it go.
[ "$default_ver" = "$pkgver_major" ] || exit 0
# If this package ('postgresql<majorver>') has been installed explicitly, i.e.
# not via 'postgresql' provider or as a dependency, the user is apparently
# uninstalling it intentionally, so let it go.
grep -Fqx "postgresql$pkgver_major" /etc/apk/world 2>/dev/null && exit 0
data_dir=$(
. /etc/conf.d/postgresql 2>/dev/null
echo "${data_dir:-"/var/lib/postgresql/$pkgver_major/data"}"
)
# If data_dir for this version does not exist or is empty, let it go.
[ -f "$data_dir"/PG_VERSION ] || exit 0
cat >&2 <<EOF
*
* You are uninstalling your default PostgreSQL version ($default_ver) which seems to be
* in use! If it's *not* intentional and you want to preserve this version,
* install it explicitly: \`apk add postgresql$default_ver\`.
*
* Please note that to upgrade your cluster to a new major version using
* pg_upgrade(1), you must have both the old and new versions installed.
*
EOF
exit 1

View file

@ -0,0 +1,18 @@
Nothing ever depended on libecpg_compat.so.3 in Fedora, so don't build
it now, at least till somebody explicitly requests that.
Patch-Source: https://src.fedoraproject.org/rpms/libecpg/blob/f35/f/libecpg-10.5-no-compat-lib.patch
diff --git a/src/interfaces/ecpg/Makefile b/src/interfaces/ecpg/Makefile
index 41460a1..cc3dd37 100644
--- a/src/interfaces/ecpg/Makefile
+++ b/src/interfaces/ecpg/Makefile
@@ -2,7 +2,7 @@ subdir = src/interfaces/ecpg
top_builddir = ../../..
include $(top_builddir)/src/Makefile.global
-SUBDIRS = include pgtypeslib ecpglib compatlib preproc
+SUBDIRS = include pgtypeslib ecpglib preproc
# Suppress parallel build of subdirectories to avoid a bug in GNU make 3.82, cf
# http://savannah.gnu.org/bugs/?30653

View file

@ -0,0 +1,29 @@
Using /tmp for sockets allows everyone to spoof a PostgreSQL server. Thus use
/run/postgresql/ for "system" clusters which run as 'postgres' (user
clusters will still use /tmp). Since system cluster are by far the common case,
set it as default.
This is inspired by Fedora and Debian patches.
--- a/src/backend/utils/misc/postgresql.conf.sample
+++ b/src/backend/utils/misc/postgresql.conf.sample
@@ -63,7 +63,7 @@
#port = 5432 # (change requires restart)
#max_connections = 100 # (change requires restart)
#superuser_reserved_connections = 3 # (change requires restart)
-#unix_socket_directories = '/tmp' # comma-separated list of directories
+unix_socket_directories = '/run/postgresql' # comma-separated list of directories
# (change requires restart)
#unix_socket_group = '' # (change requires restart)
#unix_socket_permissions = 0777 # begin with 0 to use octal notation
--- a/src/include/pg_config_manual.h
+++ b/src/include/pg_config_manual.h
@@ -201,7 +201,7 @@
* support them yet.
*/
#ifndef WIN32
-#define DEFAULT_PGSOCKET_DIR "/tmp"
+#define DEFAULT_PGSOCKET_DIR "/run/postgresql"
#else
#define DEFAULT_PGSOCKET_DIR ""
#endif

View file

@ -0,0 +1,45 @@
# Contributor: Michał Polański <michal@polanski.me>
# Maintainer: Michał Polański <michal@polanski.me>
pkgname=py3-anyio
pkgver=3.6.2
pkgrel=1
pkgdesc="High level compatibility layer for multiple asynchronous event loop implementations"
url="https://github.com/agronholm/anyio"
license="MIT"
arch="noarch !armhf !ppc64le" # limited by py3-uvloop
depends="python3 py3-idna py3-sniffio"
makedepends="py3-setuptools py3-setuptools_scm"
# change this when 4.x releases and upgrade py3-trio
checkdepends="py3-pytest py3-pytest-mock py3-hypothesis py3-trustme py3-trio<0.22 py3-uvloop"
source="https://github.com/agronholm/anyio/archive/$pkgver/py3-anyio-$pkgver.tar.gz"
builddir="$srcdir/anyio-$pkgver"
case "$CARCH" in
x86*)
# weird dns resolution errors on builders
options="$options !check"
;;
esac
export SETUPTOOLS_SCM_PRETEND_VERSION=$pkgver
build() {
python3 setup.py build
}
check() {
python3 setup.py install --root="$PWD/test_install" --skip-build
# Behavior of getaddrinfo differs between event loop implementations
# on musl-based systems
PYTHONPATH="$(echo $PWD/test_install/usr/lib/python3*/site-packages)" pytest \
--deselect tests/test_sockets.py::test_getaddrinfo_ipv6addr
}
package() {
python3 setup.py install --root="$pkgdir" --skip-build
}
sha512sums="
4a0d3dd11393bd3d7a99d3365825df14d70fa14fa6ddf0e3f9eb9affcde7a9ac1f9e5ba38d6ac9b7f246ba9e7d4bea0dd9c8049f1dc8beadbe6b4b803571fc21 py3-anyio-3.6.2.tar.gz
"

View file

@ -11,6 +11,7 @@ depends="
python3 python3
" "
makedepends=" makedepends="
py3-pip
py3-setuptools py3-setuptools
py3-setuptools_scm py3-setuptools_scm
py3-wheel py3-wheel

View file

@ -0,0 +1,49 @@
# Maintainer: Hoang Nguyen <folliekazetani@protonmail.com>
pkgname=py3-gitlab
_pyname=${pkgname/py3/python}
pkgver=3.12.0
pkgrel=1
pkgdesc="Python wrapper for Gitlab's API"
url="https://github.com/python-gitlab/python-gitlab"
arch="noarch"
license="LGPL-3.0-or-later"
depends="python3 py3-requests py3-requests-toolbelt"
makedepends="
py3-setuptools
py3-gpep517
py3-installer
py3-wheel
py3-sphinx
py3-sphinxcontrib-autoprogram
py3-myst-parser
"
checkdepends="py3-pytest py3-responses"
option="!check"
subpackages="$pkgname-doc"
source="$pkgname-$pkgver.tar.gz::https://github.com/python-gitlab/python-gitlab/archive/refs/tags/v$pkgver.tar.gz"
builddir="$srcdir/$_pyname-$pkgver"
build() {
gpep517 build-wheel \
--wheel-dir dist \
--output-fd 3 3>&1 >&2
sphinx-build -W -b man docs/ man/
}
check() {
sed -i "s|/usr/bin/false|/bin/false|g" tests/unit/test_config.py
python3 -m venv --system-site-packages testenv
testenv/bin/python3 -m installer dist/*.whl
testenv/bin/python3 -m pytest --ignore=tests/functional/
}
package() {
python3 -m installer -d "$pkgdir" \
dist/*.whl
install -Dm644 man/$_pyname.1 \
"$pkgdir"/usr/share/man/man1/$_pyname.1
}
sha512sums="
25229542fcb790dfc96bb5f946628136dce342ae825edc7ed318651dac9840ec16f896198c357a4751bde8cda7ee7abb3fbc1f4e4abfa5a3cce7bc61f956f2e6 py3-gitlab-3.12.0.tar.gz
"

View file

@ -0,0 +1,31 @@
# Contributor: Rasmus Thomsen <oss@cogitri.dev>
# Maintainer: Rasmus Thomsen <oss@cogitri.dev>
pkgname=py3-gnupg
pkgver=0.5.0
pkgrel=1
pkgdesc="Python3 wrapper for the Gnu Privacy Guard (GPG or GnuPG)"
url="https://gnupg.readthedocs.io/en/latest/"
arch="noarch"
license="BSD-3-Clause"
depends="python3 gnupg"
makedepends="py3-setuptools py3-wheel py3-build py3-installer"
checkdepends="py3-pytest"
source="https://pypi.io/packages/source/p/python-gnupg/python-gnupg-$pkgver.tar.gz"
builddir="$srcdir/python-gnupg-$pkgver"
build() {
python3 -m build --no-isolation --wheel
}
check() {
NO_EXTERNAL_TESTS=no pytest -v
}
package() {
python3 -m installer -d "$pkgdir" \
dist/python_gnupg-$pkgver-py2.py3-none-any.whl
}
sha512sums="
cfd302257b53fdc9318004db7323ea5bf4bddc055b65b24386a1ecb27cd476fdf1bc771adcdde70a4eef442982a0c57dc832b92274bbe5ba16cbdf3247f4e77a python-gnupg-0.5.0.tar.gz
"

View file

@ -0,0 +1,52 @@
# Contributor: Michał Polański <michal@polanski.me>
# Maintainer: Michał Polański <michal@polanski.me>
pkgname=py3-httpcore
pkgver=0.15.0
pkgrel=2
pkgdesc="Minimal HTTP client"
url="https://www.encode.io/httpcore/"
license="BSD-3-Clause"
arch="noarch !armhf !ppc64le" # limited by py3-anyio
depends="
python3
py3-anyio
py3-certifi
py3-h11
py3-sniffio
py3-trio<0.22
"
makedepends="py3-setuptools"
checkdepends="
py3-h2
py3-hpack
py3-hyperframe
py3-pytest
py3-pytest-asyncio
py3-pytest-httpbin
py3-pytest-trio
py3-socksio
"
subpackages="$pkgname-doc"
source="https://github.com/encode/httpcore/archive/$pkgver/py3-httpcore-$pkgver.tar.gz
pytest-asyncio-warning.patch
"
builddir="$srcdir/httpcore-$pkgver"
build() {
python3 setup.py build
}
check() {
PYTHONPATH="$PWD/build/lib" pytest
}
package() {
python3 setup.py install --root="$pkgdir" --skip-build
install -Dm644 LICENSE.md "$pkgdir"/usr/share/licenses/$pkgname/LICENSE.md
}
sha512sums="
3c25630d582448e3c7d46176c8862e4d92c6c4aac954bfe46b06e26297b32f996db2e002a87c7187accb5bf4ef86e82d7f9051404bee651ce5254119571d0c95 py3-httpcore-0.15.0.tar.gz
28aa7bc050d56a09df5eb19c2edc8eda389a77c73c043945dcf74f8832387849d7a08c9366c403f451c65ab888720fcc4c5d233d3fb9a6628e91800f6c82ab3f pytest-asyncio-warning.patch
"

View file

@ -0,0 +1,11 @@
looks like upstream enabled a pytest equivalent of -Werror without testing too much
--- a/setup.cfg
+++ b/setup.cfg
@@ -29,6 +29,7 @@
ignore:unclosed <(socket\.socket|ssl\.SSLSocket) .*:ResourceWarning
ignore:ssl\.wrap_socket\(\) is deprecated, use SSLContext\.wrap_socket\(\):DeprecationWarning
ignore:ssl\.PROTOCOL_TLS is deprecated:DeprecationWarning
+ ignore:The 'asyncio_mode' default value will change:DeprecationWarning
[coverage:run]
omit = venv/*, httpcore/_sync/*

View file

@ -0,0 +1,44 @@
# Contributor: Michał Polański <michal@polanski.me>
# Maintainer: Michał Polański <michal@polanski.me>
pkgname=py3-httpx
pkgver=0.23.0
pkgrel=1
pkgdesc="Next generation HTTP client for Python"
url="https://www.python-httpx.org/"
license="BSD-3-Clause"
arch="noarch !armhf !ppc64le" # limited by py3-httpcore
depends="
python3
py3-certifi
py3-httpcore
py3-idna
py3-rfc3986
py3-sniffio
"
makedepends="py3-setuptools"
checkdepends="py3-pytest py3-pytest-asyncio py3-pytest-trio py3-socksio py3-trustme uvicorn"
subpackages="$pkgname-doc"
source="https://github.com/encode/httpx/archive/$pkgver/py3-httpx-$pkgver.tar.gz
relax-dependencies.patch
"
builddir="$srcdir/httpx-$pkgver"
options="!check" # cyclic dependency on uvicorn
build() {
python3 setup.py build
}
check() {
PYTHONPATH="$PWD/build/lib" pytest
}
package() {
python3 setup.py install --root="$pkgdir" --skip-build
install -Dm644 LICENSE.md "$pkgdir"/usr/share/licenses/$pkgname/LICENSE.md
}
sha512sums="
3cfdf2b3b2f15967a1eec0be05ed947c5e18a46576b68a9cbfd5147dfd4736cb7c389f5431732b93f3a11f3ec6c6f25f7cbb3d96d845f00b58e2b8dae047c1d5 py3-httpx-0.23.0.tar.gz
dc64c27e15116fdd061972747f96caedda4e4f73ca4545e77785d8c319763d55701f059339a502edc709441e21076e689caf113484986d0cb4a09b569b41603a relax-dependencies.patch
"

View file

@ -0,0 +1,13 @@
Relax dependencies
--- a/setup.py
+++ b/setup.py
@@ -58,7 +58,7 @@ setup(
install_requires=[
"certifi",
"sniffio",
- "rfc3986[idna2008]>=1.3,<2",
+ "rfc3986[idna2008]",
"httpcore>=0.15.0,<0.16.0",
],
extras_require={

View file

@ -0,0 +1,26 @@
# Maintainer: psykose <alice@ayaya.dev>
pkgname=py3-nose2
pkgver=0.12.0
pkgrel=1
pkgdesc="Successor to nose, based on unittest"
url="https://docs.nose2.io/en/latest/"
arch="noarch"
license="LGPL-2.0-or-later"
depends="python3"
makedepends="py3-setuptools"
source="https://github.com/nose-devs/nose2/archive/refs/tags/$pkgver/nose2-$pkgver.tar.gz"
options="!check" # don't work
builddir="$srcdir/nose2-$pkgver"
build() {
rm -rf nose2/tests
python3 setup.py build
}
package() {
python3 setup.py install --root="$pkgdir" --skip-build
}
sha512sums="
7f1462613b7d38a77cd0764f721be4223b8f4ae4694c49ba416a076c321e07484969ee1a7c2d2f89484c0c3b2a96e74d6a42321981af4930ebe5f63f07df7755 nose2-0.12.0.tar.gz
"

View file

@ -0,0 +1,30 @@
# Contributor: Rejah Rehim <rejah@beaglesecurity.com>
# Maintainer: Rejah Rehim <rejah@beaglesecurity.com>
pkgname=py3-python-jwt
_pkgname=python_jwt
pkgver=4.0.0
pkgrel=0
pkgdesc="Module for generating and verifying JSON Web Tokens"
options="!check" # no test suite
url="https://github.com/davedoesdev/python-jwt"
arch="noarch"
license="MIT"
depends="python3 py3-jwcrypto"
makedepends="py3-setuptools"
subpackages="$pkgname-doc"
source="https://files.pythonhosted.org/packages/source/p/$_pkgname/$_pkgname-$pkgver.tar.gz"
builddir="$srcdir"/$_pkgname-$pkgver
build() {
python3 setup.py build
}
package() {
python3 setup.py install --skip-build --root="$pkgdir"
install -Dm644 LICENCE "$pkgdir"/usr/share/licenses/$pkgname/LICENCE
install -Dm644 README.md "$pkgdir"/usr/share/licenses/$pkgname/README.md
}
sha512sums="
94c6ebd7738da3087b7192f3eff3e6af3aa7dce3f38cd0e001261e61a5aa42e03fa08d76bb56b7d033ee64723a428bfbad480b6d46934c9b1ef446f613b1cbe0 python_jwt-4.0.0.tar.gz
"

View file

@ -0,0 +1,51 @@
# Contributor: Marian Buschsieweke <marian.buschsieweke@ovgu.de>
# Maintainer: Marian Buschsieweke <marian.buschsieweke@ovgu.de>
pkgname=py3-rapidjson
pkgver=1.9
pkgrel=1
pkgdesc="Python3 wrapper around RapidJSON"
url="https://github.com/python-rapidjson/python-rapidjson"
arch="all"
license="MIT"
depends="
python3
"
makedepends="
py3-setuptools
rapidjson-dev
python3-dev
"
checkdepends="
py3-pytest
py3-tz
"
source="$pkgname-$pkgver.tar.gz::https://github.com/python-rapidjson/python-rapidjson/archive/refs/tags/v$pkgver.tar.gz"
builddir="$srcdir/"python-rapidjson-$pkgver
build() {
python3 setup.py \
--rj-include-dir=/usr/include/rapidjson \
build
}
check() {
PYTHONPATH="$(echo "$PWD"/build/lib.linux*)" pytest \
--ignore benchmarks \
--deselect tests/test_base_types.py::test_base_values \
--deselect tests/test_unicode.py::test_unicode_decode_error \
--deselect tests/test_validator.py::test_additional_and_pattern_properties_valid \
#
}
package() {
python3 setup.py \
--rj-include-dir=/usr/include/rapidjson \
install \
--skip-build \
--prefix=/usr \
--root="$pkgdir"
}
sha512sums="
d3f4c06d021058ea42f01f676e77dfa84997a681d548582060d20713878ecf73ada257186026847de43718764078f7e3a2467a165fa6d8b15b2f3e3d4bded4e1 py3-rapidjson-1.9.tar.gz
"

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,183 @@
From 9bf9f40f41141942be166966ec434720da5b85bd Mon Sep 17 00:00:00 2001
From: Drew DeVault <sir@cmpwn.com>
Date: Wed, 29 Dec 2021 10:16:53 +0100
Subject: [PATCH 2/2] Drop tests/test_ssl.py
This test expects to be run in the upstream project's CI enviornment.
Ref https://github.com/redis/redis-py/issues/1838
---
tests/test_ssl.py | 161 ----------------------------------------------
1 file changed, 161 deletions(-)
delete mode 100644 tests/test_ssl.py
diff --git a/tests/test_ssl.py b/tests/test_ssl.py
deleted file mode 100644
index a2f66b2..0000000
--- a/tests/test_ssl.py
+++ /dev/null
@@ -1,161 +0,0 @@
-import os
-import socket
-import ssl
-from urllib.parse import urlparse
-
-import pytest
-
-import redis
-from redis.exceptions import ConnectionError, RedisError
-
-from .conftest import skip_if_cryptography, skip_if_nocryptography
-
-
-@pytest.mark.ssl
-class TestSSL:
- """Tests for SSL connections
-
- This relies on the --redis-ssl-url purely for rebuilding the client
- and connecting to the appropriate port.
- """
-
- ROOT = os.path.join(os.path.dirname(__file__), "..")
- CERT_DIR = os.path.abspath(os.path.join(ROOT, "docker", "stunnel", "keys"))
- if not os.path.isdir(CERT_DIR): # github actions package validation case
- CERT_DIR = os.path.abspath(
- os.path.join(ROOT, "..", "docker", "stunnel", "keys")
- )
- if not os.path.isdir(CERT_DIR):
- raise IOError(f"No SSL certificates found. They should be in {CERT_DIR}")
-
- def test_ssl_with_invalid_cert(self, request):
- ssl_url = request.config.option.redis_ssl_url
- sslclient = redis.from_url(ssl_url)
- with pytest.raises(ConnectionError) as e:
- sslclient.ping()
- assert "SSL: CERTIFICATE_VERIFY_FAILED" in str(e)
-
- def test_ssl_connection(self, request):
- ssl_url = request.config.option.redis_ssl_url
- p = urlparse(ssl_url)[1].split(":")
- r = redis.Redis(host=p[0], port=p[1], ssl=True, ssl_cert_reqs="none")
- assert r.ping()
-
- def test_ssl_connection_without_ssl(self, request):
- ssl_url = request.config.option.redis_ssl_url
- p = urlparse(ssl_url)[1].split(":")
- r = redis.Redis(host=p[0], port=p[1], ssl=False)
-
- with pytest.raises(ConnectionError) as e:
- r.ping()
- assert "Connection closed by server" in str(e)
-
- def test_validating_self_signed_certificate(self, request):
- ssl_url = request.config.option.redis_ssl_url
- p = urlparse(ssl_url)[1].split(":")
- r = redis.Redis(
- host=p[0],
- port=p[1],
- ssl=True,
- ssl_certfile=os.path.join(self.CERT_DIR, "server-cert.pem"),
- ssl_keyfile=os.path.join(self.CERT_DIR, "server-key.pem"),
- ssl_cert_reqs="required",
- ssl_ca_certs=os.path.join(self.CERT_DIR, "server-cert.pem"),
- )
- assert r.ping()
-
- def _create_oscp_conn(self, request):
- ssl_url = request.config.option.redis_ssl_url
- p = urlparse(ssl_url)[1].split(":")
- r = redis.Redis(
- host=p[0],
- port=p[1],
- ssl=True,
- ssl_certfile=os.path.join(self.CERT_DIR, "server-cert.pem"),
- ssl_keyfile=os.path.join(self.CERT_DIR, "server-key.pem"),
- ssl_cert_reqs="required",
- ssl_ca_certs=os.path.join(self.CERT_DIR, "server-cert.pem"),
- ssl_validate_ocsp=True,
- )
- return r
-
- @skip_if_cryptography()
- def test_ssl_ocsp_called(self, request):
- r = self._create_oscp_conn(request)
- with pytest.raises(RedisError) as e:
- assert r.ping()
- assert "cryptography not installed" in str(e)
-
- @skip_if_nocryptography()
- def test_ssl_ocsp_called_withcrypto(self, request):
- r = self._create_oscp_conn(request)
- with pytest.raises(ConnectionError) as e:
- assert r.ping()
- assert "No AIA information present in ssl certificate" in str(e)
-
- # rediss://, url based
- ssl_url = request.config.option.redis_ssl_url
- sslclient = redis.from_url(ssl_url)
- with pytest.raises(ConnectionError) as e:
- sslclient.ping()
- assert "No AIA information present in ssl certificate" in str(e)
-
- @skip_if_nocryptography()
- def test_valid_ocsp_cert_http(self):
- from redis.ocsp import OCSPVerifier
-
- hostnames = ["github.com", "aws.amazon.com", "ynet.co.il", "microsoft.com"]
- for hostname in hostnames:
- context = ssl.create_default_context()
- with socket.create_connection((hostname, 443)) as sock:
- with context.wrap_socket(sock, server_hostname=hostname) as wrapped:
- ocsp = OCSPVerifier(wrapped, hostname, 443)
- assert ocsp.is_valid()
-
- @skip_if_nocryptography()
- def test_revoked_ocsp_certificate(self):
- from redis.ocsp import OCSPVerifier
-
- context = ssl.create_default_context()
- hostname = "revoked.badssl.com"
- with socket.create_connection((hostname, 443)) as sock:
- with context.wrap_socket(sock, server_hostname=hostname) as wrapped:
- ocsp = OCSPVerifier(wrapped, hostname, 443)
- assert ocsp.is_valid() is False
-
- @skip_if_nocryptography()
- def test_unauthorized_ocsp(self):
- from redis.ocsp import OCSPVerifier
-
- context = ssl.create_default_context()
- hostname = "stackoverflow.com"
- with socket.create_connection((hostname, 443)) as sock:
- with context.wrap_socket(sock, server_hostname=hostname) as wrapped:
- ocsp = OCSPVerifier(wrapped, hostname, 443)
- with pytest.raises(ConnectionError):
- ocsp.is_valid()
-
- @skip_if_nocryptography()
- def test_ocsp_not_present_in_response(self):
- from redis.ocsp import OCSPVerifier
-
- context = ssl.create_default_context()
- hostname = "google.co.il"
- with socket.create_connection((hostname, 443)) as sock:
- with context.wrap_socket(sock, server_hostname=hostname) as wrapped:
- ocsp = OCSPVerifier(wrapped, hostname, 443)
- assert ocsp.is_valid() is False
-
- @skip_if_nocryptography()
- def test_unauthorized_then_direct(self):
- from redis.ocsp import OCSPVerifier
-
- # these certificates on the socket end return unauthorized
- # then the second call succeeds
- hostnames = ["wikipedia.org", "squarespace.com"]
- for hostname in hostnames:
- context = ssl.create_default_context()
- with socket.create_connection((hostname, 443)) as sock:
- with context.wrap_socket(sock, server_hostname=hostname) as wrapped:
- ocsp = OCSPVerifier(wrapped, hostname, 443)
- assert ocsp.is_valid()
--
2.34.1

View file

@ -0,0 +1,42 @@
# Maintainer: Eivind Uggedal <eu@eju.no>
pkgname=py3-redis
_pkgname=redis
pkgver=4.1.0
pkgrel=1
pkgdesc="Python3 client for Redis key-value store"
url="https://github.com/andymccurdy/redis-py"
arch="noarch"
license="MIT"
depends="python3 py3-deprecated py3-hiredis"
makedepends="py3-setuptools"
checkdepends="py3-pytest py3-mock redis"
source="
https://files.pythonhosted.org/packages/source/${_pkgname:0:1}/$_pkgname/$_pkgname-$pkgver.tar.gz
0001-all-remove-support-for-nonfree-Redis-modules.patch
0002-Drop-tests-test_ssl.py.patch
"
builddir="$srcdir"/$_pkgname-$pkgver
options="!check" # tests fail due to old version + disabled proprietary redis features
replaces="py-redis" # Backwards compatibility
provides="py-redis=$pkgver-r$pkgrel" # Backwards compatibility
build() {
python3 setup.py build
}
check() (
redis-server --dir "$builddir" &
trap "kill $!" EXIT
pytest
)
package() {
python3 setup.py install --skip-build --root="$pkgdir"
}
sha512sums="
85cd09570f4faf34a735befd0677aa8ca2cb0d62b0285c4c040380c2440f2774e47762ec4219381294465343353a15804b96f06b4d6eefa7159a224eb9e72001 redis-4.1.0.tar.gz
b1dd96aeb6129f121108fac3c1ad033b1b657287fb0f959bc7fcab997b26c4b91cc7c0df6f86d6d2ac283951956a4a38826647f0e744514ce5031cf3917d1746 0001-all-remove-support-for-nonfree-Redis-modules.patch
5184efc472ad16020240e57222f906656b1f6db5139d37de22b34298c7a15c9b91f5c2d976f6c8455071459d2ff273f75f6bf76f3f46990bacec6673a83a2872 0002-Drop-tests-test_ssl.py.patch
"

View file

@ -0,0 +1,83 @@
# Contributor: Andrew Manison <amanison@anselsystems.com>
# Contributor: Fabian Affolter <fabian@affolter-engineering.ch>
# Contributor: Leo <thinkabit.ukim@gmail.com>
# Maintainer: psykose <alice@ayaya.dev>
pkgname=py3-setuptools
_pkgname=${pkgname#py3-}
pkgver=65.7.0
pkgrel=0
pkgdesc="Collection of enhancements to the Python3 distutils"
options="!check" # Tests require packages out of main/
url="https://pypi.python.org/pypi/setuptools"
arch="noarch"
license="MIT"
# everything is vendored
depends="
py3-packaging
python3
"
# depends="
# py3-appdirs
# py3-more-itertools
# py3-ordered-set
# py3-packaging
# py3-parsing
# python3
# "
makedepends="py3-setuptools-stage0"
source="$_pkgname-$pkgver.tar.gz::https://github.com/pypa/$_pkgname/archive/v$pkgver.tar.gz"
builddir="$srcdir"/$_pkgname-$pkgver
provides="py-setuptools=$pkgver-r$pkgrel" # Backwards compatibility
replaces="py-setuptools" # Backwards compatiblity
# py3-setuptools needs itself to build, bootstrapped with a lower version
# in main/py3-setuptools-stage0
provides="$provides py3-setuptools-bootstrap"
provider_priority=100 # highest
export SETUPTOOLS_INSTALL_WINDOWS_SPECIFIC_FILES=0
prepare() {
default_prepare
# Unbundle
# rm -rf pkg_resources/extern pkg_resources/_vendor \
# setuptools/extern setuptools/_vendor
# Upstream devendoring logic is badly broken, see:
# https://bugs.archlinux.org/task/58670
# https://github.com/pypa/pip/issues/5429
# https://github.com/pypa/setuptools/issues/1383
# The simplest fix is to simply rewrite import paths to use the canonical
# location in the first place
# for _module in setuptools pkg_resources '' ; do
# find . -name \*.py -exec sed -i \
# -e 's/from '$_module.extern' import/import/' \
# -e 's/from '$_module.extern'./from /' \
# -e 's/import '$_module.extern'./import /' \
# -e "s/__import__('$_module.extern./__import__('/" \
# {} +
# done
# Fix post-release tag
sed -e '/tag_build = .post/d' \
-e '/tag_date = 1/d' \
-i setup.cfg
}
build() {
python3 setup.py build
}
package() {
# Otherwise it complains that build/scripts-3.10 cannot be found
# no other changes noted
mkdir -p build/scripts-3.11
python3 setup.py install --root="$pkgdir" --skip-build
}
sha512sums="
864bdd80acc65c34c472c9401ee6eadee057871b2f10793720887b43354bce08a2201e79fcf92767ddf46912285c46df88ec7733cca25982537e0bd51529e409 setuptools-65.7.0.tar.gz
"

View file

@ -0,0 +1,33 @@
# Contributor: Michał Polański <michal@polanski.me>
# Maintainer: Michał Polański <michal@polanski.me>
pkgname=py3-socksio
pkgver=1.0.0
pkgrel=1
pkgdesc="Sans-I/O implementation of SOCKS4, SOCKS4A, and SOCKS5"
url="https://pypi.org/project/socksio/"
license="MIT"
arch="noarch"
depends="python3"
makedepends="py3-setuptools"
checkdepends="py3-pytest py3-pytest-cov"
subpackages="$pkgname-doc"
source="$pkgname-$pkgver.tar.gz::https://files.pythonhosted.org/packages/source/s/socksio/socksio-$pkgver.tar.gz"
builddir="$srcdir/socksio-$pkgver"
build() {
python3 setup.py build
}
check() {
PYTHONPATH="$PWD/build/lib" pytest
}
package() {
python3 setup.py install --root="$pkgdir" --skip-build
install -Dm644 LICENSE "$pkgdir"/usr/share/licenses/$pkgname/LICENSE
}
sha512sums="
89bce7294555e2623df68e99352c27b83af38a4fcc4e3b86c8826c9c4bf545eeaf0a6328b000cfe1d286fa442c756c4579b4887cff03bc9e559cd66414a7ac6f py3-socksio-1.0.0.tar.gz
"

View file

@ -0,0 +1,34 @@
# Maintainer: Hoang Nguyen <folliekazetani@protonmail.com>
pkgname=py3-sphinxcontrib-autoprogram
pkgver=0.1.7
pkgrel=2
pkgdesc="Sphinx extension to document CLI programs"
url="https://github.com/sphinx-contrib/autoprogram"
arch="noarch"
license="BSD-2-Clause"
depends="python3 py3-six py3-sphinx"
makedepends="py3-setuptools"
source="
$pkgname-$pkgver.tar.gz::https://github.com/sphinx-contrib/autoprogram/archive/refs/tags/$pkgver.tar.gz
fix-argparse-output-py310.patch
"
builddir="$srcdir/autoprogram-$pkgver"
# fail with new docutils
options="!check"
build() {
python3 setup.py build
}
check() {
python3 setup.py test
}
package() {
python3 setup.py install --skip-build --root="$pkgdir"
}
sha512sums="
81679a8bed93274193efb5ac784d72280db32f37156aca5a422af6c8e8abd6d4e4a22e070ee73e9b3194eacf22a8228be0df3329f6fd6a0ff2408a3c7973c8a7 py3-sphinxcontrib-autoprogram-0.1.7.tar.gz
2405814d8cf546b6078e8d3c854943e1b5603cbc52558dad67fcaf7a729e1f6a29ae049e1172fe4612ccb20f0209ddebedf7c2a214602f1d6b7720b2fbcf0caa fix-argparse-output-py310.patch
"

View file

@ -0,0 +1,19 @@
Patch-Source: https://github.com/sphinx-contrib/autoprogram/pull/25
diff --git a/sphinxcontrib/autoprogram.py b/sphinxcontrib/autoprogram.py
index c60cf68..0e7d9a0 100644
--- a/sphinxcontrib/autoprogram.py
+++ b/sphinxcontrib/autoprogram.py
@@ -476,7 +476,11 @@ class ScannerTestCase(unittest.TestCase):
# section: default optionals
program, options, group = sections[1]
self.assertEqual([], program)
- self.assertEqual("optional arguments", group.title)
+ # See https://github.com/sphinx-contrib/autoprogram/issues/24
+ if sys.version_info >= (3, 10):
+ self.assertEqual("options", group.title)
+ else:
+ self.assertEqual("optional arguments", group.title)
self.assertEqual(None, group.description)
self.assertEqual(2, len(options))
self.assertEqual(

View file

@ -0,0 +1,37 @@
# Contributor: Michał Polański <michal@polanski.me>
# Maintainer: Michał Polański <michal@polanski.me>
pkgname=py3-uvloop
pkgver=0.16.0
pkgrel=0
pkgdesc="Ultra fast asyncio event loop"
url="https://github.com/MagicStack/uvloop"
license="MIT OR Apache-2.0"
arch="all !armhf !ppc64le" # tests fail
depends="python3"
makedepends="py3-setuptools python3-dev cython libuv-dev py3-pip"
checkdepends="py3-pytest py3-aiohttp py3-openssl py3-psutil"
source="https://github.com/MagicStack/uvloop/archive/v$pkgver/py3-uvloop-$pkgver.tar.gz
tcp-tests.patch
dns-tests.patch
"
options="!check" # mypy validation fails with our mypy version
builddir="$srcdir/uvloop-$pkgver"
build() {
python3 setup.py build build_ext --inplace --cython-always --use-system-libuv
}
check() {
PYTHONASYNCIODEBUG=1 PYTHONPATH="$(echo "$builddir"/build/lib.linux-*)" pytest -v \
-k 'not test_remote_shutdown_receives_trailing_data'
}
package() {
python3 setup.py install --root="$pkgdir" --skip-build
}
sha512sums="
1896d9a60a9c4e4b8d146ad858e664f3e43969ad0c14026fe79c69f546e40bf1dc6a4cce2d388a7a6e0f5b8306b1eb4da3f713cce44c58ba6628b82ac6eaf271 py3-uvloop-0.16.0.tar.gz
809af42dc056b718652ff1e2f99cc493b230a9566367bccf349afc705653ffb830288b7de80bc6016071980af5d5e0e635e72d53f7774ace193ce4fb2b1a62cc tcp-tests.patch
072c955662a9922de1f08713a73f0f9a08bd76b82cabd04e15cbb8b8299d81615516d03bdff207d2f0125afe055e9573604ebc331ad85f5d69ec6bf69668e620 dns-tests.patch
"

View file

@ -0,0 +1,47 @@
Behavior of getaddrinfo provided by libuv differs from musl.
Skip affected tests.
--- a/tests/test_dns.py
+++ b/tests/test_dns.py
@@ -1,4 +1,5 @@
import asyncio
+import pytest
import socket
import unittest
@@ -99,18 +100,22 @@ class BaseTestDNS:
self._test_getaddrinfo(b'example.com', '80')
self._test_getaddrinfo(b'example.com', '80', type=socket.SOCK_STREAM)
+ @pytest.mark.skip(reason="failure")
def test_getaddrinfo_12(self):
self._test_getaddrinfo('127.0.0.1', '80')
self._test_getaddrinfo('127.0.0.1', '80', type=socket.SOCK_STREAM)
+ @pytest.mark.skip(reason="failure")
def test_getaddrinfo_13(self):
self._test_getaddrinfo(b'127.0.0.1', b'80')
self._test_getaddrinfo(b'127.0.0.1', b'80', type=socket.SOCK_STREAM)
+ @pytest.mark.skip(reason="failure")
def test_getaddrinfo_14(self):
self._test_getaddrinfo(b'127.0.0.1', b'http')
self._test_getaddrinfo(b'127.0.0.1', b'http', type=socket.SOCK_STREAM)
+ @pytest.mark.skip(reason="failure")
def test_getaddrinfo_15(self):
self._test_getaddrinfo('127.0.0.1', 'http')
self._test_getaddrinfo('127.0.0.1', 'http', type=socket.SOCK_STREAM)
@@ -127,10 +132,12 @@ class BaseTestDNS:
self._test_getaddrinfo('localhost', b'http')
self._test_getaddrinfo('localhost', b'http', type=socket.SOCK_STREAM)
+ @pytest.mark.skip(reason="failure")
def test_getaddrinfo_19(self):
self._test_getaddrinfo('::1', 80)
self._test_getaddrinfo('::1', 80, type=socket.SOCK_STREAM)
+ @pytest.mark.skip(reason="failure")
def test_getaddrinfo_20(self):
self._test_getaddrinfo('127.0.0.1', 80)
self._test_getaddrinfo('127.0.0.1', 80, type=socket.SOCK_STREAM)

View file

@ -0,0 +1,15 @@
Adjust error message for musl-based systems.
--- a/tests/test_tcp.py
+++ b/tests/test_tcp.py
@@ -221,8 +221,8 @@ class _TestTCP:
addr = sock.getsockname()
with self.assertRaisesRegex(OSError,
- r"error while attempting.*\('127.*: "
- r"address already in use"):
+ r"\[Errno 98\] error while attempting.*\('127.*: "
+ r"address in use"):
self.loop.run_until_complete(
self.loop.create_server(object, *addr))

View file

@ -0,0 +1,33 @@
# Contributor: Anjandev Momi <anjan@momi.ca>
# Maintainer: Anjandev Momi <anjan@momi.ca>
pkgname=py3-zxcvbn
pkgdesc="Python implementation of Dropbox's realistic password strength estimator"
_pkgreal=zxcvbn
pkgver=4.4.28
pkgrel=1
url="http://packages.python.org/pypi/zxcvbn"
arch="noarch"
license="MIT"
depends="python3"
checkdepends="py3-pytest"
makedepends="py3-setuptools"
_pypiprefix="${_pkgreal%"${_pkgreal#?}"}"
source="https://files.pythonhosted.org/packages/source/$_pypiprefix/$_pkgreal/$_pkgreal-$pkgver.tar.gz"
builddir="$srcdir/$_pkgreal-$pkgver"
options="!check" # no upstream checks
build() {
python3 setup.py build
}
check() {
pytest
}
package() {
python3 setup.py install --skip-build --root="$pkgdir"
}
sha512sums="
6ed12b555442b4ee30662c90f38c90988833545310acce45e68a0aa2fc6297729da500ae0f578a1a266e85c09522eb3287c38d92bcfc1017f852ee76bf92c606 zxcvbn-4.4.28.tar.gz
"

View file

@ -1,387 +0,0 @@
# Contributor: Lauren N. Liberda <lauren@selfisekai.rocks>
# Maintainer: Lauren N. Liberda <lauren@selfisekai.rocks>
pkgname=signal-desktop
pkgver=6.1.0
pkgrel=0
pkgdesc="A messaging app for simple private communication with friends"
url="https://github.com/signalapp/Signal-Desktop/"
# same as electron
# aarch64: polyval-0.5.3 crate subdep uses unstable stdsimd feature
arch="x86_64"
license="AGPL-3.0-only"
# this build system sucks massive ass and does not add needed to anything
depends="
electron
ffmpeg-libs
font-barlow
font-eb-garamond
font-inter
font-parisienne
libevent
opus
"
makedepends="
alsa-lib-dev
bsd-compat-headers
cargo
clang-dev
cmake
electron-dev
ffmpeg-dev
git-lfs
glib-dev
gn
libepoxy-dev
libevent-dev
lld
llvm-dev
mesa-dev
nodejs
npm
openssl-dev
opus-dev
pipewire-dev
protoc
pulseaudio-dev
py3-setuptools
python3
samurai
sqlcipher-dev
vips-dev
yarn
"
options="net !check"
# follow signal-desktop package.json -> @signalapp/libsignal-client
_libsignalver=0.21.1
# follow signal-desktop package.json -> ringrtc -> commit title
_ringrtcver=2.22.0
# follow ringrtc (on version above) -> config/version.sh -> WEBRTC_VERSION
# downloading tarball generated with abuild snapshot (with gclient dependencies fetched)
_webrtcver=5005b
source="
https://github.com/signalapp/Signal-Desktop/archive/refs/tags/v$pkgver/Signal-Desktop-$pkgver.tar.gz
https://github.com/signalapp/libsignal/archive/refs/tags/v$_libsignalver/libsignal-$_libsignalver.tar.gz
https://github.com/signalapp/ringrtc/archive/refs/tags/v$_ringrtcver/ringrtc-$_ringrtcver.tar.gz
https://s3.sakamoto.pl/lnl-aports-snapshots/webrtc-$_webrtcver.tar.xz
bettersqlite-use-system-sqlcipher.patch
signal-build-expire-time.patch
signal-disable-updates.patch
signal-update-links.patch
webrtc-canonicalize-file-name.patch
webrtc-use-alpine-target.patch
webrtc-no-shared-deps-in-static-lib.patch
signal-desktop
signal-desktop.desktop
"
builddir="$srcdir/Signal-Desktop-$pkgver"
export CC=clang
export CXX=clang++
# required to find the tools
export AR=llvm-ar
export NM=llvm-nm
export LD=clang++
# less log spam, reproducible, allow lto with rust
export CFLAGS="${CFLAGS/-g/} -O2 -Wno-deprecated-builtins -Wno-unknown-warning-option -Wno-builtin-macro-redefined"
export CXXFLAGS="${CXXFLAGS/-g/} -O2 -Wno-deprecated-builtins -Wno-unknown-warning-option -Wno-builtin-macro-redefined"
export CPPFLAGS="$CPPFLAGS -D__DATE__= -D__TIME__= -D__TIMESTAMP__="
export CARGO_PROFILE_RELEASE_OPT_LEVEL=2
export CARGO_PROFILE_RELEASE_STRIP="symbols"
export RUSTFLAGS="$RUSTFLAGS -C linker=clang -C link-arg=-fuse-ld=lld"
export YARN_CACHE_FOLDER="$srcdir/.yarn"
# webrtc only, the other dependencies are fine with tarballs
snapshot() {
mkdir -p "$srcdir"
cd "$srcdir"
echo "
solutions = [{
'name': 'src',
'url': 'https://github.com/signalapp/webrtc.git@$_webrtcver',
}]
target_cpu = ['x64']
target_cpu_only = True
" > .gclient
gclient sync --no-history --nohooks --tpot-cipd-ignore-platformed
# needed DEPS hooks
python3 'src/build/landmines.py' --landmine-scripts 'src/tools_webrtc/get_landmines.py' --src-dir 'src'
python3 'src/build/util/lastchange.py' -o 'src/build/util/LASTCHANGE'
for elf in $(scanelf -RA -F "%F" src); do
rm -f "$elf"
done
mv src webrtc-$_webrtcver
msg "generating tarball.."
tar -cf webrtc-$_webrtcver.tar \
--exclude="ChangeLog*" \
--exclude="testdata/" \
--exclude="test_data/" \
--exclude="android_rust_toolchain/toolchain/" \
--exclude-backups \
--exclude-caches-all \
--exclude-vcs \
webrtc-$_webrtcver
xz -T0 -e -9 -vv -k webrtc-$_webrtcver.tar
}
prepare() {
ln -s "$srcdir"/webrtc-$_webrtcver "$srcdir"/ringrtc-$_ringrtcver/src/webrtc
ln -sf "$srcdir"/ringrtc-$_ringrtcver/src "$srcdir"/webrtc-$_webrtcver/ringrtc
msg "Applying patches"
for x in $source; do
case "$x" in
signal-*.patch)
msg "$x"
patch -p1 -i "$srcdir"/$x -d "$srcdir"/Signal-Desktop-$pkgver
;;
ringrtc-*.patch)
msg "$x"
patch -p1 -i "$srcdir"/$x -d "$srcdir"/ringrtc-$_ringrtcver
;;
webrtc-*.patch)
msg "$x"
patch -p1 -i "$srcdir"/$x -d "$srcdir"/webrtc-$_webrtcver
;;
esac
done
msg "Installing signal-desktop JS dependencies"
echo 'ignore-engines true' > .yarnrc
yarn --ignore-scripts --frozen-lockfile
(
cd "$srcdir"/webrtc-$_webrtcver
local use_system="
ffmpeg
fontconfig
freetype
harfbuzz-ng
icu
libdrm
libevent
libjpeg
libpng
libwebp
libxml
libxslt
opus
re2
snappy
zlib
"
for _lib in $use_system libjpeg_turbo; do
msg "Removing buildscripts for system provided $_lib"
find . -type f -path "*third_party/$_lib/*" \
\! -path "*third_party/$_lib/chromium/*" \
\! -path "*third_party/$_lib/google/*" \
\! -path './base/third_party/icu/*' \
\! -path './third_party/libxml/*' \
\! -path './third_party/pdfium/third_party/freetype/include/pstables.h' \
\! -path './third_party/harfbuzz-ng/utils/hb_scoped.h' \
\! -path './third_party/crashpad/crashpad/third_party/zlib/zlib_crashpad.h' \
\! -regex '.*\.\(gn\|gni\|isolate\|py\)' \
-delete
done
msg "Replacing gn files"
python3 build/linux/unbundle/replace_gn_files.py --system-libraries \
$use_system
# allow system dependencies in "official builds"
sed -i 's/OFFICIAL_BUILD/GOOGLE_CHROME_BUILD/' \
tools/generate_shim_headers/generate_shim_headers.py
)
(
cd "$srcdir"/ringrtc-$_ringrtcver/src/node
msg "Installing ringrtc js dependencies"
yarn --frozen-lockfile --ignore-scripts
)
(
cd "$srcdir"/libsignal-$_libsignalver/node
# fix target
sed -i 's/unknown-linux-gnu/alpine-linux-musl/g' binding.gyp
msg "Installing libsignal js dependencies"
yarn --ignore-scripts --frozen-lockfile
)
# remove shipped fonts for system-provided (part 1)
rm -rf fonts/
}
build() {
chromium_arch="$(node -e 'console.log(process.arch)')"
# required dependency of ringrtc
(
cd "$srcdir"/webrtc-$_webrtcver
local webrtc_args="
rtc_build_examples=false
rtc_build_tools=false
rtc_enable_protobuf=false
rtc_enable_sctp=false
rtc_include_tests=false
rtc_include_ilbc=false
rtc_libvpx_build_vp9=true
rtc_use_x11=false
build_with_mozilla=false
chrome_pgo_phase=0
clang_use_chrome_plugins=false
custom_toolchain=\"//build/toolchain/linux/unbundle:default\"
is_cfi=false
is_clang=true
is_debug=false
is_official_build=true
host_toolchain=\"//build/toolchain/linux/unbundle:default\"
proprietary_codecs=true
rtc_link_pipewire=true
rtc_use_pipewire=true
symbol_level=0
use_custom_libcxx=false
use_sysroot=false
use_system_freetype=true
use_system_harfbuzz=true
use_system_libjpeg=true
"
mkdir -p "$srcdir"/ringrtc-$_ringrtcver/out/release
msg "Building signal's webrtc"
gn gen "$srcdir"/ringrtc-$_ringrtcver/out/release --args="$(echo $webrtc_args)"
ninja -C "$srcdir"/ringrtc-$_ringrtcver/out/release webrtc
)
# add lto for the remaining c steps (sqlite)
export CFLAGS="$CFLAGS -flto"
export CXXFLAGS="$CXXFLAGS -flto"
export LDFLAGS="$LDFLAGS -flto -fuse-ld=lld"
(
cd "$srcdir"/ringrtc-$_ringrtcver/src/rust
msg "Building libringrtc"
OUTPUT_DIR="$srcdir"/ringrtc-$_ringrtcver/out \
cargo build --features electron --release
mkdir -p ../node/build/linux
cp -fv target/release/libringrtc.so ../node/build/linux/libringrtc-$chromium_arch.node
)
(
cd "$srcdir"/ringrtc-$_ringrtcver/src/node
msg "Building ringrtc JS glue code"
yarn build
)
# module on npm intentionally unbuildable: https://github.com/signalapp/libsignal/issues/464#issuecomment-1160665052
(
cd "$srcdir"/libsignal-$_libsignalver/node
msg "Building libsignal"
yarn node-gyp configure --nodedir=/usr/include/electron/node_headers --build-from-source
yarn node-gyp build --nodedir=/usr/include/electron/node_headers --build-from-source
mkdir -p prebuilds/linux-$chromium_arch
mv build/Release/libsignal_client_linux_$chromium_arch.node prebuilds/linux-$chromium_arch/node.napi.node
msg "Building libsignal glue code"
yarn tsc
)
# from package.json postinstall
yarn build:acknowledgments
yarn patch-package
rm -rf node_modules/dtrace-provider
# use our libsignal
rm -rf node_modules/@signalapp/libsignal-client/
ln -s "$srcdir"/libsignal-$_libsignalver/node/ node_modules/@signalapp/libsignal-client
# use our libringrtc
rm -rf node_modules/ringrtc/
ln -s "$srcdir"/ringrtc-$_ringrtcver/src/node/ node_modules/ringrtc
# patch the sqlcipher module
for x in $source; do
case "$x" in
bettersqlite-*.patch)
msg "$x"
patch -Np1 -i "$srcdir"/$x -d "$srcdir"/Signal-Desktop-$pkgver/node_modules/better-sqlite3/
;;
esac
done
# use system-provided font
echo '' > node_modules/typeface-inter/inter.css
rm -rf 'node_modules/typeface-inter/Inter '*
npm rebuild sharp better-sqlite3 --nodedir=/usr/include/electron/node_headers --build-from-source
NODE_ENV=production \
SIGNAL_ENV=production \
NODE_OPTIONS=--openssl-legacy-provider \
yarn build:dev
NODE_ENV=production \
SIGNAL_ENV=production \
yarn build:electron \
--config.extraMetadata.environment=production \
--config.directories.output=release \
--linux=dir
}
check() {
# tests run against downloaded build of electron for glibc, probably can be patched
yarn test
}
package() {
cd "$builddir"/release/linux-unpacked
install -Dm644 resources/app.asar "$pkgdir"/usr/lib/$pkgname/app.asar
cp -r resources/app.asar.unpacked "$pkgdir"/usr/lib/$pkgname/app.asar.unpacked
install -Dm755 "$srcdir"/$pkgname "$pkgdir"/usr/bin/$pkgname
install -Dm644 "$srcdir"/$pkgname.desktop "$pkgdir"/usr/share/applications/$pkgname.desktop
for i in 16 32 48 128 250 256 1024; do
install -Dm644 "$builddir"/images/icon_$i.png "$pkgdir"/usr/share/icons/hicolor/${i}x$i/apps/$pkgname.png
done
}
sha512sums="
8f5234fa018e0590b3cad934ab46797bef16ff489a36a05364df7f43be80022143e8034665d2f24a171401bfac3315a16918d702a249c9ef8acd1de78e30f52e Signal-Desktop-6.1.0.tar.gz
d2c13bf0d96eb706afe018c0ef9b377f3f50d2a82690f6ccb7260efa7cc620fb5c52ae775d598d6ebcaa581acfaab244dfe6f7f1738755604bf860cd548a62b6 libsignal-0.21.1.tar.gz
e263390ea2ae877edb39e9973a8c8a03e2f396f255ca48487311031c69493a93da0dedbce5f10adfef4859cb8e51579bf285fbd9b94f98fabd538acaee18c413 ringrtc-2.22.0.tar.gz
748f870d35b9a9789311c3d7cee9c0bc150aed8094838406e0a1969f2f824900ffec40d0b4fe2702f2f93d4a78d7987b0f91668cff859a8a34517663b138f8b5 webrtc-5005b.tar.xz
1aaf59c2d401ae127ed73981be330cd999794a217b455cb8033652063eb7b549c7070c63f54e4e60bf4338b475e162b6eae98683c3ce058e8f1a407588b3b2e1 bettersqlite-use-system-sqlcipher.patch
3ecfbd1b0cd03d1a697f3e53c057fa8bc2118de48ff0c3f07f8bb731f128f9478862a388efb36dd4dbc1dc5ad9977165a935fe65664aea915b8b80c38e801070 signal-build-expire-time.patch
60a45285d885922f5c21f64b761a10efbee9081baf3efa4c8c13abc6a43dc4d27662ed10e239b0fa2071ab9e3a0dbbb4b11d6e3d26fe2b74a19f39e72b74a5bd signal-disable-updates.patch
c68a2a6a37c1cdea227f29c0922b9bf15259f044e9b3240b120bba14809d04d66cf0b619f52bb91abd596ad93e51e972be132b5951d0e8f6ea238fcb7bb613eb signal-update-links.patch
252b37a2ecc5e7a25385943045f426dc2e30991b28d206ceaff1be7fd8ffeeb024310a8fca6b3e69a4b1c57db535d51c570935351053525f393682d5ecd0f9a9 webrtc-canonicalize-file-name.patch
6add8b4c293f5392748a2eec9486cb4a6534e161977c6a98de71617b9abcdd1e8ad94b44014256a4b52e33eb9dd4aca380279d4161629a1bb2d7b15f8eb5b459 webrtc-use-alpine-target.patch
bab56a33265b5b094f161af1462166e371913a5269fe8e7d12e9f65ec4f5b908157406b3bcbcf73db15d03470445127d27c64fd731b6ea57c631aba3f4d302cb webrtc-no-shared-deps-in-static-lib.patch
87534e7b5ad7365509eab75629e6bd1a9ed61ee92f7e358405a0abaf0df57de14623fb3894eb082f8785422e5c087e1c50f9e2e5cafbb2529591fd7bf447f7f5 signal-desktop
87ef5f3ffcf64e3cae308aa0f6bc208fb05dd8568f6a288217cdf8498ae1523f276987a7be8d6f5208f13394bab350e08734d806a8e7c08849dd8ba0dda49d66 signal-desktop.desktop
"

View file

@ -1,45 +0,0 @@
--- a/binding.gyp
+++ b/binding.gyp
@@ -7,7 +7,16 @@
'targets': [
{
'target_name': 'better_sqlite3',
- 'dependencies': ['deps/sqlite3.gyp:sqlite3'],
+ 'include_dirs': ['/usr/include/sqlcipher'],
+ 'direct_dependent_settings': {
+ 'include_dirs': ['/usr/include/sqlcipher'],
+ },
+ 'link_settings': {
+ 'libraries': [
+ '-L/usr/lib/sqlcipher',
+ '-lsqlcipher',
+ ]
+ },
'sources': ['src/better_sqlite3.cpp'],
'cflags_cc': ['-std=c++17'],
'xcode_settings': {
@@ -17,14 +26,22 @@
['OS=="linux"', {
'ldflags': [
'-Wl,-Bsymbolic',
- '-Wl,--exclude-libs,ALL',
],
}],
],
},
{
'target_name': 'test_extension',
- 'dependencies': ['deps/sqlite3.gyp:sqlite3'],
+ 'include_dirs': ['/usr/include/sqlcipher'],
+ 'direct_dependent_settings': {
+ 'include_dirs': ['/usr/include/sqlcipher'],
+ },
+ 'link_settings': {
+ 'libraries': [
+ '-L/usr/lib/sqlcipher',
+ '-lsqlcipher',
+ ]
+ },
'conditions': [['sqlite3 == ""', { 'sources': ['deps/test_extension.c'] }]],
},
],

View file

@ -1,15 +0,0 @@
--- a/ts/scripts/get-expire-time.ts
+++ b/ts/scripts/get-expire-time.ts
@@ -7,11 +7,7 @@
import { DAY } from '../util/durations';
-const unixTimestamp = parseInt(
- execSync('git show -s --format=%ct').toString('utf8'),
- 10
-);
-const buildCreation = unixTimestamp * 1000;
+const buildCreation = new Date().getTime();
const buildExpiration = buildCreation + DAY * 90;

View file

@ -1,6 +0,0 @@
#!/bin/sh
# app chooses config (including used endpoints) based on this
export NODE_ENV=production
exec electron /usr/lib/signal-desktop/app.asar

View file

@ -1,10 +0,0 @@
[Desktop Entry]
Name=Signal
Exec=/usr/bin/signal-desktop %U
Terminal=false
Type=Application
Icon=signal-desktop
StartupWMClass=Signal
Comment=Private messaging from your desktop
MimeType=x-scheme-handler/sgnl;x-scheme-handler/signalcaptcha;
Categories=Network;InstantMessaging;Chat;

View file

@ -1,9 +0,0 @@
--- a/config/production.json
+++ b/config/production.json
@@ -11,5 +11,5 @@
},
"serverPublicParams": "AMhf5ywVwITZMsff/eCyudZx9JDmkkkbV6PInzG4p8x3VqVJSFiMvnvlEKWuRob/1eaIetR31IYeAbm0NdOuHH8Qi+Rexi1wLlpzIo1gstHWBfZzy1+qHRV5A4TqPp15YzBPm0WSggW6PbSn+F4lf57VCnHF7p8SvzAA2ZZJPYJURt8X7bbg+H3i+PEjH9DXItNEqs2sNcug37xZQDLm7X36nOoGPs54XsEGzPdEV+itQNGUFEjY6X9Uv+Acuks7NpyGvCoKxGwgKgE5XyJ+nNKlyHHOLb6N1NuHyBrZrgtY/JYJHRooo5CEqYKBqdFnmbTVGEkCvJKxLnjwKWf+fEPoWeQFj5ObDjcKMZf2Jm2Ae69x+ikU5gBXsRmoF94GXQ==",
"serverTrustRoot": "BXu6QIKVz5MA8gstzfOgRQGqyLqOwNKHL6INkv3IHWMF",
- "updatesEnabled": true
+ "updatesEnabled": false
}

View file

@ -1,25 +0,0 @@
--- a/ts/components/DialogExpiredBuild.tsx
+++ b/ts/components/DialogExpiredBuild.tsx
@@ -29,9 +29,9 @@
containerWidthBreakpoint={containerWidthBreakpoint}
type="error"
onClick={() => {
- openLinkInWebBrowser('https://signal.org/download/');
+ openLinkInWebBrowser('https://pkgs.alpinelinux.org/packages?name=signal-desktop');
}}
- clickLabel={i18n('upgrade')}
+ clickLabel={<code>apk upgrade signal-desktop</code>}
hasAction
>
{i18n('expiredWarning')}{' '}
--- a/ts/components/DialogUpdate.tsx
+++ b/ts/components/DialogUpdate.tsx
@@ -27,7 +27,7 @@
currentVersion: string;
};
-const PRODUCTION_DOWNLOAD_URL = 'https://signal.org/download/';
+const PRODUCTION_DOWNLOAD_URL = 'https://pkgs.alpinelinux.org/packages?name=signal-desktop';
const BETA_DOWNLOAD_URL = 'https://support.signal.org/beta';
export const DialogUpdate = ({

View file

@ -1,13 +0,0 @@
no canonicalize_file_name on musl. funnily, the file using this says this is
not portable, but avoids the nonportability of realpath(path, NULL);
--- a/third_party/nasm/config/config-linux.h
+++ b/third_party/nasm/config/config-linux.h
@@ -139,7 +139,7 @@
#define HAVE_ACCESS 1
/* Define to 1 if you have the `canonicalize_file_name' function. */
-#define HAVE_CANONICALIZE_FILE_NAME 1
+/* #define HAVE_CANONICALIZE_FILE_NAME 1 */
/* Define to 1 if you have the `cpu_to_le16' intrinsic function. */
/* #undef HAVE_CPU_TO_LE16 */

View file

@ -1,14 +0,0 @@
--- a/BUILD.gn
+++ b/BUILD.gn
@@ -480,11 +480,6 @@
"rtc_base",
"sdk",
"video",
-
- # Added by RingRTC
- # Necessary for use_custom_libcxx=true,
- # which is in turn necessary for deploying to Ubuntu 16.04.
- "//build/config:shared_library_deps",
]
if (rtc_include_builtin_audio_codecs) {

View file

@ -1,13 +0,0 @@
--- ./build/config/compiler/BUILD.gn
+++ ./build/config/compiler/BUILD.gn
@@ -766,8 +766,8 @@
}
} else if (current_cpu == "arm64") {
if (is_clang && !is_android && !is_nacl && !is_fuchsia) {
- cflags += [ "--target=aarch64-linux-gnu" ]
- ldflags += [ "--target=aarch64-linux-gnu" ]
+ cflags += [ "--target=aarch64-alpine-linux-musl" ]
+ ldflags += [ "--target=aarch64-alpine-linux-musl" ]
}
} else if (current_cpu == "mipsel" && !is_nacl) {
ldflags += [ "-Wl,--hash-style=sysv" ]

Some files were not shown because too many files have changed in this diff Show more