Added missing v3.16 backports

build@apk-groulx 2023-01-12 18:10:32 +00:00
parent f8643d1074
commit 787729f3ad
109 changed files with 17875 additions and 2806 deletions

APKBUILD
@@ -1,462 +0,0 @@
# Maintainer: psykose <alice@ayaya.dev>
pkgname=electron
pkgver=21.3.3
pkgrel=3
_chromium=106.0.5249.199
_depot_tools=6fde0fbe9226ae3fc9f5c709adb93249924e5c49
pkgdesc="Electron cross-platform desktop toolkit"
url="https://github.com/electron/electron"
arch="aarch64 x86_64" # same as chromium
license="MIT"
depends="gtk+3.0 so:libudev.so.1 xdg-utils"
makedepends="
alsa-lib-dev
aom-dev
bash
brotli-dev
bsd-compat-headers
bzip2-dev
c-ares-dev
cairo-dev
clang-dev
clang-extra-tools
compiler-rt
cups-dev
curl-dev
dav1d-dev
dbus-glib-dev
eudev-dev
ffmpeg-dev
findutils
flac-dev
flex
freetype-dev
gperf
gtk+3.0-dev
gn
gzip
harfbuzz-dev
hunspell-dev
http-parser-dev
hwids-usb
java-jdk
jpeg-dev
jsoncpp-dev
krb5-dev
lcms2-dev
libarchive-tools
libavif-dev
libbsd-dev
libcap-dev
libevent-dev
libexif-dev
libgcrypt-dev
libjpeg-turbo-dev
libnotify-dev
libusb-dev
libva-dev
libwebp-dev
libxcomposite-dev
libxcursor-dev
libxinerama-dev
libxml2-dev
libxrandr-dev
libxscrnsaver-dev
libxslt-dev
linux-headers
lld
llvm
mesa-dev
minizip-dev
nghttp2-dev
nodejs
npm
nss-dev
opus-dev
pciutils-dev
perl
pipewire-dev
pulseaudio-dev
py3-httplib2
py3-parsing
py3-six
python3
re2-dev
samurai
snappy-dev
speex-dev
sqlite-dev
woff2-dev
xcb-proto
yarn
zlib-dev
"
subpackages="$pkgname-lang $pkgname-dev"
# the lower patches are specific to electron, the top ones are from the equivalent chromium version
source="https://dev.alpinelinux.org/archive/electron/electron-$pkgver.tar.xz
angle-wayland-include.patch
canonicalize-file-name.patch
chromium-VirtualCursor-standard-layout.patch
chromium-revert-drop-of-system-java.patch
chromium-use-alpine-target.patch
credentials-sys-types-header.patch
default-pthread-stacksize.patch
dns-resolver.patch
fix-crashpad.patch
fix-missing-cstdint-include-musl.patch
fix-narrowing-cast.patch
gdbinit.patch
jsoncpp.patch
memory-tagging-arm64.patch
musl-sandbox.patch
musl-tid-caching.patch
musl-v8-monotonic-pthread-cont_timedwait.patch
no-execinfo.patch
no-glibc-version.patch
no-mallinfo.patch
no-res-ninit-nclose.patch
no-stat-redefine.patch
nullptr-t.patch
partition-atfork.patch
py3.11.patch
quiche-arena-size.patch
roll-src-third_party-ffmpeg-102.patch
roll-src-third_party-ffmpeg-106.patch
scoped-file-no-close.patch
temp-failure-retry.patch
wtf-stacksize.patch
icon.patch
python-jinja-3.10.patch
system-node.patch
vector-const.patch
webpack-hash.patch
default.conf
electron.desktop
electron-launcher.sh
"
options="!check suid"
# clang uses much less memory (and this package doesn't support gcc)
export CC=clang
export CXX=clang++
# required to find the tools
export AR=llvm-ar
export NM=llvm-nm
export LD=clang++
# less log spam, reproducible
export CFLAGS="${CFLAGS/-g/} -O2 -Wno-unknown-warning-option -Wno-builtin-macro-redefined"
export CXXFLAGS="${CXXFLAGS/-g/} -O2 -Wno-unknown-warning-option -Wno-builtin-macro-redefined"
export CPPFLAGS="$CPPFLAGS -D__DATE__= -D__TIME__= -D__TIMESTAMP__="
_gn_flags() {
echo "$@"
}
# creates a dist tarball that does not need to git clone everything at build time.
snapshot() {
export CHROMIUM_BUILDTOOLS_PATH="$srcdir/src/buildtools"
mkdir -p "$srcdir"
cd "$srcdir"
git clone --branch=$_chromium --depth=1 \
https://chromium.googlesource.com/chromium/src.git
git clone https://github.com/electron/electron.git
(
git clone --depth 1 -b main https://chromium.googlesource.com/chromium/tools/depot_tools.git
cd depot_tools
git fetch --depth 1 origin $_depot_tools
git checkout $_depot_tools
)
export PATH="$PATH:$srcdir/depot_tools"
echo "solutions = [
{
\"name\": \"src/electron\",
\"url\": \"file://$srcdir/electron@v$pkgver\",
\"deps_file\": \"DEPS\",
\"managed\": False,
\"custom_deps\": {
\"src\": None,
},
\"custom_vars\": {},
},
]" > .gclient
python3 depot_tools/gclient.py sync \
--with_branch_heads \
--with_tags \
--nohooks
python3 src/build/landmines.py
python3 src/build/util/lastchange.py -o src/build/util/LASTCHANGE
python3 src/build/util/lastchange.py -s src/third_party/dawn \
--revision src/gpu/webgpu/DAWN_VERSION
python3 src/build/util/lastchange.py -m GPU_LISTS_VERSION \
--revision-id-only --header src/gpu/config/gpu_lists_version.h
python3 src/build/util/lastchange.py -m SKIA_COMMIT_HASH \
-s src/third_party/skia --header src/skia/ext/skia_commit_hash.h
# why?
cp -r electron/patches/ffmpeg src/electron/patches/
python3 electron/script/apply_all_patches.py \
electron/patches/config.json
python3 src/tools/download_optimization_profile.py \
--newest_state=src/chrome/android/profiles/newest.txt \
--local_state=src/chrome/android/profiles/local.txt \
--output_name=src/chrome/android/profiles/afdo.prof \
--gs_url_base=chromeos-prebuilt/afdo-job/llvm
mv src $pkgname-$pkgver
# extra binaries are most likely things we don't want, so nuke them all
for elf in $(scanelf -RA -F "%F" $pkgname-$pkgver); do
rm -f "$elf"
done
msg "generating tarball.. (this takes a while)"
tar -cf $pkgname-$pkgver.tar \
--exclude="ChangeLog*" \
--exclude="testdata/" \
--exclude="test_data/" \
--exclude="android_rust_toolchain/toolchain/" \
--exclude-backups \
--exclude-caches-all \
--exclude-vcs \
$pkgname-$pkgver
xz -T0 -e -9 -vv -k $pkgname-$pkgver.tar
}
prepare() {
default_prepare
git init .
# link to system tools
ln -sfv /usr/bin/clang-format buildtools/linux64/clang-format
mkdir -p third_party/node/linux/node-linux-x64/bin
ln -sfv /usr/bin/node third_party/node/linux/node-linux-x64/bin/node
ln -sfv /usr/bin/java third_party/jdk/current/bin/java
(
cd electron
git init .
git config user.email "example@example.com"
git config user.name "example"
git add LICENSE
git commit -m "init"
git tag "v$pkgver"
# jesus christ what the fuck is wrong with you?
touch .git/packed-refs
yarn install --frozen-lockfile
)
(
cd third_party/node
npm ci
)
local use_system="
brotli
dav1d
ffmpeg
flac
fontconfig
freetype
harfbuzz-ng
icu
jsoncpp
libaom
libavif
libdrm
libevent
libjpeg
libwebp
libxml
libxslt
opus
re2
snappy
woff2
zlib
"
for _lib in $use_system libjpeg_turbo; do
msg "Removing buildscripts for system provided $_lib"
find . -type f -path "*third_party/$_lib/*" \
\! -path "*third_party/$_lib/chromium/*" \
\! -path "*third_party/$_lib/google/*" \
\! -path './base/third_party/icu/*' \
\! -path './third_party/libxml/*' \
\! -path './third_party/pdfium/third_party/freetype/include/pstables.h' \
\! -path './third_party/harfbuzz-ng/utils/hb_scoped.h' \
\! -path './third_party/crashpad/crashpad/third_party/zlib/zlib_crashpad.h' \
\! -regex '.*\.\(gn\|gni\|isolate\|py\)' \
-delete
done
# https://groups.google.com/a/chromium.org/d/topic/chromium-packagers/9JX1N2nf4PU/discussion
touch chrome/test/data/webui/i18n_process_css_test.html
# Use the file at run time instead of effectively compiling it in
sed 's|//third_party/usb_ids/usb.ids|/usr/share/hwdata/usb.ids|g' \
-i services/device/public/cpp/usb/BUILD.gn
python3 build/linux/unbundle/replace_gn_files.py --system-libraries \
$use_system
python3 third_party/libaddressinput/chromium/tools/update-strings.py
# prevent annoying errors when regenerating gni
sed -i 's,^update_readme$,#update_readme,' \
third_party/libvpx/generate_gni.sh
# allow system dependencies in "official builds"
sed -i 's/OFFICIAL_BUILD/GOOGLE_CHROME_BUILD/' \
tools/generate_shim_headers/generate_shim_headers.py
# https://crbug.com/893950
sed -i -e 's/\<xmlMalloc\>/malloc/' -e 's/\<xmlFree\>/free/' \
third_party/blink/renderer/core/xml/*.cc \
third_party/blink/renderer/core/xml/parser/xml_document_parser.cc \
third_party/libxml/chromium/*.cc \
third_party/maldoca/src/maldoca/ole/oss_utils.h
msg "Configuring build"
local gn_config="
import(\"//electron/build/args/release.gn\")
blink_enable_generated_code_formatting=false
chrome_pgo_phase=0
clang_use_chrome_plugins=false
custom_toolchain=\"//build/toolchain/linux/unbundle:default\"
disable_fieldtrial_testing_config=true
enable_hangout_services_extension=true
enable_nacl=false
enable_stripping=false
enable_vr=false
fatal_linker_warnings=false
ffmpeg_branding=\"Chrome\"
host_toolchain=\"//build/toolchain/linux/unbundle:default\"
icu_use_data_file=true
is_cfi=false
is_component_ffmpeg=true
is_debug=false
is_official_build=true
link_pulseaudio=true
proprietary_codecs=true
rtc_link_pipewire=true
rtc_use_pipewire=true
symbol_level=0
treat_warnings_as_errors=false
use_custom_libcxx=false
use_gnome_keyring=false
use_pulseaudio=true
use_sysroot=false
use_system_freetype=true
use_system_harfbuzz=true
use_system_lcms2=true
use_system_libdrm=true
use_system_libjpeg=true
use_system_wayland_scanner=true
use_system_zlib=true
use_vaapi=true
"
gn gen out/Release \
--args="$(echo $gn_config)" \
--export-compile-commands
}
build() {
ninja -C out/Release \
electron_dist_zip \
node_gypi_headers \
node_version_header \
tar_headers
}
package() {
mkdir -p "$pkgdir"/usr/lib/electron "$pkgdir"/usr/bin
bsdtar -xf out/Release/dist.zip -C "$pkgdir"/usr/lib/electron
chmod u+s "$pkgdir"/usr/lib/electron/chrome-sandbox
install -Dm755 "$srcdir"/electron-launcher.sh "$pkgdir"/usr/bin/electron
install -Dm755 "$srcdir"/default.conf "$pkgdir"/etc/electron/default.conf
mkdir -p "$pkgdir"/usr/include/electron
mv -v "$builddir"/out/Release/gen/node_headers "$pkgdir"/usr/include/electron
ln -sv /usr/include/electron/node_headers/include/node "$pkgdir"/usr/include/electron/node
mkdir -p "$pkgdir"/usr/include/electron/node_headers/include/nan
cp -v "$builddir"/third_party/nan/*.h "$pkgdir"/usr/include/electron/node_headers/include/nan
ln -sv /usr/include/electron/node_headers/include/nan "$pkgdir"/usr/include/electron/nan
install -Dm644 electron/default_app/icon.png \
"$pkgdir"/usr/share/icons/hicolor/1024x1024/apps/electron.png
install -Dm644 "$srcdir"/electron.desktop \
-t "$pkgdir"/usr/share/applications/
}
lang() {
pkgdesc="$pkgdesc (translations)"
install_if="$pkgname=$pkgver-r$pkgrel lang"
mkdir -p "$subpkgdir"/usr/lib/electron/locales
mv "$pkgdir"/usr/lib/electron/locales/*.pak \
"$subpkgdir"/usr/lib/electron/locales
mv "$subpkgdir"/usr/lib/electron/locales/en-US.pak \
"$pkgdir"/usr/lib/electron/locales
}
sha512sums="
1d21e74875ade836625c28d8d9351b41d2776def248193e9c82d4cd50375e9e9b2f7c40026673fe2a191a936f05c3fe639b0423964356ad678f41545aceede3c electron-21.3.3.tar.xz
f19ba0c0f542115e6f53019659df256471e811a23d2f37569c9d4dfa265c0c1ace3e62c74d7507f82e6b7b4152c704e651810a00616f8f531592b14bb2af01d9 angle-wayland-include.patch
252b37a2ecc5e7a25385943045f426dc2e30991b28d206ceaff1be7fd8ffeeb024310a8fca6b3e69a4b1c57db535d51c570935351053525f393682d5ecd0f9a9 canonicalize-file-name.patch
ac0a80174f95d733f33ddc06fc88cdcf7db0973378c28d8544dc9c19e2dabeac47f91c99b3e7384f650b3405554a9e222543f0860b6acc407c078a8c9180d727 chromium-VirtualCursor-standard-layout.patch
c4654d5b23c6f5d9502507e534fe1951d6749c62251e49b6adfe10d1569431e7f7a5a6fa5ff09ec30984415ced27a5e20985df8c91295de34af3c84557fa5b91 chromium-revert-drop-of-system-java.patch
d2b5b0396173367fcf1804aaee8e7fbefce9c63ac2a91c3a1ede759cb85e567317a57e4d4f82d3ca021682fb7856e15c074276a03eda946d9b28e1cb706b07ad chromium-use-alpine-target.patch
ec04bf43278a19d4bb090eddd636ad7093c7e757cb2ffa1875971e0709761174790e109b9be03a8e299d4077a87fbd8dabd301b8754bb6fe53c055396e8af556 credentials-sys-types-header.patch
4ab8261bf95547b10df44e4d528c06a64c33602c10a1e09d531190dc8947ba6ef9e69145405b801cd364707570f791fee6d93e3bf5d57831f5a85212ddf01729 default-pthread-stacksize.patch
f011f66e5aae5a6f0d440492ee9400878b47b66214c1bc8dc1477fdd07ad0a508cdbb74415e55e27085c4e61e112e7c2ae2edfa217f2fb387e13097b24cb17b1 dns-resolver.patch
9d1edb1e0624ee61825e3af23fbb8c5dbc09d2b92d7769d19f8ca618edae8de8a3e051fedf4ad92c230e1373dc8495922c46971aef93a580c04ad80bc33516c0 fix-crashpad.patch
8bebf4a9d5225c6e47edc5b07c4b97be24a45cc221f49632836915ceeb4ecb69b7f79a31ea7f82171cde3443f45fec541f409892542cf1014e81aa6acd01566d fix-missing-cstdint-include-musl.patch
efe97b9dd2ec5965fa0cdf1b2a3c01253835c2df710da7ea105c4ce008c11f9caaf8b4321736a2b91f06d8d61972c08e225b16509dc05176a2c39337688ad5b9 fix-narrowing-cast.patch
cf73cbe5bf19d6a22157fb7aafb43f326885e852fc6292728f4ed1cd145d07ba5af51b6ec808095136cd406478aaa427ee1b9611c855fbd87976e1a91e1609bd gdbinit.patch
fa2637f92f851614347e296fbab744af2c5d7edcbb444aeb4a5d3182a8ec2549593d75e717d7e78e9b2a7257e693b48fc88c149c1591052d7ae802f4fda0a775 jsoncpp.patch
e0afb7066c2cb41aa461feb9e45e571517229deab9d06186490b527783a7ba826a4d67d3a14a33a164eea64fa561eb5b93a1d4dfd0acc2e7a9eb038e6ee273db memory-tagging-arm64.patch
fdf8ba7badbd5b61d415ad9d49c66b6ef0a6a40ec95a47e13af48711fe1bd3a5574e987929a3c486cdd02c239863b8517e7f834cecd30f156479e43a9441a18e musl-sandbox.patch
85c2842a251a3f8aa59c701ca5f2ce2f5d49b5c7e4773d5387dc597447fb47c9d876f5fb308576686c9a8abc7e35cfc172b6cdfb9f5f1dc1509329e3590b38d8 musl-tid-caching.patch
d2825aa9525fcbb53791f0ef2502c0f444a9d668f09db6ae4987b94dc4d0e6f1cf58a6e9e772ab11c896a469ca32242feb3ad9c9dbb20df9316cd74151ab0ec9 musl-v8-monotonic-pthread-cont_timedwait.patch
ebd5cf28277853dc5e984961c370ab4a6331488ae7cff45083fea0470262f56486664bc9bd7947fdd796c8635e479633c4d08cfd89270c0310f3ec21cc6642d2 no-execinfo.patch
f0bf97a80e663041e33cb0468fd8c47c5f351d6de61059ce1f359a813c40db8e247eaef294c3f562c0a8204e4f1992a918f1d879b1da9891027500e21f482b79 no-glibc-version.patch
10ae0f74a4c1db899b571508100af63e5af8d0f7c41a37fc9b7987cbf9f27f4c55894c02d6820957d7522a528929059f562f96c2f05fd6509f60c6c71d9d8256 no-mallinfo.patch
a5aa82c30402773903db1d3876208132fdef175f56ebc0ce1ee4c104a98d498d709c5565c4381736c04c238203b3c8a9cd7a5b5e69876f6afb65d7fc48df23d0 no-res-ninit-nclose.patch
2c0d7239728d98c0564ad7b81d6d243e58e56de58a21357fa30c39e333fc29c1aa98529c6e1b6fa7694169b513391ca27fa542f69b483bbde644cc2ed739bbdd no-stat-redefine.patch
5e9f6279698195467e3b506cea9be0d97ec2d970672b1b12d3d7880eec4f6f53b8f92942dc3fc6738b02889382534ce0f4310a1f94b33e21f8fbc70b85640b81 nullptr-t.patch
ad563e29ac7d83c203f5af966c4ed3ebdeb5c997835a45fb28ecde08dde5231d0a775fb413f44867af28724504c42316b27d5a6aaa602057642dcbdf7ec20a7a partition-atfork.patch
65aa0c7c9909a803e59b88ecb6d79c4db491079f3324f7bd02ee485a7bb7a81674b8f0591dab766c97070a401116db7f629fee36af7416a0fefc38f4ae0ad13d py3.11.patch
083ed731e0a3788f2cb04a3035022fbb3e4db99eba01516ea233ea3229f7d898943d8115463a48655ac83eb3cc7a48aceb8bf17c68930a5a1d83b1af95dfade8 quiche-arena-size.patch
128ec0fd14349e065c8bb0910d53cbea7423182a06e06f7b7765f3cba1e5ba5e7a8bccbcdab079335b5235abb7bf0d46dee21ecc8c221be7e1c5c6d9795f958e roll-src-third_party-ffmpeg-102.patch
1063e68f477645914ddf5641eabdd3eaf744e569635d645ea860c3546f67a7ef91eded99331378ee75071b67ecfe9bd4be307bd3da7c7ad93509e2bd634d1ec3 roll-src-third_party-ffmpeg-106.patch
3cf36b269e9fcfa74975d267bbf31bef68b533a51672e5ed81ae511a70f28a45206168af370961a3dab5695ddaff41cb8839c8c2fa53f22a9f3c88d207cb2996 scoped-file-no-close.patch
f2f7673f9e793dfbf4456ff8c2be785ea551c36bd512572245d04bf44da08b0133e98d85a1ffd51158009754c83121cad48d755cbc153735df2d2e73233856c0 temp-failure-retry.patch
1d4e8c6e65205e6b72af47b9a2fa6f96aaada9b7d5a74f4e11a345a885df3078b523d02aaf8e9dac3aa30d72bbbd07cd6dc7edcf44fb9ae57a7f81251c398f65 wtf-stacksize.patch
905565c10f5e5600e7d4db965c892cc45009a258e9995da958974d838ace469e1db1019195307e8807860d5b55ba6bfeea478b1f39a9b99e82c619b2816a1a22 icon.patch
e05180199ee1d559e4e577cedd3e589844ecf40d98a86321bf1bea5607b02eeb5feb486deddae40e1005b644550331f6b8500177aa7e79bcb3750d3c1ceb76c3 python-jinja-3.10.patch
26a8e4040e69f335a7104f42d012b9d933a40985b33a7be02add27a801c097c5a2be4c6e69faf9175ce8945210ae4c5592ecad2123ccff2beee5473194a765e3 system-node.patch
71571b15cf8bd6259b7fd22bea0e46b64890f3db776365de33fe539f26ce9ef99459e05c3dde9434c3657225bc67160abc915acd93033cb487c770c6a2a5975f vector-const.patch
2aa340854316f1284217c0ca17cbf44953684ad6c7da90815117df30928612eb9fb9ffb734b948dfc309cd25d1a67cd57f77aac2d052a3dd9aca07a3a58cbb30 webpack-hash.patch
08460b15037511a4e5469ceac6ae5dd4db4c8cb87c129aaaf40ba58b16c60b8a307ffdd85805efead235758abed09ec31db1ef4cf9159f7b9acdcee3031bc96c default.conf
191559fc7aa1ea0353c6fb0cc321ee1d5803a0e44848c8be941cfab96277b0de6a59962d373e2a2a1686c8f9be2bcf2d2f33706759a339a959e297d3f7fda463 electron.desktop
03750694e5e0b66f084c6e43135e60be15abb059e23486346ee4352dcc236984f2f35467b47f2b2ad46c98c22091cc2b978de8e73680febadba169d960f13f9f electron-launcher.sh
"

angle-wayland-include.patch
@@ -1,39 +0,0 @@
Patch-Source: https://github.com/archlinux/svntogit-packages/blob/a353833a5a731abfaa465b658f61894a516aa49b/trunk/angle-wayland-include-protocol.patch
diff -upr third_party/angle.orig/BUILD.gn third_party/angle/BUILD.gn
--- a/third_party/angle.orig/BUILD.gn 2022-08-17 19:38:11.000000000 +0000
+++ b/third_party/angle/BUILD.gn 2022-08-18 11:04:09.061751111 +0000
@@ -489,6 +489,12 @@ config("angle_vulkan_wayland_config") {
if (angle_enable_vulkan && angle_use_wayland &&
defined(vulkan_wayland_include_dirs)) {
include_dirs = vulkan_wayland_include_dirs
+ } else if (angle_enable_vulkan && angle_use_wayland) {
+ include_dirs = [
+ "$wayland_gn_dir/src/src",
+ "$wayland_gn_dir/include/src",
+ "$wayland_gn_dir/include/protocol",
+ ]
}
}
@@ -1073,6 +1079,7 @@ if (angle_use_wayland) {
include_dirs = [
"$wayland_dir/egl",
"$wayland_dir/src",
+ "$wayland_gn_dir/include/protocol",
]
}
diff -upr third_party/angle.orig/src/third_party/volk/BUILD.gn third_party/angle/src/third_party/volk/BUILD.gn
--- a/third_party/angle.orig/src/third_party/volk/BUILD.gn 2022-08-17 19:38:12.000000000 +0000
+++ b/third_party/angle/src/third_party/volk/BUILD.gn 2022-08-18 11:04:36.499828006 +0000
@@ -21,6 +21,9 @@ source_set("volk") {
configs += [ "$angle_root:angle_no_cfi_icall" ]
public_deps = [ "$angle_vulkan_headers_dir:vulkan_headers" ]
if (angle_use_wayland) {
- include_dirs = [ "$wayland_dir/src" ]
+ include_dirs = [
+ "$wayland_dir/src",
+ "$wayland_gn_dir/include/protocol",
+ ]
}
}

canonicalize-file-name.patch
@@ -1,13 +0,0 @@
musl has no canonicalize_file_name. Funnily, the file using it notes that the
function is not portable, yet relies on it to avoid the non-portability of
realpath(path, NULL); a sketch of the portable fallback follows the hunk.
--- a/third_party/nasm/config/config-linux.h
+++ b/third_party/nasm/config/config-linux.h
@@ -139,7 +139,7 @@
#define HAVE_ACCESS 1
/* Define to 1 if you have the `canonicalize_file_name' function. */
-#define HAVE_CANONICALIZE_FILE_NAME 1
+/* #define HAVE_CANONICALIZE_FILE_NAME 1 */
/* Define to 1 if you have the `cpu_to_le16' intrinsic function. */
/* #undef HAVE_CPU_TO_LE16 */
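For reference, a minimal sketch of the portable pattern the note above alludes to, assuming only POSIX.1-2008 realpath(path, NULL) (which returns a malloc'd buffer); the helper name is illustrative and not part of nasm or this patch:

#include <cstdlib>

// canonicalize_file_name(p) is a GNU extension equivalent to realpath(p, NULL);
// musl ships only realpath, so dispatch on the configure-time macro.
static char* portable_canonicalize(const char* path) {
#ifdef HAVE_CANONICALIZE_FILE_NAME
  return canonicalize_file_name(path);  // glibc only
#else
  return realpath(path, nullptr);       // POSIX.1-2008; caller frees the result
#endif
}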

chromium-VirtualCursor-standard-layout.patch
@@ -1,217 +0,0 @@
needed for libstdc++11 + clang only
diff --git a/sql/recover_module/btree.cc b/sql/recover_module/btree.cc
index 9ecaafe..839318a 100644
--- a/sql/recover_module/btree.cc
+++ b/sql/recover_module/btree.cc
@@ -135,16 +135,25 @@
"Move the destructor to the .cc file if it's non-trival");
#endif // !DCHECK_IS_ON()
-LeafPageDecoder::LeafPageDecoder(DatabasePageReader* db_reader) noexcept
- : page_id_(db_reader->page_id()),
- db_reader_(db_reader),
- cell_count_(ComputeCellCount(db_reader)),
- next_read_index_(0),
- last_record_size_(0) {
+void LeafPageDecoder::Initialize(DatabasePageReader* db_reader) {
+ DCHECK(db_reader);
DCHECK(IsOnValidPage(db_reader));
+ page_id_ = db_reader->page_id();
+ db_reader_ = db_reader;
+ cell_count_ = ComputeCellCount(db_reader);
+ next_read_index_ = 0;
+ last_record_size_ = 0;
DCHECK(DatabasePageReader::IsValidPageId(page_id_));
}
+void LeafPageDecoder::Reset() {
+ db_reader_ = nullptr;
+ page_id_ = 0;
+ cell_count_ = 0;
+ next_read_index_ = 0;
+ last_record_size_ = 0;
+}
+
bool LeafPageDecoder::TryAdvance() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(CanAdvance());
diff --git a/sql/recover_module/btree.h b/sql/recover_module/btree.h
index d76d076..33114b0 100644
--- a/sql/recover_module/btree.h
+++ b/sql/recover_module/btree.h
@@ -102,7 +102,7 @@
//
// |db_reader| must have been used to read an inner page of a table B-tree.
// |db_reader| must outlive this instance.
- explicit LeafPageDecoder(DatabasePageReader* db_reader) noexcept;
+ explicit LeafPageDecoder() noexcept = default;
~LeafPageDecoder() noexcept = default;
LeafPageDecoder(const LeafPageDecoder&) = delete;
@@ -150,6 +150,15 @@
// read as long as CanAdvance() returns true.
bool TryAdvance();
+ // Initialize with DatabasePageReader
+ void Initialize(DatabasePageReader* db_reader);
+
+ // Reset internal DatabasePageReader
+ void Reset();
+
+ // True if DatabasePageReader is valid
+ bool IsValid() { return (db_reader_ != nullptr); }
+
// True if the given reader may point to an inner page in a table B-tree.
//
// The last ReadPage() call on |db_reader| must have succeeded.
@@ -163,14 +172,14 @@
static int ComputeCellCount(DatabasePageReader* db_reader);
// The number of the B-tree page this reader is reading.
- const int64_t page_id_;
+ int64_t page_id_;
// Used to read the tree page.
//
// Raw pointer usage is acceptable because this instance's owner is expected
// to ensure that the DatabasePageReader outlives this.
- DatabasePageReader* const db_reader_;
+ DatabasePageReader* db_reader_;
// Caches the ComputeCellCount() value for this reader's page.
- const int cell_count_ = ComputeCellCount(db_reader_);
+ int cell_count_;
// The reader's cursor state.
//
diff --git a/sql/recover_module/cursor.cc b/sql/recover_module/cursor.cc
index 0029ff9..42548bc 100644
--- a/sql/recover_module/cursor.cc
+++ b/sql/recover_module/cursor.cc
@@ -26,7 +26,7 @@
int VirtualCursor::First() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
inner_decoders_.clear();
- leaf_decoder_ = nullptr;
+ leaf_decoder_.Reset();
AppendPageDecoder(table_->root_page_id());
return Next();
@@ -36,18 +36,18 @@
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
record_reader_.Reset();
- while (!inner_decoders_.empty() || leaf_decoder_.get()) {
- if (leaf_decoder_.get()) {
- if (!leaf_decoder_->CanAdvance()) {
+ while (!inner_decoders_.empty() || leaf_decoder_.IsValid()) {
+ if (leaf_decoder_.IsValid()) {
+ if (!leaf_decoder_.CanAdvance()) {
// The leaf has been exhausted. Remove it from the DFS stack.
- leaf_decoder_ = nullptr;
+ leaf_decoder_.Reset();
continue;
}
- if (!leaf_decoder_->TryAdvance())
+ if (!leaf_decoder_.TryAdvance())
continue;
- if (!payload_reader_.Initialize(leaf_decoder_->last_record_size(),
- leaf_decoder_->last_record_offset())) {
+ if (!payload_reader_.Initialize(leaf_decoder_.last_record_size(),
+ leaf_decoder_.last_record_offset())) {
continue;
}
if (!record_reader_.Initialize())
@@ -99,13 +99,13 @@
int64_t VirtualCursor::RowId() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(record_reader_.IsInitialized());
- DCHECK(leaf_decoder_.get());
- return leaf_decoder_->last_record_rowid();
+ DCHECK(leaf_decoder_.IsValid());
+ return leaf_decoder_.last_record_rowid();
}
void VirtualCursor::AppendPageDecoder(int page_id) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
- DCHECK(leaf_decoder_.get() == nullptr)
+ DCHECK(!leaf_decoder_.IsValid())
<< __func__
<< " must only be called when the current path has no leaf decoder";
@@ -113,7 +113,7 @@
return;
if (LeafPageDecoder::IsOnValidPage(&db_reader_)) {
- leaf_decoder_ = std::make_unique<LeafPageDecoder>(&db_reader_);
+ leaf_decoder_.Initialize(&db_reader_);
return;
}
diff --git a/sql/recover_module/cursor.h b/sql/recover_module/cursor.h
index afcd690..b15c31d 100644
--- a/sql/recover_module/cursor.h
+++ b/sql/recover_module/cursor.h
@@ -129,7 +129,7 @@
std::vector<std::unique_ptr<InnerPageDecoder>> inner_decoders_;
// Decodes the leaf page containing records.
- std::unique_ptr<LeafPageDecoder> leaf_decoder_;
+ LeafPageDecoder leaf_decoder_;
SEQUENCE_CHECKER(sequence_checker_);
};
diff --git a/sql/recover_module/pager.cc b/sql/recover_module/pager.cc
index 58e75de..5fe9620 100644
--- a/sql/recover_module/pager.cc
+++ b/sql/recover_module/pager.cc
@@ -23,8 +23,7 @@
"ints are not appropriate for representing page IDs");
DatabasePageReader::DatabasePageReader(VirtualTable* table)
- : page_data_(std::make_unique<uint8_t[]>(table->page_size())),
- table_(table) {
+ : page_data_(), table_(table) {
DCHECK(table != nullptr);
DCHECK(IsValidPageSize(table->page_size()));
}
@@ -57,8 +56,8 @@
std::numeric_limits<int64_t>::max(),
"The |read_offset| computation above may overflow");
- int sqlite_status =
- RawRead(sqlite_file, read_size, read_offset, page_data_.get());
+ int sqlite_status = RawRead(sqlite_file, read_size, read_offset,
+ const_cast<uint8_t*>(page_data_.data()));
// |page_id_| needs to be set to kInvalidPageId if the read failed.
// Otherwise, future ReadPage() calls with the previous |page_id_| value
diff --git a/sql/recover_module/pager.h b/sql/recover_module/pager.h
index 0e388ddc..99314e3 100644
--- a/sql/recover_module/pager.h
+++ b/sql/recover_module/pager.h
@@ -5,6 +5,7 @@
#ifndef SQL_RECOVER_MODULE_PAGER_H_
#define SQL_RECOVER_MODULE_PAGER_H_
+#include <array>
#include <cstdint>
#include <memory>
@@ -70,7 +71,7 @@
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK_NE(page_id_, kInvalidPageId)
<< "Successful ReadPage() required before accessing pager state";
- return page_data_.get();
+ return page_data_.data();
}
// The number of bytes in the page read by the last ReadPage() call.
@@ -137,7 +138,7 @@
int page_id_ = kInvalidPageId;
// Stores the bytes of the last page successfully read by ReadPage().
// The content is undefined if the last call to ReadPage() did not succeed.
- const std::unique_ptr<uint8_t[]> page_data_;
+ const std::array<uint8_t, kMaxPageSize> page_data_;
// Raw pointer usage is acceptable because this instance's owner is expected
// to ensure that the VirtualTable outlives this.
VirtualTable* const table_;

chromium-revert-drop-of-system-java.patch
@@ -1,15 +0,0 @@
This was dropped for some reason in 6951c37cecd05979b232a39e5c10e6346a0f74ef
--- a/third_party/closure_compiler/compiler.py 2021-05-20 04:17:53.000000000 +0200
+++ b/third_party/closure_compiler/compiler.py 2021-05-20 04:17:53.000000000 +0200
@@ -13,8 +13,9 @@
_CURRENT_DIR = os.path.join(os.path.dirname(__file__))
-_JAVA_PATH = os.path.join(_CURRENT_DIR, "..", "jdk", "current", "bin", "java")
-assert os.path.isfile(_JAVA_PATH), "java only allowed in android builds"
+_JAVA_BIN = "java"
+_JDK_PATH = os.path.join(_CURRENT_DIR, "..", "jdk", "current", "bin", "java")
+_JAVA_PATH = _JDK_PATH if os.path.isfile(_JDK_PATH) else _JAVA_BIN
class Compiler(object):
"""Runs the Closure compiler on given source files to typecheck them

chromium-use-alpine-target.patch
@@ -1,13 +0,0 @@
--- a/build/config/compiler/BUILD.gn
+++ b/build/config/compiler/BUILD.gn
@@ -917,8 +917,8 @@
} else if (current_cpu == "arm64") {
if (is_clang && !is_android && !is_nacl && !is_fuchsia &&
!(is_chromeos_lacros && is_chromeos_device)) {
- cflags += [ "--target=aarch64-linux-gnu" ]
- ldflags += [ "--target=aarch64-linux-gnu" ]
+ cflags += [ "--target=aarch64-alpine-linux-musl" ]
+ ldflags += [ "--target=aarch64-alpine-linux-musl" ]
}
if (is_android) {
# Outline atomics crash on Exynos 9810. http://crbug.com/1272795

credentials-sys-types-header.patch
@@ -1,11 +0,0 @@
--- a/sandbox/linux/services/credentials.h
+++ b/sandbox/linux/services/credentials.h
@@ -14,6 +14,8 @@
#include <string>
#include <vector>
+#include <sys/types.h>
+
#include "sandbox/linux/system_headers/capability.h"
#include "sandbox/sandbox_export.h"

default-pthread-stacksize.patch
@@ -1,45 +0,0 @@
--- a/base/threading/platform_thread_linux.cc
+++ b/base/threading/platform_thread_linux.cc
@@ -186,7 +186,8 @@
size_t GetDefaultThreadStackSize(const pthread_attr_t& attributes) {
#if !defined(THREAD_SANITIZER)
- return 0;
+ // use 2mb to avoid running out of space. This is what android uses
+ return 2 * (1 << 20);
#else
// ThreadSanitizer bloats the stack heavily. Evidence has been that the
// default stack size isn't enough for some browser tests.
--- a/base/threading/platform_thread_unittest.cc
+++ b/base/threading/platform_thread_unittest.cc
@@ -420,7 +420,7 @@
((BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS)) && \
!defined(THREAD_SANITIZER)) || \
(BUILDFLAG(IS_ANDROID) && !defined(ADDRESS_SANITIZER))
- EXPECT_EQ(0u, stack_size);
+ EXPECT_EQ(2u << 20, stack_size);
#else
EXPECT_GT(stack_size, 0u);
EXPECT_LT(stack_size, 20u * (1 << 20));
--- a/chrome/browser/shutdown_signal_handlers_posix.cc
+++ b/chrome/browser/shutdown_signal_handlers_posix.cc
@@ -187,11 +187,19 @@
g_shutdown_pipe_read_fd = pipefd[0];
g_shutdown_pipe_write_fd = pipefd[1];
#if !defined(ADDRESS_SANITIZER)
+# if defined(__GLIBC__)
const size_t kShutdownDetectorThreadStackSize = PTHREAD_STACK_MIN * 2;
+# else
+ const size_t kShutdownDetectorThreadStackSize = PTHREAD_STACK_MIN * 2 * 8; // match up musls 2k PTHREAD_STACK_MIN with glibcs 16k
+# endif
#else
+# if defined(__GLIBC__)
// ASan instrumentation bloats the stack frames, so we need to increase the
// stack size to avoid hitting the guard page.
const size_t kShutdownDetectorThreadStackSize = PTHREAD_STACK_MIN * 4;
+# else
+ const size_t kShutdownDetectorThreadStackSize = PTHREAD_STACK_MIN * 4 * 8; // match up musls 2k PTHREAD_STACK_MIN with glibcs 16k
+# endif
#endif
ShutdownDetector* detector = new ShutdownDetector(
g_shutdown_pipe_read_fd, std::move(shutdown_callback), task_runner);
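The x8 factors added above encode the arithmetic spelled out in the patch comments; a small sketch, assuming the 2 KiB (musl) and 16 KiB (glibc) PTHREAD_STACK_MIN values those comments cite:

#include <limits.h>
#include <stddef.h>

// Keep the detector thread's stack the same number of bytes on both libcs:
// glibc: 2 * 16384 = 32768 bytes;  musl: 2 * 2048 * 8 = 32768 bytes.
static const size_t kShutdownDetectorThreadStackSize =
#if defined(__GLIBC__)
    PTHREAD_STACK_MIN * 2;
#else
    PTHREAD_STACK_MIN * 2 * 8;
#endif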

default.conf
@@ -1,5 +0,0 @@
# Default settings for electron. This file is sourced by /bin/sh from
# the electron launcher.
# Options to pass to electron.
ELECTRON_FLAGS="--ozone-platform-hint=auto"

dns-resolver.patch
@@ -1,36 +0,0 @@
--- a/net/dns/host_resolver_manager.cc
+++ b/net/dns/host_resolver_manager.cc
@@ -3014,8 +3014,7 @@
NetworkChangeNotifier::AddConnectionTypeObserver(this);
if (system_dns_config_notifier_)
system_dns_config_notifier_->AddObserver(this);
-#if BUILDFLAG(IS_POSIX) && !BUILDFLAG(IS_APPLE) && !BUILDFLAG(IS_OPENBSD) && \
- !BUILDFLAG(IS_ANDROID)
+#if defined(__GLIBC__)
EnsureDnsReloaderInit();
#endif
--- a/net/dns/dns_reloader.cc
+++ b/net/dns/dns_reloader.cc
@@ -6,8 +6,7 @@
#include "build/build_config.h"
-#if BUILDFLAG(IS_POSIX) && !BUILDFLAG(IS_APPLE) && !BUILDFLAG(IS_OPENBSD) && \
- !BUILDFLAG(IS_ANDROID) && !BUILDFLAG(IS_FUCHSIA)
+#if defined(__GLIBC__)
#include <resolv.h>
--- a/net/dns/host_resolver_proc.cc
+++ b/net/dns/host_resolver_proc.cc
@@ -176,8 +176,7 @@
base::ScopedBlockingCall scoped_blocking_call(FROM_HERE,
base::BlockingType::WILL_BLOCK);
-#if BUILDFLAG(IS_POSIX) && \
- !(BUILDFLAG(IS_APPLE) || BUILDFLAG(IS_OPENBSD) || BUILDFLAG(IS_ANDROID))
+#if defined(__GLIBC__)
DnsReloaderMaybeReload();
#endif
absl::optional<AddressInfo> ai;

electron-launcher.sh
@@ -1,14 +0,0 @@
#!/bin/sh
# Allow the user to override command-line flags
# This is based on Debian's chromium-browser package, and is intended
# to be consistent with Debian.
for f in /etc/electron/*.conf; do
[ -f ${f} ] && . "${f}"
done
# Prefer user defined ELECTRON_USER_FLAGS (from env) over system
# default ELECTRON_FLAGS (from /etc/electron/default.conf).
ELECTRON_FLAGS=${ELECTRON_USER_FLAGS:-"$ELECTRON_FLAGS"}
exec "/usr/lib/electron/electron" "$@" ${ELECTRON_FLAGS}

electron.desktop
@@ -1,8 +0,0 @@
[Desktop Entry]
Type=Application
Name=electron21
Icon=electron
Exec=electron %u
Categories=Development;GTK;
StartupNotify=true
StartupWMClass=electron

fix-crashpad.patch
@@ -1,31 +0,0 @@
--- a/third_party/crashpad/crashpad/client/BUILD.gn
+++ b/third_party/crashpad/crashpad/client/BUILD.gn
@@ -81,6 +81,7 @@
deps = [
":common",
"$mini_chromium_source_parent:chromeos_buildflags",
+ "../util",
]
if (crashpad_is_win) {
--- a/third_party/crashpad/crashpad/util/linux/ptracer.cc
+++ b/third_party/crashpad/crashpad/util/linux/ptracer.cc
@@ -26,6 +26,7 @@
#if defined(ARCH_CPU_X86_FAMILY)
#include <asm/ldt.h>
+#include <asm/ptrace-abi.h>
#endif
namespace crashpad {
--- a/third_party/crashpad/crashpad/util/linux/thread_info.h
+++ b/third_party/crashpad/crashpad/util/linux/thread_info.h
@@ -273,7 +273,7 @@ union FloatContext {
"Size mismatch");
#elif defined(ARCH_CPU_ARMEL)
static_assert(sizeof(f32_t::fpregs) == sizeof(user_fpregs), "Size mismatch");
-#if !defined(__GLIBC__)
+#if defined(OS_ANDROID)
static_assert(sizeof(f32_t::vfp) == sizeof(user_vfp), "Size mismatch");
#endif
#elif defined(ARCH_CPU_ARM64)

fix-missing-cstdint-include-musl.patch
@@ -1,11 +0,0 @@
Patch-Source: https://github.com/void-linux/void-packages/blob/378db3cf5087877588aebaaa8ca3c9d94dfb54e0/srcpkgs/chromium/patches/fix-missing-cstdint-include-musl.patch
--- a/net/third_party/quiche/src/quiche/http2/adapter/window_manager.h
+++ b/net/third_party/quiche/src/quiche/http2/adapter/window_manager.h
@@ -3,6 +3,7 @@
#include <stddef.h>
+#include <cstdint>
#include <functional>
#include "common/platform/api/quiche_export.h"

fix-narrowing-cast.patch
@@ -1,44 +0,0 @@
--- a/base/files/file_util_linux.cc
+++ b/base/files/file_util_linux.cc
@@ -30,7 +30,7 @@
case EXT2_SUPER_MAGIC: // Also ext3 and ext4
case MSDOS_SUPER_MAGIC:
case REISERFS_SUPER_MAGIC:
- case static_cast<int>(BTRFS_SUPER_MAGIC):
+ case BTRFS_SUPER_MAGIC:
case 0x5346544E: // NTFS
case 0x58465342: // XFS
case 0x3153464A: // JFS
@@ -40,14 +40,14 @@
*type = FILE_SYSTEM_NFS;
break;
case SMB_SUPER_MAGIC:
- case static_cast<int>(0xFF534D42): // CIFS
+ case 0xFF534D42: // CIFS
*type = FILE_SYSTEM_SMB;
break;
case CODA_SUPER_MAGIC:
*type = FILE_SYSTEM_CODA;
break;
- case static_cast<int>(HUGETLBFS_MAGIC):
- case static_cast<int>(RAMFS_MAGIC):
+ case HUGETLBFS_MAGIC:
+ case RAMFS_MAGIC:
case TMPFS_MAGIC:
*type = FILE_SYSTEM_MEMORY;
break;
--- a/base/system/sys_info_posix.cc
+++ b/base/system/sys_info_posix.cc
@@ -100,10 +100,10 @@
if (HANDLE_EINTR(statfs(path.value().c_str(), &stats)) != 0)
return false;
switch (stats.f_type) {
case TMPFS_MAGIC:
- case static_cast<int>(HUGETLBFS_MAGIC):
- case static_cast<int>(RAMFS_MAGIC):
+ case HUGETLBFS_MAGIC:
+ case RAMFS_MAGIC:
return true;
}
return false;

gdbinit.patch
@@ -1,21 +0,0 @@
--- a/tools/gdb/gdbinit
+++ b/tools/gdb/gdbinit
@@ -50,17 +50,7 @@
def set_src_dir(compile_dir):
global src_dir
- git = subprocess.Popen(
- ['git', '-C', compile_dir, 'rev-parse', '--show-toplevel'],
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- src_dir, _ = git.communicate()
- if git.returncode:
- return
- if isinstance(src_dir, str):
- src_dir = src_dir.rstrip()
- else:
- src_dir = src_dir.decode('utf-8').rstrip()
+ src_dir = os.path.abspath(os.getcwd())
load_libcxx_pretty_printers(src_dir)

icon.patch
@@ -1,21 +0,0 @@
--- a/electron/default_app/default_app.ts
+++ b/electron/default_app/default_app.ts
@@ -60,7 +60,7 @@
};
if (process.platform === 'linux') {
- options.icon = path.join(__dirname, 'icon.png');
+ options.icon = '/usr/share/icons/hicolor/1024x1024/apps/electron.png';
}
mainWindow = new BrowserWindow(options);
--- a/electron/filenames.gni
+++ b/electron/filenames.gni
@@ -6,7 +6,6 @@
]
default_app_static_sources = [
- "default_app/icon.png",
"default_app/index.html",
"default_app/package.json",
"default_app/styles.css",

jsoncpp.patch
@@ -1,39 +0,0 @@
Patch-Source: https://github.com/archlinux/svntogit-packages/blob/bf2401407df5bcc938382eb03748fbef41e41c89/trunk/unbundle-jsoncpp-avoid-CFI-faults-with-is_cfi-true.patch
From ed8d931e35f81d8566835a579caf7d61368f85b7 Mon Sep 17 00:00:00 2001
From: Evangelos Foutras <evangelos@foutrelis.com>
Date: Tue, 27 Sep 2022 22:20:41 +0000
Subject: [PATCH] unbundle/jsoncpp: avoid CFI faults with is_cfi=true
Ensure jsoncpp symbols have public visibility and are thus excluded from
CFI checks and whole-program optimization. This is achieved by defining
JSON_DLL_BUILD which in turn causes json/config.h to define JSON_API as
__attribute__((visibility("default"))). The latter macro is used to tag
jsoncpp classes and namespace functions throughout jsoncpp's headers.
BUG=1365218
Change-Id: I56277737b7d9ecaeb5e17c8d21a2e55f3d5d5bc9
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/3919652
Reviewed-by: Thomas Anderson <thomasanderson@chromium.org>
Commit-Queue: Thomas Anderson <thomasanderson@chromium.org>
Cr-Commit-Position: refs/heads/main@{#1052077}
---
build/linux/unbundle/jsoncpp.gn | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/build/linux/unbundle/jsoncpp.gn b/build/linux/unbundle/jsoncpp.gn
index 544f9d13c9..e84a0ef27a 100644
--- a/build/linux/unbundle/jsoncpp.gn
+++ b/build/linux/unbundle/jsoncpp.gn
@@ -3,6 +3,11 @@ import("//build/shim_headers.gni")
pkg_config("jsoncpp_config") {
packages = [ "jsoncpp" ]
+
+ # Defining JSON_DLL_BUILD applies public visibility to jsoncpp classes
+ # thus deactivating CFI checks for them. This avoids CFI violations in
+ # virtual calls to system jsoncpp library (https://crbug.com/1365218).
+ defines = [ "JSON_DLL_BUILD" ]
}
shim_headers("jsoncpp_shim") {
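A simplified sketch of the mechanism the quoted commit message describes: with JSON_DLL_BUILD defined, jsoncpp's json/config.h expands JSON_API to a default-visibility attribute on gcc/clang, and default-visibility classes are treated as an external boundary, excluded from CFI checks and whole-program optimization (an illustration, not the actual jsoncpp header):

// Rough shape of the json/config.h logic that the added define toggles.
#if defined(JSON_DLL_BUILD)
#define JSON_API __attribute__((visibility("default")))
#else
#define JSON_API  // may end up hidden under -fvisibility=hidden
#endif

namespace Json {
// Public visibility means virtual calls into the system libjsoncpp are not
// CFI-checked against Chromium's locally built vtables.
class JSON_API Value {
 public:
  bool isNull() const;
};
}  // namespace Json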

memory-tagging-arm64.patch
@@ -1,18 +0,0 @@
--- a/base/allocator/partition_allocator/tagging.cc
+++ b/base/allocator/partition_allocator/tagging.cc
@@ -19,15 +19,6 @@
#define PR_GET_TAGGED_ADDR_CTRL 56
#define PR_TAGGED_ADDR_ENABLE (1UL << 0)
-#if BUILDFLAG(IS_LINUX)
-#include <linux/version.h>
-
-// Linux headers already provide these since v5.10.
-#if LINUX_VERSION_CODE >= KERNEL_VERSION(5, 10, 0)
-#define HAS_PR_MTE_MACROS
-#endif
-#endif
-
#ifndef HAS_PR_MTE_MACROS
#define PR_MTE_TCF_SHIFT 1
#define PR_MTE_TCF_NONE (0UL << PR_MTE_TCF_SHIFT)

musl-sandbox.patch
@@ -1,107 +0,0 @@
diff --git a/sandbox/linux/seccomp-bpf-helpers/syscall_parameters_restrictions.cc ./sandbox/linux/seccomp-bpf-helpers/syscall_parameters_restrictions.cc
index ff5a1c0..da56b9b 100644
--- a/sandbox/linux/seccomp-bpf-helpers/syscall_parameters_restrictions.cc
+++ b/sandbox/linux/seccomp-bpf-helpers/syscall_parameters_restrictions.cc
@@ -139,21 +139,11 @@ namespace sandbox {
// present (as in newer versions of posix_spawn).
ResultExpr RestrictCloneToThreadsAndEPERMFork() {
const Arg<unsigned long> flags(0);
-
- // TODO(mdempsky): Extend DSL to support (flags & ~mask1) == mask2.
- const uint64_t kAndroidCloneMask = CLONE_VM | CLONE_FS | CLONE_FILES |
- CLONE_SIGHAND | CLONE_THREAD |
- CLONE_SYSVSEM;
- const uint64_t kObsoleteAndroidCloneMask = kAndroidCloneMask | CLONE_DETACHED;
-
- const uint64_t kGlibcPthreadFlags =
- CLONE_VM | CLONE_FS | CLONE_FILES | CLONE_SIGHAND | CLONE_THREAD |
- CLONE_SYSVSEM | CLONE_SETTLS | CLONE_PARENT_SETTID | CLONE_CHILD_CLEARTID;
- const BoolExpr glibc_test = flags == kGlibcPthreadFlags;
-
- const BoolExpr android_test =
- AnyOf(flags == kAndroidCloneMask, flags == kObsoleteAndroidCloneMask,
- flags == kGlibcPthreadFlags);
+ const int required = CLONE_VM | CLONE_FS | CLONE_FILES | CLONE_SIGHAND |
+ CLONE_THREAD | CLONE_SYSVSEM;
+ const int safe = CLONE_SETTLS | CLONE_PARENT_SETTID | CLONE_CHILD_CLEARTID |
+ CLONE_DETACHED;
+ const BoolExpr thread_clone_ok = (flags&~safe)==required;
// The following two flags are the two important flags in any vfork-emulating
// clone call. EPERM any clone call that contains both of them.
@@ -163,7 +153,7 @@ ResultExpr RestrictCloneToThreadsAndEPERMFork() {
AnyOf((flags & (CLONE_VM | CLONE_THREAD)) == 0,
(flags & kImportantCloneVforkFlags) == kImportantCloneVforkFlags);
- return If(IsAndroid() ? android_test : glibc_test, Allow())
+ return If(thread_clone_ok, Allow())
.ElseIf(is_fork_or_clone_vfork, Error(EPERM))
.Else(CrashSIGSYSClone());
}
diff --git a/sandbox/linux/seccomp-bpf-helpers/syscall_sets.cc ./sandbox/linux/seccomp-bpf-helpers/syscall_sets.cc
index d9d1882..0567557 100644
--- a/sandbox/linux/seccomp-bpf-helpers/syscall_sets.cc
+++ b/sandbox/linux/seccomp-bpf-helpers/syscall_sets.cc
@@ -392,6 +392,7 @@ bool SyscallSets::IsAllowedProcessStartOrDeath(int sysno) {
#if defined(__i386__)
case __NR_waitpid:
#endif
+ case __NR_set_tid_address:
return true;
case __NR_clone: // Should be parameter-restricted.
case __NR_setns: // Privileged.
@@ -404,7 +405,6 @@ bool SyscallSets::IsAllowedProcessStartOrDeath(int sysno) {
#if defined(__i386__) || defined(__x86_64__) || defined(__mips__)
case __NR_set_thread_area:
#endif
- case __NR_set_tid_address:
case __NR_unshare:
#if !defined(__mips__) && !defined(__aarch64__)
case __NR_vfork:
@@ -514,6 +514,8 @@ bool SyscallSets::IsAllowedAddressSpaceAccess(int sysno) {
case __NR_mlock:
case __NR_munlock:
case __NR_munmap:
+ case __NR_mremap:
+ case __NR_membarrier:
return true;
case __NR_madvise:
case __NR_mincore:
@@ -531,7 +533,6 @@ bool SyscallSets::IsAllowedAddressSpaceAccess(int sysno) {
case __NR_modify_ldt:
#endif
case __NR_mprotect:
- case __NR_mremap:
case __NR_msync:
case __NR_munlockall:
case __NR_readahead:
diff --git a/sandbox/linux/system_headers/linux_syscalls.h ./sandbox/linux/system_headers/linux_syscalls.h
index 2b78a0c..b6fedb5 100644
--- a/sandbox/linux/system_headers/linux_syscalls.h
+++ b/sandbox/linux/system_headers/linux_syscalls.h
@@ -10,6 +10,7 @@
#define SANDBOX_LINUX_SYSTEM_HEADERS_LINUX_SYSCALLS_H_
#include "build/build_config.h"
+#include <sys/syscall.h>
#if defined(__x86_64__)
#include "sandbox/linux/system_headers/x86_64_linux_syscalls.h"
diff --git a/services/service_manager/sandbox/linux/bpf_renderer_policy_linux.cc ./services/service_manager/sandbox/linux/bpf_renderer_policy_linux.cc
index a85c0ea..715aa1e 100644
--- a/sandbox/policy/linux/bpf_renderer_policy_linux.cc
+++ b/sandbox/policy/linux/bpf_renderer_policy_linux.cc
@@ -102,11 +102,11 @@
#if defined(__arm__) || defined(__aarch64__)
case __NR_getcpu:
#endif
- return Allow();
- case __NR_sched_getaffinity:
case __NR_sched_getparam:
case __NR_sched_getscheduler:
case __NR_sched_setscheduler:
+ return Allow();
+ case __NR_sched_getaffinity:
return RestrictSchedTarget(GetPolicyPid(), sysno);
case __NR_prlimit64:
// See crbug.com/662450 and setrlimit comment above.
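Restated outside the seccomp-BPF DSL, the relaxed clone() filter introduced above reduces to the following check, which accepts both glibc's and musl's pthread_create() flag combinations (a sketch; the CLONE_* constants come from <sched.h>):

#include <sched.h>  // CLONE_* (exposed under _GNU_SOURCE on glibc)

static bool IsAllowedThreadClone(unsigned long flags) {
  // Every thread-creating clone() must carry these flags...
  const unsigned long required = CLONE_VM | CLONE_FS | CLONE_FILES |
                                 CLONE_SIGHAND | CLONE_THREAD | CLONE_SYSVSEM;
  // ...and may additionally carry only these harmless ones.
  const unsigned long safe = CLONE_SETTLS | CLONE_PARENT_SETTID |
                             CLONE_CHILD_CLEARTID | CLONE_DETACHED;
  return (flags & ~safe) == required;
}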

musl-tid-caching.patch
@@ -1,81 +0,0 @@
--- a/sandbox/linux/services/namespace_sandbox.cc
+++ b/sandbox/linux/services/namespace_sandbox.cc
@@ -209,6 +209,70 @@
return base::LaunchProcess(argv, launch_options_copy);
}
+#if defined(__aarch64__)
+#define TLS_ABOVE_TP
+#endif
+
+struct musl_pthread
+{
+ /* Part 1 -- these fields may be external or
+ * internal (accessed via asm) ABI. Do not change. */
+ struct pthread *self;
+#ifndef TLS_ABOVE_TP
+ uintptr_t *dtv;
+#endif
+ struct pthread *prev, *next; /* non-ABI */
+ uintptr_t sysinfo;
+#ifndef TLS_ABOVE_TP
+#ifdef CANARY_PAD
+ uintptr_t canary_pad;
+#endif
+ uintptr_t canary;
+#endif
+
+/* Part 2 -- implementation details, non-ABI. */
+ int tid;
+ int errno_val;
+ volatile int detach_state;
+ volatile int cancel;
+ volatile unsigned char canceldisable, cancelasync;
+ unsigned char tsd_used:1;
+ unsigned char dlerror_flag:1;
+ unsigned char *map_base;
+ size_t map_size;
+ void *stack;
+ size_t stack_size;
+ size_t guard_size;
+ void *result;
+ struct __ptcb *cancelbuf;
+ void **tsd;
+ struct {
+ volatile void *volatile head;
+ long off;
+ volatile void *volatile pending;
+ } robust_list;
+ int h_errno_val;
+ volatile int timer_id;
+ locale_t locale;
+ volatile int killlock[1];
+ char *dlerror_buf;
+ void *stdio_locks;
+
+ /* Part 3 -- the positions of these fields relative to
+ * the end of the structure is external and internal ABI. */
+#ifdef TLS_ABOVE_TP
+ uintptr_t canary;
+ uintptr_t *dtv;
+#endif
+};
+
+void MaybeUpdateMuslTidCache()
+{
+ pid_t real_tid = sys_gettid();
+ pid_t* cached_tid_location = &reinterpret_cast<struct musl_pthread*>(pthread_self())->tid;
+ *cached_tid_location = real_tid;
+}
+
// static
pid_t NamespaceSandbox::ForkInNewPidNamespace(bool drop_capabilities_in_child) {
const pid_t pid =
@@ -226,6 +290,7 @@
#if defined(LIBC_GLIBC)
MaybeUpdateGlibcTidCache();
#endif
+ MaybeUpdateMuslTidCache();
return 0;
}

musl-v8-monotonic-pthread-cont_timedwait.patch
@@ -1,22 +0,0 @@
Use the monotonic clock for pthread_cond_timedwait() with musl too, not only with glibc; a small sketch of the mechanism follows the hunk.
--- a/v8/src/base/platform/condition-variable.cc
+++ b/v8/src/base/platform/condition-variable.cc
@@ -16,7 +16,7 @@
ConditionVariable::ConditionVariable() {
#if (V8_OS_FREEBSD || V8_OS_NETBSD || V8_OS_OPENBSD || \
- (V8_OS_LINUX && V8_LIBC_GLIBC))
+ V8_OS_LINUX)
// On Free/Net/OpenBSD and Linux with glibc we can change the time
// source for pthread_cond_timedwait() to use the monotonic clock.
pthread_condattr_t attr;
@@ -92,7 +92,7 @@
&native_handle_, &mutex->native_handle(), &ts);
#else
#if (V8_OS_FREEBSD || V8_OS_NETBSD || V8_OS_OPENBSD || \
- (V8_OS_LINUX && V8_LIBC_GLIBC))
+ V8_OS_LINUX)
// On Free/Net/OpenBSD and Linux with glibc we can change the time
// source for pthread_cond_timedwait() to use the monotonic clock.
result = clock_gettime(CLOCK_MONOTONIC, &ts);
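A minimal standalone sketch of what the now-unconditional Linux path does: create the condition variable with CLOCK_MONOTONIC so pthread_cond_timedwait() deadlines ignore wall-clock jumps (names here are illustrative, not V8's):

#include <pthread.h>
#include <time.h>

static pthread_mutex_t g_mutex = PTHREAD_MUTEX_INITIALIZER;
static pthread_cond_t g_cond;

static void InitMonotonicCond() {
  pthread_condattr_t attr;
  pthread_condattr_init(&attr);
  pthread_condattr_setclock(&attr, CLOCK_MONOTONIC);  // supported by glibc and musl
  pthread_cond_init(&g_cond, &attr);
  pthread_condattr_destroy(&attr);
}

// Caller must hold g_mutex; |ms| is a relative timeout.
static int TimedWaitMs(long ms) {
  struct timespec ts;
  clock_gettime(CLOCK_MONOTONIC, &ts);  // same clock the condvar was created with
  ts.tv_sec += ms / 1000;
  ts.tv_nsec += (ms % 1000) * 1000000L;
  if (ts.tv_nsec >= 1000000000L) {
    ts.tv_sec += 1;
    ts.tv_nsec -= 1000000000L;
  }
  return pthread_cond_timedwait(&g_cond, &g_mutex, &ts);
}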

no-execinfo.patch
@@ -1,107 +0,0 @@
--- a/base/debug/stack_trace_posix.cc
+++ b/base/debug/stack_trace_posix.cc
@@ -27,7 +27,7 @@
#if !defined(USE_SYMBOLIZE)
#include <cxxabi.h>
#endif
-#if !defined(__UCLIBC__) && !defined(_AIX)
+#if defined(__GLIBC__) && !defined(_AIX)
#include <execinfo.h>
#endif
@@ -89,7 +89,7 @@
// Note: code in this function is NOT async-signal safe (std::string uses
// malloc internally).
-#if !defined(__UCLIBC__) && !defined(_AIX)
+#if defined(__GLIBC__) && !defined(_AIX)
std::string::size_type search_from = 0;
while (search_from < text->size()) {
// Look for the start of a mangled symbol, from search_from.
@@ -136,7 +136,7 @@
virtual ~BacktraceOutputHandler() = default;
};
-#if !defined(__UCLIBC__) && !defined(_AIX)
+#if defined(__GLIBC__) && !defined(_AIX)
void OutputPointer(void* pointer, BacktraceOutputHandler* handler) {
// This should be more than enough to store a 64-bit number in hex:
// 16 hex digits + 1 for null-terminator.
@@ -839,7 +839,7 @@
// If we do not have unwind tables, then try tracing using frame pointers.
return base::debug::TraceStackFramePointers(const_cast<const void**>(trace),
count, 0);
-#elif !defined(__UCLIBC__) && !defined(_AIX)
+#elif defined(__GLIBC__) && !defined(_AIX)
// Though the backtrace API man page does not list any possible negative
// return values, we take no chance.
return base::saturated_cast<size_t>(backtrace(trace, count));
@@ -852,13 +852,13 @@
// NOTE: This code MUST be async-signal safe (it's used by in-process
// stack dumping signal handler). NO malloc or stdio is allowed here.
-#if !defined(__UCLIBC__) && !defined(_AIX)
+#if defined(__GLIBC__) && !defined(_AIX)
PrintBacktraceOutputHandler handler;
ProcessBacktrace(trace_, count_, prefix_string, &handler);
#endif
}
-#if !defined(__UCLIBC__) && !defined(_AIX)
+#if defined(__GLIBC__) && !defined(_AIX)
void StackTrace::OutputToStreamWithPrefix(std::ostream* os,
const char* prefix_string) const {
StreamBacktraceOutputHandler handler(os);
--- a/v8/src/codegen/external-reference-table.cc
+++ b/v8/src/codegen/external-reference-table.cc
@@ -11,7 +11,9 @@
#if defined(DEBUG) && defined(V8_OS_LINUX) && !defined(V8_OS_ANDROID)
#define SYMBOLIZE_FUNCTION
+#if defined(__GLIBC__)
#include <execinfo.h>
+#endif
#include <vector>
@@ -96,7 +98,7 @@
}
const char* ExternalReferenceTable::ResolveSymbol(void* address) {
-#ifdef SYMBOLIZE_FUNCTION
+#if defined(SYMBOLIZE_FUNCTION) && defined(__GLIBC__)
char** names = backtrace_symbols(&address, 1);
const char* name = names[0];
// The array of names is malloc'ed. However, each name string is static
--- a/third_party/swiftshader/third_party/llvm-subzero/build/Linux/include/llvm/Config/config.h
+++ b/third_party/swiftshader/third_party/llvm-subzero/build/Linux/include/llvm/Config/config.h
@@ -58,7 +58,7 @@
#define HAVE_ERRNO_H 1
/* Define to 1 if you have the <execinfo.h> header file. */
-#define HAVE_EXECINFO_H 1
+/* #define HAVE_EXECINFO_H 1 */
/* Define to 1 if you have the <fcntl.h> header file. */
#define HAVE_FCNTL_H 1
--- a/base/debug/stack_trace.cc
+++ b/base/debug/stack_trace.cc
@@ -251,7 +253,9 @@
}
void StackTrace::OutputToStream(std::ostream* os) const {
+#if defined(__GLIBC__) && !defined(_AIX)
OutputToStreamWithPrefix(os, nullptr);
+#endif
}
std::string StackTrace::ToString() const {
@@ -281,7 +281,7 @@
}
std::string StackTrace::ToStringWithPrefix(const char* prefix_string) const {
std::stringstream stream;
-#if !defined(__UCLIBC__) && !defined(_AIX)
+#if defined(__GLIBC__) && !defined(_AIX)
OutputToStreamWithPrefix(&stream, prefix_string);
#endif
return stream.str();

no-glibc-version.patch
@@ -1,19 +0,0 @@
--- a/chrome/browser/metrics/chrome_browser_main_extra_parts_metrics.cc
+++ b/chrome/browser/metrics/chrome_browser_main_extra_parts_metrics.cc
@@ -61,7 +61,6 @@
// TODO(crbug.com/1052397): Revisit the macro expression once build flag switch
// of lacros-chrome is complete.
#if BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS_LACROS)
-#include <gnu/libc-version.h>
#include "base/linux_util.h"
#include "base/strings/string_split.h"
@@ -324,7 +323,7 @@
void RecordLinuxGlibcVersion() {
// TODO(crbug.com/1052397): Revisit the macro expression once build flag switch
// of lacros-chrome is complete.
-#if BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS_LACROS)
+#if defined(__GLIBC__) || BUILDFLAG(IS_CHROMEOS_LACROS)
base::Version version(gnu_get_libc_version());
UMALinuxGlibcVersion glibc_version_result = UMA_LINUX_GLIBC_NOT_PARSEABLE;

no-mallinfo.patch
@@ -1,110 +0,0 @@
--- a/base/trace_event/malloc_dump_provider.cc
+++ b/base/trace_event/malloc_dump_provider.cc
@@ -185,7 +185,6 @@
#define MALLINFO2_FOUND_IN_LIBC
struct mallinfo2 info = mallinfo2();
#endif
-#endif // defined(__GLIBC__) && defined(__GLIBC_PREREQ)
#if !defined(MALLINFO2_FOUND_IN_LIBC)
struct mallinfo info = mallinfo();
#endif
@@ -205,6 +204,7 @@
sys_alloc_dump->AddScalar(MemoryAllocatorDump::kNameSize,
MemoryAllocatorDump::kUnitsBytes, info.uordblks);
}
+#endif // defined(__GLIBC__) && defined(__GLIBC_PREREQ)
}
#endif
@@ -339,7 +340,7 @@
&allocated_objects_count);
#elif BUILDFLAG(IS_FUCHSIA)
// TODO(fuchsia): Port, see https://crbug.com/706592.
-#else
+#elif defined(__GLIBC__)
ReportMallinfoStats(/*pmd=*/nullptr, &total_virtual_size, &resident_size,
&allocated_objects_size, &allocated_objects_count);
#endif
--- a/base/process/process_metrics_posix.cc
+++ b/base/process/process_metrics_posix.cc
@@ -105,7 +105,7 @@
#endif // !BUILDFLAG(IS_FUCHSIA)
-#if BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS) || BUILDFLAG(IS_ANDROID)
+#if (BUILDFLAG(IS_LINUX) && defined(__GLIBC__)) || BUILDFLAG(IS_CHROMEOS) || BUILDFLAG(IS_ANDROID)
namespace {
size_t GetMallocUsageMallinfo() {
@@ -123,7 +123,7 @@
}
} // namespace
-#endif // BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS) ||
+#endif // (BUILDFLAG(IS_LINUX) && defined(__GLIBC__)) || BUILDFLAG(IS_CHROMEOS) ||
// BUILDFLAG(IS_ANDROID)
size_t ProcessMetrics::GetMallocUsage() {
@@ -131,9 +131,9 @@
malloc_statistics_t stats = {0};
malloc_zone_statistics(nullptr, &stats);
return stats.size_in_use;
-#elif BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS) || BUILDFLAG(IS_ANDROID)
+#elif (BUILDFLAG(IS_LINUX) && defined(__GLIBC__)) || BUILDFLAG(IS_CHROMEOS) || BUILDFLAG(IS_ANDROID)
return GetMallocUsageMallinfo();
-#elif BUILDFLAG(IS_FUCHSIA)
+#else
// TODO(fuchsia): Not currently exposed. https://crbug.com/735087.
return 0;
#endif
--- a/third_party/tflite/src/tensorflow/lite/profiling/memory_info.cc
+++ b/third_party/tflite/src/tensorflow/lite/profiling/memory_info.cc
@@ -35,7 +35,7 @@
MemoryUsage GetMemoryUsage() {
MemoryUsage result;
-#ifdef __linux__
+#if defined(__linux__) && defined(__GLIBC__)
rusage res;
if (getrusage(RUSAGE_SELF, &res) == 0) {
result.max_rss_kb = res.ru_maxrss;
--- a/third_party/swiftshader/third_party/llvm-subzero/lib/Support/Unix/Process.inc
+++ b/third_party/swiftshader/third_party/llvm-subzero/lib/Support/Unix/Process.inc
@@ -86,11 +86,11 @@
}
size_t Process::GetMallocUsage() {
-#if defined(HAVE_MALLINFO2)
+#if defined(HAVE_MALLINFO2) && defined(__GLIBC__)
struct mallinfo2 mi;
mi = ::mallinfo2();
return mi.uordblks;
-#elif defined(HAVE_MALLINFO)
+#elif defined(HAVE_MALLINFO) && defined(__GLIBC__)
struct mallinfo mi;
mi = ::mallinfo();
return mi.uordblks;
--- a/third_party/swiftshader/third_party/llvm-10.0/configs/linux/include/llvm/Config/config.h
+++ b/third_party/swiftshader/third_party/llvm-10.0/configs/linux/include/llvm/Config/config.h
@@ -122,7 +122,9 @@
/* #undef HAVE_MALLCTL */
/* Define to 1 if you have the `mallinfo' function. */
+#if defined(__GLIBC__)
#define HAVE_MALLINFO 1
+#endif
/* Define to 1 if you have the <malloc.h> header file. */
#define HAVE_MALLOC_H 1
--- a/base/allocator/allocator_shim_default_dispatch_to_partition_alloc.cc
+++ b/base/allocator/allocator_shim_default_dispatch_to_partition_alloc.cc
@@ -717,7 +717,7 @@
#endif // !BUILDFLAG(IS_APPLE) && !BUILDFLAG(IS_ANDROID)
-#if BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS)
+#if 0
SHIM_ALWAYS_EXPORT struct mallinfo mallinfo(void) __THROW {
base::SimplePartitionStatsDumper allocator_dumper;
Allocator()->DumpStats("malloc", true, &allocator_dumper);

no-res-ninit-nclose.patch
@@ -1,30 +0,0 @@
--- a/net/dns/public/scoped_res_state.cc
+++ b/net/dns/public/scoped_res_state.cc
@@ -13,7 +13,7 @@
namespace net {
ScopedResState::ScopedResState() {
-#if BUILDFLAG(IS_OPENBSD) || BUILDFLAG(IS_FUCHSIA)
+#if BUILDFLAG(IS_OPENBSD) || BUILDFLAG(IS_FUCHSIA) || defined(_GNU_SOURCE)
// Note: res_ninit in glibc always returns 0 and sets RES_INIT.
// res_init behaves the same way.
memset(&_res, 0, sizeof(_res));
@@ -25,16 +25,8 @@
}
ScopedResState::~ScopedResState() {
-#if !BUILDFLAG(IS_OPENBSD) && !BUILDFLAG(IS_FUCHSIA)
-
- // Prefer res_ndestroy where available.
-#if BUILDFLAG(IS_APPLE) || BUILDFLAG(IS_FREEBSD)
- res_ndestroy(&res_);
-#else
- res_nclose(&res_);
-#endif // BUILDFLAG(IS_APPLE) || BUILDFLAG(IS_FREEBSD)
-
-#endif // !BUILDFLAG(IS_OPENBSD) && !BUILDFLAG(IS_FUCHSIA)
+ // musl res_init() doesn't actually do anything
+ // no destruction is necessary as no memory has been allocated
}
bool ScopedResState::IsValid() const {

no-stat-redefine.patch
@@ -1,12 +0,0 @@
--- a/base/files/file.h
+++ b/base/files/file.h
@@ -19,7 +19,8 @@
#include "build/build_config.h"
#if BUILDFLAG(IS_BSD) || BUILDFLAG(IS_APPLE) || BUILDFLAG(IS_NACL) || \
- BUILDFLAG(IS_FUCHSIA) || (BUILDFLAG(IS_ANDROID) && __ANDROID_API__ < 21)
+ BUILDFLAG(IS_FUCHSIA) || (BUILDFLAG(IS_ANDROID) && __ANDROID_API__ < 21) || \
+ (defined(OS_LINUX) && !defined(__GLIBC__))
struct stat;
namespace base {
typedef struct stat stat_wrapper_t;

nullptr-t.patch
@@ -1,11 +0,0 @@
--- a/chrome/browser/ui/autofill/autofill_popup_controller_impl.h
+++ b/chrome/browser/ui/autofill/autofill_popup_controller_impl.h
@@ -178,7 +178,7 @@
class AutofillPopupViewPtr {
public:
AutofillPopupViewPtr() = default;
- AutofillPopupViewPtr(nullptr_t) : ptr_(nullptr) {}
+ AutofillPopupViewPtr(std::nullptr_t) : ptr_(nullptr) {}
AutofillPopupViewPtr(AutofillPopupView* ptr) : ptr_(ptr) {}
explicit operator bool() const { return ptr_; }

partition-atfork.patch
@@ -1,15 +0,0 @@
--- a/base/allocator/partition_allocator/partition_root.cc
+++ b/base/allocator/partition_allocator/partition_root.cc
@@ -248,9 +248,9 @@
// However, no perfect solution really exists to make threads + fork()
// cooperate, but deadlocks are real (and fork() is used in DEATH_TEST()s),
// and other malloc() implementations use the same techniques.
- int err =
- pthread_atfork(BeforeForkInParent, AfterForkInParent, AfterForkInChild);
- PA_CHECK(err == 0);
+ //int err =
+ // pthread_atfork(BeforeForkInParent, AfterForkInParent, AfterForkInChild);
+ //PA_CHECK(err == 0);
#endif // BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS)
}

py3.11.patch
@@ -1,11 +0,0 @@
--- a/tools/grit/grit/util.py
+++ b/tools/grit/grit/util.py
@@ -209,7 +209,7 @@
mode = 'rb'
encoding = None
else:
- mode = 'rU'
+ mode = 'r'
with io.open(filename, mode, encoding=encoding) as f:
return f.read()

python-jinja-3.10.patch
@@ -1,22 +0,0 @@
--- a/third_party/electron_node/tools/inspector_protocol/jinja2/runtime.py
+++ b/third_party/electron_node/tools/inspector_protocol/jinja2/runtime.py
@@ -315,7 +315,7 @@ class Context(with_metaclass(ContextMeta
# register the context as mapping if possible
try:
- from collections import Mapping
+ from collections.abc import Mapping
Mapping.register(Context)
except ImportError:
pass
--- a/third_party/electron_node/tools/inspector_protocol/jinja2/sandbox.py
+++ b/third_party/electron_node/tools/inspector_protocol/jinja2/sandbox.py
@@ -14,7 +14,7 @@
"""
import types
import operator
-from collections import Mapping
+from collections.abc import Mapping
from jinja2.environment import Environment
from jinja2.exceptions import SecurityError
from jinja2._compat import string_types, PY2

quiche-arena-size.patch
@@ -1,11 +0,0 @@
--- a/net/third_party/quiche/src/quiche/quic/core/quic_one_block_arena.h
+++ b/net/third_party/quiche/src/quiche/quic/core/quic_one_block_arena.h
@@ -69,7 +69,7 @@
// QuicConnections currently use around 1KB of polymorphic types which would
// ordinarily be on the heap. Instead, store them inline in an arena.
-using QuicConnectionArena = QuicOneBlockArena<1280>;
+using QuicConnectionArena = QuicOneBlockArena<1504>;
} // namespace quic

roll-src-third_party-ffmpeg-102.patch
@@ -1,287 +0,0 @@
--- a/media/cdm/library_cdm/clear_key_cdm/ffmpeg_cdm_audio_decoder.cc
+++ b/media/cdm/library_cdm/clear_key_cdm/ffmpeg_cdm_audio_decoder.cc
@@ -74,7 +74,7 @@
codec_context->sample_fmt = AV_SAMPLE_FMT_NONE;
}
- codec_context->ch_layout.nb_channels = config.channel_count;
+ codec_context->channels = config.channel_count;
codec_context->sample_rate = config.samples_per_second;
if (config.extra_data) {
@@ -124,8 +124,8 @@
case cdm::kAudioFormatPlanarS16:
case cdm::kAudioFormatPlanarF32: {
const int decoded_size_per_channel =
- decoded_audio_size / av_frame.ch_layout.nb_channels;
- for (int i = 0; i < av_frame.ch_layout.nb_channels; ++i) {
+ decoded_audio_size / av_frame.channels;
+ for (int i = 0; i < av_frame.channels; ++i) {
memcpy(output_buffer, av_frame.extended_data[i],
decoded_size_per_channel);
output_buffer += decoded_size_per_channel;
@@ -185,14 +185,13 @@
// Success!
decoding_loop_ = std::make_unique<FFmpegDecodingLoop>(codec_context_.get());
samples_per_second_ = config.samples_per_second;
- bytes_per_frame_ =
- codec_context_->ch_layout.nb_channels * config.bits_per_channel / 8;
+ bytes_per_frame_ = codec_context_->channels * config.bits_per_channel / 8;
output_timestamp_helper_ =
std::make_unique<AudioTimestampHelper>(config.samples_per_second);
is_initialized_ = true;
// Store initial values to guard against midstream configuration changes.
- channels_ = codec_context_->ch_layout.nb_channels;
+ channels_ = codec_context_->channels;
av_sample_format_ = codec_context_->sample_fmt;
return true;
@@ -292,19 +291,17 @@
for (auto& frame : audio_frames) {
int decoded_audio_size = 0;
if (frame->sample_rate != samples_per_second_ ||
- frame->ch_layout.nb_channels != channels_ ||
- frame->format != av_sample_format_) {
+ frame->channels != channels_ || frame->format != av_sample_format_) {
DLOG(ERROR) << "Unsupported midstream configuration change!"
<< " Sample Rate: " << frame->sample_rate << " vs "
- << samples_per_second_
- << ", Channels: " << frame->ch_layout.nb_channels << " vs "
- << channels_ << ", Sample Format: " << frame->format << " vs "
- << av_sample_format_;
+ << samples_per_second_ << ", Channels: " << frame->channels
+ << " vs " << channels_ << ", Sample Format: " << frame->format
+ << " vs " << av_sample_format_;
return cdm::kDecodeError;
}
decoded_audio_size = av_samples_get_buffer_size(
- nullptr, codec_context_->ch_layout.nb_channels, frame->nb_samples,
+ nullptr, codec_context_->channels, frame->nb_samples,
codec_context_->sample_fmt, 1);
if (!decoded_audio_size)
continue;
@@ -323,9 +320,9 @@
size_t* total_size,
std::vector<std::unique_ptr<AVFrame, ScopedPtrAVFreeFrame>>* audio_frames,
AVFrame* frame) {
- *total_size += av_samples_get_buffer_size(
- nullptr, codec_context_->ch_layout.nb_channels, frame->nb_samples,
- codec_context_->sample_fmt, 1);
+ *total_size += av_samples_get_buffer_size(nullptr, codec_context_->channels,
+ frame->nb_samples,
+ codec_context_->sample_fmt, 1);
audio_frames->emplace_back(av_frame_clone(frame));
return true;
}
--- a/media/ffmpeg/ffmpeg_common.cc
+++ b/media/ffmpeg/ffmpeg_common.cc
@@ -345,11 +345,10 @@
codec_context->sample_fmt, codec_context->codec_id);
ChannelLayout channel_layout =
- codec_context->ch_layout.nb_channels > 8
+ codec_context->channels > 8
? CHANNEL_LAYOUT_DISCRETE
- : ChannelLayoutToChromeChannelLayout(
- codec_context->ch_layout.u.mask,
- codec_context->ch_layout.nb_channels);
+ : ChannelLayoutToChromeChannelLayout(codec_context->channel_layout,
+ codec_context->channels);
int sample_rate = codec_context->sample_rate;
switch (codec) {
@@ -402,7 +401,7 @@
extra_data, encryption_scheme, seek_preroll,
codec_context->delay);
if (channel_layout == CHANNEL_LAYOUT_DISCRETE)
- config->SetChannelsForDiscrete(codec_context->ch_layout.nb_channels);
+ config->SetChannelsForDiscrete(codec_context->channels);
#if BUILDFLAG(ENABLE_PLATFORM_AC3_EAC3_AUDIO)
// These are bitstream formats unknown to ffmpeg, so they don't have
@@ -471,7 +470,7 @@
// TODO(scherkus): should we set |channel_layout|? I'm not sure if FFmpeg uses
// said information to decode.
- codec_context->ch_layout.nb_channels = config.channels();
+ codec_context->channels = config.channels();
codec_context->sample_rate = config.samples_per_second();
if (config.extra_data().empty()) {
--- a/media/filters/audio_file_reader.cc
+++ b/media/filters/audio_file_reader.cc
@@ -113,15 +113,14 @@
// Verify the channel layout is supported by Chrome. Acts as a sanity check
// against invalid files. See http://crbug.com/171962
- if (ChannelLayoutToChromeChannelLayout(
- codec_context_->ch_layout.u.mask,
- codec_context_->ch_layout.nb_channels) ==
+ if (ChannelLayoutToChromeChannelLayout(codec_context_->channel_layout,
+ codec_context_->channels) ==
CHANNEL_LAYOUT_UNSUPPORTED) {
return false;
}
// Store initial values to guard against midstream configuration changes.
- channels_ = codec_context_->ch_layout.nb_channels;
+ channels_ = codec_context_->channels;
audio_codec_ = CodecIDToAudioCodec(codec_context_->codec_id);
sample_rate_ = codec_context_->sample_rate;
av_sample_format_ = codec_context_->sample_fmt;
@@ -224,7 +223,7 @@
if (frames_read < 0)
return false;
- const int channels = frame->ch_layout.nb_channels;
+ const int channels = frame->channels;
if (frame->sample_rate != sample_rate_ || channels != channels_ ||
frame->format != av_sample_format_) {
DLOG(ERROR) << "Unsupported midstream configuration change!"
--- a/media/filters/audio_file_reader_unittest.cc
+++ b/media/filters/audio_file_reader_unittest.cc
@@ -121,11 +121,11 @@
EXPECT_FALSE(reader_->Open());
}
- void RunTestFailingDecode(const char* fn, int expect_read = 0) {
+ void RunTestFailingDecode(const char* fn) {
Initialize(fn);
EXPECT_TRUE(reader_->Open());
std::vector<std::unique_ptr<AudioBus>> decoded_audio_packets;
- EXPECT_EQ(reader_->Read(&decoded_audio_packets), expect_read);
+ EXPECT_EQ(reader_->Read(&decoded_audio_packets), 0);
}
void RunTestPartialDecode(const char* fn) {
@@ -219,7 +219,7 @@
}
TEST_F(AudioFileReaderTest, MidStreamConfigChangesFail) {
- RunTestFailingDecode("midstream_config_change.mp3", 42624);
+ RunTestFailingDecode("midstream_config_change.mp3");
}
#endif
--- a/media/filters/audio_video_metadata_extractor.cc
+++ b/media/filters/audio_video_metadata_extractor.cc
@@ -113,15 +113,6 @@
if (!stream)
continue;
- void* display_matrix =
- av_stream_get_side_data(stream, AV_PKT_DATA_DISPLAYMATRIX, nullptr);
- if (display_matrix) {
- rotation_ = VideoTransformation::FromFFmpegDisplayMatrix(
- static_cast<int32_t*>(display_matrix))
- .rotation;
- info.tags["rotate"] = base::NumberToString(rotation_);
- }
-
// Extract dictionary from streams also. Needed for containers that attach
// metadata to contained streams instead the container itself, like OGG.
ExtractDictionary(stream->metadata, &info.tags);
@@ -264,6 +255,8 @@
if (raw_tags->find(tag->key) == raw_tags->end())
(*raw_tags)[tag->key] = tag->value;
+ if (ExtractInt(tag, "rotate", &rotation_))
+ continue;
if (ExtractString(tag, "album", &album_))
continue;
if (ExtractString(tag, "artist", &artist_))
--- a/media/filters/ffmpeg_aac_bitstream_converter.cc
+++ b/media/filters/ffmpeg_aac_bitstream_converter.cc
@@ -195,15 +195,14 @@
if (!header_generated_ || codec_ != stream_codec_parameters_->codec_id ||
audio_profile_ != stream_codec_parameters_->profile ||
sample_rate_index_ != sample_rate_index ||
- channel_configuration_ !=
- stream_codec_parameters_->ch_layout.nb_channels ||
+ channel_configuration_ != stream_codec_parameters_->channels ||
frame_length_ != header_plus_packet_size) {
header_generated_ =
GenerateAdtsHeader(stream_codec_parameters_->codec_id,
0, // layer
stream_codec_parameters_->profile, sample_rate_index,
0, // private stream
- stream_codec_parameters_->ch_layout.nb_channels,
+ stream_codec_parameters_->channels,
0, // originality
0, // home
0, // copyrighted_stream
@@ -215,7 +214,7 @@
codec_ = stream_codec_parameters_->codec_id;
audio_profile_ = stream_codec_parameters_->profile;
sample_rate_index_ = sample_rate_index;
- channel_configuration_ = stream_codec_parameters_->ch_layout.nb_channels;
+ channel_configuration_ = stream_codec_parameters_->channels;
frame_length_ = header_plus_packet_size;
}
--- a/media/filters/ffmpeg_aac_bitstream_converter_unittest.cc
+++ b/media/filters/ffmpeg_aac_bitstream_converter_unittest.cc
@@ -34,7 +34,7 @@
memset(&test_parameters_, 0, sizeof(AVCodecParameters));
test_parameters_.codec_id = AV_CODEC_ID_AAC;
test_parameters_.profile = FF_PROFILE_AAC_MAIN;
- test_parameters_.ch_layout.nb_channels = 2;
+ test_parameters_.channels = 2;
test_parameters_.extradata = extradata_header_;
test_parameters_.extradata_size = sizeof(extradata_header_);
}
--- a/media/filters/ffmpeg_audio_decoder.cc
+++ b/media/filters/ffmpeg_audio_decoder.cc
@@ -28,7 +28,7 @@
// Return the number of channels from the data in |frame|.
static inline int DetermineChannels(AVFrame* frame) {
- return frame->ch_layout.nb_channels;
+ return frame->channels;
}
// Called by FFmpeg's allocation routine to allocate a buffer. Uses
@@ -231,7 +231,7 @@
// Translate unsupported into discrete layouts for discrete configurations;
// ffmpeg does not have a labeled discrete configuration internally.
ChannelLayout channel_layout = ChannelLayoutToChromeChannelLayout(
- codec_context_->ch_layout.u.mask, codec_context_->ch_layout.nb_channels);
+ codec_context_->channel_layout, codec_context_->channels);
if (channel_layout == CHANNEL_LAYOUT_UNSUPPORTED &&
config_.channel_layout() == CHANNEL_LAYOUT_DISCRETE) {
channel_layout = CHANNEL_LAYOUT_DISCRETE;
@@ -348,11 +348,11 @@
// Success!
av_sample_format_ = codec_context_->sample_fmt;
- if (codec_context_->ch_layout.nb_channels != config.channels()) {
+ if (codec_context_->channels != config.channels()) {
MEDIA_LOG(ERROR, media_log_)
<< "Audio configuration specified " << config.channels()
<< " channels, but FFmpeg thinks the file contains "
- << codec_context_->ch_layout.nb_channels << " channels";
+ << codec_context_->channels << " channels";
ReleaseFFmpegResources();
state_ = DecoderState::kUninitialized;
return false;
@@ -403,7 +403,7 @@
if (frame->nb_samples <= 0)
return AVERROR(EINVAL);
- if (s->ch_layout.nb_channels != channels) {
+ if (s->channels != channels) {
DLOG(ERROR) << "AVCodecContext and AVFrame disagree on channel count.";
return AVERROR(EINVAL);
}
@@ -436,8 +436,7 @@
ChannelLayout channel_layout =
config_.channel_layout() == CHANNEL_LAYOUT_DISCRETE
? CHANNEL_LAYOUT_DISCRETE
- : ChannelLayoutToChromeChannelLayout(s->ch_layout.u.mask,
- s->ch_layout.nb_channels);
+ : ChannelLayoutToChromeChannelLayout(s->channel_layout, s->channels);
if (channel_layout == CHANNEL_LAYOUT_UNSUPPORTED) {
DLOG(ERROR) << "Unsupported channel layout.";

View file

@ -1,15 +0,0 @@
--- a/media/filters/audio_file_reader.cc
+++ b/media/filters/audio_file_reader.cc
@@ -243,10 +243,10 @@
// silence from being output. In the case where we are also discarding some
// portion of the packet (as indicated by a negative pts), we further want to
// adjust the duration downward by however much exists before zero.
- if (audio_codec_ == AudioCodec::kAAC && frame->duration) {
+ if (audio_codec_ == AudioCodec::kAAC && frame->pkt_duration) {
const base::TimeDelta pkt_duration = ConvertFromTimeBase(
glue_->format_context()->streams[stream_index_]->time_base,
- frame->duration + std::min(static_cast<int64_t>(0), frame->pts));
+ frame->pkt_duration + std::min(static_cast<int64_t>(0), frame->pts));
const base::TimeDelta frame_duration =
base::Seconds(frames_read / static_cast<double>(sample_rate_));

View file

@ -1,22 +0,0 @@
For some reason this breaks: after a few cycles, an fd number handed out again after a
close() is still present in the ownership lock array,
so just don't enforce ownership or wrap close() at all.
--- a/base/files/scoped_file_linux.cc
+++ b/base/files/scoped_file_linux.cc
@@ -77,15 +77,3 @@
}
} // namespace base
-
-extern "C" {
-
-int __close(int);
-
-__attribute__((visibility("default"), noinline)) int close(int fd) {
- if (base::IsFDOwned(fd) && g_is_ownership_enforced)
- CrashOnFdOwnershipViolation();
- return __close(fd);
-}
-
-} // extern "C"

View file

@ -1,53 +0,0 @@
--- a/third_party/electron_node/BUILD.gn
+++ b/third_party/electron_node/BUILD.gn
@@ -42,6 +42,18 @@
node_module_version = ""
}
+if (is_linux) {
+ import("//build/config/linux/pkg_config.gni")
+
+ pkg_config("cares") {
+ packages = [ "libcares" ]
+ }
+
+ pkg_config("nghttp2") {
+ packages = [ "libnghttp2" ]
+ }
+}
+
assert(!node_use_dtrace, "node_use_dtrace not supported in GN")
assert(!node_use_etw, "node_use_etw not supported in GN")
@@ -182,11 +194,9 @@
component("node_lib") {
deps = [
":node_js2c",
- "deps/cares",
"deps/histogram",
"deps/googletest:gtest",
"deps/llhttp",
- "deps/nghttp2",
"deps/uvwasi",
"//third_party/zlib",
"//third_party/brotli:dec",
@@ -202,6 +212,19 @@
public_configs = [ ":node_lib_config" ]
include_dirs = [ "src" ]
libs = []
+ if (is_linux) {
+ configs += [
+ ":cares",
+ ":nghttp2",
+ ]
+ libs += [ "http_parser" ]
+ } else {
+ deps += [
+ "deps/cares",
+ "deps/http_parser",
+ "deps/nghttp2",
+ ]
+ }
frameworks = []
cflags_cc = [
"-Wno-deprecated-declarations",

View file

@ -1,18 +0,0 @@
TEMP_FAILURE_RETRY is a glibc-specific macro that musl does not define, so provide it locally.
--- a/sandbox/linux/suid/process_util.h
+++ b/sandbox/linux/suid/process_util.h
@@ -11,6 +11,14 @@
#include <stdbool.h>
#include <sys/types.h>
+// Some additional functions
+# define TEMP_FAILURE_RETRY(expression) \
+ (__extension__ \
+ ({ long int __result; \
+ do __result = (long int) (expression); \
+ while (__result == -1L && errno == EINTR); \
+ __result; }))
+
// This adjusts /proc/process/oom_score_adj so the Linux OOM killer
// will prefer certain process types over others. The range for the
// adjustment is [-1000, 1000], with [0, 1000] being user accessible.

View file

@ -1,113 +0,0 @@
--- a/chrome/browser/process_singleton_posix.cc
+++ b/chrome/browser/process_singleton_posix.cc
@@ -607,7 +607,7 @@
// |reader| is for sending back ACK message.
void HandleMessage(const std::string& current_dir,
const std::vector<std::string>& argv,
- const std::vector<const uint8_t> additional_data,
+ const std::vector<uint8_t> additional_data,
SocketReader* reader);
private:
@@ -664,7 +664,7 @@
void ProcessSingleton::LinuxWatcher::HandleMessage(
const std::string& current_dir,
const std::vector<std::string>& argv,
- const std::vector<const uint8_t> additional_data,
+ const std::vector<uint8_t> additional_data,
SocketReader* reader) {
DCHECK(ui_task_runner_->BelongsToCurrentThread());
DCHECK(reader);
@@ -754,7 +754,7 @@
base::StringToSizeT(tokens[0], &num_args);
std::vector<std::string> command_line(tokens.begin() + 1, tokens.begin() + 1 + num_args);
- std::vector<const uint8_t> additional_data;
+ std::vector<uint8_t> additional_data;
if (tokens.size() >= 3 + num_args) {
size_t additional_data_size;
base::StringToSizeT(tokens[1 + num_args], &additional_data_size);
@@ -763,7 +763,7 @@
std::string(1, kTokenDelimiter));
const uint8_t* additional_data_bits =
reinterpret_cast<const uint8_t*>(remaining_args.c_str());
- additional_data = std::vector<const uint8_t>(
+ additional_data = std::vector<uint8_t>(
additional_data_bits, additional_data_bits + additional_data_size);
}
--- a/chrome/browser/process_singleton.h
+++ b/chrome/browser/process_singleton.h
@@ -102,7 +102,7 @@
using NotificationCallback =
base::RepeatingCallback<bool(const base::CommandLine& command_line,
const base::FilePath& current_directory,
- const std::vector<const uint8_t> additional_data)>;
+ const std::vector<uint8_t> additional_data)>;
#if BUILDFLAG(IS_WIN)
ProcessSingleton(const std::string& program_name,
--- a/chrome/browser/process_singleton_win.cc
+++ b/chrome/browser/process_singleton_win.cc
@@ -81,7 +81,7 @@
bool ParseCommandLine(const COPYDATASTRUCT* cds,
base::CommandLine* parsed_command_line,
base::FilePath* current_directory,
- std::vector<const uint8_t>* parsed_additional_data) {
+ std::vector<uint8_t>* parsed_additional_data) {
// We should have enough room for the shortest command (min_message_size)
// and also be a multiple of wchar_t bytes. The shortest command
// possible is L"START\0\0" (empty command line, current directory,
@@ -163,7 +163,7 @@
msg.substr(fourth_null + 1, fifth_null - fourth_null);
const uint8_t* additional_data_bytes =
reinterpret_cast<const uint8_t*>(additional_data.c_str());
- *parsed_additional_data = std::vector<const uint8_t>(additional_data_bytes,
+ *parsed_additional_data = std::vector<uint8_t>(additional_data_bytes,
additional_data_bytes + additional_data_length);
return true;
@@ -187,7 +187,7 @@
base::CommandLine parsed_command_line(base::CommandLine::NO_PROGRAM);
base::FilePath current_directory;
- std::vector<const uint8_t> additional_data;
+ std::vector<uint8_t> additional_data;
if (!ParseCommandLine(cds, &parsed_command_line, &current_directory, &additional_data)) {
*result = TRUE;
return true;
--- a/electron/shell/browser/api/electron_api_app.cc
+++ b/electron/shell/browser/api/electron_api_app.cc
@@ -519,10 +519,10 @@
const base::RepeatingCallback<
void(const base::CommandLine& command_line,
const base::FilePath& current_directory,
- const std::vector<const uint8_t> additional_data)>& callback,
+ const std::vector<uint8_t> additional_data)>& callback,
const base::CommandLine& cmd,
const base::FilePath& cwd,
- const std::vector<const uint8_t> additional_data) {
+ const std::vector<uint8_t> additional_data) {
// Make sure the callback is called after app gets ready.
if (Browser::Get()->is_ready()) {
callback.Run(cmd, cwd, std::move(additional_data));
@@ -1082,7 +1082,7 @@
void App::OnSecondInstance(const base::CommandLine& cmd,
const base::FilePath& cwd,
- const std::vector<const uint8_t> additional_data) {
+ const std::vector<uint8_t> additional_data) {
v8::Isolate* isolate = JavascriptEnvironment::GetIsolate();
v8::Locker locker(isolate);
v8::HandleScope handle_scope(isolate);
--- a/electron/shell/browser/api/electron_api_app.h
+++ b/electron/shell/browser/api/electron_api_app.h
@@ -195,7 +195,7 @@
std::string GetLocaleCountryCode();
void OnSecondInstance(const base::CommandLine& cmd,
const base::FilePath& cwd,
- const std::vector<const uint8_t> additional_data);
+ const std::vector<uint8_t> additional_data);
bool HasSingleInstanceLock() const;
bool RequestSingleInstanceLock(gin::Arguments* args);
void ReleaseSingleInstanceLock();

View file

@ -1,12 +0,0 @@
--- a/electron/build/webpack/webpack.config.base.js
+++ b/electron/build/webpack/webpack.config.base.js
@@ -117,7 +117,8 @@
entry,
target: alwaysHasNode ? 'node' : 'web',
output: {
- filename: outputFilename
+ filename: outputFilename,
+ hashFunction: 'sha256'
},
resolve: {
alias: {

View file

@ -1,20 +0,0 @@
--- a/third_party/blink/renderer/platform/wtf/stack_util.cc
+++ b/third_party/blink/renderer/platform/wtf/stack_util.cc
@@ -29,7 +29,7 @@
// FIXME: On Mac OSX and Linux, this method cannot estimate stack size
// correctly for the main thread.
-#elif defined(__GLIBC__) || BUILDFLAG(IS_ANDROID) || BUILDFLAG(IS_FREEBSD) || \
+#elif BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_ANDROID) || BUILDFLAG(IS_FREEBSD) || \
BUILDFLAG(IS_FUCHSIA)
// pthread_getattr_np() can fail if the thread is not invoked by
// pthread_create() (e.g., the main thread of blink_unittests).
@@ -97,7 +97,7 @@
}
void* GetStackStart() {
-#if defined(__GLIBC__) || BUILDFLAG(IS_ANDROID) || BUILDFLAG(IS_FREEBSD) || \
+#if BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_ANDROID) || BUILDFLAG(IS_FREEBSD) || \
BUILDFLAG(IS_FUCHSIA)
pthread_attr_t attr;
int error;

backports/pandoc/APKBUILD Normal file
View file

@ -0,0 +1,82 @@
# Contributor: Jean-Louis Fuchs <jean-louis.fuchs@adfinis-sygroup.ch>
# Maintainer: Jean-Louis Fuchs <jean-louis.fuchs@adfinis-sygroup.ch>
pkgname=pandoc
pkgver=2.19.2
pkgrel=0
pkgdesc="universal markup converter"
url="https://pandoc.org/"
# limited by ghc
arch="aarch64 x86_64"
license="GPL-2.0-or-later"
makedepends="ghc cabal zlib-dev libffi-dev"
subpackages="$pkgname-doc"
source="https://hackage.haskell.org/package/pandoc-$pkgver/pandoc-$pkgver.tar.gz
texmath-0.12.5.4.patch
cabal.config
"
options="net"
# Cabal seems to be built without a sandbox, so move the cabal dir into src
export CABAL_DIR="$srcdir/.cabal"
cabal_update() {
msg "Freezing $pkgname dependencies"
# Resolve deps and generate fresh cabal.config with version constraints.
cabal update
(
cd "$builddir"
cabal v1-freeze --shadow-installed-packages
# Add version tag at the first line.
sed -i "1i--$pkgver" "cabal.config"
mv "cabal.config" "$startdir/"
)
if ! abuild checksum; then
die "Failed to update checksum, run 'abuild checksum' manually"
fi
}
prepare() {
default_prepare
if [ "$(head -n 1 "$srcdir/cabal.config")" != "--$pkgver" ]; then
die "Requirements file is outdated, run 'abuild cabal_update'"
fi
ln -sf "$srcdir/cabal.config" "$builddir/cabal.project.freeze"
}
build() {
export PATH="$PATH:/usr/lib/llvm14/bin"
cabal update
cabal install --only-dependencies
cabal configure \
--prefix='/usr' \
--enable-tests \
--enable-split-sections \
--ghc-option="-split-sections" \
--flags="+embed_data_files -trypandoc +static"
cabal build --jobs=${JOBS:-1}
}
check() {
cabal test --jobs=${JOBS:-1}
}
package() {
_bindir="$pkgdir/usr/bin"
mkdir -p "$_bindir"
cabal install \
--installdir="$_bindir" \
--install-method=copy
install -Dm644 man/pandoc.1 "$pkgdir"/usr/share/man/man1/pandoc.1
}
sha512sums="
3628a9193d5138294bae562726bcd94567eec10fa0053d43739af04d4eba0a53bd49c2c000a5360afcac08153960a9bf2ee4be3c419cec7e5c13273e718edc80 pandoc-2.19.2.tar.gz
172f8f57c18cc08c976b3c4853be54918fab57aaead2c272685be2183de2e8db9163c26e5f4477ed5059de08b1ed100b6508b0b1ea98c0a20cb6ef9ae6eb52cd texmath-0.12.5.4.patch
02013589a1acd53ffb9ef50bf76ad31b569823f8ef382a783372d0941f462aa53830e506e5b64a1755899eb25111cc912b69628c5ef1c889aec999d4d7883b5d cabal.config
"

View file

@ -0,0 +1,228 @@
--2.19.2
constraints: Cabal ==3.4.0.0,
Glob ==0.10.2,
HUnit ==1.6.2.0,
HsYAML ==0.2.1.1,
JuicyPixels ==3.3.8,
OneTuple ==0.3.1,
QuickCheck ==2.14.2,
SHA ==1.6.4.4,
StateVar ==1.2.2,
aeson ==2.1.1.0,
aeson-pretty ==0.8.9,
ansi-terminal ==0.11.4,
ansi-wl-pprint ==0.6.9,
appar ==0.1.8,
array ==0.5.4.0,
asn1-encoding ==0.9.6,
asn1-parse ==0.9.5,
asn1-types ==0.3.4,
assoc ==1.0.2,
async ==2.2.4,
attoparsec ==0.14.4,
attoparsec-iso8601 ==1.1.0.0,
auto-update ==0.1.6,
base ==4.15.0.0,
base-compat ==0.12.2,
base-compat-batteries ==0.12.2,
base-orphans ==0.8.7,
base16-bytestring ==1.0.2.0,
base64 ==0.4.2.4,
base64-bytestring ==1.2.1.0,
basement ==0.0.15,
bifunctors ==5.5.14,
binary ==0.8.8.0,
bitvec ==1.1.3.0,
blaze-builder ==0.4.2.2,
blaze-html ==0.9.1.2,
blaze-markup ==0.8.2.8,
boring ==0.2,
bsb-http-chunked ==0.0.0.4,
byteorder ==1.0.4,
bytestring ==0.10.12.1,
cabal-doctest ==1.0.9,
call-stack ==0.4.0,
case-insensitive ==1.2.1.0,
cereal ==0.5.8.3,
citeproc ==0.8.0.2,
cmdargs ==0.10.21,
colour ==2.3.6,
commonmark ==0.2.2,
commonmark-extensions ==0.2.3.3,
commonmark-pandoc ==0.2.1.2,
comonad ==5.0.8,
conduit ==1.3.4.3,
conduit-extra ==1.3.6,
connection ==0.3.1,
constraints ==0.13.4,
containers ==0.6.4.1,
contravariant ==1.5.5,
cookie ==0.4.6,
cryptonite ==0.30,
data-array-byte ==0.1.0.1,
data-default ==0.7.1.1,
data-default-class ==0.1.2.0,
data-default-instances-containers ==0.0.1,
data-default-instances-dlist ==0.0.1,
data-default-instances-old-locale ==0.0.1,
data-fix ==0.3.2,
dec ==0.0.5,
deepseq ==1.4.5.0,
digest ==0.0.1.4,
directory ==1.3.6.1,
distributive ==0.6.2.1,
dlist ==1.0,
doclayout ==0.4,
doctemplates ==0.10.0.2,
easy-file ==0.2.2,
emojis ==0.1.2,
exceptions ==0.10.4,
fast-logger ==3.1.1,
file-embed ==0.0.15.0,
filepath ==1.4.2.1,
generically ==0.1,
ghc-bignum ==1.0,
ghc-bignum-orphans ==0.1.1,
ghc-boot-th ==9.0.1,
ghc-prim ==0.7.0,
gridtables ==0.0.3.0,
haddock-library ==1.11.0,
happy ==1.20.0,
hashable ==1.4.2.0,
haskell-lexer ==1.1.1,
hourglass ==0.2.12,
hsc2hs ==0.68.8,
hslua ==2.2.1,
hslua-aeson ==2.2.1,
hslua-classes ==2.2.0,
hslua-core ==2.2.1,
hslua-marshalling ==2.2.1,
hslua-module-doclayout ==1.0.4,
hslua-module-path ==1.0.3,
hslua-module-system ==1.0.2,
hslua-module-text ==1.0.3.1,
hslua-module-version ==1.0.3,
hslua-objectorientation ==2.2.1,
hslua-packaging ==2.2.1,
http-api-data ==0.5,
http-client ==0.7.13.1,
http-client-tls ==0.3.6.1,
http-date ==0.0.11,
http-media ==0.8.0.0,
http-types ==0.12.3,
http2 ==3.0.3,
indexed-traversable ==0.1.2,
indexed-traversable-instances ==0.1.1.1,
integer-gmp ==1.1,
integer-logarithms ==1.0.3.1,
iproute ==1.7.12,
ipynb ==0.2,
jira-wiki-markup ==1.4.0,
libyaml ==0.1.2,
lpeg ==1.0.3,
lua ==2.2.1,
memory ==0.18.0,
mime-types ==0.1.1.0,
mmorph ==1.2.0,
monad-control ==1.0.3.1,
mono-traversable ==1.0.15.3,
mtl ==2.2.2,
network ==3.1.2.7,
network-byte-order ==0.1.6,
network-uri ==2.6.4.2,
old-locale ==1.0.0.7,
old-time ==1.1.0.3,
optparse-applicative ==0.17.0.0,
pandoc-lua-marshal ==0.1.7,
pandoc-types ==1.22.2.1,
parsec ==3.1.14.0,
pem ==0.2.4,
pretty ==1.1.3.6,
pretty-show ==1.10,
primitive ==0.7.4.0,
process ==1.6.11.0,
psqueues ==0.2.7.3,
random ==1.2.1.1,
recv ==0.0.0,
resourcet ==1.2.6,
rts ==1.0,
safe ==0.3.19,
safe-exceptions ==0.1.7.3,
scientific ==0.3.7.0,
semialign ==1.2.0.1,
semigroupoids ==5.3.7,
servant ==0.19.1,
servant-server ==0.19.2,
simple-sendfile ==0.2.30,
singleton-bool ==0.1.6,
skylighting ==0.13.2,
skylighting-core ==0.13.2,
skylighting-format-ansi ==0.1,
skylighting-format-blaze-html ==0.1.1,
skylighting-format-context ==0.1.0.1,
skylighting-format-latex ==0.1,
socks ==0.6.1,
some ==1.0.4.1,
sop-core ==0.5.0.2,
split ==0.2.3.5,
splitmix ==0.1.0.4,
stm ==2.5.0.0,
streaming-commons ==0.2.2.5,
strict ==0.4.0.1,
string-conversions ==0.4.0.1,
syb ==0.7.2.2,
tagged ==0.8.6.1,
tagsoup ==0.14.8,
template-haskell ==2.17.0.0,
temporary ==1.3,
texmath ==0.12.5.4,
text ==1.2.4.1,
text-conversions ==0.3.1.1,
text-short ==0.1.5,
th-abstraction ==0.4.5.0,
th-compat ==0.1.4,
th-lift ==0.8.2,
th-lift-instances ==0.1.20,
these ==1.1.1.1,
time ==1.9.3,
time-compat ==1.9.6.1,
time-manager ==0.0.0,
tls ==1.6.0,
transformers ==0.5.6.2,
transformers-base ==0.4.6,
transformers-compat ==0.7.2,
type-equality ==1,
typed-process ==0.2.10.1,
unicode-collation ==0.1.3.3,
unicode-data ==0.4.0.1,
unicode-transforms ==0.4.0.1,
uniplate ==1.6.13,
unix ==2.7.2.2,
unix-compat ==0.6,
unix-time ==0.4.8,
unliftio ==0.2.23.0,
unliftio-core ==0.2.0.1,
unordered-containers ==0.2.19.1,
utf8-string ==1.0.2,
uuid-types ==1.0.5,
vault ==0.3.1.5,
vector ==0.13.0.0,
vector-algorithms ==0.9.0.1,
vector-stream ==0.1.0.0,
wai ==3.2.3,
wai-app-static ==3.1.7.4,
wai-extra ==3.1.13.0,
wai-logger ==2.4.0,
warp ==3.3.23,
witherable ==0.4.2,
word8 ==0.1.3,
x509 ==1.7.7,
x509-store ==1.6.9,
x509-system ==1.6.7,
x509-validation ==1.6.12,
xml ==1.3.14,
xml-conduit ==1.9.1.1,
xml-types ==0.3.8,
yaml ==0.11.8.0,
zip-archive ==0.4.2.2,
zlib ==0.6.3.0

View file

@ -0,0 +1,13 @@
diff --git a/test/writer.ms b/test/writer.ms
index 9df9083..836d7a2 100644
--- a/test/writer.ms
+++ b/test/writer.ms
@@ -700,7 +700,7 @@ LaTeX
.IP \[bu] 3
Here\[cq]s some display math:
.EQ
-d over {d x} f left ( x right ) = lim sub {h -> 0} {f left ( x + h right ) \[u2212] f left ( x right )} over h
+d over {d x} f left ( x right ) = lim sub {h -> 0} {f left ( x + h right ) - f left ( x right )} over h
.EN
.IP \[bu] 3
Here\[cq]s one that has a line break in it: @alpha + omega times x sup 2@.

View file

@ -0,0 +1,45 @@
# Contributor: Michał Polański <michal@polanski.me>
# Maintainer: Michał Polański <michal@polanski.me>
pkgname=py3-anyio
pkgver=3.6.2
pkgrel=1
pkgdesc="High level compatibility layer for multiple asynchronous event loop implementations"
url="https://github.com/agronholm/anyio"
license="MIT"
arch="noarch !armhf !ppc64le" # limited by py3-uvloop
depends="python3 py3-idna py3-sniffio"
makedepends="py3-setuptools py3-setuptools_scm"
# change this when 4.x releases and upgrade py3-trio
checkdepends="py3-pytest py3-pytest-mock py3-hypothesis py3-trustme py3-trio<0.22 py3-uvloop"
source="https://github.com/agronholm/anyio/archive/$pkgver/py3-anyio-$pkgver.tar.gz"
builddir="$srcdir/anyio-$pkgver"
case "$CARCH" in
x86*)
# weird dns resolution errors on builders
options="$options !check"
;;
esac
export SETUPTOOLS_SCM_PRETEND_VERSION=$pkgver
build() {
python3 setup.py build
}
check() {
python3 setup.py install --root="$PWD/test_install" --skip-build
# Behavior of getaddrinfo differs between event loop implementations
# on musl-based systems
PYTHONPATH="$(echo $PWD/test_install/usr/lib/python3*/site-packages)" pytest \
--deselect tests/test_sockets.py::test_getaddrinfo_ipv6addr
}
package() {
python3 setup.py install --root="$pkgdir" --skip-build
}
sha512sums="
4a0d3dd11393bd3d7a99d3365825df14d70fa14fa6ddf0e3f9eb9affcde7a9ac1f9e5ba38d6ac9b7f246ba9e7d4bea0dd9c8049f1dc8beadbe6b4b803571fc21 py3-anyio-3.6.2.tar.gz
"

View file

@ -11,6 +11,7 @@ depends="
python3
"
makedepends="
+ py3-pip
py3-setuptools
py3-setuptools_scm
py3-wheel

View file

@ -0,0 +1,31 @@
# Contributor: Rasmus Thomsen <oss@cogitri.dev>
# Maintainer: Rasmus Thomsen <oss@cogitri.dev>
pkgname=py3-gnupg
pkgver=0.5.0
pkgrel=1
pkgdesc="Python3 wrapper for the Gnu Privacy Guard (GPG or GnuPG)"
url="https://gnupg.readthedocs.io/en/latest/"
arch="noarch"
license="BSD-3-Clause"
depends="python3 gnupg"
makedepends="py3-setuptools py3-wheel py3-build py3-installer"
checkdepends="py3-pytest"
source="https://pypi.io/packages/source/p/python-gnupg/python-gnupg-$pkgver.tar.gz"
builddir="$srcdir/python-gnupg-$pkgver"
build() {
python3 -m build --no-isolation --wheel
}
check() {
NO_EXTERNAL_TESTS=no pytest -v
}
package() {
python3 -m installer -d "$pkgdir" \
dist/python_gnupg-$pkgver-py2.py3-none-any.whl
}
sha512sums="
cfd302257b53fdc9318004db7323ea5bf4bddc055b65b24386a1ecb27cd476fdf1bc771adcdde70a4eef442982a0c57dc832b92274bbe5ba16cbdf3247f4e77a python-gnupg-0.5.0.tar.gz
"

View file

@ -0,0 +1,52 @@
# Contributor: Michał Polański <michal@polanski.me>
# Maintainer: Michał Polański <michal@polanski.me>
pkgname=py3-httpcore
pkgver=0.15.0
pkgrel=2
pkgdesc="Minimal HTTP client"
url="https://www.encode.io/httpcore/"
license="BSD-3-Clause"
arch="noarch !armhf !ppc64le" # limited by py3-anyio
depends="
python3
py3-anyio
py3-certifi
py3-h11
py3-sniffio
py3-trio<0.22
"
makedepends="py3-setuptools"
checkdepends="
py3-h2
py3-hpack
py3-hyperframe
py3-pytest
py3-pytest-asyncio
py3-pytest-httpbin
py3-pytest-trio
py3-socksio
"
subpackages="$pkgname-doc"
source="https://github.com/encode/httpcore/archive/$pkgver/py3-httpcore-$pkgver.tar.gz
pytest-asyncio-warning.patch
"
builddir="$srcdir/httpcore-$pkgver"
build() {
python3 setup.py build
}
check() {
PYTHONPATH="$PWD/build/lib" pytest
}
package() {
python3 setup.py install --root="$pkgdir" --skip-build
install -Dm644 LICENSE.md "$pkgdir"/usr/share/licenses/$pkgname/LICENSE.md
}
sha512sums="
3c25630d582448e3c7d46176c8862e4d92c6c4aac954bfe46b06e26297b32f996db2e002a87c7187accb5bf4ef86e82d7f9051404bee651ce5254119571d0c95 py3-httpcore-0.15.0.tar.gz
28aa7bc050d56a09df5eb19c2edc8eda389a77c73c043945dcf74f8832387849d7a08c9366c403f451c65ab888720fcc4c5d233d3fb9a6628e91800f6c82ab3f pytest-asyncio-warning.patch
"

View file

@ -0,0 +1,11 @@
looks like upstream enabled a pytest equivalent of -Werror without testing too much
--- a/setup.cfg
+++ b/setup.cfg
@@ -29,6 +29,7 @@
ignore:unclosed <(socket\.socket|ssl\.SSLSocket) .*:ResourceWarning
ignore:ssl\.wrap_socket\(\) is deprecated, use SSLContext\.wrap_socket\(\):DeprecationWarning
ignore:ssl\.PROTOCOL_TLS is deprecated:DeprecationWarning
+ ignore:The 'asyncio_mode' default value will change:DeprecationWarning
[coverage:run]
omit = venv/*, httpcore/_sync/*
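
The same suppression can be tried as a one-off from the command line without patching setup.cfg, since pytest's -W option accepts warning filter specs of the same action:message:category shape (a sketch, not how the APKBUILD actually invokes the tests):

    PYTHONPATH="$PWD/build/lib" pytest \
        -W "ignore:The 'asyncio_mode' default value will change:DeprecationWarning"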

View file

@ -0,0 +1,44 @@
# Contributor: Michał Polański <michal@polanski.me>
# Maintainer: Michał Polański <michal@polanski.me>
pkgname=py3-httpx
pkgver=0.23.0
pkgrel=1
pkgdesc="Next generation HTTP client for Python"
url="https://www.python-httpx.org/"
license="BSD-3-Clause"
arch="noarch !armhf !ppc64le" # limited by py3-httpcore
depends="
python3
py3-certifi
py3-httpcore
py3-idna
py3-rfc3986
py3-sniffio
"
makedepends="py3-setuptools"
checkdepends="py3-pytest py3-pytest-asyncio py3-pytest-trio py3-socksio py3-trustme uvicorn"
subpackages="$pkgname-doc"
source="https://github.com/encode/httpx/archive/$pkgver/py3-httpx-$pkgver.tar.gz
relax-dependencies.patch
"
builddir="$srcdir/httpx-$pkgver"
options="!check" # cyclic dependency on uvicorn
build() {
python3 setup.py build
}
check() {
PYTHONPATH="$PWD/build/lib" pytest
}
package() {
python3 setup.py install --root="$pkgdir" --skip-build
install -Dm644 LICENSE.md "$pkgdir"/usr/share/licenses/$pkgname/LICENSE.md
}
sha512sums="
3cfdf2b3b2f15967a1eec0be05ed947c5e18a46576b68a9cbfd5147dfd4736cb7c389f5431732b93f3a11f3ec6c6f25f7cbb3d96d845f00b58e2b8dae047c1d5 py3-httpx-0.23.0.tar.gz
dc64c27e15116fdd061972747f96caedda4e4f73ca4545e77785d8c319763d55701f059339a502edc709441e21076e689caf113484986d0cb4a09b569b41603a relax-dependencies.patch
"

View file

@ -0,0 +1,13 @@
Relax dependencies
--- a/setup.py
+++ b/setup.py
@@ -58,7 +58,7 @@ setup(
install_requires=[
"certifi",
"sniffio",
- "rfc3986[idna2008]>=1.3,<2",
+ "rfc3986[idna2008]",
"httpcore>=0.15.0,<0.16.0",
],
extras_require={

View file

@ -0,0 +1,26 @@
# Maintainer: psykose <alice@ayaya.dev>
pkgname=py3-nose2
pkgver=0.12.0
pkgrel=1
pkgdesc="Successor to nose, based on unittest"
url="https://docs.nose2.io/en/latest/"
arch="noarch"
license="LGPL-2.0-or-later"
depends="python3"
makedepends="py3-setuptools"
source="https://github.com/nose-devs/nose2/archive/refs/tags/$pkgver/nose2-$pkgver.tar.gz"
options="!check" # don't work
builddir="$srcdir/nose2-$pkgver"
build() {
rm -rf nose2/tests
python3 setup.py build
}
package() {
python3 setup.py install --root="$pkgdir" --skip-build
}
sha512sums="
7f1462613b7d38a77cd0764f721be4223b8f4ae4694c49ba416a076c321e07484969ee1a7c2d2f89484c0c3b2a96e74d6a42321981af4930ebe5f63f07df7755 nose2-0.12.0.tar.gz
"

View file

@ -0,0 +1,30 @@
# Contributor: Rejah Rehim <rejah@beaglesecurity.com>
# Maintainer: Rejah Rehim <rejah@beaglesecurity.com>
pkgname=py3-python-jwt
_pkgname=python_jwt
pkgver=4.0.0
pkgrel=0
pkgdesc="Module for generating and verifying JSON Web Tokens"
options="!check" # no test suite
url="https://github.com/davedoesdev/python-jwt"
arch="noarch"
license="MIT"
depends="python3 py3-jwcrypto"
makedepends="py3-setuptools"
subpackages="$pkgname-doc"
source="https://files.pythonhosted.org/packages/source/p/$_pkgname/$_pkgname-$pkgver.tar.gz"
builddir="$srcdir"/$_pkgname-$pkgver
build() {
python3 setup.py build
}
package() {
python3 setup.py install --skip-build --root="$pkgdir"
install -Dm644 LICENCE "$pkgdir"/usr/share/licenses/$pkgname/LICENCE
install -Dm644 README.md "$pkgdir"/usr/share/licenses/$pkgname/README.md
}
sha512sums="
94c6ebd7738da3087b7192f3eff3e6af3aa7dce3f38cd0e001261e61a5aa42e03fa08d76bb56b7d033ee64723a428bfbad480b6d46934c9b1ef446f613b1cbe0 python_jwt-4.0.0.tar.gz
"

View file

@ -0,0 +1,51 @@
# Contributor: Marian Buschsieweke <marian.buschsieweke@ovgu.de>
# Maintainer: Marian Buschsieweke <marian.buschsieweke@ovgu.de>
pkgname=py3-rapidjson
pkgver=1.9
pkgrel=1
pkgdesc="Python3 wrapper around RapidJSON"
url="https://github.com/python-rapidjson/python-rapidjson"
arch="all"
license="MIT"
depends="
python3
"
makedepends="
py3-setuptools
rapidjson-dev
python3-dev
"
checkdepends="
py3-pytest
py3-tz
"
source="$pkgname-$pkgver.tar.gz::https://github.com/python-rapidjson/python-rapidjson/archive/refs/tags/v$pkgver.tar.gz"
builddir="$srcdir/"python-rapidjson-$pkgver
build() {
python3 setup.py \
--rj-include-dir=/usr/include/rapidjson \
build
}
check() {
PYTHONPATH="$(echo "$PWD"/build/lib.linux*)" pytest \
--ignore benchmarks \
--deselect tests/test_base_types.py::test_base_values \
--deselect tests/test_unicode.py::test_unicode_decode_error \
--deselect tests/test_validator.py::test_additional_and_pattern_properties_valid \
#
}
package() {
python3 setup.py \
--rj-include-dir=/usr/include/rapidjson \
install \
--skip-build \
--prefix=/usr \
--root="$pkgdir"
}
sha512sums="
d3f4c06d021058ea42f01f676e77dfa84997a681d548582060d20713878ecf73ada257186026847de43718764078f7e3a2467a165fa6d8b15b2f3e3d4bded4e1 py3-rapidjson-1.9.tar.gz
"

File diff suppressed because it is too large

View file

@ -0,0 +1,183 @@
From 9bf9f40f41141942be166966ec434720da5b85bd Mon Sep 17 00:00:00 2001
From: Drew DeVault <sir@cmpwn.com>
Date: Wed, 29 Dec 2021 10:16:53 +0100
Subject: [PATCH 2/2] Drop tests/test_ssl.py
This test expects to be run in the upstream project's CI environment.
Ref https://github.com/redis/redis-py/issues/1838
---
tests/test_ssl.py | 161 ----------------------------------------------
1 file changed, 161 deletions(-)
delete mode 100644 tests/test_ssl.py
diff --git a/tests/test_ssl.py b/tests/test_ssl.py
deleted file mode 100644
index a2f66b2..0000000
--- a/tests/test_ssl.py
+++ /dev/null
@@ -1,161 +0,0 @@
-import os
-import socket
-import ssl
-from urllib.parse import urlparse
-
-import pytest
-
-import redis
-from redis.exceptions import ConnectionError, RedisError
-
-from .conftest import skip_if_cryptography, skip_if_nocryptography
-
-
-@pytest.mark.ssl
-class TestSSL:
- """Tests for SSL connections
-
- This relies on the --redis-ssl-url purely for rebuilding the client
- and connecting to the appropriate port.
- """
-
- ROOT = os.path.join(os.path.dirname(__file__), "..")
- CERT_DIR = os.path.abspath(os.path.join(ROOT, "docker", "stunnel", "keys"))
- if not os.path.isdir(CERT_DIR): # github actions package validation case
- CERT_DIR = os.path.abspath(
- os.path.join(ROOT, "..", "docker", "stunnel", "keys")
- )
- if not os.path.isdir(CERT_DIR):
- raise IOError(f"No SSL certificates found. They should be in {CERT_DIR}")
-
- def test_ssl_with_invalid_cert(self, request):
- ssl_url = request.config.option.redis_ssl_url
- sslclient = redis.from_url(ssl_url)
- with pytest.raises(ConnectionError) as e:
- sslclient.ping()
- assert "SSL: CERTIFICATE_VERIFY_FAILED" in str(e)
-
- def test_ssl_connection(self, request):
- ssl_url = request.config.option.redis_ssl_url
- p = urlparse(ssl_url)[1].split(":")
- r = redis.Redis(host=p[0], port=p[1], ssl=True, ssl_cert_reqs="none")
- assert r.ping()
-
- def test_ssl_connection_without_ssl(self, request):
- ssl_url = request.config.option.redis_ssl_url
- p = urlparse(ssl_url)[1].split(":")
- r = redis.Redis(host=p[0], port=p[1], ssl=False)
-
- with pytest.raises(ConnectionError) as e:
- r.ping()
- assert "Connection closed by server" in str(e)
-
- def test_validating_self_signed_certificate(self, request):
- ssl_url = request.config.option.redis_ssl_url
- p = urlparse(ssl_url)[1].split(":")
- r = redis.Redis(
- host=p[0],
- port=p[1],
- ssl=True,
- ssl_certfile=os.path.join(self.CERT_DIR, "server-cert.pem"),
- ssl_keyfile=os.path.join(self.CERT_DIR, "server-key.pem"),
- ssl_cert_reqs="required",
- ssl_ca_certs=os.path.join(self.CERT_DIR, "server-cert.pem"),
- )
- assert r.ping()
-
- def _create_oscp_conn(self, request):
- ssl_url = request.config.option.redis_ssl_url
- p = urlparse(ssl_url)[1].split(":")
- r = redis.Redis(
- host=p[0],
- port=p[1],
- ssl=True,
- ssl_certfile=os.path.join(self.CERT_DIR, "server-cert.pem"),
- ssl_keyfile=os.path.join(self.CERT_DIR, "server-key.pem"),
- ssl_cert_reqs="required",
- ssl_ca_certs=os.path.join(self.CERT_DIR, "server-cert.pem"),
- ssl_validate_ocsp=True,
- )
- return r
-
- @skip_if_cryptography()
- def test_ssl_ocsp_called(self, request):
- r = self._create_oscp_conn(request)
- with pytest.raises(RedisError) as e:
- assert r.ping()
- assert "cryptography not installed" in str(e)
-
- @skip_if_nocryptography()
- def test_ssl_ocsp_called_withcrypto(self, request):
- r = self._create_oscp_conn(request)
- with pytest.raises(ConnectionError) as e:
- assert r.ping()
- assert "No AIA information present in ssl certificate" in str(e)
-
- # rediss://, url based
- ssl_url = request.config.option.redis_ssl_url
- sslclient = redis.from_url(ssl_url)
- with pytest.raises(ConnectionError) as e:
- sslclient.ping()
- assert "No AIA information present in ssl certificate" in str(e)
-
- @skip_if_nocryptography()
- def test_valid_ocsp_cert_http(self):
- from redis.ocsp import OCSPVerifier
-
- hostnames = ["github.com", "aws.amazon.com", "ynet.co.il", "microsoft.com"]
- for hostname in hostnames:
- context = ssl.create_default_context()
- with socket.create_connection((hostname, 443)) as sock:
- with context.wrap_socket(sock, server_hostname=hostname) as wrapped:
- ocsp = OCSPVerifier(wrapped, hostname, 443)
- assert ocsp.is_valid()
-
- @skip_if_nocryptography()
- def test_revoked_ocsp_certificate(self):
- from redis.ocsp import OCSPVerifier
-
- context = ssl.create_default_context()
- hostname = "revoked.badssl.com"
- with socket.create_connection((hostname, 443)) as sock:
- with context.wrap_socket(sock, server_hostname=hostname) as wrapped:
- ocsp = OCSPVerifier(wrapped, hostname, 443)
- assert ocsp.is_valid() is False
-
- @skip_if_nocryptography()
- def test_unauthorized_ocsp(self):
- from redis.ocsp import OCSPVerifier
-
- context = ssl.create_default_context()
- hostname = "stackoverflow.com"
- with socket.create_connection((hostname, 443)) as sock:
- with context.wrap_socket(sock, server_hostname=hostname) as wrapped:
- ocsp = OCSPVerifier(wrapped, hostname, 443)
- with pytest.raises(ConnectionError):
- ocsp.is_valid()
-
- @skip_if_nocryptography()
- def test_ocsp_not_present_in_response(self):
- from redis.ocsp import OCSPVerifier
-
- context = ssl.create_default_context()
- hostname = "google.co.il"
- with socket.create_connection((hostname, 443)) as sock:
- with context.wrap_socket(sock, server_hostname=hostname) as wrapped:
- ocsp = OCSPVerifier(wrapped, hostname, 443)
- assert ocsp.is_valid() is False
-
- @skip_if_nocryptography()
- def test_unauthorized_then_direct(self):
- from redis.ocsp import OCSPVerifier
-
- # these certificates on the socket end return unauthorized
- # then the second call succeeds
- hostnames = ["wikipedia.org", "squarespace.com"]
- for hostname in hostnames:
- context = ssl.create_default_context()
- with socket.create_connection((hostname, 443)) as sock:
- with context.wrap_socket(sock, server_hostname=hostname) as wrapped:
- ocsp = OCSPVerifier(wrapped, hostname, 443)
- assert ocsp.is_valid()
--
2.34.1

View file

@ -0,0 +1,42 @@
# Maintainer: Eivind Uggedal <eu@eju.no>
pkgname=py3-redis
_pkgname=redis
pkgver=4.1.0
pkgrel=1
pkgdesc="Python3 client for Redis key-value store"
url="https://github.com/andymccurdy/redis-py"
arch="noarch"
license="MIT"
depends="python3 py3-deprecated py3-hiredis"
makedepends="py3-setuptools"
checkdepends="py3-pytest py3-mock redis"
source="
https://files.pythonhosted.org/packages/source/${_pkgname:0:1}/$_pkgname/$_pkgname-$pkgver.tar.gz
0001-all-remove-support-for-nonfree-Redis-modules.patch
0002-Drop-tests-test_ssl.py.patch
"
builddir="$srcdir"/$_pkgname-$pkgver
options="!check" # tests fail due to old version + disabled proprietary redis features
replaces="py-redis" # Backwards compatibility
provides="py-redis=$pkgver-r$pkgrel" # Backwards compatibility
build() {
python3 setup.py build
}
check() (
redis-server --dir "$builddir" &
trap "kill $!" EXIT
pytest
)
package() {
python3 setup.py install --skip-build --root="$pkgdir"
}
sha512sums="
85cd09570f4faf34a735befd0677aa8ca2cb0d62b0285c4c040380c2440f2774e47762ec4219381294465343353a15804b96f06b4d6eefa7159a224eb9e72001 redis-4.1.0.tar.gz
b1dd96aeb6129f121108fac3c1ad033b1b657287fb0f959bc7fcab997b26c4b91cc7c0df6f86d6d2ac283951956a4a38826647f0e744514ce5031cf3917d1746 0001-all-remove-support-for-nonfree-Redis-modules.patch
5184efc472ad16020240e57222f906656b1f6db5139d37de22b34298c7a15c9b91f5c2d976f6c8455071459d2ff273f75f6bf76f3f46990bacec6673a83a2872 0002-Drop-tests-test_ssl.py.patch
"

View file

@ -0,0 +1,83 @@
# Contributor: Andrew Manison <amanison@anselsystems.com>
# Contributor: Fabian Affolter <fabian@affolter-engineering.ch>
# Contributor: Leo <thinkabit.ukim@gmail.com>
# Maintainer: psykose <alice@ayaya.dev>
pkgname=py3-setuptools
_pkgname=${pkgname#py3-}
pkgver=65.7.0
pkgrel=0
pkgdesc="Collection of enhancements to the Python3 distutils"
options="!check" # Tests require packages out of main/
url="https://pypi.python.org/pypi/setuptools"
arch="noarch"
license="MIT"
# everything is vendored
depends="
py3-packaging
python3
"
# depends="
# py3-appdirs
# py3-more-itertools
# py3-ordered-set
# py3-packaging
# py3-parsing
# python3
# "
makedepends="py3-setuptools-stage0"
source="$_pkgname-$pkgver.tar.gz::https://github.com/pypa/$_pkgname/archive/v$pkgver.tar.gz"
builddir="$srcdir"/$_pkgname-$pkgver
provides="py-setuptools=$pkgver-r$pkgrel" # Backwards compatibility
replaces="py-setuptools" # Backwards compatiblity
# py3-setuptools needs itself to build, bootstrapped with a lower version
# in main/py3-setuptools-stage0
provides="$provides py3-setuptools-bootstrap"
provider_priority=100 # highest
export SETUPTOOLS_INSTALL_WINDOWS_SPECIFIC_FILES=0
prepare() {
default_prepare
# Unbundle
# rm -rf pkg_resources/extern pkg_resources/_vendor \
# setuptools/extern setuptools/_vendor
# Upstream devendoring logic is badly broken, see:
# https://bugs.archlinux.org/task/58670
# https://github.com/pypa/pip/issues/5429
# https://github.com/pypa/setuptools/issues/1383
# The simplest fix is to simply rewrite import paths to use the canonical
# location in the first place
# for _module in setuptools pkg_resources '' ; do
# find . -name \*.py -exec sed -i \
# -e 's/from '$_module.extern' import/import/' \
# -e 's/from '$_module.extern'./from /' \
# -e 's/import '$_module.extern'./import /' \
# -e "s/__import__('$_module.extern./__import__('/" \
# {} +
# done
# Fix post-release tag
sed -e '/tag_build = .post/d' \
-e '/tag_date = 1/d' \
-i setup.cfg
}
build() {
python3 setup.py build
}
package() {
# Otherwise it complains that build/scripts-3.11 cannot be found
# no other changes noted
mkdir -p build/scripts-3.11
python3 setup.py install --root="$pkgdir" --skip-build
}
sha512sums="
864bdd80acc65c34c472c9401ee6eadee057871b2f10793720887b43354bce08a2201e79fcf92767ddf46912285c46df88ec7733cca25982537e0bd51529e409 setuptools-65.7.0.tar.gz
"

View file

@ -0,0 +1,33 @@
# Contributor: Michał Polański <michal@polanski.me>
# Maintainer: Michał Polański <michal@polanski.me>
pkgname=py3-socksio
pkgver=1.0.0
pkgrel=1
pkgdesc="Sans-I/O implementation of SOCKS4, SOCKS4A, and SOCKS5"
url="https://pypi.org/project/socksio/"
license="MIT"
arch="noarch"
depends="python3"
makedepends="py3-setuptools"
checkdepends="py3-pytest py3-pytest-cov"
subpackages="$pkgname-doc"
source="$pkgname-$pkgver.tar.gz::https://files.pythonhosted.org/packages/source/s/socksio/socksio-$pkgver.tar.gz"
builddir="$srcdir/socksio-$pkgver"
build() {
python3 setup.py build
}
check() {
PYTHONPATH="$PWD/build/lib" pytest
}
package() {
python3 setup.py install --root="$pkgdir" --skip-build
install -Dm644 LICENSE "$pkgdir"/usr/share/licenses/$pkgname/LICENSE
}
sha512sums="
89bce7294555e2623df68e99352c27b83af38a4fcc4e3b86c8826c9c4bf545eeaf0a6328b000cfe1d286fa442c756c4579b4887cff03bc9e559cd66414a7ac6f py3-socksio-1.0.0.tar.gz
"

View file

@ -0,0 +1,37 @@
# Contributor: Michał Polański <michal@polanski.me>
# Maintainer: Michał Polański <michal@polanski.me>
pkgname=py3-uvloop
pkgver=0.16.0
pkgrel=0
pkgdesc="Ultra fast asyncio event loop"
url="https://github.com/MagicStack/uvloop"
license="MIT OR Apache-2.0"
arch="all !armhf !ppc64le" # tests fail
depends="python3"
makedepends="py3-setuptools python3-dev cython libuv-dev py3-pip"
checkdepends="py3-pytest py3-aiohttp py3-openssl py3-psutil"
source="https://github.com/MagicStack/uvloop/archive/v$pkgver/py3-uvloop-$pkgver.tar.gz
tcp-tests.patch
dns-tests.patch
"
options="!check" # mypy validation fails with our mypy version
builddir="$srcdir/uvloop-$pkgver"
build() {
python3 setup.py build build_ext --inplace --cython-always --use-system-libuv
}
check() {
PYTHONASYNCIODEBUG=1 PYTHONPATH="$(echo "$builddir"/build/lib.linux-*)" pytest -v \
-k 'not test_remote_shutdown_receives_trailing_data'
}
package() {
python3 setup.py install --root="$pkgdir" --skip-build
}
sha512sums="
1896d9a60a9c4e4b8d146ad858e664f3e43969ad0c14026fe79c69f546e40bf1dc6a4cce2d388a7a6e0f5b8306b1eb4da3f713cce44c58ba6628b82ac6eaf271 py3-uvloop-0.16.0.tar.gz
809af42dc056b718652ff1e2f99cc493b230a9566367bccf349afc705653ffb830288b7de80bc6016071980af5d5e0e635e72d53f7774ace193ce4fb2b1a62cc tcp-tests.patch
072c955662a9922de1f08713a73f0f9a08bd76b82cabd04e15cbb8b8299d81615516d03bdff207d2f0125afe055e9573604ebc331ad85f5d69ec6bf69668e620 dns-tests.patch
"

View file

@ -0,0 +1,47 @@
Behavior of getaddrinfo provided by libuv differs from musl.
Skip affected tests.
--- a/tests/test_dns.py
+++ b/tests/test_dns.py
@@ -1,4 +1,5 @@
import asyncio
+import pytest
import socket
import unittest
@@ -99,18 +100,22 @@ class BaseTestDNS:
self._test_getaddrinfo(b'example.com', '80')
self._test_getaddrinfo(b'example.com', '80', type=socket.SOCK_STREAM)
+ @pytest.mark.skip(reason="failure")
def test_getaddrinfo_12(self):
self._test_getaddrinfo('127.0.0.1', '80')
self._test_getaddrinfo('127.0.0.1', '80', type=socket.SOCK_STREAM)
+ @pytest.mark.skip(reason="failure")
def test_getaddrinfo_13(self):
self._test_getaddrinfo(b'127.0.0.1', b'80')
self._test_getaddrinfo(b'127.0.0.1', b'80', type=socket.SOCK_STREAM)
+ @pytest.mark.skip(reason="failure")
def test_getaddrinfo_14(self):
self._test_getaddrinfo(b'127.0.0.1', b'http')
self._test_getaddrinfo(b'127.0.0.1', b'http', type=socket.SOCK_STREAM)
+ @pytest.mark.skip(reason="failure")
def test_getaddrinfo_15(self):
self._test_getaddrinfo('127.0.0.1', 'http')
self._test_getaddrinfo('127.0.0.1', 'http', type=socket.SOCK_STREAM)
@@ -127,10 +132,12 @@ class BaseTestDNS:
self._test_getaddrinfo('localhost', b'http')
self._test_getaddrinfo('localhost', b'http', type=socket.SOCK_STREAM)
+ @pytest.mark.skip(reason="failure")
def test_getaddrinfo_19(self):
self._test_getaddrinfo('::1', 80)
self._test_getaddrinfo('::1', 80, type=socket.SOCK_STREAM)
+ @pytest.mark.skip(reason="failure")
def test_getaddrinfo_20(self):
self._test_getaddrinfo('127.0.0.1', 80)
self._test_getaddrinfo('127.0.0.1', 80, type=socket.SOCK_STREAM)
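
The skipped cases are the ones that pass numeric hosts or symbolic service names: the tests compare uvloop's answers (via libuv's resolver) against the stock asyncio loop's, and on musl the two disagree for exactly these inputs. A quick probe of what the system resolver returns on a builder (a sketch; the flags and socket types in the output are what differ):

    python3 -c 'import socket; print(socket.getaddrinfo("127.0.0.1", "http", type=socket.SOCK_STREAM))'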

View file

@ -0,0 +1,15 @@
Adjust error message for musl-based systems.
--- a/tests/test_tcp.py
+++ b/tests/test_tcp.py
@@ -221,8 +221,8 @@ class _TestTCP:
addr = sock.getsockname()
with self.assertRaisesRegex(OSError,
- r"error while attempting.*\('127.*: "
- r"address already in use"):
+ r"\[Errno 98\] error while attempting.*\('127.*: "
+ r"address in use"):
self.loop.run_until_complete(
self.loop.create_server(object, *addr))
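
The regex is adjusted to the musl wording: musl's strerror() text for EADDRINUSE (errno 98) is shorter than glibc's, which is all this hunk reflects. A quick check (a sketch; the commented lines show the expected outputs):

    python3 -c 'import errno, os; print(errno.EADDRINUSE, os.strerror(errno.EADDRINUSE))'
    # musl:  98 Address in use
    # glibc: 98 Address already in use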

View file

@ -0,0 +1,33 @@
# Contributor: Anjandev Momi <anjan@momi.ca>
# Maintainer: Anjandev Momi <anjan@momi.ca>
pkgname=py3-zxcvbn
pkgdesc="Python implementation of Dropbox's realistic password strength estimator"
_pkgreal=zxcvbn
pkgver=4.4.28
pkgrel=1
url="http://packages.python.org/pypi/zxcvbn"
arch="noarch"
license="MIT"
depends="python3"
checkdepends="py3-pytest"
makedepends="py3-setuptools"
_pypiprefix="${_pkgreal%"${_pkgreal#?}"}"
source="https://files.pythonhosted.org/packages/source/$_pypiprefix/$_pkgreal/$_pkgreal-$pkgver.tar.gz"
builddir="$srcdir/$_pkgreal-$pkgver"
options="!check" # no upstream checks
build() {
python3 setup.py build
}
check() {
pytest
}
package() {
python3 setup.py install --skip-build --root="$pkgdir"
}
sha512sums="
6ed12b555442b4ee30662c90f38c90988833545310acce45e68a0aa2fc6297729da500ae0f578a1a266e85c09522eb3287c38d92bcfc1017f852ee76bf92c606 zxcvbn-4.4.28.tar.gz
"

View file

@ -1,387 +0,0 @@
# Contributor: Lauren N. Liberda <lauren@selfisekai.rocks>
# Maintainer: Lauren N. Liberda <lauren@selfisekai.rocks>
pkgname=signal-desktop
pkgver=6.1.0
pkgrel=0
pkgdesc="A messaging app for simple private communication with friends"
url="https://github.com/signalapp/Signal-Desktop/"
# same as electron
# aarch64: polyval-0.5.3 crate subdep uses unstable stdsimd feature
arch="x86_64"
license="AGPL-3.0-only"
# this build system sucks massive ass and does not add the needed deps to anything
depends="
electron
ffmpeg-libs
font-barlow
font-eb-garamond
font-inter
font-parisienne
libevent
opus
"
makedepends="
alsa-lib-dev
bsd-compat-headers
cargo
clang-dev
cmake
electron-dev
ffmpeg-dev
git-lfs
glib-dev
gn
libepoxy-dev
libevent-dev
lld
llvm-dev
mesa-dev
nodejs
npm
openssl-dev
opus-dev
pipewire-dev
protoc
pulseaudio-dev
py3-setuptools
python3
samurai
sqlcipher-dev
vips-dev
yarn
"
options="net !check"
# follow signal-desktop package.json -> @signalapp/libsignal-client
_libsignalver=0.21.1
# follow signal-desktop package.json -> ringrtc -> commit title
_ringrtcver=2.22.0
# follow ringrtc (on version above) -> config/version.sh -> WEBRTC_VERSION
# downloading tarball generated with abuild snapshot (with gclient dependencies fetched)
_webrtcver=5005b
source="
https://github.com/signalapp/Signal-Desktop/archive/refs/tags/v$pkgver/Signal-Desktop-$pkgver.tar.gz
https://github.com/signalapp/libsignal/archive/refs/tags/v$_libsignalver/libsignal-$_libsignalver.tar.gz
https://github.com/signalapp/ringrtc/archive/refs/tags/v$_ringrtcver/ringrtc-$_ringrtcver.tar.gz
https://s3.sakamoto.pl/lnl-aports-snapshots/webrtc-$_webrtcver.tar.xz
bettersqlite-use-system-sqlcipher.patch
signal-build-expire-time.patch
signal-disable-updates.patch
signal-update-links.patch
webrtc-canonicalize-file-name.patch
webrtc-use-alpine-target.patch
webrtc-no-shared-deps-in-static-lib.patch
signal-desktop
signal-desktop.desktop
"
builddir="$srcdir/Signal-Desktop-$pkgver"
export CC=clang
export CXX=clang++
# required to find the tools
export AR=llvm-ar
export NM=llvm-nm
export LD=clang++
# less log spam, reproducible, allow lto with rust
export CFLAGS="${CFLAGS/-g/} -O2 -Wno-deprecated-builtins -Wno-unknown-warning-option -Wno-builtin-macro-redefined"
export CXXFLAGS="${CXXFLAGS/-g/} -O2 -Wno-deprecated-builtins -Wno-unknown-warning-option -Wno-builtin-macro-redefined"
export CPPFLAGS="$CPPFLAGS -D__DATE__= -D__TIME__= -D__TIMESTAMP__="
export CARGO_PROFILE_RELEASE_OPT_LEVEL=2
export CARGO_PROFILE_RELEASE_STRIP="symbols"
export RUSTFLAGS="$RUSTFLAGS -C linker=clang -C link-arg=-fuse-ld=lld"
export YARN_CACHE_FOLDER="$srcdir/.yarn"
# webrtc only, the other dependencies are fine with tarballs
snapshot() {
mkdir -p "$srcdir"
cd "$srcdir"
echo "
solutions = [{
'name': 'src',
'url': 'https://github.com/signalapp/webrtc.git@$_webrtcver',
}]
target_cpu = ['x64']
target_cpu_only = True
" > .gclient
gclient sync --no-history --nohooks --tpot-cipd-ignore-platformed
# needed DEPS hooks
python3 'src/build/landmines.py' --landmine-scripts 'src/tools_webrtc/get_landmines.py' --src-dir 'src'
python3 'src/build/util/lastchange.py' -o 'src/build/util/LASTCHANGE'
for elf in $(scanelf -RA -F "%F" src); do
rm -f "$elf"
done
mv src webrtc-$_webrtcver
msg "generating tarball.."
tar -cf webrtc-$_webrtcver.tar \
--exclude="ChangeLog*" \
--exclude="testdata/" \
--exclude="test_data/" \
--exclude="android_rust_toolchain/toolchain/" \
--exclude-backups \
--exclude-caches-all \
--exclude-vcs \
webrtc-$_webrtcver
xz -T0 -e -9 -vv -k webrtc-$_webrtcver.tar
}
prepare() {
ln -s "$srcdir"/webrtc-$_webrtcver "$srcdir"/ringrtc-$_ringrtcver/src/webrtc
ln -sf "$srcdir"/ringrtc-$_ringrtcver/src "$srcdir"/webrtc-$_webrtcver/ringrtc
msg "Applying patches"
for x in $source; do
case "$x" in
signal-*.patch)
msg "$x"
patch -p1 -i "$srcdir"/$x -d "$srcdir"/Signal-Desktop-$pkgver
;;
ringrtc-*.patch)
msg "$x"
patch -p1 -i "$srcdir"/$x -d "$srcdir"/ringrtc-$_ringrtcver
;;
webrtc-*.patch)
msg "$x"
patch -p1 -i "$srcdir"/$x -d "$srcdir"/webrtc-$_webrtcver
;;
esac
done
msg "Installing signal-desktop JS dependencies"
echo 'ignore-engines true' > .yarnrc
yarn --ignore-scripts --frozen-lockfile
(
cd "$srcdir"/webrtc-$_webrtcver
local use_system="
ffmpeg
fontconfig
freetype
harfbuzz-ng
icu
libdrm
libevent
libjpeg
libpng
libwebp
libxml
libxslt
opus
re2
snappy
zlib
"
for _lib in $use_system libjpeg_turbo; do
msg "Removing buildscripts for system provided $_lib"
find . -type f -path "*third_party/$_lib/*" \
\! -path "*third_party/$_lib/chromium/*" \
\! -path "*third_party/$_lib/google/*" \
\! -path './base/third_party/icu/*' \
\! -path './third_party/libxml/*' \
\! -path './third_party/pdfium/third_party/freetype/include/pstables.h' \
\! -path './third_party/harfbuzz-ng/utils/hb_scoped.h' \
\! -path './third_party/crashpad/crashpad/third_party/zlib/zlib_crashpad.h' \
\! -regex '.*\.\(gn\|gni\|isolate\|py\)' \
-delete
done
msg "Replacing gn files"
python3 build/linux/unbundle/replace_gn_files.py --system-libraries \
$use_system
# allow system dependencies in "official builds"
sed -i 's/OFFICIAL_BUILD/GOOGLE_CHROME_BUILD/' \
tools/generate_shim_headers/generate_shim_headers.py
)
(
cd "$srcdir"/ringrtc-$_ringrtcver/src/node
msg "Installing ringrtc js dependencies"
yarn --frozen-lockfile --ignore-scripts
)
(
cd "$srcdir"/libsignal-$_libsignalver/node
# fix target
sed -i 's/unknown-linux-gnu/alpine-linux-musl/g' binding.gyp
msg "Installing libsignal js dependencies"
yarn --ignore-scripts --frozen-lockfile
)
# remove shipped fonts in favour of system-provided ones (part 1)
rm -rf fonts/
}
build() {
chromium_arch="$(node -e 'console.log(process.arch)')"
# required dependency of ringrtc
(
cd "$srcdir"/webrtc-$_webrtcver
local webrtc_args="
rtc_build_examples=false
rtc_build_tools=false
rtc_enable_protobuf=false
rtc_enable_sctp=false
rtc_include_tests=false
rtc_include_ilbc=false
rtc_libvpx_build_vp9=true
rtc_use_x11=false
build_with_mozilla=false
chrome_pgo_phase=0
clang_use_chrome_plugins=false
custom_toolchain=\"//build/toolchain/linux/unbundle:default\"
is_cfi=false
is_clang=true
is_debug=false
is_official_build=true
host_toolchain=\"//build/toolchain/linux/unbundle:default\"
proprietary_codecs=true
rtc_link_pipewire=true
rtc_use_pipewire=true
symbol_level=0
use_custom_libcxx=false
use_sysroot=false
use_system_freetype=true
use_system_harfbuzz=true
use_system_libjpeg=true
"
mkdir -p "$srcdir"/ringrtc-$_ringrtcver/out/release
msg "Building signal's webrtc"
gn gen "$srcdir"/ringrtc-$_ringrtcver/out/release --args="$(echo $webrtc_args)"
ninja -C "$srcdir"/ringrtc-$_ringrtcver/out/release webrtc
)
# add LTO for the remaining C build steps (sqlite)
export CFLAGS="$CFLAGS -flto"
export CXXFLAGS="$CXXFLAGS -flto"
export LDFLAGS="$LDFLAGS -flto -fuse-ld=lld"
(
cd "$srcdir"/ringrtc-$_ringrtcver/src/rust
msg "Building libringrtc"
OUTPUT_DIR="$srcdir"/ringrtc-$_ringrtcver/out \
cargo build --features electron --release
mkdir -p ../node/build/linux
cp -fv target/release/libringrtc.so ../node/build/linux/libringrtc-$chromium_arch.node
)
(
cd "$srcdir"/ringrtc-$_ringrtcver/src/node
msg "Building ringrtc JS glue code"
yarn build
)
# module on npm intentionally unbuildable: https://github.com/signalapp/libsignal/issues/464#issuecomment-1160665052
(
cd "$srcdir"/libsignal-$_libsignalver/node
msg "Building libsignal"
yarn node-gyp configure --nodedir=/usr/include/electron/node_headers --build-from-source
yarn node-gyp build --nodedir=/usr/include/electron/node_headers --build-from-source
mkdir -p prebuilds/linux-$chromium_arch
mv build/Release/libsignal_client_linux_$chromium_arch.node prebuilds/linux-$chromium_arch/node.napi.node
msg "Building libsignal glue code"
yarn tsc
)
# from package.json postinstall
yarn build:acknowledgments
yarn patch-package
rm -rf node_modules/dtrace-provider
# use our libsignal
rm -rf node_modules/@signalapp/libsignal-client/
ln -s "$srcdir"/libsignal-$_libsignalver/node/ node_modules/@signalapp/libsignal-client
# use our libringrtc
rm -rf node_modules/ringrtc/
ln -s "$srcdir"/ringrtc-$_ringrtcver/src/node/ node_modules/ringrtc
# patch the sqlcipher module
for x in $source; do
case "$x" in
bettersqlite-*.patch)
msg "$x"
patch -Np1 -i "$srcdir"/$x -d "$srcdir"/Signal-Desktop-$pkgver/node_modules/better-sqlite3/
;;
esac
done
# use system-provided font
echo '' > node_modules/typeface-inter/inter.css
rm -rf 'node_modules/typeface-inter/Inter '*
npm rebuild sharp better-sqlite3 --nodedir=/usr/include/electron/node_headers --build-from-source
NODE_ENV=production \
SIGNAL_ENV=production \
NODE_OPTIONS=--openssl-legacy-provider \
yarn build:dev
NODE_ENV=production \
SIGNAL_ENV=production \
yarn build:electron \
--config.extraMetadata.environment=production \
--config.directories.output=release \
--linux=dir
}
check() {
# tests run against a downloaded glibc build of electron; this could probably be patched
yarn test
}
package() {
cd "$builddir"/release/linux-unpacked
install -Dm644 resources/app.asar "$pkgdir"/usr/lib/$pkgname/app.asar
cp -r resources/app.asar.unpacked "$pkgdir"/usr/lib/$pkgname/app.asar.unpacked
install -Dm755 "$srcdir"/$pkgname "$pkgdir"/usr/bin/$pkgname
install -Dm644 "$srcdir"/$pkgname.desktop "$pkgdir"/usr/share/applications/$pkgname.desktop
for i in 16 32 48 128 250 256 1024; do
install -Dm644 "$builddir"/images/icon_$i.png "$pkgdir"/usr/share/icons/hicolor/${i}x$i/apps/$pkgname.png
done
}
sha512sums="
8f5234fa018e0590b3cad934ab46797bef16ff489a36a05364df7f43be80022143e8034665d2f24a171401bfac3315a16918d702a249c9ef8acd1de78e30f52e Signal-Desktop-6.1.0.tar.gz
d2c13bf0d96eb706afe018c0ef9b377f3f50d2a82690f6ccb7260efa7cc620fb5c52ae775d598d6ebcaa581acfaab244dfe6f7f1738755604bf860cd548a62b6 libsignal-0.21.1.tar.gz
e263390ea2ae877edb39e9973a8c8a03e2f396f255ca48487311031c69493a93da0dedbce5f10adfef4859cb8e51579bf285fbd9b94f98fabd538acaee18c413 ringrtc-2.22.0.tar.gz
748f870d35b9a9789311c3d7cee9c0bc150aed8094838406e0a1969f2f824900ffec40d0b4fe2702f2f93d4a78d7987b0f91668cff859a8a34517663b138f8b5 webrtc-5005b.tar.xz
1aaf59c2d401ae127ed73981be330cd999794a217b455cb8033652063eb7b549c7070c63f54e4e60bf4338b475e162b6eae98683c3ce058e8f1a407588b3b2e1 bettersqlite-use-system-sqlcipher.patch
3ecfbd1b0cd03d1a697f3e53c057fa8bc2118de48ff0c3f07f8bb731f128f9478862a388efb36dd4dbc1dc5ad9977165a935fe65664aea915b8b80c38e801070 signal-build-expire-time.patch
60a45285d885922f5c21f64b761a10efbee9081baf3efa4c8c13abc6a43dc4d27662ed10e239b0fa2071ab9e3a0dbbb4b11d6e3d26fe2b74a19f39e72b74a5bd signal-disable-updates.patch
c68a2a6a37c1cdea227f29c0922b9bf15259f044e9b3240b120bba14809d04d66cf0b619f52bb91abd596ad93e51e972be132b5951d0e8f6ea238fcb7bb613eb signal-update-links.patch
252b37a2ecc5e7a25385943045f426dc2e30991b28d206ceaff1be7fd8ffeeb024310a8fca6b3e69a4b1c57db535d51c570935351053525f393682d5ecd0f9a9 webrtc-canonicalize-file-name.patch
6add8b4c293f5392748a2eec9486cb4a6534e161977c6a98de71617b9abcdd1e8ad94b44014256a4b52e33eb9dd4aca380279d4161629a1bb2d7b15f8eb5b459 webrtc-use-alpine-target.patch
bab56a33265b5b094f161af1462166e371913a5269fe8e7d12e9f65ec4f5b908157406b3bcbcf73db15d03470445127d27c64fd731b6ea57c631aba3f4d302cb webrtc-no-shared-deps-in-static-lib.patch
87534e7b5ad7365509eab75629e6bd1a9ed61ee92f7e358405a0abaf0df57de14623fb3894eb082f8785422e5c087e1c50f9e2e5cafbb2529591fd7bf447f7f5 signal-desktop
87ef5f3ffcf64e3cae308aa0f6bc208fb05dd8568f6a288217cdf8498ae1523f276987a7be8d6f5208f13394bab350e08734d806a8e7c08849dd8ba0dda49d66 signal-desktop.desktop
"

View file

@ -1,45 +0,0 @@
--- a/binding.gyp
+++ b/binding.gyp
@@ -7,7 +7,16 @@
'targets': [
{
'target_name': 'better_sqlite3',
- 'dependencies': ['deps/sqlite3.gyp:sqlite3'],
+ 'include_dirs': ['/usr/include/sqlcipher'],
+ 'direct_dependent_settings': {
+ 'include_dirs': ['/usr/include/sqlcipher'],
+ },
+ 'link_settings': {
+ 'libraries': [
+ '-L/usr/lib/sqlcipher',
+ '-lsqlcipher',
+ ]
+ },
'sources': ['src/better_sqlite3.cpp'],
'cflags_cc': ['-std=c++17'],
'xcode_settings': {
@@ -17,14 +26,22 @@
['OS=="linux"', {
'ldflags': [
'-Wl,-Bsymbolic',
- '-Wl,--exclude-libs,ALL',
],
}],
],
},
{
'target_name': 'test_extension',
- 'dependencies': ['deps/sqlite3.gyp:sqlite3'],
+ 'include_dirs': ['/usr/include/sqlcipher'],
+ 'direct_dependent_settings': {
+ 'include_dirs': ['/usr/include/sqlcipher'],
+ },
+ 'link_settings': {
+ 'libraries': [
+ '-L/usr/lib/sqlcipher',
+ '-lsqlcipher',
+ ]
+ },
'conditions': [['sqlite3 == ""', { 'sources': ['deps/test_extension.c'] }]],
},
],
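
As an aside (separate from the patch above, for illustration only): the hunks drop the bundled deps/sqlite3.gyp dependency and point better-sqlite3 at the system SQLCipher through -I/usr/include/sqlcipher and -lsqlcipher. A minimal standalone C sketch of the same linkage, compiled roughly as cc demo.c -I/usr/include/sqlcipher -L/usr/lib/sqlcipher -lsqlcipher (paths mirroring what the hunks add; adjust for your layout):

#include <sqlite3.h>
#include <stdio.h>

int main(void)
{
	sqlite3 *db;
	char *err = 0;

	if (sqlite3_open("demo.db", &db) != SQLITE_OK) {
		fprintf(stderr, "open failed: %s\n", sqlite3_errmsg(db));
		return 1;
	}
	/* SQLCipher keeps the stock sqlite3 API; the passphrase is set with
	 * PRAGMA key before the first read or write on the connection. */
	if (sqlite3_exec(db, "PRAGMA key = 'correct horse battery staple';", 0, 0, &err) != SQLITE_OK ||
	    sqlite3_exec(db, "CREATE TABLE IF NOT EXISTS t (x TEXT);", 0, 0, &err) != SQLITE_OK) {
		fprintf(stderr, "sqlite error: %s\n", err ? err : sqlite3_errmsg(db));
		sqlite3_free(err);
		sqlite3_close(db);
		return 1;
	}
	sqlite3_close(db);
	return 0;
}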

View file

@ -1,15 +0,0 @@
--- a/ts/scripts/get-expire-time.ts
+++ b/ts/scripts/get-expire-time.ts
@@ -7,11 +7,7 @@
import { DAY } from '../util/durations';
-const unixTimestamp = parseInt(
- execSync('git show -s --format=%ct').toString('utf8'),
- 10
-);
-const buildCreation = unixTimestamp * 1000;
+const buildCreation = new Date().getTime();
const buildExpiration = buildCreation + DAY * 90;

View file

@ -1,6 +0,0 @@
#!/bin/sh
# app chooses config (including used endpoints) based on this
export NODE_ENV=production
exec electron /usr/lib/signal-desktop/app.asar

View file

@ -1,10 +0,0 @@
[Desktop Entry]
Name=Signal
Exec=/usr/bin/signal-desktop %U
Terminal=false
Type=Application
Icon=signal-desktop
StartupWMClass=Signal
Comment=Private messaging from your desktop
MimeType=x-scheme-handler/sgnl;x-scheme-handler/signalcaptcha;
Categories=Network;InstantMessaging;Chat;

View file

@ -1,9 +0,0 @@
--- a/config/production.json
+++ b/config/production.json
@@ -11,5 +11,5 @@
},
"serverPublicParams": "AMhf5ywVwITZMsff/eCyudZx9JDmkkkbV6PInzG4p8x3VqVJSFiMvnvlEKWuRob/1eaIetR31IYeAbm0NdOuHH8Qi+Rexi1wLlpzIo1gstHWBfZzy1+qHRV5A4TqPp15YzBPm0WSggW6PbSn+F4lf57VCnHF7p8SvzAA2ZZJPYJURt8X7bbg+H3i+PEjH9DXItNEqs2sNcug37xZQDLm7X36nOoGPs54XsEGzPdEV+itQNGUFEjY6X9Uv+Acuks7NpyGvCoKxGwgKgE5XyJ+nNKlyHHOLb6N1NuHyBrZrgtY/JYJHRooo5CEqYKBqdFnmbTVGEkCvJKxLnjwKWf+fEPoWeQFj5ObDjcKMZf2Jm2Ae69x+ikU5gBXsRmoF94GXQ==",
"serverTrustRoot": "BXu6QIKVz5MA8gstzfOgRQGqyLqOwNKHL6INkv3IHWMF",
- "updatesEnabled": true
+ "updatesEnabled": false
}

View file

@ -1,25 +0,0 @@
--- a/ts/components/DialogExpiredBuild.tsx
+++ b/ts/components/DialogExpiredBuild.tsx
@@ -29,9 +29,9 @@
containerWidthBreakpoint={containerWidthBreakpoint}
type="error"
onClick={() => {
- openLinkInWebBrowser('https://signal.org/download/');
+ openLinkInWebBrowser('https://pkgs.alpinelinux.org/packages?name=signal-desktop');
}}
- clickLabel={i18n('upgrade')}
+ clickLabel={<code>apk upgrade signal-desktop</code>}
hasAction
>
{i18n('expiredWarning')}{' '}
--- a/ts/components/DialogUpdate.tsx
+++ b/ts/components/DialogUpdate.tsx
@@ -27,7 +27,7 @@
currentVersion: string;
};
-const PRODUCTION_DOWNLOAD_URL = 'https://signal.org/download/';
+const PRODUCTION_DOWNLOAD_URL = 'https://pkgs.alpinelinux.org/packages?name=signal-desktop';
const BETA_DOWNLOAD_URL = 'https://support.signal.org/beta';
export const DialogUpdate = ({

View file

@ -1,13 +0,0 @@
No canonicalize_file_name() on musl. Funnily, the file using it notes that the function is
not portable, but uses it to avoid the non-portability of realpath(path, NULL); see the sketch after the hunk below.
--- a/third_party/nasm/config/config-linux.h
+++ b/third_party/nasm/config/config-linux.h
@@ -139,7 +139,7 @@
#define HAVE_ACCESS 1
/* Define to 1 if you have the `canonicalize_file_name' function. */
-#define HAVE_CANONICALIZE_FILE_NAME 1
+/* #define HAVE_CANONICALIZE_FILE_NAME 1 */
/* Define to 1 if you have the `cpu_to_le16' intrinsic function. */
/* #undef HAVE_CPU_TO_LE16 */
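
As an aside (separate from the patch above): on glibc, canonicalize_file_name(path) is equivalent to realpath(path, NULL), and POSIX.1-2008 requires realpath() to allocate the result itself when the second argument is NULL, so musl supports that spelling too. A minimal C sketch of the portable form, assuming nothing beyond POSIX.1-2008:

#include <stdio.h>
#include <stdlib.h>

int main(int argc, char **argv)
{
	const char *path = argc > 1 ? argv[1] : ".";

	/* realpath(path, NULL) lets the libc allocate the canonical path;
	 * this is what canonicalize_file_name() does on glibc, and it also
	 * works on musl. */
	char *resolved = realpath(path, NULL);
	if (!resolved) {
		perror("realpath");
		return 1;
	}
	printf("%s -> %s\n", path, resolved);
	free(resolved);
	return 0;
}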

View file

@ -1,14 +0,0 @@
--- a/BUILD.gn
+++ b/BUILD.gn
@@ -480,11 +480,6 @@
"rtc_base",
"sdk",
"video",
-
- # Added by RingRTC
- # Necessary for use_custom_libcxx=true,
- # which is in turn necessary for deploying to Ubuntu 16.04.
- "//build/config:shared_library_deps",
]
if (rtc_include_builtin_audio_codecs) {

View file

@ -1,13 +0,0 @@
--- ./build/config/compiler/BUILD.gn
+++ ./build/config/compiler/BUILD.gn
@@ -766,8 +766,8 @@
}
} else if (current_cpu == "arm64") {
if (is_clang && !is_android && !is_nacl && !is_fuchsia) {
- cflags += [ "--target=aarch64-linux-gnu" ]
- ldflags += [ "--target=aarch64-linux-gnu" ]
+ cflags += [ "--target=aarch64-alpine-linux-musl" ]
+ ldflags += [ "--target=aarch64-alpine-linux-musl" ]
}
} else if (current_cpu == "mipsel" && !is_nacl) {
ldflags += [ "-Wl,--hash-style=sysv" ]

View file

@ -1,53 +0,0 @@
# Contributor: wener <wenermail@gmail.com>
# Maintainer: wener <wenermail@gmail.com>
pkgname=sqlcipher
pkgver=4.5.2
pkgrel=1
pkgdesc="SQLCipher is an SQLite extension that provides 256 bit AES encryption of database files."
url="https://www.zetetic.net/sqlcipher/"
arch="all"
license="BSD-3-Clause"
makedepends="openssl-dev>3 tcl-dev readline-dev zlib-dev"
subpackages="$pkgname-dev $pkgname-doc $pkgname-libs"
source="$pkgname-$pkgver.tar.gz::https://github.com/sqlcipher/sqlcipher/archive/v$pkgver.tar.gz"
# blocked by https://github.com/sqlcipher/sqlcipher/issues/368#issuecomment-669984195
# options="!check"
build() {
export CFLAGS="$CFLAGS -DSQLITE_HAS_CODEC -DSQLCIPHER_TEST -DSQLITE_ENABLE_COLUMN_METADATA"
export LDFLAGS="$LDFLAGS -lcrypto"
./configure \
--build=$CBUILD \
--host=$CHOST \
--prefix=/usr \
--sysconfdir=/etc \
--mandir=/usr/share/man \
--localstatedir=/var \
--enable-tempstore=yes \
--enable-fts5
make
}
check() {
make testfixture
./testfixture test/sqlcipher.test
}
package() {
make DESTDIR="$pkgdir" install
install -Dm0644 sqlcipher.1 \
"$pkgdir"/usr/share/man/man1/sqlcipher.1
}
libs() {
pkgdesc="SQLCipher library"
mkdir -p "$subpkgdir"/usr
mv "$pkgdir"/usr/lib "$subpkgdir"/usr/
}
sha512sums="
1de5b219392bb976631857e32b4523258fd660fedb558d478e536b7e10c711c72c7e7c9062e45bd8a5ceaecbc1fee717935d2357f6811c3ddf76702167f4601b sqlcipher-4.5.2.tar.gz
"

View file

@ -0,0 +1,59 @@
# Contributor: Michał Polański <michal@polanski.me>
# Maintainer: Michał Polański <michal@polanski.me>
pkgname=uvicorn
pkgver=0.18.3
pkgrel=1
pkgdesc="Lightning-fast ASGI server"
url="https://www.uvicorn.org/"
license="BSD-3-Clause"
arch="noarch"
depends="
python3
py3-asgiref
py3-click
py3-h11
"
makedepends="py3-build py3-hatchling py3-installer"
checkdepends="
py3-dotenv
py3-httptools
py3-httpx
py3-pytest
py3-pytest-mock
py3-yaml
py3-trustme
py3-watchgod
py3-websockets
py3-wsproto
"
subpackages="$pkgname-doc"
source="https://github.com/encode/uvicorn/archive/$pkgver/uvicorn-$pkgver.tar.gz
skip-warns.patch
"
case "$CARCH" in
# test suite blocked by py3-httpx
armhf|ppc64le) options="!check" ;;
# test suite blocked by py3-watchgod
s390x) options="!check" ;;
esac
build() {
python3 -m build --no-isolation --wheel
}
check() {
export PYTHONWARNINGS="ignore::DeprecationWarning"
pytest
}
package() {
python3 -m installer -d "$pkgdir" dist/uvicorn-$pkgver-py3-none-any.whl
install -Dm644 LICENSE.md "$pkgdir"/usr/share/licenses/$pkgname/LICENSE
}
sha512sums="
a4983e7095f3596ac54557432a074eb8cae4c6dd579fabfe8b6c34606ba8258e6c0684c2e34e8019f2455fbfd1771f8c2c88bf8b01fc505abfae93655f61f032 uvicorn-0.18.3.tar.gz
0fe235e59a595bc36c9aeb6c8f54ecde9ee4ad30dbe8feae0477db533fd1c3a4c866719b7354e0e5e0e92085ab7e4909a0586256daaaaa76465d39f653bd720a skip-warns.patch
"

View file

@ -0,0 +1,15 @@
diff --git a/setup.cfg b/setup.cfg
index aef20c6..61d3ef4 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -69,10 +69,6 @@ addopts = -rxXs
--strict-config
--strict-markers
xfail_strict=True
-filterwarnings=
- # Turn warnings that aren't filtered into exceptions
- error
- ignore: \"watchgod\" is depreciated\, you should switch to watchfiles \(`pip install watchfiles`\)\.:DeprecationWarning
[coverage:run]
omit = venv/*

View file

@@ -9,7 +9,7 @@ arch="noarch"
url="https://github.com/firefly-iii/firefly-iii"
license="AGPL-3.0-only"
options="!check" # No testsuite
-_php=php81
+_php=php8
_php_mods="-intl -curl -bcmath -zip -gd -xml -mbstring -ldap -session -fileinfo -simplexml -sodium -tokenizer -xmlwriter -dom -pdo"
depends="$_php ${_php_mods//-/$_php-}"
makedepends="composer"

View file

@@ -4,7 +4,7 @@
pkgname=mastodon
pkgver=4.0.0_rc2
_gittag=4.0.0rc2
-pkgrel=0
+pkgrel=1
pkgdesc="Self-hosted social media and network server based on ActivityPub and OStatus"
arch="all"
url="https://github.com/tootsuite/mastodon"
@@ -102,7 +102,6 @@ build() {
bundle config set without 'development test'
bundle config set no-cache 'true'
bundle install -j"$(getconf _NPROCESSORS_ONLN)"
-export NODE_OPTIONS="--openssl-legacy-provider"
yarn install --pure-lockfile --ignore-engines
RAILS_ENV='production' OTP_SECRET=precompile_placeholder SECRET_KEY_BASE=precompile_placeholder bundle exec rails assets:precompile
}

View file

@@ -16,7 +16,7 @@ source="$pkgname-$pkgver.tar.gz::https://pypi.io/packages/source/${_pyname:0:1}/
builddir="$srcdir"/$_pyname-$pkgver
build() {
-python setup.py build
+python3 setup.py build
}
check() {
@@ -24,7 +24,7 @@ check() {
}
package() {
-python setup.py install --prefix=/usr --root="$pkgdir" --optimize=1
+python3 setup.py install --prefix=/usr --root="$pkgdir" --optimize=1
install -Dm644 LICENSE -t "$pkgdir"/usr/share/licenses/$pkgname/
}
sha512sums="

View file

@@ -17,7 +17,7 @@ source="$pkgname-$pkgver.tar.gz::https://pypi.io/packages/source/${_pyname:0:1}/
builddir="$srcdir"/$_pyname-$pkgver
build() {
-python setup.py build
+python3 setup.py build
}
check() {
@@ -26,7 +26,7 @@ check() {
}
package() {
-python setup.py install --skip-build \
+python3 setup.py install --skip-build \
--optimize=1 \
--root="$pkgdir"
install -vDm 644 README.rst -t "${pkgdir}/usr/share/doc/${pkgname}/"

View file

@@ -16,7 +16,7 @@ source="$pkgname-$pkgver.tar.gz::https://pypi.io/packages/source/${_pyname:0:1}/
builddir="$srcdir"/$_pyname-$pkgver
build() {
-python setup.py build
+python3 setup.py build
}
check() {
@@ -24,7 +24,7 @@ check() {
}
package() {
-python setup.py install --root="$pkgdir" --optimize=1
+python3 setup.py install --root="$pkgdir" --optimize=1
install -Dm644 "./LICENSE" "$pkgdir/usr/share/licenses/$pkgname/LICENSE"
}
sha512sums="

View file

@@ -17,7 +17,7 @@ source="$pkgname-$pkgver.tar.gz::https://pypi.io/packages/source/${_pyname:0:1}/
builddir="$srcdir"/$_pyname-$pkgver
build() {
-python -m build --wheel --skip-dependency-check --no-isolation
+python3 -m build --wheel --skip-dependency-check --no-isolation
}
check() {
@@ -25,7 +25,7 @@ check() {
}
package() {
-python -m installer --destdir="$pkgdir" dist/*.whl
+python3 -m installer --destdir="$pkgdir" dist/*.whl
install -vDm 644 README.rst -t "$pkgdir/usr/share/doc/$pkgname"
install -vDm 644 LICENSE -t "$pkgdir/usr/share/licenses/$pkgname"
}

View file

@@ -16,7 +16,7 @@ source="$pkgname-$pkgver.tar.gz::https://pypi.io/packages/source/${_pyname:0:1}/
builddir="$srcdir"/$_pyname-$pkgver
build() {
-python setup.py build
+python3 setup.py build
}
check() {
@@ -24,7 +24,7 @@ check() {
}
package() {
-python setup.py install --prefix=/usr --root="$pkgdir" --optimize=1
+python3 setup.py install --prefix=/usr --root="$pkgdir" --optimize=1
install -Dm644 LICENSE -t "$pkgdir"/usr/share/licenses/$pkgname/
}
sha512sums="

View file

@@ -16,7 +16,7 @@ source="$pkgname-$pkgver.tar.gz::https://pypi.io/packages/source/${_pyname:0:1}/
builddir="$srcdir"/$_pyname-$pkgver
build() {
-python -m build --wheel --no-isolation
+python3 -m build --wheel --no-isolation
}
check() {
@@ -24,7 +24,7 @@ check() {
}
package() {
-python -m installer --destdir="$pkgdir" dist/*.whl
+python3 -m installer --destdir="$pkgdir" dist/*.whl
install -vDm 644 AUTHORS.rst -t "$pkgdir/usr/share/doc/$pkgname/"
install -vDm 644 CHANGELOG.rst -t "$pkgdir/usr/share/doc/$pkgname/"
install -vDm 644 README.rst -t "$pkgdir/usr/share/doc/$pkgname/"

View file

@@ -17,7 +17,7 @@ source="$pkgname-$pkgver.tar.gz::https://pypi.io/packages/source/${_pyname:0:1}/
builddir="$srcdir"/$_pyname-$pkgver
build() {
-python setup.py build
+python3 setup.py build
}
check() {
@@ -25,7 +25,7 @@ check() {
}
package() {
-python setup.py install --skip-build \
+python3 setup.py install --skip-build \
--optimize=1 \
--root="$pkgdir"
install -vDm 644 AUTHORS -t "$pkgdir/usr/share/doc/$pkgname"

View file

@@ -16,7 +16,7 @@ source="$pkgname-$pkgver.tar.gz::https://pypi.io/packages/source/${_pyname:0:1}/
builddir="$srcdir"/$_pyname-$pkgver
build() {
-python setup.py build
+python3 setup.py build
}
check() {
@@ -24,7 +24,7 @@ check() {
}
package() {
-python setup.py install --prefix=/usr --root="$pkgdir" --optimize=1
+python3 setup.py install --prefix=/usr --root="$pkgdir" --optimize=1
install -Dm644 LICENSE -t "$pkgdir"/usr/share/licenses/$pkgname/
}
sha512sums="

View file

@@ -16,7 +16,7 @@ source="$pkgname-$pkgver.tar.gz::https://pypi.io/packages/source/${_pyname:0:1}/
builddir="$srcdir"/$_pyname-$pkgver
build() {
-python setup.py build
+python3 setup.py build
}
check() {
@@ -24,7 +24,7 @@ check() {
}
package() {
-python setup.py install --skip-build --root="$pkgdir/" --optimize=1
+python3 setup.py install --skip-build --root="$pkgdir/" --optimize=1
install -Dm644 LICENSE "$pkgdir/usr/share/licenses/$pkgname/LICENSE"
}
sha512sums="

View file

@@ -16,7 +16,7 @@ source="$pkgname-$pkgver.tar.gz::https://pypi.io/packages/source/${_pyname:0:1}/
builddir="$srcdir"/$_pyname-$pkgver
build() {
-python setup.py build
+python3 setup.py build
}
check() {
@@ -24,7 +24,7 @@ check() {
}
package() {
-python setup.py install --prefix=/usr --root="$pkgdir" --optimize=1
+python3 setup.py install --prefix=/usr --root="$pkgdir" --optimize=1
install -Dm644 LICENSE -t "$pkgdir"/usr/share/licenses/$pkgname/
}
sha512sums="

View file

@@ -20,7 +20,7 @@ check() {
}
package() {
-python setup.py install --root="${pkgdir}" -O1
+python3 setup.py install --root="${pkgdir}" -O1
install -Dm644 LICENSE -t "$pkgdir"/usr/share/licenses/$pkgname/
}
sha512sums="

View file

@@ -16,7 +16,7 @@ source="$pkgname-$pkgver.tar.gz::https://pypi.io/packages/source/${_pyname:0:1}/
builddir="$srcdir"/$_pyname-$pkgver
build() {
-python setup.py build
+python3 setup.py build
}
check() {
@@ -24,7 +24,7 @@ check() {
}
package() {
-python setup.py install --prefix=/usr --root="$pkgdir" --optimize=1
+python3 setup.py install --prefix=/usr --root="$pkgdir" --optimize=1
install -Dm644 LICENSE -t "$pkgdir"/usr/share/licenses/$pkgname/
}
sha512sums="

View file

@@ -21,7 +21,7 @@ prepare() {
build() {
export SETUPTOOLS_SCM_PRETEND_VERSION=$pkgver
-python -m build --wheel --no-isolation
+python3 -m build --wheel --no-isolation
}
check() {
@@ -29,7 +29,7 @@ check() {
}
package() {
-python -m installer --destdir="$pkgdir" dist/*.whl
+python3 -m installer --destdir="$pkgdir" dist/*.whl
install -Dm644 LICENSE -t "$pkgdir"/usr/share/licenses/$pkgname/
}
sha512sums="

View file

@@ -17,7 +17,7 @@ source="$pkgname-$_pkgver.tar.gz::https://pypi.io/packages/source/${_pyname:0:1}
builddir="$srcdir"/$_pyname-$_pkgver
build() {
-python setup.py build
+python3 setup.py build
}
check() {
@@ -25,7 +25,7 @@ check() {
}
package() {
-python setup.py install --prefix=/usr --root="$pkgdir" --optimize=1
+python3 setup.py install --prefix=/usr --root="$pkgdir" --optimize=1
install -Dm644 LICENSE -t "$pkgdir"/usr/share/licenses/$pkgname/
}
sha512sums="

View file

@@ -16,7 +16,7 @@ source="$pkgname-$pkgver.tar.gz::https://pypi.io/packages/source/${_pyname:0:1}/
builddir="$srcdir"/$_pyname-$pkgver
build() {
-python setup.py build
+python3 setup.py build
}
check() {
@@ -24,7 +24,7 @@ check() {
}
package() {
-python setup.py install --prefix=/usr --root="$pkgdir" --optimize=1
+python3 setup.py install --prefix=/usr --root="$pkgdir" --optimize=1
install -Dm644 LICENSE -t "$pkgdir"/usr/share/licenses/$pkgname/
}
sha512sums="

View file

@@ -16,7 +16,7 @@ source="$pkgname-$pkgver.tar.gz::https://github.com/mobolic/facebook-sdk/archive
builddir="$srcdir"/$_pyname-$pkgver
build() {
-python setup.py build
+python3 setup.py build
}
check() {
@@ -24,7 +24,7 @@ check() {
}
package() {
-python setup.py install --prefix=/usr --root="$pkgdir" --optimize=1
+python3 setup.py install --prefix=/usr --root="$pkgdir" --optimize=1
install -Dm644 LICENSE -t "$pkgdir"/usr/share/licenses/$pkgname/
}
sha512sums="

View file

@@ -16,7 +16,7 @@ source="$pkgname-$pkgver.tar.gz::https://pypi.io/packages/source/${_pyname:0:1}/
builddir="$srcdir"/$_pyname-$pkgver
build() {
-python setup.py build
+python3 setup.py build
}
check() {
@@ -24,7 +24,7 @@ check() {
}
package() {
-python setup.py install --root="$pkgdir" -O1
+python3 setup.py install --root="$pkgdir" -O1
install -Dm644 LICENSE -t "$pkgdir"/usr/share/licenses/$pkgname/
}
sha512sums="

View file

@@ -16,7 +16,7 @@ source="$pkgname-$pkgver.tar.gz::https://github.com/linsomniac/python-memcached/
builddir="$srcdir"/$_pyname-$pkgver
build() {
-python setup.py build
+python3 setup.py build
}
check() {

View file

@@ -16,7 +16,7 @@ source="$pkgname-$pkgver.tar.gz::https://pypi.io/packages/source/${_pyname:0:1}/
builddir="$srcdir"/$_pyname-$pkgver
build() {
-python setup.py build
+python3 setup.py build
}
check() {
@@ -24,7 +24,7 @@ check() {
}
package() {
-python setup.py install --prefix=/usr --root="$pkgdir" --optimize=1
+python3 setup.py install --prefix=/usr --root="$pkgdir" --optimize=1
install -Dm644 LICENSE -t "$pkgdir"/usr/share/licenses/$pkgname/
}
sha512sums="

Some files were not shown because too many files have changed in this diff Show more