Compare commits

..

209 commits

Author SHA1 Message Date
f98506d3bb
pmos/*: drop 2025-12-22 00:49:39 -05:00
6a81c77cf8 backports/openterface-qt: upgrade to 0.5.7 2025-12-22 05:07:47 +00:00
797de7c2a9
ci: fix pmos check 2025-12-22 00:06:56 -05:00
206336bdd1
ci: track latest stable for pmos
Some checks failed
check-pmos.yml / ci: track latest stable for pmos (push) Failing after 0s
2025-12-22 00:05:59 -05:00
dfd1702835
ci: skip dotnet10-stage0 for community 2025-12-16 21:17:59 -05:00
e14a8dc1e6
ci: check backports of v3.23 2025-12-16 16:28:23 -05:00
ayakael
88d7b4e8c7
user/filebrowser: new aport
All checks were successful
/ lint (pull_request) Successful in 29s
/ deploy-aarch64 (pull_request) Successful in 28s
/ build-aarch64 (pull_request) Successful in 1m23s
/ deploy-armv7 (pull_request) Successful in 27s
/ build-armv7 (pull_request) Successful in 1m32s
/ build-x86_64 (pull_request) Successful in 2m2s
/ deploy-x86_64 (pull_request) Successful in 35s
2025-12-16 13:34:44 -05:00
ayakael
839fa112c7
backports/yarn-berry: move to correct dir 2025-12-15 16:07:26 -05:00
ayakael
5a29004820
backports/py3-limits: backport for py3-flask-limiter 2025-12-15 16:06:10 -05:00
ayakael
7a023d0024 user/papermc: upgrade to 1.21.7 2025-12-15 20:52:02 +00:00
ayakael
094222021c
backports/yarn-berry: backport for electron 2025-12-15 15:50:00 -05:00
ayakael
946a011b61
backports/electron: upgrade to 38.2.7 2025-12-15 15:50:00 -05:00
ayakael
3d599aff17 user/scanservjs: upgrade to 3.0.4 2025-12-15 20:40:12 +00:00
ayakael
a7a531a637 user/scantopl: upgrade to 1.0.1 2025-12-15 20:40:07 +00:00
ayakael
7ef8cfa070 backports/py3-flask-limiter: backport for calibre-web 2025-12-15 20:40:04 +00:00
ayakael
2e181afd19 backports/py3-flask-httpauth: backport for calibre-web 2025-12-15 20:40:04 +00:00
ayakael
ac2f1855db user/calibre-web: upgrade to 0.6.25 2025-12-15 20:40:04 +00:00
ayakael
35e7f126ce
user/kb: upgrade to 0.1.8
All checks were successful
/ lint (pull_request) Successful in 30s
/ build-armv7 (pull_request) Successful in 58s
/ deploy-armv7 (pull_request) Successful in 29s
/ build-aarch64 (pull_request) Successful in 58s
/ deploy-aarch64 (pull_request) Successful in 28s
/ build-x86_64 (pull_request) Successful in 55s
/ deploy-x86_64 (pull_request) Successful in 40s
2025-12-15 14:41:04 -05:00
ayakael
ed788403aa
ci: run lint on x86_64
All checks were successful
/ lint (pull_request) Successful in 29s
/ deploy-armv7 (pull_request) Successful in 38s
/ build-armv7 (pull_request) Successful in 43s
/ deploy-x86_64 (pull_request) Successful in 42s
/ build-x86_64 (pull_request) Successful in 2m19s
/ build-aarch64 (pull_request) Successful in 2m11s
/ deploy-aarch64 (pull_request) Successful in 44s
2025-12-15 13:24:39 -05:00
ayakael
55135b73b1
user/rmfakecloud: upgrade to 0.0.27
Some checks failed
/ lint (pull_request) Failing after 10s
/ deploy-armv7 (pull_request) Successful in 32s
/ build-armv7 (pull_request) Successful in 39s
/ deploy-x86_64 (pull_request) Successful in 33s
/ build-x86_64 (pull_request) Successful in 2m13s
/ build-aarch64 (pull_request) Successful in 2m6s
/ deploy-aarch64 (pull_request) Successful in 37s
2025-12-15 13:10:24 -05:00
273730b1b9
ci: track dotnet8 and 9 on previous releases of Alpine to backport security fixes 2025-12-11 09:58:38 -05:00
7be7e19fc9
ci: check_ver can now exclude packages from skip_package when set as all 2025-12-11 09:57:27 -05:00
8791fcb06c
ci: track arm-trusted-firmware LTS on edge correctly 2025-12-08 11:57:42 -05:00
cacd6ba22b
ci: add special rules for tracking dotnet 2025-12-08 09:25:21 -05:00
16a4ebc1e4
ci: track v3.23, drop v3.19 2025-12-08 09:15:34 -05:00
7ce80a2dd7
ci: add armv7 builder 2025-12-01 22:18:41 -05:00
6fceb42254
user/papermc-plugin-worldedit: disable due to missing dependencies 2025-12-01 22:03:51 -05:00
8e2f15f244
user/*: disable packages broken on armv7 2025-12-01 21:58:50 -05:00
ba29f2b000
user/uvicorn: drop due to re-added to aports 2025-12-01 21:34:42 -05:00
2865b8f524
user/papermc-plugin-essentialsx: blocked by luckperms 2025-12-01 10:43:14 -05:00
8792a8c06a
user/papermc-plugin-luckperms: disable as upstream dependency not available 2025-12-01 10:43:12 -05:00
df11191c86
backports/thelounge: disable as broken against node 24 2025-12-01 10:43:10 -05:00
fd29f9c77b
backports/shntool: fix build against latest gcc 2025-12-01 10:43:08 -05:00
a1898dab2d
backports/thelounge: enable check 2025-12-01 10:43:06 -05:00
fe3b86d53a
backports/signal-desktop: upgrade to 7.76.0 2025-12-01 10:43:04 -05:00
e5734af93e
backports/py3-apsw: upgrade to 3.50.4.0 2025-12-01 10:43:02 -05:00
061cc8069c
backports/freetube: upgrade to 0.23.12 2025-12-01 10:43:00 -05:00
1300d81709
backports/element-desktop: upgrade to 1.12.3 2025-12-01 10:42:58 -05:00
a993aebd4d
backports/electron: upgrade to 39.2.3 2025-12-01 10:42:55 -05:00
2825fbb4f5
backports/caprine: bump pkgrel 2025-12-01 10:42:53 -05:00
ee0ce13773
backports/calibre: upgrade to 8.14.0 2025-12-01 10:42:51 -05:00
d6eed040f2
user/jellyfin{,-web}, skiasharp: drop due to merged on aports 2025-12-01 10:42:33 -05:00
23bcbc7513
user/jellyfin: set skiasharp version dynamically
Some checks failed
/ lint (pull_request) Failing after 6s
/ deploy-aarch64 (pull_request) Successful in 30s
/ build-aarch64 (pull_request) Successful in 3m13s
/ deploy-x86_64 (pull_request) Successful in 40s
/ build-x86_64 (pull_request) Successful in 16m34s
2025-11-29 20:36:16 -05:00
c25b332251
user/skiasharp: use preprocessed tar 2025-11-29 20:36:12 -05:00
73ebcb7fcc
user/skiasharp: use vendored libjpeg-turbo 2025-11-29 20:35:48 -05:00
f45c1e54db
user/jellyfin: upgrade to 10.11.3
Some checks failed
/ deploy-aarch64 (pull_request) Has been cancelled
/ build-aarch64 (pull_request) Has been cancelled
/ deploy-x86_64 (pull_request) Has been cancelled
/ build-x86_64 (pull_request) Has been cancelled
/ lint (pull_request) Failing after 12s
2025-11-29 15:10:47 -05:00
749f413ab8
user/skiasharp: upgrade to 3.119.1 2025-11-29 15:10:42 -05:00
6770f6babb
user/jellyfin-web: upgrade to 10.11.3 2025-11-28 17:57:01 -05:00
42d2681350 user/jellyfin-web: new aport 2025-11-24 03:35:23 +00:00
16d5ce552f user/jellyfin: new aport 2025-11-24 03:35:23 +00:00
df65efacf1 user/skiasharp: new aport 2025-11-24 03:35:23 +00:00
869d5c3845
user/koreader: new aport
Some checks failed
/ lint (pull_request) Failing after 17s
/ deploy-aarch64 (pull_request) Successful in 43s
/ build-aarch64 (pull_request) Successful in 7m46s
/ deploy-x86_64 (pull_request) Successful in 59s
/ build-x86_64 (pull_request) Successful in 32m26s
2025-11-23 21:45:43 -05:00
03b4f6425d
ci: fix lookup for u-boot-pine64-pinenote blobs 2025-11-13 12:19:14 -05:00
8658e947c7
ci: fix title updating 2025-10-14 09:22:28 -04:00
b3d03dc817
ci: follow redirects on curl requests 2025-10-14 09:03:25 -04:00
223efcc89f
ci: add capability to exclude packages from fix_only minor_only rules 2025-09-13 10:16:11 -04:00
bd1a71e8a2
ci: use aarch64 runner on check and lint workflows 2025-09-06 22:17:07 -04:00
d3043bab85
ci: fix titles for non-edge issues 2025-09-06 22:15:59 -04:00
2eeb660610
ci: use actions namespace for {upload,download}-artifact action 2025-09-06 21:59:55 -04:00
8c1da51a60
ci: u-boot-pine64-pinenote: fix commit lookup 2025-08-14 22:07:15 -04:00
4de45ea910
ci: u-boot-pine64-pinenote: fix typo 2025-08-14 21:59:55 -04:00
d0dfb021eb
ci: u-boot-pine64-pinenote: fix ddr and trust tracking 2025-08-10 20:19:18 -04:00
953627f48d
ci: u-boot-pine64-pinenote: track ddr and trust blob versions 2025-08-10 20:02:12 -04:00
8789a20256
ci: linux-pine64-pinenote: fix naming current release 2025-08-10 17:23:02 -04:00
7332be7561
ci: linux-pine64-pinenote: fix tracking latest release 2025-08-10 17:14:47 -04:00
4adb8575d1
ci: linux-pine64-pinenote: track latest and current linux release 2025-08-10 17:12:37 -04:00
4a5617805a
ci: track u-boot-pine64-pinenote against u-boot mainline version 2025-08-10 17:04:57 -04:00
9c137f0b9d
ci: fix tracking of raspberrypi-usbboot 2025-08-10 17:00:46 -04:00
b621ae6716
ci: skip dotnet6-stage0 in workflow 2025-07-10 17:46:21 -04:00
d0b5791120
ci: add workaround exception for user repo 2025-07-10 01:16:00 -04:00
c600b841b3
ci: add workaround to issues not being created for edge when already existing for stable 2025-07-10 00:56:02 -04:00
2fa99e56c7
ci: rollback linux-radxa change 2025-06-15 00:54:08 -04:00
22a33dd83d
ci: skip u-boot-radxa-cm5 2025-06-15 00:48:26 -04:00
a48dd2cc70
ci: fix linux kernel tracking 2025-06-15 00:47:10 -04:00
5ea7fbce20
ci: track linux-pine64-pinenote and linux-radxa 2025-06-15 00:29:52 -04:00
f7e711f9a1
ci: do not track linux-clockworkpi-uconsole-rpi 2025-06-14 23:25:52 -04:00
923797f1b9
ci: fix special case for linux-radxa 2025-06-09 11:35:23 -04:00
7c780b5d0f
ci: fix special case for looking-glass 2025-06-08 20:17:25 -04:00
edac436331
ci: fix check community workflow 2025-06-08 20:10:29 -04:00
eea9bc1537
ci: remove repo_name add special case handling of repo name for postmarketos 2025-06-08 15:41:29 -04:00
d746dda5cb
ci: override repo_name 2025-06-08 15:34:11 -04:00
7a2ebe9cfe
ci: skip my device packages 2025-06-08 15:15:57 -04:00
3da0a268ee
ci: add additional package feature and track linux-clockworkpi-uconsole-rpi 2025-06-08 15:12:56 -04:00
3611b942ed
ci: check_ver lint 2025-06-08 15:09:58 -04:00
f2ad704226
ci: add checks for pmos 2025-06-08 15:08:38 -04:00
7b106bae0c
backports/py3-{dateparser,flask-limiter,limits,pathvalidate}: drop to unneeded 2025-05-31 12:36:51 -04:00
3f1fd192e5
backports/{coin,libmedc,py3-pivy,soqt,freecad}: drop due to in community 2025-05-31 12:35:02 -04:00
bff91156de
ci: add looking-glass special case to check_ver 2025-05-29 11:18:33 -04:00
97a6234e8e
backports/electron: upgrade to 35.5.0
All checks were successful
/ lint (pull_request) Successful in 29s
/ build-x86_64 (pull_request) Successful in 4h59m34s
/ deploy-x86_64 (pull_request) Successful in 31s
/ deploy-aarch64 (pull_request) Successful in 1m3s
/ build-aarch64 (pull_request) Successful in 1m23s
2025-05-28 16:01:14 -04:00
614aa49d49
ci: add clear-repo workflow 2025-05-28 15:43:04 -04:00
fd63d1544f
ci: add cross-gen workflow 2025-05-28 15:12:39 -04:00
f94e3afd33
user/looking-glass: drop due to in testing 2025-05-27 17:31:53 -04:00
378739e007
user/papermc: fix jar install 2025-05-27 11:21:16 -04:00
106fe3e570
user/papermc-plugin-essentialsx: upgrade to 2.21.1 2025-05-27 11:18:17 -04:00
dfe9556b9d
unmaintained/paperless-ngx: move from user 2025-05-27 10:37:31 -04:00
5503c80497
unmaintained/calibre-web: move from user 2025-05-27 10:36:08 -04:00
352e518f8e
unmaintained/firefly-iii: move from user 2025-05-27 10:20:39 -04:00
762159a7e9
user/papermc-plugin-worledit: upgrade to 7.3.13 2025-05-27 10:14:42 -04:00
72e6999c87
user/papermc: upgrade to 1.21.4 2025-05-27 09:56:52 -04:00
ce94f901d0
unmaintained/jellysub: move from user 2025-05-27 09:42:02 -04:00
2fb7a3cd25
user/firefly-iii: disable due to FTBFS 2025-05-27 09:41:29 -04:00
5d9d856a6c
backports/thelounge: disable tests 2025-05-27 09:39:06 -04:00
56f0391fd1
user/scantopl: upgrade to 1.0.0 2025-05-27 09:19:49 -04:00
6b7279026d
user/rmfakecloud: upgrade to 0.0.24 2025-05-27 09:18:25 -04:00
370d84dc77
user/openterface-qt: upgrade to 0.3.13 2025-05-27 09:15:55 -04:00
92c41ceef9
ci: update electron title 2025-05-27 00:02:39 -04:00
23c870f2c6
ci: fix special case for electron 2025-05-26 23:43:08 -04:00
e287d26996
ci: add special case for zotero 2025-05-26 23:42:40 -04:00
234904618e
backports/libmedc: upgrade to 5.0.0 2025-05-26 23:00:12 -04:00
9a93d206fb
ci: skip dotnet6* on testing 2025-05-26 22:36:58 -04:00
d55be2f427
backports/electron: add rust 1.87 patches 2025-05-26 22:09:21 -04:00
a04e0ca17b
backports/fdm-materials: drop due to not needed 2025-05-26 20:49:05 -04:00
0df5b0abb6
backports/nibspatialindex: drop due to not needed 2025-05-26 20:48:28 -04:00
eb112910e9
backports/nlopt: drop due to not needed 2025-05-26 20:48:05 -04:00
024544f4df
backports/libnest2d: drop due to not needed 2025-05-26 20:47:41 -04:00
73b064ed37
backports/uranium: drop due to not needed 2025-05-26 20:47:05 -04:00
26821abb53
backports/py3-svgpath: drop due to not needed 2025-05-26 20:46:39 -04:00
6a037b09a6
backports/py3-rtree: drop due to not needed 2025-05-26 20:46:17 -04:00
b72d6e62e4
backports/py3-pynest2d: drop due to not needed 2025-05-26 20:45:38 -04:00
d09e18a9da
backports/py3-pyinstrument: drop due to not needed 2025-05-26 20:45:18 -04:00
e73030be6b
backports/py3-numpy-stl: drop due to not needed 2025-05-26 20:44:34 -04:00
59283db32a
backports/py3-trimesh: drop due to not needed 2025-05-26 20:44:08 -04:00
6abdbe7e50
backports/py3-mapbox-eacut: drop due to not needed 2025-05-26 20:43:44 -04:00
9e839bd17b
unmaintained/py3-html5-parser: drop for in backports 2025-05-26 20:42:58 -04:00
b4b4877fd3
backports/py3-arcus: drop due to unmaintained 2025-05-26 20:42:18 -04:00
9a735111c7
backports/cura: drop due to unmaintained 2025-05-26 20:42:05 -04:00
27e1275d13
unmaintained/mathjax2: move from user 2025-05-26 20:40:30 -04:00
166b952516
unmaintained/rstudio-desktop: move from user 2025-05-26 20:39:26 -04:00
012a294f6d
backports/perl-math-random-isaac-xs: drop for lack of need 2025-05-26 20:38:39 -04:00
20281faaf5
backports/airsonic-advanced: drop for lack of need 2025-05-26 20:37:42 -04:00
41cd4ce83b
user/xochitl-bin: drop due to pmos 2025-05-26 19:28:27 -04:00
a6c26bc587
user/tandoor-recipes: enable build 2025-05-26 19:27:43 -04:00
39b8c7c45d
user/paperless-ngx: re-enable 2025-05-26 19:27:01 -04:00
39823fcb31
user/uvicorn: new aport 2025-05-26 19:26:34 -04:00
0cfe53ed2c
user/xf86-video-fbdev-rm: drop for pmos 2025-05-26 19:24:25 -04:00
494a4e9ebd
user/linux-rm-headers: drop for pmos 2025-05-26 19:20:03 -04:00
9ab70541a0
user/linux-rm: drop for pmos 2025-05-26 19:19:51 -04:00
c4e0656192
user/rm-extractor: drop for pmos 2025-05-26 19:19:39 -04:00
de224ac5c4
user/rm-utils: drop for pmos 2025-05-26 19:19:27 -04:00
975ca3acc6
user/u-boot-rm: drop for pmos 2025-05-26 19:19:14 -04:00
ea32d2af8e
backports/thelounge: move from user, upgrade to 4.4.3 2025-05-26 19:18:39 -04:00
ab89660806
user/sane: drop due to in community 2025-05-26 19:16:38 -04:00
ff38eb3200
user/freecad: drop due to in backports 2025-05-26 19:06:56 -04:00
24d2f84bab
unmaintained/soci: move from user 2025-05-26 19:05:47 -04:00
48447cd14c
unmaintained/znapzend: move from user 2025-05-26 19:05:21 -04:00
50524f1f2a
ci: check backports of 3.22 2025-05-26 18:39:40 -04:00
260eca3424
backports/uranium: fix build on edge 2025-05-26 18:39:39 -04:00
cea504a924
backports/swig3: drop to not needed 2025-05-26 18:39:37 -04:00
4919fe261c
backports/soqt: fix build on edge 2025-05-26 18:39:35 -04:00
25ba00ae7f
backports/signal-desktop: upgrade to 7.55.0 2025-05-26 18:39:33 -04:00
83207c4dae
backports/shntool: sync pkgrel 2025-05-26 18:39:31 -04:00
ffdbdaafd9
backports/py3-rtree: upgrade to 1.4.0 2025-05-26 18:39:29 -04:00
420fc93b84
backports/py3-pynest2d: fix build on edge 2025-05-26 18:39:27 -04:00
747c2c79a6
backports/py3-pyinstrument: upgrade to 5.0.2 2025-05-26 18:39:25 -04:00
48ef10378e
backports/py3-piby: sync with aports 2025-05-26 18:39:23 -04:00
11b717b3c2
backports/py3-flask-limiter: upgrade to 3.10.1 2025-05-26 18:39:21 -04:00
0a65a63be1
backports/py3-arcus: bump pkgrel 2025-05-26 18:39:19 -04:00
637e2f14ed
backports/apsw: upgrade to 3.49.1.0 2025-05-26 18:39:17 -04:00
91d930b63d
backports/nlopt: upgrade to 2.10.0 2025-05-26 18:39:15 -04:00
c1baeb0f35
backports/nb: upgrade to 7.19.1 2025-05-26 18:39:13 -04:00
5ccaf8e4d7
backports/libspatialindex: fix build on edge 2025-05-26 18:39:11 -04:00
bcbb961623
backports/libnestd: fix build on edge 2025-05-26 18:39:09 -04:00
4677e14f49
backports/libmedc: fix source url 2025-05-26 18:39:08 -04:00
5901ea37ae
backports/freecad: upgrade to 1.0.1 2025-05-26 18:39:06 -04:00
366cbca424
backports/electron: fix build on edge 2025-05-26 18:39:04 -04:00
dc8a32f4b9
backports/calibre: upgrade to 8.4.0 2025-05-26 18:39:02 -04:00
10c0f61045
backports/rapidfuzz: drop due to in community 2025-05-26 18:39:00 -04:00
2162431bb5
backports/py3-levenshtein: drop due to in community 2025-05-26 18:38:58 -04:00
90bda986c7
backports/py3-fuzzywuzzy: drop due to in community 2025-05-26 18:38:56 -04:00
ef52fcc239
backports/py3-colored: drop due to in community 2025-05-26 18:38:53 -04:00
16e489995a
backports/pnpm: drop due to not necessary 2025-05-26 18:38:51 -04:00
c319f2b6ca
backports/openssl1.1-compat: drop due to not necessary 2025-05-26 18:38:49 -04:00
1f49286ca2
backports/gn: drop due to needed version 2025-05-26 18:38:47 -04:00
b4c27f6e3e
backports/dex: drop due to in community 2025-05-26 18:38:45 -04:00
0261319dda
backports/electron: upgrade to 35.4.0 2025-05-26 18:25:49 -04:00
c7b89349dd
backports/*: sync with v3.21 2025-05-26 17:59:21 -04:00
e861045fe3
ci: check community workflow title change 2025-05-26 17:54:28 -04:00
1351bcc0fd
ci: also check last 2 years of alpine releases on main 2025-05-26 17:53:05 -04:00
8763a65fdb
ci: check main v3.22 2025-05-26 17:50:49 -04:00
7caaeeee04
ci: skip dotnet9-stage0 and dotnet8-stage0 in v3.22 community check 2025-05-26 17:45:10 -04:00
474e2ea62d
ci: add ability to skip packages 2025-05-26 17:44:23 -04:00
2d19224dcb
ci: drop v from $release 2025-05-26 17:26:55 -04:00
63dd181c59
ci: introduce release in title when not edge 2025-05-26 17:25:13 -04:00
d7538d14bb
ci: track community v3.22 instead of v3.21 2025-05-26 17:23:15 -04:00
61153cb989
ci: attempt bracket release title 2025-05-26 17:21:27 -04:00
3c3b7fe1ae
ci: fix unterminated s in sed command 2025-05-26 17:16:01 -04:00
c7fa4babd2
ci: fix bracket chracters in query when creating issue 2025-05-26 17:13:22 -04:00
d20ee3e612
ci: fix semantic context checker 2025-05-26 17:09:29 -04:00
2c9d1bc657
ci: check v3.21 release of community 2025-05-26 17:04:25 -04:00
4e133161c7
ci: add in brackets release when not edge 2025-05-26 17:03:12 -04:00
edf630c6dc
ci: add ability to track minor and/or fix releases only 2025-05-26 17:02:39 -04:00
a7e99375b3
ci: add check for main repo 2025-05-26 12:14:35 -04:00
6306486b82
ci: add special case for arm-trusted-firmware 2025-05-26 12:14:00 -04:00
0bb36c34c9
ci: add special case for dotnet*sdk and dotnet*stage0 2025-05-26 11:50:42 -04:00
eb38d0963f
ci: set specific label for user packages 2025-05-26 10:05:57 -04:00
dd132b86fa
ci: abstract electron special code 2025-05-15 14:55:25 -04:00
ab7d3dce4a
ci: add special exception for electron 2025-05-11 17:16:58 -04:00
99107ff63a
ci: update create_issue to support forgejo 11 2025-05-11 10:59:10 -04:00
a7d9756c64
ci: fix lint 2025-05-08 21:17:14 -04:00
3e86f4927c
ci: upgrade container as first action 2025-05-08 21:14:23 -04:00
928ff35f6b
forgejo: use ISSUE_TOKEN instead of FORGEJO_TOKEN 2025-02-18 12:59:44 -05:00
6b56f7cb51
user/openterface-qt: new aport
All checks were successful
/ lint (pull_request) Successful in 31s
/ deploy-x86_64 (pull_request) Successful in 29s
/ build-x86_64 (pull_request) Successful in 1m35s
/ deploy-aarch64 (pull_request) Successful in 58s
/ build-aarch64 (pull_request) Successful in 3m53s
2025-02-14 19:45:05 -05:00
b03043f08c
pmos/device-pine64-pinenote: fix blacklist
All checks were successful
/ lint (pull_request) Successful in 26s
/ deploy-x86_64 (pull_request) Successful in 25s
/ build-x86_64 (pull_request) Successful in 36s
/ deploy-aarch64 (pull_request) Successful in 57s
/ build-aarch64 (pull_request) Successful in 2m1s
2025-01-11 12:27:45 -05:00
d08c1392bc pmos/device-pine64-pinenote: new aport 2025-01-09 14:25:11 +00:00
c6d37636b6 pmos/linux-pine64-pinenote: upgrade to 6.12.0 2025-01-09 04:28:46 +00:00
c82ac68ca4
pmos/pinenote-dbus-service: move from user
All checks were successful
/ lint (pull_request) Successful in 27s
/ deploy-x86_64 (pull_request) Successful in 25s
/ build-x86_64 (pull_request) Successful in 39s
/ build-aarch64 (pull_request) Successful in 11m52s
/ deploy-aarch64 (pull_request) Successful in 2m22s
2025-01-08 23:01:42 -05:00
5d7449a835 pmos/linux-pine64-pinenote: new aport 2025-01-09 03:40:40 +00:00
c2b6767bfa forgejo-ci: add postmarketOS repo when building aport under pmos 2025-01-09 03:40:40 +00:00
8c9065c3af
user/papermc-plugin-worldedit: fix packaging
All checks were successful
/ lint (pull_request) Successful in 31s
/ build-aarch64 (pull_request) Successful in 1m20s
/ deploy-aarch64 (pull_request) Successful in 56s
/ build-x86_64 (pull_request) Successful in 10m25s
/ deploy-x86_64 (pull_request) Successful in 34s
2025-01-07 11:01:44 -05:00
256 changed files with 11913 additions and 9660 deletions

View file

@ -7,7 +7,7 @@
set -eu -o pipefail
readonly APORTSDIR=$CI_PROJECT_DIR
readonly REPOS="backports user"
readonly REPOS="backports user pmos"
readonly ALPINE_REPOS="main community testing"
readonly ARCH=$(apk --print-arch)
# gitlab variables
@ -16,6 +16,8 @@ readonly BASEBRANCH=$CI_MERGE_REQUEST_TARGET_BRANCH_NAME
: "${REPODEST:=$HOME/packages}"
: "${MIRROR:=https://ayakael.net/api/packages/forge/alpine}"
: "${ALPINE_MIRROR:=http://dl-cdn.alpinelinux.org/alpine}"
: "${PMOS_MIRROR:=http://mirror.postmarketos.org/postmarketos}"
: "${PMOS_KEY:=https://git.syndicate-lang.org/synit/pmbootstrap/raw/commit/8efee86388408c0d8de45c64fe383580ffd91700/pmb/data/keys/build.postmarketos.org.rsa.pub}"
: "${MAX_ARTIFACT_SIZE:=300000000}" #300M
: "${CI_DEBUG_BUILD:=}"
@ -137,6 +139,22 @@ setup_system() {
git config --global init.defaultBranch master
}
setup_pmos() {
	# Configure the build host for postmarketOS aports: map the Alpine
	# target branch onto the matching pmOS release, trust the pmOS
	# package-signing key, and register the pmOS apk repository.
	local pmos_release
	case "$BASEBRANCH" in
		v3.21) pmos_release="v24.12";;
		v3.20) pmos_release="v24.6";;
		v3.19) pmos_release="v23.12";;
		edge) pmos_release=master;;
		*) die "Branch \"$BASEBRANCH\" not supported!"
	esac
	doas wget "$PMOS_KEY" -P /etc/apk/keys
	doas sh -c "echo $PMOS_MIRROR/$pmos_release >> /etc/apk/repositories"
	# Index refresh is best-effort: other configured repos may be stale.
	doas apk update || true
}
sysinfo() {
printf ">>> Host system information (arch: %s, release: %s) <<<\n" "$ARCH" "$(get_release)"
printf "- Number of Cores: %s\n" "$(nproc)"
@ -144,6 +162,7 @@ sysinfo() {
printf "- Free space: %s\n" "$(df -hP / | awk '/\/$/ {print $4}')"
}
copy_artifacts() {
cd "$APORTSDIR"
@ -182,7 +201,7 @@ setup_system || die "Failed to setup system"
# git no longer allows to execute in repositories owned by different users
doas chown -R buildozer: .
fetch_flags="-qn"
fetch_flags="-qnu"
debugging && fetch_flags="-v"
git fetch $fetch_flags "$CI_MERGE_REQUEST_PROJECT_URL" \
@ -204,6 +223,7 @@ build_limit=$CI_ALPINE_BUILD_LIMIT
for repo in $(changed_repos); do
set_repositories_for "$repo"
[ "$repo" == "pmos" ] && setup_pmos
built_aports=0
changed_aports_in_repo=$(changed_aports "$repo")
changed_aports_in_repo_count=$(echo "$changed_aports_in_repo" | wc -l)

View file

@ -1,11 +1,49 @@
#!/bin/bash
# expects the following env variables:
# downstream: downstream repo
# downstream: downstream repo
#
# env variables to track minor or bug-fix updates
# minor_only: array of packages that should only track minor releases (separated by space)
# default: none
# all packages: all
# fix_only: array of packages that should only track bug fix releases (separated by space)
# default: none
# all packages: all
#
# If either minor_only or fix_only is set, only packages with semantic versioning schemes
# will be tracked.
#
# If a package is both minor_only and fix_only, the minor releases will be tracked
#
# If a - is placed in front of package name, it'll be excluded from the update rule
#
# optional env variables
# ALL_PACKAGES: when true, ignore whether a package is owned by me
# skip_package: array of packages to skip, place a - in front of package name to not exclude it
# add_package: array of additional packages to check
#
repo=${downstream/*\/}
release=${downstream/\/$repo/}
release=${release/*\/}
release=${release/v}
arch=$(apk --print-arch)
# add special case for postmarketos
[ "$release" == "postmarketos" ] && { release=$repo; repo="pmos"; arch="aarch64"; }
[ "$release" == "master" ] && release=edge
curl --silent $downstream/x86_64/APKINDEX.tar.gz | tar -O -zx APKINDEX > APKINDEX
is_semantic() {
	# Succeeds (returns 0) when $1 looks like a semantic x.y.z version,
	# i.e. it contains exactly two dots; fails (returns 1) otherwise.
	local dots_only=${1//[^.]}
	[[ ${#dots_only} -eq 2 ]]
}
echo "Checking $downstream for out of date packages"
curl --silent $downstream/$arch/APKINDEX.tar.gz | tar -O -zx APKINDEX > APKINDEX
if [ "$ALL_PACKAGES" == "true" ]; then
owned_by_you=$(awk -F ':' '{if($1=="o"){print $2}}' APKINDEX | sort | uniq)
@ -15,27 +53,145 @@ else
echo "Found $(printf '%s\n' $owned_by_you | wc -l ) packages owned by you"
fi
# add additional packages
owned_by_you="$owned_by_you $add_package"
rm -f out_of_date not_in_anitya
for pkg in $owned_by_you; do
upstream_version=$(curl --fail -X GET -sS -H 'Content-Type: application/json' "https://release-monitoring.org/api/v2/packages/?name=$pkg&distribution=Alpine" | jq -r '.items.[].stable_version')
downstream_version=$(sed -n "/^P:$pkg$/,/^$/p" APKINDEX | awk -F ':' '{if($1=="V"){print $2}}' | sort -V | tail -n 1)
downstream_version=${downstream_version/-*}
# special cases
# skip package if in $skip_package array
if [[ "$skip_package" == *all* || "$skip_package" == *$pkg* ]] && [[ "$skip_package" != *-$pkg* ]]; then
echo "$pkg skipped"
continue
fi
# special cases where package is not semantic
case $pkg in
freetube) upstream_version=$(curl --fail -X GET -sS -H 'Content-Type: application/json' "https://release-monitoring.org/api/v2/packages/?name=$pkg&distribution=Alpine" | jq -r '.items.[].version' | sed "s|-beta||");;
dotnet9-sdk|dotnet9-stage0) upstream_version=${upstream_version/-*};;
# track u-boot-pine64-pinenote against mainline u-boot, and track upstream rockchip blobs
u-boot-pine64-pinenote)
upstream_version="$(curl --fail -X GET -s -H 'Content-Type: application/json' "https://release-monitoring.org/api/project/Alpine/u-boot" | jq -r '.stable_versions.[]' | head -n1)"
# for some reason the commit is no longer in APKINDEX, using master instead
#commit=$(sed -n "/^P:$pkg$/,/^$/p" APKINDEX | awk -F ':' '{if($1=="c"){print $2}}')
#commit=${commit/-dirty/}
commit=master
# fetches upstream version for blobs using ini file
upstream_trust="$(curl --fail -s 'https://raw.githubusercontent.com/rockchip-linux/rkbin/master/RKTRUST/RK3566TRUST_ULTRA.ini' | grep bl31 | awk -F '=' '{if($1"="PATH){print $2}}' | grep -o -P '(?<=_v).*(?=.elf)')"
upstream_ddr="$(curl --fail -s 'https://raw.githubusercontent.com/rockchip-linux/rkbin/master/RKBOOT/RK3566MINIALL_ULTRA.ini' | grep ddr | awk -F '=' '{if($1"="PATH){print $2}}' | head -n 1 | grep -o -P '(?<=_v).*(?=.bin)')"
# extracts downstream version via _trust_ver and _ddr_ver variable
downstream_trust=$(curl --fail -X GET -s "https://gitlab.postmarketos.org/postmarketOS/pmaports/-/raw/$commit/device/testing/u-boot-pine64-pinenote/APKBUILD" | awk -F '=' '{if($1=="_trust_ver"){print $2}}')
downstream_ddr=$(curl --fail -X GET -s "https://gitlab.postmarketos.org/postmarketOS/pmaports/-/raw/$commit/device/testing/u-boot-pine64-pinenote/APKBUILD" | awk -F '=' '{if($1=="_ddr_ver"){print $2}}')
# compares versions and creates newline in out_of_date if problematic
if [ "$upstream_trust" != "$downstream_trust" ]; then
echo "$pkg new Trust blob $upstream_trust version available"
echo "$pkg(trust) $downstream_trust $upstream_trust $repo $release" >> out_of_date
fi
if [ "$upstream_ddr" != "$downstream_ddr" ]; then
echo "$pkg new ddr blob $upstream_ddr version available"
echo "$pkg(ddr) $downstream_ddr $upstream_ddr $repo $release" >> out_of_date
fi
;;
# release-monitoring omits the extra B, while we keep it but put it after the version no.
looking-glass) upstream_version="$(curl --fail -X GET -s -H 'Content-Type: application/json' "https://release-monitoring.org/api/project/Alpine/$pkg" | jq -r '.stable_versions.[]' | head -n1)b";;
# we want to track both Firefox security upgrades + Zotero upgrades
zotero)
commit=$(sed -n "/^P:$pkg$/,/^$/p" APKINDEX | awk -F ':' '{if($1=="c"){print $2}}')
downstream_fx_ver=$(curl --fail -X GET -s "https://gitlab.alpinelinux.org/alpine/aports/-/raw/$commit/community/zotero/APKBUILD" | awk -F '=' '{if($1=="_fxver"){print $2}}')
upstream_fx_ver=$(curl --fail -X GET -s -H 'Content-Type: application/json' "https://release-monitoring.org/api/project/Alpine/firefox-esr" | jq -r ".stable_versions.[] | match(\"${downstream_fx_ver/.*.*}.*\").string" | head -n1)
if [ "$upstream_fx_ver" != "$downstream_fx_ver" ]; then
echo "$pkg new Firefox $upstream_fx_ver version available"
echo "$pkg(fx_ver) $downstream_fx_ver $upstream_fx_ver $repo $release" >> out_of_date
fi
upstream_version=$(curl --fail -X GET -s -H 'Content-Type: application/json' "https://release-monitoring.org/api/project/Alpine/$pkg" | jq -r '.stable_versions.[]' | head -n1)
;;
# aports omits the -beta part of the version
freetube) upstream_version=$(curl --fail -X GET -s -H 'Content-Type: application/json' "https://release-monitoring.org/api/v2/packages/?name=$pkg&distribution=Alpine" | jq -r '.items.[].version' | sed "s|-beta||");;
# we only track x.x.1xx feature branches of SDK and stage0
dotnet*sdk|dotnet*stage0) upstream_version=$(curl --fail -X GET -s -H 'Content-Type: application/json' "https://release-monitoring.org/api/project/Alpine/$pkg" | jq -r ".stable_versions.[] | match(\"${downstream_version::-2}.*\").string" | sed 's|-.*||' | head -n1);;
# we want to track both current major version and upstream latest
electron)
upstream_version=$(curl --fail -X GET -s -H 'Content-Type: application/json' "https://release-monitoring.org/api/v2/projects/?name=$pkg&distribution=Alpine" | jq -r '.items.[].stable_versions' | jq -r ".[] | match(\"${downstream_version/.*}.*\").string" | head -n 1)
latest_version=$(curl --fail -X GET -s -H 'Content-Type: application/json' "https://release-monitoring.org/api/v2/packages/?name=$pkg&distribution=Alpine" | jq -r '.items.[].stable_version' )
# append version number to signal that this is not latest major version
if [ "${upstream_version/.*}" != "${latest_version/.*}" ]; then
echo "$pkg(${latest_version/.*}) major version available"
echo "$pkg(${latest_version/.*}) $downstream_version $latest_version $repo $release" >> out_of_date
pkg="$pkg(${upstream_version/.*})"
fi
;;
# we want to track LTS (even releases) rather than latest
arm-trusted-firmware)
if [[ "$fix_only" == *all* || "$fix_only" == *$pkg* ]] || [[ "$minor_only" == *all* || "$minor_only" == *$pkg* ]]; then
upstream_version=$(curl --fail -X GET -s -H 'Content-Type: application/json' "https://release-monitoring.org/api/project/Alpine/$pkg" | jq -r ".stable_versions.[] | match(\"${downstream_version%.*}.*\").string" | head -n1)
else
upstream_version=$(curl --fail -X GET -s -H 'Content-Type: application/json' "https://release-monitoring.org/api/project/Alpine/$pkg" | jq -r '.stable_versions.[] | select(test("^[0-9]+\\.[0-9]+\\.[0-9]+$")) | select(split(".") | .[1] | tonumber | . % 2 == 0)' | head -n1)
fi
;;
# track linux-pine64-pinenote against latest
linux-pine64-pinenote)
upstream_version=$(curl --fail -X GET -s -H 'Content-Type: application/json' "https://release-monitoring.org/api/project/315000" | jq -r ".stable_versions.[] | match(\"${downstream_version%.*}.*\").string" | head -n1)
latest_version=$(curl --fail -X GET -s -H 'Content-Type: application/json' "https://release-monitoring.org/api/project/315000" | jq -r ".stable_versions.[]" | head -n1)
# append version number to signal that this is not latest major version
if [ "${upstream_version/.*.*}" != "${latest_version/.*.*}" ]; then
echo "$pkg(${latest_version/.*.*}) major version available"
echo "$pkg(${latest_version/.*.*}) $downstream_version $latest_version $repo $release" >> out_of_date
pkg="$pkg(${upstream_version%.*})"
fi
;;
# track linux-radxa against the BSP kernel (updates usually arrive awfully late)
linux-radxa)
upstream_version=$(curl --fail -X GET -s -H 'Content-Type: application/json' "https://release-monitoring.org/api/project/Alpine/$pkg" | jq -r '.stable_versions.[]' | head -n1)
upstream_version=${upstream_version/-*}
;;
dotnet*-sdk|dotnet*-stage0)
upstream_version=$(curl --fail -X GET -s -H 'Content-Type: application/json' "https://release-monitoring.org/api/project/141853" | jq -r ".stable_versions.[] | match(\"${downstream_version::-2}.*\").string" | head -n 1)
;;
dotnet*-runtime)
upstream_version=$(curl --fail -X GET -s -H 'Content-Type: application/json' "https://release-monitoring.org/api/project/220385" | jq -r ".stable_versions.[] | match(\"${downstream_version%.*}.*\").string" | head -n 1)
;;
# removes last bit in github tag from usbboot release, as not needed
raspberrypi-usbboot) curl --fail -X GET -s -H 'Content-Type: application/json' "https://release-monitoring.org/api/project/Alpine/$pkg" | jq -r '.stable_versions.[]' | head -n1 | sed 's|-.*||';;
*)
if [[ "$minor_only" == *all* || "$minor_only" == *$pkg* ]] && [[ "$minor_only" != *-$pkg* ]]; then
# continues when package version scheme is not semantic, but minor_only or fix_only is set
if ! is_semantic $downstream_version; then
echo "$pkg is not semantic, and minor_only is set"
continue
fi
upstream_version=$(curl --fail -X GET -s -H 'Content-Type: application/json' "https://release-monitoring.org/api/project/Alpine/$pkg" | jq -r ".stable_versions.[] | match(\"${downstream_version%.*.*}.*\").string" | head -n1)
elif [[ "$fix_only" == *all* || "$fix_only" == *$pkg* ]] && [[ "$fix_only" != *-$pkg* ]]; then
# continues when package version scheme is not semantic, but minor_only or fix_only is set
if ! is_semantic $downstream_version; then
echo "$pkg is not semantic, and fix_only is set"
continue
fi
upstream_version=$(curl --fail -X GET -s -H 'Content-Type: application/json' "https://release-monitoring.org/api/project/Alpine/$pkg" | jq -r ".stable_versions.[] | match(\"${downstream_version%.*}.*\").string" | head -n1)
else
upstream_version=$(curl --fail -X GET -s -H 'Content-Type: application/json' "https://release-monitoring.org/api/project/Alpine/$pkg" | jq -r '.stable_versions.[]' | head -n1)
fi
;;
esac
if [ -z "$upstream_version" ]; then
echo "$pkg not in anitya"
echo "$pkg" >> not_in_anitya
# do not track not_in_anitya if either minor_only or fix_only is set
if [ -z ${minor_only+x} ] && [ -z ${fix_only+x} ]; then
echo "$pkg" >> not_in_anitya
fi
elif [ "$downstream_version" != "$(printf '%s\n' $upstream_version $downstream_version | sort -V | head -n 1)" ]; then
echo "$pkg higher downstream"
echo "$pkg higher downstream $upstream_version"
continue
elif [ "$upstream_version" != "$downstream_version" ]; then
echo "$pkg upstream version $upstream_version does not match downstream version $downstream_version"
echo "$pkg $downstream_version $upstream_version $repo" >> out_of_date
echo "$pkg upstream version $upstream_version does not match downstream version $downstream_version in $release"
echo "$pkg $downstream_version $upstream_version $repo $release" >> out_of_date
fi
done

View file

@ -1,17 +1,22 @@
#!/bin/sh
TARGET_REPO=$1
ARCH=$2
ARCH="x86 x86_64 armhf armv7 aarch64 ppc64le s390x mips64 riscv64 loongarch64"
for arch in $ARCH; do
# check if repo exists
wget --spider $TARGET_REPO/$arch/APKINDEX.tar.gz -o /dev/null || continue
echo ">>> Clearing repo $TARGET_REPO/$arch"
curl --silent $TARGET_REPO/$ARCH/APKINDEX.tar.gz | tar -O -zx APKINDEX > APKINDEX
curl --silent $TARGET_REPO/$arch/APKINDEX.tar.gz | tar -O -zx APKINDEX > APKINDEX
pkgs=$(awk -F ':' '{if($1=="o"){print $2}}' APKINDEX | sort | uniq)
pkgs=$(awk -F ':' '{if($1=="o"){print $2}}' APKINDEX | sort | uniq)
for pkg in $pkgs; do
pkgvers=$(sed -n "/^P:$pkg$/,/^$/p" APKINDEX | awk -F ':' '{if($1=="V"){print $2}}')
for pkgver in $pkgvers; do
echo "Deleting $pkg-$pkgver of arch $ARCH from $TARGET_REPO"
curl -s --user $FORGE_REPO_USER:$FORGE_REPO_TOKEN -X DELETE $TARGET_REPO/$ARCH/$pkg-$pkgver.apk
for pkg in $pkgs; do
pkgvers=$(sed -n "/^P:$pkg$/,/^$/p" APKINDEX | awk -F ':' '{if($1=="V"){print $2}}')
for pkgver in $pkgvers; do
echo "Deleting $pkg-$pkgver of arch $arch from $TARGET_REPO"
curl -s --user $FORGE_REPO_USER:$FORGE_REPO_TOKEN -X DELETE $TARGET_REPO/$arch/$pkg-$pkgver.apk
done
done
done

View file

@ -1,7 +1,7 @@
#!/bin/bash
# expects:
# env variable FORGEJO_TOKEN
# env variable ISSUE_TOKEN
# file out_of_date
IFS='
@ -13,14 +13,23 @@ does_it_exist() {
downstream_version=$2
upstream_version=$3
repo=$4
release=$5
query="$repo/$name: upgrade to $upstream_version"
query="$(echo $query | sed 's| |%20|g' | sed 's|:|%3A|g' | sed 's|/|%2F|g' )"
if [ "$release" != "edge" ]; then
query="%22[$release] $query%22"
elif [ "$repo" != "pmos" ] && [ "$repo" != "user" ]; then
# workaround to this query matching both stable and edge branch
query="%22$query%22&labels=Edge"
else
query="%22$query%22"
fi
query="$(echo $query | sed 's| |%20|g' | sed 's|:|%3A|g' | sed 's|/|%2F|g' | sed 's|\[|%5B|g' | sed 's|\]|%5D|g')"
result="$(curl --silent -X 'GET' \
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues?state=open&q=$query&type=issues" \
result="$(curl -L --silent -X 'GET' \
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues?state=open&q=$query&type=issues&sort=latest" \
-H 'accept: application/json' \
-H "authorization: Basic $FORGEJO_TOKEN"
-H "Authorization: token $ISSUE_TOKEN"
)"
if [ "$result" == "[]" ]; then
@ -33,14 +42,23 @@ is_it_old() {
downstream_version=$2
upstream_version=$3
repo=$4
release=$5
query="$repo/$name: upgrade to"
query="$(echo $query | sed 's| |%20|g' | sed 's|:|%3A|g' | sed 's|/|%2F|g' )"
if [ "$release" != "edge" ]; then
query="%22[$release] $query%22"
elif [ "$repo" != "pmos" ] && [ "$repo" != "user" ]; then
# workaround to this query matching both stable and edge branch
query="%22$query%22&labels=Edge"
else
query="%22$query%22"
fi
query="$(echo $query | sed 's| |%20|g' | sed 's|:|%3A|g' | sed 's|/|%2F|g' | sed 's|\[|%5B|g' | sed 's|\]|%5D|g')"
result="$(curl --silent -X 'GET' \
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues?state=open&q=$query&type=issues" \
result="$(curl -L --silent -X 'GET' \
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues?state=open&q=$query&type=issues&sort=latest" \
-H 'accept: application/json' \
-H "authorization: Basic $FORGEJO_TOKEN"
-H "authorization: token $ISSUE_TOKEN"
)"
result_title="$(echo $result | jq -r '.[].title' )"
@ -59,16 +77,18 @@ update_title() {
downstream_version=$2
upstream_version=$3
repo=$4
id=$5
release=$5
id=$6
result=$(curl --silent -X 'PATCH' \
title="$repo/$name: upgrade to $upstream_version"
if [ "$release" != "edge" ]; then title="[$release] $title"; fi
result=$(curl -L --silent -X 'PATCH' \
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues/$id" \
-H 'accept: application/json' \
-H "authorization: Basic $FORGEJO_TOKEN" \
-H "authorization: token $ISSUE_TOKEN" \
-H 'Content-Type: application/json' \
-d "{
\"title\": \"$repo/$name: upgrade to $upstream_version\"
}"
-d "{\"title\": \"$title\"}"
)
return 0
@ -79,14 +99,18 @@ create_issue() {
downstream_version=$2
upstream_version=$3
repo=$4
release=$5
result=$(curl --silent -X 'POST' \
title="$repo/$name: upgrade to $upstream_version"
if [ "$release" != "edge" ]; then title="[$release] $title"; fi
result=$(curl -L --silent -X 'POST' \
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues" \
-H 'accept: application/json' \
-H "authorization: Basic $FORGEJO_TOKEN" \
-H "authorization: token $ISSUE_TOKEN" \
-H 'Content-Type: application/json' \
-d "{
\"title\": \"$repo/$name: upgrade to $upstream_version\",
\"title\": \"$title\",
\"labels\": [
$LABEL_NUMBER
]
@ -105,41 +129,42 @@ if [ -f out_of_date ]; then
downstream_version="$(echo $pkg | awk '{print $2}')"
upstream_version="$(echo $pkg | awk '{print $3}')"
repo="$(echo $pkg | awk '{print $4}')"
release="$(echo $pkg | awk '{print $5}')"
if does_it_exist $name $downstream_version $upstream_version $repo; then
if does_it_exist $name $downstream_version $upstream_version $repo $release; then
echo "Issue for $repo/$name already exists"
continue
fi
id=$(is_it_old $name $downstream_version $upstream_version $repo)
id=$(is_it_old $name $downstream_version $upstream_version $repo $release)
if [ "$id" != "0" ] && [ -n "$id" ]; then
echo "Issue for $repo/$name needs updating"
update_title $name $downstream_version $upstream_version $repo $id
update_title $name $downstream_version $upstream_version $repo $release $id
continue
fi
echo "Creating issue for $repo/$name"
create_issue $name $downstream_version $upstream_version $repo
create_issue $name $downstream_version $upstream_version $repo $release
done
fi
if [ -f not_in_anitya ]; then
query="Add missing $repo packages to anitya"
query="$(echo $query | sed 's| |%20|g')"
query="%22$(echo $query | sed 's| |%20|g')%22"
result="$(curl --silent -X 'GET' \
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues?state=open&q=$query&type=issues" \
result="$(curl -L --silent -X 'GET' \
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues?state=open&q=$query&type=issues&sort=latest" \
-H 'accept: application/json' \
-H "authorization: Basic $FORGEJO_TOKEN"
-H "authorization: token $ISSUE_TOKEN"
)"
if [ "$result" == "[]" ]; then
echo "Creating anitya issue"
result=$(curl --silent -X 'POST' \
result=$(curl -L --silent -X 'POST' \
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues" \
-H 'accept: application/json' \
-H "authorization: Basic $FORGEJO_TOKEN" \
-H "authorization: token $ISSUE_TOKEN" \
-H 'Content-Type: application/json' \
-d "{
\"title\": \"Add missing $repo packages to anitya\",
@ -152,10 +177,10 @@ if [ -f not_in_anitya ]; then
else
echo "Updating anitya issue"
result_id="$(echo $result | jq -r '.[].number' )"
result=$(curl --silent -X 'PATCH' \
result=$(curl -L --silent -X 'PATCH' \
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues/$result_id" \
-H 'accept: application/json' \
-H "authorization: Basic $FORGEJO_TOKEN" \
-H "authorization: token $ISSUE_TOKEN" \
-H 'Content-Type: application/json' \
-d "{
\"body\": \"- [ ] $(sed '{:q;N;s/\n/\\n- [ ] /g;t q}' not_in_anitya)\"

View file

@ -4,7 +4,7 @@
set -eu -o pipefail
readonly REPOS="backports user"
readonly BASEBRANCH=$GITHUB_BASE_REF
readonly BASEBRANCH=$CI_MERGE_REQUEST_TARGET_BRANCH_NAME
readonly TARGET_REPO=$CI_ALPINE_REPO
apkgs=$(find package -type f -name "*.apk")

54
.forgejo/bin/generate-cross.sh Executable file
View file

@ -0,0 +1,54 @@
#!/bin/bash
TARGET_RELEASE=$1
shift
TARGET_ARCH=$@
CURRENT_ARCH=$(cat /etc/apk/arch)
if [ -z "$TARGET_RELEASE" ]; then
echo ">>> No target release specified, assumming edge"
TARGET_RELEASE=edge
fi
[[ "$TARGET_RELEASE" == "edge" ]] && TARGET_BRANCH=master || TARGET_BRANCH="${TARGET_RELEASE/v}-stable"
if [[ ! -d "aports-$TARGET_RELEASE" ]]; then
echo ">>> Fetching aports for $TARGET_RELEASE"
git init aports-$TARGET_RELEASE
git -C aports-$TARGET_RELEASE remote add origin https://gitlab.alpinelinux.org/alpine/aports
git -C aports-$TARGET_RELEASE fetch --depth 1 origin $TARGET_BRANCH
git -C aports-$TARGET_RELEASE checkout $TARGET_BRANCH
[[ $? -ne 0 ]] && { echo ">>> Git fetch failed, does your release exist?"; exit; } || true
fi
if [ -z "$TARGET_ARCH" ]; then
echo ">>> No arch specified, assuming target to all arches supported by upstream for release $TARGET_RELEASE"
TARGET_ARCH=$(cat aports-$TARGET_RELEASE/scripts/mkimg.minirootfs.sh | tr -d "\t" | awk -F "=" '{if($1=="arch"){print $2}}' | tr -d \" | sed "s| $CURRENT_ARCH||")
if [ -z "$TARGET_ARCH" ]; then
echo ">>> Could not compute arches that are supported, does your release exist?"
exit
fi
fi
. /usr/share/abuild/functions.sh
for arch in $TARGET_ARCH; do
if [[ "$(arch_to_hostspec $arch)" == "unknown" ]]; then
echo ">>> $arch not valid arch, please chose among the following"
sed -n '/^arch_to_hostspec/,/esac$/ {s/esac//;p;}' /usr/share/abuild/functions.sh | sed -e '/unknown/d' -e '/arch/d' -e '/case/d' -e "/$CURRENT_ARCH/d" | awk '{print $1}' | tr -d ')'
exit
fi
done
echo ">>> Targetting $TARGET_ARCH for cross generation"
(
cd aports-$TARGET_RELEASE/scripts
# this stops bootstrap from building the whole base system
sed -i 's|^msg "Cross building base system"|exit; msg "Cross building base system"|' bootstrap.sh
for arch in $TARGET_ARCH; do
echo ">>> Building cross-compilers for $arch"
./bootstrap.sh $arch
[[ $? -ne 0 ]] && { echo ">>> Cross-build failure"; exit; } || true
done
echo ">>> Building done"
)

View file

@ -15,6 +15,7 @@ jobs:
steps:
- name: Environment setup
run: |
doas apk upgrade -a
doas apk add nodejs git patch curl
cd /etc/apk/keys
doas curl -JO https://ayakael.net/api/packages/forge/alpine/key
@ -27,7 +28,7 @@ jobs:
${{ github.workspace }}/.forgejo/bin/build.sh
touch packages/dummy
- name: Package upload
uses: forgejo/upload-artifact@v3
uses: actions/upload-artifact@v3
with:
name: package
path: packages
@ -39,6 +40,7 @@ jobs:
image: alpine:latest
env:
CI_ALPINE_REPO: 'https://ayakael.net/api/packages/forge/alpine'
CI_MERGE_REQUEST_TARGET_BRANCH_NAME: ${{ github.base_ref }}
FORGE_REPO_TOKEN: ${{ secrets.FORGE_REPO_TOKEN }}
FORGE_REPO_USER: ${{ vars.FORGE_REPO_USER }}
steps:
@ -47,6 +49,6 @@ jobs:
- name: Repo pull
uses: actions/checkout@v4
- name: Package download
uses: forgejo/download-artifact@v3
uses: actions/download-artifact@v3
- name: Package deployment
run: ${{ github.workspace }}/.forgejo/bin/deploy.sh

View file

@ -0,0 +1,54 @@
on:
pull_request:
types: [ assigned, opened, synchronize, reopened ]
jobs:
build-armv7:
runs-on: armv7
container:
image: alpinelinux/alpine-gitlab-ci:latest
env:
CI_PROJECT_DIR: ${{ github.workspace }}
CI_DEBUG_BUILD: ${{ runner.debug }}
CI_MERGE_REQUEST_PROJECT_URL: ${{ github.server_url }}/${{ github.repository }}
CI_MERGE_REQUEST_TARGET_BRANCH_NAME: ${{ github.base_ref }}
steps:
- name: Environment setup
run: |
doas apk upgrade -a
doas apk add nodejs git patch curl
cd /etc/apk/keys
doas curl -JO https://ayakael.net/api/packages/forge/alpine/key
- name: Repo pull
uses: actions/checkout@v4
with:
fetch-depth: 500
- name: Package build
run: |
${{ github.workspace }}/.forgejo/bin/build.sh
touch packages/dummy
- name: Package upload
uses: actions/upload-artifact@v3
with:
name: package
path: packages
deploy-armv7:
needs: [build-armv7]
runs-on: armv7
container:
image: alpine:latest
env:
CI_ALPINE_REPO: 'https://ayakael.net/api/packages/forge/alpine'
CI_MERGE_REQUEST_TARGET_BRANCH_NAME: ${{ github.base_ref }}
FORGE_REPO_TOKEN: ${{ secrets.FORGE_REPO_TOKEN }}
FORGE_REPO_USER: ${{ vars.FORGE_REPO_USER }}
steps:
- name: Setting up environment
run: apk add nodejs curl findutils git gawk
- name: Repo pull
uses: actions/checkout@v4
- name: Package download
uses: actions/download-artifact@v3
- name: Package deployment
run: ${{ github.workspace }}/.forgejo/bin/deploy.sh

View file

@ -0,0 +1,59 @@
on:
workflow_dispatch:
inputs:
target_arch:
description: 'target arch'
required: false
type: string
jobs:
build-cross:
runs-on: x86_64
container:
image: alpinelinux/alpine-gitlab-ci:latest
env:
CI_PROJECT_DIR: ${{ github.workspace }}
CI_DEBUG_BUILD: ${{ runner.debug }}
CI_MERGE_REQUEST_PROJECT_URL: ${{ github.server_url }}/${{ github.repository }}
CI_MERGE_REQUEST_TARGET_BRANCH_NAME: ${{ github.ref_name }}
steps:
- name: Environment setup
run: |
doas apk upgrade -a
doas apk add nodejs git patch curl bash
cd /etc/apk/keys
doas curl -JO https://ayakael.net/api/packages/forge/alpine/key
- name: Repo pull
uses: actions/checkout@v4
with:
fetch-depth: 500
- name: Package build
run: |
${{ github.workspace }}/.forgejo/bin/build.sh
${{ github.workspace }}/.forgejo/bin/generate-cross.sh ${{ github.ref_name }} ${{ inputs.target_arch }}
mv -v /home/buildozer/packages/main ${{ github.workspace }}/packages/cross
- name: Package upload
uses: actions/upload-artifact@v3
with:
name: package
path: packages
deploy-cross:
needs: [build-cross]
runs-on: x86_64
container:
image: alpine:latest
env:
CI_ALPINE_REPO: 'https://ayakael.net/api/packages/forge/alpine'
CI_MERGE_REQUEST_TARGET_BRANCH_NAME: ${{ github.ref_name }}
FORGE_REPO_TOKEN: ${{ secrets.FORGE_REPO_TOKEN }}
FORGE_REPO_USER: ${{ vars.FORGE_REPO_USER }}
steps:
- name: Setting up environment
run: apk add nodejs curl findutils git gawk
- name: Repo pull
uses: actions/checkout@v4
- name: Package download
uses: actions/download-artifact@v3
- name: Package deployment
run: ${{ github.workspace }}/.forgejo/bin/deploy.sh

View file

@ -15,6 +15,7 @@ jobs:
steps:
- name: Environment setup
run: |
doas apk upgrade -a
doas apk add nodejs git patch curl
cd /etc/apk/keys
doas curl -JO https://ayakael.net/api/packages/forge/alpine/key
@ -27,7 +28,7 @@ jobs:
${{ github.workspace }}/.forgejo/bin/build.sh
touch packages/dummy
- name: Package upload
uses: forgejo/upload-artifact@v3
uses: actions/upload-artifact@v3
with:
name: package
path: packages
@ -39,6 +40,7 @@ jobs:
image: alpine:latest
env:
CI_ALPINE_REPO: 'https://ayakael.net/api/packages/forge/alpine'
CI_MERGE_REQUEST_TARGET_BRANCH_NAME: ${{ github.base_ref }}
FORGE_REPO_TOKEN: ${{ secrets.FORGE_REPO_TOKEN }}
FORGE_REPO_USER: ${{ vars.FORGE_REPO_USER }}
steps:
@ -47,6 +49,6 @@ jobs:
- name: Repo pull
uses: actions/checkout@v4
- name: Package download
uses: forgejo/download-artifact@v3
uses: actions/download-artifact@v3
- name: Package deployment
run: ${{ github.workspace }}/.forgejo/bin/deploy.sh

View file

@ -7,12 +7,12 @@ on:
jobs:
check-backports:
name: Check backports repo
runs-on: x86_64
runs-on: aarch64
container:
image: alpine:latest
env:
downstream: https://ayakael.net/api/packages/forge/alpine/v3.21/backports
FORGEJO_TOKEN: ${{ secrets.forgejo_token }}
downstream: https://ayakael.net/api/packages/forge/alpine/v3.23/backports
ISSUE_TOKEN: ${{ secrets.issue_token }}
LABEL_NUMBER: 1
ALL_PACKAGES: true
steps:

View file

@ -5,14 +5,14 @@ on:
- cron: '0 5 * * *'
jobs:
check-community:
name: Check community repo
runs-on: x86_64
check-community-edge:
name: Check community(edge) repo
runs-on: aarch64
container:
image: alpine:latest
env:
downstream: https://dl-cdn.alpinelinux.org/alpine/edge/community
FORGEJO_TOKEN: ${{ secrets.forgejo_token }}
ISSUE_TOKEN: ${{ secrets.issue_token }}
LABEL_NUMBER: 4
steps:
- name: Environment setup
@ -25,3 +25,92 @@ jobs:
run: ${{ github.workspace }}/.forgejo/bin/check_ver.sh
- name: Create issues
run: ${{ github.workspace }}/.forgejo/bin/create_issue.sh
check-community-3.23:
name: Check community(3.23) repo
runs-on: aarch64
container:
image: alpine:latest
env:
downstream: https://dl-cdn.alpinelinux.org/alpine/v3.23/community
ISSUE_TOKEN: ${{ secrets.issue_token }}
LABEL_NUMBER: 13
fix_only: all -git-annex
skip_package: dotnet9-stage0 dotnet8-stage0 py3-boto3 py3-botocore dotnet10-stage0
steps:
- name: Environment setup
run: apk add grep coreutils gawk curl wget bash nodejs git jq sed
- name: Get scripts
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Check out-of-date packages
run: ${{ github.workspace }}/.forgejo/bin/check_ver.sh
- name: Create issues
run: ${{ github.workspace }}/.forgejo/bin/create_issue.sh
check-community-3.22:
name: Check community(3.22) repo
runs-on: aarch64
container:
image: alpine:latest
env:
downstream: https://dl-cdn.alpinelinux.org/alpine/v3.22/community
ISSUE_TOKEN: ${{ secrets.issue_token }}
LABEL_NUMBER: 13
skip_package: 'all -dotnet8-runtime -dotnet9-runtime'
fix_only: all
steps:
- name: Environment setup
run: apk add grep coreutils gawk curl wget bash nodejs git jq sed
- name: Get scripts
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Check out-of-date packages
run: ${{ github.workspace }}/.forgejo/bin/check_ver.sh
- name: Create issues
run: ${{ github.workspace }}/.forgejo/bin/create_issue.sh
check-community-3.21:
name: Check community(3.21) repo
runs-on: aarch64
container:
image: alpine:latest
env:
downstream: https://dl-cdn.alpinelinux.org/alpine/v3.21/community
ISSUE_TOKEN: ${{ secrets.issue_token }}
LABEL_NUMBER: 13
skip_package: 'all -dotnet8-runtime -dotnet9-runtime'
fix_only: all
steps:
- name: Environment setup
run: apk add grep coreutils gawk curl wget bash nodejs git jq sed
- name: Get scripts
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Check out-of-date packages
run: ${{ github.workspace }}/.forgejo/bin/check_ver.sh
- name: Create issues
run: ${{ github.workspace }}/.forgejo/bin/create_issue.sh
check-community-3.20:
name: Check community(3.20) repo
runs-on: aarch64
container:
image: alpine:latest
env:
downstream: https://dl-cdn.alpinelinux.org/alpine/v3.20/community
ISSUE_TOKEN: ${{ secrets.issue_token }}
LABEL_NUMBER: 13
skip_package: 'all -dotnet8-runtime'
fix_only: all
steps:
- name: Environment setup
run: apk add grep coreutils gawk curl wget bash nodejs git jq sed
- name: Get scripts
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Check out-of-date packages
run: ${{ github.workspace }}/.forgejo/bin/check_ver.sh
- name: Create issues
run: ${{ github.workspace }}/.forgejo/bin/create_issue.sh

View file

@ -0,0 +1,112 @@
on:
workflow_dispatch:
schedule:
- cron: '0 5 * * *'
jobs:
check-main-edge:
name: Check main(edge) repo
runs-on: aarch64
container:
image: alpine:latest
env:
downstream: https://dl-cdn.alpinelinux.org/alpine/edge/main
ISSUE_TOKEN: ${{ secrets.issue_token }}
LABEL_NUMBER: 4
steps:
- name: Environment setup
run: apk add grep coreutils gawk curl wget bash nodejs git jq sed
- name: Get scripts
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Check out-of-date packages
run: ${{ github.workspace }}/.forgejo/bin/check_ver.sh
- name: Create issues
run: ${{ github.workspace }}/.forgejo/bin/create_issue.sh
check-main-3.23:
name: Check main(3.23) repo
runs-on: aarch64
container:
image: alpine:latest
env:
downstream: https://dl-cdn.alpinelinux.org/alpine/v3.23/main
ISSUE_TOKEN: ${{ secrets.issue_token }}
LABEL_NUMBER: 13
fix_only: all
steps:
- name: Environment setup
run: apk add grep coreutils gawk curl wget bash nodejs git jq sed
- name: Get scripts
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Check out-of-date packages
run: ${{ github.workspace }}/.forgejo/bin/check_ver.sh
- name: Create issues
run: ${{ github.workspace }}/.forgejo/bin/create_issue.sh
check-main-3.22:
name: Check main(3.22) repo
runs-on: aarch64
container:
image: alpine:latest
env:
downstream: https://dl-cdn.alpinelinux.org/alpine/v3.22/main
ISSUE_TOKEN: ${{ secrets.issue_token }}
LABEL_NUMBER: 13
fix_only: all
steps:
- name: Environment setup
run: apk add grep coreutils gawk curl wget bash nodejs git jq sed
- name: Get scripts
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Check out-of-date packages
run: ${{ github.workspace }}/.forgejo/bin/check_ver.sh
- name: Create issues
run: ${{ github.workspace }}/.forgejo/bin/create_issue.sh
check-main-3.21:
name: Check main(3.21) repo
runs-on: aarch64
container:
image: alpine:latest
env:
downstream: https://dl-cdn.alpinelinux.org/alpine/v3.21/main
ISSUE_TOKEN: ${{ secrets.issue_token }}
LABEL_NUMBER: 13
fix_only: all
steps:
- name: Environment setup
run: apk add grep coreutils gawk curl wget bash nodejs git jq sed
- name: Get scripts
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Check out-of-date packages
run: ${{ github.workspace }}/.forgejo/bin/check_ver.sh
- name: Create issues
run: ${{ github.workspace }}/.forgejo/bin/create_issue.sh
check-main-3.20:
name: Check main(3.20) repo
runs-on: aarch64
container:
image: alpine:latest
env:
downstream: https://dl-cdn.alpinelinux.org/alpine/v3.20/main
ISSUE_TOKEN: ${{ secrets.issue_token }}
LABEL_NUMBER: 13
fix_only: all
steps:
- name: Environment setup
run: apk add grep coreutils gawk curl wget bash nodejs git jq sed
- name: Get scripts
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Check out-of-date packages
run: ${{ github.workspace }}/.forgejo/bin/check_ver.sh
- name: Create issues
run: ${{ github.workspace }}/.forgejo/bin/create_issue.sh

View file

@ -0,0 +1,50 @@
on:
workflow_dispatch:
schedule:
- cron: '0 5 * * *'
jobs:
check-pmos-edge:
name: Check pmos(edge) repo
runs-on: aarch64
container:
image: alpine:latest
env:
downstream: http://mirror.postmarketos.org/postmarketos/master
ISSUE_TOKEN: ${{ secrets.issue_token }}
LABEL_NUMBER: 14
skip_package: device-clockworkpi-uconsole-radxa-cm5 device-pine64-pinenote u-boot-radxa-cm5
steps:
- name: Environment setup
run: apk add grep coreutils gawk curl wget bash nodejs git jq sed
- name: Get scripts
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Check out-of-date packages
run: ${{ github.workspace }}/.forgejo/bin/check_ver.sh
- name: Create issues
run: ${{ github.workspace }}/.forgejo/bin/create_issue.sh
check-pmos-latest:
name: Check pmos(v25.12) repo
runs-on: aarch64
container:
image: alpine:latest
env:
downstream: http://mirror.postmarketos.org/postmarketos/v25.12
ISSUE_TOKEN: ${{ secrets.issue_token }}
LABEL_NUMBER: 14
fix_only: all
skip_package: device-clockworkpi-uconsole-radxa-cm5 device-pine64-pinenote u-boot-radxa-cm5
steps:
- name: Environment setup
run: apk add grep coreutils gawk curl wget bash nodejs git jq sed
- name: Get scripts
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Check out-of-date packages
run: ${{ github.workspace }}/.forgejo/bin/check_ver.sh
- name: Create issues
run: ${{ github.workspace }}/.forgejo/bin/create_issue.sh

View file

@ -5,15 +5,16 @@ on:
- cron: '0 5 * * *'
jobs:
check-community:
check-testing:
name: Check testing repo
runs-on: x86_64
runs-on: aarch64
container:
image: alpine:latest
env:
downstream: https://dl-cdn.alpinelinux.org/alpine/edge/testing
FORGEJO_TOKEN: ${{ secrets.forgejo_token }}
ISSUE_TOKEN: ${{ secrets.issue_token }}
LABEL_NUMBER: 4
skip_package: dotnet6-stage0 dotnet6-build
steps:
- name: Environment setup
run: apk add grep coreutils gawk curl wget bash nodejs git jq sed

View file

@ -7,13 +7,13 @@ on:
jobs:
check-user:
name: Check user repo
runs-on: x86_64
runs-on: aarch64
container:
image: alpine:latest
env:
downstream: https://ayakael.net/api/packages/forge/alpine/edge/user
FORGEJO_TOKEN: ${{ secrets.forgejo_token }}
LABEL_NUMBER: 4
ISSUE_TOKEN: ${{ secrets.issue_token }}
LABEL_NUMBER: 12
steps:
- name: Environment setup
run: apk add grep coreutils gawk curl wget bash nodejs git jq sed

View file

@ -0,0 +1,25 @@
on:
workflow_dispatch:
inputs:
target_repo:
description: 'target repo'
default: 'edge/user'
required: true
type: string
jobs:
clear-repo:
runs-on: x86_64
container:
image: alpine:latest
env:
TARGET_REPO: 'https://ayakael.net/api/packages/forge/alpine/${{ inputs.target_repo }}'
FORGE_REPO_TOKEN: ${{ secrets.FORGE_REPO_TOKEN }}
FORGE_REPO_USER: ${{ vars.FORGE_REPO_USER }}
steps:
- name: Setting up environment
run: apk add nodejs curl findutils git gawk
- name: Repo pull
uses: actions/checkout@v4
- name: Clear repo
run: ${{ github.workspace }}/.forgejo/bin/clear-repo.sh $TARGET_REPO

View file

@ -14,7 +14,9 @@ jobs:
CI_MERGE_REQUEST_PROJECT_URL: ${{ github.server_url }}/${{ github.repository }}
CI_MERGE_REQUEST_TARGET_BRANCH_NAME: ${{ github.base_ref }}
steps:
- run: doas apk add nodejs git
- run: |
doas apk upgrade -a
doas apk add nodejs git
- uses: actions/checkout@v4
with:
fetch-depth: 500

View file

@ -1,48 +0,0 @@
# Contributor: Kay Thomas <kaythomas@pm.me>
# Maintainer: Kay Thomas <kaythomas@pm.me>
pkgname=airsonic-advanced
_sha=1397446f979b1cdea283eec89ce4f0eae7d63450
pkgver=11.0.0_git20230217
pkgrel=0
pkgdesc="Modern implementation of the Airsonic fork with several key performance and feature enhancements"
url="https://github.com/airsonic-advanced/airsonic-advanced"
# inconsistent test and build failures on other arches
arch="x86_64"
license="GPL-3.0-or-later"
depends="openjdk11"
makedepends="maven"
subpackages="$pkgname-openrc"
pkgusers="airsonic-advanced"
pkggroups="airsonic-advanced"
install="$pkgname.pre-install"
source="$pkgname-$pkgver.tar.gz::https://github.com/airsonic-advanced/airsonic-advanced/archive/$_sha.tar.gz
maven.patch
airsonic-advanced.initd
"
builddir="$srcdir/$pkgname-$_sha"
build() {
mvn clean package -DskipTests
}
check() {
mvn test
}
package() {
install -dm755 -o airsonic-advanced -g airsonic-advanced \
"$pkgdir"/var/airsonic
install -m755 -o airsonic-advanced -g airsonic-advanced \
"$builddir"/airsonic-main/target/airsonic.war \
"$pkgdir"/var/airsonic/airsonic.war
install -Dm755 "$srcdir"/$pkgname.initd \
"$pkgdir"/etc/init.d/$pkgname
}
sha512sums="
f415620bdbed9fb3874afbf30d9362e68b1e9e8e90dbbed4ca3206b643cad97ca0558e64ec5b4440382f0ec908c3325e321ea3631c38ff9a2109163c8f0cfe0b airsonic-advanced-11.0.0_git20230217.tar.gz
6cb52fee19815fcdf2596e55d97d3e750321b1df7a4fec36fc9bc2a57d4be979a3905a42d3aa9dbeb2bf0d4f56edbf344f13551219b8e4d2ca583abd4bb5c8f9 maven.patch
ca87e6a7199950e6ac52aeb076a03f831d60ee9d4ceed47366bbd78443765d205796d895ebb244051d8033e5b2e9ccd648d20434039c854b8b50e766cc5cd10d airsonic-advanced.initd
"

View file

@ -1,14 +0,0 @@
#!/sbin/openrc-run
supervisor=supervise-daemon
name="airsonic-advanced"
command="/usr/lib/jvm/java-11-openjdk/jre/bin/java"
command_args="-jar airsonic.war"
command_user="airsonic-advanced:airsonic-advanced"
directory="/var/airsonic"
pidfile="/run/airsonic-advanced.pid"
depend() {
need net localmount
after firewall
}

View file

@ -1,6 +0,0 @@
#!/bin/sh
addgroup -S airsonic-advanced 2>/dev/null
adduser -S -D -H -s /sbin/nologin -G airsonic-advanced -g airsonic-advanced airsonic-advanced 2>/dev/null
exit 0

View file

@ -1,8 +0,0 @@
fixes maven 3.9 breaking change
https://maven.apache.org/docs/3.9.0/release-notes.html#potentially-breaking-core-changes
--- airsonic-advanced-1397446f979b1cdea283eec89ce4f0eae7d63450/.mvn/maven.config
+++ airsonic-advanced-1397446f979b1cdea283eec89ce4f0eae7d63450/.mvn/maven.config
@@ -1 +1,2 @@
---settings ./.mvn/settings.xml
+--settings
+./.mvn/settings.xml

View file

@ -0,0 +1,13 @@
diff --git a/setup/build.py b/setup/build.py
index 956ad7504f..aa9d7ea028 100644
--- a/setup/build.py
+++ b/setup/build.py
@@ -662,7 +662,7 @@ def build_headless(self):
f.seek(0), f.truncate()
f.write(raw)
bdir = os.path.join(bdir, 'build')
- cmd = [CMAKE]
+ cmd = [CMAKE, '-GUnix Makefiles']
if is_macos_universal_build:
cmd += ['-DCMAKE_OSX_ARCHITECTURES=x86_64;arm64']
if sw and os.path.exists(os.path.join(sw, 'qt')):

View file

@ -0,0 +1,46 @@
piper uses a function in espeak-ng that is upstreamed but not in a
release.
diff --git a/setup/extensions.json b/setup/extensions.json
index b39ce6d..c105031 100644
--- a/setup/extensions.json
+++ b/setup/extensions.json
@@ -134,14 +134,6 @@
"error": "!podofo_error",
"needs_c++": "17"
},
- {
- "name": "piper",
- "sources": "calibre/utils/tts/piper.cpp",
- "needs_c++": "17",
- "libraries": "!piper_libs",
- "lib_dirs": "!piper_lib_dirs",
- "inc_dirs": "!piper_inc_dirs"
- },
{
"name": "html_as_json",
"sources": "calibre/srv/html_as_json.cpp",
diff --git a/src/calibre/constants.py b/src/calibre/constants.py
index fa4b211459..7b27768953 100644
--- a/src/calibre/constants.py
+++ b/src/calibre/constants.py
@@ -258,7 +258,6 @@ def __init__(self):
'rcc_backend',
'icu',
'speedup',
- 'piper',
'html_as_json',
'fast_css_transform',
'fast_html_entities',
diff --git a/src/calibre/utils/run_tests.py b/src/calibre/utils/run_tests.py
index ffd0f95c04..c80a35f83d 100644
--- a/src/calibre/utils/run_tests.py
+++ b/src/calibre/utils/run_tests.py
@@ -192,6 +192,7 @@ def test_import_of_all_python_modules(self):
}
if 'SKIP_SPEECH_TESTS' in os.environ:
exclude_packages.add('calibre.gui2.tts')
+ exclude_modules.add('calibre.utils.tts.piper')
if not isbsd:
exclude_modules.add('calibre.devices.usbms.hal')
d = os.path.dirname

View file

@ -1,6 +1,6 @@
# Maintainer: Cowington Post <cowingtonpost@gmail.com>
pkgname=calibre
pkgver=7.21.0
pkgver=8.14.0
pkgrel=0
pkgdesc="Ebook management application"
# qt6-webengine
@ -41,11 +41,13 @@ depends="
qt6-qtimageformats
qt6-qtsvg
qt6-qtwebengine
qt6-qtbase-private-dev
udisks2
"
makedepends="
cmake
curl
ffmpeg-dev
hunspell-dev
hyphen-dev
libmtp-dev
@ -54,12 +56,12 @@ makedepends="
podofo-dev
py3-pyqt-builder
py3-pyqt6-sip
py3-qt6
py3-sip
python3-dev
qt6-qtbase-dev
uchardet-dev
xdg-utils
ffmpeg-dev
"
subpackages="
$pkgname-pyc
@ -69,12 +71,15 @@ subpackages="
"
source="https://download.calibre-ebook.com/$pkgver/calibre-$pkgver.tar.xz
0001-$pkgname-no-update.patch
0002-$pkgname-use-make.patch
0003-$pkgname-disable-piper.patch
"
# net: downloads iso-codes
# !check: no tests ran
options="net !check"
export LANG="en_US.UTF-8"
export PATH="$PATH:/usr/lib/qt6/bin"
prepare() {
default_prepare
@ -111,6 +116,8 @@ package() {
}
sha512sums="
0c2ee610833df83219c0c33b09e1374a8262f1630ccd48e3c4725c92922a3ac5d102ad83fc213457fb9de3efa4f5a2c98ff6dff039828e1661085a1054d7f631 calibre-7.21.0.tar.xz
edb32e47b083e10fbf53088e485737f3b61bb642ce6c4dd444e58a6618979c3b05b77ceffc4b8cb42e35eee7dcc2b94145abc22030ffd8b5de63e45b321fbf72 calibre-8.14.0.tar.xz
eb8e7ce40ff8b8daf6e7e55a5dff8ec4dff06c45744266bb48b3194e92ab1196bc91468203e3c2ca1e5144166a7d6be90e6cf0253513e761b56a4c85be4c2c76 0001-calibre-no-update.patch
bbb7253257073ae14840b3b4697943fe129d862b49cabd9388ea24cbd0259e68a1d359870334772164897f0c781db121de55fcdf5bccc841e36c021abe56f1ec 0002-calibre-use-make.patch
0efcf35944cd0f42d6f3572839647fc5c8336562db3f71655211d3de682e155b6d6fee4d281f9576201156e0bc828b6a579a8708a27791e4e4d604d456416954 0003-calibre-disable-piper.patch
"

View file

@ -1,8 +1,8 @@
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=caprine
pkgver=2.60.1
pkgrel=2
pkgver=2.60.3
pkgrel=6
pkgdesc="Elegant Facebook Messenger desktop app"
arch="x86_64 aarch64" # blocked by electron
url="https://github.com/sindresorhus/caprine"
@ -17,12 +17,10 @@ source="
"
build() {
npm install --ignore-scripts
npx --yes patch-package
npm ci --ignore-scripts
npx patch-package
npx tsc
rm -r node_modules
npm install --ignore-scripts --production
npx --yes patch-package
npm prune --ignore-scripts --omit=dev
}
package() {
@ -59,7 +57,7 @@ package() {
-or -name "test" -prune -exec rm -r '{}' \;
}
sha512sums="
0df7f233c91f5a044dcffde94b976c6ad71e6d355518615c48cd825a249c01d63f455de31ece69193a66ca0fd8157506f9b88088da1bd47fc75e9d3800784ed0 caprine-2.60.1.tar.gz
edf6452294b3c661befd9811c5836da33311171d587cb9a5939ac11a0c1e2a7ebbc4f2a8d81e02c1db1a2d814ac1aa7bbdadca9e21892cc8d7f7e9c23dc2e221 caprine-2.60.3.tar.gz
a469e3bea24926119e51642b777ef794c5fa65421107903f967c36d81bbb1adb3d52469ce3a3301b2c890f1aa53ab989ded22a7c6e811fb8cf0a582dbd835e19 caprine.desktop
3ad8994c1a0417e73d622587769e527b4236a32c1a89442ff76413b75b4392d667c9e2908979b453e5926e54db6d94b31625340c5a94e84e91ea77f56feae778 caprine.sh
"

View file

@ -1,36 +0,0 @@
# Contributor: Aiden Grossman <agrossman154@yahoo.com>
# Maintainer:
pkgname=coin
pkgver=4.0.0
pkgrel=7
pkgdesc="OpenGL OpenInventor compatible graphics library"
url="https://github.com/coin3d/coin"
license="BSD-3-Clause"
arch="all"
makedepends="boost-dev cmake glu-dev graphviz samurai"
subpackages="$pkgname-dev"
source="https://github.com/coin3d/coin/releases/download/Coin-$pkgver/coin-$pkgver-src.tar.gz
TestSuitePatch.patch
"
builddir="$srcdir/coin"
build() {
cmake -B build -G Ninja \
-DCMAKE_BUILD_TYPE=Release \
-DCMAKE_INSTALL_PREFIX=/usr \
-DCOIN_BUILD_TESTS=ON
cmake --build build
}
check() {
cmake --build build --target test
}
package() {
DESTDIR="$pkgdir" cmake --install build
}
sha512sums="
e036276a243bfe252569cee1b67d38b8633fcf35bdf4e366a92ca67e23799d54d91fe272c23b383c451d330cee284809f28f237857493948149e0da1ebd64fae coin-4.0.0-src.tar.gz
aab464244b13371badf0878e5bfbcce859a42756cf8c7657d1480318aa291d296eac2741219c346bae056f761c5f46857f8fd1ec1c4129f86bc10236d3869deb TestSuitePatch.patch
"

View file

@ -1,11 +0,0 @@
--- ./testsuite/TestSuiteUtils.cpp
+++ ./testsuite/TestSuiteUtils.cpp
@@ -39,7 +39,7 @@
#elif defined(_WIN32)
#define USE_WIN32
#else //_WIN32
-#error Unknown system
+#define USE_POSIX
#endif //POSIX
#include <Inventor/errors/SoDebugError.h>

View file

@ -1,76 +0,0 @@
# Contributor: Anjandev Momi <anjan@momi.ca>
# Maintainer: Anjandev Momi <anjan@momi.ca>
pkgname=cura
# uranium and curaengine packages must be updated in sync with this version number
# py3-pynest2d and fdm-materials should be checked as well, but their versions are not always in sync
pkgver=5.2.2
pkgrel=1
pkgdesc="3D printer / slicing GUI built on top of the Uranium framework"
url="https://ultimaker.com/software/ultimaker-cura"
# ppc64le: no py3-keyring
# x86: no curaengine
# armhf: no uranium, qt5-qtquickcontrols, qt5-qtquickcontrols2, qt5-qtgraphicaleffects
# riscv64: no uranium
# s390x: no py3-trimesh, no py3-numpy-stl
# armv7: no py3-trimesh
arch="noarch !ppc64le !x86 !armhf !riscv64 !s390x !armv7"
license="LGPL-3.0-or-later"
# add cura-binary-data to depends when packaged
depends="
curaengine
fdm-materials
uranium
py3-arcus
py3-keyring
py3-numpy-stl
py3-pyclipper
py3-pynest2d
py3-pyserial
py3-qt6
py3-requests
py3-trimesh
py3-zeroconf
"
makedepends="samurai cmake gettext gettext-dev" # needs msginit from gettext
checkdepends="py3-pytest"
subpackages="$pkgname-lang"
source="$pkgname-$pkgver.tar.gz::https://github.com/Ultimaker/Cura/archive/refs/tags/$pkgver.tar.gz
AppDesktopData.patch
CuraVersion.patch
cmake-helpers.patch
cmake.patch"
builddir="$srcdir/Cura-$pkgver"
options="!check" # tests broken after v5.x
# Configure and build with CMake/Ninja, pointing CMake at the interpreter
# version that will be present at runtime.
build() {
# Declare and assign separately so a python3 failure is not masked
# by `local` (abuild runs with set -e).
local pyver
pyver="$(python3 -c 'import sys; print(f"{sys.version_info.major}.{sys.version_info.minor}")')"
cmake -B build -G Ninja \
-DCURA_VERSION="$pkgver" \
-DPython_VERSION="$pyver" \
-DURANIUM_DIR=/usr/share/uranium \
-DCMAKE_INSTALL_PREFIX=/usr \
-DCMAKE_INSTALL_LIBDIR=lib \
-DGETTEXT_MSGINIT_EXECUTABLE=msginit \
-DCURA_BINARY_DATA_DIRECTORY=/usr/share/cura \
-DCMAKE_BUILD_TYPE=minsizerel
cmake --build build
}
# Stage the install, rename the launcher, strip the telemetry plugin and
# relocate translations to the standard locale directory for -lang splitting.
package() {
DESTDIR="$pkgdir" cmake --install build
mv "$pkgdir"/usr/bin/cura_app.py "$pkgdir"/usr/bin/cura
# don't ever send any user or print info through the internet to Ultimaker
rm -rf "$pkgdir/usr/lib/cura/plugins/SliceInfoPlugin"
install -d "$pkgdir"/usr/share/locale
mv "$pkgdir"/usr/share/cura/resources/i18n/* "$pkgdir"/usr/share/locale/
}
sha512sums="
5d4e0fdc740d0c048905e2b87cc8c73eedea59b54766b74760505902007b365582d22b46b1cfdcd6914828840865c10a3beb0ef6a1f04ea181c81d44f42434bc cura-5.2.2.tar.gz
214e373f6cab7e3ccac12c96d1b5ca636d8d1e9ecdadaae84fc28fb429969c7c2d6055ce2a01b6db3ad85ab6cbc8d135cf2c26c77d7cfe13a73eb81aa5e85f11 AppDesktopData.patch
e3bb302db70ca195b2ce9831e71302c8ee2a51955fecc7264a495d7d4fc9c107cfd48811aa5865f16671e7b1ae126f95d3d7bbb6a70f367f7f91a2b32bce377b CuraVersion.patch
0db4ff97e7f82ae1a9dbc9c330d08c3e46249feeb3fb630f7c4e2de73749327337ec041680c39a07e0b5034c1b3f3656d75614ab4dc2f39861c8e27bdb2a58ef cmake-helpers.patch
05a73f892700ff6279230385b04180873a62b7413fa7f7d55ae150f1bcee57ef05eda0bd7fe444fe660ab66a044c958f42badd33b743fca81033ae8f19dd3805 cmake.patch
"

View file

@ -1,58 +0,0 @@
--- /dev/null
+++ ./com.ultimaker.cura.appdata.xml
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Copyright 2016 Richard Hughes <richard@hughsie.com> -->
+<component type="desktop">
+ <id>com.ultimaker.cura.desktop</id>
+ <metadata_license>CC0-1.0</metadata_license>
+ <project_license>LGPL-3.0 and CC-BY-SA-4.0</project_license>
+ <name>Cura</name>
+ <summary>The world's most advanced 3d printer software</summary>
+ <description>
+ <p>
+ Cura creates a seamless integration between hardware, software and
+ materials for the best 3D printing experience around.
+ Cura supports the 3MF, OBJ and STL file formats and is available on
+ Windows, Mac and Linux.
+ </p>
+ <ul>
+ <li>Novices can start printing right away</li>
+ <li>Experts are able to customize 300 settings to achieve the best results</li>
+ <li>Optimized profiles for Ultimaker materials</li>
+ <li>Supported by a global network of Ultimaker certified service partners</li>
+ <li>Print multiple objects at once with different settings for each object</li>
+ <li>Cura supports STL, 3MF and OBJ file formats</li>
+ <li>Open source and completely free</li>
+ </ul>
+ </description>
+ <screenshots>
+ <screenshot type="default">
+ <image>https://raw.githubusercontent.com/Ultimaker/Cura/master/screenshot.png</image>
+ </screenshot>
+ </screenshots>
+ <url type="homepage">https://ultimaker.com/software/ultimaker-cura?utm_source=cura&amp;utm_medium=software&amp;utm_campaign=cura-update-linux</url>
+ <translation type="gettext">Cura</translation>
+</component>
--- /dev/null
+++ ./com.ultimaker.cura.desktop.in
@@ -0,0 +1,19 @@
+[Desktop Entry]
+Name=Ultimaker Cura
+Name[de]=Ultimaker Cura
+Name[nl]=Ultimaker Cura
+GenericName=3D Printing Software
+GenericName[de]=3D-Druck-Software
+GenericName[nl]=3D-printsoftware
+Comment=Cura converts 3D models into paths for a 3D printer. It prepares your print for maximum accuracy, minimum printing time and good reliability with many extra features that make your print come out great.
+Comment[de]=Cura wandelt 3D-Modelle in Pfade für einen 3D-Drucker um. Es bereitet Ihren Druck für maximale Genauigkeit, minimale Druckzeit und guter Zuverlässigkeit mit vielen zusätzlichen Funktionen vor, damit Ihr Druck großartig wird.
+Comment[nl]=Cura converteert 3D-modellen naar paden voor een 3D printer. Het bereidt je print voor om zeer precies, snel en betrouwbaar te kunnen printen, met veel extra functionaliteit om je print er goed uit te laten komen.
+Exec=@CMAKE_INSTALL_FULL_BINDIR@/cura %F
+TryExec=@CMAKE_INSTALL_FULL_BINDIR@/cura
+Icon=cura-icon
+Terminal=false
+Type=Application
+MimeType=model/stl;application/vnd.ms-3mfdocument;application/prs.wavefront-obj;image/bmp;image/gif;image/jpeg;image/png;text/x-gcode;application/x-amf;application/x-ply;application/x-ctm;model/vnd.collada+xml;model/gltf-binary;model/gltf+json;model/vnd.collada+xml+zip;
+Categories=Graphics;
+Keywords=3D;Printing;Slicer;
+StartupWMClass=cura.real

View file

@ -1,16 +0,0 @@
--- /dev/null
+++ ./cura/CuraVersion.py.in
@@ -0,0 +1,13 @@
+# Copyright (c) 2020 Ultimaker B.V.
+# Cura is released under the terms of the LGPLv3 or higher.
+
+CuraAppName = "@CURA_APP_NAME@"
+CuraAppDisplayName = "@CURA_APP_DISPLAY_NAME@"
+CuraVersion = "@CURA_VERSION@"
+CuraBuildType = "@CURA_BUILDTYPE@"
+CuraDebugMode = True if "@_cura_debugmode@" == "ON" else False
+CuraCloudAPIRoot = "@CURA_CLOUD_API_ROOT@"
+CuraCloudAPIVersion = "@CURA_CLOUD_API_VERSION@"
+CuraCloudAccountAPIRoot = "@CURA_CLOUD_ACCOUNT_API_ROOT@"
+CuraMarketplaceRoot = "@CURA_MARKETPLACE_ROOT@"
+CuraDigitalFactoryURL = "@CURA_DIGITAL_FACTORY_URL@"

View file

@ -1,95 +0,0 @@
--- /dev/null
+++ ./cmake/CuraPluginInstall.cmake
@@ -0,0 +1,92 @@
+# Copyright (c) 2022 Ultimaker B.V.
+# CuraPluginInstall.cmake is released under the terms of the LGPLv3 or higher.
+
+#
+# This module detects all plugins that need to be installed and adds them using the CMake install() command.
+# It detects all plugin folder in the path "plugins/*" where there's a "plugin.json" in it.
+#
+# Plugins can be configured to NOT BE INSTALLED via the variable "CURA_NO_INSTALL_PLUGINS" as a list of string in the
+# form of "a;b;c" or "a,b,c". By default all plugins will be installed.
+#
+
+option(PRINT_PLUGIN_LIST "Should the list of plugins that are installed be printed?" ON)
+
+# Options or configuration variables
+set(CURA_NO_INSTALL_PLUGINS "" CACHE STRING "A list of plugins that should not be installed, separated with ';' or ','.")
+
+file(GLOB_RECURSE _plugin_json_list ${CMAKE_SOURCE_DIR}/plugins/*/plugin.json)
+list(LENGTH _plugin_json_list _plugin_json_list_len)
+
+# Sort the lists alphabetically so we can handle cases like this:
+# - plugins/my_plugin/plugin.json
+# - plugins/my_plugin/my_module/plugin.json
+# In this case, only "plugins/my_plugin" should be added via install().
+set(_no_install_plugin_list ${CURA_NO_INSTALL_PLUGINS})
+# Sanitize the string so the comparison will be case-insensitive.
+string(STRIP "${_no_install_plugin_list}" _no_install_plugin_list)
+string(TOLOWER "${_no_install_plugin_list}" _no_install_plugin_list)
+
+# WORKAROUND counterpart of what's in cura-build.
+string(REPLACE "," ";" _no_install_plugin_list "${_no_install_plugin_list}")
+
+list(LENGTH _no_install_plugin_list _no_install_plugin_list_len)
+
+if(_no_install_plugin_list_len GREATER 0)
+ list(SORT _no_install_plugin_list)
+endif()
+if(_plugin_json_list_len GREATER 0)
+ list(SORT _plugin_json_list)
+endif()
+
+# Check all plugin directories and add them via install() if needed.
+set(_install_plugin_list "")
+foreach(_plugin_json_path ${_plugin_json_list})
+ get_filename_component(_plugin_dir ${_plugin_json_path} DIRECTORY)
+ file(RELATIVE_PATH _rel_plugin_dir ${CMAKE_CURRENT_SOURCE_DIR} ${_plugin_dir})
+ get_filename_component(_plugin_dir_name ${_plugin_dir} NAME)
+
+ # Make plugin name comparison case-insensitive
+ string(TOLOWER "${_plugin_dir_name}" _plugin_dir_name_lowercase)
+
+ # Check if this plugin needs to be skipped for installation
+ set(_add_plugin ON) # Indicates if this plugin should be added to the build or not.
+ set(_is_no_install_plugin OFF) # If this plugin will not be added, this indicates if it's because the plugin is
+ # specified in the NO_INSTALL_PLUGINS list.
+ if(_no_install_plugin_list)
+ if("${_plugin_dir_name_lowercase}" IN_LIST _no_install_plugin_list)
+ set(_add_plugin OFF)
+ set(_is_no_install_plugin ON)
+ endif()
+ endif()
+
+ # Make sure this is not a subdirectory in a plugin that's already in the install list
+ if(_add_plugin)
+ foreach(_known_install_plugin_dir ${_install_plugin_list})
+ if(_plugin_dir MATCHES "${_known_install_plugin_dir}.+")
+ set(_add_plugin OFF)
+ break()
+ endif()
+ endforeach()
+ endif()
+
+ if(_add_plugin)
+ if(${PRINT_PLUGIN_LIST})
+ message(STATUS "[+] PLUGIN TO INSTALL: ${_rel_plugin_dir}")
+ endif()
+ get_filename_component(_rel_plugin_parent_dir ${_rel_plugin_dir} DIRECTORY)
+ install(DIRECTORY ${_rel_plugin_dir}
+ DESTINATION lib${LIB_SUFFIX}/cura/${_rel_plugin_parent_dir}
+ PATTERN "__pycache__" EXCLUDE
+ PATTERN "*.qmlc" EXCLUDE
+ )
+ list(APPEND _install_plugin_list ${_plugin_dir})
+ elseif(_is_no_install_plugin)
+ if(${PRINT_PLUGIN_LIST})
+ message(STATUS "[-] PLUGIN TO REMOVE : ${_rel_plugin_dir}")
+ endif()
+ execute_process(COMMAND ${Python_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/cmake/mod_bundled_packages_json.py
+ -d ${CMAKE_CURRENT_SOURCE_DIR}/resources/bundled_packages
+ ${_plugin_dir_name}
+ RESULT_VARIABLE _mod_json_result)
+ endif()
+endforeach()

View file

@ -1,85 +0,0 @@
--- ./CMakeLists.txt.orig
+++ ./CMakeLists.txt
@@ -1,10 +1,6 @@
# Copyright (c) 2022 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
-# NOTE: This is only being used for translation scripts.
-
-# For MSVC flags, will be ignored on non-Windows OS's and this project in general. Only needed for cura-build-environment.
-cmake_policy(SET CMP0091 NEW)
project(cura)
cmake_minimum_required(VERSION 3.18)
@@ -15,8 +11,44 @@
set(URANIUM_DIR "${CMAKE_SOURCE_DIR}/../Uranium" CACHE PATH "The location of the Uranium repository")
set(URANIUM_SCRIPTS_DIR "${URANIUM_DIR}/scripts" CACHE PATH "The location of the scripts directory of the Uranium repository")
+option(CURA_DEBUGMODE "Enable debug dialog and other debug features" OFF)
+if(CURA_DEBUGMODE)
+ set(_cura_debugmode "ON")
+endif()
+
option(GENERATE_TRANSLATIONS "Should the translations be generated?" ON)
+set(CURA_APP_NAME "cura" CACHE STRING "Short name of Cura, used for configuration folder")
+set(CURA_APP_DISPLAY_NAME "Ultimaker Cura" CACHE STRING "Display name of Cura")
+set(CURA_VERSION "master" CACHE STRING "Version name of Cura")
+set(CURA_BUILDTYPE "" CACHE STRING "Build type of Cura, eg. 'PPA'")
+set(CURA_CLOUD_API_ROOT "" CACHE STRING "Alternative Cura cloud API root")
+set(CURA_CLOUD_API_VERSION "" CACHE STRING "Alternative Cura cloud API version")
+set(CURA_CLOUD_ACCOUNT_API_ROOT "" CACHE STRING "Alternative Cura cloud account API version")
+set(CURA_MARKETPLACE_ROOT "" CACHE STRING "Alternative Marketplace location")
+set(CURA_DIGITAL_FACTORY_URL "" CACHE STRING "Alternative Digital Factory location")
+
+configure_file(${CMAKE_SOURCE_DIR}/com.ultimaker.cura.desktop.in ${CMAKE_BINARY_DIR}/com.ultimaker.cura.desktop @ONLY)
+
+configure_file(cura/CuraVersion.py.in CuraVersion.py @ONLY)
+
+if(NOT DEFINED Python_VERSION)
+ set(Python_VERSION
+ 3.11
+ CACHE STRING "Python Version" FORCE)
+ message(STATUS "Setting Python version to ${Python_VERSION}. Set Python_VERSION if you want to compile against an other version.")
+endif()
+if(APPLE)
+ set(Python_FIND_FRAMEWORK NEVER)
+endif()
+find_package(Python ${Python_VERSION} EXACT REQUIRED COMPONENTS Interpreter)
+message(STATUS "Linking and building ${project_name} against Python ${Python_VERSION}")
+if(NOT DEFINED Python_SITELIB_LOCAL)
+ set(Python_SITELIB_LOCAL
+ "${Python_SITELIB}"
+ CACHE PATH "Local alternative site-package location to install Cura" FORCE)
+endif()
+
if(NOT ${URANIUM_DIR} STREQUAL "")
set(CMAKE_MODULE_PATH "${CMAKE_MODULE_PATH};${URANIUM_DIR}/cmake")
endif()
@@ -29,4 +61,24 @@
if(${GENERATE_TRANSLATIONS})
CREATE_TRANSLATION_TARGETS()
endif()
-endif()
\ No newline at end of file
+endif()
+
+install(DIRECTORY resources DESTINATION ${CMAKE_INSTALL_DATADIR}/cura)
+
+include(CuraPluginInstall)
+
+install(FILES cura_app.py DESTINATION ${CMAKE_INSTALL_BINDIR}
+ PERMISSIONS OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ WORLD_EXECUTE)
+install(DIRECTORY cura DESTINATION "${Python_SITELIB_LOCAL}")
+install(FILES ${CMAKE_BINARY_DIR}/CuraVersion.py DESTINATION "${Python_SITELIB_LOCAL}/cura/")
+if(NOT APPLE AND NOT WIN32)
+ install(FILES ${CMAKE_BINARY_DIR}/com.ultimaker.cura.desktop
+ DESTINATION ${CMAKE_INSTALL_DATADIR}/applications)
+ install(FILES ${CMAKE_SOURCE_DIR}/resources/images/cura-icon.png
+ DESTINATION ${CMAKE_INSTALL_DATADIR}/icons/hicolor/128x128/apps/)
+ install(FILES com.ultimaker.cura.appdata.xml
+ DESTINATION ${CMAKE_INSTALL_DATADIR}/metainfo)
+ install(FILES cura.sharedmimeinfo
+ DESTINATION ${CMAKE_INSTALL_DATADIR}/mime/packages/
+ RENAME cura.xml )
+endif()

View file

@ -1,26 +0,0 @@
# Contributor: Anjandev Momi <anjan@momi.ca>
# Maintainer: Anjandev Momi <anjan@momi.ca>
pkgname=dex
pkgver=0.9.0
pkgrel=1
pkgdesc="program to generate and execute DesktopEntry files of the Application type"
url="https://github.com/jceb/dex"
arch="all"
license="GPL-3.0-or-later"
depends="python3"
makedepends="py3-sphinx"
subpackages="$pkgname-doc"
source="$pkgname-$pkgver.tar.gz::https://github.com/jceb/dex/archive/refs/tags/v$pkgver.tar.gz"
options="!check" # no testsuite
# Upstream Makefile builds the script and its sphinx-generated man page.
build() {
make
}
# Install binary and man page into the staging directory; man pages are
# picked up by the -doc subpackage.
package() {
make install PREFIX=/usr MANPREFIX=/usr/share/man DESTDIR="$pkgdir"
}
sha512sums="
d68f5482cb0948f27a724437ddfc6de9a0f502bfd0d5c60c76fb85dda3c30e4c432013e530f6a91138c9ac9ff36b3824cd5e382e9d29bb9fb2ec2b9de4133094 dex-0.9.0.tar.gz
"

View file

@ -0,0 +1,21 @@
From adbc495726382c023b755c35aea36c6e9cad1950 Mon Sep 17 00:00:00 2001
From: LN Liberda <lauren@selfisekai.rocks>
Date: Sat, 23 Aug 2025 03:11:09 +0200
Subject: [PATCH] hotfix: ignore a new warning in rust 1.89
---
third_party/rust/chromium_crates_io/vendor/qr_code-v2/src/lib.rs | 1 -
1 file changed, 1 deletion(-)
diff --git a/third_party/rust/chromium_crates_io/vendor/qr_code-v2/src/lib.rs b/third_party/rust/chromium_crates_io/vendor/qr_code-v2/src/lib.rs
index 1b729621c2f47..55d392c6da72f 100644
--- a/third_party/rust/chromium_crates_io/vendor/qr_code-v2/src/lib.rs
+++ b/third_party/rust/chromium_crates_io/vendor/qr_code-v2/src/lib.rs
@@ -5,7 +5,6 @@
//!
#![deny(missing_docs)]
-#![deny(warnings)]
#![allow(
clippy::must_use_candidate, // This is just annoying.
clippy::use_self, // Rust 1.33 doesn't support Self::EnumVariant, let's try again in 1.37.

View file

@ -1,23 +1,23 @@
# Contributor: lauren n. liberda <lauren@selfisekai.rocks>
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=electron
pkgver=33.2.1
pkgver=39.2.7
_gittag=v"${pkgver/_beta/-beta.}"
pkgrel=0
_chromium=130.0.6723.127
_copium_tag=129.1
_chromium=142.0.7444.235
_copium_tag=142.0
_depot_tools=495b23b39aaba2ca3b55dd27cadc523f1cb17ee6
pkgdesc="Electron cross-platform desktop toolkit"
url="https://github.com/electron/electron"
# armv7: Segmentation fault on builder despite building in CI
arch="aarch64 x86_64" # same as chromium
license="MIT"
depends="gtk+3.0 so:libudev.so.1 xdg-utils"
_llvmver=19
_llvmver=21
makedepends="
ada-dev
alsa-lib-dev
aom-dev
base64-dev
bash
brotli-dev
bsd-compat-headers
@ -25,8 +25,8 @@ makedepends="
c-ares-dev
cairo-dev
clang$_llvmver-dev
clang$_llvmver-rtlib
clang-extra-tools
compiler-rt
crc32c-dev
cups-dev
curl-dev
@ -45,6 +45,7 @@ makedepends="
gn
gzip
harfbuzz-dev
highway-dev
hdrhistogram-c-dev
hunspell-dev
http-parser-dev
@ -59,7 +60,6 @@ makedepends="
libbsd-dev
libcap-dev
libdrm-dev
libevent-dev
libexif-dev
libgcrypt-dev
libjpeg-turbo-dev
@ -76,7 +76,8 @@ makedepends="
libxscrnsaver-dev
libxslt-dev
linux-headers
lld
lld$_llvmver
llhttp-dev
llvm$_llvmver
mesa-dev
minizip-dev
@ -96,19 +97,19 @@ makedepends="
py3-setuptools
py3-six
python3
qt5-qtbase-dev
re2-dev
rsync
rust
rust-bindgen
samurai
simdutf-dev
snappy-dev
speex-dev
spirv-tools-dev
sqlite-dev
woff2-dev
xcb-proto
yarn
yarn-berry
zlib-dev
zstd-dev
"
@ -117,19 +118,20 @@ subpackages="$pkgname-lang $pkgname-dev"
source="
https://ayakael.net/api/packages/mirrors/generic/electron/$_gittag/electron-$_gittag-$_chromium.tar.zst
copium-$_copium_tag.tar.gz::https://codeberg.org/selfisekai/copium/archive/$_copium_tag.tar.gz
chromium-revert-drop-of-system-java.patch
0001-hotfix-ignore-a-new-warning-in-rust-1.89.patch
compiler.patch
disable-dns_config_service.patch
disable-failing-tests.patch
fc-cache-version.patch
fix-opus.patch
fix-ffmpeg-codec-list.patch
fstatat-32bit.patch
gdbinit.patch
generic-sensor-include.patch
musl-auxv.patch
headless-shell-no-license.patch
musl-sandbox.patch
musl-tid-caching.patch
musl-v8-monotonic-pthread-cont_timedwait.patch
net-test-no-vpython.patch
net-test-pyws3-py3.12.patch
no-execinfo.patch
no-mallinfo.patch
no-res-ninit-nclose.patch
@ -137,41 +139,47 @@ source="
partalloc-no-tagging-arm64.patch
pvalloc.patch
temp-failure-retry.patch
yes-musl.patch
electron_icon.patch
electron_python-jinja-3.10.patch
electron_webpack-hash.patch
electron_unbundle-node.patch
electron_system-zlib-headers.patch
electron_do-not-strip-binaries.patch
electron_shell-file-dialog-drop-glibc.patch
electron_use-system-yarn.patch
default.conf
electron.desktop
electron-launcher.sh
"
_copium_patches="
cr124-iwyu-sys-select-dawn-terminal.patch
cr126-aarch64-musl-unsupported-relocation.patch
cr129-ffmpeg-no-noh264parse.patch
cr129-musl-metricts-imports.patch
cr131-v8-non4k-pages.patch
cr133-ffmpeg-no-noh264parse.patch
cr133-is-musl-libcxx.patch
cr138-node-version-check.patch
cr140-musl-prctl.patch
cr142-autofill-incomplete-formfielddata.patch
"
# Avoid conflicting providers
sonameprefix="$pkgname:"
# tests are todo for some base checks
options="!check net suid"
builddir="$srcdir/electron-$_gittag-$_chromium"
export PATH="$PATH:/usr/lib/qt5/bin"
export CC=clang-$_llvmver
export CXX=clang++-$_llvmver
export CC="/usr/lib/llvm$_llvmver/bin/clang"
export CXX="/usr/lib/llvm$_llvmver/bin/clang++"
# required to find the tools
export AR=llvm-ar
export NM=llvm-nm
export LD=clang++-$_llvmver
export NM="/usr/lib/llvm$_llvmver/bin/llvm-nm"
export AR="/usr/lib/llvm$_llvmver/bin/llvm-ar"
export LD="/usr/lib/llvm$_llvmver/bin/clang++"
# less log spam, reproducible
export CFLAGS="${CFLAGS/-g/} -O2 -Wno-builtin-macro-redefined -Wno-deprecated-declarations -Wno-shift-count-overflow -Wno-ignored-attributes"
export CXXFLAGS="${CXXFLAGS/-g/} -O2 -Wno-builtin-macro-redefined -Wno-deprecated-declarations -Wno-invalid-constexpr"
# _LIBCPP_HARDENING_MODE=_LIBCPP_HARDENING_MODE_EXTENSIVE is set by project
export CXXFLAGS="${CXXFLAGS/-D_LIBCPP_HARDENING_MODE=_LIBCPP_HARDENING_MODE_FAST/}"
export CPPFLAGS="${CPPFLAGS/-g/} -D__DATE__= -D__TIME__= -D__TIMESTAMP__="
case "$CARCH" in
aarch64|arm*|riscv64)
@ -184,6 +192,10 @@ esac
# breaks chromium-based stuff
export CXXFLAGS="${CXXFLAGS/-D_GLIBCXX_ASSERTIONS=1}"
# workaround to error: undefined symbol: __rustc::__rust_dealloc
# with 000*.patch patches
export RUSTC_BOOTSTRAP=1
# creates a dist tarball that does not need to git clone everything at build time.
_distbucket="sakamoto/lnl-aports-snapshots/"
snapshot() {
@ -224,8 +236,8 @@ snapshot() {
--nohooks
python3 src/build/landmines.py
python3 src/build/util/lastchange.py -o src/build/util/LASTCHANGE \
--revision src/gpu/webgpu/DAWN_VERSION
python3 src/build/util/lastchange.py -m DAWN_COMMIT_HASH -s src/third_party/dawn \
--revision src/gpu/webgpu/DAWN_VERSION --header src/gpu/webgpu/dawn_commit_hash.h
python3 src/build/util/lastchange.py -m GPU_LISTS_VERSION \
--revision-id-only --header src/gpu/config/gpu_lists_version.h
python3 src/build/util/lastchange.py -m SKIA_COMMIT_HASH \
@ -260,6 +272,8 @@ snapshot() {
mcli cp "$SRCDEST"/$pkgname-$_gittag-$_chromium.tar.zst "$_distbucket"
}
export RUSTC_BOOTSTRAP=1
prepare() {
default_prepare
@ -297,7 +311,7 @@ prepare() {
git commit -m "init"
git tag "$_gittag"
git pack-refs
yarn install --frozen-lockfile --ignore-scripts
yarn install --immutable --mode=skip-build
)
(
@ -305,13 +319,19 @@ prepare() {
./update_npm_deps
)
# generate dawn_commit_hash
# TODO: remove on next update as it'll be generated after
# https://ayakael.net/mirrors/electron/commit/7623f4a14ab44fa4f4343e47d9d681c9b4aa984c
python3 build/util/lastchange.py -m DAWN_COMMIT_HASH -s third_party/dawn \
--revision gpu/webgpu/DAWN_VERSION --header gpu/webgpu/dawn_commit_hash.h
# reusable system library settings
# flatbuffers - tensorflow has a few static_asserts for a specific patch version
# highway - requires highway>=1.1.0 (arm failures)
# libavif - https://github.com/AOMediaCodec/libavif/commit/50a541469c98009016af8dcc9f83a1be79f3a7d9
# libaom - https://aomedia.googlesource.com/aom/+/706ee36dcc82%5E%21/
# but watch this space: https://aomedia-review.googlesource.com/c/aom/+/188606
# jsoncpp, re2, snappy, swiftshader-*, woff2 - requires use_custom_libcxx=false
# icu 76 does not build - https://bugs.gentoo.org/943216
local chromium_use_system="
brotli
crc32c
@ -322,9 +342,8 @@ prepare() {
fontconfig
freetype
harfbuzz-ng
icu
highway
libdrm
libevent
libjpeg
libsecret
libusb
@ -333,6 +352,7 @@ prepare() {
libxslt
openh264
opus
simdutf
zlib
zstd
"
@ -352,10 +372,9 @@ prepare() {
-delete
done
# llhttp - 9.x needed, 8.x in repo (2023-12-17)
# ada - needs use_custom_libcxx=false
local node_use_system="
base64
llhttp
brotli
cares
corepack
@ -412,6 +431,8 @@ prepare() {
third_party/blink/renderer/core/xml/parser/xml_document_parser.cc \
third_party/libxml/chromium/*.cc
echo "$CTARGET" >> build/rust/known-target-triples.txt
_configure
}
@ -445,10 +466,9 @@ _configure() {
custom_toolchain=\"//build/toolchain/linux/unbundle:default\"
disable_fieldtrial_testing_config=true
enable_hangout_services_extension=true
enable_nacl=false
enable_nocompile_tests=false
enable_stripping=false
enable_rust=true
enable_stripping=false
enable_vr=false
fatal_linker_warnings=false
ffmpeg_branding=\"Chrome\"
@ -458,8 +478,10 @@ _configure() {
is_clang=true
is_component_ffmpeg=true
is_debug=false
is_musl=true
is_official_build=true
link_pulseaudio=true
node_version_check=false
proprietary_codecs=true
rtc_link_pipewire=true
rtc_use_pipewire=true
@ -469,6 +491,7 @@ _configure() {
safe_browsing_use_unrar=false
symbol_level=$symbol_level
treat_warnings_as_errors=false
use_clang_modules=false
use_custom_libcxx=true
use_lld=true
use_pulseaudio=true
@ -482,12 +505,11 @@ _configure() {
skia_use_dawn=false
use_dawn=false
use_system_ada=false
use_system_base64=true
use_system_cares=true
use_system_histogram=true
use_system_lcms2=true
use_system_libffi=true
use_system_llhttp=false
use_system_llhttp=true
use_system_nghttp2=true
"
@ -497,14 +519,13 @@ _configure() {
}
build() {
export PATH="$PATH:/usr/lib/qt5/bin"
export ELECTRON_OUT_DIR="$builddir"/out/Release/
ninja -C out/Release \
copy_node_headers \
electron_dist_zip \
node_gypi_headers \
node_version_header
}
package() {
@ -544,34 +565,37 @@ lang() {
}
sha512sums="
e2df4454f4178af859c13aadee4ea04a5b6aa202972cad625e54bc68f5b8c25e098e50d428ec9c1886c37ccf49aaaedb4c5f02fc8bdd498314ba216901932185 electron-v33.2.1-130.0.6723.127.tar.zst
6138b3dbf3903c78f4ca1ed5a6c3c3c485471ded31976010484ce8893d03953df2b8f066a4fe84bbde5ae7ef9bbff664ef917e247b2e95dd471de40f2774d7d0 copium-129.1.tar.gz
29bb685e03356a77df5fd347cdf55194cc8b3265c421cc76e54d64edefc329dbcb052deb26b22e8f587ce68456876c071de1b7d258dd0fcc6ee66c875ec4a020 chromium-revert-drop-of-system-java.patch
53b7cdee8f7bfb4c9371cb385c473e34ed3d8ac7efaa43c0af061107560be30d8747b07fb0b16c01079b8c770f2c721bb5a8081313b7c126856ea4078a74da2a compiler.patch
4057cc78f10bfd64092bc35a373869abb1d68b880cdbca70422f39ffd78a929c19c7728d4d4c40709aaba25581148a93ae5343e724849fd35323062ed68753fa disable-dns_config_service.patch
2470904846e3adde2c9506f9e78220daca0932320b628dd3d427bf2b7c17a8f7880cb97e787b046c28de7aca642e1a8d30824d6049905976da77e7473baa64da disable-failing-tests.patch
5fc5c012c1db6cf1ba82f38c6f3f4f5ca3a209e47ac708a74de379b018e0649b7694877c9571ef79002dde875ffc07b458a3355425f1c01867f362c66c2bc1bf fc-cache-version.patch
b24563e9a738c00fce7ff2fbdee3d7c024d9125d7c74d9ab90af6bdb16f7ec8419f2c8aa78c0640f6d5d81c17dc2c673a194401d354f466749672729b48ed068 fix-opus.patch
6dc7161f6df396e2b7569b0a607e264b43a2d7215de65164dc2ca04c019df93ea0a67dec2490071c09c8a03f90605faaf3880f2d843f838bb5d841bba204c298 electron-v39.2.7-142.0.7444.235.tar.zst
30b298549804e7753b0b639b72417ba081e964676862b6c7d73ad73cdf806883f20e4a4b36e67a6c375eaf2dd97686cf21b90b062400d3b61fba86da4d239bfa copium-142.0.tar.gz
69b45005451ccd69c354b4c2910e92371cb801665f5e300dbecd36f8bc4ce68e77a431b5dac07c0937787debb4e93b7aadefa0a1e76c4ae334d2547ca3ca14ff 0001-hotfix-ignore-a-new-warning-in-rust-1.89.patch
dc254dd79e135aeac3e9c03eb055e3bc17980fc213f8c4d8d7921a575be7f9c26b91f110a6dcb01c0a824a7d9375c09f8a61c8858c20c11d79c03f873e2cb3f9 compiler.patch
1bee1448e409fedff635388ee6f1efa6d23c29ae3e6b6fd31452c56974adb40fcd0088c82d1e643d549154663e402942cbab9807dff5aff2d8997a09de6f5655 disable-dns_config_service.patch
0ef9168b8b1a4779bc4c8df718735e06d29e459dcfd00f8cbf9a4edaf9fade8089225219e46dead7de81de716bddc8d745dc2069db0ee7f7e5d2f64c5236e2ab disable-failing-tests.patch
0050857a9a9553c10fd502fe70606bce48269c9b48fa82ce9e111575637a0c03578e923c82fc639fcb574fc3337aeef50d8a0aea5e512ae4eab83b8c3d732cf6 fc-cache-version.patch
87f63d83139562e058f3f649eb1f62bf100dd92c2bb6ee393fdce0c8f7d7c188a7062394647aafe4e82c0a8fbbffeb613edc5c8dd9415dd9dda777827ea371c5 fix-ffmpeg-codec-list.patch
c63dee5044353eb306a39ca1526158c0f003ab310ecb03d1c368dc2a979454590c84b8d3c15484517d5e66bb8add9b231da9abbadf2e50850abd72ac1345c4ab fstatat-32bit.patch
33ee60863cc438ef57ffef92ba4cf67a856a5ffc16138bce241bcf87e47b15154aa86918e793c26f7ec4dc62a445257ad5673ed7001daf22c4043cf6cc57da7f gdbinit.patch
36a764fa73443b47d38050b52dbe6ad2fa8d67201ff4ccdbad13b52308ef165ca046aac6f9609fe35890a6485f0f3e672e78cc41e3e44f3cdc7f145e540524e8 generic-sensor-include.patch
99bcc7dd485b404a90c606a96addab1d900852128d44fb8cea8acc7303189ef87c89a7b0e749fd0e10c5ef5f6bf1fadeb5c16a34503cab6a59938ce2653d887e musl-auxv.patch
a94cf7a0670abf5178abba33c619cc6d41d73f2e16c7a1fd5b152152f5077df103e049d166e3b8627797c38113821d2f2e6b64cd48d132c1e90ad32d63a349f5 headless-shell-no-license.patch
51f1959bd622af26a1c3a1f4b0ad9a5bfa461057aa4cf9960c568dddf8ac47d55989c277f5d5ab5db040a04c54925a531af7a1cc767559218b408eaa6bdd7577 musl-sandbox.patch
e7163ac5810ac85366cef2447412287c856e3d67c6b77f219a6e5a418b1965b98e449c409424ad0704a5bded9355dd0aec3dc4585918ce5a2ab36c079707afe2 musl-tid-caching.patch
92eb002718026611f5542362ad69b67f0a398ff71b3fca5c05d55cb5c6f9f29334e5e127bb4860cfaa3fba0f0d4c901e2b98808217e7dc02e254a64a5c9521aa musl-v8-monotonic-pthread-cont_timedwait.patch
3b7420d58d13dfc4baab5065e3017f666f51fed6de087af42a660a839d7b4444b50d1a93204322d213df36c6722eaf6b08d46d50dc374198a342da2675fafff5 net-test-no-vpython.patch
e487662b6606ea526ddd716c31e6b9ad3d61f1bee5356cd94b78a903efb3928338cbb48e3d5840b34c3b70a71e8361a228430bd50e707ad301228a7049d59e37 net-test-pyws3-py3.12.patch
a250cff50d282b02ce0f28880d0a2b4fb8e7df51bc072bfeeddc561c29a7c76453dbcbc7b17b82966a7b30a31409d2555720d1dcf963e1b3fb8a2a06a6abcf46 no-execinfo.patch
0b41aeb6b212f9c3f61aa0a8d3085c9e865a2e68f3270ceec2376aab67f337ac46eaea7da36d3fd7219e2a1cb731b7aa2d3fb619a374d2b7653976b9f4f384bb no-mallinfo.patch
e4c4e5bc6f828f9c883dd418c0ba01887949c29c311f76206a1ec29f620b0c0ba0452949dc2778a9c46ea066405857536964a36436a68eecf7da7952736333cf no-res-ninit-nclose.patch
6dc4d8dc92e685dace62265a1ddb3aebc558aed54d20ff6d36b030be0c48d7e84662326c31363612492574d9a03c62653cdc21a60995b97dee1d75cae86a9f9b no-sandbox-settls.patch
f2b08538ff57c50b3772a07ca91845f9d45f4a5112f608b6192d4fb5d7be48f478c0c36194d95ab7bbf933e0278e5c6d578619d8643895cdc40386eebc5b975f partalloc-no-tagging-arm64.patch
b75908a45ee2f4f806eec8d86fca2f51fda3531b88de48ef4539c364a40d7e2897cdaf38b715682d712648e3f43aac983055e688385f85fa7b7204ffb6d617e1 partalloc-no-tagging-arm64.patch
03f829a2da633533ef3fd0f287f5ec602d936a97a98b53cd2415553c2537ae9d571f35397ca7c9fb3f4b0806c300e3b189569f8d979ca132e1a2a4dae7206396 pvalloc.patch
e48693e6b7aeebf69a5acbf80d9a35defe4c23835121dfeb58b051ac7c527e758a41004f4d193274fe1b01c0bfb1dbc77b09cb6a404a3fdee507a2918afb0edb temp-failure-retry.patch
914ccf649d7771f19f209ab97f99c481aebc6f66174d68e8b539f6ad4a70bc8cb0fae2df6dadbf0415958ffb3574c420fe029079dcce45f5e5add4db2e903566 yes-musl.patch
465107da7818b237e3c144a318ab80c3c9343b51ed38b8971ef204692d13346929becbe94cefad4c153788d3a200642143584d5ca070f6304e768ba2139c19ec electron_icon.patch
e05180199ee1d559e4e577cedd3e589844ecf40d98a86321bf1bea5607b02eeb5feb486deddae40e1005b644550331f6b8500177aa7e79bcb3750d3c1ceb76c3 electron_python-jinja-3.10.patch
2aa340854316f1284217c0ca17cbf44953684ad6c7da90815117df30928612eb9fb9ffb734b948dfc309cd25d1a67cd57f77aac2d052a3dd9aca07a3a58cbb30 electron_webpack-hash.patch
57aa81d46b9cc931092d9d9b3cb4a9859f86c183a236bc5cca6abbaeca86b82bf1b537dd9cb3412114fa4e86087c0022ee3f7e88de974d29b309e9d1714df7a5 electron_unbundle-node.patch
1b35edcf0b41e39e20c4d64dbb978bcaab8036f2fe839930709b269c50cb1321458a15b4d0013246f9e03f58f250a1e3a57ea910db1aa0adbd602a6a11ad33b9 electron_system-zlib-headers.patch
c7f57929943a86f9e5f333da9d5691da88038770eeb46dd0a0719962c934deb2879f0e7a1ed714e9383e38ee4d68eb754501f362c4d7cdee76cfc2e980b21272 electron_unbundle-node.patch
4d9287d4cdfe27fbfb7be3d4b26c0c40edbd6a0c3ff926d60f2093ca09c15bcb58e20c2ccc8c0606aafd66c6d25a54225bc329cb056d8c5b297db4c6d0e768e6 electron_system-zlib-headers.patch
7031ddb61a858e95d83366185a53b5a2e4be9abe0aa4957543e0621cad57175ffef31bd87b8be25255184bb4cb30ec4fbced055407c6c8c7940c9e240b25d498 electron_do-not-strip-binaries.patch
0f8f36c21cc50c80e378691265845ff10fa53953d6cd5352fe71efcba489f956e50d374d8f634dadc3569c4901a81a1f308a3e69140c0f9136e0777022b9520f electron_shell-file-dialog-drop-glibc.patch
3fd20144ed171cf9706899a1481141c7fa3e98b17d600cdc5a3a68ba39059cebd9e5ccb5534af3e262f689df381bc3cb630ac24e46dd6f6c72eac4f4b6b14b35 electron_use-system-yarn.patch
e8ea87c547546011c4c8fc2de30e4f443b85cd4cfcff92808e2521d2f9ada03feefb8e1b0cf0f6b460919c146e56ef8d5ad4bb5e2461cc5247c30d92eb4d068e default.conf
191559fc7aa1ea0353c6fb0cc321ee1d5803a0e44848c8be941cfab96277b0de6a59962d373e2a2a1686c8f9be2bcf2d2f33706759a339a959e297d3f7fda463 electron.desktop
5f7ba5ad005f196facec1c0f26108356b64cafb1e5cfa462ff714a33b8a4c757ac00bfcb080da09eb5b65032f8eb245d9676a61ec554515d125ed63912708648 electron-launcher.sh

View file

@ -0,0 +1,59 @@
# electron
This is the `electron` package for Alpine Linux.
Please report any issues [using Gitlab](https://gitlab.alpinelinux.org/alpine/aports/-/issues/new) and tag @ayakael
## Building electron
Electron is an application framework based on `chromium`. Just like `chromium`,
and any Google application, the build process is a form of [hostile
architecture](https://en.wikipedia.org/wiki/Hostile_architecture). It's quite
literally chromium with patches applied on top for the most part. The build
process applies a series of git patches against `chromium` from directories
with a script.
Its source code isn't available as a downloadable tarball. It is only fetchable
using Google's `gclient` available in `depot_tools` with a reimplemented
version in the `teapot` package. By executing `abuild snapshot`, the tarball
can be fetched and packaged, as long as `gclient` is in your path. For ease of
maintenance, a workflow on [Ayakael's Forge](https://ayakael.net/mirrors/electron)
automatically fetches and packages the source code on new releases and makes it
available in a [generic Forgejo repository](https://ayakael.net/mirrors/-/packages/generic/electron).
## Electron maintenance cycle
Security / bug fixes from upstream land randomly, but chromium security fixes land
basically weekly around Tuesday in `America/Los_Angeles`. Minor releases only require
an upgrade to the `electron` packages. It is advisable to follow chromium weekly
security fixes, although following `electron` minor releases is fine.
Major version upgrades require a more thorough approach. For one, most changes
can be backported from `chromium` APKBUILD by diffing the previous version
packaged with `electron` with the current (set with `_chromium` var). You also
need to rebuild all `electron` apps, with patches sometimes necessary when
upstream bumps to a new `nodejs` major version. Major electron releases happen
every two `chromium` major releases, with
[dates known well ahead](https://chromiumdash.appspot.com/schedule), with a few major releases of
`electron` [officially supported at a time](https://www.electronjs.org/docs/latest/tutorial/electron-timelines).
Steps, in a nutshell:
1. Set `pkgver` to up-to-date version
2. Optional: fetch source-code using `abuild snapshot`, making sure `gclient`
is in your path
3. Update source checksum using `abuild checksum`
4. If major update, backport changes from `chromium` aport and bump `pkgrel`
for all electron-based applications.
## Why is this package still in testing
[Work is under way](https://gitlab.alpinelinux.org/alpine/aports/-/issues/15760)
to make this aport ready for `community`.
Until that happens, this package is also kept up-to-date against the latest
release of Alpine Linux in [Ayakael's Forge](https://ayakael.net/forge/-/packages/alpine/signal-desktop).
This is true of all Ayakael's packages still in `testing`.

View file

@ -1,17 +0,0 @@
This was dropped for some reason in 6951c37cecd05979b232a39e5c10e6346a0f74ef
allows using /usr/bin/java instead of a downloaded one (that doesn't work on musl)
--
--- a/third_party/closure_compiler/compiler.py 2021-05-20 04:17:53.000000000 +0200
+++ b/third_party/closure_compiler/compiler.py 2021-05-20 04:17:53.000000000 +0200
@@ -13,8 +13,9 @@
_CURRENT_DIR = os.path.join(os.path.dirname(__file__))
-_JAVA_PATH = os.path.join(_CURRENT_DIR, "..", "jdk", "current", "bin", "java")
-assert os.path.isfile(_JAVA_PATH), "java only allowed in android builds"
+_JAVA_BIN = "java"
+_JDK_PATH = os.path.join(_CURRENT_DIR, "..", "jdk", "current", "bin", "java")
+_JAVA_PATH = _JDK_PATH if os.path.isfile(_JDK_PATH) else _JAVA_BIN
class Compiler(object):
"""Runs the Closure compiler on given source files to typecheck them

View file

@ -1,14 +1,12 @@
--- ./build/config/compiler/BUILD.gn.orig
+++ ./build/config/compiler/BUILD.gn
@@ -568,24 +568,6 @@
@@ -658,22 +658,6 @@
}
}
- # TODO(crbug.com/40283598): This causes binary size growth and potentially
- # other problems.
- # TODO(crbug.com/40284925): This isn't supported by Cronet's mainline llvm version.
- if (default_toolchain != "//build/toolchain/cros:target" &&
- !llvm_android_mainline) {
- if (default_toolchain != "//build/toolchain/cros:target") {
- cflags += [
- "-mllvm",
- "-split-threshold-for-reg-with-hint=0",
@ -25,47 +23,91 @@
# TODO(crbug.com/40192287): Investigate why/if this should be needed.
if (is_win) {
cflags += [ "/clang:-ffp-contract=off" ]
@@ -998,17 +980,6 @@
# `-nodefaultlibs` from the linker invocation from Rust, which would be used
# to compile dylibs on Android, such as for constructing unit test APKs.
"-Cdefault-linker-libraries",
-
- # To make Rust .d files compatible with ninja
- "-Zdep-info-omit-d-target",
-
- # If a macro panics during compilation, show which macro and where it is
- # defined.
- "-Zmacro-backtrace",
-
- # For deterministic builds, keep the local machine's current working
- # directory from appearing in build outputs.
- "-Zremap-cwd-prefix=.",
]
if (!is_win || force_rustc_color_output) {
@@ -1175,8 +1146,8 @@
@@ -1273,8 +1257,8 @@
# simplicity we always explicitly set the architecture.
if (current_cpu == "x64") {
if (is_clang && !is_android && !is_fuchsia && !is_chromeos_device) {
- cflags += [ "--target=x86_64-unknown-linux-gnu" ]
- ldflags += [ "--target=x86_64-unknown-linux-gnu" ]
+ cflags += [ "--target=x86_64-alpine-linux-musl" ]
+ ldflags += [ "--target=x86_64-alpine-linux-musl" ]
} else {
cflags += [ "-m64" ]
ldflags += [ "-m64" ]
@@ -1282,8 +1266,8 @@
cflags += [ "-msse3" ]
} else if (current_cpu == "x86") {
if (is_clang && !is_android && !is_chromeos_device) {
- cflags += [ "--target=i386-unknown-linux-gnu" ]
- ldflags += [ "--target=i386-unknown-linux-gnu" ]
+ cflags += [ "--target=i586-alpine-linux-musl" ]
+ ldflags += [ "--target=i586-alpine-linux-musl" ]
} else {
cflags += [ "-m32" ]
ldflags += [ "-m32" ]
@@ -1294,8 +1278,8 @@
]
} else if (current_cpu == "arm") {
if (is_clang && !is_android && !is_nacl &&
!(is_chromeos_lacros && is_chromeos_device)) {
if (is_clang && !is_android && !is_chromeos_device) {
- cflags += [ "--target=arm-linux-gnueabihf" ]
- ldflags += [ "--target=arm-linux-gnueabihf" ]
+ cflags += [ "--target=armv7-alpine-linux-musleabihf" ]
+ ldflags += [ "--target=armv7-alpine-linux-musleabihf" ]
}
if (!is_nacl) {
cflags += [
@@ -1190,8 +1161,8 @@
cflags += [
"-march=$arm_arch",
@@ -1306,8 +1290,8 @@
}
} else if (current_cpu == "arm64") {
if (is_clang && !is_android && !is_nacl && !is_fuchsia &&
!(is_chromeos_lacros && is_chromeos_device)) {
if (is_clang && !is_android && !is_fuchsia && !is_chromeos_device) {
- cflags += [ "--target=aarch64-linux-gnu" ]
- ldflags += [ "--target=aarch64-linux-gnu" ]
+ cflags += [ "--target=aarch64-alpine-linux-musl" ]
+ ldflags += [ "--target=aarch64-alpine-linux-musl" ]
}
} else if (current_cpu == "mipsel" && !is_nacl) {
} else if (current_cpu == "mipsel") {
ldflags += [ "-Wl,--hash-style=sysv" ]
@@ -1982,7 +1953,7 @@
@@ -1551,22 +1535,22 @@
ldflags += [ "-maix64" ]
}
} else if (is_clang) {
- cflags += [ "--target=powerpc64le-unknown-linux-gnu" ]
- ldflags += [ "--target=powerpc64le-unknown-linux-gnu" ]
+ cflags += [ "--target=powerpc64le-alpine-linux-musl" ]
+ ldflags += [ "--target=powerpc64le-alpine-linux-musl" ]
} else {
cflags += [ "-m64" ]
ldflags += [ "-m64" ]
}
} else if (current_cpu == "riscv64") {
if (is_clang && !is_android) {
- cflags += [ "--target=riscv64-linux-gnu" ]
- ldflags += [ "--target=riscv64-linux-gnu" ]
+ cflags += [ "--target=riscv64-alpine-linux-musl" ]
+ ldflags += [ "--target=riscv64-alpine-linux-musl" ]
}
cflags += [ "-mabi=lp64d" ]
} else if (current_cpu == "loong64") {
if (is_clang) {
- cflags += [ "--target=loongarch64-linux-gnu" ]
- ldflags += [ "--target=loongarch64-linux-gnu" ]
+ cflags += [ "--target=loongarch64-alpine-linux-musl" ]
+ ldflags += [ "--target=loongarch64-alpine-linux-musl" ]
}
cflags += [
"-mabi=lp64d",
@@ -1574,8 +1558,8 @@
]
} else if (current_cpu == "s390x") {
if (is_clang) {
- cflags += [ "--target=s390x-unknown-linux-gnu" ]
- ldflags += [ "--target=s390x-unknown-linux-gnu" ]
+ cflags += [ "--target=s390x-alpine-linux-musl" ]
+ ldflags += [ "--target=s390x-alpine-linux-musl" ]
}
cflags += [ "-m64" ]
ldflags += [ "-m64" ]
@@ -2274,7 +2258,7 @@
defines = [ "_HAS_NODISCARD" ]
}
} else {
@ -76,7 +118,7 @@
cflags += [ "-Wextra" ]
--- ./build/config/rust.gni.orig
+++ ./build/config/rust.gni
@@ -185,11 +185,11 @@
@@ -178,11 +178,11 @@
rust_abi_target = ""
if (is_linux || is_chromeos) {
if (current_cpu == "arm64") {
@ -91,11 +133,15 @@
} else if (current_cpu == "arm") {
if (arm_float_abi == "hard") {
float_suffix = "hf"
@@ -198,15 +198,15 @@
}
if (arm_arch == "armv7-a" || arm_arch == "armv7") {
# No way to inform Rust about the -a suffix.
- rust_abi_target = "armv7-unknown-linux-gnueabi" + float_suffix
@@ -200,25 +200,21 @@
# The thumbv7 vs. armv7 distinction is for legacy reasons and both
# targets in fact target Thumb, see:
# https://github.com/rust-lang/rust/issues/44722
- if (arm_use_neon) {
- rust_abi_target = "thumbv7neon-unknown-linux-gnueabi" + float_suffix
- } else {
- rust_abi_target = "armv7-unknown-linux-gnueabi" + float_suffix
- }
+ rust_abi_target = "armv7-alpine-linux-musleabi" + float_suffix
} else {
- rust_abi_target = "arm-unknown-linux-gnueabi" + float_suffix
@ -104,6 +150,15 @@
} else if (current_cpu == "riscv64") {
- rust_abi_target = "riscv64gc-unknown-linux-gnu"
+ rust_abi_target = "riscv64-alpine-linux-musl"
} else if (current_cpu == "ppc64") {
- rust_abi_target = "powerpc64le-unknown-linux-gnu"
+ rust_abi_target = "powerpc64le-alpine-linux-musl"
} else if (current_cpu == "s390x") {
- rust_abi_target = "s390x-unknown-linux-gnu"
+ rust_abi_target = "s390x-alpine-linux-musl"
} else if (current_cpu == "loong64") {
- rust_abi_target = "loongarch64-unknown-linux-gnu"
+ rust_abi_target = "loongarch64-alpine-linux-musl"
} else {
# Best guess for other future platforms.
- rust_abi_target = current_cpu + "-unknown-linux-gnu"
@ -113,9 +168,9 @@
import("//build/config/android/abi.gni")
--- ./build/config/clang/BUILD.gn.orig
+++ ./build/config/clang/BUILD.gn
@@ -128,14 +128,15 @@
} else if (is_apple) {
_dir = "darwin"
@@ -207,22 +207,23 @@
assert(false) # Unhandled cpu type
}
} else if (is_linux || is_chromeos) {
+ _dir = "linux"
if (current_cpu == "x64") {
@ -130,6 +185,18 @@
} else if (current_cpu == "arm64") {
- _dir = "aarch64-unknown-linux-gnu"
+ _suffix = "-aarch64"
} else if (current_cpu == "loong64") {
- _dir = "loongarch64-unknown-linux-gnu"
+ _suffix = "-loongarch64"
} else if (current_cpu == "riscv64") {
- _dir = "riscv64-unknown-linux-gnu"
+ _suffix = "-riscv64"
} else if (current_cpu == "ppc64") {
- _dir = "ppc64le-unknown-linux-gnu"
+ _suffix = "-powerpc64le"
} else if (current_cpu == "s390x") {
- _dir = "s390x-unknown-linux-gnu"
+ _suffix = "-s390x"
} else {
assert(false) # Unhandled cpu type
}

View file

@ -2,7 +2,7 @@ diff --git a/net/dns/BUILD.gn b/net/dns/BUILD.gn
index f36bf68..805d9a6 100644
--- a/net/dns/BUILD.gn
+++ b/net/dns/BUILD.gn
@@ -130,8 +130,8 @@ source_set("dns") {
@@ -142,8 +142,8 @@
]
} else if (is_linux) {
sources += [
@ -13,3 +13,11 @@ index f36bf68..805d9a6 100644
]
} else if (is_posix) {
sources += [
@@ -455,7 +455,6 @@
if (is_android) {
sources += [ "dns_config_service_android_unittest.cc" ]
} else if (is_linux) {
- sources += [ "dns_config_service_linux_unittest.cc" ]
} else if (is_posix) {
sources += [ "dns_config_service_posix_unittest.cc" ]
}

View file

@ -2,86 +2,6 @@ safesprintf emitnull:
error: conversion from 'std::nullptr_t' to 'const internal::Arg' is ambiguous
const internal::Arg arg_array[] = { args... };
flatmap incompletetype:
error: static assertion failed due to requirement 'std::__is_complete_or_unbounded(std::__type_identity<std::pair<A, A>>{})': template argument must be a complete class or an unbounded array
static_assert(std::__is_complete_or_unbounded(__type_identity<_Tp>{}),
i18n, time:
various icu failures (new icu time formatting? internal api difference?)
a ton of these fail:
Expected equality of these values:
u"Monday 16 May Saturday 28 May"
Which is: u"Monday 16 May \x2013 Saturday 28 May"
DateIntervalFormat(begin_time, end_time, DATE_FORMAT_MONTH_WEEKDAY_DAY)
Which is: u"Monday 16\x2009\x2013\x2009Saturday 28 May"
../../base/i18n/time_formatting_unittest.cc:84: Failure
Expected equality of these values:
clock12h_pm
Which is: u"3:42 PM"
TimeFormatTimeOfDay(time)
Which is: u"3:42\x202FPM"
.. and so on
fileutiltest filetofile:
../../base/files/file_util_unittest.cc:2692: Failure
Value of: stream
Actual: true
Expected: false
stacktracetest: crashes (this doesn't seem to use execinfo so probably relies on glibc internal layout for tracing here)
platformthreadtest canchangethreadtype:
../../base/threading/platform_thread_unittest.cc:445: Failure
Expected equality of these values:
PlatformThread::CanChangeThreadType(ThreadType::kBackground, ThreadType::kResourceEfficient)
Which is: true
kCanIncreasePriority
Which is: false
scopedfdownershiptrackingtest crashonunownedclose: fails due to scoped-file-no-close.patch
stackcontainer customallocator:
../../base/containers/stack_container_unittest.cc:211: Failure
Expected equality of these values:
1
Allocator::deallocated
Which is: 0
nativelibrarytest loadlibrarypreferownsymbols: crashes (probably musl dlopen does not play nice here)
spantest empty: crashes (this looks fishy)
readelfbuildid: crashes (this looks like glibc dynamic linker semantics)
nss db unittest: various nss failures: e.g.:
../../net/cert/nss_cert_database_unittest.cc:209: Failure
Expected equality of these values:
OK
Which is: 0
cert_db_->ImportFromPKCS12(GetPublicSlot(), pkcs12_data, u"12345", true, nullptr)
Which is: -702
processutiltest cloneflags: fails in CI (ulimit? too many threads?)
../../base/process/process_util_unittest.cc:1434: Failure
Value of: process.IsValid()
Actual: false
Expected: true
addresstrackerlinuxnetlinktest:
../../net/base/address_tracker_linux_unittest.cc:886: Failure
Value of: child.process.IsValid()
Actual: false
Expected: true
ToAddressDoesNotDereference: ; Expected `get_for_extraction_cnt` to be 1 but got 0;
DataCapturedManyThreads: flaky
ProcessAlternativeServicesTest.Process*: crashed ?
--- a/base/strings/safe_sprintf_unittest.cc
+++ b/base/strings/safe_sprintf_unittest.cc
@@ -740,6 +740,7 @@
@ -100,244 +20,3 @@ ProcessAlternativeServicesTest.Process*: crashed ?
TEST(SafeSPrintfTest, PointerSize) {
// The internal data representation is a 64bit value, independent of the
--- a/base/containers/flat_map_unittest.cc
+++ b/base/containers/flat_map_unittest.cc
@@ -52,6 +52,7 @@
} // namespace
+#if 0
TEST(FlatMap, IncompleteType) {
struct A {
using Map = flat_map<A, A>;
@@ -65,6 +66,7 @@
A a;
}
+#endif
TEST(FlatMap, RangeConstructor) {
flat_map<int, int>::value_type input_vals[] = {
--- a/base/BUILD.gn
+++ b/base/BUILD.gn
@@ -3194,21 +3194,6 @@
"hash/md5_constexpr_unittest.cc",
"hash/md5_unittest.cc",
"hash/sha1_unittest.cc",
- "i18n/break_iterator_unittest.cc",
- "i18n/case_conversion_unittest.cc",
- "i18n/char_iterator_unittest.cc",
- "i18n/character_encoding_unittest.cc",
- "i18n/file_util_icu_unittest.cc",
- "i18n/icu_string_conversions_unittest.cc",
- "i18n/icu_util_unittest.cc",
- "i18n/message_formatter_unittest.cc",
- "i18n/number_formatting_unittest.cc",
- "i18n/rtl_unittest.cc",
- "i18n/streaming_utf8_validator_unittest.cc",
- "i18n/string_search_unittest.cc",
- "i18n/time_formatting_unittest.cc",
- "i18n/timezone_unittest.cc",
- "i18n/transliterator_unittest.cc",
"immediate_crash_unittest.cc",
"json/json_parser_unittest.cc",
"json/json_reader_unittest.cc",
--- a/base/files/file_util_unittest.cc
+++ b/base/files/file_util_unittest.cc
@@ -2686,6 +2686,7 @@
}
}
+#if 0
TEST_F(FileUtilTest, FileToFILE) {
File file;
FILE* stream = FileToFILE(std::move(file), "w");
@@ -2700,6 +2701,7 @@
EXPECT_FALSE(file.IsValid());
EXPECT_TRUE(CloseFile(stream));
}
+#endif
TEST_F(FileUtilTest, FILEToFile) {
ScopedFILE stream;
--- a/base/threading/platform_thread_unittest.cc
+++ b/base/threading/platform_thread_unittest.cc
@@ -416,6 +416,7 @@
// platforms for all priorities. This not being the case. This test documents
// and hardcodes what we know. Please inform scheduler-dev@chromium.org if this
// proprerty changes for a given platform.
+#if 0
TEST(PlatformThreadTest, CanChangeThreadType) {
#if BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS)
// On Ubuntu, RLIMIT_NICE and RLIMIT_RTPRIO are 0 by default, so we won't be
@@ -472,6 +473,7 @@
ThreadType::kBackground));
#endif
}
+#endif
TEST(PlatformThreadTest, SetCurrentThreadTypeTest) {
TestPriorityResultingFromThreadType(ThreadType::kBackground,
--- a/base/files/scoped_file_linux_unittest.cc
+++ b/base/files/scoped_file_linux_unittest.cc
@@ -42,11 +42,13 @@
EXPECT_DEATH(ScopedFD(fd.get()), "");
}
+#if 0
TEST_F(ScopedFDOwnershipTrackingTest, CrashOnUnownedClose) {
ScopedFD fd = OpenFD();
subtle::EnableFDOwnershipEnforcement(true);
EXPECT_DEATH(close(fd.get()), "");
}
+#endif
#endif // defined(GTEST_HAS_DEATH_TEST)
--- a/base/native_library_unittest.cc
+++ b/base/native_library_unittest.cc
@@ -139,6 +139,7 @@
// Verifies that the |prefer_own_symbols| option satisfies its guarantee that
// a loaded library will always prefer local symbol resolution before
// considering global symbols.
+#if 0
TEST(NativeLibraryTest, LoadLibraryPreferOwnSymbols) {
NativeLibraryOptions options;
options.prefer_own_symbols = true;
@@ -171,6 +172,7 @@
EXPECT_EQ(2, NativeLibraryTestIncrement());
EXPECT_EQ(3, NativeLibraryTestIncrement());
}
+#endif
#endif // !BUILDFLAG(IS_ANDROID) && !defined(THREAD_SANITIZER) && \
// !defined(MEMORY_SANITIZER)
--- a/base/containers/span_unittest.cc
+++ b/base/containers/span_unittest.cc
@@ -995,6 +995,7 @@
}
}
+#if 0
TEST(SpanTest, Empty) {
{
span<int> span;
@@ -1014,6 +1015,7 @@
EXPECT_TRUE(span_of_checked_iterators.empty());
}
}
+#endif
TEST(SpanTest, OperatorAt) {
static constexpr int kArray[] = {1, 6, 1, 8, 0};
--- a/base/debug/elf_reader_unittest.cc
+++ b/base/debug/elf_reader_unittest.cc
@@ -194,6 +194,7 @@
}
}
+#if 0
TEST(ElfReaderTestWithCurrentImage, ReadElfBuildId) {
#if BUILDFLAG(IS_ANDROID)
// On Android the library loader memory maps the full so file.
@@ -229,6 +230,7 @@
UnloadNativeLibrary(library);
#endif
}
+#endif
} // namespace debug
} // namespace base
--- a/net/BUILD.gn
+++ b/net/BUILD.gn
@@ -4826,7 +4826,6 @@
sources += [
"cert/internal/system_trust_store_nss_unittest.cc",
"cert/internal/trust_store_nss_unittest.cc",
- "cert/nss_cert_database_unittest.cc",
"cert/x509_util_nss_unittest.cc",
]
if (!is_castos) {
--- a/base/process/process_util_unittest.cc
+++ b/base/process/process_util_unittest.cc
@@ -1419,7 +1419,7 @@
return kSuccess;
}
-#if defined(CLONE_NEWUSER) && defined(CLONE_NEWPID)
+#if 0 && defined(CLONE_NEWUSER) && defined(CLONE_NEWPID)
TEST_F(ProcessUtilTest, CloneFlags) {
if (!PathExists(FilePath("/proc/self/ns/user")) ||
!PathExists(FilePath("/proc/self/ns/pid"))) {
--- a/net/base/address_tracker_linux_unittest.cc
+++ b/net/base/address_tracker_linux_unittest.cc
@@ -831,6 +831,7 @@
//
// This test creates multiple concurrent `AddressTrackerLinux` instances in
// separate processes, each in their own PID namespaces.
+#if 0
TEST(AddressTrackerLinuxNetlinkTest, TestInitializeTwoTrackersInPidNamespaces) {
// This test initializes `kNumChildren` instances of `AddressTrackerLinux` in
// tracking mode, each in their own child process running in a PID namespace.
@@ -901,6 +902,7 @@
ASSERT_EQ(exit_code, 0);
}
}
+#endif
MULTIPROCESS_TEST_MAIN(ChildProcessInitializeTrackerForTesting) {
base::test::TaskEnvironment task_env(
--- a/base/trace_event/trace_event_unittest.cc
+++ b/base/trace_event/trace_event_unittest.cc
@@ -1368,6 +1368,7 @@
}
// Test that data sent from multiple threads is gathered
+#if 0
TEST_F(TraceEventTestFixture, DataCapturedManyThreads) {
BeginTrace();
@@ -1408,6 +1409,7 @@
delete task_complete_events[i];
}
}
+#endif
// Test that thread and process names show up in the trace.
// In SDK build, thread names are not tracked inside //base. Instead, there's
--- a/base/allocator/partition_allocator/src/partition_alloc/pointers/raw_ptr_unittest.cc
+++ b/base/allocator/partition_allocator/src/partition_alloc/pointers/raw_ptr_unittest.cc
@@ -1481,6 +1481,7 @@
// `base::to_address()` will use the dereference operator. This is not
// what we want; this test enforces extraction semantics for
// `to_address()`.
+#if 0
TEST_F(RawPtrTest, ToAddressDoesNotDereference) {
CountingRawPtr<int> ptr = nullptr;
int* raw = base::to_address(ptr);
@@ -1492,6 +1493,7 @@
.get_for_duplication_cnt = 0}),
CountersMatch());
}
+#endif
TEST_F(RawPtrTest, ToAddressGivesBackRawAddress) {
int* raw = nullptr;
--- a/net/http/http_stream_factory_unittest.cc
+++ b/net/http/http_stream_factory_unittest.cc
@@ -3477,6 +3477,7 @@
DefaultCTPolicyEnforcer ct_policy_enforcer_;
};
+#if 0
TEST_F(ProcessAlternativeServicesTest, ProcessEmptyAltSvc) {
session_ =
std::make_unique<HttpNetworkSession>(session_params_, session_context_);
@@ -3585,6 +3586,7 @@
alternatives[0].host_port_pair());
EXPECT_EQ(0u, alternatives[0].advertised_versions().size());
}
+#endif
} // namespace

View file

@ -0,0 +1,127 @@
diff --git a/electron/BUILD.gn.orig b/electron/BUILD.gn
index b08f434..4062428 100644
--- a/electron/BUILD.gn.orig
+++ b/electron/BUILD.gn
@@ -44,7 +44,6 @@ if (is_mac) {
if (is_linux) {
import("//build/config/linux/pkg_config.gni")
- import("//electron/build/linux/strip_binary.gni")
import("//tools/generate_stubs/rules.gni")
pkg_config("gio_unix") {
@@ -1424,18 +1423,6 @@ dist_zip("electron_dist_zip") {
":licenses",
]
if (is_linux) {
- if (is_official_build) {
- data_deps += [
- ":strip_chrome_crashpad_handler",
- ":strip_chrome_sandbox",
- ":strip_electron_binary",
- ":strip_libEGL_shlib",
- ":strip_libGLESv2_shlib",
- ":strip_libffmpeg_shlib",
- ":strip_libvk_swiftshader_shlib",
- ]
- }
-
data_deps += [ "//sandbox/linux:chrome_sandbox" ]
}
deps = data_deps
@@ -1481,16 +1468,6 @@ group("electron_mksnapshot") {
dist_zip("electron_mksnapshot_zip") {
data_deps = mksnapshot_deps
- if (is_linux && is_official_build) {
- data_deps += [
- ":strip_libEGL_shlib",
- ":strip_libGLESv2_shlib",
- ":strip_libffmpeg_shlib",
- ":strip_libvk_swiftshader_shlib",
- ":strip_mksnapshot_binary",
- ":strip_v8_context_snapshot_generator_binary",
- ]
- }
deps = data_deps
outputs = [ "$root_build_dir/mksnapshot.zip" ]
}
@@ -1637,78 +1614,3 @@ group("release_build") {
]
}
}
-
-if (is_linux && is_official_build) {
- strip_binary("strip_electron_binary") {
- binary_input = "$root_out_dir/$electron_project_name"
- symbol_output = "$root_out_dir/debug/$electron_project_name.debug"
- compress_debug_sections = true
- deps = [ ":electron_app" ]
- }
-
- strip_binary("strip_chrome_crashpad_handler") {
- binary_input = "$root_out_dir/chrome_crashpad_handler"
- symbol_output = "$root_out_dir/debug/chrome_crashpad_handler.debug"
- compress_debug_sections = true
- deps = [ "//components/crash/core/app:chrome_crashpad_handler" ]
- }
-
- strip_binary("strip_chrome_sandbox") {
- binary_input = "$root_out_dir/chrome_sandbox"
- symbol_output = "$root_out_dir/debug/chrome-sandbox.debug"
- compress_debug_sections = true
- deps = [ "//sandbox/linux:chrome_sandbox" ]
- }
-
- strip_binary("strip_libEGL_shlib") {
- binary_input = "$root_out_dir/libEGL.so"
- symbol_output = "$root_out_dir/debug/libEGL.so.debug"
- compress_debug_sections = true
- deps = [ "//third_party/angle:libEGL" ]
- }
-
- strip_binary("strip_libGLESv2_shlib") {
- binary_input = "$root_out_dir/libGLESv2.so"
- symbol_output = "$root_out_dir/debug/libGLESv2.so.debug"
- compress_debug_sections = true
- deps = [ "//third_party/angle:libGLESv2" ]
- }
-
- strip_binary("strip_libffmpeg_shlib") {
- binary_input = "$root_out_dir/libffmpeg.so"
- symbol_output = "$root_out_dir/debug/libffmpeg.so.debug"
- compress_debug_sections = true
- deps = [ "//third_party/ffmpeg" ]
- }
-
- strip_binary("strip_libvk_swiftshader_shlib") {
- binary_input = "$root_out_dir/libvk_swiftshader.so"
- symbol_output = "$root_out_dir/debug/libvk_swiftshader.so.debug"
- compress_debug_sections = true
- deps = [ "//third_party/swiftshader/src/Vulkan:swiftshader_libvulkan" ]
- }
-
- strip_binary("strip_mksnapshot_binary") {
- _binary_path = rebase_path(
- get_label_info(
- ":v8_context_snapshot_generator($v8_snapshot_toolchain)",
- "root_out_dir") + "/mksnapshot",
- root_build_dir)
- binary_input = "$root_out_dir/$_binary_path"
- symbol_output = "$root_out_dir/debug/${_binary_path}.debug"
- compress_debug_sections = true
- deps = mksnapshot_deps
- }
-
- strip_binary("strip_v8_context_snapshot_generator_binary") {
- _binary_path = rebase_path(
- get_label_info(
- ":v8_context_snapshot_generator($v8_snapshot_toolchain)",
- "root_out_dir") + "/v8_context_snapshot_generator",
- root_build_dir)
- binary_input = "$root_out_dir/$_binary_path"
- symbol_output = "$root_out_dir/debug/${_binary_path}.debug"
- compress_debug_sections = true
- deps = mksnapshot_deps
- }
-}

View file

@ -0,0 +1,16 @@
diff --git a/./electron/shell/browser/ui/file_dialog.h.orig b/./electron/shell/browser/ui/file_dialog.h
index 6cdfc7b..f7757da 100644
--- a/./electron/shell/browser/ui/file_dialog.h.orig
+++ b/./electron/shell/browser/ui/file_dialog.h
@@ -13,10 +13,6 @@
#include "base/files/file_path.h"
#include "base/memory/raw_ptr_exclusion.h"
-#if BUILDFLAG(IS_LINUX)
-#include <bits/stdint-uintn.h>
-#endif
-
namespace electron {
class NativeWindow;
}

View file

@ -1,10 +1,12 @@
--- ./electron/BUILD.gn.orig
+++ ./electron/BUILD.gn
@@ -1565,7 +1565,6 @@
public_deps = header_groups + [
":node_gypi_headers",
":node_version_header",
- ":zlib_headers",
]
diff --git a/electron/BUILD.gn.orig b/electron/BUILD.gn
index 235c7abd3e8..088c24ac45e 100644
--- a/electron/BUILD.gn.orig
+++ b/electron/BUILD.gn
@@ -1569,7 +1569,6 @@ group("copy_node_headers") {
":generate_node_headers",
":node_gypi_headers",
":node_version_header",
- ":zlib_headers",
]
}

View file

@ -1,69 +1,143 @@
--- ./third_party/electron_node/BUILD.gn.orig
+++ ./third_party/electron_node/BUILD.gn
@@ -40,6 +40,8 @@
node_release_urlbase = ""
# Allows downstream packagers (eg. Linux distributions) to build Electron against system shared libraries.
+ use_system_ada = false
+ use_system_base64 = false
use_system_cares = false
use_system_nghttp2 = false
use_system_llhttp = false
@@ -48,6 +50,16 @@
if (is_linux) {
import("//build/config/linux/pkg_config.gni")
+ if (use_system_ada) {
+ config("ada") {
+ libs = [ "ada" ]
+ }
+ }
+ if (use_system_base64) {
+ pkg_config("base64") {
+ packages = [ "base64" ]
+ }
+ }
if (use_system_cares) {
pkg_config("cares") {
packages = [ "libcares" ]
@@ -258,8 +270,6 @@
deps = [
":node_js2c_exec",
"deps/googletest:gtest",
- "deps/ada",
- "deps/base64",
"deps/simdutf",
"deps/uvwasi",
"//third_party/zlib",
@@ -267,6 +277,16 @@
"//third_party/brotli:enc",
"//v8:v8_libplatform",
]
+ if (use_system_ada) {
+ configs += [ ":ada" ]
+ } else {
+ deps += [ "deps/ada" ]
+ }
+ if (use_system_base64) {
+ configs += [ ":base64" ]
+ } else {
+ deps += [ "deps/base64" ]
+ }
if (use_system_cares) {
configs += [ ":cares" ]
} else {
diff --git a/./electron/script/generate-config-gypi.py.orig b/./electron/script/generate-config-gypi.py
index b41cd7eb450..bc4098debb5 100755
--- a/./electron/script/generate-config-gypi.py.orig
+++ b/./electron/script/generate-config-gypi.py
@@ -62,6 +62,11 @@ def main(target_file, target_cpu):
# Used by certain versions of node-gyp.
v['build_v8_with_gn'] = 'false'
diff --git a/electron/script/generate-config-gypi.py.orig b/electron/script/generate-config-gypi.py
index 58c973b..c215d90 100755
--- a/electron/script/generate-config-gypi.py.orig
+++ b/electron/script/generate-config-gypi.py
@@ -64,6 +64,11 @@ def main(target_file, target_cpu):
# in common.gypi
if 'clang' in v:
del v['clang']
+
+ with open(os.path.join(NODE_DIR, 'use_system.txt')) as f:
+ for dep in f.read().strip().split(' '):
+ if v.get(f'node_shared_{dep}') is not None:
+ v[f'node_shared_{dep}'] = 'true'
+
with open(target_file, 'w+', encoding='utf-8') as file_out:
file_out.write(pprint.pformat(config, indent=2))
diff --git a/third_party/electron_node/node.gni.orig b/third_party/electron_node/node.gni
index 73bf383..1c80d5a 100644
--- a/third_party/electron_node/node.gni.orig
+++ b/third_party/electron_node/node.gni
@@ -73,6 +73,7 @@ declare_args() {
node_use_amaro = true
# Allows downstream packagers (eg. Linux distributions) to build against system shared libraries.
+ use_system_ada = false
use_system_cares = false
use_system_nghttp2 = false
use_system_llhttp = false
diff --git a/third_party/electron_node/unofficial.gni.orig b/third_party/electron_node/unofficial.gni
index d61a9bd..8bf990e 100644
--- a/third_party/electron_node/unofficial.gni.orig
+++ b/third_party/electron_node/unofficial.gni
@@ -143,7 +143,6 @@ template("node_gn_build") {
"deps/googletest:googletest_config",
]
public_deps = [
- "deps/ada",
"deps/uv",
"//electron:electron_js2c",
"deps/simdjson",
@@ -151,10 +150,7 @@ template("node_gn_build") {
]
deps = [
":run_node_js2c",
- "deps/cares",
- "deps/histogram",
"deps/nbytes",
- "deps/nghttp2",
"deps/postject",
"deps/sqlite",
"deps/uvwasi",
@@ -182,12 +178,30 @@ template("node_gn_build") {
if (is_posix) {
configs -= [ "//build/config/gcc:symbol_visibility_hidden" ]
configs += [ "//build/config/gcc:symbol_visibility_default" ]
+ libs = []
+ include_dirs = []
}
if (use_system_llhttp) {
libs += [ "llhttp" ]
} else {
deps += [ "deps/llhttp" ]
}
+ if (use_system_cares) {
+ libs += [ "cares" ]
+ } else {
+ deps += [ "deps/cares" ]
+ }
+ if (use_system_nghttp2) {
+ libs += [ "nghttp2" ]
+ } else {
+ deps += [ "deps/nghttp2" ]
+ }
+ if (use_system_ada) {
+ libs += [ "ada" ]
+ include_dirs += [ "/usr/include/ada" ]
+ } else {
+ public_deps += [ "deps/ada" ]
+ }
if (use_system_histogram) {
libs += [ "hdr_histogram" ]
include_dirs += [ "/usr/include/hdr" ]
@@ -208,7 +222,7 @@ template("node_gn_build") {
"src/inspector:node_protocol_generated_sources",
"src/inspector:v8_inspector_compress_protocol_json",
]
- include_dirs = [
+ include_dirs += [
"$target_gen_dir/src",
"$target_gen_dir/src/inspector",
"$node_inspector_protocol_path",
@@ -222,17 +236,18 @@ template("node_gn_build") {
sources += node_inspector.node_inspector_sources +
node_inspector.node_inspector_generated_sources
}
- if (is_linux) {
- import("//build/config/linux/pkg_config.gni")
- if (use_system_cares) {
- pkg_config("cares") {
- packages = [ "libcares" ]
- }
- }
- if (use_system_nghttp2) {
- pkg_config("nghttp2") {
- packages = [ "libnghttp2" ]
- }
+ }
+
+ if (is_linux) {
+ import("//build/config/linux/pkg_config.gni")
+ if (use_system_cares) {
+ pkg_config("cares") {
+ packages = [ "libcares" ]
+ }
+ }
+ if (use_system_nghttp2) {
+ pkg_config("nghttp2") {
+ packages = [ "libnghttp2" ]
}
}
}
diff --git a/third_party/electron_node/unofficial.gni.orig b/third_party/electron_node/unofficial.gni
index 6bcc40b..7e383b2 100644
--- a/third_party/electron_node/unofficial.gni.orig
+++ b/third_party/electron_node/unofficial.gni
@@ -142,7 +142,6 @@ template("node_gn_build") {
public_configs = [
":node_external_config",
"deps/googletest:googletest_config",
- ":zstd_include_config"
]
public_deps = [
"deps/ada",
@@ -163,8 +162,6 @@ template("node_gn_build") {
"//third_party/zlib",
"//third_party/brotli:dec",
"//third_party/brotli:enc",
- "//third_party/zstd:decompress",
- "//third_party/zstd:headers",
"$node_simdutf_path",
"$node_v8_path:v8_libplatform",
]

View file

@ -0,0 +1,17 @@
diff --git a/electron/.yarnrc.yml.orig b/electron/.yarnrc.yml
index ca0a580..a388ff9 100644
--- a/electron/.yarnrc.yml.orig
+++ b/electron/.yarnrc.yml
@@ -1,12 +1,3 @@
enableScripts: false
-nmHoistingLimits: workspaces
-
nodeLinker: node-modules
-
-npmMinimalAgeGate: 10080
-
-npmPreapprovedPackages:
- - "@electron/*"
-
-yarnPath: .yarn/releases/yarn-4.12.0.cjs

View file

@ -1,12 +1,10 @@
instead of hardcoding the version, use the defined macro.
--
--- a/third_party/test_fonts/fontconfig/generate_fontconfig_caches.cc
+++ b/third_party/test_fonts/fontconfig/generate_fontconfig_caches.cc
--- ./third_party/test_fonts/fontconfig/generate_fontconfig_caches.cc.orig
+++ ./third_party/test_fonts/fontconfig/generate_fontconfig_caches.cc
@@ -56,7 +56,7 @@
FcFini();
// Check existence of intended fontconfig cache file.
- auto cache = fontconfig_caches + "/" + kCacheKey + "-le64.cache-9";
- auto cache = fontconfig_caches + "/" + kCacheKey + "-le64.cache-11";
+ auto cache = fontconfig_caches + "/" + kCacheKey + "-le64.cache-" + FC_CACHE_VERSION;
bool cache_exists = access(cache.c_str(), F_OK) == 0;
return !cache_exists;

View file

@ -0,0 +1,13 @@
--- a/media/ffmpeg/ffmpeg_common.cc
+++ b/media/ffmpeg/ffmpeg_common.cc
@@ -1046,8 +1046,8 @@
// This should match the configured lists in //third_party/ffmpeg.
static constexpr std::string_view kAllowedAudioCodecs =
- "vorbis,libopus,flac,pcm_u8,pcm_s16le,pcm_s24le,pcm_s32le,pcm_f32le,"
- "mp3,pcm_s16be,pcm_s24be,pcm_mulaw,pcm_alaw" EXTRA_CODECS;
+ "vorbis,opus,libopus,flac,pcm_u8,pcm_s16le,pcm_s24le,pcm_s32le,pcm_f32le,"
+ "mp3float,mp3,pcm_s16be,pcm_s24be,pcm_mulaw,pcm_alaw" EXTRA_CODECS;
#undef EXTRA_CODECS
return kAllowedAudioCodecs.data();

View file

@ -1,12 +0,0 @@
--- a/media/filters/ffmpeg_glue.cc
+++ b/media/filters/ffmpeg_glue.cc
@@ -142,7 +142,7 @@ const char* FFmpegGlue::GetAllowedAudioDecoders() {
static const base::NoDestructor<std::string> kAllowedAudioCodecs([]() {
// This should match the configured lists in //third_party/ffmpeg.
std::string allowed_decoders(
- "vorbis,libopus,flac,pcm_u8,pcm_s16le,pcm_s24le,pcm_s32le,pcm_f32le,"
+ "vorbis,opus,libopus,flac,pcm_u8,pcm_s16le,pcm_s24le,pcm_s32le,pcm_f32le,"
"mp3,pcm_s16be,pcm_s24be,pcm_mulaw,pcm_alaw");
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
allowed_decoders += ",aac";

View file

@ -0,0 +1,27 @@
From 6f4685cff1ab8c68de98c0731bb8afaf8a05a723 Mon Sep 17 00:00:00 2001
From: knuxify <knuxify@gmail.com>
Date: Sat, 5 Apr 2025 14:10:37 +0200
Subject: [PATCH] Do not generate license file for headless-shell build
---
headless/BUILD.gn | 4 ----
1 file changed, 4 deletions(-)
diff --git a/headless/BUILD.gn b/headless/BUILD.gn
index 798bb22..9d83f49 100644
--- a/headless/BUILD.gn
+++ b/headless/BUILD.gn
@@ -934,10 +934,6 @@ executable("headless_shell") {
deps = [ ":headless_shell_lib" ]
- if (proprietary_codecs) {
- deps += [ ":generate_headless_shell_license_file" ]
- }
-
if (!headless_use_embedded_resources) {
data = [
"$root_out_dir/headless_lib_data.pak",
--
2.49.0

View file

@ -1,11 +0,0 @@
--- ./v8/src/base/cpu.cc.orig
+++ ./v8/src/base/cpu.cc
@@ -14,7 +14,7 @@
#if V8_OS_LINUX
#include <linux/auxvec.h> // AT_HWCAP
#endif
-#if V8_GLIBC_PREREQ(2, 16) || V8_OS_ANDROID
+#if 1
#include <sys/auxv.h> // getauxval()
#endif
#if V8_OS_QNX

View file

@ -1,23 +0,0 @@
use monotonic clock for pthread_cond_timedwait with musl too, since it supports
it
--
--- a/v8/src/base/platform/condition-variable.cc
+++ b/v8/src/base/platform/condition-variable.cc
@@ -16,7 +16,7 @@
ConditionVariable::ConditionVariable() {
#if (V8_OS_FREEBSD || V8_OS_NETBSD || V8_OS_OPENBSD || \
- (V8_OS_LINUX && V8_LIBC_GLIBC))
+ V8_OS_LINUX)
// On Free/Net/OpenBSD and Linux with glibc we can change the time
// source for pthread_cond_timedwait() to use the monotonic clock.
pthread_condattr_t attr;
@@ -92,7 +92,7 @@
&native_handle_, &mutex->native_handle(), &ts);
#else
#if (V8_OS_FREEBSD || V8_OS_NETBSD || V8_OS_OPENBSD || \
- (V8_OS_LINUX && V8_LIBC_GLIBC))
+ V8_OS_LINUX)
// On Free/Net/OpenBSD and Linux with glibc we can change the time
// source for pthread_cond_timedwait() to use the monotonic clock.
result = clock_gettime(CLOCK_MONOTONIC, &ts);

View file

@ -0,0 +1,22 @@
From 4b41417068045f11db9e7edead1447e93adb9073 Mon Sep 17 00:00:00 2001
From: LN Liberda <lauren@selfisekai.rocks>
Date: Sat, 28 Jun 2025 18:13:59 +0200
Subject: [PATCH] Test net without vendored python
---
net/test/python_utils.cc | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/net/test/python_utils.cc b/net/test/python_utils.cc
index 2cdc07dad9948..0b2d42a5bf126 100644
--- a/net/test/python_utils.cc
+++ b/net/test/python_utils.cc
@@ -47,7 +47,7 @@ bool GetPython3Command(base::CommandLine* python_cmd) {
#if BUILDFLAG(IS_WIN)
python_cmd->SetProgram(base::FilePath(FILE_PATH_LITERAL("vpython3.bat")));
#else
- python_cmd->SetProgram(base::FilePath(FILE_PATH_LITERAL("vpython3")));
+ python_cmd->SetProgram(base::FilePath(FILE_PATH_LITERAL("python3")));
#endif
#if BUILDFLAG(IS_MAC)

View file

@ -0,0 +1,39 @@
ssl.wrap_socket() was removed in Python 3.12, needed for net_unittests.
Patch-Source: https://github.com/GoogleChromeLabs/pywebsocket3/pull/39
Modified (changed path) -lnl
From bc50ae9d451ca705edd6101d987b839e1a09d45e Mon Sep 17 00:00:00 2001
From: Sven Diederichs <22592421+zaurask@users.noreply.github.com>
Date: Thu, 28 Mar 2024 18:55:28 +0100
Subject: [PATCH] use ssl.SSLContext.wrap_socket rather than the deprecated
ssl.wrap_socket
---
pywebsocket3/websocket_server.py | 14 ++++++++------
1 file changed, 8 insertions(+), 6 deletions(-)
diff --git a/third_party/pywebsocket3/src/mod_pywebsocket/websocket_server.py b/third_party/pywebsocket3/src/mod_pywebsocket/websocket_server.py
index e7485ec..93ad6f1 100644
--- a/third_party/pywebsocket3/src/mod_pywebsocket/websocket_server.py
+++ b/third_party/pywebsocket3/src/mod_pywebsocket/websocket_server.py
@@ -157,12 +157,14 @@ class WebSocketServer(socketserver.ThreadingMixIn, BaseHTTPServer.HTTPServer):
client_cert_ = ssl.CERT_REQUIRED
else:
client_cert_ = ssl.CERT_NONE
- socket_ = ssl.wrap_socket(
- socket_,
- keyfile=server_options.private_key,
- certfile=server_options.certificate,
- ca_certs=server_options.tls_client_ca,
- cert_reqs=client_cert_)
+
+ ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS)
+ if server_options.certificate:
+ ssl_context.load_cert_chain(certfile=server_options.certificate, keyfile=server_options.private_key)
+ if server_options.tls_client_ca:
+ ssl_context.load_verify_locations(server_options.tls_client_ca)
+ ssl_context.verify_mode =client_cert_
+ socket_ = ssl_context.wrap_socket(socket_)
self._sockets.append((socket_, addrinfo))
def server_bind(self):

View file

@ -3,15 +3,16 @@ missing some required interface headers for it, and it's not clear how
to make the partalloc support code for it work.
--- ./base/allocator/partition_allocator/partition_alloc.gni.orig
+++ ./base/allocator/partition_allocator/partition_alloc.gni
@@ -30,7 +30,7 @@
}
@@ -89,8 +89,7 @@
# TODO(crbug.com/329199197): Clean this up when experiments are complete.
use_large_empty_slot_span_ring = true
has_memory_tagging =
- current_cpu == "arm64" && is_clang && !is_asan && (is_linux || is_android)
+ false
-has_memory_tagging = current_cpu == "arm64" && is_clang && !is_asan &&
- !is_hwasan && (is_linux || is_android)
+has_memory_tagging = false
declare_args() {
# Causes all the allocations to be routed via allocator_shim.cc. Usually,
# Debug configuration.
--- ./base/allocator/partition_allocator/src/partition_alloc/aarch64_support.h.orig
+++ ./base/allocator/partition_allocator/src/partition_alloc/aarch64_support.h
@@ -10,7 +10,7 @@

View file

@ -1,11 +0,0 @@
--- ./buildtools/third_party/libc++/__config_site.orig
+++ ./buildtools/third_party/libc++/__config_site
@@ -18,7 +18,7 @@
/* #undef _LIBCPP_ABI_FORCE_MICROSOFT */
/* #undef _LIBCPP_HAS_NO_THREADS */
/* #undef _LIBCPP_HAS_NO_MONOTONIC_CLOCK */
-/* #undef _LIBCPP_HAS_MUSL_LIBC */
+#define _LIBCPP_HAS_MUSL_LIBC 1
/* #undef _LIBCPP_HAS_THREAD_API_PTHREAD */
/* #undef _LIBCPP_HAS_THREAD_API_EXTERNAL */
/* #undef _LIBCPP_HAS_THREAD_API_WIN32 */

View file

@ -0,0 +1,172 @@
# Contributor: lauren n. liberda <lauren@selfisekai.rocks>
maintainer="lauren n. liberda <lauren@selfisekai.rocks>"
pkgname=element-desktop
pkgver=1.12.3
pkgrel=1
pkgdesc="Secure and independent communication, connected via Matrix"
url="https://element.io/"
arch="aarch64 x86_64" # same as electron
license="GPL-3.0-only"
_electronver=39
depends="
electron~$_electronver
font-inconsolata
font-inter
font-nunito
font-opensans
font-twemoji
"
makedepends="
cargo
electron-dev~$_electronver
electron-tasje
jq
libsecret-dev
nodejs
npm
python3
py3-setuptools
sqlcipher-dev
swc
yarn
"
source="
https://github.com/vector-im/element-desktop/archive/refs/tags/v$pkgver/element-desktop-$pkgver.tar.gz
https://github.com/vector-im/element-web/archive/refs/tags/v$pkgver/element-web-$pkgver.tar.gz
add-alpine-targets.patch
use-system-headers.patch
tasje-fixes.patch
no-source-maps.patch.web
use-system-fonts.patch.web
element-desktop
"
options="net !check" # broken
# Avoid conflicting providers
sonameprefix="$pkgname:"
# secfixes:
# 1.11.30-r0:
# - CVE-2023-30609
# 1.11.26-r0:
# - CVE-2023-28103
# - CVE-2023-28427
# 1.11.7-r0:
# - CVE-2022-39249
# - CVE-2022-39250
# - CVE-2022-39251
# - CVE-2022-39236
# 1.11.4-r0:
# - CVE-2022-36059
# - CVE-2022-36060
# used by buildscripts (at least web's webpack)
export VERSION=$pkgver
export CARGO_PROFILE_RELEASE_OPT_LEVEL=2
export CARGO_PROFILE_RELEASE_STRIP="symbols"
export NODE_OPTIONS="--openssl-legacy-provider"
prepare() {
default_prepare
msg "Applying more patches"
for x in $source; do
case "$x" in
*.patch.web)
msg "$x"
patch -p1 -i "$srcdir"/$x -d "$srcdir"/element-web-$pkgver
;;
esac
done
rm -rf res/fonts
(
cd "$srcdir"/element-web-$pkgver
msg "Fetch element-web dependencies"
yarn install --frozen-lockfile --ignore-scripts --ignore-engines
jq '.show_labs_settings = true' < config.sample.json > config.json
)
ln -s "$srcdir"/element-web-$pkgver/webapp webapp
msg "Fetch element-desktop dependencies"
yarn install --frozen-lockfile --ignore-scripts
patch -p1 -i patches/@types+auto-launch+5.0.5.patch
}
build() {
(
cd "$srcdir"/element-web-$pkgver
msg "Build element-web"
NODE_ENV=production yarn build
)
msg "Build element-desktop"
yarn asar-webapp
# add "optional" native dependencies
# hak stands for hack
yarn run hak --target "$(uname -m)-alpine-linux-musl"
yarn build:ts
yarn build:res
# we need it as js to be of any use for tasje.
# fails with `yarn tsc`. https://github.com/electron-userland/electron-builder/issues/7961
swc compile electron-builder.ts --out-file electron-builder.mjs
yarn install --frozen-lockfile --ignore-scripts --production
npm rebuild keytar-forked --nodedir=/usr/include/electron/node_headers --build-from-source
find node_modules/keytar-forked/build/ -type f \
\! -path node_modules/keytar-forked/build/Release/keytar.node \
-delete
# stripping in build because it gets into asar
strip node_modules/keytar-forked/build/Release/keytar.node
tasje -c electron-builder.mjs pack
}
check() {
(
cd "$srcdir"/element-web-$pkgver
yarn test
)
}
package() {
local resources="dist/resources"
install -Dm644 $resources/app.asar "$pkgdir"/usr/lib/element-desktop/app.asar
install -Dm644 webapp.asar "$pkgdir"/usr/lib/element-desktop/webapp.asar
cp -r $resources/app.asar.unpacked "$pkgdir"/usr/lib/element-desktop/app.asar.unpacked
install -Dm644 $resources/build/icon.png "$pkgdir"/usr/lib/element-desktop/build/icon.png
install -Dm755 "$srcdir"/$pkgname "$pkgdir"/usr/bin/$pkgname
install -Dm644 dist/$pkgname.desktop "$pkgdir"/usr/share/applications/$pkgname.desktop
while read -r size; do
install -Dm644 dist/icons/$size.png "$pkgdir"/usr/share/icons/hicolor/$size/apps/$pkgname.png
done < dist/icons/size-list
}
sha512sums="
f302907165a35f4a4f069f5aec6bc28edeba3d09c75f483c818e3930ceb4e838e5bb91ad9d42019a11a661d6e656da3c1ff25507cbb281c69183aac7d499e882 element-desktop-1.12.3.tar.gz
b845ff71ca39d7ae4dca9bb55e821bfdf911b12de5d012ba55d598f3287046fb2b525bce608925a9fa8fa7d39a4ceed9b4213d5d1c1d0c9e6b9b72154c9a35a5 element-web-1.12.3.tar.gz
4747893ed3e43d3074e9afe1cdd668a6be0de073d439205fe8c38c5e0f4091cc76e3cd15d98818bea5139add29501d8d07e83c58e9da230a4ce5bb538d388f80 add-alpine-targets.patch
755b17f7b828eb6920c06a6950ad4e14c32c99d22e9c05fcef7a081b5d2034adb03db3958aa5209c99fb7201f4d888c2383fc9864c5e743dd33f8b5c4925acd7 use-system-headers.patch
a5d90dd1ec7aec0dc18b73eb3a6fd51ac1223e381c492d24e7dc0fd2ade955ac727cebbaff6ffa27c7e18d9acf712c709de3f886ee2ddf87ab3b028d3eb461c6 tasje-fixes.patch
ec635fde026f7fce8e8cc57960b5b9dcec4418416d4867ed47711422d48f068bb58a3c9ceb7715efc9c177beca3788da6b0babc9b689ea8c0724a0395f2b85f8 no-source-maps.patch.web
aaf46476bac403aa5204aa265fcf0654fad4c149fd74d0ec4273c051a5549943384cae3cdd62c5b78fdedfed55c11ecceb898b886e44165cbe7e30953a095cf9 use-system-fonts.patch.web
afc588311dc3b566a754e3e7fe6b37b99a06d47b8bbce0ed9acca8ef308fdab0bd1d41b406199e5cbdd86bdce695ff847cd8668857a235cbdc292ad8b899c063 element-desktop
"

View file

@ -0,0 +1,52 @@
--- a/scripts/hak/target.ts
+++ b/scripts/hak/target.ts
@@ -29,8 +29,10 @@
| "i686-unknown-linux-gnu"
| "x86_64-unknown-linux-musl"
| "x86_64-unknown-linux-gnu"
+ | "x86_64-alpine-linux-musl"
| "aarch64-unknown-linux-musl"
| "aarch64-unknown-linux-gnu"
+ | "aarch64-alpine-linux-musl"
| "powerpc64le-unknown-linux-musl"
| "powerpc64le-unknown-linux-gnu";
@@ -112,6 +114,13 @@
libC: MUSL,
};
+const x8664AlpineLinuxMusl: LinuxTarget = {
+ id: "x86_64-alpine-linux-musl",
+ platform: "linux",
+ arch: "x64",
+ libC: MUSL,
+};
+
const i686UnknownLinuxGnu: LinuxTarget = {
id: "i686-unknown-linux-gnu",
platform: "linux",
@@ -140,6 +149,13 @@
libC: MUSL,
};
+const aarch64AlpineLinuxMusl: LinuxTarget = {
+ id: "aarch64-alpine-linux-musl",
+ platform: "linux",
+ arch: "arm64",
+ libC: MUSL,
+};
+
const powerpc64leUnknownLinuxGnu: LinuxTarget = {
id: "powerpc64le-unknown-linux-gnu",
platform: "linux",
@@ -167,8 +183,10 @@
"i686-unknown-linux-gnu": i686UnknownLinuxGnu,
"x86_64-unknown-linux-musl": x8664UnknownLinuxMusl,
"x86_64-unknown-linux-gnu": x8664UnknownLinuxGnu,
+ "x86_64-alpine-linux-musl": x8664AlpineLinuxMusl,
"aarch64-unknown-linux-musl": aarch64UnknownLinuxMusl,
"aarch64-unknown-linux-gnu": aarch64UnknownLinuxGnu,
+ "aarch64-alpine-linux-musl": aarch64AlpineLinuxMusl,
"powerpc64le-unknown-linux-musl": powerpc64leUnknownLinuxMusl,
"powerpc64le-unknown-linux-gnu": powerpc64leUnknownLinuxGnu,
};

View file

@ -0,0 +1,3 @@
#!/bin/sh
exec electron /usr/lib/element-desktop/app.asar "$@"

View file

@ -0,0 +1,18 @@
--- ./webpack.config.js.orig
+++ ./webpack.config.js
@@ -102,15 +102,6 @@
}
const development = {};
- if (devMode) {
- // Embedded source maps for dev builds, can't use eval-source-map due to CSP
- development["devtool"] = "inline-source-map";
- } else {
- // High quality source maps in separate .map files which include the source. This doesn't bulk up the .js
- // payload file size, which is nice for performance but also necessary to get the bundle to a small enough
- // size that sentry will accept the upload.
- development["devtool"] = "source-map";
- }
// Resolve the directories for the js-sdk for later use. We resolve these early, so we
// don't have to call them over and over. We also resolve to the package.json instead of the src

View file

@ -0,0 +1,33 @@
directories in .hak/hakModules are already symlinked inside node_modules,
and as such are already being copied by default. this makes tasje fail with:
```
thread 'main' panicked at 'called `Result::unwrap()` on an `Err` value:
FileAlreadyWritten("/node_modules/keytar/package.json")', src/main.rs:200:18
```
console.log interferes with tasje, which reads config from node stdout
--- ./electron-builder.ts.orig
+++ ./electron-builder.ts
@@ -72,10 +72,6 @@
console.warn(`No VARIANT_PATH specified, using default variant configuration '${DEFAULT_VARIANT}':`);
}
-for (const key in variant) {
- console.log(`${key}: ${variant[key]}`);
-}
-
interface Configuration extends BaseConfiguration {
extraMetadata: Partial<Pick<Pkg, "version">> & ExtraMetadata;
linux: BaseConfiguration["linux"];
@@ -112,10 +108,6 @@
},
files: [
"package.json",
- {
- from: ".hak/hakModules",
- to: "node_modules",
- },
"lib/**",
],
extraResources: ["build/icon.*", "webapp.asar"],

View file

@ -0,0 +1,79 @@
--- a/src/vector/jitsi/index.pcss
+++ b/src/vector/jitsi/index.pcss
@@ -14,7 +14,7 @@
font-family: "Nunito";
font-style: normal;
font-weight: 400;
- src: url("$(res)/fonts/Nunito/Nunito-Regular.ttf") format("truetype");
+ src: local("Nunito Regular");
}
$dark-fg: #edf3ff;
--- a/res/themes/light/css/_fonts.pcss
+++ b/res/themes/light/css/_fonts.pcss
@@ -5,16 +5,16 @@
@font-face {
font-family: "Twemoji";
font-weight: 400;
- src: url("$(res)/fonts/Twemoji_Mozilla/TwemojiMozilla-colr.woff2") format("woff2");
+ src: local("Twemoji");
}
/* For at least Chrome on Windows 10, we have to explictly add extra weights for the emoji to appear in bold messages, etc. */
@font-face {
font-family: "Twemoji";
font-weight: 600;
- src: url("$(res)/fonts/Twemoji_Mozilla/TwemojiMozilla-colr.woff2") format("woff2");
+ src: local("Twemoji");
}
@font-face {
font-family: "Twemoji";
font-weight: 700;
- src: url("$(res)/fonts/Twemoji_Mozilla/TwemojiMozilla-colr.woff2") format("woff2");
+ src: local("Twemoji");
}
--- a/res/themes/legacy-light/css/_fonts.pcss
+++ b/res/themes/legacy-light/css/_fonts.pcss
@@ -23,17 +23,17 @@
font-family: "Nunito";
font-style: normal;
font-weight: 400;
- src: url("$(res)/fonts/Nunito/Nunito-Regular.ttf") format("truetype");
+ src: local("Nunito Regular");
}
@font-face {
font-family: "Nunito";
font-style: normal;
font-weight: 600;
- src: url("$(res)/fonts/Nunito/Nunito-SemiBold.ttf") format("truetype");
+ src: local("Nunito SemiBold");
}
@font-face {
font-family: "Nunito";
font-style: normal;
font-weight: 700;
- src: url("$(res)/fonts/Nunito/Nunito-Bold.ttf") format("truetype");
+ src: local("Nunito Bold");
}
--- ./src/theme.ts.orig
+++ ./src/theme.ts
@@ -7,20 +7,6 @@
Please see LICENSE files in the repository root for full details.
*/
-import "@fontsource/inter/400.css";
-import "@fontsource/inter/400-italic.css";
-import "@fontsource/inter/500.css";
-import "@fontsource/inter/500-italic.css";
-import "@fontsource/inter/600.css";
-import "@fontsource/inter/600-italic.css";
-import "@fontsource/inter/700.css";
-import "@fontsource/inter/700-italic.css";
-
-import "@fontsource/inconsolata/latin-ext-400.css";
-import "@fontsource/inconsolata/latin-400.css";
-import "@fontsource/inconsolata/latin-ext-700.css";
-import "@fontsource/inconsolata/latin-700.css";
-
import { logger } from "matrix-js-sdk/src/logger";
import { _t } from "./languageHandler";

View file

@ -0,0 +1,15 @@
--- a/scripts/hak/hakEnv.ts
+++ b/scripts/hak/hakEnv.ts
@@ -101,11 +101,10 @@
...process.env,
npm_config_arch: this.target.arch,
npm_config_target_arch: this.target.arch,
- npm_config_disturl: "https://electronjs.org/headers",
+ npm_config_nodedir: "/usr/include/electron/node_headers",
npm_config_runtime: this.runtime,
npm_config_target: this.runtimeVersion,
npm_config_build_from_source: "true",
- npm_config_devdir: path.join(os.homedir(), ".electron-gyp"),
};
}

View file

@ -1,27 +0,0 @@
# Contributor: Aiden Grossman <agrossman154@yahoo.com>
# Maintainer:
pkgname=fdm-materials
pkgver=5.2.2
pkgrel=1
pkgdesc="FDM Material Database"
url="https://github.com/Ultimaker/fdm_materials"
arch="noarch"
license="CC0-1.0"
makedepends="cmake samurai"
options="!check" # no checks provided
source="$pkgname-$pkgver.tar.gz::https://github.com/Ultimaker/fdm_materials/archive/refs/tags/$pkgver.tar.gz"
builddir="$srcdir/fdm_materials-$pkgver"
build() {
cmake -B build -G Ninja \
-DCMAKE_INSTALL_PREFIX=/usr
cmake --build build
}
package() {
DESTDIR="$pkgdir" cmake --install build
}
sha512sums="
73eefec8b7b88af73afc578ffba583480bda30309945b1720d7a1a075bd7ab3279599d53fe83f4c96695f294a5a3e11297abc334ca6cc9db163d4eb0fbdaf0f9 fdm-materials-5.2.2.tar.gz
"

View file

@ -1,8 +1,8 @@
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=freetube
pkgver=0.21.3
pkgrel=2
pkgver=0.23.12
pkgrel=1
pkgdesc="An open source desktop YouTube player built with privacy in mind."
arch="x86_64 aarch64" # blocked by electron
license="AGPL-3.0-only"
@ -50,7 +50,7 @@ package() {
}
sha512sums="
22e5ab677cd442d50237b2d62534698d8ad73a37e1731003dc23c4ea3da992b3cae936f0bb3a0a86cd4b7fba731c9fa53276cb0a6cd5bab213ff2a6c9006cb05 freetube-0.21.3.tar.gz
e19c7e8de0c6c5bbddcd3da73cd1907cae7157e8f44f550c4a34965b3b4f3c1a180c111a8c497d74a556d6d8e74e9fdd1ed6e064d4fc899f80712a1f187395ae freetube-0.23.12.tar.gz
2ce2effc794bb663789cefe968b5899122127983dbfa1b240aa33a2be383720b18204e6d01b4a550df72956f02b6636b79c93a58f470a970b09b770f5b8f2fc4 freetube.sh
d27cb896b65a7e8d52ffe86e5f74eed72b6cf976b28e1a13012d34c7eceba5ff6f20298017738dfa93c0336ffa52b8ee4da7e06b02747062898db7e678819526 tasje-dotdash.patch
"

View file

@ -1,40 +0,0 @@
# Contributor: Anjandev Momi <anjan@momi.ca>
# Maintainer: Anjandev Momi <anjan@momi.ca>
pkgname=libnest2d
pkgver=0.4
pkgrel=6
pkgdesc="2D irregular bin packaging and nesting library written in modern C++"
url="https://github.com/tamasmeszaros/libnest2d"
arch="noarch"
license="LGPL-3.0-only"
makedepends="samurai cmake clipper-dev boost-dev nlopt-dev"
subpackages="$pkgname-dev"
source="$pkgname-$pkgver.tar.gz::https://github.com/tamasmeszaros/libnest2d/archive/refs/tags/$pkgver.tar.gz
allow-disallowed-area.patch"
build() {
if [ "$CBUILD" != "$CHOST" ]; then
CMAKE_CROSSOPTS="-DCMAKE_SYSTEM_NAME=Linux -DCMAKE_HOST_SYSTEM_NAME=Linux"
fi
cmake -B build -G Ninja \
-DCMAKE_INSTALL_PREFIX=/usr \
-DCMAKE_INSTALL_LIBDIR=lib \
-DBUILD_SHARED_LIBS=True \
-DCMAKE_BUILD_TYPE=minsizerel \
$CMAKE_CROSSOPTS .
cmake --build build
}
check() {
cd build
CTEST_OUTPUT_ON_FAILURE=TRUE ctest
}
package() {
DESTDIR="$pkgdir" cmake --install build
}
sha512sums="
fadce18986b844eed13a581f84055df909a17407a0980deb6c7c24248a969a537a8840650bcfc673e61973810ce9a008acb599e3b8e00c9bff6b566ca41cd62c libnest2d-0.4.tar.gz
2e8cd3343c72c576ecb54960d7ad9f4f2322f822b19ac41850b3b28da95e97c2cefe7c67de6c97627df08cd5cdc1660ce4dfa95fe51f88e0ff5c066c8d785458 allow-disallowed-area.patch
"

View file

@ -1,124 +0,0 @@
From 2e91be2679b5efa0773292d9d0a2ae72255bb271 Mon Sep 17 00:00:00 2001
From: Ghostkeeper <rubend@tutanota.com>
Date: Tue, 6 Oct 2020 16:13:15 +0200
Subject: [PATCH 1/3] Allow for an item to be a disallowed area
url: https://github.com/tamasmeszaros/libnest2d/pull/18
Disallowed areas have slightly different behaviour from fixed items: Other items won't get packed closely around them. Implementation of that pending.
Contributes to issue CURA-7754.
---
include/libnest2d/nester.hpp | 16 ++++++++++++++++
1 file changed, 16 insertions(+)
diff --git a/include/libnest2d/nester.hpp b/include/libnest2d/nester.hpp
index 2f207d5..932a060 100644
--- a/include/libnest2d/nester.hpp
+++ b/include/libnest2d/nester.hpp
@@ -71,6 +71,15 @@ class _Item {
int binid_{BIN_ID_UNSET}, priority_{0};
bool fixed_{false};
+ /**
+ * \brief If this is a fixed area, indicates whether it is a disallowed area
+ * or a previously placed item.
+ *
+ * If this is a disallowed area, other objects will not get packed close
+ * together with this item. It only blocks other items in its area.
+ */
+ bool disallowed_{false};
+
public:
/// The type of the shape which was handed over as the template argument.
@@ -129,11 +138,18 @@ class _Item {
sh_(sl::create<RawShape>(std::move(contour), std::move(holes))) {}
inline bool isFixed() const noexcept { return fixed_; }
+ inline bool isDisallowedArea() const noexcept { return disallowed_; }
inline void markAsFixedInBin(int binid)
{
fixed_ = binid >= 0;
binid_ = binid;
}
+ inline void markAsDisallowedAreaInBin(int binid)
+ {
+ fixed_ = binid >= 0;
+ binid_ = binid;
+ disallowed_ = true;
+ }
inline void binId(int idx) { binid_ = idx; }
inline int binId() const noexcept { return binid_; }
From ff61049e59d3151462bca7ff2e2268c2b32731e7 Mon Sep 17 00:00:00 2001
From: Ghostkeeper <rubend@tutanota.com>
Date: Tue, 6 Oct 2020 16:14:36 +0200
Subject: [PATCH 2/3] Allow unsetting of being a disallowed area
If you set the bin to -1 or set the item to be a simple fixed item afterwards, it'll no longer be a disallowed area.
Contributes to issue CURA-7754.
---
include/libnest2d/nester.hpp | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/include/libnest2d/nester.hpp b/include/libnest2d/nester.hpp
index 932a060..54761a6 100644
--- a/include/libnest2d/nester.hpp
+++ b/include/libnest2d/nester.hpp
@@ -143,12 +143,13 @@ class _Item {
{
fixed_ = binid >= 0;
binid_ = binid;
+ disallowed_ = false;
}
inline void markAsDisallowedAreaInBin(int binid)
{
fixed_ = binid >= 0;
binid_ = binid;
- disallowed_ = true;
+ disallowed_ = fixed_;
}
inline void binId(int idx) { binid_ = idx; }
From 31391fd173249ad9b906390058e13b09238fadc8 Mon Sep 17 00:00:00 2001
From: Ghostkeeper <rubend@tutanota.com>
Date: Thu, 8 Oct 2020 11:06:58 +0200
Subject: [PATCH 3/3] Align items to their starting position if all placed
items are disallowed
We shouldn't align items to disallowed areas. So place them in the starting position according to the alignment property.
Lot of work to investigate. But very little code changes!
Contributes to issue CURA-7754.
---
include/libnest2d/placers/nfpplacer.hpp | 5 +++--
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/include/libnest2d/placers/nfpplacer.hpp b/include/libnest2d/placers/nfpplacer.hpp
index 96a8cff..b0ebb15 100644
--- a/include/libnest2d/placers/nfpplacer.hpp
+++ b/include/libnest2d/placers/nfpplacer.hpp
@@ -101,7 +101,7 @@ struct NfpPConfig {
* alignment with the candidate item or do anything else.
*
* \param remaining A container with the remaining items waiting to be
- * placed. You can use some features about the remaining items to alter to
+ * placed. You can use some features about the remaining items to alter the
* score of the current placement. If you know that you have to leave place
* for other items as well, that might influence your decision about where
* the current candidate should be placed. E.g. imagine three big circles
@@ -735,7 +735,8 @@ class _NofitPolyPlacer: public PlacerBoilerplate<_NofitPolyPlacer<RawShape, TBin
remlist.insert(remlist.end(), remaining.from, remaining.to);
}
- if(items_.empty()) {
+ if(std::all_of(items_.begin(), items_.end(),
+ [](const Item& item) { return item.isDisallowedArea(); })) {
setInitialPosition(item);
best_overfit = overfit(item.transformedShape(), bin_);
can_pack = best_overfit <= 0;

View file

@ -1,33 +0,0 @@
# Contributor: Alex Yam <alex@alexyam.com>
# Maintainer: Alex Yam <alex@alexyam.com>
pkgname=libspatialindex
pkgver=0_git20210205
_commit=8ee223632f95c81f49f5eb2d547ad973475c4601
pkgrel=1
pkgdesc="extensible framework for robust spatial indexing methods"
url="https://libspatialindex.org/"
arch="all"
license="MIT"
makedepends="cmake"
subpackages="$pkgname-dev"
source="$pkgname-$_commit.tar.gz::https://github.com/libspatialindex/libspatialindex/archive/$_commit.tar.gz"
builddir="$srcdir/$pkgname-$_commit"
build() {
cmake -B build \
-DCMAKE_BUILD_TYPE=MinSizeRel \
-DCMAKE_PREFIX_PATH=/usr \
-DCMAKE_INSTALL_PREFIX=/usr \
-DBUILD_TESTING=ON
cmake --build build
}
check() {
cd build && ctest
}
package() {
DESTDIR="$pkgdir" cmake --build build --target install
}
sha512sums="caf91aac77b75445e4fc4d0baedcd10c619b2097dfd841b00339d9ddd4b73db05b99de1d84be88f1083f4713a936cf110d5851523491f5a74c6f96e1d5795dbb libspatialindex-8ee223632f95c81f49f5eb2d547ad973475c4601.tar.gz"

View file

@ -1,7 +1,7 @@
# Contributor: Quillith <tag.quill@protonmail.com>
# Maintainer: Quillith <tag.quill@protonmail.com>
pkgname=nb
pkgver=7.12.1
pkgver=7.19.1
pkgrel=0
pkgdesc="Command line note-taking, bookmarking, archiving, and knowledge base application"
url="https://github.com/xwmx/nb"
@ -41,5 +41,5 @@ full() {
}
sha512sums="
ed3d41a809e39a19711c6c97c38216f17f144b8b474eb94aec4134f9756da03440073f3f6557acf8f7959d3d9fba6392d1d5f59e8b94d5269b7336b11353457e nb-7.12.1.tar.gz
fdfcedc5a32c1a5fe62b00141e25193bc33eee9249fef559938f2b4baf0bff5eb7cc792db3c664c68afb2ba2db84303432790ae5254a9cdd319ce4d9a9face9f nb-7.19.1.tar.gz
"

View file

@ -1,71 +0,0 @@
# Contributor: Anjandev Momi <anjan@momi.ca>
# Maintainer: Celeste <cielesti@protonmail.com>
maintainer="Celeste <cielesti@protonmail.com>"
pkgname=nlopt
pkgver=2.8.0
pkgrel=0
pkgdesc="Library for nonlinear optimization"
url="https://github.com/stevengj/nlopt"
arch="all"
license="LGPL-2.1-or-later"
makedepends="
cmake
guile-dev
python3-dev
samurai
swig
"
subpackages="
$pkgname-dev
$pkgname-doc
$pkgname-guile
"
source="$pkgname-$pkgver.tar.gz::https://github.com/stevengj/nlopt/archive/refs/tags/v$pkgver.tar.gz"
case "$CARCH" in
# octave unavailable on these 3 archs
s390x|riscv64|ppc64le) ;;
*)
makedepends="$makedepends octave-dev"
subpackages="$subpackages $pkgname-octave"
;;
esac
build() {
if [ "$CBUILD" != "$CHOST" ]; then
local crossopts="-DCMAKE_SYSTEM_NAME=Linux -DCMAKE_HOST_SYSTEM_NAME=Linux"
fi
cmake -B build -G Ninja \
-DCMAKE_INSTALL_PREFIX=/usr \
-DCMAKE_INSTALL_LIBDIR=lib \
-DBUILD_SHARED_LIBS=ON \
-DCMAKE_BUILD_TYPE=MinSizeRel \
$crossopts
cmake --build build
}
check() {
ctest --test-dir build --output-on-failure
}
package() {
DESTDIR="$pkgdir" cmake --install build
}
guile() {
pkgdesc="$pkgdesc (Guile bindings)"
depends="$pkgname=$pkgver-r$pkgrel guile"
amove usr/lib/guile usr/share/guile
}
octave() {
pkgdesc="$pkgdesc (Octave bindings)"
depends="$pkgname=$pkgver-r$pkgrel octave"
amove usr/lib/octave usr/share/octave
}
sha512sums="
cb294caa5532e11ae0d22ed849705920bbae79f712144c840a5ca865ef8e6a15c6c9540c81ced0c3c05b9f44c360d50f74e235e69d893be34b7e1c5599f07c71 nlopt-2.8.0.tar.gz
"

View file

@ -1,168 +0,0 @@
# Contributor: Ariadne Conill <ariadne@dereferenced.org>
# Maintainer: Timo Teras <timo.teras@iki.fi>
pkgname=openssl1.1-compat
pkgver=1.1.1w
_abiver=${pkgver%.*}
pkgrel=1
pkgdesc="toolkit for transport layer security (TLS) - version 1.1"
url="https://www.openssl.org/"
arch="all"
license="OpenSSL"
replaces="libressl"
depends_dev="!openssl-dev"
makedepends_build="perl"
makedepends_host="linux-headers"
makedepends="$makedepends_host $makedepends_build"
subpackages="$pkgname-dbg $pkgname-libs-static:_static $pkgname-dev
libcrypto$_abiver:_libcrypto libssl$_abiver:_libssl"
source="https://www.openssl.org/source/openssl-$pkgver.tar.gz
man-section.patch
ppc64.patch
"
builddir="$srcdir/openssl-$pkgver"
pcprefix="openssl$_abiver:pc:"
# secfixes:
# 1.1.1u-r1:
# - CVE-2023-3446
# 1.1.1t-r2:
# - CVE-2023-0465
# 1.1.1t-r1:
# - CVE-2023-0464
# 1.1.1t-r0:
# - CVE-2022-4304
# - CVE-2022-4450
# - CVE-2023-0215
# - CVE-2023-0286
# 1.1.1q-r0:
# - CVE-2022-2097
# 1.1.1n-r0:
# - CVE-2022-0778
# 1.1.1l-r0:
# - CVE-2021-3711
# - CVE-2021-3712
# 1.1.1k-r0:
# - CVE-2021-3449
# - CVE-2021-3450
# 1.1.1j-r0:
# - CVE-2021-23841
# - CVE-2021-23840
# - CVE-2021-23839
# 1.1.1i-r0:
# - CVE-2020-1971
# 1.1.1g-r0:
# - CVE-2020-1967
# 1.1.1d-r3:
# - CVE-2019-1551
# 1.1.1d-r1:
# - CVE-2019-1547
# - CVE-2019-1549
# - CVE-2019-1563
# 1.1.1b-r1:
# - CVE-2019-1543
# 1.1.1a-r0:
# - CVE-2018-0734
# - CVE-2018-0735
# 0:
# - CVE-2022-1292
# - CVE-2022-2068
build() {
local _target _optflags
# openssl will prepend crosscompile always core CC et al
CC=${CC#${CROSS_COMPILE}}
CXX=${CXX#${CROSS_COMPILE}}
CPP=${CPP#${CROSS_COMPILE}}
# determine target OS for openssl
case "$CARCH" in
aarch64*) _target="linux-aarch64" ;;
arm*) _target="linux-armv4" ;;
ppc) _target="linux-ppc" ;;
ppc64) _target="linux-ppc64" ;;
ppc64le) _target="linux-ppc64le" ;;
x86) _target="linux-elf" ;;
x86_64) _target="linux-x86_64"; _optflags="enable-ec_nistp_64_gcc_128" ;;
s390x) _target="linux64-s390x";;
riscv64) _target="linux-generic64";;
loongarch64) _target="linux-generic64";;
*) msg "Unable to determine architecture from (CARCH=$CARCH)" ; return 1 ;;
esac
# Configure assumes --options are for it, so can't use
# gcc's --sysroot fake this by overriding CC
[ -n "$CBUILDROOT" ] && CC="$CC --sysroot=$CBUILDROOT"
# when cross building do not enable threads as libatomic is not avaiable
if [ "$CBUILD" != "$CHOST" ]; then
case $CARCH in
riscv64) _optflags="$_optflags no-threads";;
esac
fi
perl ./Configure \
$_target \
--prefix=/usr \
--libdir=/usr/lib \
--openssldir=/etc/ssl1.1 \
shared \
no-zlib \
no-async \
no-comp \
no-idea \
no-mdc2 \
no-rc5 \
no-ec2m \
no-sm2 \
no-sm4 \
no-ssl2 \
no-ssl3 \
no-seed \
no-weak-ssl-ciphers \
$_optflags \
$CPPFLAGS \
$CFLAGS \
$LDFLAGS -Wa,--noexecstack
make
}
check() {
# AFALG tests have a sporadic test failure, just delete the broken
# test for now.
rm -f test/recipes/30-test_afalg.t
make test
}
package() {
make DESTDIR="$pkgdir" install_sw install_ssldirs
# remove the script c_rehash
rm "$pkgdir"/usr/bin/c_rehash
mv -f "$pkgdir"/usr/bin/openssl "$pkgdir"/usr/bin/openssl$_abiver
}
_libcrypto() {
pkgdesc="Crypto library from openssl"
replaces="libressl2.7-libcrypto"
amove etc
amove usr/lib/libcrypto*
amove usr/lib/engines-$_abiver
}
_libssl() {
pkgdesc="SSL shared libraries"
amove usr/lib/libssl*
}
_static() {
default_static
}
sha512sums="
b4c625fe56a4e690b57b6a011a225ad0cb3af54bd8fb67af77b5eceac55cc7191291d96a660c5b568a08a2fbf62b4612818e7cca1bb95b2b6b4fc649b0552b6d openssl-1.1.1w.tar.gz
43c3255118db6f5f340dc865c0f25ccbcafe5bf7507585244ca59b4d27daf533d6c3171aa32a8685cbb6200104bec535894b633de13feaadff87ab86739a445a man-section.patch
e040f23770d52b988578f7ff84d77563340f37c026db7643db8e4ef18e795e27d10cb42cb8656da4d9c57a28283a2828729d70f940edc950c3422a54fea55509 ppc64.patch
"

View file

@ -1,54 +0,0 @@
From: Debian OpenSSL Team <pkg-openssl-devel@lists.alioth.debian.org>
Date: Sun, 5 Nov 2017 15:09:09 +0100
Subject: man-section
---
Configurations/unix-Makefile.tmpl | 6 ++++--
util/process_docs.pl | 3 ++-
2 files changed, 6 insertions(+), 3 deletions(-)
diff --git a/Configurations/unix-Makefile.tmpl b/Configurations/unix-Makefile.tmpl
index 1292053546f5..c034d21884d8 100644
--- a/Configurations/unix-Makefile.tmpl
+++ b/Configurations/unix-Makefile.tmpl
@@ -183,7 +183,8 @@ HTMLDIR=$(DOCDIR)/html
# MANSUFFIX is for the benefit of anyone who may want to have a suffix
# appended after the manpage file section number. "ssl" is popular,
# resulting in files such as config.5ssl rather than config.5.
-MANSUFFIX=
+MANSUFFIX=ssl
+MANSECTION=SSL
HTMLSUFFIX=html
# For "optional" echo messages, to get "real" silence
@@ -726,7 +727,8 @@ uninstall_runtime: uninstall_programs uninstall_runtime_libs
@[ -n "$(INSTALLTOP)" ] || (echo INSTALLTOP should not be empty; exit 1)
@$(ECHO) "*** Installing manpages"
$(PERL) $(SRCDIR)/util/process_docs.pl \
- "--destdir=$(DESTDIR)$(MANDIR)" --type=man --suffix=$(MANSUFFIX)
+ "--destdir=$(DESTDIR)$(MANDIR)" --type=man --suffix=$(MANSUFFIX) \
+ --mansection=$(MANSECTION)
uninstall_man_docs:
@$(ECHO) "*** Uninstalling manpages"
diff --git a/util/process_docs.pl b/util/process_docs.pl
index 30b149eb8fcc..424155ea808e 100755
--- a/util/process_docs.pl
+++ b/util/process_docs.pl
@@ -37,6 +37,7 @@ GetOptions(\%options,
'type=s', # The result type, 'man' or 'html'
'suffix:s', # Suffix to add to the extension.
# Only used with type=man
+ 'mansection:s', # Section to put to manpage in
'remove', # To remove files rather than writing them
'dry-run|n', # Only output file names on STDOUT
'debug|D+',
@@ -97,7 +98,7 @@ foreach my $section (sort @{$options{section}}) {
my $name = uc $podname;
my $suffix = { man => ".$podinfo{section}".($options{suffix} // ""),
html => ".html" } -> {$options{type}};
- my $generate = { man => "pod2man --name=$name --section=$podinfo{section} --center=OpenSSL --release=$config{version} \"$podpath\"",
+ my $generate = { man => "pod2man --name=$name --section=$podinfo{section}$options{mansection} --center=OpenSSL --release=$config{version} \"$podpath\"",
html => "pod2html \"--podroot=$options{sourcedir}\" --htmldir=$updir --podpath=man1:man3:man5:man7 \"--infile=$podpath\" \"--title=$podname\" --quiet"
} -> {$options{type}};
my $output_dir = catdir($options{destdir}, "man$podinfo{section}");

View file

@ -1,96 +0,0 @@
From 34ab13b7d8e3e723adb60be8142e38b7c9cd382a Mon Sep 17 00:00:00 2001
From: Andy Polyakov <appro@openssl.org>
Date: Sun, 5 May 2019 18:25:50 +0200
Subject: [PATCH] crypto/perlasm/ppc-xlate.pl: add linux64v2 flavour
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
This is a big endian ELFv2 configuration. ELFv2 was already being
used for little endian, and big endian was traditionally ELFv1
but there are practical configurations that use ELFv2 with big
endian nowadays (Adélie Linux, Void Linux, possibly Gentoo, etc.)
Reviewed-by: Paul Dale <paul.dale@oracle.com>
Reviewed-by: Richard Levitte <levitte@openssl.org>
(Merged from https://github.com/openssl/openssl/pull/8883)
---
crypto/perlasm/ppc-xlate.pl | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/crypto/perlasm/ppc-xlate.pl b/crypto/perlasm/ppc-xlate.pl
index e52f2f6ea62..5fcd0526dff 100755
--- a/crypto/perlasm/ppc-xlate.pl
+++ b/crypto/perlasm/ppc-xlate.pl
@@ -49,7 +49,7 @@
/osx/ && do { $name = "_$name";
last;
};
- /linux.*(32|64le)/
+ /linux.*(32|64(le|v2))/
&& do { $ret .= ".globl $name";
if (!$$type) {
$ret .= "\n.type $name,\@function";
@@ -80,7 +80,7 @@
};
my $text = sub {
my $ret = ($flavour =~ /aix/) ? ".csect\t.text[PR],7" : ".text";
- $ret = ".abiversion 2\n".$ret if ($flavour =~ /linux.*64le/);
+ $ret = ".abiversion 2\n".$ret if ($flavour =~ /linux.*64(le|v2)/);
$ret;
};
my $machine = sub {
@@ -186,7 +186,7 @@
# Some ABIs specify vrsave, special-purpose register #256, as reserved
# for system use.
-my $no_vrsave = ($flavour =~ /aix|linux64le/);
+my $no_vrsave = ($flavour =~ /aix|linux64(le|v2)/);
my $mtspr = sub {
my ($f,$idx,$ra) = @_;
if ($idx == 256 && $no_vrsave) {
@@ -318,7 +318,7 @@ sub vfour {
if ($label) {
my $xlated = ($GLOBALS{$label} or $label);
print "$xlated:";
- if ($flavour =~ /linux.*64le/) {
+ if ($flavour =~ /linux.*64(le|v2)/) {
if ($TYPES{$label} =~ /function/) {
printf "\n.localentry %s,0\n",$xlated;
}
From 098404128383ded87ba390dd74ecd9e2ffa6f530 Mon Sep 17 00:00:00 2001
From: Andy Polyakov <appro@openssl.org>
Date: Sun, 5 May 2019 18:30:55 +0200
Subject: [PATCH] Configure: use ELFv2 ABI on some ppc64 big endian systems
If _CALL_ELF is defined to be 2, it's an ELFv2 system.
Conditionally switch to the v2 perlasm scheme.
Reviewed-by: Paul Dale <paul.dale@oracle.com>
Reviewed-by: Richard Levitte <levitte@openssl.org>
(Merged from https://github.com/openssl/openssl/pull/8883)
---
Configure | 10 +++++++---
1 file changed, 7 insertions(+), 3 deletions(-)
diff --git a/Configure b/Configure
index 22082deb4c7..e303d98deb3 100755
--- a/Configure
+++ b/Configure
@@ -1402,8 +1402,15 @@
my %predefined_C = compiler_predefined($config{CROSS_COMPILE}.$config{CC});
my %predefined_CXX = $config{CXX}
? compiler_predefined($config{CROSS_COMPILE}.$config{CXX})
: ();
+unless ($disabled{asm}) {
+ # big endian systems can use ELFv2 ABI
+ if ($target eq "linux-ppc64") {
+ $target{perlasm_scheme} = "linux64v2" if ($predefined_C{_CALL_ELF} == 2);
+ }
+}
+
# Check for makedepend capabilities.
if (!$disabled{makedepend}) {
if ($config{target} =~ /^(VC|vms)-/) {

View file

@ -0,0 +1,23 @@
diff --git a/serial/SerialPortManager.cpp.orig b/serial/SerialPortManager.cpp
index 22565b5..d444eee 100644
--- a/serial/SerialPortManager.cpp.orig
+++ b/serial/SerialPortManager.cpp
@@ -1945,8 +1945,9 @@ void SerialPortManager::attemptRecovery()
}
} else {
qCWarning(log_core_serial) << "Serial port recovery attempt failed";
- if (eventCallback) {
- eventCallback->onStatusUpdate(QString("Recovery attempt %1 failed").arg(m_connectionRetryCount));
+ if (eventCallback) {
+ int crc = m_connectionRetryCount;
+ eventCallback->onStatusUpdate(QString("Recovery attempt %1 failed").arg(crc));
}
// Try again if we haven't exceeded max attempts
@@ -2034,4 +2035,4 @@ void SerialPortManager::applyCommandBasedBaudrateChange(int baudRate, const QStr
} else {
qCWarning(log_core_serial) << logPrefix << "Failed to apply user selected baudrate:" << baudRate;
}
-}
\ No newline at end of file
+}

View file

@ -0,0 +1,586 @@
From 4478cfb004d3db7797e99fd3bdb23bf880a9c85b Mon Sep 17 00:00:00 2001
From: John Lane <1786613+johnlane@users.noreply.github.com>
Date: Wed, 3 Dec 2025 21:19:33 +0000
Subject: [PATCH 1/2] Remove additional backslashes and add missing cpp and h
files
---
openterfaceQT.pro | 28 +++++++++++++++++++---------
1 file changed, 19 insertions(+), 9 deletions(-)
diff --git a/openterfaceQT.pro b/openterfaceQT.pro
index 3460dbac..f0d878e8 100644
--- a/openterfaceQT.pro
+++ b/openterfaceQT.pro
@@ -84,16 +84,26 @@ SOURCES += main.cpp \
!win32 {
SOURCES += host/backend/ffmpegbackendhandler.cpp \
host/backend/gstreamerbackendhandler.cpp \
- host/backend/gstreamer/sinkselector.cpp \\
- host/backend/gstreamer/queueconfigurator.cpp \\
- host/backend/gstreamer/videooverlaymanager.cpp \\
- host/backend/gstreamer/pipelinebuilder.cpp
+ host/backend/gstreamer/pipelinefactory.cpp \
+ host/backend/gstreamer/externalgstrunner.cpp \
+ host/backend/gstreamer/inprocessgstrunner.cpp \
+ host/backend/gstreamer/sinkselector.cpp \
+ host/backend/gstreamer/queueconfigurator.cpp \
+ host/backend/gstreamer/videooverlaymanager.cpp \
+ host/backend/gstreamer/pipelinebuilder.cpp \
+ host/backend/gstreamer/recordingmanager.cpp \
+ host/backend/gstreamer/gstreamerhelpers.cpp
HEADERS += host/backend/ffmpegbackendhandler.h \
host/backend/gstreamerbackendhandler.h \
- host/backend/gstreamer/sinkselector.h \\
- host/backend/gstreamer/queueconfigurator.h \\
- host/backend/gstreamer/videooverlaymanager.h \\
- host/backend/gstreamer/pipelinebuilder.h
+ host/backend/gstreamer/pipelinefactory.h \
+ host/backend/gstreamer/externalgstrunner.h \
+ host/backend/gstreamer/inprocessgstrunner.h \
+ host/backend/gstreamer/sinkselector.h \
+ host/backend/gstreamer/queueconfigurator.h \
+ host/backend/gstreamer/videooverlaymanager.h \
+ host/backend/gstreamer/pipelinebuilder.h \
+ host/backend/gstreamer/recordingmanager.h \
+ host/backend/gstreamer/gstreamerhelpers.h
}
@@ -263,4 +273,4 @@ TRANSLATIONS += config/languages/openterface_en.ts \
config/languages/openterface_se.ts \
config/languages/openterface_de.ts \
config/languages/openterface_zh.ts
- # Add more languages here
\ No newline at end of file
+ # Add more languages here
From ccd8f51fe510684439edf0d5f8083e4dd1423836 Mon Sep 17 00:00:00 2001
From: John Lane <1786613+johnlane@users.noreply.github.com>
Date: Wed, 3 Dec 2025 21:26:20 +0000
Subject: [PATCH 2/2] De-duplicate logging categories
---
host/backend/gstreamer/gstreamerhelpers.cpp | 22 ++--
host/backend/gstreamer/queueconfigurator.cpp | 8 +-
.../backend/gstreamer/videooverlaymanager.cpp | 118 +++++++++---------
3 files changed, 74 insertions(+), 74 deletions(-)
diff --git a/host/backend/gstreamer/gstreamerhelpers.cpp b/host/backend/gstreamer/gstreamerhelpers.cpp
index 6fc1dd6e..4739da17 100644
--- a/host/backend/gstreamer/gstreamerhelpers.cpp
+++ b/host/backend/gstreamer/gstreamerhelpers.cpp
@@ -4,7 +4,7 @@
#include <QDebug>
#include <QLoggingCategory>
-Q_LOGGING_CATEGORY(log_gstreamer_backend, "opf.backend.gstreamer")
+Q_LOGGING_CATEGORY(log_gstreamer_gstreamerhelpers, "opf.backend.gstreamerhelpers")
using namespace Openterface::GStreamer::GstHelpers;
@@ -16,7 +16,7 @@ bool Openterface::GStreamer::GstHelpers::setPipelineStateWithTimeout(void* eleme
{
if (!elementPtr) {
if (outError) *outError = QStringLiteral("Element pointer is null");
- qCWarning(log_gstreamer_backend) << "setPipelineStateWithTimeout: element pointer is null";
+ qCWarning(log_gstreamer_gstreamerhelpers) << "setPipelineStateWithTimeout: element pointer is null";
return false;
}
@@ -24,7 +24,7 @@ bool Openterface::GStreamer::GstHelpers::setPipelineStateWithTimeout(void* eleme
GstStateChangeReturn ret = gst_element_set_state(element, static_cast<GstState>(targetState));
if (ret == GST_STATE_CHANGE_FAILURE) {
- qCCritical(log_gstreamer_backend) << "Failed to set element state to" << targetState;
+ qCCritical(log_gstreamer_gstreamerhelpers) << "Failed to set element state to" << targetState;
// Try to pull any error from the bus for diagnostics
// Caller may pass a bus to parseAndLogGstErrorMessage, but we don't have it here.
if (outError) *outError = QStringLiteral("Failed to set state (GST_STATE_CHANGE_FAILURE)");
@@ -35,13 +35,13 @@ bool Openterface::GStreamer::GstHelpers::setPipelineStateWithTimeout(void* eleme
ret = gst_element_get_state(element, &state, &pending, static_cast<GstClockTime>(timeoutMs) * GST_MSECOND);
if (ret == GST_STATE_CHANGE_FAILURE) {
if (outError) *outError = QStringLiteral("State change failure");
- qCCritical(log_gstreamer_backend) << "State change failure waiting for target state";
+ qCCritical(log_gstreamer_gstreamerhelpers) << "State change failure waiting for target state";
return false;
}
if (state != static_cast<GstState>(targetState)) {
if (outError) *outError = QStringLiteral("Element did not reach target state in timeout");
- qCCritical(log_gstreamer_backend) << "Element failed to reach state" << targetState << "(current:" << state << ", pending:" << pending << ")";
+ qCCritical(log_gstreamer_gstreamerhelpers) << "Element failed to reach state" << targetState << "(current:" << state << ", pending:" << pending << ")";
return false;
}
@@ -51,14 +51,14 @@ bool Openterface::GStreamer::GstHelpers::setPipelineStateWithTimeout(void* eleme
void Openterface::GStreamer::GstHelpers::parseAndLogGstErrorMessage(void* busPtr, const char* context)
{
if (!busPtr) {
- qCWarning(log_gstreamer_backend) << "Bus not available for error details" << (context ? context : "");
+ qCWarning(log_gstreamer_gstreamerhelpers) << "Bus not available for error details" << (context ? context : "");
return;
}
GstBus* bus = static_cast<GstBus*>(busPtr);
GstMessage* msg = gst_bus_pop_filtered(bus, GST_MESSAGE_ERROR);
if (!msg) {
- qCDebug(log_gstreamer_backend) << "No error message available on bus" << (context ? context : "");
+ qCDebug(log_gstreamer_gstreamerhelpers) << "No error message available on bus" << (context ? context : "");
return;
}
@@ -66,8 +66,8 @@ void Openterface::GStreamer::GstHelpers::parseAndLogGstErrorMessage(void* busPtr
gchar* debug_info = nullptr;
gst_message_parse_error(msg, &error, &debug_info);
- qCCritical(log_gstreamer_backend) << "GStreamer Error:" << (error ? error->message : "Unknown") << (context ? context : "");
- qCCritical(log_gstreamer_backend) << "Debug info:" << (debug_info ? debug_info : "None");
+ qCCritical(log_gstreamer_gstreamerhelpers) << "GStreamer Error:" << (error ? error->message : "Unknown") << (context ? context : "");
+ qCCritical(log_gstreamer_gstreamerhelpers) << "Debug info:" << (debug_info ? debug_info : "None");
if (error) g_error_free(error);
if (debug_info) g_free(debug_info);
@@ -79,13 +79,13 @@ void Openterface::GStreamer::GstHelpers::parseAndLogGstErrorMessage(void* busPtr
bool Openterface::GStreamer::GstHelpers::setPipelineStateWithTimeout(void* /*elementPtr*/, int /*targetState*/, int /*timeoutMs*/, QString* outError)
{
if (outError) *outError = QStringLiteral("GStreamer not available in this build");
- qCWarning(log_gstreamer_backend) << "setPipelineStateWithTimeout called but GStreamer is not compiled in";
+ qCWarning(log_gstreamer_gstreamerhelpers) << "setPipelineStateWithTimeout called but GStreamer is not compiled in";
return false;
}
void Openterface::GStreamer::GstHelpers::parseAndLogGstErrorMessage(void* /*busPtr*/, const char* context)
{
- qCDebug(log_gstreamer_backend) << "GStreamer not compiled in - no bus to parse" << (context ? context : "");
+ qCDebug(log_gstreamer_gstreamerhelpers) << "GStreamer not compiled in - no bus to parse" << (context ? context : "");
}
#endif // HAVE_GSTREAMER
diff --git a/host/backend/gstreamer/queueconfigurator.cpp b/host/backend/gstreamer/queueconfigurator.cpp
index b7bea42e..12290193 100644
--- a/host/backend/gstreamer/queueconfigurator.cpp
+++ b/host/backend/gstreamer/queueconfigurator.cpp
@@ -8,7 +8,7 @@
#include <gst/gst.h>
#endif
-Q_LOGGING_CATEGORY(log_gstreamer_backend, "opf.backend.gstreamer")
+Q_LOGGING_CATEGORY(log_gstreamer_queueconfigurator, "opf.backend.queueconfigurator")
using namespace Openterface::GStreamer;
@@ -26,10 +26,10 @@ void QueueConfigurator::configureDisplayQueue(void* pipeline)
"max-size-time", G_GUINT64_CONSTANT(100000000), // 100ms
"leaky", 2, // GST_QUEUE_LEAK_DOWNSTREAM
NULL);
- qCDebug(log_gstreamer_backend) << "✓ Configured display queue with higher priority for qtsink";
+ qCDebug(log_gstreamer_queueconfigurator) << "✓ Configured display queue with higher priority for qtsink";
gst_object_unref(displayQueue);
} else {
- qCDebug(log_gstreamer_backend) << "Display queue element not found (no named display-queue)";
+ qCDebug(log_gstreamer_queueconfigurator) << "Display queue element not found (no named display-queue)";
}
#else
Q_UNUSED(pipeline);
@@ -50,7 +50,7 @@ void QueueConfigurator::configureRecordingQueue(void* pipeline)
"max-size-time", G_GUINT64_CONSTANT(500000000), // 500ms
"leaky", 1, // GST_QUEUE_LEAK_UPSTREAM
NULL);
- qCDebug(log_gstreamer_backend) << "✓ Configured recording queue with lower priority relative to display";
+ qCDebug(log_gstreamer_queueconfigurator) << "✓ Configured recording queue with lower priority relative to display";
gst_object_unref(recordingQueue);
}
#else
diff --git a/host/backend/gstreamer/videooverlaymanager.cpp b/host/backend/gstreamer/videooverlaymanager.cpp
index d8bf42f3..2dbe3bbf 100644
--- a/host/backend/gstreamer/videooverlaymanager.cpp
+++ b/host/backend/gstreamer/videooverlaymanager.cpp
@@ -22,7 +22,7 @@ static int x11_overlay_error_handler_local(Display* display, XErrorEvent* error)
}
#endif
-Q_LOGGING_CATEGORY(log_gstreamer_backend, "opf.backend.gstreamer")
+Q_LOGGING_CATEGORY(log_gstreamer_videooverlaymanager, "opf.backend.videooverlaymanager")
using namespace Openterface::GStreamer;
@@ -30,36 +30,36 @@ bool VideoOverlayManager::embedVideoInWidget(void* pipeline, QWidget* widget)
{
#ifdef HAVE_GSTREAMER
if (!widget || !pipeline) {
- qCWarning(log_gstreamer_backend) << "Cannot embed video: widget or pipeline is null";
+ qCWarning(log_gstreamer_videooverlaymanager) << "Cannot embed video: widget or pipeline is null";
return false;
}
GstElement* videoSink = gst_bin_get_by_name(GST_BIN(pipeline), "videosink");
if (!videoSink) {
- qCWarning(log_gstreamer_backend) << "No video sink element named 'videosink' found in pipeline";
+ qCWarning(log_gstreamer_videooverlaymanager) << "No video sink element named 'videosink' found in pipeline";
videoSink = gst_bin_get_by_interface(GST_BIN(pipeline), GST_TYPE_VIDEO_OVERLAY);
if (!videoSink) {
- qCWarning(log_gstreamer_backend) << "No video overlay interface found in pipeline either";
+ qCWarning(log_gstreamer_videooverlaymanager) << "No video overlay interface found in pipeline either";
return false;
}
}
WId winId = widget->winId();
if (winId) {
- qCDebug(log_gstreamer_backend) << "Embedding video in widget with window ID:" << winId;
+ qCDebug(log_gstreamer_videooverlaymanager) << "Embedding video in widget with window ID:" << winId;
gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(videoSink), winId);
gst_object_unref(videoSink);
- qCDebug(log_gstreamer_backend) << "Video embedded in widget successfully";
+ qCDebug(log_gstreamer_videooverlaymanager) << "Video embedded in widget successfully";
return true;
} else {
- qCWarning(log_gstreamer_backend) << "Widget window ID is null, cannot embed video";
+ qCWarning(log_gstreamer_videooverlaymanager) << "Widget window ID is null, cannot embed video";
gst_object_unref(videoSink);
return false;
}
#else
Q_UNUSED(pipeline)
Q_UNUSED(widget)
- qCDebug(log_gstreamer_backend) << "Using autovideosink for video output (no in-process GStreamer)";
+ qCDebug(log_gstreamer_videooverlaymanager) << "Using autovideosink for video output (no in-process GStreamer)";
return true;
#endif
}
@@ -68,36 +68,36 @@ bool VideoOverlayManager::embedVideoInGraphicsView(void* pipeline, QGraphicsView
{
#ifdef HAVE_GSTREAMER
if (!view || !pipeline) {
- qCWarning(log_gstreamer_backend) << "Cannot embed video: graphics view or pipeline is null";
+ qCWarning(log_gstreamer_videooverlaymanager) << "Cannot embed video: graphics view or pipeline is null";
return false;
}
GstElement* videoSink = gst_bin_get_by_name(GST_BIN(pipeline), "videosink");
if (!videoSink) {
- qCWarning(log_gstreamer_backend) << "No video sink element named 'videosink' found in pipeline";
+ qCWarning(log_gstreamer_videooverlaymanager) << "No video sink element named 'videosink' found in pipeline";
videoSink = gst_bin_get_by_interface(GST_BIN(pipeline), GST_TYPE_VIDEO_OVERLAY);
if (!videoSink) {
- qCWarning(log_gstreamer_backend) << "No video overlay interface found in pipeline either";
+ qCWarning(log_gstreamer_videooverlaymanager) << "No video overlay interface found in pipeline either";
return false;
}
}
WId winId = view->winId();
if (winId) {
- qCDebug(log_gstreamer_backend) << "Embedding video in graphics view with window ID:" << winId;
+ qCDebug(log_gstreamer_videooverlaymanager) << "Embedding video in graphics view with window ID:" << winId;
gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(videoSink), winId);
gst_object_unref(videoSink);
- qCDebug(log_gstreamer_backend) << "Video embedded in graphics view successfully";
+ qCDebug(log_gstreamer_videooverlaymanager) << "Video embedded in graphics view successfully";
return true;
} else {
- qCWarning(log_gstreamer_backend) << "Graphics view window ID is null, cannot embed video";
+ qCWarning(log_gstreamer_videooverlaymanager) << "Graphics view window ID is null, cannot embed video";
gst_object_unref(videoSink);
return false;
}
#else
Q_UNUSED(pipeline)
Q_UNUSED(view)
- qCDebug(log_gstreamer_backend) << "Using autovideosink for video output (no in-process GStreamer)";
+ qCDebug(log_gstreamer_videooverlaymanager) << "Using autovideosink for video output (no in-process GStreamer)";
return true;
#endif
}
@@ -106,36 +106,36 @@ bool VideoOverlayManager::embedVideoInVideoPane(void* pipeline, ::VideoPane* vid
{
#ifdef HAVE_GSTREAMER
if (!videoPane || !pipeline) {
- qCWarning(log_gstreamer_backend) << "Cannot embed video: VideoPane or pipeline is null";
+ qCWarning(log_gstreamer_videooverlaymanager) << "Cannot embed video: VideoPane or pipeline is null";
return false;
}
GstElement* videoSink = gst_bin_get_by_name(GST_BIN(pipeline), "videosink");
if (!videoSink) {
- qCWarning(log_gstreamer_backend) << "No video sink element named 'videosink' found in pipeline";
+ qCWarning(log_gstreamer_videooverlaymanager) << "No video sink element named 'videosink' found in pipeline";
videoSink = gst_bin_get_by_interface(GST_BIN(pipeline), GST_TYPE_VIDEO_OVERLAY);
if (!videoSink) {
- qCWarning(log_gstreamer_backend) << "No video overlay interface found in pipeline either";
+ qCWarning(log_gstreamer_videooverlaymanager) << "No video overlay interface found in pipeline either";
return false;
}
}
WId winId = videoPane->getVideoOverlayWindowId();
if (winId) {
- qCDebug(log_gstreamer_backend) << "Embedding video in VideoPane overlay with window ID:" << winId;
+ qCDebug(log_gstreamer_videooverlaymanager) << "Embedding video in VideoPane overlay with window ID:" << winId;
gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(videoSink), winId);
gst_object_unref(videoSink);
- qCDebug(log_gstreamer_backend) << "Video embedded in VideoPane overlay successfully";
+ qCDebug(log_gstreamer_videooverlaymanager) << "Video embedded in VideoPane overlay successfully";
return true;
} else {
- qCWarning(log_gstreamer_backend) << "VideoPane overlay window ID is null, cannot embed video";
+ qCWarning(log_gstreamer_videooverlaymanager) << "VideoPane overlay window ID is null, cannot embed video";
gst_object_unref(videoSink);
return false;
}
#else
Q_UNUSED(pipeline)
Q_UNUSED(videoPane)
- qCDebug(log_gstreamer_backend) << "Using autovideosink for video output (no in-process GStreamer)";
+ qCDebug(log_gstreamer_videooverlaymanager) << "Using autovideosink for video output (no in-process GStreamer)";
return true;
#endif
}
@@ -144,7 +144,7 @@ bool VideoOverlayManager::setupVideoOverlay(void* videoSinkPtr, WId windowId, QW
{
#ifdef HAVE_GSTREAMER
if (!videoSinkPtr || windowId == 0) {
- qCWarning(log_gstreamer_backend) << "Invalid parameters for overlay setup: sink=" << videoSinkPtr << "windowId=" << windowId;
+ qCWarning(log_gstreamer_videooverlaymanager) << "Invalid parameters for overlay setup: sink=" << videoSinkPtr << "windowId=" << windowId;
return false;
}
@@ -152,7 +152,7 @@ bool VideoOverlayManager::setupVideoOverlay(void* videoSinkPtr, WId windowId, QW
// Check if the sink supports video overlay interface
if (GST_IS_VIDEO_OVERLAY(videoSink)) {
- qCDebug(log_gstreamer_backend) << "Sink supports video overlay - setting up overlay with window ID:" << windowId;
+ qCDebug(log_gstreamer_videooverlaymanager) << "Sink supports video overlay - setting up overlay with window ID:" << windowId;
#ifdef Q_OS_LINUX
// Add X11 error handling to prevent segmentation fault
@@ -175,12 +175,12 @@ bool VideoOverlayManager::setupVideoOverlay(void* videoSinkPtr, WId windowId, QW
if (g_object_class_find_property(G_OBJECT_GET_CLASS(videoSink), "force-aspect-ratio")) {
// Allow the sink to stretch to the configured render rectangle so overlay scales to widget size
g_object_set(videoSink, "force-aspect-ratio", FALSE, NULL);
- qCDebug(log_gstreamer_backend) << "Disabled force-aspect-ratio on video sink to allow fill scaling";
+ qCDebug(log_gstreamer_videooverlaymanager) << "Disabled force-aspect-ratio on video sink to allow fill scaling";
}
if (g_object_class_find_property(G_OBJECT_GET_CLASS(videoSink), "pixel-aspect-ratio")) {
g_object_set(videoSink, "pixel-aspect-ratio", "1/1", NULL);
- qCDebug(log_gstreamer_backend) << "Set pixel-aspect-ratio to 1:1 on video sink";
+ qCDebug(log_gstreamer_videooverlaymanager) << "Set pixel-aspect-ratio to 1:1 on video sink";
}
// Configure render rectangle based on provided targets
@@ -188,13 +188,13 @@ bool VideoOverlayManager::setupVideoOverlay(void* videoSinkPtr, WId windowId, QW
QSize widgetSize = videoWidget->size();
if (widgetSize.width() > 0 && widgetSize.height() > 0) {
gst_video_overlay_set_render_rectangle(GST_VIDEO_OVERLAY(videoSink), 0, 0, widgetSize.width(), widgetSize.height());
- qCDebug(log_gstreamer_backend) << "Set render rectangle to widget size:" << widgetSize;
+ qCDebug(log_gstreamer_videooverlaymanager) << "Set render rectangle to widget size:" << widgetSize;
}
} else if (graphicsVideoItem) {
QRectF itemRect = graphicsVideoItem->boundingRect();
if (itemRect.width() > 0 && itemRect.height() > 0) {
gst_video_overlay_set_render_rectangle(GST_VIDEO_OVERLAY(videoSink), 0, 0, (gint)itemRect.width(), (gint)itemRect.height());
- qCDebug(log_gstreamer_backend) << "Set render rectangle to video item size:" << itemRect.size();
+ qCDebug(log_gstreamer_videooverlaymanager) << "Set render rectangle to video item size:" << itemRect.size();
}
}
@@ -205,18 +205,18 @@ bool VideoOverlayManager::setupVideoOverlay(void* videoSinkPtr, WId windowId, QW
XCloseDisplay(display);
if (x11_overlay_error_occurred_local) {
- qCWarning(log_gstreamer_backend) << "X11 error occurred during overlay setup - continuing without embedding";
+ qCWarning(log_gstreamer_videooverlaymanager) << "X11 error occurred during overlay setup - continuing without embedding";
} else {
- qCDebug(log_gstreamer_backend) << "Video overlay setup completed successfully";
+ qCDebug(log_gstreamer_videooverlaymanager) << "Video overlay setup completed successfully";
}
} else if (!old_handler) {
- qCDebug(log_gstreamer_backend) << "Video overlay setup completed (no X11 error handling)";
+ qCDebug(log_gstreamer_videooverlaymanager) << "Video overlay setup completed (no X11 error handling)";
}
#endif
- qCDebug(log_gstreamer_backend) << "Overlay setup completed";
+ qCDebug(log_gstreamer_videooverlaymanager) << "Overlay setup completed";
} catch (...) {
- qCCritical(log_gstreamer_backend) << "Exception during video overlay setup - continuing without embedding";
+ qCCritical(log_gstreamer_videooverlaymanager) << "Exception during video overlay setup - continuing without embedding";
#ifdef Q_OS_LINUX
if (display && old_handler) {
XSetErrorHandler(old_handler);
@@ -242,17 +242,17 @@ bool VideoOverlayManager::setupVideoOverlay(void* videoSinkPtr, WId windowId, QW
if (gst_iterator_next(iter, &item) == GST_ITERATOR_OK) {
actualSink = GST_ELEMENT(g_value_get_object(&item));
if (actualSink && GST_IS_VIDEO_OVERLAY(actualSink)) {
- qCDebug(log_gstreamer_backend) << "Found overlay-capable sink inside autovideosink";
+ qCDebug(log_gstreamer_videooverlaymanager) << "Found overlay-capable sink inside autovideosink";
gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(actualSink), windowId);
// Use target widget size if available to set explicit render rectangle so scaling works
if (videoWidget) {
QSize widgetSize = videoWidget->size();
if (widgetSize.width() > 0 && widgetSize.height() > 0) {
gst_video_overlay_set_render_rectangle(GST_VIDEO_OVERLAY(actualSink), 0, 0, widgetSize.width(), widgetSize.height());
- qCDebug(log_gstreamer_backend) << "Set render rectangle to widget size for autovideosink child sink:" << widgetSize;
+ qCDebug(log_gstreamer_videooverlaymanager) << "Set render rectangle to widget size for autovideosink child sink:" << widgetSize;
if (g_object_class_find_property(G_OBJECT_GET_CLASS(actualSink), "force-aspect-ratio")) {
g_object_set(actualSink, "force-aspect-ratio", FALSE, NULL);
- qCDebug(log_gstreamer_backend) << "Disabled force-aspect-ratio on autovideosink child sink";
+ qCDebug(log_gstreamer_videooverlaymanager) << "Disabled force-aspect-ratio on autovideosink child sink";
}
} else {
gst_video_overlay_set_render_rectangle(GST_VIDEO_OVERLAY(actualSink), 0, 0, -1, -1);
@@ -269,18 +269,18 @@ bool VideoOverlayManager::setupVideoOverlay(void* videoSinkPtr, WId windowId, QW
}
gst_iterator_free(iter);
}
- qCDebug(log_gstreamer_backend) << "autovideosink selected sink doesn't support overlay - video will display in separate window";
+ qCDebug(log_gstreamer_videooverlaymanager) << "autovideosink selected sink doesn't support overlay - video will display in separate window";
return false;
}
- qCWarning(log_gstreamer_backend) << "Sink does not support video overlay:" << sinkName;
+ qCWarning(log_gstreamer_videooverlaymanager) << "Sink does not support video overlay:" << sinkName;
return false;
#else
Q_UNUSED(videoSinkPtr)
Q_UNUSED(windowId)
Q_UNUSED(videoWidget)
Q_UNUSED(graphicsVideoItem)
- qCDebug(log_gstreamer_backend) << "No in-process GStreamer - overlay unavailable";
+ qCDebug(log_gstreamer_videooverlaymanager) << "No in-process GStreamer - overlay unavailable";
return false;
#endif
}
@@ -300,7 +300,7 @@ bool VideoOverlayManager::setupVideoOverlayForPipeline(void* pipeline, WId windo
gst_object_unref(videoSink);
return ok;
}
- qCWarning(log_gstreamer_backend) << "No video sink found in pipeline";
+ qCWarning(log_gstreamer_videooverlaymanager) << "No video sink found in pipeline";
return false;
#else
Q_UNUSED(pipeline)
@@ -315,10 +315,10 @@ bool VideoOverlayManager::completePendingOverlaySetup(void* pipeline,
::VideoPane* videoPane,
bool &pendingFlag)
{
- qCDebug(log_gstreamer_backend) << "VideoOverlayManager: Completing pending overlay setup...";
+ qCDebug(log_gstreamer_videooverlaymanager) << "VideoOverlayManager: Completing pending overlay setup...";
if (!pendingFlag || !pipeline) {
- qCDebug(log_gstreamer_backend) << "No pending setup or no pipeline";
+ qCDebug(log_gstreamer_videooverlaymanager) << "No pending setup or no pipeline";
return false;
}
@@ -326,7 +326,7 @@ bool VideoOverlayManager::completePendingOverlaySetup(void* pipeline,
const bool isXcb = platform.contains("xcb", Qt::CaseInsensitive);
const bool hasXDisplay = !qgetenv("DISPLAY").isEmpty();
if (!isXcb || !hasXDisplay) {
- qCWarning(log_gstreamer_backend) << "Skipping deferred overlay setup: platform is" << platform << "(DISPLAY set:" << hasXDisplay << ")";
+ qCWarning(log_gstreamer_videooverlaymanager) << "Skipping deferred overlay setup: platform is" << platform << "(DISPLAY set:" << hasXDisplay << ")";
pendingFlag = false;
return false;
}
@@ -335,7 +335,7 @@ bool VideoOverlayManager::completePendingOverlaySetup(void* pipeline,
if (videoPane) {
windowId = videoPane->getVideoOverlayWindowId();
- qCDebug(log_gstreamer_backend) << "Completing overlay setup with VideoPane window ID:" << windowId;
+ qCDebug(log_gstreamer_videooverlaymanager) << "Completing overlay setup with VideoPane window ID:" << windowId;
} else if (graphicsVideoItem) {
if (graphicsVideoItem->scene()) {
QList<QGraphicsView*> views = graphicsVideoItem->scene()->views();
@@ -344,21 +344,21 @@ bool VideoOverlayManager::completePendingOverlaySetup(void* pipeline,
if (auto pane = qobject_cast<VideoPane*>(view)) {
if (pane->isDirectGStreamerModeEnabled() && pane->getOverlayWidget()) {
windowId = pane->getVideoOverlayWindowId();
- qCDebug(log_gstreamer_backend) << "Completing overlay setup with VideoPane overlay widget window ID:" << windowId;
+ qCDebug(log_gstreamer_videooverlaymanager) << "Completing overlay setup with VideoPane overlay widget window ID:" << windowId;
} else {
- qCDebug(log_gstreamer_backend) << "VideoPane overlay widget still not ready";
+ qCDebug(log_gstreamer_videooverlaymanager) << "VideoPane overlay widget still not ready";
return false;
}
} else {
windowId = view->winId();
- qCDebug(log_gstreamer_backend) << "Completing overlay setup with graphics view window ID:" << windowId;
+ qCDebug(log_gstreamer_videooverlaymanager) << "Completing overlay setup with graphics view window ID:" << windowId;
}
} else {
- qCWarning(log_gstreamer_backend) << "Graphics video item has no associated view";
+ qCWarning(log_gstreamer_videooverlaymanager) << "Graphics video item has no associated view";
return false;
}
} else {
- qCWarning(log_gstreamer_backend) << "Graphics video item has no scene";
+ qCWarning(log_gstreamer_videooverlaymanager) << "Graphics video item has no scene";
return false;
}
}
@@ -367,7 +367,7 @@ bool VideoOverlayManager::completePendingOverlaySetup(void* pipeline,
GstElement* videoSink = gst_bin_get_by_name(GST_BIN(pipeline), "videosink");
if (!videoSink) {
videoSink = gst_bin_get_by_interface(GST_BIN(pipeline), GST_TYPE_VIDEO_OVERLAY);
- if (videoSink) qCDebug(log_gstreamer_backend) << "Deferred path: found sink by overlay interface";
+ if (videoSink) qCDebug(log_gstreamer_videooverlaymanager) << "Deferred path: found sink by overlay interface";
}
if (videoSink) {
@@ -384,14 +384,14 @@ bool VideoOverlayManager::completePendingOverlaySetup(void* pipeline,
targetWidget = videoWidget;
}
if (targetWidget) {
- qCDebug(log_gstreamer_backend) << "Deferred: binding qt6videosink to QWidget" << targetWidget;
+ qCDebug(log_gstreamer_videooverlaymanager) << "Deferred: binding qt6videosink to QWidget" << targetWidget;
g_object_set(G_OBJECT(videoSink), "widget", (gpointer)targetWidget, nullptr);
gst_object_unref(videoSink);
pendingFlag = false;
- qCDebug(log_gstreamer_backend) << "Deferred qt6videosink binding completed";
+ qCDebug(log_gstreamer_videooverlaymanager) << "Deferred qt6videosink binding completed";
return true;
} else {
- qCWarning(log_gstreamer_backend) << "Deferred: no target QWidget available to bind qt6videosink";
+ qCWarning(log_gstreamer_videooverlaymanager) << "Deferred: no target QWidget available to bind qt6videosink";
}
}
@@ -399,30 +399,30 @@ bool VideoOverlayManager::completePendingOverlaySetup(void* pipeline,
const bool looksLikeXSink = sinkNameBA.contains("xvimage") || sinkNameBA.contains("ximage");
if (!supportsOverlay) {
- qCWarning(log_gstreamer_backend) << "Deferred overlay skipped: sink does not support overlay interface (" << sinkName << ")";
+ qCWarning(log_gstreamer_videooverlaymanager) << "Deferred overlay skipped: sink does not support overlay interface (" << sinkName << ")";
gst_object_unref(videoSink);
pendingFlag = false;
return false;
}
if (!looksLikeXSink) {
- qCWarning(log_gstreamer_backend) << "Deferred overlay skipped: sink is not an X sink (" << sinkName << ") on platform" << QGuiApplication::platformName();
+ qCWarning(log_gstreamer_videooverlaymanager) << "Deferred overlay skipped: sink is not an X sink (" << sinkName << ") on platform" << QGuiApplication::platformName();
gst_object_unref(videoSink);
pendingFlag = false;
return false;
}
- qCDebug(log_gstreamer_backend) << "Setting up deferred video overlay with window ID:" << windowId << "using sink" << sinkName;
+ qCDebug(log_gstreamer_videooverlaymanager) << "Setting up deferred video overlay with window ID:" << windowId << "using sink" << sinkName;
gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(videoSink), windowId);
gst_object_unref(videoSink);
pendingFlag = false;
- qCDebug(log_gstreamer_backend) << "Deferred overlay setup completed successfully";
+ qCDebug(log_gstreamer_videooverlaymanager) << "Deferred overlay setup completed successfully";
return true;
} else {
- qCWarning(log_gstreamer_backend) << "Could not find video sink for deferred overlay setup";
+ qCWarning(log_gstreamer_videooverlaymanager) << "Could not find video sink for deferred overlay setup";
}
} else {
- qCWarning(log_gstreamer_backend) << "Still no valid window ID available for deferred overlay setup";
+ qCWarning(log_gstreamer_videooverlaymanager) << "Still no valid window ID available for deferred overlay setup";
}
return false;

View file

@ -0,0 +1,2 @@
SUBSYSTEM=="usb", ATTRS{idVendor}=="534d", ATTRS{idProduct}=="2109", MODE="0660", GROUP="plugdev"
SUBSYSTEM=="usb", ATTRS{idVendor}=="1a86", ATTRS{idProduct}=="7523", MODE="0660", GROUP="plugdev"

View file

@ -0,0 +1,73 @@
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
# APKBUILD for the Openterface Mini-KVM host application (Qt6).
pkgname=openterface-qt
pkgver=0.5.7
pkgrel=0
pkgdesc="Openterface Mini-KVM Host Application"
# armhf: missing qt6-qtmultimedia
# riscv64: missing libgtk-3
arch="all !armhf !riscv64"
url="https://openterface.com/"
license="AGPL-3.0-only"
# Runtime deps: GStreamer Qt video sink plugin, icon theme, and the
# FFmpeg-backed Qt6 multimedia backend used for capture playback.
depends="
gst-plugins-good-qt
hicolor-icon-theme
qt6-qtmultimedia-ffmpeg
"
# patchelf is needed in package() to set an rpath on the final binary.
makedepends="
cmake
ffmpeg-dev
libgudev-dev
libjpeg-turbo-dev
libusb-dev
libx11-dev
libxv-dev
patchelf
qt6-qtbase-dev
qt6-qtmultimedia-dev
qt6-qtserialport-dev
samurai
v4l-utils-dev
"
install="$pkgname.post-install"
builddir="$srcdir"/Openterface_QT-$pkgver
options="!check" # No testsuite
source="
$pkgname-$pkgver.tar.gz::https://github.com/TechxArtisanStudio/Openterface_QT/archive/$pkgver.tar.gz
openterfaceQT.desktop
51-openterface-permissions.rules
348_address-use-of-deleted-function.patch
423_deduplicate-logging-categories-and-remove-backslashes.patch
use-system-libs.patch
"
# Apply source patches, then configure CMake in an out-of-tree "build" dir.
prepare() {
default_prepare
mkdir build && cd build
# OPENTERFACE_BUILD_STATIC: do not build vendored dependencies
cmake -DOPENTERFACE_BUILD_STATIC=OFF ..
}
# Compile with ninja (provided by samurai) against the configured build dir.
build() {
ninja -C build
}
# Install the binary, udev rules, desktop entry and hicolor icons by hand;
# upstream's install target assumes a bundled/static deployment layout.
package() {
install -Dm755 "$builddir"/build/openterfaceQT "$pkgdir"/usr/bin/openterfaceQT
install -Dm644 "$srcdir"/51-openterface-permissions.rules "$pkgdir"/etc/udev/rules.d/51-openterface-permissions.rules
install -Dm644 "$srcdir"/openterfaceQT.desktop "$pkgdir"/usr/share/applications/openterfaceQT.desktop
install -Dm644 "$builddir"/images/icon_32.png "$pkgdir"/usr/share/icons/hicolor/32x32/apps/openterfaceQT.png
install -Dm644 "$builddir"/images/icon_64.png "$pkgdir"/usr/share/icons/hicolor/64x64/apps/openterfaceQT.png
install -Dm644 "$builddir"/images/icon_128.png "$pkgdir"/usr/share/icons/hicolor/128x128/apps/openterfaceQT.png
# vanilla build does not set rpath, since it usually wants to use vendored libs
patchelf --set-rpath '/usr/lib' "$pkgdir"/usr/bin/openterfaceQT
}
sha512sums="
996415d6f7d3ed950901c380a0520ddab8c31e8d3c2e2bb3a5f631a5600cace6bcf6bf89871e4e4ef818009eeb08c448fd793e1e4758ecccf1e1a21ff04fd560 openterface-qt-0.5.7.tar.gz
e39cfa04cbcb59e8ba54110a28eff41854f73fa7c4baeeed5433907c79781946f12bd3a731763caa1d591e664eab0650bdbd2a844954baa12bb96a76a17c6e4f openterfaceQT.desktop
f50d721a6a2d1e0183c81e99230e91e127ee6c6f3243af1cff3e3cb78e2913ebab3346ec8b461a4710220d1ce2e12a7cc960ded6e0dc2def539375c6e737b647 51-openterface-permissions.rules
69b5556ec9e56792e848ea1ff9374e12e6901da821ecd9d6f2f521ea30f48e564c2cd0631fc1360acd6c8c6249cfa718d5baf7ed6929e1e92f63eeaea578bcb3 348_address-use-of-deleted-function.patch
47580d07a2d971ad2010e78373d1abbcbc05b3fbd3a7e466faed50dc9a0d632db30c0a7622e7324aeb0eb38d49e3241cb6cebc835f7adeed977b1dd7b48ea5f6 423_deduplicate-logging-categories-and-remove-backslashes.patch
22ecac74fe0923f39f538a5d587f8c100d9709631a1584bd20646e09dcf777cd3042670d08195626220f0494e5efa9549a299c5e1fd8c42f991ec5746b42cc86 use-system-libs.patch
"

View file

@ -0,0 +1,3 @@
#!/bin/sh
# Post-install notice for openterface-qt.
# Device access is granted by the shipped udev rules to the 'plugdev'
# group; 'video' is needed for the capture device. The old message
# mentioned 'dialout', a group neither the rules nor the command use.
echo "To access the Openterface serial and capture devices, add your user"
echo "to the 'video' and 'plugdev' groups."
echo "Run: sudo usermod -a -G video,plugdev \$USER"

View file

@ -0,0 +1,8 @@
[Desktop Entry]
Version=1.0
Type=Application
Name=OpenterfaceQT
Exec=/usr/bin/openterfaceQT
Icon=openterfaceQT
Comment=OpenterfaceQT Application
Categories=Utility;

View file

@ -0,0 +1,72 @@
diff --git a/CMakeLists.txt b/CMakeLists.txt
index e0e8ea1..c861725 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -244,7 +244,7 @@ endif()
# Add XCB cursor library for static linking (Linux only) - Legacy support
if(UNIX AND NOT APPLE AND XCB_CURSOR_FOUND)
# This is redundant now but keeping for compatibility
- message(STATUS "XCB cursor already linked via static approach above")
+ target_link_libraries(openterfaceQT PRIVATE X11)
endif()
# Add TurboJPEG if available
@@ -307,4 +307,4 @@ if(CMAKE_BUILD_TYPE STREQUAL "Release")
endif()
endif()
-# Qt components already found above
\ No newline at end of file
+# Qt components already found above
diff --git a/cmake/FFmpeg.cmake b/cmake/FFmpeg.cmake
index ef0699e..660e765 100644
--- a/cmake/FFmpeg.cmake
+++ b/cmake/FFmpeg.cmake
@@ -19,7 +19,7 @@ if(NOT DEFINED FFMPEG_PREFIX)
if(WIN32)
set(FFMPEG_PREFIX "C:/ffmpeg-static" CACHE PATH "FFmpeg installation directory")
else()
- set(FFMPEG_PREFIX "/opt/ffmpeg" CACHE PATH "FFmpeg installation directory")
+ set(FFMPEG_PREFIX "/usr" CACHE PATH "FFmpeg installation directory")
endif()
message(STATUS "Using default FFMPEG_PREFIX: ${FFMPEG_PREFIX}")
endif()
@@ -116,7 +116,7 @@ if(NOT FFMPEG_FOUND)
message(STATUS "FFmpeg search paths: ${FFMPEG_SEARCH_PATHS}")
foreach(SEARCH_PATH ${FFMPEG_SEARCH_PATHS})
# For static builds, prefer .a files; check common lib directories
- set(LIB_EXTENSIONS ".a")
+ set(LIB_EXTENSIONS ".so")
# Platform-specific library paths
if(WIN32)
diff --git a/cmake/GStreamer.cmake b/cmake/GStreamer.cmake
index 220e9f5..576535f 100644
--- a/cmake/GStreamer.cmake
+++ b/cmake/GStreamer.cmake
@@ -316,10 +316,11 @@ else()
# Check for Qt6 plugin availability in system
message(STATUS "Checking for Qt6 GStreamer plugin in system...")
find_file(GSTREAMER_QT6_PLUGIN
- NAMES libgstqt6.so
+ NAMES libgstqml6.so
PATHS
/usr/lib/x86_64-linux-gnu/gstreamer-1.0
/usr/lib/aarch64-linux-gnu/gstreamer-1.0
+ /usr/lib/gstreamer-1.0
/usr/local/lib/gstreamer-1.0
NO_DEFAULT_PATH
)
diff --git a/cmake/Resources.cmake b/cmake/Resources.cmake
index 2d28b89..e2009e3 100644
--- a/cmake/Resources.cmake
+++ b/cmake/Resources.cmake
@@ -336,7 +336,7 @@ install(FILES ${CMAKE_SOURCE_DIR}/com.openterface.openterfaceQT.metainfo.xml
if(COMMAND qt_generate_deploy_app_script)
qt_generate_deploy_app_script(
TARGET openterfaceQT
- FILENAME_VARIABLE deploy_script
+ OUTPUT_SCRIPT deploy_script
NO_UNSUPPORTED_PLATFORM_ERROR
)
install(SCRIPT ${deploy_script})

View file

@ -1,39 +0,0 @@
# Automatically generated by apkbuild-cpan, template 4
# Contributor: Timo Teräs <timo.teras@iki.fi>
# Maintainer: Celeste <cielesti@protonmail.com>
maintainer="Celeste <cielesti@protonmail.com>"
pkgname=perl-math-random-isaac-xs
pkgver=1.004
pkgrel=8
#_pkgreal is used by apkbuild-cpan to find modules at MetaCpan
_pkgreal=Math-Random-ISAAC-XS
pkgdesc="C implementation of the ISAAC PRNG algorithm"
url="https://metacpan.org/release/Math-Random-ISAAC-XS/"
arch="all"
license="Public-Domain"
depends="perl"
makedepends="perl-dev perl-module-build"
checkdepends="perl-test-nowarnings"
subpackages="$pkgname-doc"
source="https://cpan.metacpan.org/authors/id/J/JA/JAWNSY/Math-Random-ISAAC-XS-$pkgver.tar.gz"
builddir="$srcdir/$_pkgreal-$pkgver"
# Build via Module::Build; export perl's own ccflags so the XS extension
# is compiled with the same flags as the interpreter.
build() {
export CFLAGS=$(perl -MConfig -E 'say $Config{ccflags}')
perl Build.PL \
--installdirs=vendor \
--create_packlist=0
./Build
}
# Run the distribution's test suite.
check() {
./Build test
}
# Stage the built module into $pkgdir.
package() {
./Build install --destdir="$pkgdir"
}
sha512sums="
40c46b5f247f585a407ef9f36b5874d9cf03ec05963a9d92d988ebd63daf1e37b1b51308845d4596f47b5ad7203953bcb7fbb421c905b526dbe99b246ccb4d87 Math-Random-ISAAC-XS-1.004.tar.gz
"

View file

@ -2,7 +2,7 @@
# Maintainer: Francesco Colista <fcolista@alpinelinux.org>
pkgname=py3-apsw
_pkgname=apsw
pkgver=3.46.1.0
pkgver=3.50.4.0
pkgrel=0
pkgdesc="Another Python SQLite Wrapper"
url="https://github.com/rogerbinns/apsw"
@ -41,6 +41,6 @@ package() {
}
sha512sums="
8d24825c8346b05a99b8959ce1fd45ae5162c95b020ecc63bd3491bfd1579370a0e6b1a962f7f64a7e7e415846007e64d90b28e2065ae047e228d60b12b9cb02 py3-apsw-3.46.1.0.zip
71db63b0a7f550c9a5d3f112d47c24953472cc6555f0b57198428997d5cf5acf73629f2da8d5d53a2473067ba19d4b655cce467a5e2267e5bd6e8cf0d9883579 py3-apsw-3.50.4.0.zip
8f3957bd6fecb5660a7cab367043e4ccdacd87d8963bbe41cc3d525265de28f08aa207099658d785be29c5c90b818c1418f766995cd780d02b8e36252a389758 detect-sqlite-config.patch
"

View file

@ -1,42 +0,0 @@
# Contributor: Aiden Grossman <agrossman154@yahoo.com>
# Maintainer:
pkgname=py3-arcus
# Needs to be upgraded in sync with libarcus
pkgver=5.3.0
pkgrel=1
pkgdesc="Python bindings for libarcus"
url="https://github.com/Ultimaker/pyArcus"
arch="all"
license="LGPL-3.0-only"
makedepends="
cmake
libarcus-dev
protobuf-dev
py3-sip
python3-dev
samurai
"
options="!check" # package doesn't provide any tests
# Local patches replace upstream's Conan-based build with plain
# CMake + SIP (see cmake.patch / cmake-helpers.patch / pyproject.patch).
source="$pkgname-$pkgver.tar.gz::https://github.com/Ultimaker/pyArcus/archive/refs/tags/$pkgver.tar.gz
cmake.patch
cmake-helpers.patch
pyproject.patch"
builddir="$srcdir/pyArcus-$pkgver"
# Configure and compile the SIP extension module with CMake/Ninja.
build() {
cmake -G Ninja -B build \
-DBUILD_SHARED_LIBS=ON \
-DCMAKE_BUILD_TYPE=Release
cmake --build build
}
# Install the built module into $pkgdir via CMake's install target.
package() {
DESTDIR="$pkgdir" cmake --install build
}
sha512sums="
d4a114994fa3e3156eae95dde58df13237b8bb0571a1219d6dee6b6338fd65f911f27887d6ab32b7a3cb32bc45ca6c25147e7c2d246cb0707326b88246abfbcd py3-arcus-5.3.0.tar.gz
f14e55cd31c13051981f26364e34da8c94e8eb5227b1cfd6fe44b9f97b5a4dcf6142a1751fa62eb0514a47583e6ec2d51dc253f23cf72c3fe6a1cb5dca136f21 cmake.patch
de75b985607feae0a9c511742915814e9c3d4bc467183f010ccc334ce4d0d952b6ff86020360b78558c4738cc03cf62c386b44ed76bcec12075c4a93dd03eeb7 cmake-helpers.patch
ef593230d5c78da8ba0fc6ea83225c4543857de1837d3151c45e59ffd7c98063b8f97f25d01c15b6a8f90c26c919206f9f7fa26c9650117f4ce7be49ebca876f pyproject.patch
"

View file

@ -1,254 +0,0 @@
--- /dev/null
+++ ./cmake/CMakeBuilder.py
@@ -0,0 +1,13 @@
+from sipbuild import SetuptoolsBuilder
+
+
+class CMakeBuilder(SetuptoolsBuilder):
+ def __init__(self, project, **kwargs):
+ print("Using the CMake builder")
+ super().__init__(project, **kwargs)
+
+ def build(self):
+ """ Only Generate the source files """
+ print("Generating the source files")
+ self._generate_bindings()
+ self._generate_scripts()
--- /dev/null
+++ ./cmake/FindSIP.cmake
@@ -0,0 +1,65 @@
+# Find SIP
+# ~~~~~~~~
+#
+# SIP website: http://www.riverbankcomputing.co.uk/sip/index.php
+#
+# Find the installed version of SIP. FindSIP should be called after Python
+# has been found.
+#
+# This file defines the following variables:
+#
+# SIP_VERSION - The version of SIP found expressed as a 6 digit hex number
+# suitable for comparison as a string.
+#
+# SIP_VERSION_STR - The version of SIP found as a human readable string.
+#
+# SIP_BINARY_PATH - Path and filename of the SIP command line executable.
+#
+# SIP_INCLUDE_DIR - Directory holding the SIP C++ header file.
+#
+# SIP_DEFAULT_SIP_DIR - Default directory where .sip files should be installed
+# into.
+
+# Copyright (c) 2007, Simon Edwards <simon@simonzone.com>
+# Redistribution and use is allowed according to the terms of the BSD license.
+# For details see the accompanying COPYING-CMAKE-SCRIPTS file.
+
+
+
+IF(SIP_VERSION OR SIP_BUILD_EXECUTABLE)
+ # Already in cache, be silent
+ SET(SIP_FOUND TRUE)
+ELSE()
+
+ FIND_FILE(_find_sip_py FindSIP.py PATHS ${CMAKE_MODULE_PATH} NO_CMAKE_FIND_ROOT_PATH)
+
+ EXECUTE_PROCESS(COMMAND ${Python_EXECUTABLE} ${_find_sip_py} OUTPUT_VARIABLE sip_config)
+ IF(sip_config)
+ STRING(REGEX REPLACE "^sip_version:([^\n]+).*$" "\\1" SIP_VERSION ${sip_config})
+ STRING(REGEX REPLACE ".*\nsip_version_num:([^\n]+).*$" "\\1" SIP_VERSION_NUM ${sip_config})
+ STRING(REGEX REPLACE ".*\nsip_version_str:([^\n]+).*$" "\\1" SIP_VERSION_STR ${sip_config})
+ STRING(REGEX REPLACE ".*\ndefault_sip_dir:([^\n]+).*$" "\\1" SIP_DEFAULT_SIP_DIR ${sip_config})
+ IF(${SIP_VERSION_STR} VERSION_LESS 5)
+ STRING(REGEX REPLACE ".*\nsip_bin:([^\n]+).*$" "\\1" SIP_BINARY_PATH ${sip_config})
+ STRING(REGEX REPLACE ".*\nsip_inc_dir:([^\n]+).*$" "\\1" SIP_INCLUDE_DIR ${sip_config})
+ STRING(REGEX REPLACE ".*\nsip_module_dir:([^\n]+).*$" "\\1" SIP_MODULE_DIR ${sip_config})
+ ELSE(${SIP_VERSION_STR} VERSION_LESS 5)
+ FIND_PROGRAM(SIP_BUILD_EXECUTABLE sip-build)
+ ENDIF(${SIP_VERSION_STR} VERSION_LESS 5)
+ SET(SIP_FOUND TRUE)
+ ENDIF(sip_config)
+
+ IF(SIP_FOUND)
+ IF(NOT SIP_FIND_QUIETLY)
+ MESSAGE(STATUS "Found SIP version: ${SIP_VERSION_STR}")
+ ENDIF(NOT SIP_FIND_QUIETLY)
+ ELSE(SIP_FOUND)
+ IF(SIP_FIND_REQUIRED)
+ MESSAGE(FATAL_ERROR "Could not find SIP")
+ ENDIF(SIP_FIND_REQUIRED)
+ ENDIF(SIP_FOUND)
+
+ENDIF()
+
+include(${CMAKE_SOURCE_DIR}/cmake/SIPMacros.cmake)
+ADD_DEFINITIONS(-DSIP_VERSION=0x${SIP_VERSION})
--- /dev/null
+++ ./cmake/FindSIP.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (c) 2007, Simon Edwards <simon@simonzone.com>
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+# * Neither the name of the Simon Edwards <simon@simonzone.com> nor the
+# names of its contributors may be used to endorse or promote products
+# derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY Simon Edwards <simon@simonzone.com> ''AS IS'' AND ANY
+# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL Simon Edwards <simon@simonzone.com> BE LIABLE FOR ANY
+# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+# FindSIP.py
+# Copyright (c) 2007, Simon Edwards <simon@simonzone.com>
+# Redistribution and use is allowed according to the terms of the BSD license.
+# For details see the accompanying COPYING-CMAKE-SCRIPTS file.
+
+try:
+ import sipbuild
+
+ print("sip_version:%06.0x" % sipbuild.version.SIP_VERSION)
+ print("sip_version_num:%d" % sipbuild.version.SIP_VERSION)
+ print("sip_version_str:%s" % sipbuild.version.SIP_VERSION_STR)
+
+ from distutils.sysconfig import get_python_lib
+ python_modules_dir = get_python_lib(plat_specific=1)
+ print("default_sip_dir:%s" % python_modules_dir)
+except ImportError: # Code for SIP v4
+ import sipconfig
+
+ sipcfg = sipconfig.Configuration()
+ print("sip_version:%06.0x" % sipcfg.sip_version)
+ print("sip_version_num:%d" % sipcfg.sip_version)
+ print("sip_version_str:%s" % sipcfg.sip_version_str)
+ print("sip_bin:%s" % sipcfg.sip_bin)
+ print("default_sip_dir:%s" % sipcfg.default_sip_dir)
+ print("sip_inc_dir:%s" % sipcfg.sip_inc_dir)
+ # SIP 4.19.10+ has new sipcfg.sip_module_dir
+ if hasattr(sipcfg, "sip_module_dir"):
+ print("sip_module_dir:%s" % sipcfg.sip_module_dir)
+ else:
+ print("sip_module_dir:%s" % sipcfg.sip_mod_dir)
--- /dev/null
+++ ./cmake/SIPMacros.cmake
@@ -0,0 +1,107 @@
+
+
+# Macros for SIP
+# ~~~~~~~~~~~~~~
+
+set(SIP_ARGS --pep484-pyi --no-protected-is-public)
+
+function(add_sip_module MODULE_TARGET)
+ if(NOT SIP_BUILD_EXECUTABLE)
+ set(SIP_BUILD_EXECUTABLE ${CMAKE_PREFIX_PATH}/Scripts/sip-build)
+ endif()
+
+ message(STATUS "SIP: Generating pyproject.toml")
+ configure_file(${CMAKE_SOURCE_DIR}/pyproject.toml.in ${CMAKE_CURRENT_BINARY_DIR}/pyproject.toml)
+ configure_file(${CMAKE_SOURCE_DIR}/cmake/CMakeBuilder.py ${CMAKE_CURRENT_BINARY_DIR}/CMakeBuilder.py)
+ if(WIN32)
+ set(ext .pyd)
+ set(env_path_sep ";")
+ else()
+ set(ext .so)
+ set(env_path_sep ":")
+ endif()
+
+ message(STATUS "SIP: Generating source files")
+ execute_process(
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${PYTHONPATH}${env_path_sep}$ENV{PYTHONPATH}${env_path_sep}${CMAKE_CURRENT_BINARY_DIR}" ${SIP_BUILD_EXECUTABLE} ${SIP_ARGS}
+ COMMAND_ECHO STDOUT
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/
+ )
+ # This will generate the source-files during the configuration step in CMake. Needed to obtain the sources
+
+ # Touch the generated files (8 in total) to make them dirty and force them to rebuild
+ message(STATUS "SIP: Touching the source files")
+ set(_sip_output_files)
+ list(LENGTH SIP_FILES _no_outputfiles)
+ foreach(_concat_file_nr RANGE 0 ${_no_outputfiles})
+ if(${_concat_file_nr} LESS 8)
+ list(APPEND _sip_output_files "${CMAKE_CURRENT_BINARY_DIR}/${MODULE_TARGET}/${MODULE_TARGET}/sip${MODULE_TARGET}part${_concat_file_nr}.cpp")
+ endif()
+ endforeach()
+
+ # Find the generated source files
+ message(STATUS "SIP: Collecting the generated source files")
+ file(GLOB sip_c "${CMAKE_CURRENT_BINARY_DIR}/${MODULE_TARGET}/${MODULE_TARGET}/*.c")
+ file(GLOB sip_cpp "${CMAKE_CURRENT_BINARY_DIR}/${MODULE_TARGET}/${MODULE_TARGET}/*.cpp")
+ file(GLOB sip_hdr "${CMAKE_CURRENT_BINARY_DIR}/${MODULE_TARGET}/${MODULE_TARGET}/*.h")
+
+ # Add the user specified source files
+ message(STATUS "SIP: Collecting the user specified source files")
+ get_target_property(usr_src ${MODULE_TARGET} SOURCES)
+
+ # create the target library and link all the files (generated and user specified
+ message(STATUS "SIP: Linking the interface target against the shared library")
+ set(sip_sources "${sip_c}" "${sip_cpp}" "${usr_src}")
+
+ if (BUILD_SHARED_LIBS)
+ add_library("sip_${MODULE_TARGET}" SHARED ${sip_sources})
+ else()
+ add_library("sip_${MODULE_TARGET}" STATIC ${sip_sources})
+ endif()
+
+ # Make sure that the library name of the target is the same as the MODULE_TARGET with the appropriate extension
+ target_link_libraries("sip_${MODULE_TARGET}" PRIVATE "${MODULE_TARGET}")
+ set_target_properties("sip_${MODULE_TARGET}" PROPERTIES PREFIX "")
+ set_target_properties("sip_${MODULE_TARGET}" PROPERTIES SUFFIX ${ext})
+ set_target_properties("sip_${MODULE_TARGET}" PROPERTIES OUTPUT_NAME "${MODULE_TARGET}")
+
+ # Add the custom command to (re-)generate the files and mark them as dirty. This allows the user to actually work
+ # on the sip definition files without having to reconfigure the complete project.
+ if (NOT DEFINED PYTHONPATH)
+ set(PYTHONPATH "")
+ endif ()
+ add_custom_command(
+ TARGET "sip_${MODULE_TARGET}"
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${PYTHONPATH}${env_path_sep}$ENV{PYTHONPATH}${env_path_sep}${CMAKE_CURRENT_BINARY_DIR}" ${SIP_BUILD_EXECUTABLE} ${SIP_ARGS}
+ COMMAND ${CMAKE_COMMAND} -E touch ${_sip_output_files}
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/
+ MAIN_DEPENDENCY ${MODULE_SIP}
+ DEPENDS ${sip_sources}
+ VERBATIM
+ )
+
+ set_target_properties("sip_${MODULE_TARGET}"
+ PROPERTIES
+ RESOURCE "${CMAKE_CURRENT_BINARY_DIR}/${MODULE_TARGET}/${MODULE_TARGET}/${MODULE_TARGET}.pyi")
+endfunction()
+
+function(install_sip_module MODULE_TARGET)
+ if(DEFINED ARGV1)
+ set(_install_path ${ARGV1})
+ else()
+ if(DEFINED Python_SITEARCH)
+ set(_install_path ${Python_SITEARCH})
+ elseif(DEFINED Python_SITELIB)
+ set(_install_path ${Python_SITELIB})
+ else()
+ message(FATAL_ERROR "SIP: Specify the site-packages location")
+ endif()
+ endif()
+ message(STATUS "SIP: Installing Python module and PEP 484 file in ${_install_path}")
+ install(TARGETS "sip_${MODULE_TARGET}"
+ ARCHIVE DESTINATION ${_install_path}
+ LIBRARY DESTINATION ${_install_path}
+ RUNTIME DESTINATION ${_install_path}
+ RESOURCE DESTINATION ${_install_path}
+ )
+endfunction()

View file

@ -1,32 +0,0 @@
--- ./CMakeLists.txt.orig
+++ ./CMakeLists.txt
@@ -2,22 +2,22 @@
project(pyarcus)
cmake_minimum_required(VERSION 3.20)
-find_package(protobuf REQUIRED)
-find_package(cpython REQUIRED)
-find_package(arcus REQUIRED)
+list(APPEND CMAKE_MODULE_PATH ${CMAKE_SOURCE_DIR}/cmake)
-find_package(standardprojectsettings REQUIRED)
-find_package(sipbuildtool REQUIRED)
+find_package(Protobuf REQUIRED)
+find_package(Python REQUIRED COMPONENTS Interpreter Development)
+find_package(Arcus REQUIRED)
+find_package(SIP 6.5.0 REQUIRED)
+
add_library(pyArcus INTERFACE src/PythonMessage.cpp)
-use_threads(pyArcus)
target_include_directories(pyArcus
INTERFACE
$<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>
$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/include/>
)
-target_link_libraries(pyArcus INTERFACE arcus::arcus protobuf::libprotobuf cpython::cpython)
+target_link_libraries(pyArcus INTERFACE Arcus protobuf::libprotobuf Python::Python)
add_sip_module(pyArcus)
install_sip_module(pyArcus)

View file

@ -1,20 +0,0 @@
--- /dev/null
+++ ./pyproject.toml.in
@@ -0,0 +1,17 @@
+[build-system]
+requires = ["sip >=6, <7"]
+build-backend = "sipbuild.api"
+
+[tool.sip.metadata]
+name = "pyArcus"
+
+[tool.sip.project]
+builder-factory = "CMakeBuilder"
+sip-files-dir = "${CMAKE_CURRENT_SOURCE_DIR}/python/"
+sip-include-dirs = ["CMAKE_CURRENT_SOURCE_DIR/python/"]
+build-dir = "${CMAKE_CURRENT_BINARY_DIR}/pyArcus/"
+
+[tool.sip.bindings.pyArcus]
+exceptions = true
+release-gil = true
+concatenate = 8

View file

@ -1,31 +0,0 @@
# Maintainer: Hoang Nguyen <folliekazetani@protonmail.com>
pkgname=py3-colored
# Upstream project name: strip the distro "py3-" prefix.
_pyname=${pkgname/py3-/}
pkgver=1.4.4
pkgrel=3
pkgdesc="Simple Python library for color and formatting in terminal"
url="https://gitlab.com/dslackw/colored"
arch="noarch"
license="MIT"
depends="python3"
makedepends="py3-setuptools py3-gpep517"
checkdepends="py3-pytest"
subpackages="$pkgname-pyc"
source="https://gitlab.com/dslackw/colored/-/archive/$pkgver/colored-$pkgver.tar.gz"
builddir="$srcdir/$_pyname-$pkgver"
options="!check" # No testsuite
# PEP 517 wheel build; fd 3 carries the wheel filename back to gpep517.
build() {
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
# Install the built wheel into $pkgdir.
package() {
gpep517 install-wheel --destdir "$pkgdir" \
.dist/*.whl
}
sha512sums="
d49075f97bcc220802a8a64780b4c3910acd420e7e0e82ee71659132e7a294a638b098e4e46ae54f531739f8a43cd35979e521c02bb359205a13d96e37cfe8ed colored-1.4.4.tar.gz
"

View file

@ -1,65 +0,0 @@
maintainer="Hoang Nguyen <folliekazetani@protonmail.com>"
pkgname=py3-dateparser
# Upstream project name: strip the distro "py3-" prefix.
_pyname=${pkgname#py3-}
pkgver=1.2.0
pkgrel=0
pkgdesc="Python parser for human readable dates"
url="https://github.com/scrapinghub/dateparser"
arch="noarch"
license="BSD-3-Clause"
depends="
python3
py3-dateutil
py3-regex
py3-tz
py3-tzlocal
"
makedepends="
py3-gpep517
py3-setuptools
py3-wheel
"
checkdepends="
py3-fasttext
py3-gitpython
py3-langdetect
py3-parameterized
py3-parsel
py3-pytest
py3-requests
py3-ruamel.yaml
"
subpackages="$pkgname-pyc"
source="$pkgname-$pkgver.tar.gz::https://github.com/scrapinghub/dateparser/archive/refs/tags/v$pkgver.tar.gz"
builddir="$srcdir/$_pyname-$pkgver"
# PEP 517 wheel build; fd 3 carries the wheel filename back to gpep517.
build() {
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
# Install the wheel into a throwaway venv and run pytest there, with
# known-broken tests filtered out (see inline notes).
check() {
# test_relative_base_setting_2_en fails due to tzinfo mismatch
# test_custom_language_detect_fast_text fails due to wrong file format
_test_filter="not test_parsing_date_should_fail_using_datetime_strptime_if_locale_is_non_english \
and not test_relative_base_setting_2_en and not test_custom_language_detect_fast_text"
python3 -m venv --clear --without-pip --system-site-packages .testenv
.testenv/bin/python3 -m installer .dist/*.whl
# test_hijri.py: needs hijri_converter, test_jalali.py: convertdate
.testenv/bin/python3 -m pytest tests \
-k "$_test_filter" \
--ignore tests/test_hijri.py \
--ignore tests/test_jalali.py
}
# Install the built wheel into $pkgdir.
package() {
python3 -m installer -d "$pkgdir" \
.dist/*.whl
}
sha512sums="
2d37115f25c2076c4521b77b89ef1cff3cd0a5233c45beb00d78a5c9b1a384dcd993ff7cdd1f77db95a53ce566cf7d709d46ffa2e63eb468ac954fda178a5b6e py3-dateparser-1.2.0.tar.gz
"

View file

@ -0,0 +1,39 @@
# Contributor: Fabian Affolter <fabian@affolter-engineering.ch>
# Maintainer: Fabian Affolter <fabian@affolter-engineering.ch>
pkgname=py3-flask-httpauth
pkgver=4.8.0
pkgrel=3
pkgdesc="Basic and Digest HTTP authentication for Flask routes"
url="https://pypi.org/project/Flask-HTTPAuth"
arch="noarch"
license="MIT"
depends="py3-flask python3"
makedepends="py3-gpep517 py3-setuptools py3-wheel"
checkdepends="py3-pytest py3-pytest-asyncio py3-asgiref"
subpackages="$pkgname-pyc"
source="https://files.pythonhosted.org/packages/source/F/Flask-HTTPAuth/Flask-HTTPAuth-$pkgver.tar.gz"
builddir="$srcdir"/Flask-HTTPAuth-$pkgver
replaces="py-flask-httpauth" # Backwards compatibility
provides="py-flask-httpauth=$pkgver-r$pkgrel" # Backwards compatibility
# PEP 517 wheel build; fd 3 carries the wheel filename back to gpep517.
build() {
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
# Install the wheel into a throwaway venv and run the test suite there.
check() {
python3 -m venv --clear --without-pip --system-site-packages .testenv
.testenv/bin/python3 -m installer .dist/*.whl
.testenv/bin/python3 -m pytest
}
# Install the built wheel into $pkgdir.
package() {
python3 -m installer -d "$pkgdir" \
.dist/flask_httpauth*.whl
}
sha512sums="
15878f45faf6bdde43d7b588539b044d621ba1ba590880e3d0a3bccf4e9bd04b898b3372775e99577b7e7955c4b6d2d7cc80df19ba30415c6b7c1d3183b7e5f4 Flask-HTTPAuth-4.8.0.tar.gz
"

View file

@ -1,7 +1,7 @@
# Contributor: lauren n. liberda <lauren@selfisekai.rocks>
maintainer="lauren n. liberda <lauren@selfisekai.rocks>"
pkgname=py3-flask-limiter
pkgver=3.9.2
pkgver=3.10.1
pkgrel=0
pkgdesc="Rate Limiting extension for Flask"
url="https://github.com/alisaifee/flask-limiter"
@ -50,6 +50,6 @@ package() {
}
sha512sums="
69e488a641ab39c088185fabcde19ebb4cbe1683e9143efdf146163bb0254e0c4f8b4b72df407542d540394e7e3b2d7498b9c93c25ae8a8128e05e319f342318 flask-limiter-3.9.2.tar.gz
a0d3af6f93d4283309d6df46ddb7fed4c358bbc712c2bd9e6897362c6d086c395cb9587c3d9da283ad757b574fd8c09d909f3c4b76d02ae8aade3e61dbea6aa0 flask-limiter-3.10.1.tar.gz
1b90e9134076cda249695d5ea741db9d205a2ae452c7d6edfe01eb37a221ce6f64b0e8ddcdbbee9b0e0fb16a28e5eabf14f1c1e41e965c7e3b93ea4f42caf553 our-std-is-good-enough.patch
"

View file

@ -1,37 +0,0 @@
# Contributor: Galen Abell <galen@galenabell.com>
# Maintainer: Galen Abell <galen@galenabell.com>
pkgname=py3-fuzzywuzzy
_pyname=fuzzywuzzy
pkgver=0.18.0
pkgrel=7
pkgdesc="Fuzzy string matching in python"
url="https://github.com/seatgeek/fuzzywuzzy"
arch="noarch"
license="GPL-2.0-only"
depends="python3 py3-levenshtein"
makedepends="py3-setuptools py3-gpep517"
checkdepends="py3-pytest py3-pycodestyle py3-hypothesis"
subpackages="$pkgname-pyc"
# ${_pyname%${_pyname#?}} expands to the first letter of _pyname ("f"),
# matching PyPI's one-letter source directory layout.
source="https://files.pythonhosted.org/packages/source/${_pyname%${_pyname#?}}/$_pyname/$_pyname-$pkgver.tar.gz"
builddir="$srcdir/$_pyname-$pkgver"
# PEP 517 wheel build; fd 3 carries the wheel filename back to gpep517.
build() {
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
# Install the wheel into a throwaway venv and run pytest there.
check() {
python3 -m venv --clear --without-pip --system-site-packages .testenv
gpep517 install-wheel --destdir .testenv --prefix '' .dist/*.whl
.testenv/bin/python3 -m pytest
}
# Install the built wheel into $pkgdir.
package() {
gpep517 install-wheel --destdir "$pkgdir" \
.dist/*.whl
}
sha512sums="
4a21ea67278fa525842d48fed8db666d00eae6d13254e8844d11f63b47c3a305b3cac760f28c24c6347aebcf73e96180e0a7cfba29c75f01ece2f7751e0398c5 fuzzywuzzy-0.18.0.tar.gz
"

View file

@ -1,45 +0,0 @@
# Contributor: Galen Abell <galen@galenabell.com>
# Maintainer: Galen Abell <galen@galenabell.com>
pkgname=py3-levenshtein
pkgver=0.25.1
pkgrel=2
pkgdesc="Python extension for computing string edit distances and similarities"
url="https://github.com/maxbachmann/Levenshtein"
arch="all"
license="GPL-2.0-only"
depends="py3-rapidfuzz"
# Native extension: needs cython, CMake/ninja and scikit-build plus the
# rapidfuzz C++ headers.
makedepends="
cmake
cython
py3-gpep517
py3-scikit-build
python3-dev
rapidfuzz
samurai
"
checkdepends="py3-pytest"
subpackages="$pkgname-pyc"
source="$pkgname-$pkgver.tar.gz::https://github.com/maxbachmann/Levenshtein/archive/refs/tags/v$pkgver.tar.gz"
builddir="$srcdir/Levenshtein-$pkgver"
# PEP 517 wheel build; fd 3 carries the wheel filename back to gpep517.
build() {
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
# Install the wheel into a throwaway venv and run pytest there.
check() {
python3 -m venv --clear --without-pip --system-site-packages .testenv
.testenv/bin/python3 -m installer \
.dist/Levenshtein*.whl
.testenv/bin/python3 -m pytest
}
# Install the built wheel into $pkgdir.
package() {
python3 -m installer -d "$pkgdir" \
.dist/Levenshtein*.whl
}
sha512sums="
936dab36b15df6f2ee5425efb1fdb1490fb8f618ba453f464a6dd615bcc427e55ceee7474f06b34392871d9f38470b853602a11d8f9776eee66ec34156511ca4 py3-levenshtein-0.25.1.tar.gz
"

View file

@ -1,8 +1,8 @@
# Contributor: lauren n. liberda <lauren@selfisekai.rocks>
# Maintainer: lauren n. liberda <lauren@selfisekai.rocks>
pkgname=py3-limits
pkgver=3.13.0
pkgrel=1
pkgver=3.14.1
pkgrel=0
pkgdesc="Rate limiting using various strategies and storage backends such as redis & memcached"
url="https://github.com/alisaifee/limits"
arch="noarch"
@ -19,14 +19,16 @@ checkdepends="
py3-pytest-asyncio
py3-pytest-benchmark
py3-pytest-cov
py3-pytest-lazy-fixtures
py3-pymemcache
py3-redis
"
subpackages="$pkgname-pyc"
options="!check" # most tests are integration with db connections, assume all connectors installed
source="
https://github.com/alisaifee/limits/archive/refs/tags/$pkgver/limits-$pkgver.tar.gz
our-std-is-good-enough.patch
tests-drop-etcd3-and-k-argument.patch
"
builddir="$srcdir/limits-$pkgver"
@ -39,7 +41,7 @@ build() {
check() {
python3 -m venv --clear --without-pip --system-site-packages .testenv
gpep517 install-wheel --destdir .testenv --prefix '' .dist/*.whl
.testenv/bin/python3 -m pytest \
.testenv/bin/python3 -m pytest -W ignore::DeprecationWarning \
-m 'not benchmark and not etcd and not integration and not memcached' \
-k 'not aio and not Storage and not strategy' -v
}
@ -50,6 +52,7 @@ package() {
}
sha512sums="
0a13d08001c2f95e559ac1be35fa8cc178ad2d41bd5bf7b7e85781a428f550c350c21b92942b5b7e45f0f4c0604e96e579c8a26b5e9ca1196e6605608721030a limits-3.13.0.tar.gz
0364d51f9f879b95c6a4a3c9e9fd3d7d1e15ea214c50ae98cd36826b8c0b2d903cf1128741ac83738e305a207dae8955a0b2c8679484d2d6643e334595bdb1d7 our-std-is-good-enough.patch
f30c7ec19c2d1edad9ed77dc590ae35717efa3956a4d97e465793e1923a4af08dc9921d90ee95d3c54ce3364b867ca67a9de62c61d627e07a3f50da20bdabd0f limits-3.14.1.tar.gz
271e3b0501f9f144eda8d2e96c93b285714e339b9217385e38cdbce1f4dec88f9c949e9419f8be94885092e7977f7dca29b86b5499e9fead678b42a686c337db our-std-is-good-enough.patch
e84f4db49349a6feba0f701b9d4357c5f66d64c4a23f8ce512528b0f44b5bbef55041c02d92aae3a4cc8d5340846f9e909217beb869a5aeb49df166dd29ae9e3 tests-drop-etcd3-and-k-argument.patch
"

View file

@ -1,14 +1,3 @@
--- ./limits/util.py.orig
+++ ./limits/util.py
@@ -8,7 +8,7 @@
from types import ModuleType
from typing import TYPE_CHECKING, cast
-import importlib_resources
+from importlib import resources as importlib_resources
from packaging.version import Version
from limits.typing import Dict, List, NamedTuple, Optional, Tuple, Type, Union
--- ./limits/typing.py.orig
+++ ./limits/typing.py
@@ -13,7 +13,7 @@
@ -19,4 +8,4 @@
+from typing import ClassVar, Counter, ParamSpec, Protocol, TypeAlias
Serializable = Union[int, str, float]

View file

@ -0,0 +1,24 @@
diff --git a/tests/conftest.py.orig b/tests/conftest.py
index 2aeb758dda6..a9b2b8b2bd1 100644
--- a/tests/conftest.py.orig
+++ b/tests/conftest.py
@@ -3,7 +3,6 @@ import platform
import socket
import time
-import etcd3
import pymemcache
import pymemcache.client
import pymongo
diff --git a/pytest.ini.orig b/pytest.ini
index 38c40a713d0..8c6659e21c2 100644
--- a/pytest.ini.orig
+++ b/pytest.ini
@@ -17,7 +17,6 @@ addopts =
-rfEsxX
--cov=limits
-m "not benchmark"
- -K
filterwarnings =
error
module::ResourceWarning

View file

@ -1,35 +0,0 @@
# Contributor: Aiden Grossman <agrossman154@yahoo.com>
# Maintainer:
pkgname=py3-mapbox-earcut
pkgver=1.0.1
pkgrel=2
pkgdesc="Python bindings for the mapbox earcut c++ library"
url="https://github.com/skogler/mapbox_earcut_python"
arch="all"
license="ISC"
depends="py3-numpy"
makedepends="py3-setuptools py3-pybind11-dev python3-dev py3-gpep517"
checkdepends="py3-pytest"
source="$pkgname-$pkgver.tar.gz::https://github.com/skogler/mapbox_earcut_python/archive/refs/tags/v$pkgver.tar.gz"
builddir="$srcdir/mapbox_earcut_python-$pkgver"
build() {
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
check() {
python3 -m venv --clear --without-pip --system-site-packages .testenv
gpep517 install-wheel --destdir .testenv --prefix '' .dist/*.whl
.testenv/bin/python3 -m pytest
}
package() {
gpep517 install-wheel --destdir "$pkgdir" \
.dist/*.whl
}
sha512sums="
cdb32585cbaf74c15e59af0ae70d983dd2f9bc9cfe1b59b3eadc4d442f7d962241854b589a035deae67cacd9334833b911d0981f0d417fe587348fc7d24f0c0a py3-mapbox-earcut-1.0.1.tar.gz
"

View file

@ -1,39 +0,0 @@
# Contributor: Aiden Grossman <agrossman154@yahoo.com>
# Maintainer:
pkgname=py3-numpy-stl
pkgver=3.2.0
pkgrel=0
pkgdesc="Library for working with STLs"
url="https://github.com/WoLpH/numpy-stl"
# s390x: no py3-utils
arch="noarch !s390x"
license="BSD-3-Clause"
depends="python3 py3-utils py3-numpy"
makedepends="py3-setuptools py3-gpep517"
checkdepends="py3-pytest py3-pytest-cov py3-pygments"
subpackages="$pkgname-pyc"
source="$pkgname-$pkgver.tar.gz::https://github.com/wolph/numpy-stl/archive/refs/tags/v$pkgver.tar.gz"
builddir="$srcdir/numpy-stl-$pkgver"
build() {
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
check() {
python3 -m venv --clear --without-pip --system-site-packages .testenv
gpep517 install-wheel --destdir .testenv --prefix '' .dist/*.whl
# deselected test needs xvfb-run and fails
.testenv/bin/python3 -m pytest \
--deselect tests/test_ascii.py::test_use_with_qt_with_custom_locale_decimal_delimeter
}
package() {
gpep517 install-wheel --destdir "$pkgdir" \
.dist/*.whl
}
sha512sums="
a08053ed264dbfd629229af3db9c38deed2932b28feced56e2d4c20476f1ba85ddc80881fb82330ea3f4fff9a3f91da20db7447050da5c75f1c04455a67538dc py3-numpy-stl-3.2.0.tar.gz
"

View file

@ -1,37 +0,0 @@
# Contributor: lauren n. liberda <lauren@selfisekai.rocks>
# Maintainer: lauren n. liberda <lauren@selfisekai.rocks>
pkgname=py3-pathvalidate
pkgver=3.2.1
pkgrel=0
pkgdesc="Python library to sanitize/validate a string such as filenames/file-paths/etc"
url="https://github.com/thombashi/pathvalidate"
arch="noarch"
license="MIT"
depends="python3"
makedepends="
py3-gpep517
py3-setuptools
py3-setuptools_scm
py3-wheel
"
checkdepends="py3-pytest py3-click py3-faker"
options="!check" # tests require unpackaged unmaintained dependencies
subpackages="$pkgname-pyc"
source="https://github.com/thombashi/pathvalidate/archive/refs/tags/v$pkgver/pathvalidate-$pkgver.tar.gz"
builddir="$srcdir/pathvalidate-$pkgver"
build() {
export SETUPTOOLS_SCM_PRETEND_VERSION="$pkgver"
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
package() {
python3 -m installer -d "$pkgdir" \
.dist/*.whl
}
sha512sums="
094bb442258ba58fff11691f5b60976513924443247e808effbc26b9dd6c336f5f84d8e4563643b7def19d9f82170eb9ec6cd89491f9115df8d1634d2aa12206 pathvalidate-3.2.1.tar.gz
"

View file

@ -1,36 +0,0 @@
# Contributor: Aiden Grossman <agrossman154@yahoo.com>
# Maintainer:
pkgname=py3-pivy
pkgver=0.6.9
pkgrel=2
pkgdesc="Python3 bindings for coin"
url="https://github.com/coin3d/pivy"
# riscv64: blocked by py3-pyside6
arch="all !riscv64"
license="ISC"
depends="py3-pyside6"
makedepends="swig soqt-dev qt6-qtbase-dev python3-dev glu-dev cmake samurai"
checkdepends="py3-pytest"
options="!check" # test suite is interactive and requires full installation
source="$pkgname-$pkgver.tar.gz::https://github.com/coin3d/pivy/archive/refs/tags/${pkgver//_alpha/.a}.tar.gz"
builddir="$srcdir/pivy-${pkgver//_alpha/.a}"
build() {
if [ "$CBUILD" != "$CHOST" ]; then
CMAKE_CROSSOPTS="-DCMAKE_SYSTEM_NAME=Linux -DCMAKE_HOST_SYSTEM_NAME=Linux"
fi
cmake -B build -G Ninja \
-DCMAKE_INSTALL_PREFIX=/usr \
-DBUILD_SHARED_LIBS=True \
-DCMAKE_BUILD_TYPE=None \
$CMAKE_CROSSOPTS
cmake --build build
}
package() {
DESTDIR="$pkgdir" cmake --install build
}
sha512sums="
fd9587c69ad7468b771fbae59e68620f67a3c20850edadd65bf7994c1789d3444feb419e65dce34c6ee897c98eaca9f2f29f0bbfb4d1f0bbde26e4db56f74f78 py3-pivy-0.6.9.tar.gz
"

View file

@ -1,50 +0,0 @@
# Contributor: Aiden Grossman <agrossman154@yahoo.com>
# Maintainer: Celeste <cielesti@protonmail.com>
maintainer="Celeste <cielesti@protonmail.com>"
pkgname=py3-pyinstrument
pkgver=4.7.3
pkgrel=0
pkgdesc="Call stack profiler for Python"
url="https://github.com/joerick/pyinstrument"
arch="all"
license="BSD-3-Clause"
makedepends="
py3-gpep517
py3-setuptools
py3-wheel
python3-dev
"
checkdepends="
py3-flaky
py3-greenlet
py3-pytest
py3-pytest-asyncio
py3-trio
"
subpackages="$pkgname-pyc"
source="$pkgname-$pkgver.tar.gz::https://github.com/joerick/pyinstrument/archive/refs/tags/v$pkgver.tar.gz"
builddir="$srcdir/pyinstrument-$pkgver"
build() {
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
check() {
[ -d "pyinstrument" ] && mv -v pyinstrument pyinstrument.src
python3 -m venv --clear --without-pip --system-site-packages .testenv
.testenv/bin/python3 -m installer .dist/*.whl
# test_cmdline.py tries to run "pyinstrument" executable
PATH="$builddir/.testenv/bin:$PATH" .testenv/bin/python3 -m pytest
}
package() {
python3 -m installer -d "$pkgdir" .dist/*.whl
}
sha512sums="
24feac08a9726379b749f391bdb6ddbca6d3631cf3515d3ead85ace7a96f213bf60e2cd4d4f3c7cade68b5e481b4bfd562482817befe6322579101a8d91add66 py3-pyinstrument-4.7.3.tar.gz
"

Some files were not shown because too many files have changed in this diff Show more