Compare commits

...

649 commits
v3.18 ... edge

Author SHA1 Message Date
396c98208d
backports/caprine: new aport 2024-10-29 09:01:48 -04:00
b345573aa1
forgejo: chose highest version when dealing with multiple downstream_versions 2024-10-29 08:56:16 -04:00
257e019992
forgejo: fix typo in check-community 2024-10-29 07:07:21 -04:00
97ed4992d7
forgejo: Fix is_it_old logics 2024-10-29 07:06:50 -04:00
7814f05e1c
Check every day at 5 am instead of hourly 2024-10-28 08:21:30 -04:00
9bf9771b8c
forgejo: update is_it_old to use new title format 2024-10-27 17:01:04 -04:00
6e698a0974
forgejo: add update check workflows 2024-10-27 15:02:24 -04:00
367a606da2
user/signal-desktop: upgrade to 7.29.0
Some checks failed
/ build-x86_64 (pull_request) Has been cancelled
/ deploy-x86_64 (pull_request) Has been cancelled
/ deploy-aarch64 (pull_request) Has been cancelled
/ build-aarch64 (pull_request) Has been cancelled
/ lint (pull_request) Successful in 31s
2024-10-16 20:38:26 -04:00
26657f4d57
backports/py3-sip: drop due to in community
All checks were successful
/ lint (pull_request) Successful in 28s
/ deploy-x86_64 (pull_request) Successful in 26s
/ build-x86_64 (pull_request) Successful in 42s
/ build-aarch64 (pull_request) Successful in 1m11s
/ deploy-aarch64 (pull_request) Successful in 54s
2024-10-16 20:28:51 -04:00
9e2e00cd44
backports/py3-qt6: drop due to in community 2024-10-16 20:28:26 -04:00
5771d09151
backports/py3-pyqt6-sip: drop due to in community 2024-10-16 20:28:02 -04:00
0414f86242
backports/py3-django-debug-toolbar: drop due to in community 2024-10-16 20:26:36 -04:00
fb7a3fe81b
user/caprine: drop due to move to aports 2024-10-16 20:07:46 -04:00
3ffe64d0d4
user/forgejo-aneksajo: drop due to move to iports 2024-10-16 20:07:07 -04:00
15d01121ba
user/zotero: drop due to in aports
All checks were successful
/ deploy-aarch64 (pull_request) Successful in 55s
/ build-aarch64 (pull_request) Successful in 1m14s
/ lint (pull_request) Successful in 28s
/ deploy-x86_64 (pull_request) Successful in 29s
/ build-x86_64 (pull_request) Successful in 39s
2024-10-16 20:04:33 -04:00
d24323205e backports/signal-desktop: upgrade to 7.28.0 2024-10-16 17:48:59 +00:00
eef1e89d88
backports/signal-desktop: upgrade to 7.28.0
All checks were successful
/ lint (pull_request) Successful in 28s
/ deploy-aarch64 (pull_request) Successful in 1m1s
/ build-aarch64 (pull_request) Successful in 1h4m59s
/ build-x86_64 (pull_request) Successful in 26m33s
/ deploy-x86_64 (pull_request) Successful in 40s
2024-10-09 18:23:19 -04:00
623d98575e user/zotero: upgrade to 7.0.7 2024-10-07 16:24:49 +00:00
b306518289
testing/signal-desktop: upgrade to 7.27.0
All checks were successful
/ lint (pull_request) Successful in 28s
/ deploy-x86_64 (pull_request) Successful in 38s
/ build-x86_64 (pull_request) Successful in 35m39s
/ build-aarch64 (pull_request) Successful in 1h19m18s
/ deploy-aarch64 (pull_request) Successful in 1m0s
2024-10-07 09:37:14 -04:00
63f6a6099f
backports/signal-desktop: import upstream changes 2024-09-27 17:32:59 -04:00
7e21600868
backports/signal-desktop: upgrade to 7.26.0
All checks were successful
/ lint (pull_request) Successful in 27s
/ deploy-x86_64 (pull_request) Successful in 38s
/ build-x86_64 (pull_request) Successful in 32m29s
/ build-aarch64 (pull_request) Successful in 1h8m29s
/ deploy-aarch64 (pull_request) Successful in 59s
2024-09-27 10:13:09 -04:00
714437157c
user/zotero: upgrade to 7.0.6
All checks were successful
/ lint (pull_request) Successful in 37s
/ deploy-x86_64 (pull_request) Successful in 1m2s
/ build-x86_64 (pull_request) Successful in 1h34m59s
/ deploy-aarch64 (pull_request) Successful in 1m7s
/ build-aarch64 (pull_request) Successful in 2h14m29s
2024-09-26 09:16:53 -04:00
f82ac83d0b
backports/signal-desktop: upgrade to 7.25.0
All checks were successful
/ lint (pull_request) Successful in 41s
/ build-x86_64 (pull_request) Successful in 24m19s
/ deploy-x86_64 (pull_request) Successful in 26s
/ deploy-aarch64 (pull_request) Successful in 57s
/ build-aarch64 (pull_request) Successful in 59m21s
2024-09-18 22:59:46 -04:00
b9b609bedf
backports/signal-desktop: upgrade to 7.24.1
All checks were successful
/ lint (pull_request) Successful in 26s
/ deploy-x86_64 (pull_request) Successful in 25s
/ build-x86_64 (pull_request) Successful in 22m30s
/ deploy-aarch64 (pull_request) Successful in 57s
/ build-aarch64 (pull_request) Successful in 57m41s
2024-09-13 11:20:57 -04:00
8ffac41cb8
backports/signal-desktop: upgrade to 7.24.0
All checks were successful
/ lint (pull_request) Successful in 27s
/ deploy-x86_64 (pull_request) Successful in 24s
/ build-x86_64 (pull_request) Successful in 23m6s
/ build-aarch64 (pull_request) Successful in 58m43s
/ deploy-aarch64 (pull_request) Successful in 56s
2024-09-12 13:42:16 -04:00
7ffb4b3105
backports/signal-desktop: upgrade to 7.23.0
All checks were successful
/ lint (pull_request) Successful in 26s
/ deploy-x86_64 (pull_request) Successful in 27s
/ build-x86_64 (pull_request) Successful in 15m33s
/ build-aarch64 (pull_request) Successful in 1h1m41s
/ deploy-aarch64 (pull_request) Successful in 57s
2024-09-09 13:46:08 -04:00
743ceb8dbe
backports/signal-desktop: upgrade to 7.22.2
All checks were successful
/ lint (pull_request) Successful in 38s
/ build-x86_64 (pull_request) Successful in 21m27s
/ deploy-x86_64 (pull_request) Successful in 44s
/ deploy-aarch64 (pull_request) Successful in 1m3s
/ build-aarch64 (pull_request) Successful in 1h3m14s
2024-09-05 16:05:34 -04:00
021b81131e
user/mathjax2: bump pkgrel
All checks were successful
/ lint (pull_request) Successful in 27s
/ deploy-aarch64 (pull_request) Successful in 53s
/ build-aarch64 (pull_request) Successful in 2m8s
/ deploy-x86_64 (pull_request) Successful in 26s
/ build-x86_64 (pull_request) Successful in 19m19s
2024-09-03 12:22:25 -04:00
d00a14e695
forgejo: always create artifacts for build stage
Some checks failed
/ lint (pull_request) Successful in 26s
/ deploy-aarch64 (pull_request) Successful in 51s
/ build-aarch64 (pull_request) Successful in 1m10s
/ deploy-x86_64 (pull_request) Has been skipped
/ build-x86_64 (pull_request) Failing after 2m15s
2024-09-03 12:17:26 -04:00
a6e60edfd9
user/rstudio-desktop: enable build 2024-09-03 12:15:02 -04:00
68130cdf8a user/zotero: upgrade to 7.0.3 2024-08-28 13:42:02 +00:00
888654be5c
user/rmfakecloud: upgrade to 0.0.19
All checks were successful
/ lint (pull_request) Successful in 32s
/ build-x86_64 (pull_request) Successful in 2m31s
/ deploy-x86_64 (pull_request) Successful in 31s
/ deploy-aarch64 (pull_request) Successful in 54s
/ build-aarch64 (pull_request) Successful in 9m20s
2024-08-26 11:04:13 -04:00
d6e00b6395
forgejo-ci: build.sh is now local rather than patched 2024-08-26 11:02:37 -04:00
77dc41c8aa
forgejo-ci: fix double v in repo 2024-08-22 21:42:50 -04:00
1478a9f5c7
forgejo-ci: use new forge repo
Some checks failed
/ deploy-aarch64 (pull_request) Has been cancelled
/ build-aarch64 (pull_request) Has been cancelled
/ build-x86_64 (pull_request) Has been cancelled
/ deploy-x86_64 (pull_request) Has been cancelled
/ lint (pull_request) Successful in 26s
2024-08-21 10:54:38 -04:00
fc3cfbc01c
user/forgejo-aneksajo: upgrade to 8.0.1 2024-08-21 10:19:57 -04:00
659bd20ba1
README: update name 2024-08-12 12:56:21 -04:00
970fd7297f
README: update upstream 2024-08-12 12:55:21 -04:00
135bcd5a89
README: update to use forge repo 2024-08-12 12:53:29 -04:00
e7bef354af
forgejo: initial implementation
Some checks failed
/ lint (pull_request) Successful in 29s
/ deploy-aarch64 (pull_request) Failing after 3m48s
/ build-aarch64 (pull_request) Successful in 58s
/ deploy-x86_64 (pull_request) Failing after 1m40s
/ build-x86_64 (pull_request) Successful in 28s
2024-08-12 12:39:43 -04:00
260b8c3da6
gitlab-ci: drop in favor of forgejo actions 2024-08-12 12:39:38 -04:00
9a81361936
README.md: new repo location 2024-08-10 16:34:15 -04:00
f124e1fd95 gitab-ci: use git-annex instead of git-lfs 2024-08-10 15:33:40 +00:00
3a9141372b user/py3-validators: bump 2024-08-10 15:33:40 +00:00
e0a5952518
Update README 2024-08-10 11:31:00 -04:00
14348459f1 README: update for codeberg migration 2024-08-10 02:44:17 +00:00
92b265a1d0 user/py3-django-rest-framework: drop due to migration to ilot iports 2024-08-10 02:44:17 +00:00
07ce4b2776 user/ruby3.2-take: drop due migration to ilot iports 2024-08-10 02:44:17 +00:00
9b5788e012 user/gitaly: drop due migration to ilot iports 2024-08-10 02:44:17 +00:00
580e136768 user/gitlab-foss: drop due migration to ilot iports 2024-08-10 02:44:17 +00:00
32a461c894 user/gitlab-pages: drop due migration to ilot iports 2024-08-10 02:44:17 +00:00
9320defbce user/gitlab-shell: drop due migration to ilot iports 2024-08-10 02:44:17 +00:00
9c03466cc0 user/mastodon: drop due migration to ilot iports 2024-08-10 02:44:17 +00:00
177efa00c2 user/ruby3.2: drop due migration to ilot iports 2024-08-10 02:44:17 +00:00
438d6d6e3e user/ruby3.2-bundler: drop due migration to ilot iports 2024-08-10 02:44:17 +00:00
638732a089 user/ruby3.2-minitest: drop due migration to ilot iports 2024-08-10 02:44:17 +00:00
d09e518d1f user/ruby3.2-power_assert: drop due migration to ilot iports 2024-08-10 02:44:17 +00:00
ece30e0fbb user/ruby3.2-test-unit: drop due migration to ilot iports 2024-08-10 02:44:17 +00:00
8a2a28342a user/ruby3.2-webrick: drop due migration to ilot iports 2024-08-10 02:44:17 +00:00
9d6bf2f5a2 user/authentik: drop due migration to ilot iports 2024-08-10 02:44:17 +00:00
10821c427a user/freescout: drop due migration to ilot iports 2024-08-10 02:44:17 +00:00
bc63f1ddb8 user/listmonk: drop due migration to ilot iports 2024-08-10 02:44:17 +00:00
187eb88770 user/loomio: drop due migration to ilot iports 2024-08-10 02:44:17 +00:00
60b6bb1f9a user/peertube: drop due migration to ilot iports 2024-08-10 02:44:17 +00:00
5edd40d7f0 user/php82-pecl-inotify: drop due migration to ilot iports 2024-08-10 02:44:17 +00:00
7870ee72dc user/php83-pecl-inotify: drop due migration to ilot iports 2024-08-10 02:44:17 +00:00
2f4998dfb6 user/py3-django-tenants: drop due migration to ilot iports 2024-08-10 02:44:17 +00:00
783a964410 user/py3-scim2-filter-parser: drop due migration to ilot iports 2024-08-10 02:44:17 +00:00
ff94611df0 user/py3-tenant-schemas-celery: drop due migration to ilot iports 2024-08-10 02:44:17 +00:00
bdc0c313c6 user/uptime-kuma: drop due migration to ilot iports 2024-08-10 02:44:17 +00:00
21082688af user/wikijs: drop due migration to ilot iports 2024-08-10 02:44:17 +00:00
a0993c9e31
user/zotero: upgrade to 7.0.0 2024-08-09 10:41:28 -04:00
686e6a6504 backports/py3-html5-parser: new aport 2024-08-08 16:14:05 +00:00
497771fe9e backports/py3-apsw: new aport 2024-08-08 16:14:05 +00:00
0807d658c5 backports/calibre: new aport 2024-08-08 16:14:05 +00:00
7320512e66 backports/freetube: upgrade to 0.21.3 2024-08-07 21:32:22 +00:00
475c43723c user/forgejo-aneksajo: new aport 2024-08-07 19:46:56 +00:00
ad7d9444cc
backports/looking-glass: upgrade to 7b_git20240607 2024-08-07 15:34:26 -04:00
87e02ab716 user/zotero: upgrade to 7.0.0_beta109 2024-08-01 21:25:12 +00:00
7671a9d567
backports/looking-glass: new aport 2024-07-30 23:39:19 -04:00
65f2a53a44 user/gitlab-foss: upgrade to 17.0.4, fix initd 2024-07-27 05:02:39 +00:00
4d473acf9e user/gitlab-pages: upgrade to 17.0.4 2024-07-27 05:02:39 +00:00
59705e3486 user/gitaly: upgrade to 17.0.4 2024-07-27 05:02:39 +00:00
72ad06acd7
backports/py3-pyqt6-sip: upgrade to 13.8.0 2024-07-27 00:51:52 -04:00
e5f83095e3 backports/py3-{pyqt6-sip, qt6, sip}: new aport 2024-07-27 04:49:23 +00:00
eb0374cfba
user/mastodon: fix initd scripts 2024-07-11 07:08:45 -04:00
07ea5d4b03
user/mastodon: upgrade to 4.2.10 2024-07-04 22:53:50 -04:00
091908e87e user/gitlab-shell: upgrade to 14.36.0 2024-07-05 02:17:29 +00:00
0b3331610d user/gitaly: upgrade to 17.0.3 2024-07-05 02:17:29 +00:00
948e244824 user/gitlab-pages: upgrade to 17.0.3 2024-07-05 02:17:29 +00:00
3124031a00 user/gitlab-foss: upgrade to 17.0.3 2024-07-05 02:17:29 +00:00
64d88aa2e1 user/authentik: add custom css to config dir 2024-07-04 03:15:58 +00:00
a5cd9ca969 user/authentik: upgrade to 2024.4.3 2024-07-04 03:15:58 +00:00
ddb15faac6
user/i18nspector: new aport 2024-07-03 16:59:16 -04:00
bdb37e35ee user/wikijs: new aport 2024-06-17 01:20:53 +00:00
1c7eeb3dce
user/ruby3.2-rugged: drop due to not needed 2024-06-14 08:32:52 -04:00
08ee79d032
user/grpc: drop due to not needed 2024-06-14 08:32:51 -04:00
81ac0611da
user/gitlab-pages: upgrade to 17.0.2 2024-06-14 08:32:49 -04:00
f57dc997f7
user/gitaly: upgrade to 17.0.2 2024-06-14 08:32:48 -04:00
7440e781b8
user/gitlab-foss: upgrade to 17.0.2 2024-06-14 08:32:44 -04:00
65a5f41649 user/gitlab-foss: upgrade to 16.11.4 2024-06-13 22:07:41 +00:00
507663db3b user/gitlab-pages: upgrade to 16.11.4 2024-06-13 22:07:41 +00:00
f5bbad0712 user/gitaly: upgrade to 16.11.4 2024-06-13 22:07:41 +00:00
ecc61a2182 user/php82-pecl-inotify: new aport 2024-06-13 20:33:20 +00:00
1f7767fc5b
user/php83-pecl-inotify: new aport 2024-06-13 13:48:22 -04:00
e5237392b1
user/authentik: add missing depends 2024-06-12 23:53:12 -04:00
43905a4a72
user/py3-django-rest-framework: fix to version 3.14.0 2024-06-12 23:53:03 -04:00
2a72f32a3a
user/py3-scim2-filter-parser: new aport 2024-06-12 23:52:52 -04:00
b29ff4dcae
user/py3-tenant-schemas-celery: new aports 2024-06-12 23:52:46 -04:00
d87a333de8
user/py3-django-tenants: new aports 2024-06-12 23:52:40 -04:00
849cc1d7b3 user/mastodon: upgrade to 4.2.9 2024-06-13 00:33:01 +00:00
ae8c40104c
user/gitlab-pages: upgrade to 16.9.8 2024-06-10 09:43:06 -04:00
29d690fe52
user/gitaly: upgrade to 16.9.8 2024-06-10 09:42:59 -04:00
ab3231afda
user/gitlab-shell: upgrade to 14.35.0 2024-06-10 09:42:53 -04:00
8ac74918a3
user/gitlab-foss: upgrade to 16.9.8 2024-06-10 09:42:46 -04:00
d3888ec0a3
user/zotero: add missing git info 2024-06-04 13:31:31 -04:00
c9d0bd0b1e
backports/signal-desktop: upgrade to 7.11.0 2024-06-04 12:15:55 -04:00
e9d494a147 user/peertube: enable build 2024-06-04 13:32:30 +00:00
fddd91b20c unmaintained/firefly-iii-plaid-connector: move from user 2024-06-04 13:22:14 +00:00
104708e9a6 user/firefly-iii: use php83 2024-06-04 13:22:14 +00:00
34e0757763 user/mastodon: enable build 2024-06-04 02:48:20 +00:00
068b275bf0 user/gitlab-foss: enable build 2024-06-04 02:48:11 +00:00
0c00fe0b12 user/zotero: upgrade to 7.0.0_beta83 2024-06-04 02:47:03 +00:00
4c1e2a5ad1 user/zotero: enable build 2024-06-04 02:47:03 +00:00
aa38757ca4
user/wallabag: use php83 2024-06-03 13:44:17 -04:00
01bb50572f user/zotero: disable due to build failure 2024-06-03 14:00:51 +00:00
3ab0e08102 user/rstudio-desktop: disable due to build failure 2024-06-03 14:00:51 +00:00
cead3482d0 user/peertube: disable due to build failure 2024-06-03 14:00:51 +00:00
0fda66c95d user/mastodon: disable due to build failure 2024-06-03 14:00:51 +00:00
536488e38a user/gitlab-foss: disable due to build failure 2024-06-03 14:00:51 +00:00
99aad33f76 user/firefly-iii-plaid-connector: disable due to build failure 2024-06-03 14:00:51 +00:00
1687c6e040 user/firefly-iii: disable due to build failure 2024-06-03 14:00:51 +00:00
72ee0d8d1a user/py3-validators: backports old version for tandoor 2024-06-03 13:36:31 +00:00
c1565cc31e backports/signal-desktop: disable due to build failure 2024-06-03 13:33:43 +00:00
d070b3f938
backports/electron: upgrade to 30.0.9 2024-06-03 09:30:32 -04:00
9115f3f438
backports/coin: bump pkgrel 2024-06-02 19:37:44 -04:00
70a9a1d2ef backports/signal-desktop: upgrade to 7.9.0 2024-06-02 23:36:36 +00:00
7f99c70ce5 backports/py3-limits: upgrade to 3.12.0 2024-06-02 23:36:13 +00:00
c2f2218c0a backports/py3-flask-limiter: upgrade to 3.7.0 2024-06-02 23:35:56 +00:00
2932096ea6 community/electron: upgrade to 30.0.5 2024-06-02 23:35:30 +00:00
34a6944283 backports/py3-pivy: upgrade to 0.6.9_alpha0 2024-06-02 23:33:12 +00:00
78d9f34889 backports/py3-pynest2d: bump pkgrel 2024-06-02 23:24:05 +00:00
d9314aa56b backports/py3-arcus: upgrade to 5.3.0 2024-06-02 23:24:05 +00:00
d8dcb89d2d backports/cura: fix build 2024-06-02 23:24:05 +00:00
2e84eef9d9 user/thelounge: move from backports 2024-06-02 23:22:30 +00:00
a4df28e246 backports/uranium: bump pkgrel 2024-06-02 23:22:08 +00:00
53a721ead8 backports/py3-trimesh: bump pkgrel 2024-06-02 23:22:08 +00:00
b84114435d backports/py3-svgpath: bump pkgrel 2024-06-02 23:22:08 +00:00
ec037fc6fd backports/py3-rapidjson: bump pkgrel 2024-06-02 23:22:08 +00:00
e1da3f4eb1 backports/py3-pyinstrument: bump pkgrel 2024-06-02 23:22:08 +00:00
8f83341ebe backports/py3-microdata: bump pkgrel 2024-06-02 23:22:08 +00:00
7380fdd1ac backports/py3-django-mapbox-earcut: bump pkgrel 2024-06-02 23:22:08 +00:00
8ea01a36bf backports/py3-django-debug-toolbar: bump pkgrel 2024-06-02 23:22:08 +00:00
e7e9e8df9c backports/py3-dateparser: bump pkgrel 2024-06-02 23:13:39 +00:00
78a43bb4de backports/py3-daemon: remove docutils depend 2024-06-02 23:13:26 +00:00
de61d62ece backports/openssl1.1-compat: fix build 2024-06-02 23:11:41 +00:00
58efa9d1c5 backports/libnest2d: bump pkgrel 2024-06-02 23:09:36 +00:00
e1c4603357 backports/libmedc: bump pkgrel 2024-06-02 23:09:21 +00:00
0b85dd2b35 backports/freetube: bump pkgrel 2024-06-02 23:07:34 +00:00
22a9e782c2
backports/freecad: disable due to missing py3-pyside2 depend 2024-06-02 19:05:52 -04:00
8447cddacb backports/php81-pecl-xmlrpc: dropped from aports 2024-06-02 22:57:33 +00:00
04adb3958c user/php81-pecl-inotify: drop 2024-06-02 22:57:33 +00:00
ad6af41168 unmaintained/php82-pecl-inotify: move from user 2024-06-02 22:57:33 +00:00
6c21bf267a user/perl-net-domain-tld: drop due to in community 2024-06-02 22:57:33 +00:00
106ef2e7c0 user/perl-math-random-secure: drop due to in community 2024-06-02 22:57:33 +00:00
c2788b3527 user/perl-math-random-isaac: drop due to in community 2024-06-02 22:57:33 +00:00
7fe91bbe55 user/perl-crypt-random-source: drop due to in community 2024-06-02 22:57:33 +00:00
123f23ae47 unmaintained/airsonic: move from user 2024-06-02 22:57:33 +00:00
bace283c40 user/pdf4qt: drop due to in community 2024-06-02 22:57:33 +00:00
027acc5f24
backports/py3-microdata: move from user 2024-06-02 18:47:35 -04:00
d180f3c84e
backports/php82-pecl-xmlrpc: dropped from aports 2024-06-02 18:43:56 -04:00
5fb9235a6f
backports/py3-webdavclient3: drop due to in community 2024-06-02 18:42:39 -04:00
293424b1fc
backports/py3-validators: drop due to in community 2024-06-02 18:42:38 -04:00
dd14ec4214
backports/py3-sphinxcontrib-autoprogram: drop due to in community 2024-06-02 18:42:36 -04:00
f8f3c3f623
backports/shiboken2: dropped from aports 2024-06-02 18:42:31 -04:00
b82a63b4f3
backports/py3-pyside2: dropped from aports 2024-06-02 18:41:33 -04:00
ccf45ced96
backports/py3-gitlab: drop due to in community 2024-06-02 18:41:23 -04:00
fd52fe031b
backports/py3-flask-principal: drop due to in community 2024-06-02 18:41:14 -04:00
2054dc7f6c
backports/py3-django-auth-ldap: drop due to in community 2024-06-02 18:40:54 -04:00
2c693a0735
backports/py3-codespell: drop due to in community 2024-06-02 18:40:39 -04:00
aa5271111c
backports/py3-cbor2: drop due to in community 2024-06-02 18:40:25 -04:00
ecb91c7a17 user/authentik: upgrade to 2024.4.2 2024-05-14 20:48:43 +00:00
caf4fa9ccd user/freescout: upgrade to 1.8.139 2024-05-14 19:11:18 +00:00
da47967494 user/uptime-kuma: upgrade to 1.23.13 2024-05-01 15:03:56 +00:00
0c3e1a5643
user/authentik: upgrade to 2024.4.1 2024-04-30 21:59:01 -04:00
dd4806d73f user/zotero: upgrade to 7.0.0_beta76, add check 2024-04-28 01:59:25 +00:00
7a6e398a09
user/authentik: upgrade to 2024.4.0 2024-04-24 14:21:01 -04:00
b2496b6e09 user/py3-xmlsec: drop for in aports community 2024-04-22 17:05:05 +00:00
97356cba4f user/py3-python3-saml: drop for in aports community 2024-04-22 17:05:05 +00:00
5b0ae29dca user/uptime-kuma: upgrade to 1.23.12 2024-04-19 19:00:48 +00:00
708afd7f0e
user/zotero: rebuild with firefox 115.10.0 2024-04-18 17:03:46 -04:00
050ac7ca90 unmaintained/roxy-wi: move from user 2024-04-16 17:21:05 +00:00
b4c8cce0cf user/tandoor-recipes: rebuild for python3.12 2024-04-16 17:21:05 +00:00
a10a4d40b7 user/roxy-wi: rebuild for python3.12 2024-04-16 17:21:05 +00:00
4bb69fce05 user/rm-extractor: rebuild for python3.12 2024-04-16 17:21:05 +00:00
736ce39150 user/py3-xmlsec: rebuild for python3.12 2024-04-16 17:21:05 +00:00
3d459468bd user/py3-python3-saml: rebuild for python3.12 2024-04-16 17:21:05 +00:00
ddebcf35a4 user/py3-microdata: rebuild for python3.12 2024-04-16 17:21:05 +00:00
02aec4ffd3 user/paperless-ngx: rebuild for python3.12 2024-04-16 17:21:05 +00:00
ea5b5e2f60 user/mastodon: rebuild for python3.12 2024-04-16 17:21:05 +00:00
b7deeb738f user/kb: rebuild for python3.12 2024-04-16 17:21:05 +00:00
12511e2a64 user/jellysub: rebuild for python3.12 2024-04-16 17:21:05 +00:00
8cddad5d72 user/grpc: rebuild for python3.12 2024-04-16 17:21:05 +00:00
36ed3813a0 user/calibre-web: rebuild for python3.12 2024-04-16 17:21:05 +00:00
d0300ac8b3 user/authentik: rebuild for python3.12 2024-04-16 17:21:05 +00:00
ca8eea2b54
backports/py3-webdavclient3: bump 2024-04-16 12:23:01 -04:00
2a65046a45 backports/py3-webdavclient3: add aport 2024-04-16 15:29:27 +00:00
da35bc1720 backports/py3-django-auth-ldap: add aport 2024-04-16 15:29:27 +00:00
f3779d8d89 backports/py3-pytube: new aport 2024-04-16 14:37:31 +00:00
5e65c0abb9 backports/py3-validators: rebuild against python 3.12 2024-04-16 13:01:02 +00:00
11ac1c742c Revert "backports/py3-validators: drop for in community"
This reverts commit cd8c8dc55d.
2024-04-16 13:01:02 +00:00
8e13b2c442 backports/py3-flask-limiter: rebuild against python 3.12 2024-04-16 11:58:29 +00:00
5dddb230f9 backports/py3-pivy: disable to due broken py3-pyside2 2024-04-16 04:31:44 +00:00
b22efac80a backports/py3-pyside2: disabled due to segfault 2024-04-16 04:31:44 +00:00
9a92d96a50 backports/shiboken2: new aport 2024-04-16 04:31:44 +00:00
edf1e0a0c5 backports/py3-pyside2: new aport 2024-04-16 04:31:44 +00:00
b652dddfcf backports/py3-*: rebuild remain against python 3.12 2024-04-16 04:31:44 +00:00
93aaeb31b9 backports/freetube: upgrade to 0.20.0 2024-04-16 03:49:02 +00:00
4a0505901e backports/py3-sphinzcontrib-autoprogram: rebuild against python 3.12 2024-04-16 03:37:04 +00:00
c1991a2c08 backports/py3-gitlab: upgrade to 4.4.0 2024-04-16 03:37:04 +00:00
d6fee0eb4d backports/py3-rtree: rebuild against python 3.12 2024-04-16 03:34:40 +00:00
2510dce542 backports/py3-mapbox-earcut: rebuild against python 3.12 2024-04-16 03:34:40 +00:00
390a01796e backports/py3-svgpath: rebuild against python 3.12 2024-04-16 03:34:40 +00:00
5e58780bc1 backports/py3-trimesh: rebuild 2024-04-16 03:34:40 +00:00
512c400b00 backports/libmedc: rebuild 2024-04-16 03:15:59 +00:00
f742579610 backports/py3-django-debug-toolbar: upgrade to 4.3 2024-04-16 03:14:56 +00:00
07a38065e6 backports/py3-dateparser: upgrade to 1.2.0 2024-04-15 20:55:12 +00:00
c47a118804 backports/py3-flask-principal: rebuild 2024-04-15 20:54:48 +00:00
a30c134fd3 backports/py3-levenshtein: upgrade to 0.25.1 2024-04-15 20:54:42 +00:00
c2257cc7ce backports/py3-limits: upgrade to 3.10.1 2024-04-15 20:53:52 +00:00
7b533ea001 backports/py3-cbor2: upgrade to 5.6.1 2024-04-15 20:53:36 +00:00
ab18390138 backports/uranium: rebuild 2024-04-15 20:52:56 +00:00
fb93a24f33 user/zotero: upgrade to 7.0.0_beta74 2024-04-15 20:52:41 +00:00
69bc8b2ff1 backports/electron-tasje: upgrade to 0.7.1 2024-04-14 22:52:33 +00:00
21d4c1908e backports/openscad: drop for in community 2024-04-14 22:50:48 +00:00
715c1f545a backports/dex: rebuild for python 3.12 2024-04-14 22:48:18 +00:00
785e28ab75 backports/nb: upgrade to 7.12.1 2024-04-14 22:46:32 +00:00
08f8bbbd72 backports/libspnav: drop for in community 2024-04-14 22:44:58 +00:00
e587a87e3c backports/py3-pyinstrument: upgrade to 4.6.2 2024-04-14 22:44:26 +00:00
db8069a8d0 backports/thelounge: rebuild 2024-04-14 22:44:11 +00:00
1658218568 backports/perl-math-random-isaac-xs: rebuild 2024-04-14 22:41:32 +00:00
80b60ca012 backports/rapidfuzz: upgrade to 3.0.0 2024-04-14 22:37:36 +00:00
ecb929e891 backports/signal-desktop: upgrade to 7.4.0 2024-04-14 21:38:56 +00:00
92287e391b backports/perl-module-find: drop for in community 2024-04-14 21:32:10 +00:00
a514444306 backports/uvicorn: drop for in community 2024-04-14 21:32:10 +00:00
6681d816a8 backports/py3-w3lib: drop for in community 2024-04-14 21:32:10 +00:00
cd8c8dc55d backports/py3-validators: drop for in community 2024-04-14 21:32:10 +00:00
c280d170ee backports/py3-httptools: drop for in community 2024-04-14 21:32:10 +00:00
33f1b837de backports/py3-a2wsgi: drop for in community 2024-04-14 21:32:10 +00:00
a9b84f6a7d user/py3-django-autocomplete-light: drop for in aports 2024-04-14 21:32:10 +00:00
1d098bcd92 user/py3-pyrdfa3: drop for in aports 2024-04-14 21:32:10 +00:00
faccdf4ebf user/xmlsec: drop for in aports 2024-04-14 21:32:10 +00:00
9ed9fda2ac user/py3-webdavclient3: drop for in aports 2024-04-14 21:32:10 +00:00
9969287e44 user/py3-recipe-scrapers: drop for in aports 2024-04-14 21:32:10 +00:00
853e422ff0 user/py3-pytube: drop for in aports 2024-04-14 21:32:10 +00:00
5a992237d8 user/py3-pytest-factoryboy: drop for in aports 2024-04-14 21:32:10 +00:00
fbde65756d user/py3-pyppeteer: drop for in aports 2024-04-14 21:32:10 +00:00
4219b51190 user/py3-onelogin: drop for in aports 2024-04-14 21:32:10 +00:00
caa4af5318 user/py3-jstyleson: drop for in aports 2024-04-14 21:32:10 +00:00
feaf0d342b user/py3-html-text: drop for in aports 2024-04-14 21:32:10 +00:00
a2bd40cb20 user/py3-extruct: drop for in aports 2024-04-14 21:32:10 +00:00
09dcf012bf user/py3-drf-writable-nested: drop for in aports 2024-04-14 21:32:10 +00:00
39a4d6ed54 user/py3-django-webpack-loader: drop for in aports 2024-04-14 21:32:10 +00:00
d5f8abeccd user/py3-django-scopes: drop for in aports 2024-04-14 21:32:10 +00:00
b15e3982d8 user/py3-django-js-reverse: drop for in aports 2024-04-14 21:32:10 +00:00
ff9981e6ee user/py3-django-hcaptcha: drop for in aports 2024-04-14 21:32:10 +00:00
c2992a5cc9 user/py3-django-cleanup: drop for in aports 2024-04-14 21:32:10 +00:00
1a7f5ae1c7 user/py3-django-auth-ldap: drop for in aports 2024-04-14 21:32:10 +00:00
c65f433040 user/py3-django-annoying: drop for in aports 2024-04-14 21:32:10 +00:00
a8fdc84a19 user/py3-crispy-bootstrap4: drop for in aports 2024-04-14 21:32:10 +00:00
4f3a3fca13
user/freescout: upgrade to 1.8.135 2024-04-13 10:37:06 -04:00
d80cb99ecc
user/freescout: update post-install update message 2024-04-12 22:56:54 -04:00
a6f42e0955 user/freescout: upgrade to 1.8.134 2024-04-13 02:41:48 +00:00
75582f0fd6
user/zotero: upgrade to 7.0.0_beta68 2024-04-02 14:31:36 -04:00
e7db0340bf
user/gitaly: upgrade to 16.9.3 2024-04-01 15:53:06 -04:00
7b3186e647
user/gitlab-pages: upgrade to 16.9.3 2024-04-01 15:52:38 -04:00
fdb5ab17a1
user/gitlab-foss: upgrade to 16.9.3 2024-04-01 15:52:21 -04:00
2a4c907f83 user/gitlab-shell: upgrade to 14.34.0 2024-03-10 14:36:22 +00:00
2a825ce109 user/gitaly: upgrade to 16.9.2 2024-03-10 14:36:22 +00:00
e8400cd165 user/gitlab-pages: upgrade to 16.9.2 2024-03-10 14:36:22 +00:00
6c24e38caf user/gitlab-foss: upgrade to 16.9.2 2024-03-10 14:36:22 +00:00
e0b2920fa4 user/freescout: upgrade to 1.8.126 2024-03-09 05:24:22 +00:00
5e1cfaa832
user/zotero: upgrade to 7.0.0_beta65 2024-03-06 19:24:18 -05:00
d5d85a538c user/zotero: enable on aarch64 2024-03-06 21:15:53 +00:00
bc0fb8396d user/authentik: upgrade to 2024.2.2 2024-03-06 03:52:37 +00:00
cb1f8cf70c user/uptime-kuma: new aport 2024-03-04 00:04:00 +00:00
a17fecd2f2 backports/thelounge: allow self https for connect-src 2024-02-27 23:07:53 +00:00
d871b6ca9d user/gitaly: upgrade to 16.9.0 2024-02-24 17:56:43 +00:00
07f02d7d4f user/gitlab-pages: upgrade to 16.9.0 2024-02-24 17:56:43 +00:00
b02725241b user/gitlab-foss: upgrade to 16.9.0 2024-02-24 17:56:43 +00:00
adfd7d5461
user/freescout: upgrade to 1.8.122 2024-02-23 14:47:08 -05:00
417cf900e1 user/mastodon: upgrade to 4.2.8 2024-02-23 19:21:25 +00:00
28d52cbcc7
user/listmonk: new aport 2024-02-23 01:20:52 -05:00
1279f4009c user/zotero: upgrade to 7.0.0_beta60 2024-02-23 02:59:14 +00:00
b9529a8d2c
user/freescout: fix post-upgrade 2024-02-22 20:32:58 -05:00
f5236f6a9a
user/freescout: refer to client as member 2024-02-22 20:20:25 -05:00
de465d44b6
user/freescout: add missing upgrade step 2024-02-22 15:34:03 -05:00
3e4b39966f user/freescout: upgrade to 1.8.120 2024-02-22 20:20:38 +00:00
718b7a441f user/mastodon: upgrade to 4.2.7 2024-02-22 18:45:52 +00:00
f3c2249274 user/authentik: upgrade to 2024.2.1 2024-02-22 18:45:36 +00:00
c9b849a66b user/gitlab-pages: upgrade to 16.8.3 2024-02-22 18:45:01 +00:00
bcfab7eccb user/gitlab-foss: upgrade to 16.8.3 2024-02-22 18:45:01 +00:00
a5feaf6806 user/gitaly: upgrade to 16.8.3 2024-02-22 18:45:01 +00:00
fa933b283d
user/scanservjs: upgrade to 3.0.3 2024-02-06 18:43:28 -05:00
150a546396 backports/signal-desktop: upgrade to 6.46.0 2024-02-06 15:21:08 +00:00
6bfd1707ab user/pdf4qt: new aport 2024-02-04 15:46:34 +00:00
e1c9e01b27
user/zotero: patch cleanup 2024-02-03 16:03:16 -05:00
800e0ff1b4
user/zotero: upgrade to 7.0.0_beta57 2024-02-03 15:02:01 -05:00
b9e95a4e58 user/zotero: merge firefox-esr and fix build 2024-02-03 19:48:08 +00:00
ca2f264713 user/zotero: upgrade to 7.0.0_git20240131 2024-02-03 19:48:08 +00:00
72e1de3224
backports/thelounge: new aport 2024-02-03 12:48:55 -05:00
7b760382bb
user/tandoor-recipes: upgrade to 1.5.12 2024-02-03 01:25:02 -05:00
03ecd212a1
user/mastodon: upgrade to 4.2.5 2024-02-01 11:21:11 -05:00
df1ff99b07
user/icedtea-web: fix desktop application 2024-01-31 10:49:24 -05:00
be4b23bd94 user/authentik: upgrade to 2023.10.7 2024-01-31 06:11:59 +00:00
21d5b39207 user/icedtea-web: new aport 2024-01-31 05:42:38 +00:00
9e00cce2ea
user/tandoor-recipes: upgrade to 1.5.11 2024-01-28 00:24:08 -05:00
cc1af00d34
user/dotnet8-sdk: now on aports 2024-01-28 00:20:29 -05:00
bdccdfdba2
user/dotnet8-runtime: now on aports 2024-01-28 00:20:16 -05:00
201ef47734
user/dotnet8-stage0: now on aports 2024-01-28 00:19:58 -05:00
5066be5df6 user/gitaly: upgrade to 16.8.1 2024-01-26 05:46:54 +00:00
a883140e49 user/gitlab-pages: upgrade to 16.8.1 2024-01-26 05:46:54 +00:00
6b5390fad8 user/gitlab-foss: upgrade to 16.8.1 2024-01-26 05:46:54 +00:00
2671b47b45
user/freescout: upgrade 1.8.119 2024-01-25 10:20:26 -05:00
c8c12df6b1 user/mastodon: upgrade to 4.2.4 2024-01-24 14:59:32 +00:00
2fa6e20f99 user/mastodon: add py3-elasticsearch depend 2024-01-24 14:59:32 +00:00
cab3eb0cae user/mastodon: use ruby 3.2 2024-01-24 14:59:32 +00:00
35a22c241e user/freescout: upgrade to 1.8.118 2024-01-22 04:33:29 +00:00
705157b616
user/gitlab-foss: fix glfm_markdown libary location 2024-01-21 19:58:16 -05:00
ffa0b84df5 user/gitaly: upgrade to 16.8.0 2024-01-21 23:48:26 +00:00
1363ab7df2 user/gitlab-shell: upgrade to 14.33.0 2024-01-21 23:48:26 +00:00
2fb5da77f9 user/gitlab-pages: upgrade to 16.8.0 2024-01-21 23:48:26 +00:00
331a8e92b3 user/gitlab-foss: upgrade to 16.8.0 2024-01-21 23:48:26 +00:00
b107c065f3 user/ruby3.2-test-unit: new aport 2024-01-21 23:48:26 +00:00
325c159bf0 user/ruby3.2-webrick: new aport 2024-01-21 23:48:26 +00:00
304a57c1c9 user/ruby3.2-rugged: new aport 2024-01-21 23:48:26 +00:00
36da7aa759 user/ruby3.2-power_assert: new aport 2024-01-21 23:48:26 +00:00
89806ba46c user/ruby3.0-webrick: remove for ruby3.2 2024-01-21 23:48:26 +00:00
3441d234c0 user/ruby3.0-test-unit: remove for ruby3.2 2024-01-21 23:48:26 +00:00
313ac34de8 user/ruby3.0-rugged: remove for ruby3.2 2024-01-21 23:48:26 +00:00
0cc62b6d4f user/ruby3.0-power_assert: remove for ruby3.2 2024-01-21 23:48:26 +00:00
36b9607f89 user/ruby3.0-rake: remove for ruby3.2 2024-01-21 23:48:26 +00:00
017303dac4 user/ruby3.0-bundler: remove for ruby3.2 2024-01-21 23:48:26 +00:00
87c6a5eca6 user/ruby3.0-minitest: remove for ruby3.2 2024-01-21 23:48:26 +00:00
e0b8a94f39 user/ruby3.0: remove for ruby3.2 2024-01-21 23:48:26 +00:00
a2b8b68641
backports/py3-twilio: remove due to community 2024-01-21 14:31:53 -05:00
95c8508a8e
backports/py3-python-jwt: remove due to community 2024-01-21 14:31:38 -05:00
018226775d user/py3-xmlsec: add xmlsec 1.3.3 support 2024-01-20 04:40:07 +00:00
c271508b7f
gitlab-ci: do not use testing repos 2024-01-19 09:50:52 -05:00
3f67a56a91
backports/py3-utils: new aport 2024-01-19 09:50:51 -05:00
ed759d3239
backports/py3-trimesh: new aport 2024-01-19 09:50:46 -05:00
993c865a5c
backports/py3-svgpath: new aport 2024-01-19 00:00:30 -05:00
9067438ed8
backports/py3-rtree: new aport 2024-01-18 23:48:48 -05:00
c00ab9d900
backports/py3-pyinstrument: new aport 2024-01-18 23:48:37 -05:00
c091c20672
backports/py3-numpy-stl: new aport 2024-01-18 23:48:28 -05:00
de4d9f1d38
backports/py3-mapbox-earcut: new aport 2024-01-18 23:48:15 -05:00
22368d6492
backports/nlopt: new aport 2024-01-18 23:48:03 -05:00
5d1ddda555
backports/libspatialindex: new aport 2024-01-18 23:47:54 -05:00
a409dd8147
backports/libnest2d: new aport 2024-01-18 23:47:43 -05:00
950993581c backports/py3-pynest2d: new aport 2024-01-19 03:11:34 +00:00
5b4e321820 backports/py3-arcus: new aport 2024-01-19 03:11:34 +00:00
fc88ae4f0c backports/fdm-materials: new aport 2024-01-19 03:11:34 +00:00
5a8ea97ecd backports/uranium: new aport 2024-01-19 03:11:34 +00:00
1a792f23c9 backports/cura: new aports 2024-01-19 03:11:34 +00:00
e811398af9 user/rmfakecloud: upgrade to 0.0.17 2024-01-15 22:57:50 +00:00
a23b312f46 backports/nb: new aport 2024-01-13 21:30:29 +00:00
5b65ef5b0d
user/kb: fix depends and colored patch 2024-01-12 17:00:40 -05:00
645eeea482
backports/py3-colored: new aport 2024-01-12 16:53:26 -05:00
7916377e65
user/kb: new aport 2024-01-12 21:51:36 +00:00
9a7604eaad user/gitlab-foss: upgrade to 16.6.4 2024-01-12 15:32:06 +00:00
21018c4710 user/gitlab-pages: upgrade to 16.6.4 2024-01-12 15:32:06 +00:00
ed333c4461 user/gitaly: upgrade to 16.6.4 2024-01-12 15:32:06 +00:00
ebcbe1fec9 user/grpc: use ruby 3.2 2024-01-12 05:00:53 +00:00
49e4024cc6 user/grpc: use ruby 3.2 2024-01-12 05:00:53 +00:00
c91a725f96 user/protobuf: upgrade to ruby3.2 2024-01-12 05:00:53 +00:00
8b76c98089 user/loomio: new aport 2024-01-12 05:00:53 +00:00
02af6d8194 user/ruby3.2-rake: new aport 2024-01-12 05:00:53 +00:00
365e4301ae user/ruby3.2-bundler: new aport 2024-01-12 05:00:53 +00:00
055ea7572d user/ruby3.2-minitest: new aport 2024-01-12 05:00:53 +00:00
f94cd98eff user/ruby3.2: new aport 2024-01-12 05:00:53 +00:00
27fd9ab80c user/dotnet8-sdk: upgrade to 8.0.101 2024-01-12 02:17:05 +00:00
2ef38b56b4 user/dotnet8-runtime: upgrade to 8.0.1 2024-01-12 02:17:05 +00:00
2531809043 user/xochitl-bin: rebuild 2024-01-11 21:26:51 +00:00
4dc26a2d21 user/xf86-video-fbdev-rm: rebuild 2024-01-11 21:26:51 +00:00
8fc6bcc2e0 user/u-boot-rm: rebuild 2024-01-11 21:26:51 +00:00
2c2d9f59da user/rm-utils: rebuild 2024-01-11 21:26:51 +00:00
236ea2406f user/rm-extractor: rebuild 2024-01-11 21:26:51 +00:00
7e4c556b55 user/linux-rm-headers: upgrade to v1.3.4 of upstream 2024-01-11 21:26:51 +00:00
323137ca9e user/linux-rm: upgrade to v1.3.4 of upstream and fix build 2024-01-11 21:26:51 +00:00
3ef36a5960
user/authentik: upgrade to 2023.10.6 2024-01-09 13:20:51 -05:00
b463bbf277 user/peertube: new aport 2024-01-06 06:02:27 +00:00
f9f62cc125
user/freescout: upgrade to 1.8.116 2024-01-06 00:28:00 -05:00
c62e9cc1ce user/freescout: moved writable directories to /var/lib 2024-01-06 05:27:17 +00:00
7ed6778289 user/py3-python3-saml: new aport 2024-01-05 23:59:26 +00:00
8b019782cd user/py3-xmlsec: use latest xmlsec 2024-01-05 23:59:26 +00:00
e13e8b8638
user/py3-onelogin: new aport 2024-01-05 18:41:29 -05:00
37cc16bc08 user/freescout: new aport 2024-01-05 06:31:50 +00:00
bd961262a5 backports/py3-validators: pin to 0.20.0 for tandoor 2024-01-05 03:33:59 +00:00
ae18f93da1 user/tandoor-recipes: new aport 2024-01-05 03:33:59 +00:00
261bedbaac user/py3-webdavclient3: new aport 2024-01-05 03:33:59 +00:00
0ca7adc3a8 user/py3-recipe-scrapers: new aport 2024-01-05 03:33:59 +00:00
7083e622eb user/py3-pytest-factoryboy: new aport 2024-01-05 03:33:59 +00:00
7223c02683 user/py3-pytube: new aport 2024-01-05 03:33:59 +00:00
61b2e414b4 user/py3-pyrdfa3: new aport 2024-01-05 03:33:59 +00:00
8f3a9b0d01 user/py3-pyppeteer: new aport 2024-01-05 03:33:59 +00:00
f8391b17c1 user/py3-microdata: new aport 2024-01-05 03:33:59 +00:00
31626d57dd user/py3-jstyleson: new aport 2024-01-05 03:33:59 +00:00
3b9d7bd5ba user/py3-html-text: new aport 2024-01-05 03:33:59 +00:00
1a271f6c8e user/py3-extruct: new aport 2024-01-05 03:33:59 +00:00
2ea2deb1ca user/py3-drf-writable-nested: new aport 2024-01-05 03:33:59 +00:00
b11a2325ae user/py3-django-webpack-loader: new aport 2024-01-05 03:33:59 +00:00
5bc511682a user/py3-django-scopes: new aport 2024-01-05 03:33:59 +00:00
c4098d2bdb user/py3-django-js-reverse: new aport 2024-01-05 03:33:59 +00:00
32b826a565 user/py3-django-hcaptcha: new aport 2024-01-05 03:33:59 +00:00
0c66a3365e user/py3-django-cleanup: new aport 2024-01-05 03:33:59 +00:00
b7776f33fa user/py3-django-autocomplete-light: new aport 2024-01-05 03:33:59 +00:00
2d5403cea1 user/py3-django-auth-ldap: new aport 2024-01-05 03:33:59 +00:00
74c7c8d878 user/py3-django-annoying: new aport 2024-01-05 03:33:59 +00:00
8247c09ac0 user/py3-crispy-bootstrap4: new aport 2024-01-05 03:33:59 +00:00
b460c9feaa backports/py3-w3lib: new aport 2024-01-05 03:33:59 +00:00
655a6a0d07 backports/py3-django-debug-toolbar: new aport 2024-01-05 03:33:59 +00:00
3e15f69ad2 gitlab-ci: always pickup artifacts 2024-01-05 03:33:59 +00:00
3b9c28c813 user/authentik: upgrade to 2023.10.5 2023-12-29 17:41:09 +00:00
70e5057247 Update README.md 2023-12-08 03:40:47 +00:00
c5c0c156a0 backports/*: clean 2023-12-08 02:44:38 +00:00
10aaf62e26 user/mastodon: upgrade to 4.2.3 2023-12-06 00:15:33 +00:00
02081f71b7 user/gitlab-foss: upgrade to 16.6.1 2023-12-05 23:36:11 +00:00
f2770aa9ac
user/perl-text-markdown: in community 2023-12-05 14:25:22 -05:00
b97502d516
user/perl-rpc-xml: in community 2023-12-05 14:25:04 -05:00
d79cbda483
user/perl-locale-gettext: in community 2023-12-05 14:24:31 -05:00
eb541348de
user/perl-email-valid: in community 2023-12-05 14:24:10 -05:00
daf247f0aa
user/perl-cgi-formbuilder: in community 2023-12-05 14:23:55 -05:00
0f2b0e8155
user/py3-pytest-django: in community 2023-12-05 14:19:48 -05:00
67cebc80bd
*/*: clean-up community and testing 2023-12-05 14:18:53 -05:00
0a669827a1 backports/signal-desktop: upgrade to 6.40.0 2023-12-01 19:51:16 +00:00
515ce5c57b backports/electron: upgrade to 27.1.2 2023-12-01 17:35:38 +00:00
fd0f429eee
community/git-annex: upgrade to 10.20231129 2023-11-29 15:12:37 -05:00
46d0bffbd9
user/gitlab-foss: use sys-filesystem version 1.3.2 2023-11-29 13:22:37 -05:00
9e6beac6eb user/mastodon: upgrade to 4.2.1 2023-11-29 02:07:57 +00:00
67f4498faa user/gitlab-pages: upgrade to 16.6.0 2023-11-29 01:54:19 +00:00
3f1ec8a6cc user/gitlab-foss: upgrade to 16.6.0 2023-11-29 01:54:19 +00:00
6817cec4b8 user/gitlab-shell: upgrade to 14.30.1 2023-11-29 01:54:19 +00:00
f4bf9f53a1 user/gitaly: upgrade to 16.6.0 2023-11-29 01:54:19 +00:00
92ad6ea5a6 user/grpc: upgrade to 1.58.0 2023-11-29 01:54:19 +00:00
3befac4b9a user/protobuf: new aport 2023-11-29 01:54:19 +00:00
a04c921a0b testing/ikiwki: move from user 2023-11-29 01:23:54 +00:00
70fa91ab14 user/rmfakecloud: upgrade to 0.0.16 2023-11-29 01:22:34 +00:00
ae23e477e8 user/rstudio: enable 2023-11-29 01:19:53 +00:00
c5069e73b7
unmaintained/gconf: move from user 2023-11-28 20:06:49 -05:00
2ab914d26f
unmaintained/craftbukkit-plugin*: move from user 2023-11-28 20:05:05 -05:00
a008c21e12
unmaintained/grumble: move from user 2023-11-28 20:04:44 -05:00
9d4a64117f
unmaintained/mumble-web: move from user 2023-11-28 20:04:31 -05:00
9a0cf88fba
unmaintained/papermc: move from user 2023-11-28 20:04:17 -05:00
209fdf2282
unmaintained/freshrss*: move from user 2023-11-28 20:02:32 -05:00
737709d1f5 user/dotnet8-sdk: upgrade to 8.0.100 2023-11-28 00:53:11 +00:00
5ab3dafdd1 user/dotnet8-runtime: upgrade to 8.0.0 2023-11-28 00:53:11 +00:00
3fb3ec0d41 user/dotnet8-stage0: upgrade to 8.0.100 2023-11-28 00:53:11 +00:00
776b567f1f user/paperless-ngx: upgrade to 1.17.4 2023-11-27 19:48:18 +00:00
c5eb15765f user/calibre-web: upgrade to 0.6.21 2023-11-27 19:37:25 +00:00
d5dee60311 user/mastodon: enable 2023-11-27 19:32:23 +00:00
cc682c3917 backports/openssl1.1-compat: new aport 2023-11-27 19:03:21 +00:00
82ce5cc1a0
user/ruby3.0*: re-enable 2023-11-27 13:27:46 -05:00
2b18c58e1f user/authentik: upgrade to 2023.10.4 2023-11-27 18:16:40 +00:00
dbad9842c4
user/caprine: enable build 2023-11-27 11:43:12 -05:00
697e4764ef
user/caprine: upgrade to 2.59.1 2023-11-27 11:29:31 -05:00
fab01a1aa4
backports/py3-limits: new aport 2023-11-27 09:22:10 -05:00
d8fa3f9703
user/mastodon: disable due to ruby3.0 2023-11-27 09:21:42 -05:00
111e6a4a18 testing/py3-onelogin: new aport 2023-11-26 20:00:16 +00:00
1036e597d1 testing/py3-aenum: new aport 2023-11-26 20:00:16 +00:00
2e0750f430 testing/py3-webauthn: upgrade to 1.11.1 2023-11-26 20:00:16 +00:00
b38306c3db testing/py3-wand: upgrade to 0.6.13 2023-11-26 20:00:16 +00:00
cb29f2e9ff testing/py3-twilio: upgrade to 8.10.2 2023-11-26 20:00:16 +00:00
c739d0f2bb testing/py3-slack-sdk: upgrade to 3.26.0 2023-11-26 20:00:16 +00:00
5d69177b4c testing/py3-ruamel-yaml: upgrade to 0.18.5 2023-11-26 20:00:16 +00:00
5aabe9c6ea testing/py3-rpy2: upgrade to 3.5.14 2023-11-26 20:00:16 +00:00
755145c015 testing/py3-pytelegrambotapi: upgrade to 4.14.0 2023-11-26 20:00:16 +00:00
59e005cc44 testing/py3-pydantic-scim: upgrade to 0.0.8 2023-11-26 20:00:16 +00:00
7a3a22c557 testing/py3-pyaml: upgrade to 23.9.7 2023-11-26 20:00:16 +00:00
97efd636f2 testing/py3-portalocker: upgrade to 2.8.2 2023-11-26 20:00:16 +00:00
3256839967 testing/py3-ipware: upgrade to 2.0.0 2023-11-26 20:00:16 +00:00
67f22c4c88 testing/py3-imap-tools: upgrade to 1.5.0 2023-11-26 20:00:16 +00:00
9784e24a25 testing/py3-duo-client: upgrade to 5.2.0 2023-11-26 20:00:16 +00:00
c173c056e3 testing/py3-django-redis: upgrade to 5.4.0 2023-11-26 20:00:16 +00:00
ce1c4c7cc7 testing/py3-django-q: upgrade to 1.6.1 2023-11-26 20:00:16 +00:00
45c596483c testing/py3-django-otp: upgrade to 1.3.0 2023-11-26 20:00:16 +00:00
8885763783 testing/py3-django-environ: upgrade to 0.11.2 2023-11-26 20:00:16 +00:00
4a4b538172 testing/py3-django-drf-spectacular: upgrade to 0.26.5 2023-11-26 20:00:16 +00:00
f51291f016 testing/py3-django-allauth: upgrade to 0.58.2 2023-11-26 20:00:16 +00:00
c4b6764d44
testing/freetube: upgrade to 0.19.1 2023-11-26 11:27:17 -05:00
035d223cad backports/electron-tasje: upgrade to 0.7.0 2023-11-26 15:02:46 +00:00
fa03078459 backports/electron: upgrade to 27.1.0 2023-11-26 15:02:46 +00:00
efade45a0e community/dotnet6-runtime: sync with aports 2023-11-26 15:01:01 +00:00
6756e08c16 community/dotnet6-build: sync with aports 2023-11-26 15:01:01 +00:00
46e7fabb59 community/dotnet6-stage0: sync with aports 2023-11-26 15:01:01 +00:00
ef7698e757 community/dotnet7-runtime: sync with aports 2023-11-26 14:57:50 +00:00
37645465bc community/dotnet7-build: sync with aports 2023-11-26 14:57:50 +00:00
8b189a521f community/dotnet7-stage0: sync with aports 2023-11-26 14:57:50 +00:00
30f6e014bd community/yadm: sync with aports 2023-11-26 14:57:00 +00:00
c8e0701739 community/git-annex: sync with aports 2023-11-26 14:57:00 +00:00
4f32bea3af testing/py3-twilio: sync with aports 2023-11-26 14:57:00 +00:00
c31c062006 testing/py3-swagger-spec-validator: sync with aports 2023-11-26 14:57:00 +00:00
7a42546ba4 testing/py3-ruamel-yaml-clib: sync with aports 2023-11-26 14:57:00 +00:00
6ffb1f3d85 testing/py3-ruamel-raml: sync with aports 2023-11-26 14:57:00 +00:00
9a4240789e testing/py3-rpy2: sync with aports 2023-11-26 14:57:00 +00:00
66318e421b testing/py3-goodreads: sync with aports 2023-11-26 14:57:00 +00:00
2090b34f43 testing/py3-flower: sync with aports 2023-11-26 14:57:00 +00:00
0e800ce8c2 testing/py3-django-q: sync with aports 2023-11-26 14:57:00 +00:00
038744fa84 testing/py3-django-extensions: sync with testing 2023-11-26 14:57:00 +00:00
cabfc92ad5 testing/py3-click-didyoumean: sync with testing 2023-11-26 14:57:00 +00:00
32c81d3bc4 testing/pearl-file-share: sync with testing 2023-11-26 14:57:00 +00:00
a8220abdff testing/freetube: sync with testing 2023-11-26 14:57:00 +00:00
86bdea4400 backports/py3-structlog: move from testing 2023-11-26 14:57:00 +00:00
264c04c14d
community/powershell: sync with aports 2023-11-25 11:03:12 -05:00
662e718ad6
gitlab-ci: enable aarch64 builders 2023-11-24 16:46:24 -05:00
c1c98ed24c
backports/sqlcipher: now in community 2023-11-24 16:46:23 -05:00
b4784d9a53
unmaintained/slade: move from backports 2023-11-24 16:46:22 -05:00
f46361d11d
backports/signal-desktop: upgrade to 6.39.1 2023-11-24 16:46:20 -05:00
48301f5d48
backports/rapidfuzz: upgrade to 2.2.3 2023-11-24 16:46:19 -05:00
2d03b12c23
backports/py3-wxpython: now in community 2023-11-24 16:46:18 -05:00
2d96035a08
backports/py3-whitenoise: now in community 2023-11-24 16:46:16 -05:00
2603fcf7d3
backports/py3-vine: update build 2023-11-24 16:46:15 -05:00
d1cd058cf3
backports/py3-sphinxcontrib-autoprogram: update build 2023-11-24 16:46:13 -05:00
c558ea4340
backports/py3-rapidjson: upgrade to 1.12 2023-11-24 16:46:12 -05:00
3a5b050493
backports/py3-psycopg: now in community 2023-11-24 16:46:11 -05:00
de64255e4b
backports/py3-pathvalidate: upgrade to 3.2.0 2023-11-24 16:46:09 -05:00
76d0574413
backports/py3-fuzzywuzzy: enable 2023-11-24 16:46:08 -05:00
11dc3d9344
backports/py3-levenshteine: upgrade to 0.23.0 2023-11-24 16:46:06 -05:00
9ebe8e3290
backports/jwcrypto: upgrade to 1.5.0 2023-11-24 16:46:05 -05:00
795de4bf97
backports/py3-httptools: upgrade to 0.6.1 2023-11-24 16:46:03 -05:00
c360ae7a79
backports/py3-hatch-fancy-pypi-readme: now in community 2023-11-24 16:46:02 -05:00
6a43138f45
backports/py3-gitlab: upgrade to 4.1.1 2023-11-24 16:46:01 -05:00
1e28879f05
backports/py3-flask-limiter: upgrade to 3.5.0 2023-11-24 16:45:59 -05:00
0bd7faca61
backports/py3-docker-py: now in community 2023-11-24 16:45:58 -05:00
92f4a996e3
backports/py3-deepmerge: now in community 2023-11-24 16:45:57 -05:00
b084150b9f
backports/py3-codespell: upgrade to 2.2.6 2023-11-24 16:45:55 -05:00
18e4c9bf17
testing/py3-click-repl: move from backports 2023-11-24 16:45:54 -05:00
c30a94f509
testing/py3-click-didyoumean: move from backports 2023-11-24 16:45:52 -05:00
6b0e1e15af
backports/py3-click-repl: upgrade to 0.3.0 2023-11-24 16:45:51 -05:00
8027ff421a
backports/py3-cbor2: upgrade to 5.5.0 2023-11-24 16:45:50 -05:00
be83876650
backports/py3-a2wsgi: upgrade to 1.8.0 2023-11-24 16:45:48 -05:00
fb21e2a4b9
php-pecl-mcrypt: now in community 2023-11-24 16:45:47 -05:00
3f7ff5fc31
php-pecl-inotify: move to user 2023-11-24 16:45:46 -05:00
8e83978816
backports/perl-file-homedir: now in community 2023-11-24 16:45:45 -05:00
3e5f897722
backports/opencsg: now in community 2023-11-24 16:45:43 -05:00
df571ac166
backports/opencascade: now in community 2023-11-24 16:45:42 -05:00
dd9534a9e6
backports/ngspice: now in community 2023-11-24 16:45:41 -05:00
457b2a729c
backports/lib3mf: now in community 2023-11-24 16:45:40 -05:00
f4fbdd254a
backports/kicad: now in community 2023-11-24 16:45:38 -05:00
49fa1d1e24
backports/front-parisienne: now in community 2023-11-24 16:45:37 -05:00
68320b90ed
backports/git: version now in main 2023-11-24 16:45:35 -05:00
ba0caca883
backports/crc32c: now in community 2023-11-24 12:54:35 -05:00
9bb5da1453
backports/catch2-3: now in community 2023-11-24 12:48:41 -05:00
36cbf88905
backports/cargo-auditable: now in main 2023-11-24 12:48:10 -05:00
5e719a2e2c
*/*: aarch64 initial port 2023-11-24 12:43:25 -05:00
d135da4c84
user/mastodon: upgrade to 4.1.8 2023-09-19 11:59:19 -04:00
968c3d2ef8
user/dotnet8-sdk: swap sdk and runtime + portable build 2023-09-08 15:12:20 -04:00
32a55f61be
user/dotnet8-runtime: swap sdk and runtime + portable build 2023-09-08 15:12:06 -04:00
aa71d24702
user/dotnet8-stage0: support sdk as stage1 2023-09-08 15:10:56 -04:00
0db9ff5c2b backports/electron: upgrade to 26.1.0 2023-09-01 19:22:34 +00:00
199a0231e9 user/authentik: add missing depend 2023-09-01 18:02:37 +00:00
69f88e8918 backports/py3-docker-py: new aport 2023-09-01 18:02:37 +00:00
4b242674dd backports/py3-docker: drop 2023-09-01 18:02:37 +00:00
124d774704 user/authentik: use py3-docker-py instead of py3-docker 2023-09-01 18:02:37 +00:00
c32e9a73c5 backports/kicad: upgrade to 7.0.7 2023-09-01 17:31:32 +00:00
a27ba4ad42 backports/py3-docker: new aport 2023-09-01 17:24:50 +00:00
8d6e473d97
user/authentik: upgrade to 2023.8.2 2023-09-01 12:39:29 -04:00
972381ae44
backports/freecad: upgrade to 0.20.1 2023-08-31 21:19:51 -04:00
21ce420fb8 backports/freecad: enable build 2023-09-01 00:32:12 +00:00
bdea75ee96 backports/uvicorn: enable 2023-08-31 23:30:37 +00:00
8aff9751a5
backports/rapidfuzz: upgrade to 2.0.0 2023-08-31 13:38:59 -04:00
73216a21f1
unmaintained/slade: move from user 2023-08-31 13:03:36 -04:00
19ac1ce96a
unmaintained/phppgadmin: move from user 2023-08-31 13:03:17 -04:00
e2fb96e136
unmaintained/mattermost: move from user 2023-08-31 13:02:51 -04:00
966941476f
user/zotero: disable 2023-08-31 13:02:04 -04:00
d67dcd1811
user/ruby3.0-rugged: upgrade to 1.6.3 2023-08-31 13:01:46 -04:00
abc3b1a07a
user/rstudio-desktop: disable 2023-08-31 13:01:10 -04:00
1874c9762c
user/grpc: disable 2023-08-31 13:00:44 -04:00
dba85ceaef
user/gitlab-foss: disable 2023-08-31 13:00:36 -04:00
d50921e431
user/firefox-esr: disable 2023-08-31 13:00:26 -04:00
788f9b0aa4
testing/hsxkpasswd: sync with aports 2023-08-31 13:00:10 -04:00
d6337b4ceb
community/git-annex: upgrade for edge 2023-08-31 12:59:51 -04:00
dbca3a622d
backports/uvicorn: disable 2023-08-31 12:59:14 -04:00
770ed36f2f
backports/rapidfuzz: disable 2023-08-31 12:58:54 -04:00
f5384a4c3f
backports/py3-levenshtein: disable 2023-08-31 12:58:15 -04:00
ebe177e487
backports/freecad: disable 2023-08-31 12:57:59 -04:00
a310a1e6fd backports/py3-flask-limiter: checksum 2023-08-31 01:10:55 +00:00
47dee49bc3 testing/py3-portalocker: new aport 2023-08-31 01:10:55 +00:00
02e2509b39 testing/py3-docker: drop duplicate 2023-08-31 01:10:55 +00:00
6d7ac191e4 testing/py3-whoosh: move from user 2023-08-31 01:10:55 +00:00
2b5ea79474 testing/py3-webauthn: move from user 2023-08-31 01:10:55 +00:00
657f3d8521 testing/py3-wand: move from user 2023-08-31 01:10:55 +00:00
1499281eb9 testing/py3-urllib3-secure-extra: move from user 2023-08-31 01:10:55 +00:00
034a8b80aa testing/py3-ua-parser: move from user 2023-08-31 01:10:55 +00:00
ca7e37aef2 testing/py3-twilio: move from user 2023-08-31 01:10:55 +00:00
283f2505cf testing/py3-tika: move from user 2023-08-31 01:10:55 +00:00
0e8074ffce testing/py3-swagger-spec-validator: move from user 2023-08-31 01:10:55 +00:00
2602322a8c testing/py3-slack-sdk: move from user 2023-08-31 01:10:55 +00:00
9097c1d4da testing/py3-ruamel-yaml: move from user 2023-08-31 01:10:55 +00:00
b90a9f716c testing/py3-ruamel-yaml-clib: move from user 2023-08-31 01:10:55 +00:00
d98f9dae6a testing/py3-rpy2: move from user 2023-08-31 01:10:55 +00:00
26f670ddd6 testing/py3-rauth: move from user 2023-08-31 01:10:55 +00:00
b05c84fef1 testing/py3-pytz-deprecation-shim: move from user 2023-08-31 01:10:55 +00:00
5cad1665a6 testing/py3-pytelegrambotapi: move from user 2023-08-31 01:10:55 +00:00
90cb0ee11d testing/py3-pydantic-scim: move from user 2023-08-31 01:10:55 +00:00
60bc034f61 testing/py3-pyaml: move from user 2023-08-31 01:10:55 +00:00
da52559518 testing/py3-openid: move from user 2023-08-31 01:10:55 +00:00
d083b85a09 testing/py3-opencontainers: move from user 2023-08-31 01:10:55 +00:00
447ae3222a testing/py3-memcached: move from user 2023-08-31 01:10:55 +00:00
b4ad7dc77c testing/py3-langdetect: move from user 2023-08-31 01:10:55 +00:00
b8979a4159 testing/py3-iso639: move from user 2023-08-31 01:10:55 +00:00
03aec9e958 testing/py3-ipware: move from user 2023-08-31 01:10:55 +00:00
f38b6ca78c testing/py3-inotifyrecursive: move from user 2023-08-31 01:10:55 +00:00
c13dff94c5 testing/py3-inotify-simple: move from user 2023-08-31 01:10:55 +00:00
6947968de3 testing/py3-imap-tools: move from user 2023-08-31 01:10:55 +00:00
1749b1deed testing/py3-goodreads: move from user 2023-08-31 01:10:55 +00:00
17321a7110 testing/py3-flower: move from user 2023-08-31 01:10:55 +00:00
c8374a9d05 testing/py3-facebook-sdk: move from user 2023-08-31 01:10:55 +00:00
ab424f26ff testing/py3-duo-client: move from user 2023-08-31 01:10:55 +00:00
8c91a05e4a testing/py3-dumb-init: move from user 2023-08-31 01:10:55 +00:00
47b55c3d96 testing/py3-docker: move from user 2023-08-31 01:10:55 +00:00
36950d2048 testing/py3-django-rest-framework-guardian: move from user 2023-08-31 01:10:55 +00:00
c33d300107 testing/py3-django-redis: move from user 2023-08-31 01:10:55 +00:00
7615e090b7 testing/py3-django-q: move from user 2023-08-31 01:10:55 +00:00
c4259dee25 testing/py3-django-prometheus: move from user 2023-08-31 01:10:55 +00:00
44e94a2199 testing/py3-django-picklefield: move from user 2023-08-31 01:10:55 +00:00
2ee9c85c31 testing/py3-django-otp: move from user 2023-08-31 01:10:55 +00:00
56d3b5f467 testing/py3-django-guardian: move from user 2023-08-31 01:10:55 +00:00
d31e1cec52 testing/py3-django-extensions: move from user 2023-08-31 01:10:55 +00:00
f913376230 testing/py3-django-environ: move from user 2023-08-31 01:10:55 +00:00
04b4ed9090 testing/py3-django-drf-spectacular: move from user 2023-08-31 01:10:55 +00:00
e45bc7c6c7 testing/py3-django-compression-middleware: move from user 2023-08-31 01:10:55 +00:00
c6d52607b8 testing/py3-django-channels: move from user 2023-08-31 01:10:55 +00:00
ecdd3778a7 testing/py3-django-channels-redis: move from user 2023-08-31 01:10:55 +00:00
542a6fad45 testing/py3-django-celery-results: move from user 2023-08-31 01:10:55 +00:00
149cd61df5 testing/py3-django-allauth: move from user 2023-08-31 01:10:55 +00:00
77a00e2bf3 testing/py3-dacite: move from user 2023-08-31 01:10:55 +00:00
306237b553 testing/py3-concurrent-log-handler: move from user 2023-08-31 01:10:55 +00:00
e6ef8a392b testing/py3-backports-abc: move from user 2023-08-31 01:10:55 +00:00
abc5acc54c backports/py3-whitenoise: move from user 2023-08-31 01:10:55 +00:00
463176a8b4 testing/py3-structlog: move from user 2023-08-31 01:10:55 +00:00
41c2bc27e3 user/py3-pathvalidate: move from user 2023-08-31 01:10:55 +00:00
5d046fe878 backports/py3-hatch-fancy-pypi: move from user 2023-08-31 01:10:55 +00:00
a42582583b backports/py3-flask-limiter: move from user 2023-08-31 01:10:55 +00:00
09363bd7d7 backports/py3-django-oauth-toolkit: move from user 2023-08-31 01:10:55 +00:00
93314a5349 backports/py3-deepmerge: move from user 2023-08-31 01:10:55 +00:00
eab6a228f8 backports/py3-dateparser: move from user 2023-08-31 01:10:55 +00:00
0b3ee80df8
user/authentik: drop py3-redis-nonfree 2023-08-30 14:41:00 -04:00
4a72f830ac
user/py3-redis-nonfree: remove aport 2023-08-30 14:40:32 -04:00
21d7ef4c18
user/authentik: fix patch 2023-08-30 14:34:09 -04:00
592 changed files with 8295 additions and 29128 deletions


@ -1,27 +1,26 @@
 #!/bin/sh
 # shellcheck disable=SC3043
-. $CI_PROJECT_DIR/.gitlab/bin/functions.sh
+. /usr/local/lib/functions.sh
 # shellcheck disable=SC3040
 set -eu -o pipefail
 readonly APORTSDIR=$CI_PROJECT_DIR
-readonly REPOS="cross backports user testing community"
+readonly REPOS="backports user"
 readonly ALPINE_REPOS="main community testing"
 readonly ARCH=$(apk --print-arch)
 # gitlab variables
 readonly BASEBRANCH=$CI_MERGE_REQUEST_TARGET_BRANCH_NAME
 : "${REPODEST:=$HOME/packages}"
-: "${MIRROR:=https://lab.ilot.io/ayakael/repo-apk/-/raw}"
+: "${MIRROR:=https://ayakael.net/api/packages/forge/alpine}"
 : "${ALPINE_MIRROR:=http://dl-cdn.alpinelinux.org/alpine}"
 : "${MAX_ARTIFACT_SIZE:=300000000}" #300M
 : "${CI_DEBUG_BUILD:=}"
 : "${CI_ALPINE_BUILD_OFFSET:=0}"
 : "${CI_ALPINE_BUILD_LIMIT:=9999}"
-: "${CI_ALPINE_TARGET_ARCH:=$(uname -m)}"
 msg() {
 local color=${2:-green}
@ -71,7 +70,7 @@ report() {
 get_release() {
 case $BASEBRANCH in
-v*) echo "${BASEBRANCH%-*}";;
+v*) echo "$BASEBRANCH";;
 edge) echo edge;;
 *) die "Branch \"$BASEBRANCH\" not supported!"
 esac
@ -80,9 +79,8 @@ get_release() {
 build_aport() {
 local repo="$1" aport="$2"
 cd "$APORTSDIR/$repo/$aport"
-export CHOST=$CI_ALPINE_TARGET_ARCH
 if abuild -r 2>&1 | report "build-$aport"; then
-checkapk | report "checkapk-$aport" || true
+checkapk 2>&1 | report "checkapk-$aport" || true
 aport_ok="$aport_ok $repo/$aport"
 else
 aport_ng="$aport_ng $repo/$aport"
@ -92,12 +90,6 @@ build_aport() {
 check_aport() {
 local repo="$1" aport="$2"
 cd "$APORTSDIR/$repo/$aport"
-export CHOST=$CI_ALPINE_TARGET_ARCH
-# TODO: this enables crossbuild only on user, this should be cleaner
-if [ "$repo" != "user" ] && [ "$repo" != "backports" ] && [ "$CI_ALPINE_TARGET_ARCH" != "$ARCH" ]; then
-aport_na="$aport_na $repo/$aport"
-return 1
-fi
 if ! abuild check_arch 2>/dev/null; then
 aport_na="$aport_na $repo/$aport"
 return 1
@ -110,16 +102,13 @@ set_repositories_for() {
 release=$(get_release)
 for repo in $REPOS; do
-[ "$repo" = "non-free" ] && continue
+[ "$release" == "edge" ] && [ "$repo" == "backports" ] && continue
 repos="$repos $MIRROR/$release/$repo $REPODEST/$repo"
 [ "$repo" = "$target_repo" ] && break
 done
-sudo sh -c "printf '%s\n' $repos >> /etc/apk/repositories"
+doas sh -c "printf '%s\n' $repos >> /etc/apk/repositories"
-sudo apk update || true
+doas apk update
-if [ "$CI_ALPINE_TARGET_ARCH" != "$ARCH" ]; then
-sudo sh -c "printf '%s\n' $repos >> $HOME/sysroot-$CI_ALPINE_TARGET_ARCH/etc/apk/repositories"
-sudo cp -R /etc/apk/keys/* $HOME/sysroot-$CI_ALPINE_TARGET_ARCH/etc/apk/keys/.
-sudo apk --root=$HOME/sysroot-$CI_ALPINE_TARGET_ARCH update || true
-fi
 }
 apply_offset_limit() {
@ -139,22 +128,10 @@ setup_system() {
 [ "$release" != "edge" ] && [ "$repo" == "testing" ] && continue
 repos="$repos $ALPINE_MIRROR/$release/$repo"
 done
-repos="$repos $MIRROR/$release/cross"
-sudo sh -c "printf '%s\n' $repos > /etc/apk/repositories"
-sudo apk -U upgrade -a || sudo apk fix || die "Failed to up/downgrade system"
-if [ "$CI_ALPINE_TARGET_ARCH" != "$ARCH" ]; then
-sudo apk add gcc-$CI_ALPINE_TARGET_ARCH
-fi
-gitlab_key_to_rsa $ABUILD_KEY rsa-private $HOME/.abuild/$ABUILD_KEY_NAME.rsa
-gitlab_key_to_rsa $ABUILD_KEY_PUB rsa-public $HOME/.abuild/$ABUILD_KEY_NAME.rsa.pub
-chmod 700 $HOME/.abuild/$ABUILD_KEY_NAME.rsa
-echo "PACKAGER_PRIVKEY=$HOME/.abuild/$ABUILD_KEY_NAME.rsa" >> $HOME/.abuild/abuild.conf
-sudo cp $HOME/.abuild/$ABUILD_KEY_NAME.rsa.pub /etc/apk/keys/$ABUILD_KEY_NAME.rsa.pub
-# patch abuild for crosscompiling
-sudo patch -p1 -d / -i $CI_PROJECT_DIR/.gitlab/patches/abuild-cross.patch
-sudo sed -i -E 's/export JOBS=[0-9]+$/export JOBS=$(nproc)/' /etc/abuild.conf
+doas sh -c "printf '%s\n' $repos > /etc/apk/repositories"
+doas apk -U upgrade -a || apk fix || die "Failed to up/downgrade system"
+abuild-keygen -ain
+doas sed -i -E 's/export JOBS=[0-9]+$/export JOBS=$(nproc)/' /etc/abuild.conf
 ( . /etc/abuild.conf && echo "Building with $JOBS jobs" )
 mkdir -p "$REPODEST"
 git config --global init.defaultBranch master
@ -203,7 +180,7 @@ sysinfo || true
 setup_system || die "Failed to setup system"
 # git no longer allows to execute in repositories owned by different users
-sudo chown -R $USER: .
+doas chown -R buildozer: .
 fetch_flags="-qn"
 debugging && fetch_flags="-v"
@ -226,7 +203,6 @@ build_start=$CI_ALPINE_BUILD_OFFSET
 build_limit=$CI_ALPINE_BUILD_LIMIT
 for repo in $(changed_repos); do
-mkdir -p "$APORTSDIR"/logs "$APORTSDIR"/packages "$APORTSDIR"/keys
 set_repositories_for "$repo"
 built_aports=0
 changed_aports_in_repo=$(changed_aports "$repo")
@ -267,7 +243,7 @@ for ok in $aport_ok; do
 done
 for na in $aport_na; do
-msg "$na: disabled for $CI_ALPINE_TARGET_ARCH" yellow
+msg "$na: disabled for $ARCH" yellow
 done
 for ng in $aport_ng; do
@ -281,3 +257,4 @@ if [ "$failed" = true ]; then
 elif [ -z "$aport_ok" ]; then
 msg "No packages found to be built." yellow
 fi
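For orientation, a rough sketch of what the reworked `set_repositories_for user` ends up running for an edge merge request: backports is skipped on edge, so only the user repo and its local `$REPODEST` counterpart are added. The `$REPODEST` path shown is an assumption (the script's default of `$HOME/packages` under the buildozer user).

```shell
# Hypothetical expansion of the loop above for repo=user on an edge MR.
doas sh -c "printf '%s\n' \
    https://ayakael.net/api/packages/forge/alpine/edge/user \
    /home/buildozer/packages/user >> /etc/apk/repositories"
doas apk update
```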

31 .forgejo/bin/check_ver.sh Executable file
View file

@ -0,0 +1,31 @@
#!/bin/bash
# expects the following env variables:
# downstream: downstream repo
repo=${downstream/*\/}
curl --silent $downstream/x86_64/APKINDEX.tar.gz | tar -O -zx APKINDEX > APKINDEX
owned_by_you=$(awk -v RS= -v ORS="\n\n" '/m:Antoine Martin \(ayakael\) <dev@ayakael.net>/' APKINDEX | awk -F ':' '{if($1=="o"){print $2}}' | sort | uniq)
echo "Found $(printf '%s\n' $owned_by_you | wc -l ) packages owned by you"
rm -f out_of_date not_in_anitya
for pkg in $owned_by_you; do
upstream_version=$(curl --fail -X GET -sS -H 'Content-Type: application/json' "https://release-monitoring.org/api/v2/packages/?name=$pkg&distribution=Alpine" | jq -r '.items.[].stable_version')
downstream_version=$(sed -n "/^P:$pkg$/,/^$/p" APKINDEX | awk -F ':' '{if($1=="V"){print $2}}' | sort -V | tail -n 1)
downstream_version=${downstream_version/-*}
if [ -z "$upstream_version" ]; then
echo "$pkg not in anitya"
echo "$pkg" >> not_in_anitya
elif [ "$downstream_version" != "$(printf '%s\n' $upstream_version $downstream_version | sort -V | head -n 1)" ]; then
echo "$pkg higher downstream"
continue
elif [ "$upstream_version" != "$downstream_version" ]; then
echo "$pkg upstream version $upstream_version does not match downstream version $downstream_version"
echo "$pkg $downstream_version $upstream_version $repo" >> out_of_date
fi
done
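The script is driven entirely by the `downstream` environment variable that the check workflows below provide; a minimal local sketch (the URL is the one the check-user workflow uses, and the output files land in the working directory):

```shell
# Hypothetical local run of check_ver.sh; needs bash, curl, jq, gawk and coreutils.
export downstream=https://ayakael.net/api/packages/forge/alpine/edge/user
./.forgejo/bin/check_ver.sh
# out_of_date rows are "<pkg> <downstream_version> <upstream_version> <repo>"
cat out_of_date not_in_anitya 2>/dev/null || true
```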

165 .forgejo/bin/create_issue.sh Executable file
View file

@ -0,0 +1,165 @@
#!/bin/bash
# expects:
# env variable FORGEJO_TOKEN
# file out_of_date
IFS='
'
repo=${downstream/*\/}
does_it_exist() {
name=$1
downstream_version=$2
upstream_version=$3
repo=$4
query="$repo/$name: upgrade to $upstream_version"
query="$(echo $query | sed 's| |%20|g' | sed 's|:|%3A|g' | sed 's|/|%2F|g' )"
result="$(curl --silent -X 'GET' \
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues?state=open&q=$query&type=issues" \
-H 'accept: application/json' \
-H "authorization: Basic $FORGEJO_TOKEN"
)"
if [ "$result" == "[]" ]; then
return 1
fi
}
is_it_old() {
name=$1
downstream_version=$2
upstream_version=$3
repo=$4
query="$repo/$name: upgrade to"
query="$(echo $query | sed 's| |%20|g' | sed 's|:|%3A|g' | sed 's|/|%2F|g' )"
result="$(curl --silent -X 'GET' \
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues?state=open&q=$query&type=issues" \
-H 'accept: application/json' \
-H "authorization: Basic $FORGEJO_TOKEN"
)"
result_title="$(echo $result | jq -r '.[].title' )"
result_id="$(echo $result | jq -r '.[].number' )"
result_upstream_version="$(echo $result_title | awk '{print $4}')"
if [ "$upstream_version" != "$result_upstream_version" ]; then
echo $result_id
else
echo 0
fi
}
update_title() {
name=$1
downstream_version=$2
upstream_version=$3
repo=$4
id=$5
result=$(curl --silent -X 'PATCH' \
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues/$id" \
-H 'accept: application/json' \
-H "authorization: Basic $FORGEJO_TOKEN" \
-H 'Content-Type: application/json' \
-d "{
\"title\": \"$repo/$name: upgrade to $upstream_version\"
}"
)
return 0
}
create_issue() {
name=$1
downstream_version=$2
upstream_version=$3
repo=$4
result=$(curl --silent -X 'POST' \
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues" \
-H 'accept: application/json' \
-H "authorization: Basic $FORGEJO_TOKEN" \
-H 'Content-Type: application/json' \
-d "{
\"title\": \"$repo/$name: upgrade to $upstream_version\",
\"labels\": [
$LABEL_NUMBER
]
}")
return 0
}
if [ -f out_of_date ]; then
out_of_date="$(cat out_of_date)"
echo "Detected $(wc -l out_of_date) out-of-date packages, creating issues"
for pkg in $out_of_date; do
name="$(echo $pkg | awk '{print $1}')"
downstream_version="$(echo $pkg | awk '{print $2}')"
upstream_version="$(echo $pkg | awk '{print $3}')"
repo="$(echo $pkg | awk '{print $4}')"
if does_it_exist $name $downstream_version $upstream_version $repo; then
echo "Issue for $repo/$name already exists"
continue
fi
id=$(is_it_old $name $downstream_version $upstream_version $repo)
if [ "$id" != "0" ] && [ -n "$id" ]; then
echo "Issue for $repo/$name needs updating"
update_title $name $downstream_version $upstream_version $repo $id
continue
fi
echo "Creating issue for $repo/$name"
create_issue $name $downstream_version $upstream_version $repo
done
fi
if [ -f not_in_anitya ]; then
query="Add missing $repo packages to anitya"
query="$(echo $query | sed 's| |%20|g')"
result="$(curl --silent -X 'GET' \
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues?state=open&q=$query&type=issues" \
-H 'accept: application/json' \
-H "authorization: Basic $FORGEJO_TOKEN"
)"
if [ "$result" == "[]" ]; then
echo "Creating anitya issue"
result=$(curl --silent -X 'POST' \
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues" \
-H 'accept: application/json' \
-H "authorization: Basic $FORGEJO_TOKEN" \
-H 'Content-Type: application/json' \
-d "{
\"title\": \"Add missing $repo packages to anitya\",
\"body\": \"- [ ] $(sed '{:q;N;s/\n/\\n- [ ] /g;t q}' not_in_anitya)\",
\"labels\": [
$LABEL_NUMBER
]
}")
else
echo "Updating anitya issue"
result_id="$(echo $result | jq -r '.[].number' )"
result=$(curl --silent -X 'PATCH' \
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues/$result_id" \
-H 'accept: application/json' \
-H "authorization: Basic $FORGEJO_TOKEN" \
-H 'Content-Type: application/json' \
-d "{
\"body\": \"- [ ] $(sed '{:q;N;s/\n/\\n- [ ] /g;t q}' not_in_anitya)\"
}"
)
fi
fi
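A sketch of the environment this script assumes when it runs after check_ver.sh; every value below except LABEL_NUMBER is a placeholder (the workflows pass the real token and repository through the Forgejo runner):

```shell
# Hypothetical invocation; FORGEJO_TOKEN is a Basic-auth credential and the
# GITHUB_* variables normally come from the Forgejo Actions runner.
export GITHUB_SERVER_URL=https://ayakael.net       # placeholder
export GITHUB_REPOSITORY=forge/ayaports            # placeholder
export FORGEJO_TOKEN="base64-encoded user:token"   # placeholder secret
export LABEL_NUMBER=4
export downstream=https://ayakael.net/api/packages/forge/alpine/edge/user
./.forgejo/bin/create_issue.sh   # reads out_of_date and not_in_anitya if present
```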

26 .forgejo/bin/deploy.sh Executable file
View file

@ -0,0 +1,26 @@
#!/bin/sh
# shellcheck disable=SC3040
set -eu -o pipefail
readonly REPOS="backports user"
readonly BASEBRANCH=$GITHUB_BASE_REF
readonly TARGET_REPO=$CI_ALPINE_REPO
apkgs=$(find package -type f -name "*.apk")
for apk in $apkgs; do
branch=$(echo $apk | awk -F '/' '{print $2}')
arch=$(echo $apk | awk -F '/' '{print $3}')
name=$(echo $apk | awk -F '/' '{print $4}')
echo "Sending $name of arch $arch to $TARGET_REPO/$BASEBRANCH/$branch"
return=$(curl -s --user $FORGE_REPO_USER:$FORGE_REPO_TOKEN --upload-file $apk $TARGET_REPO/$BASEBRANCH/$branch 2>&1)
echo $return
if [ "$return" == "package file already exists" ]; then
echo "Package already exists, refreshing..."
curl -s --user $FORGE_REPO_USER:$FORGE_REPO_TOKEN -X DELETE $TARGET_REPO/$BASEBRANCH/$branch/$arch/$name
curl -s --user $FORGE_REPO_USER:$FORGE_REPO_TOKEN --upload-file $apk $TARGET_REPO/$BASEBRANCH/$branch
fi
done
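The loop above relies on the downloaded artifact keeping abuild's `<repo>/<arch>/<file>.apk` layout under `package/`; a rough manual-run sketch with placeholder credentials:

```shell
# Hypothetical manual deployment mirroring the deploy-* workflow environment.
export CI_ALPINE_REPO=https://ayakael.net/api/packages/forge/alpine
export FORGE_REPO_USER=ayakael          # placeholder
export FORGE_REPO_TOKEN="..."           # placeholder secret
export GITHUB_BASE_REF=edge             # target release branch
find package -type f -name '*.apk'      # e.g. package/user/x86_64/foo-1.0-r0.apk
./.forgejo/bin/deploy.sh
```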

View file

@ -0,0 +1,52 @@
on:
pull_request:
types: [ assigned, opened, synchronize, reopened ]
jobs:
build-aarch64:
runs-on: aarch64
container:
image: alpinelinux/alpine-gitlab-ci:latest
env:
CI_PROJECT_DIR: ${{ github.workspace }}
CI_DEBUG_BUILD: ${{ runner.debug }}
CI_MERGE_REQUEST_PROJECT_URL: ${{ github.server_url }}/${{ github.repository }}
CI_MERGE_REQUEST_TARGET_BRANCH_NAME: ${{ github.base_ref }}
steps:
- name: Environment setup
run: |
doas apk add nodejs git patch curl
cd /etc/apk/keys
doas curl -JO https://ayakael.net/api/packages/forge/alpine/key
- name: Repo pull
uses: actions/checkout@v4
with:
fetch-depth: 500
- name: Package build
run: |
${{ github.workspace }}/.forgejo/bin/build.sh
touch packages/dummy
- name: Package upload
uses: forgejo/upload-artifact@v3
with:
name: package
path: packages
deploy-aarch64:
needs: [build-aarch64]
runs-on: aarch64
container:
image: alpine:latest
env:
CI_ALPINE_REPO: 'https://ayakael.net/api/packages/forge/alpine'
FORGE_REPO_TOKEN: ${{ secrets.FORGE_REPO_TOKEN }}
FORGE_REPO_USER: ${{ vars.FORGE_REPO_USER }}
steps:
- name: Setting up environment
run: apk add nodejs curl findutils git gawk
- name: Repo pull
uses: actions/checkout@v4
- name: Package download
uses: forgejo/download-artifact@v3
- name: Package deployment
run: ${{ github.workspace }}/.forgejo/bin/deploy.sh

View file

@ -0,0 +1,52 @@
on:
pull_request:
types: [ assigned, opened, synchronize, reopened ]
jobs:
build-x86_64:
runs-on: x86_64
container:
image: alpinelinux/alpine-gitlab-ci:latest
env:
CI_PROJECT_DIR: ${{ github.workspace }}
CI_DEBUG_BUILD: ${{ runner.debug }}
CI_MERGE_REQUEST_PROJECT_URL: ${{ github.server_url }}/${{ github.repository }}
CI_MERGE_REQUEST_TARGET_BRANCH_NAME: ${{ github.base_ref }}
steps:
- name: Environment setup
run: |
doas apk add nodejs git patch curl
cd /etc/apk/keys
doas curl -JO https://ayakael.net/api/packages/forge/alpine/key
- name: Repo pull
uses: actions/checkout@v4
with:
fetch-depth: 500
- name: Package build
run: |
${{ github.workspace }}/.forgejo/bin/build.sh
touch packages/dummy
- name: Package upload
uses: forgejo/upload-artifact@v3
with:
name: package
path: packages
deploy-x86_64:
needs: [build-x86_64]
runs-on: x86_64
container:
image: alpine:latest
env:
CI_ALPINE_REPO: 'https://ayakael.net/api/packages/forge/alpine'
FORGE_REPO_TOKEN: ${{ secrets.FORGE_REPO_TOKEN }}
FORGE_REPO_USER: ${{ vars.FORGE_REPO_USER }}
steps:
- name: Setting up environment
run: apk add nodejs curl findutils git gawk
- name: Repo pull
uses: actions/checkout@v4
- name: Package download
uses: forgejo/download-artifact@v3
- name: Package deployment
run: ${{ github.workspace }}/.forgejo/bin/deploy.sh

View file

@ -0,0 +1,27 @@
on:
workflow_dispatch:
schedule:
- cron: '0 5 * * *'
jobs:
check-community:
name: Check community repo
runs-on: x86_64
container:
image: alpine:latest
env:
downstream: https://dl-cdn.alpinelinux.org/alpine/edge/community
FORGEJO_TOKEN: ${{ secrets.forgejo_token }}
LABEL_NUMBER: 4
steps:
- name: Environment setup
run: apk add grep coreutils gawk curl wget bash nodejs git jq sed
- name: Get scripts
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Check out-of-date packages
run: ${{ github.workspace }}/.forgejo/bin/check_ver.sh
- name: Create issues
run: ${{ github.workspace }}/.forgejo/bin/create_issue.sh

View file

@ -0,0 +1,27 @@
on:
workflow_dispatch:
schedule:
- cron: '0 5 * * *'
jobs:
check-community:
name: Check testing repo
runs-on: x86_64
container:
image: alpine:latest
env:
downstream: https://dl-cdn.alpinelinux.org/alpine/edge/testing
FORGEJO_TOKEN: ${{ secrets.forgejo_token }}
LABEL_NUMBER: 4
steps:
- name: Environment setup
run: apk add grep coreutils gawk curl wget bash nodejs git jq sed
- name: Get scripts
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Check out-of-date packages
run: ${{ github.workspace }}/.forgejo/bin/check_ver.sh
- name: Create issues
run: ${{ github.workspace }}/.forgejo/bin/create_issue.sh

View file

@ -0,0 +1,27 @@
on:
workflow_dispatch:
schedule:
- cron: '0 5 * * *'
jobs:
check-user:
name: Check user repo
runs-on: x86_64
container:
image: alpine:latest
env:
downstream: https://ayakael.net/api/packages/forge/alpine/edge/user
FORGEJO_TOKEN: ${{ secrets.forgejo_token }}
LABEL_NUMBER: 4
steps:
- name: Environment setup
run: apk add grep coreutils gawk curl wget bash nodejs git jq sed
- name: Get scripts
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Check out-of-date packages
run: ${{ github.workspace }}/.forgejo/bin/check_ver.sh
- name: Create issues
run: ${{ github.workspace }}/.forgejo/bin/create_issue.sh

View file

@ -0,0 +1,21 @@
on:
pull_request:
types: [ assigned, opened, synchronize, reopened ]
jobs:
lint:
run-name: lint
runs-on: x86_64
container:
image: alpinelinux/apkbuild-lint-tools:latest
env:
CI_PROJECT_DIR: ${{ github.workspace }}
CI_DEBUG_BUILD: ${{ runner.debug }}
CI_MERGE_REQUEST_PROJECT_URL: ${{ github.server_url }}/${{ github.repository }}
CI_MERGE_REQUEST_TARGET_BRANCH_NAME: ${{ github.base_ref }}
steps:
- run: doas apk add nodejs git
- uses: actions/checkout@v4
with:
fetch-depth: 500
- run: lint

View file

@ -1,101 +0,0 @@
stages:
- verify
- build
- deploy
variables:
GIT_STRATEGY: clone
GIT_DEPTH: "500"
lint:
stage: verify
interruptible: true
script:
- |
sudo apk add shellcheck atools sudo abuild
export PATH="$PATH:$CI_PROJECT_DIR/.gitlab/bin"
lint
allow_failure: true
only:
- merge_requests
tags:
- apk-$CI_MERGE_REQUEST_TARGET_BRANCH_NAME-x86_64
.build:
stage: build
interruptible: true
script:
- |
sudo apk add alpine-sdk lua-aports sudo
sudo addgroup $USER abuild
export PATH="$PATH:$CI_PROJECT_DIR/.gitlab/bin"
sudo -Eu $USER build.sh
artifacts:
paths:
- packages/
- keys/
- logs/
expire_in: 7 days
only:
- merge_requests
.cross:
stage: build
interruptible: true
script:
- |
sudo apk add alpine-sdk lua-aports sudo gzip xz qemu-$CI_QEMU_TARGET_ARCH
sudo addgroup $USER abuild
export PATH="$PATH:$CI_PROJECT_DIR/.gitlab/bin"
build-rootfs.sh alpine${CI_MERGE_REQUEST_TARGET_BRANCH_NAME/v} $CI_ALPINE_TARGET_ARCH --rootfsdir $HOME/sysroot-$CI_ALPINE_TARGET_ARCH
cp /etc/apk/repositories $HOME/sysroot-$CI_ALPINE_TARGET_ARCH/etc/apk/.
sudo -Eu $USER CHOST=$CI_TARGET_ALPINE_ARCH build.sh
artifacts:
paths:
- packages/
- keys/
- logs/
expire_in: 7 days
only:
- merge_requests
build-x86_64:
extends: .build
when: always
tags:
- apk-$CI_MERGE_REQUEST_TARGET_BRANCH_NAME-x86_64
build-ppc64le:
extends: .build
when: manual
tags:
- apk-$CI_MERGE_REQUEST_TARGET_BRANCH_NAME-ppc64le
build-s390x:
extends: .build
when: manual
tags:
- apk-$CI_MERGE_REQUEST_TARGET_BRANCH_NAME-s390x
build-armv7:
extends: .cross
when: manual
tags:
- apk-$CI_MERGE_REQUEST_TARGET_BRANCH_NAME-x86_64
variables:
CI_ALPINE_TARGET_ARCH: armv7
CI_QEMU_TARGET_ARCH: arm
push:
interruptible: true
stage: deploy
script:
- |
sudo apk add abuild git-lfs findutils
export PATH="$PATH:$CI_PROJECT_DIR/.gitlab/bin"
push.sh
rules:
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
when: manual
tags:
- repo

View file

@ -1,111 +0,0 @@
#!/bin/sh
set -e
arch=
builddir=
checkdepends=
depends=
depends_dev=
depends_doc=
depends_libs=
depends_openrc=
depends_static=
install=
install_if=
langdir=
ldpath=
license=
makedepends=
makedepends_build=
makedepends_host=
md5sums=
options=
patch_args=
pkgbasedir=
pkgdesc=
pkgdir=
pkgname=
pkgrel=
pkgver=
pkggroups=
pkgusers=
provides=
provider_priority=
replaces=
sha256sums=
sha512sums=
sonameprefix=
source=
srcdir=
startdir=
subpackages=
subpkgdir=
subpkgname=
triggers=
url=
# abuild.conf
CFLAGS=
CXXFLAGS=
CPPFLAGS=
LDFLAGS=
JOBS=
MAKEFLAGS=
CMAKE_CROSSOPTS=
. ./APKBUILD
: "$arch"
: "$builddir"
: "$checkdepends"
: "$depends"
: "$depends_dev"
: "$depends_doc"
: "$depends_libs"
: "$depends_openrc"
: "$depends_static"
: "$install"
: "$install_if"
: "$langdir"
: "$ldpath"
: "$license"
: "$makedepends"
: "$makedepends_build"
: "$makedepends_host"
: "$md5sums"
: "$options"
: "$patch_args"
: "$pkgbasedir"
: "$pkgdesc"
: "$pkgdir"
: "$pkgname"
: "$pkgrel"
: "$pkgver"
: "$pkggroups"
: "$pkgusers"
: "$provides"
: "$provider_priority"
: "$replaces"
: "$sha256sums"
: "$sha512sums"
: "$sonameprefix"
: "$source"
: "$srcdir"
: "$startdir"
: "$subpackages"
: "$subpkgdir"
: "$subpkgname"
: "$triggers"
: "$url"
# abuild.conf
: "$CFLAGS"
: "$CXXFLAGS"
: "$CPPFLAGS"
: "$LDFLAGS"
: "$JOBS"
: "$MAKEFLAGS"
: "$CMAKE_CROSSOPTS"

View file

@ -1,16 +0,0 @@
#!/bin/sh
shellcheck -s ash \
-e SC3043 \
-e SC3057 \
-e SC3060 \
-e SC2016 \
-e SC2086 \
-e SC2169 \
-e SC2155 \
-e SC2100 \
-e SC2209 \
-e SC2030 \
-e SC2031 \
-e SC1090 \
-xa $CI_PROJECT_DIR/.gitlab/bin/APKBUILD_SHIM

View file

@ -1,556 +0,0 @@
#!/usr/bin/env bash
# Availabl here: https://lab.ilot.io/dotnet/arcade/-/blob/7f6d9796cc7f594772f798358dbdd8c69b6a97af/eng/common/cross/build-rootfs.sh
# Only modification: qemu-$arch-static becomes qemu-$arch
set -e
usage()
{
echo "Usage: $0 [BuildArch] [CodeName] [lldbx.y] [llvmx[.y]] [--skipunmount] --rootfsdir <directory>]"
echo "BuildArch can be: arm(default), arm64, armel, armv6, ppc64le, riscv64, s390x, x64, x86"
echo "CodeName - optional, Code name for Linux, can be: xenial(default), zesty, bionic, alpine"
echo " for alpine can be specified with version: alpineX.YY or alpineedge"
echo " for FreeBSD can be: freebsd12, freebsd13"
echo " for illumos can be: illumos"
echo " for Haiku can be: haiku."
echo "lldbx.y - optional, LLDB version, can be: lldb3.9(default), lldb4.0, lldb5.0, lldb6.0 no-lldb. Ignored for alpine and FreeBSD"
echo "llvmx[.y] - optional, LLVM version for LLVM related packages."
echo "--skipunmount - optional, will skip the unmount of rootfs folder."
echo "--use-mirror - optional, use mirror URL to fetch resources, when available."
echo "--jobs N - optional, restrict to N jobs."
exit 1
}
__CodeName=xenial
__CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
__BuildArch=arm
__AlpineArch=armv7
__FreeBSDArch=arm
__FreeBSDMachineArch=armv7
__IllumosArch=arm7
__QEMUArch=arm
__UbuntuArch=armhf
__UbuntuRepo="http://ports.ubuntu.com/"
__LLDB_Package="liblldb-3.9-dev"
__SkipUnmount=0
# base development support
__UbuntuPackages="build-essential"
__AlpinePackages="alpine-base"
__AlpinePackages+=" build-base"
# symlinks fixer
__UbuntuPackages+=" symlinks"
# runtime dependencies
__UbuntuPackages+=" libicu-dev"
__UbuntuPackages+=" liblttng-ust-dev"
__UbuntuPackages+=" libunwind8-dev"
__UbuntuPackages+=" libnuma-dev"
# runtime libraries' dependencies
__UbuntuPackages+=" libcurl4-openssl-dev"
__UbuntuPackages+=" libkrb5-dev"
__UbuntuPackages+=" libssl-dev"
__UbuntuPackages+=" zlib1g-dev"
__FreeBSDBase="12.3-RELEASE"
__FreeBSDPkg="1.17.0"
__FreeBSDABI="12"
__FreeBSDPackages="libunwind"
__FreeBSDPackages+=" icu"
__FreeBSDPackages+=" libinotify"
__FreeBSDPackages+=" openssl"
__FreeBSDPackages+=" krb5"
__FreeBSDPackages+=" terminfo-db"
__IllumosPackages="icu"
__IllumosPackages+=" mit-krb5"
__IllumosPackages+=" openssl"
__IllumosPackages+=" zlib"
__HaikuPackages="gmp"
__HaikuPackages+=" gmp_devel"
__HaikuPackages+=" krb5"
__HaikuPackages+=" krb5_devel"
__HaikuPackages+=" libiconv"
__HaikuPackages+=" libiconv_devel"
__HaikuPackages+=" llvm12_libunwind"
__HaikuPackages+=" llvm12_libunwind_devel"
__HaikuPackages+=" mpfr"
__HaikuPackages+=" mpfr_devel"
# ML.NET dependencies
__UbuntuPackages+=" libomp5"
__UbuntuPackages+=" libomp-dev"
__Keyring=
__UseMirror=0
__UnprocessedBuildArgs=
while :; do
if [[ "$#" -le 0 ]]; then
break
fi
lowerI="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
case $lowerI in
-\?|-h|--help)
usage
exit 1
;;
arm)
__BuildArch=arm
__UbuntuArch=armhf
__AlpineArch=armv7
__QEMUArch=arm
;;
arm64)
__BuildArch=arm64
__UbuntuArch=arm64
__AlpineArch=aarch64
__QEMUArch=aarch64
__FreeBSDArch=arm64
__FreeBSDMachineArch=aarch64
;;
armel)
__BuildArch=armel
__UbuntuArch=armel
__UbuntuRepo="http://ftp.debian.org/debian/"
__CodeName=jessie
;;
armv6)
__BuildArch=armv6
__UbuntuArch=armhf
__QEMUArch=arm
__UbuntuRepo="http://raspbian.raspberrypi.org/raspbian/"
__CodeName=buster
__LLDB_Package="liblldb-6.0-dev"
if [[ -e "/usr/share/keyrings/raspbian-archive-keyring.gpg" ]]; then
__Keyring="--keyring /usr/share/keyrings/raspbian-archive-keyring.gpg"
fi
;;
riscv64)
__BuildArch=riscv64
__AlpineArch=riscv64
__QEMUArch=riscv64
__UbuntuArch=riscv64
__UbuntuRepo="http://deb.debian.org/debian-ports"
__UbuntuPackages="${__UbuntuPackages// libunwind8-dev/}"
unset __LLDB_Package
if [[ -e "/usr/share/keyrings/debian-ports-archive-keyring.gpg" ]]; then
__Keyring="--keyring /usr/share/keyrings/debian-ports-archive-keyring.gpg --include=debian-ports-archive-keyring"
fi
;;
ppc64le)
__BuildArch=ppc64le
__AlpineArch=ppc64le
__QEMUArch=ppc64le
__UbuntuArch=ppc64el
__UbuntuRepo="http://ports.ubuntu.com/ubuntu-ports/"
__UbuntuPackages="${__UbuntuPackages// libunwind8-dev/}"
__UbuntuPackages="${__UbuntuPackages// libomp-dev/}"
__UbuntuPackages="${__UbuntuPackages// libomp5/}"
unset __LLDB_Package
;;
s390x)
__BuildArch=s390x
__AlpineArch=s390x
__QEMUArch=s390x
__UbuntuArch=s390x
__UbuntuRepo="http://ports.ubuntu.com/ubuntu-ports/"
__UbuntuPackages="${__UbuntuPackages// libunwind8-dev/}"
__UbuntuPackages="${__UbuntuPackages// libomp-dev/}"
__UbuntuPackages="${__UbuntuPackages// libomp5/}"
unset __LLDB_Package
;;
x64)
__BuildArch=x64
__AlpineArch=x86_64
__QEMUArch=x86_64
__UbuntuArch=amd64
__FreeBSDArch=amd64
__FreeBSDMachineArch=amd64
__illumosArch=x86_64
__UbuntuRepo=
;;
x86)
__BuildArch=x86
__AlpineArch=i386
__QEMUArch=i386
__UbuntuArch=i386
__AlpineArch=x86
__UbuntuRepo="http://archive.ubuntu.com/ubuntu/"
;;
lldb*)
version="${lowerI/lldb/}"
parts=(${version//./ })
# for versions > 6.0, lldb has dropped the minor version
if [[ "${parts[0]}" -gt 6 ]]; then
version="${parts[0]}"
fi
__LLDB_Package="liblldb-${version}-dev"
;;
no-lldb)
unset __LLDB_Package
;;
llvm*)
version="${lowerI/llvm/}"
parts=(${version//./ })
__LLVM_MajorVersion="${parts[0]}"
__LLVM_MinorVersion="${parts[1]}"
# for versions > 6.0, llvm has dropped the minor version
if [[ -z "$__LLVM_MinorVersion" && "$__LLVM_MajorVersion" -le 6 ]]; then
__LLVM_MinorVersion=0;
fi
;;
xenial) # Ubuntu 16.04
if [[ "$__CodeName" != "jessie" ]]; then
__CodeName=xenial
fi
;;
zesty) # Ubuntu 17.04
if [[ "$__CodeName" != "jessie" ]]; then
__CodeName=zesty
fi
;;
bionic) # Ubuntu 18.04
if [[ "$__CodeName" != "jessie" ]]; then
__CodeName=bionic
fi
;;
focal) # Ubuntu 20.04
if [[ "$__CodeName" != "jessie" ]]; then
__CodeName=focal
fi
;;
jammy) # Ubuntu 22.04
if [[ "$__CodeName" != "jessie" ]]; then
__CodeName=jammy
fi
;;
jessie) # Debian 8
__CodeName=jessie
if [[ -z "$__UbuntuRepo" ]]; then
__UbuntuRepo="http://ftp.debian.org/debian/"
fi
;;
stretch) # Debian 9
__CodeName=stretch
__LLDB_Package="liblldb-6.0-dev"
if [[ -z "$__UbuntuRepo" ]]; then
__UbuntuRepo="http://ftp.debian.org/debian/"
fi
;;
buster) # Debian 10
__CodeName=buster
__LLDB_Package="liblldb-6.0-dev"
if [[ -z "$__UbuntuRepo" ]]; then
__UbuntuRepo="http://ftp.debian.org/debian/"
fi
;;
bullseye) # Debian 11
__CodeName=bullseye
if [[ -z "$__UbuntuRepo" ]]; then
__UbuntuRepo="http://ftp.debian.org/debian/"
fi
;;
sid) # Debian sid
__CodeName=sid
if [[ -z "$__UbuntuRepo" ]]; then
__UbuntuRepo="http://ftp.debian.org/debian/"
fi
;;
tizen)
__CodeName=
__UbuntuRepo=
__Tizen=tizen
;;
alpine*)
__CodeName=alpine
__UbuntuRepo=
version="${lowerI/alpine/}"
if [[ "$version" == "edge" ]]; then
__AlpineVersion=edge
else
parts=(${version//./ })
__AlpineMajorVersion="${parts[0]}"
__AlpineMinoVersion="${parts[1]}"
__AlpineVersion="$__AlpineMajorVersion.$__AlpineMinoVersion"
fi
;;
freebsd12)
__CodeName=freebsd
__SkipUnmount=1
;;
freebsd13)
__CodeName=freebsd
__FreeBSDBase="13.0-RELEASE"
__FreeBSDABI="13"
__SkipUnmount=1
;;
illumos)
__CodeName=illumos
__SkipUnmount=1
;;
haiku)
__CodeName=haiku
__BuildArch=x64
__SkipUnmount=1
;;
--skipunmount)
__SkipUnmount=1
;;
--rootfsdir|-rootfsdir)
shift
__RootfsDir="$1"
;;
--use-mirror)
__UseMirror=1
;;
--use-jobs)
shift
MAXJOBS=$1
;;
*)
__UnprocessedBuildArgs="$__UnprocessedBuildArgs $1"
;;
esac
shift
done
if [[ "$__BuildArch" == "armel" ]]; then
__LLDB_Package="lldb-3.5-dev"
fi
__UbuntuPackages+=" ${__LLDB_Package:-}"
if [[ -n "$__LLVM_MajorVersion" ]]; then
__UbuntuPackages+=" libclang-common-${__LLVM_MajorVersion}${__LLVM_MinorVersion:+.$__LLVM_MinorVersion}-dev"
fi
if [[ -z "$__RootfsDir" && -n "$ROOTFS_DIR" ]]; then
__RootfsDir="$ROOTFS_DIR"
fi
if [[ -z "$__RootfsDir" ]]; then
__RootfsDir="$__CrossDir/../../../.tools/rootfs/$__BuildArch"
fi
if [[ -d "$__RootfsDir" ]]; then
if [[ "$__SkipUnmount" == "0" ]]; then
umount "$__RootfsDir"/* || true
fi
rm -rf "$__RootfsDir"
fi
mkdir -p "$__RootfsDir"
__RootfsDir="$( cd "$__RootfsDir" && pwd )"
if [[ "$__CodeName" == "alpine" ]]; then
__ApkToolsVersion=2.12.11
__ApkToolsDir="$(mktemp -d)"
wget "https://gitlab.alpinelinux.org/api/v4/projects/5/packages/generic//v$__ApkToolsVersion/x86_64/apk.static" -P "$__ApkToolsDir"
chmod +x "$__ApkToolsDir/apk.static"
mkdir -p "$__RootfsDir"/usr/bin
cp -v "/usr/bin/qemu-$__QEMUArch" "$__RootfsDir/usr/bin"
if [[ "$__AlpineVersion" == "edge" ]]; then
version=edge
else
version="v$__AlpineVersion"
fi
# initialize DB
"$__ApkToolsDir/apk.static" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \
-U --allow-untrusted --root "$__RootfsDir" --arch "$__AlpineArch" --initdb add
if [[ "$__AlpineLlvmLibsLookup" == 1 ]]; then
__AlpinePackages+=" $("$__ApkToolsDir/apk.static" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \
-U --allow-untrusted --root "$__RootfsDir" --arch "$__AlpineArch" \
search 'llvm*-libs' | sort | tail -1 | sed 's/-[^-]*//2g')"
fi
# install all packages in one go
"$__ApkToolsDir/apk.static" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \
-U --allow-untrusted --no-scripts --root "$__RootfsDir" --arch "$__AlpineArch" \
add $__AlpinePackages
rm -r "$__ApkToolsDir"
elif [[ "$__CodeName" == "freebsd" ]]; then
mkdir -p "$__RootfsDir"/usr/local/etc
JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"}
wget -O - "https://download.freebsd.org/ftp/releases/${__FreeBSDArch}/${__FreeBSDMachineArch}/${__FreeBSDBase}/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version
echo "ABI = \"FreeBSD:${__FreeBSDABI}:${__FreeBSDMachineArch}\"; FINGERPRINTS = \"${__RootfsDir}/usr/share/keys\"; REPOS_DIR = [\"${__RootfsDir}/etc/pkg\"]; REPO_AUTOUPDATE = NO; RUN_SCRIPTS = NO;" > "${__RootfsDir}"/usr/local/etc/pkg.conf
echo "FreeBSD: { url: \"pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly\", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"${__RootfsDir}/usr/share/keys/pkg\", enabled: yes }" > "${__RootfsDir}"/etc/pkg/FreeBSD.conf
mkdir -p "$__RootfsDir"/tmp
# get and build package manager
wget -O - "https://github.com/freebsd/pkg/archive/${__FreeBSDPkg}.tar.gz" | tar -C "$__RootfsDir"/tmp -zxf -
cd "$__RootfsDir/tmp/pkg-${__FreeBSDPkg}"
# needed for install to succeed
mkdir -p "$__RootfsDir"/host/etc
./autogen.sh && ./configure --prefix="$__RootfsDir"/host && make -j "$JOBS" && make install
rm -rf "$__RootfsDir/tmp/pkg-${__FreeBSDPkg}"
# install packages we need.
INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf update
INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf install --yes $__FreeBSDPackages
elif [[ "$__CodeName" == "illumos" ]]; then
mkdir "$__RootfsDir/tmp"
pushd "$__RootfsDir/tmp"
JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"}
echo "Downloading sysroot."
wget -O - https://github.com/illumos/sysroot/releases/download/20181213-de6af22ae73b-v1/illumos-sysroot-i386-20181213-de6af22ae73b-v1.tar.gz | tar -C "$__RootfsDir" -xzf -
echo "Building binutils. Please wait.."
wget -O - https://ftp.gnu.org/gnu/binutils/binutils-2.33.1.tar.bz2 | tar -xjf -
mkdir build-binutils && cd build-binutils
../binutils-2.33.1/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.10" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir"
make -j "$JOBS" && make install && cd ..
echo "Building gcc. Please wait.."
wget -O - https://ftp.gnu.org/gnu/gcc/gcc-8.4.0/gcc-8.4.0.tar.xz | tar -xJf -
CFLAGS="-fPIC"
CXXFLAGS="-fPIC"
CXXFLAGS_FOR_TARGET="-fPIC"
CFLAGS_FOR_TARGET="-fPIC"
export CFLAGS CXXFLAGS CXXFLAGS_FOR_TARGET CFLAGS_FOR_TARGET
mkdir build-gcc && cd build-gcc
../gcc-8.4.0/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.10" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir" --with-gnu-as \
--with-gnu-ld --disable-nls --disable-libgomp --disable-libquadmath --disable-libssp --disable-libvtv --disable-libcilkrts --disable-libada --disable-libsanitizer \
--disable-libquadmath-support --disable-shared --enable-tls
make -j "$JOBS" && make install && cd ..
BaseUrl=https://pkgsrc.smartos.org
if [[ "$__UseMirror" == 1 ]]; then
BaseUrl=https://pkgsrc.smartos.skylime.net
fi
BaseUrl="$BaseUrl/packages/SmartOS/trunk/${__illumosArch}/All"
echo "Downloading manifest"
wget "$BaseUrl"
echo "Downloading dependencies."
read -ra array <<<"$__IllumosPackages"
for package in "${array[@]}"; do
echo "Installing '$package'"
# find last occurrence of package in listing and extract its name
package="$(sed -En '/.*href="('"$package"'-[0-9].*).tgz".*/h;$!d;g;s//\1/p' All)"
echo "Resolved name '$package'"
wget "$BaseUrl"/"$package".tgz
ar -x "$package".tgz
tar --skip-old-files -xzf "$package".tmp.tg* -C "$__RootfsDir" 2>/dev/null
done
echo "Cleaning up temporary files."
popd
rm -rf "$__RootfsDir"/{tmp,+*}
mkdir -p "$__RootfsDir"/usr/include/net
mkdir -p "$__RootfsDir"/usr/include/netpacket
wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/bpf.h
wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/dlt.h
wget -P "$__RootfsDir"/usr/include/netpacket https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/inet/sockmods/netpacket/packet.h
wget -P "$__RootfsDir"/usr/include/sys https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/sys/sdt.h
elif [[ "$__CodeName" == "haiku" ]]; then
JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"}
echo "Building Haiku sysroot for x86_64"
mkdir -p "$__RootfsDir/tmp"
cd "$__RootfsDir/tmp"
git clone -b hrev56235 https://review.haiku-os.org/haiku
git clone -b btrev43195 https://review.haiku-os.org/buildtools
cd "$__RootfsDir/tmp/buildtools" && git checkout 7487388f5110021d400b9f3b88e1a7f310dc066d
# Fetch some unmerged patches
cd "$__RootfsDir/tmp/haiku"
## Add development build profile (slimmer than nightly)
git fetch origin refs/changes/64/4164/1 && git -c commit.gpgsign=false cherry-pick FETCH_HEAD
# Build jam
cd "$__RootfsDir/tmp/buildtools/jam"
make
# Configure cross tools
echo "Building cross-compiler"
mkdir -p "$__RootfsDir/generated"
cd "$__RootfsDir/generated"
"$__RootfsDir/tmp/haiku/configure" -j"$JOBS" --sysroot "$__RootfsDir" --cross-tools-source "$__RootfsDir/tmp/buildtools" --build-cross-tools x86_64
# Build Haiku packages
echo "Building Haiku"
echo 'HAIKU_BUILD_PROFILE = "development-raw" ;' > UserProfileConfig
"$__RootfsDir/tmp/buildtools/jam/jam0" -j"$JOBS" -q '<build>package' '<repository>Haiku'
BaseUrl="https://depot.haiku-os.org/__api/v2/pkg/get-pkg"
# Download additional packages
echo "Downloading additional required packages"
read -ra array <<<"$__HaikuPackages"
for package in "${array[@]}"; do
echo "Downloading $package..."
# API documented here: https://github.com/haiku/haikudepotserver/blob/master/haikudepotserver-api2/src/main/resources/api2/pkg.yaml#L60
# The schema here: https://github.com/haiku/haikudepotserver/blob/master/haikudepotserver-api2/src/main/resources/api2/pkg.yaml#L598
hpkgDownloadUrl="$(wget -qO- --post-data='{"name":"'"$package"'","repositorySourceCode":"haikuports_x86_64","versionType":"LATEST","naturalLanguageCode":"en"}' \
--header='Content-Type:application/json' "$BaseUrl" | jq -r '.result.versions[].hpkgDownloadURL')"
wget -P "$__RootfsDir/generated/download" "$hpkgDownloadUrl"
done
# Setup the sysroot
echo "Setting up sysroot and extracting needed packages"
mkdir -p "$__RootfsDir/boot/system"
for file in "$__RootfsDir/generated/objects/haiku/x86_64/packaging/packages/"*.hpkg; do
"$__RootfsDir/generated/objects/linux/x86_64/release/tools/package/package" extract -C "$__RootfsDir/boot/system" "$file"
done
for file in "$__RootfsDir/generated/download/"*.hpkg; do
"$__RootfsDir/generated/objects/linux/x86_64/release/tools/package/package" extract -C "$__RootfsDir/boot/system" "$file"
done
# Cleaning up temporary files
echo "Cleaning up temporary files"
rm -rf "$__RootfsDir/tmp"
for name in "$__RootfsDir/generated/"*; do
if [[ "$name" =~ "cross-tools-" ]]; then
: # Keep the cross-compiler
else
rm -rf "$name"
fi
done
elif [[ -n "$__CodeName" ]]; then
qemu-debootstrap $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo"
cp "$__CrossDir/$__BuildArch/sources.list.$__CodeName" "$__RootfsDir/etc/apt/sources.list"
chroot "$__RootfsDir" apt-get update
chroot "$__RootfsDir" apt-get -f -y install
chroot "$__RootfsDir" apt-get -y install $__UbuntuPackages
chroot "$__RootfsDir" symlinks -cr /usr
chroot "$__RootfsDir" apt-get clean
if [[ "$__SkipUnmount" == "0" ]]; then
umount "$__RootfsDir"/* || true
fi
if [[ "$__BuildArch" == "armel" && "$__CodeName" == "jessie" ]]; then
pushd "$__RootfsDir"
patch -p1 < "$__CrossDir/$__BuildArch/armel.jessie.patch"
popd
fi
elif [[ "$__Tizen" == "tizen" ]]; then
ROOTFS_DIR="$__RootfsDir" "$__CrossDir/tizen-build-rootfs.sh" "$__BuildArch"
else
echo "Unsupported target platform."
usage;
exit 1
fi

View file

@ -1,20 +0,0 @@
#!/bin/sh
if [ $# -lt 1 ]; then
echo "Usage: $0 <basebranch>"
exit 1
fi
if ! git rev-parse --is-inside-work-tree >/dev/null 2>&1; then
echo "Fatal: not inside a git repository"
exit 2
fi
basebranch=$1
if ! git rev-parse --verify --quiet $basebranch >/dev/null; then
# The base branch does not eixst, probably due to a shallow clone
git fetch -v $CI_MERGE_REQUEST_PROJECT_URL.git +refs/heads/$basebranch:refs/heads/$basebranch
fi
git --no-pager diff --diff-filter=ACMR --name-only $basebranch...HEAD -- "*/APKBUILD" | xargs -r -n1 dirname

View file

@ -1,74 +0,0 @@
# shellcheck disable=SC3043
:
# shellcheck disable=SC3040
set -eu -o pipefail
changed_repos() {
: "${APORTSDIR?APORTSDIR missing}"
: "${BASEBRANCH?BASEBRANCH missing}"
cd "$APORTSDIR"
for repo in $REPOS; do
git diff --diff-filter=ACMR --exit-code "$BASEBRANCH"...HEAD -- "$repo" >/dev/null \
|| echo "$repo"
done
}
changed_aports() {
: "${APORTSDIR?APORTSDIR missing}"
: "${BASEBRANCH?BASEBRANCH missing}"
cd "$APORTSDIR"
local repo="$1"
local aports
aports=$(git diff --name-only --diff-filter=ACMR --relative="$repo" \
"$BASEBRANCH"...HEAD -- "*/APKBUILD" | xargs -rn1 dirname)
# shellcheck disable=2086
ap builddirs -d "$APORTSDIR/$repo" $aports 2>/dev/null | xargs -rn1 basename
}
section_start() {
name=${1?arg 1 name missing}
header=${2?arg 2 header missing}
collapsed=$2
timestamp=$(date +%s)
options=""
case $collapsed in
yes|on|collapsed|true) options="[collapsed=true]";;
esac
printf "\e[0Ksection_start:%d:%s%s\r\e[0K%s\n" "$timestamp" "$name" "$options" "$header"
}
section_end() {
name=$1
timestamp=$(date +%s)
printf "\e[0Ksection_end:%d:%s\r\e[0K" "$timestamp" "$name"
}
gitlab_key_to_rsa() {
KEY=$1
TYPE=$2
TGT=$3
TGT_DIR=${TGT%/*}
if [ "$TGT" == "$TGT_DIR" ]; then
TGT_DIR="./"
fi
if [ ! -d "$TGT_DIR" ]; then
mkdir -p "$TGT_DIR"
fi
case $TYPE in
rsa-public) local type="PUBLIC";;
rsa-private) local type="RSA PRIVATE";;
esac
echo "-----BEGIN $type KEY-----" > "$TGT"
echo $1 | sed 's/.\{64\}/&\
/g' >> "$TGT"
echo "-----END $type KEY-----" >> "$TGT"
}

View file

@ -1,96 +0,0 @@
#!/bin/sh
BLUE="\e[34m"
MAGENTA="\e[35m"
RESET="\e[0m"
readonly BASEBRANCH=$CI_MERGE_REQUEST_TARGET_BRANCH_NAME
verbose() {
echo "> " "$@"
# shellcheck disable=SC2068
$@
}
debugging() {
[ -n "$CI_DEBUG_BUILD" ]
}
debug() {
if debugging; then
verbose "$@"
fi
}
# git no longer allows to execute in repositories owned by different users
sudo chown -R gitlab-runner: .
fetch_flags="-qn"
debugging && fetch_flags="-v"
git fetch $fetch_flags "$CI_MERGE_REQUEST_PROJECT_URL" \
"+refs/heads/$BASEBRANCH:refs/heads/$BASEBRANCH"
if debugging; then
merge_base=$(git merge-base "$BASEBRANCH" HEAD)
echo "$merge_base"
git --version
git config -l
git tag merge-base "$merge_base" || { echo "Could not determine merge-base"; exit 50; }
git log --oneline --graph --decorate --all
fi
has_problems=0
for PKG in $(changed-aports "$BASEBRANCH"); do
printf "$BLUE==>$RESET Linting $PKG\n"
(
cd "$PKG"
repo=$(basename $(dirname $PKG));
if [ "$repo" == "backports" ]; then
echo "Skipping $PKG as backports (we don't care)"
continue
fi
printf "\n\n"
printf "$BLUE"
printf '======================================================\n'
printf " parse APKBUILD:\n"
printf '======================================================'
printf "$RESET\n\n"
( . ./APKBUILD ) || has_problems=1
printf "\n\n"
printf "$BLUE"
printf '======================================================\n'
printf " abuild sanitycheck:\n"
printf '======================================================'
printf "$RESET\n\n"
abuild sanitycheck || has_problems=1
printf "\n\n"
printf "$BLUE"
printf '======================================================\n'
printf " apkbuild-shellcheck:\n"
printf '======================================================'
printf "$RESET\n"
apkbuild-shellcheck || has_problems=1
printf "\n\n"
printf "$BLUE"
printf '======================================================\n'
printf " apkbuild-lint:\n"
printf '======================================================'
printf "$RESET\n\n"
apkbuild-lint APKBUILD || has_problems=1
return $has_problems
) || has_problems=1
echo
done
exit $has_problems

View file

@ -1,56 +0,0 @@
#!/bin/sh
# shellcheck disable=SC3043
. $CI_PROJECT_DIR/.gitlab/bin/functions.sh
# shellcheck disable=SC3040
set -eu -o pipefail
readonly APORTSDIR=$CI_PROJECT_DIR
readonly REPOS="backports user"
readonly BASEBRANCH=$CI_MERGE_REQUEST_TARGET_BRANCH_NAME
export GIT_SSH_COMMAND="ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
gitlab_key_to_rsa $ABUILD_KEY rsa-private $HOME/.abuild/$ABUILD_KEY_NAME.rsa
gitlab_key_to_rsa $ABUILD_KEY_PUB rsa-public $HOME/.abuild/$ABUILD_KEY_NAME.rsa.pub
gitlab_key_to_rsa $SSH_KEY rsa-private $HOME/.ssh/id_rsa
chmod 700 "$HOME"/.ssh/id_rsa
chmod 700 "$HOME"/.abuild/$ABUILD_KEY_NAME.rsa
echo "PACKAGER_PRIVKEY=$HOME/.abuild/$ABUILD_KEY_NAME.rsa" > $HOME/.abuild/abuild.conf
echo "REPODEST=$HOME/repo-apk" >> $HOME/.abuild/abuild.conf
sudo cp $HOME/.abuild/$ABUILD_KEY_NAME.rsa.pub /etc/apk/keys/.
if [ -d $HOME/repo-apk ]; then
git -C $HOME/repo-apk fetch
git -C $HOME/repo-apk checkout $BASEBRANCH
git -C $HOME/repo-apk pull --rebase
else
git clone git@lab.ilot.io:ayakael/repo-apk -b $BASEBRANCH $HOME/repo-apk
fi
for i in $(find packages -type f -name "*.apk"); do
install -vDm644 $i ${i/packages/$HOME\/repo-apk}
done
fetch_flags="-qn"
git fetch $fetch_flags "$CI_MERGE_REQUEST_PROJECT_URL" \
"+refs/heads/$BASEBRANCH:refs/heads/$BASEBRANCH"
for repo in $(changed_repos); do
rm $HOME/repo-apk/$repo/*/APKINDEX.tar.gz | true
mkdir -p $repo/DUMMY
echo "pkgname=DUMMY" > $repo/DUMMY/APKBUILD
cd $repo/DUMMY
for i in $(find $HOME/repo-apk/$repo -maxdepth 1 -mindepth 1 -printf '%P '); do
CHOST=$i abuild index
done
cd "$CI_PROJECT_DIR"
rm -R $repo/DUMMY
done
git -C $HOME/repo-apk add .
git -C $HOME/repo-apk commit -m "Update from $CI_MERGE_REQUEST_IID - $CI_MERGE_REQUEST_TITLE"
git -C $HOME/repo-apk push

View file

@ -1,17 +0,0 @@
diff --git a/usr/bin/abuild.orig b/usr/bin/abuild
index 71e0681..d4ae3dd 100755
--- a/usr/bin/abuild.orig
+++ b/usr/bin/abuild
@@ -2231,7 +2231,11 @@ calcdeps() {
list_has $i $builddeps && continue
subpackages_has ${i%%[<>=]*} || builddeps="$builddeps $i"
done
- hostdeps="$EXTRADEPENDS_TARGET"
+ for i in $EXTRADEPENDS_HOST $EXTRADEPENDS_TARGET $depends $makedepends; do
+ [ "$pkgname" = "${i%%[<>=]*}" ] && continue
+ list_has $i $hostdeps && continue
+ subpackages_has ${i%%[<>=]*} || hostdeps="$hostdeps $i"
+ done
fi
}

View file

@ -1,55 +1,43 @@
-# user-aports
+# ayaports
-Upstream: https://lab.ilot.io/ayakael/user-aports
+Upstream: https://ayakael.net/forge/ayaports
 ## Description
 This repository contains aports that are not yet merged in the official Alpine
 Linux repository or dont adhere to Alpine polices. Packages are automatically
-built using GitLab CI on my own GitLab instance. Once built, they are deployed
-to a git-lfs repository, making them available to apk.
+built using CI. Once built, they are deployed to a git-lfs repository, making
+them available to apk.
 Branches are matched to Alpine releases.
 ## Repositories
-You can browse all the repositories at https://lab.ilot.io/ayakael/repo-apk.
+You can browse all the repositories at https://codeberg.org/ayakael/ayaports
 Affixed to each repository description is the appropriate link for use in
 `/etc/apk/repositories`.
 #### Backports
 ```
-https://lab.ilot.io/ayakael/repo-apk/-/raw/v3.18/backports
+https://ayakael.net/api/packages/forge/alpine/edge/backports
 ```
-Aports from the official Alpine repositories backported from edge to v3.18.
+Aports from the official Alpine repositories backported from edge.
 #### User
 ```
-https://lab.ilot.io/ayakael/repo-apk/-/raw/v3.18/user
+https://ayakael.net/api/packages/forge/alpine/edge/user
 ```
 Aports that have yet to be (or may never be) upstreamed to the official
 aports.
-#### Testing/Community
-```
-https://lab.ilot.io/ayakael/repo-apk/-/raw/v3.18/community
-https://lab.ilot.io/ayakael/repo-apk/-/raw/v3.18/testing
-```
-Aports that have already been upstreamed are kept here for three reasons:
-* Facilitate keeping track of packages that I support
-* Upgrades are first deployed here for first batch of testing
-* Make packages that are in 'testing' available for stable releases
 ## How to use
-Add security key of the repo-apk repository to your /etc/apk/keys:
+Add security key of the apk repository to your /etc/apk/keys:
 ```shell
 cd /etc/apk/keys
-wget https://lab.ilot.io/ayakael/repo-apk/-/raw/v3.18/antoine.martin@protonmail.com-5b3109ad.rsa.pub
+curl -JO https://ayakael.net/api/packages/forge/alpine/key
 ```
 Add repositories that you want to use (see above) to `/etc/apk/repositories`.
@ -63,10 +51,10 @@ they will work for you.
 ## Contribution & bug reports
 If you wish to contribute to this aports collection, or wish to report a bug,
-you can do so on Alpine's GitLab instance here:
+you can do so on Codeberg here:
-https://gitlab.alpinelinux.org/ayakael/user-aports
+https://codeberg.org/ayakael/ayaports/issues
-For packages that are in testing/community, bug reports and merge requests
+For packages that are in backports, bug reports and merge requests
 should be done on Alpine's aports repo instance:
 https://gitlab.alpinelinux.org/alpine/aports
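Taken together, the rewritten "How to use" instructions amount to roughly the following on an edge system (a sketch; substitute a release branch for edge and add the backports line as needed):

```shell
# Hypothetical end-to-end setup following the new README.
cd /etc/apk/keys
doas curl -JO https://ayakael.net/api/packages/forge/alpine/key
echo "https://ayakael.net/api/packages/forge/alpine/edge/user" \
    | doas tee -a /etc/apk/repositories
doas apk update
```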

View file

@ -0,0 +1,25 @@
diff --color -Nur calibre-6.17.0.orig/src/calibre/gui2/update.py calibre-6.17.0/src/calibre/gui2/update.py
--- calibre-6.17.0.orig/src/calibre/gui2/update.py 2023-05-06 11:36:35.678461036 -0700
+++ calibre-6.17.0/src/calibre/gui2/update.py 2023-05-06 11:39:10.365134930 -0700
@@ -82,20 +82,6 @@
while not self.shutdown_event.is_set():
calibre_update_version = NO_CALIBRE_UPDATE
plugins_update_found = 0
- try:
- version = get_newest_version()
- if version[:2] > numeric_version[:2]:
- calibre_update_version = version
- except Exception as e:
- prints('Failed to check for calibre update:', as_unicode(e))
- try:
- update_plugins = get_plugin_updates_available(raise_error=True)
- if update_plugins is not None:
- plugins_update_found = len(update_plugins)
- except Exception as e:
- prints('Failed to check for plugin update:', as_unicode(e))
- if calibre_update_version != NO_CALIBRE_UPDATE or plugins_update_found > 0:
- self.signal.update_found.emit(calibre_update_version, plugins_update_found)
self.shutdown_event.wait(self.INTERVAL)
def shutdown(self):

116 backports/calibre/APKBUILD Normal file
View file

@ -0,0 +1,116 @@
# Maintainer: Cowington Post <cowingtonpost@gmail.com>
pkgname=calibre
pkgver=7.12.0
pkgrel=0
pkgdesc="Ebook management application"
# qt6-webengine
arch="x86_64 aarch64"
url="https://calibre-ebook.com"
license="GPL-3.0-or-later"
depends="
font-liberation
libwmf
mtdev
optipng
poppler
py3-apsw
py3-beautifulsoup4
py3-css-parser
py3-cssselect
py3-dateutil
py3-dnspython
py3-feedparser
py3-fonttools
py3-html2text
py3-html5-parser
py3-html5lib
py3-jeepney
py3-lxml
py3-markdown
py3-mechanize
py3-msgpack
py3-netifaces
py3-pillow
py3-psutil
py3-pycryptodome
py3-pygments
py3-pyqt6-webengine
py3-regex
py3-xxhash
py3-zeroconf
qt6-qtimageformats
qt6-qtsvg
qt6-qtwebengine
udisks2
"
makedepends="
cmake
curl
hunspell-dev
hyphen-dev
libmtp-dev
libstemmer-dev
libusb-dev
podofo-dev
py3-pyqt-builder
py3-pyqt6-sip
py3-sip
python3-dev
qt6-qtbase-dev
uchardet-dev
xdg-utils
"
subpackages="
$pkgname-pyc
$pkgname-doc
$pkgname-bash-completion
$pkgname-zsh-completion
"
source="https://download.calibre-ebook.com/$pkgver/calibre-$pkgver.tar.xz
0001-$pkgname-no-update.patch
"
# net: downloads iso-codes
# !check: no tests ran
options="net !check"
export LANG="en_US.UTF-8"
prepare() {
default_prepare
rm -f resources/calibre-portable.*
}
build() {
python3 setup.py build
python3 setup.py iso639
python3 setup.py iso3166
python3 setup.py liberation_fonts --system-liberation_fonts --path-to-liberation_fonts /usr/share/fonts/liberation
python3 setup.py mathjax
python3 setup.py gui
}
check() {
python3 -m unittest discover
}
package() {
# needed for zsh
mkdir -p "$pkgdir"/usr/share/zsh/site-functions
python3 setup.py install \
--staging-root="$pkgdir"/usr \
--no-compile \
--system-plugins-location=/usr/share/calibre/system-plugins
cp -a man-pages/ "$pkgdir"/usr/share/man
rm -r "$pkgdir"/usr/share/calibre/rapydscript/
python3 -m compileall -fq "$pkgdir"/usr
}
sha512sums="
ee654260d7047f0579a659b8907439a407fb561affcef84141126840452e7b98d10bb5e0a69e0cc809d9ba68729570900a0e7251f18b2056a94b0213880f1363 calibre-7.12.0.tar.xz
eb8e7ce40ff8b8daf6e7e55a5dff8ec4dff06c45744266bb48b3194e92ab1196bc91468203e3c2ca1e5144166a7d6be90e6cf0253513e761b56a4c85be4c2c76 0001-calibre-no-update.patch
"

View file

@ -1,20 +1,19 @@
-# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
 # Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
+# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
 pkgname=caprine
-pkgver=2.57.0
+pkgver=2.60.1
 pkgrel=1
 pkgdesc="Elegant Facebook Messenger desktop app"
-arch="noarch"
+arch="x86_64 aarch64" # blocked by electron
 url="https://github.com/sindresorhus/caprine"
 license="MIT"
 depends="electron"
 makedepends="npm findutils coreutils"
-options="!check"
+options="!check" # No test suite
 source="
 $pkgname-$pkgver.tar.gz::https://github.com/sindresorhus/caprine/archive/refs/tags/v$pkgver.tar.gz
 caprine.desktop
-caprine.js
+caprine.sh
 "
 build() {
@ -27,7 +26,7 @@ build() {
 }
 package() {
-local appdir=/usr/lib/$pkgname
+local appdir=/usr/lib/caprine
 install -d "$pkgdir"$appdir
 cp -r ./* "$pkgdir"$appdir
@ -35,13 +34,13 @@ package() {
 install -dm755 "$pkgdir/usr/share/pixmaps"
 install -m644 build/icon.png "$pkgdir/usr/share/pixmaps/$pkgname.png"
-install -Dm755 "$srcdir"/$pkgname.js "$pkgdir"/usr/bin/$pkgname
+install -Dm755 "$srcdir"/caprine.sh "$pkgdir"/usr/bin/caprine
-install -Dm644 "$srcdir"/$pkgname.desktop \
+install -Dm644 "$srcdir"/caprine.desktop \
-"$pkgdir"/usr/share/applications/$pkgname.desktop
+"$pkgdir"/usr/share/applications/caprine.desktop
-install -dm755 "$pkgdir"/usr/share/licenses/$pkgname
+install -dm755 "$pkgdir"/usr/share/licenses/caprine
-ln -s "$(realpath -m --relative-to=/usr/share/licenses/$pkgname $appdir/license)" \
+ln -s "$(realpath -m --relative-to=/usr/share/licenses/caprine $appdir/license)" \
-"$pkgdir"/usr/share/licenses/$pkgname
+"$pkgdir"/usr/share/licenses/caprine
 # Clean up
 rm -r "$pkgdir"$appdir/build
@ -49,7 +48,7 @@ package() {
 rm -r "$pkgdir"$appdir/tsconfig.json
 find "$pkgdir"$appdir \
 -name "package.json" \
--exec sed -e "s|$srcdir/$pkgname|$appdir|" \
+-exec sed -e "s|$srcdir/caprine|$appdir|" \
 -i {} \; \
 -or -name ".*" -prune -exec rm -r '{}' \; \
 -or -name "bin" -prune -exec rm -r '{}' \; \
@ -60,7 +59,7 @@ package() {
 -or -name "test" -prune -exec rm -r '{}' \;
 }
 sha512sums="
-9d019e4aa61f94ffb627f1b647266d28d47c005515d9193ae8a6e67dbac4b4d931ca907dcb2627e4c8eb00e59ab825fc9dd3d9791064cf1cfbd28d8eff4d2589 caprine-2.57.0.tar.gz
+0df7f233c91f5a044dcffde94b976c6ad71e6d355518615c48cd825a249c01d63f455de31ece69193a66ca0fd8157506f9b88088da1bd47fc75e9d3800784ed0 caprine-2.60.1.tar.gz
 a469e3bea24926119e51642b777ef794c5fa65421107903f967c36d81bbb1adb3d52469ce3a3301b2c890f1aa53ab989ded22a7c6e811fb8cf0a582dbd835e19 caprine.desktop
-44280c62ce43bdafa8528729371fccb16b8a0e3db7aca28d5c157ae0144dca5fbb023b8883b561955aa28ab62e967f2674d8c6bcaff186e2cdd0e7ba8beab9ac caprine.js
+3ad8994c1a0417e73d622587769e527b4236a32c1a89442ff76413b75b4392d667c9e2908979b453e5926e54db6d94b31625340c5a94e84e91ea77f56feae778 caprine.sh
 "

View file

@ -0,0 +1,2 @@
#!/bin/sh
/usr/bin/electron "/usr/lib/caprine"

View file

@ -1,42 +0,0 @@
# Maintainer: psykose <alice@ayaya.dev>
pkgname=cargo-auditable
pkgver=0.6.1
pkgrel=1
pkgdesc="Cargo wrapper for embedding auditing data"
url="https://github.com/rust-secure-code/cargo-auditable"
arch="all"
license="MIT OR Apache-2.0"
makedepends="cargo"
subpackages="$pkgname-doc"
source="$pkgname-$pkgver.tar.gz::https://github.com/rust-secure-code/cargo-auditable/archive/refs/tags/v$pkgver.tar.gz
lock.patch
"
options="net"
prepare() {
default_prepare
cargo fetch --target="$CTARGET" --locked
}
build() {
msg "building cargo auditable"
cargo build --release --frozen
msg "rebuilding with cargo-auditable itself"
PATH="$PATH:$PWD/target/release" \
cargo auditable build --release --frozen
}
check() {
cargo test --frozen
}
package() {
install -Dm755 target/release/cargo-auditable -t "$pkgdir"/usr/bin/
install -Dm644 cargo-auditable/cargo-auditable.1 -t "$pkgdir"/usr/share/man/man1/
}
sha512sums="
191b6ef15436bd3c6a9b4666e80de5a085afe00f8ee3793040fc5e5f78eecc25d45e556fe8279b600f362b46cbecae6c6f7e21060ea0d2344f459817749f4616 cargo-auditable-0.6.1.tar.gz
9fa1b61897559fd9d4da8c43fa69b76b04f9a4dfedeb1973b096948e7684b16c2142bada7f7d2e8f9aa80e483b40aa662c45f6eaa87d3fa213d1e53816e9aaad lock.patch
"

View file

@ -1,13 +0,0 @@
diff --git a/Cargo.lock b/Cargo.lock
index 00969c9..5caabbd 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -72,7 +72,7 @@ dependencies = [
[[package]]
name = "cargo-auditable"
-version = "0.6.0"
+version = "0.6.1"
dependencies = [
"auditable-info",
"auditable-serde",

View file

@ -1,39 +0,0 @@
# Contributor: Bart Ribbers <bribbers@disroot.org>
# Maintainer: psykose <alice@ayaya.dev>
pkgname=catch2-3
pkgver=3.3.2
pkgrel=0
arch="all"
url="https://github.com/catchorg/Catch2"
pkgdesc="A modern, C++-native, header-only, test framework for unit-tests (v3)"
license="BSL-1.0"
makedepends="
cmake
python3
samurai
"
source="https://github.com/catchorg/Catch2/archive/v$pkgver/catch2-v$pkgver.tar.gz"
subpackages="$pkgname-doc"
builddir="$srcdir/Catch2-$pkgver"
build() {
cmake -B build -G Ninja \
-DCMAKE_BUILD_TYPE=MinSizeRel \
-DCMAKE_INSTALL_PREFIX=/usr \
-DCMAKE_INSTALL_LIBDIR=lib
cmake --build build
}
check() {
cd build
# ApprovalTests is broken https://github.com/catchorg/Catch2/issues/1780
CTEST_OUTPUT_ON_FAILURE=TRUE ctest -E "ApprovalTests"
}
package() {
DESTDIR="$pkgdir" cmake --install build
}
sha512sums="
3d0c5666509a19be54ea0c48a3c8e1c4a951a2d991a7c9f7fe6d326661464538f1ab9dc573b1b2647f49fb6bef45bbd866142a4ce0fba38545ad182b8d55f61f catch2-v3.3.2.tar.gz
"

View file

@ -1,58 +0,0 @@
# Maintainer: Drew DeVault <sir@cmpwn.com>
pkgname=celery
pkgver=5.2.7
pkgrel=3
pkgdesc="An asynchronous task queue/job queue based on distributed message passing"
url="http://www.celeryproject.org/"
arch="noarch !s390x" # lmited by py3-kombu
license="Apache-2.0"
depends="
py3-billiard
py3-click
py3-click-didyoumean
py3-click-plugins
py3-click-repl
py3-kombu
py3-tz
py3-vine
python3
"
makedepends="python3-dev py3-setuptools py3-gpep517 py3-wheel"
install="$pkgname.pre-install"
source="https://files.pythonhosted.org/packages/source/c/celery/celery-$pkgver.tar.gz
celery.confd
celery.initd"
pkgusers="celery"
pkggroups="celery"
subpackages="$pkgname-openrc $pkgname-pyc"
provides="py3-celery=$pkgver-r$pkgrel"
# TODO: requires many many many dependencies
options="!check"
build() {
gpep517 build-wheel \
--wheel-dir dist \
--output-fd 3 3>&1 >&2
}
package() {
# install scripts
install -m755 -D "$srcdir"/$pkgname.initd \
"$pkgdir"/etc/init.d/$pkgname
install -m644 -D "$srcdir"/$pkgname.confd \
"$pkgdir"/etc/conf.d/$pkgname
python3 -m installer -d "$pkgdir" \
dist/*.whl
}
check() {
python3 setup.py test
}
sha512sums="
68e3bb082f97ebe20391293cc8fa96c41c8f5ac5e8c24b2b7bd66eb104ec459bdfa49741e47486676e5daa88d7a71e3eb0d9432851aeafc74b0d4352e567e853 celery-5.2.7.tar.gz
c283956f8fe386ef39cb3e165a20d1cc7ff6398fbf5a922bec6b61fe4a71188519baed9feafc4e0e5b6864851545edaba54b89ba3613b2cf2ddd6426a6bf8fc2 celery.confd
3e5e2c6a55672bc0a02fca93ad334c694066c636ed62f2b55cb8f27b9301c429d68ce53667ec744b9b152ec786de7aea90cc05a143d6942cf5d2f34e51ca4089 celery.initd
"

View file

@ -1,4 +0,0 @@
CELERY_USER=celery
CELERY_LOG=/var/log/celery.log
CELERY_REDIR="1>/dev/null 2>> ${CELERY_LOG}"
CELERY_OPTS="-A celeryapp $CELERY_REDIR"

View file

@ -1,16 +0,0 @@
#!/sbin/openrc-run
supervisor=supervise-daemon
description="celery queue worker"
: ${CELERY_USER:="celery"}
: ${CELERY_GROUP:="$(id -gn $CELERY_USER)"}
pidfile="/run/$RC_SVCNAME.sd.pid"
supervise_daemon_args="-u $CELERY_USER -g $CELERY_GROUP"
command=/usr/bin/celery
command_args="${CELERY_OPTS}"
depends() {
use net
}

View file

@ -1,7 +0,0 @@
#!/bin/sh
addgroup -S celery 2>/dev/null
adduser -S -D -H -h /usr/share/celery -s /sbin/nologin -G celery -g celery celery 2>/dev/null
exit 0

View file

@@ -2,7 +2,7 @@
 # Maintainer: Aiden Grossman <agrossman154@yahoo.com>
 pkgname=coin
 pkgver=4.0.0
-pkgrel=5
+pkgrel=6
 pkgdesc="OpenGL OpenInventor compatible graphics library"
 url="https://github.com/coin3d/coin"
 license="BSD-3-Clause"

View file

@ -1,68 +0,0 @@
Author: Holger Jaekel <holger.jaekel@gmx.de>
Summary: ignore googletest, glog, and benchmark from the third-party folder
----
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -16,7 +16,7 @@
# C++ standard can be overridden when this is used as a sub-project.
if(NOT CMAKE_CXX_STANDARD)
# This project requires C++11.
- set(CMAKE_CXX_STANDARD 11)
+ set(CMAKE_CXX_STANDARD 14)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(CMAKE_CXX_EXTENSIONS OFF)
endif(NOT CMAKE_CXX_STANDARD)
@@ -187,26 +187,8 @@
# CPU cycles on the CI.
set(BUILD_TESTING_SAVED "${BUILD_TESTING}")
set(BUILD_TESTING OFF CACHE BOOL "" FORCE)
- add_subdirectory("third_party/glog" EXCLUDE_FROM_ALL)
set(BUILD_TESTING "${BUILD_TESTING_SAVED}" CACHE BOOL "" FORCE)
- # glog triggers deprecation warnings on OSX.
- # https://github.com/google/glog/issues/185
- if(CRC32C_HAVE_NO_DEPRECATED)
- set_property(TARGET glog APPEND PROPERTY COMPILE_OPTIONS -Wno-deprecated)
- endif(CRC32C_HAVE_NO_DEPRECATED)
-
- # glog triggers sign comparison warnings on gcc.
- if(CRC32C_HAVE_NO_SIGN_COMPARE)
- set_property(TARGET glog APPEND PROPERTY COMPILE_OPTIONS -Wno-sign-compare)
- endif(CRC32C_HAVE_NO_SIGN_COMPARE)
-
- # glog triggers unused parameter warnings on clang.
- if(CRC32C_HAVE_NO_UNUSED_PARAMETER)
- set_property(TARGET glog
- APPEND PROPERTY COMPILE_OPTIONS -Wno-unused-parameter)
- endif(CRC32C_HAVE_NO_UNUSED_PARAMETER)
-
set(CRC32C_TESTS_BUILT_WITH_GLOG 1)
endif(CRC32C_USE_GLOG)
@@ -314,17 +296,6 @@
set(install_gtest OFF)
set(install_gmock OFF)
- # This project is tested using GoogleTest.
- add_subdirectory("third_party/googletest")
-
- # GoogleTest triggers a missing field initializers warning.
- if(CRC32C_HAVE_NO_MISSING_FIELD_INITIALIZERS)
- set_property(TARGET gtest
- APPEND PROPERTY COMPILE_OPTIONS -Wno-missing-field-initializers)
- set_property(TARGET gmock
- APPEND PROPERTY COMPILE_OPTIONS -Wno-missing-field-initializers)
- endif(CRC32C_HAVE_NO_MISSING_FIELD_INITIALIZERS)
-
add_executable(crc32c_tests "")
target_sources(crc32c_tests
PRIVATE
@@ -385,7 +356,6 @@
# This project uses Google benchmark for benchmarking.
set(BENCHMARK_ENABLE_TESTING OFF CACHE BOOL "" FORCE)
set(BENCHMARK_ENABLE_EXCEPTIONS OFF CACHE BOOL "" FORCE)
- add_subdirectory("third_party/benchmark")
target_link_libraries(crc32c_bench benchmark)
if(CRC32C_USE_GLOG)

View file

@ -1,45 +0,0 @@
# Contributor: Holger Jaekel <holger.jaekel@gmx.de>
# Maintainer: Holger Jaekel <holger.jaekel@gmx.de>
pkgname=crc32c
pkgver=1.1.2
pkgrel=0
pkgdesc="CRC32C implementation with support for CPU-specific acceleration instructions"
url="https://github.com/google/crc32c"
arch="all"
license="BSD-3-Clause"
makedepends="cmake samurai"
checkdepends="benchmark-dev glog-dev gtest-dev"
subpackages="$pkgname-dev"
source="crc32c-$pkgver.tar.gz::https://github.com/google/crc32c/archive/$pkgver.tar.gz
10-third-party.patch"
build() {
if [ "$CBUILD" != "$CHOST" ]; then
CMAKE_CROSSOPTS="-DCMAKE_SYSTEM_NAME=Linux -DCMAKE_HOST_SYSTEM_NAME=Linux"
fi
cmake -B build -G Ninja \
-DCMAKE_INSTALL_PREFIX=/usr \
-DCMAKE_INSTALL_LIBDIR=lib \
-DBUILD_SHARED_LIBS=True \
-DCMAKE_BUILD_TYPE=None \
-DCRC32C_BUILD_TESTS="$(want_check && echo ON || echo OFF)" \
-DCRC32C_BUILD_BENCHMARKS="$(want_check && echo ON || echo OFF)" \
-DCRC32C_USE_GLOG="$(want_check && echo ON || echo OFF)" \
-DCRC32C_INSTALL=ON \
$CMAKE_CROSSOPTS .
cmake --build build
}
check() {
cd build
CTEST_OUTPUT_ON_FAILURE=TRUE ctest
./crc32c_bench
}
package() {
DESTDIR="$pkgdir" cmake --install build
}
sha512sums="
6325c52b5a6850b9f90086e0c0c86798c008af36e7cfd3a0216184a2d37b3bf7323481ddc6bfe4cbd5b31288b3ee6c69772d03085a13094cf95d00a9756a7196 crc32c-1.1.2.tar.gz
1f83e03d53271ee65a21e04c57392313003227a71a8c6de1fcf6c5d9041eaeb3cd08a9d4b86a610b80c06589cd06d36ae5e224d109256300b05fa65d638beddb 10-third-party.patch
"

backports/cura/APKBUILD (new file, 75 lines)
View file

@ -0,0 +1,75 @@
# Contributor: Anjandev Momi <anjan@momi.ca>
# Maintainer: Anjandev Momi <anjan@momi.ca>
pkgname=cura
# uranium and curaengine packages must be updated in sync with this version number
# py3-pynest2d and fdm-materials should be checked as well, but their versions are not always in sync
pkgver=5.2.2
pkgrel=1
pkgdesc="3D printer / slicing GUI built on top of the Uranium framework"
url="https://ultimaker.com/software/ultimaker-cura"
arch="noarch !ppc64le !x86 !armhf !riscv64 !s390x !armv7" # ppc64le: no py3-keyring
# x86: no curaengine
# armhf: no uranium, qt5-qtquickcontrols, qt5-qtquickcontrols2, qt5-qtgraphicaleffects
# riscv64: no uranium
# s390x: no py3-trimesh, no py3-numpy-stl
# armv7: no py3-trimesh
license="LGPL-3.0-or-later"
# add cura-binary-data to depends when packaged
depends="
curaengine
fdm-materials
uranium
py3-arcus
py3-keyring
py3-numpy-stl
py3-pyclipper
py3-pynest2d
py3-pyserial
py3-qt6
py3-requests
py3-trimesh
py3-zeroconf
"
makedepends="samurai cmake gettext gettext-dev" # needs msginit from gettext
checkdepends="py3-pytest"
subpackages="$pkgname-lang"
source="$pkgname-$pkgver.tar.gz::https://github.com/Ultimaker/Cura/archive/refs/tags/$pkgver.tar.gz
AppDesktopData.patch
CuraVersion.patch
cmake-helpers.patch
cmake.patch"
builddir="$srcdir/Cura-$pkgver"
options="!check" # tests broken after v5.x
build() {
local pyver="$(python3 -c 'import sys; print(f"{sys.version_info.major}.{sys.version_info.minor}")')"
cmake -B build -G Ninja \
-DCURA_VERSION=$pkgver \
-DPython_VERSION=$pyver \
-DURANIUM_DIR=/usr/share/uranium \
-DCMAKE_INSTALL_PREFIX=/usr \
-DCMAKE_INSTALL_LIBDIR=lib \
-DGETTEXT_MSGINIT_EXECUTABLE=msginit \
-DCURA_BINARY_DATA_DIRECTORY=/usr/share/cura \
-DCMAKE_BUILD_TYPE=minsizerel
cmake --build build
}
package() {
DESTDIR="$pkgdir" cmake --install build
mv $pkgdir/usr/bin/cura_app.py $pkgdir/usr/bin/cura
# don't ever send any user or print info through the internet to Ultimaker
rm -rf "$pkgdir/usr/lib/cura/plugins/SliceInfoPlugin"
install -d "$pkgdir"/usr/share/locale
mv "$pkgdir"/usr/share/cura/resources/i18n/* "$pkgdir"/usr/share/locale/
}
sha512sums="
5d4e0fdc740d0c048905e2b87cc8c73eedea59b54766b74760505902007b365582d22b46b1cfdcd6914828840865c10a3beb0ef6a1f04ea181c81d44f42434bc cura-5.2.2.tar.gz
214e373f6cab7e3ccac12c96d1b5ca636d8d1e9ecdadaae84fc28fb429969c7c2d6055ce2a01b6db3ad85ab6cbc8d135cf2c26c77d7cfe13a73eb81aa5e85f11 AppDesktopData.patch
e3bb302db70ca195b2ce9831e71302c8ee2a51955fecc7264a495d7d4fc9c107cfd48811aa5865f16671e7b1ae126f95d3d7bbb6a70f367f7f91a2b32bce377b CuraVersion.patch
0db4ff97e7f82ae1a9dbc9c330d08c3e46249feeb3fb630f7c4e2de73749327337ec041680c39a07e0b5034c1b3f3656d75614ab4dc2f39861c8e27bdb2a58ef cmake-helpers.patch
05a73f892700ff6279230385b04180873a62b7413fa7f7d55ae150f1bcee57ef05eda0bd7fe444fe660ab66a044c958f42badd33b743fca81033ae8f19dd3805 cmake.patch
"

View file

@ -0,0 +1,58 @@
--- /dev/null
+++ ./com.ultimaker.cura.appdata.xml
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Copyright 2016 Richard Hughes <richard@hughsie.com> -->
+<component type="desktop">
+ <id>com.ultimaker.cura.desktop</id>
+ <metadata_license>CC0-1.0</metadata_license>
+ <project_license>LGPL-3.0 and CC-BY-SA-4.0</project_license>
+ <name>Cura</name>
+ <summary>The world's most advanced 3d printer software</summary>
+ <description>
+ <p>
+ Cura creates a seamless integration between hardware, software and
+ materials for the best 3D printing experience around.
+ Cura supports the 3MF, OBJ and STL file formats and is available on
+ Windows, Mac and Linux.
+ </p>
+ <ul>
+ <li>Novices can start printing right away</li>
+ <li>Experts are able to customize 300 settings to achieve the best results</li>
+ <li>Optimized profiles for Ultimaker materials</li>
+ <li>Supported by a global network of Ultimaker certified service partners</li>
+ <li>Print multiple objects at once with different settings for each object</li>
+ <li>Cura supports STL, 3MF and OBJ file formats</li>
+ <li>Open source and completely free</li>
+ </ul>
+ </description>
+ <screenshots>
+ <screenshot type="default">
+ <image>https://raw.githubusercontent.com/Ultimaker/Cura/master/screenshot.png</image>
+ </screenshot>
+ </screenshots>
+ <url type="homepage">https://ultimaker.com/software/ultimaker-cura?utm_source=cura&amp;utm_medium=software&amp;utm_campaign=cura-update-linux</url>
+ <translation type="gettext">Cura</translation>
+</component>
--- /dev/null
+++ ./com.ultimaker.cura.desktop.in
@@ -0,0 +1,19 @@
+[Desktop Entry]
+Name=Ultimaker Cura
+Name[de]=Ultimaker Cura
+Name[nl]=Ultimaker Cura
+GenericName=3D Printing Software
+GenericName[de]=3D-Druck-Software
+GenericName[nl]=3D-printsoftware
+Comment=Cura converts 3D models into paths for a 3D printer. It prepares your print for maximum accuracy, minimum printing time and good reliability with many extra features that make your print come out great.
+Comment[de]=Cura wandelt 3D-Modelle in Pfade für einen 3D-Drucker um. Es bereitet Ihren Druck für maximale Genauigkeit, minimale Druckzeit und guter Zuverlässigkeit mit vielen zusätzlichen Funktionen vor, damit Ihr Druck großartig wird.
+Comment[nl]=Cura converteert 3D-modellen naar paden voor een 3D printer. Het bereidt je print voor om zeer precies, snel en betrouwbaar te kunnen printen, met veel extra functionaliteit om je print er goed uit te laten komen.
+Exec=@CMAKE_INSTALL_FULL_BINDIR@/cura %F
+TryExec=@CMAKE_INSTALL_FULL_BINDIR@/cura
+Icon=cura-icon
+Terminal=false
+Type=Application
+MimeType=model/stl;application/vnd.ms-3mfdocument;application/prs.wavefront-obj;image/bmp;image/gif;image/jpeg;image/png;text/x-gcode;application/x-amf;application/x-ply;application/x-ctm;model/vnd.collada+xml;model/gltf-binary;model/gltf+json;model/vnd.collada+xml+zip;
+Categories=Graphics;
+Keywords=3D;Printing;Slicer;
+StartupWMClass=cura.real

View file

@ -0,0 +1,16 @@
--- /dev/null
+++ ./cura/CuraVersion.py.in
@@ -0,0 +1,13 @@
+# Copyright (c) 2020 Ultimaker B.V.
+# Cura is released under the terms of the LGPLv3 or higher.
+
+CuraAppName = "@CURA_APP_NAME@"
+CuraAppDisplayName = "@CURA_APP_DISPLAY_NAME@"
+CuraVersion = "@CURA_VERSION@"
+CuraBuildType = "@CURA_BUILDTYPE@"
+CuraDebugMode = True if "@_cura_debugmode@" == "ON" else False
+CuraCloudAPIRoot = "@CURA_CLOUD_API_ROOT@"
+CuraCloudAPIVersion = "@CURA_CLOUD_API_VERSION@"
+CuraCloudAccountAPIRoot = "@CURA_CLOUD_ACCOUNT_API_ROOT@"
+CuraMarketplaceRoot = "@CURA_MARKETPLACE_ROOT@"
+CuraDigitalFactoryURL = "@CURA_DIGITAL_FACTORY_URL@"

View file

@ -0,0 +1,95 @@
--- /dev/null
+++ ./cmake/CuraPluginInstall.cmake
@@ -0,0 +1,92 @@
+# Copyright (c) 2022 Ultimaker B.V.
+# CuraPluginInstall.cmake is released under the terms of the LGPLv3 or higher.
+
+#
+# This module detects all plugins that need to be installed and adds them using the CMake install() command.
+# It detects all plugin folder in the path "plugins/*" where there's a "plugin.json" in it.
+#
+# Plugins can be configured to NOT BE INSTALLED via the variable "CURA_NO_INSTALL_PLUGINS" as a list of string in the
+# form of "a;b;c" or "a,b,c". By default all plugins will be installed.
+#
+
+option(PRINT_PLUGIN_LIST "Should the list of plugins that are installed be printed?" ON)
+
+# Options or configuration variables
+set(CURA_NO_INSTALL_PLUGINS "" CACHE STRING "A list of plugins that should not be installed, separated with ';' or ','.")
+
+file(GLOB_RECURSE _plugin_json_list ${CMAKE_SOURCE_DIR}/plugins/*/plugin.json)
+list(LENGTH _plugin_json_list _plugin_json_list_len)
+
+# Sort the lists alphabetically so we can handle cases like this:
+# - plugins/my_plugin/plugin.json
+# - plugins/my_plugin/my_module/plugin.json
+# In this case, only "plugins/my_plugin" should be added via install().
+set(_no_install_plugin_list ${CURA_NO_INSTALL_PLUGINS})
+# Sanitize the string so the comparison will be case-insensitive.
+string(STRIP "${_no_install_plugin_list}" _no_install_plugin_list)
+string(TOLOWER "${_no_install_plugin_list}" _no_install_plugin_list)
+
+# WORKAROUND counterpart of what's in cura-build.
+string(REPLACE "," ";" _no_install_plugin_list "${_no_install_plugin_list}")
+
+list(LENGTH _no_install_plugin_list _no_install_plugin_list_len)
+
+if(_no_install_plugin_list_len GREATER 0)
+ list(SORT _no_install_plugin_list)
+endif()
+if(_plugin_json_list_len GREATER 0)
+ list(SORT _plugin_json_list)
+endif()
+
+# Check all plugin directories and add them via install() if needed.
+set(_install_plugin_list "")
+foreach(_plugin_json_path ${_plugin_json_list})
+ get_filename_component(_plugin_dir ${_plugin_json_path} DIRECTORY)
+ file(RELATIVE_PATH _rel_plugin_dir ${CMAKE_CURRENT_SOURCE_DIR} ${_plugin_dir})
+ get_filename_component(_plugin_dir_name ${_plugin_dir} NAME)
+
+ # Make plugin name comparison case-insensitive
+ string(TOLOWER "${_plugin_dir_name}" _plugin_dir_name_lowercase)
+
+ # Check if this plugin needs to be skipped for installation
+ set(_add_plugin ON) # Indicates if this plugin should be added to the build or not.
+ set(_is_no_install_plugin OFF) # If this plugin will not be added, this indicates if it's because the plugin is
+ # specified in the NO_INSTALL_PLUGINS list.
+ if(_no_install_plugin_list)
+ if("${_plugin_dir_name_lowercase}" IN_LIST _no_install_plugin_list)
+ set(_add_plugin OFF)
+ set(_is_no_install_plugin ON)
+ endif()
+ endif()
+
+ # Make sure this is not a subdirectory in a plugin that's already in the install list
+ if(_add_plugin)
+ foreach(_known_install_plugin_dir ${_install_plugin_list})
+ if(_plugin_dir MATCHES "${_known_install_plugin_dir}.+")
+ set(_add_plugin OFF)
+ break()
+ endif()
+ endforeach()
+ endif()
+
+ if(_add_plugin)
+ if(${PRINT_PLUGIN_LIST})
+ message(STATUS "[+] PLUGIN TO INSTALL: ${_rel_plugin_dir}")
+ endif()
+ get_filename_component(_rel_plugin_parent_dir ${_rel_plugin_dir} DIRECTORY)
+ install(DIRECTORY ${_rel_plugin_dir}
+ DESTINATION lib${LIB_SUFFIX}/cura/${_rel_plugin_parent_dir}
+ PATTERN "__pycache__" EXCLUDE
+ PATTERN "*.qmlc" EXCLUDE
+ )
+ list(APPEND _install_plugin_list ${_plugin_dir})
+ elseif(_is_no_install_plugin)
+ if(${PRINT_PLUGIN_LIST})
+ message(STATUS "[-] PLUGIN TO REMOVE : ${_rel_plugin_dir}")
+ endif()
+ execute_process(COMMAND ${Python_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/cmake/mod_bundled_packages_json.py
+ -d ${CMAKE_CURRENT_SOURCE_DIR}/resources/bundled_packages
+ ${_plugin_dir_name}
+ RESULT_VARIABLE _mod_json_result)
+ endif()
+endforeach()
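
The CURA_NO_INSTALL_PLUGINS cache variable introduced by this patch is the configure-time knob for skipping plugins. As a hedged alternative to the rm -rf of SliceInfoPlugin in package() above, the APKBUILD could pass it at configure time, roughly:

# illustrative: drop the telemetry plugin at configure time instead of after install
cmake -B build -G Ninja \
	-DCURA_NO_INSTALL_PLUGINS="SliceInfoPlugin" \
	-DCMAKE_INSTALL_PREFIX=/usr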

View file

@ -0,0 +1,85 @@
--- ./CMakeLists.txt.orig
+++ ./CMakeLists.txt
@@ -1,10 +1,6 @@
# Copyright (c) 2022 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
-# NOTE: This is only being used for translation scripts.
-
-# For MSVC flags, will be ignored on non-Windows OS's and this project in general. Only needed for cura-build-environment.
-cmake_policy(SET CMP0091 NEW)
project(cura)
cmake_minimum_required(VERSION 3.18)
@@ -15,8 +11,44 @@
set(URANIUM_DIR "${CMAKE_SOURCE_DIR}/../Uranium" CACHE PATH "The location of the Uranium repository")
set(URANIUM_SCRIPTS_DIR "${URANIUM_DIR}/scripts" CACHE PATH "The location of the scripts directory of the Uranium repository")
+option(CURA_DEBUGMODE "Enable debug dialog and other debug features" OFF)
+if(CURA_DEBUGMODE)
+ set(_cura_debugmode "ON")
+endif()
+
option(GENERATE_TRANSLATIONS "Should the translations be generated?" ON)
+set(CURA_APP_NAME "cura" CACHE STRING "Short name of Cura, used for configuration folder")
+set(CURA_APP_DISPLAY_NAME "Ultimaker Cura" CACHE STRING "Display name of Cura")
+set(CURA_VERSION "master" CACHE STRING "Version name of Cura")
+set(CURA_BUILDTYPE "" CACHE STRING "Build type of Cura, eg. 'PPA'")
+set(CURA_CLOUD_API_ROOT "" CACHE STRING "Alternative Cura cloud API root")
+set(CURA_CLOUD_API_VERSION "" CACHE STRING "Alternative Cura cloud API version")
+set(CURA_CLOUD_ACCOUNT_API_ROOT "" CACHE STRING "Alternative Cura cloud account API version")
+set(CURA_MARKETPLACE_ROOT "" CACHE STRING "Alternative Marketplace location")
+set(CURA_DIGITAL_FACTORY_URL "" CACHE STRING "Alternative Digital Factory location")
+
+configure_file(${CMAKE_SOURCE_DIR}/com.ultimaker.cura.desktop.in ${CMAKE_BINARY_DIR}/com.ultimaker.cura.desktop @ONLY)
+
+configure_file(cura/CuraVersion.py.in CuraVersion.py @ONLY)
+
+if(NOT DEFINED Python_VERSION)
+ set(Python_VERSION
+ 3.11
+ CACHE STRING "Python Version" FORCE)
+ message(STATUS "Setting Python version to ${Python_VERSION}. Set Python_VERSION if you want to compile against an other version.")
+endif()
+if(APPLE)
+ set(Python_FIND_FRAMEWORK NEVER)
+endif()
+find_package(Python ${Python_VERSION} EXACT REQUIRED COMPONENTS Interpreter)
+message(STATUS "Linking and building ${project_name} against Python ${Python_VERSION}")
+if(NOT DEFINED Python_SITELIB_LOCAL)
+ set(Python_SITELIB_LOCAL
+ "${Python_SITELIB}"
+ CACHE PATH "Local alternative site-package location to install Cura" FORCE)
+endif()
+
if(NOT ${URANIUM_DIR} STREQUAL "")
set(CMAKE_MODULE_PATH "${CMAKE_MODULE_PATH};${URANIUM_DIR}/cmake")
endif()
@@ -29,4 +61,24 @@
if(${GENERATE_TRANSLATIONS})
CREATE_TRANSLATION_TARGETS()
endif()
-endif()
\ No newline at end of file
+endif()
+
+install(DIRECTORY resources DESTINATION ${CMAKE_INSTALL_DATADIR}/cura)
+
+include(CuraPluginInstall)
+
+install(FILES cura_app.py DESTINATION ${CMAKE_INSTALL_BINDIR}
+ PERMISSIONS OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ WORLD_EXECUTE)
+install(DIRECTORY cura DESTINATION "${Python_SITELIB_LOCAL}")
+install(FILES ${CMAKE_BINARY_DIR}/CuraVersion.py DESTINATION "${Python_SITELIB_LOCAL}/cura/")
+if(NOT APPLE AND NOT WIN32)
+ install(FILES ${CMAKE_BINARY_DIR}/com.ultimaker.cura.desktop
+ DESTINATION ${CMAKE_INSTALL_DATADIR}/applications)
+ install(FILES ${CMAKE_SOURCE_DIR}/resources/images/cura-icon.png
+ DESTINATION ${CMAKE_INSTALL_DATADIR}/icons/hicolor/128x128/apps/)
+ install(FILES com.ultimaker.cura.appdata.xml
+ DESTINATION ${CMAKE_INSTALL_DATADIR}/metainfo)
+ install(FILES cura.sharedmimeinfo
+ DESTINATION ${CMAKE_INSTALL_DATADIR}/mime/packages/
+ RENAME cura.xml )
+endif()

View file

@@ -2,7 +2,7 @@
 # Maintainer: Anjandev Momi <anjan@momi.ca>
 pkgname=dex
 pkgver=0.9.0
-pkgrel=0
+pkgrel=1
 pkgdesc="program to generate and execute DesktopEntry files of the Application type"
 url="https://github.com/jceb/dex"
 arch="all"
@@ -18,7 +18,7 @@ build() {
 }
 package() {
-	make install PREFIX=/usr DESTDIR="$pkgdir"
+	make install PREFIX=/usr MANPREFIX=/usr/share/man DESTDIR="$pkgdir"
 }
 sha512sums="

View file

@@ -1,17 +1,16 @@
-# Contributor: Lauren N. Liberda <lauren@selfisekai.rocks>
-# Maintainer: Lauren N. Liberda <lauren@selfisekai.rocks>
+# Contributor: lauren n. liberda <lauren@selfisekai.rocks>
+# Maintainer: lauren n. liberda <lauren@selfisekai.rocks>
 pkgname=electron-tasje
-pkgver=0.5.7
+pkgver=0.7.1
 pkgrel=0
 pkgdesc="Tiny replacement for electron-builder"
 url="https://codeberg.org/selfisekai/electron_tasje/"
 arch="aarch64 x86_64" # only useful on platforms with electron
-license="Apache-2.0"
-makedepends="cargo"
-source="electron_tasje-$pkgver.tar.gz::https://codeberg.org/selfisekai/electron_tasje/archive/v$pkgver.tar.gz"
+license="Apache-2.0 OR MIT"
+makedepends="cargo cargo-auditable"
+source="electron_tasje-${_rev:-"$pkgver"}.tar.gz::https://codeberg.org/selfisekai/electron_tasje/archive/${_rev:-"v$pkgver"}.tar.gz"
 builddir="$srcdir/electron_tasje"
-export CARGO_REGISTRIES_CRATES_IO_PROTOCOL="sparse"
 prepare() {
 	default_prepare
@@ -20,7 +19,7 @@ prepare() {
 }
 build() {
-	cargo build --frozen --release
+	cargo auditable build --frozen --release
 }
 check() {
@@ -32,5 +31,5 @@ package() {
 }
 sha512sums="
-90649d6c7ac10edc6b50e9bfba54d5df472c0621768886fcdb039874152dbd2a72c1a23f38a5e25e4983f16f49546039eca2b79b9386f261b3c5d31a93c439f5 electron_tasje-0.5.7.tar.gz
+665ccbd6cb357c25d55daed4ad3b3ce008da258054951d9d069a5b12e72dd5812d534f906868e6b18e78949f058069a961c394c6f21ab3b3fab5393c330445e5 electron_tasje-0.7.1.tar.gz
 "

View file

@ -1,17 +1,21 @@
# Maintainer: psykose <alice@ayaya.dev> # Maintainer: lauren n. liberda <lauren@selfisekai.rocks>
pkgname=electron pkgname=electron
pkgver=24.2.0 pkgver=30.0.9
_semver="${pkgver/_beta/-beta.}"
pkgrel=0 pkgrel=0
_chromium=112.0.5615.165 _chromium=124.0.6367.233
_depot_tools=dbcecc90179a25159dc62f6ed00ae244360297e8 _copium_tag=124.5
_depot_tools=495b23b39aaba2ca3b55dd27cadc523f1cb17ee6
pkgdesc="Electron cross-platform desktop toolkit" pkgdesc="Electron cross-platform desktop toolkit"
url="https://github.com/electron/electron" url="https://github.com/electron/electron"
arch="aarch64 x86_64" # same as chromium arch="aarch64 x86_64" # same as chromium
license="MIT" license="MIT"
depends="gtk+3.0 so:libudev.so.1 xdg-utils" depends="gtk+3.0 so:libudev.so.1 xdg-utils"
makedepends=" makedepends="
ada-dev
alsa-lib-dev alsa-lib-dev
aom-dev aom-dev
base64-dev
bash bash
brotli-dev brotli-dev
bsd-compat-headers bsd-compat-headers
@ -21,14 +25,17 @@ makedepends="
clang-dev clang-dev
clang-extra-tools clang-extra-tools
compiler-rt compiler-rt
crc32c-dev
cups-dev cups-dev
curl-dev curl-dev
dav1d-dev dav1d-dev
dbus-glib-dev dbus-glib-dev
double-conversion-dev
eudev-dev eudev-dev
ffmpeg-dev ffmpeg-dev
findutils findutils
flac-dev flac-dev
flatbuffers-dev
flex flex
freetype-dev freetype-dev
gperf gperf
@ -36,23 +43,26 @@ makedepends="
gn gn
gzip gzip
harfbuzz-dev harfbuzz-dev
hdrhistogram-c-dev
highway-dev
hunspell-dev hunspell-dev
http-parser-dev http-parser-dev
hwids-usb hwdata-dev
java-jdk java-jdk
jpeg-dev jpeg-dev
jsoncpp-dev
krb5-dev krb5-dev
lcms2-dev lcms2-dev
libarchive-tools libarchive-tools
libavif-dev libavif-dev
libbsd-dev libbsd-dev
libcap-dev libcap-dev
libdrm-dev
libevent-dev libevent-dev
libexif-dev libexif-dev
libgcrypt-dev libgcrypt-dev
libjpeg-turbo-dev libjpeg-turbo-dev
libnotify-dev libnotify-dev
libsecret-dev
libusb-dev libusb-dev
libva-dev libva-dev
libwebp-dev libwebp-dev
@ -72,62 +82,107 @@ makedepends="
nodejs nodejs
npm npm
nss-dev nss-dev
openh264-dev
opus-dev opus-dev
pciutils-dev pciutils-dev
perl perl
pipewire-dev pipewire-dev
pulseaudio-dev pulseaudio-dev
py3-httplib2 py3-httplib2
py3-jinja2
py3-parsing py3-parsing
py3-setuptools
py3-six py3-six
python3 python3
qt5-qtbase-dev qt5-qtbase-dev
re2-dev
rsync rsync
rust
samurai samurai
snappy-dev
speex-dev speex-dev
sqlite-dev sqlite-dev
woff2-dev
xcb-proto xcb-proto
yarn yarn
zlib-dev zlib-dev
zstd zstd-dev
" "
subpackages="$pkgname-lang $pkgname-dev" subpackages="$pkgname-lang $pkgname-dev"
# the lower patches are specific to electron, the top ones are from the equivalent chromium version # the lower patches are specific to electron, the top ones are from the equivalent chromium version
source="$pkgname-$pkgver-2.tar.zst::https://dev.alpinelinux.org/archive/electron/electron-$pkgver.tar.zst source="https://ab-sn.lnl.gay/electron-$_semver-$_chromium.tar.zst
chromium-VirtualCursor-standard-layout.patch copium-$_copium_tag.tar.gz::https://codeberg.org/selfisekai/copium/archive/$_copium_tag.tar.gz
chromium-revert-drop-of-system-java.patch chromium-revert-drop-of-system-java.patch
chromium-use-alpine-target.patch compiler.patch
disable-dns_config_service.patch
disable-failing-tests.patch
fc-cache-version.patch
fix-missing-cstdint-include-musl.patch fix-missing-cstdint-include-musl.patch
fix-opus.patch
fstatat-32bit.patch
gdbinit.patch gdbinit.patch
generic-sensor-include.patch
import-version.patch import-version.patch
libcxx-muslmode.patch libstdc++13.patch
mman.patch mman.patch
musl-auxv.patch
musl-sandbox.patch musl-sandbox.patch
musl-tid-caching.patch musl-tid-caching.patch
musl-v8-monotonic-pthread-cont_timedwait.patch musl-v8-monotonic-pthread-cont_timedwait.patch
no-execinfo.patch no-execinfo.patch
no-mallinfo.patch no-mallinfo.patch
no-res-ninit-nclose.patch no-res-ninit-nclose.patch
quiche-arena-size.patch no-sandbox-settls.patch
scoped-file-no-close.patch partalloc-no-tagging-arm64.patch
perfetto-libstdc++.patch
pvalloc.patch
random-fixes.patch
quiche-array.patch
system-zstd.patch
temp-failure-retry.patch temp-failure-retry.patch
yes-musl.patch
icon.patch icon.patch
python-jinja-3.10.patch python-jinja-3.10.patch
webpack-hash.patch webpack-hash.patch
unbundle-node.patch
default.conf default.conf
electron.desktop electron.desktop
electron-launcher.sh electron-launcher.sh
" "
# chromium sandbox is suid _copium_patches="
cr124-iwyu-sys-select-dawn-terminal.patch
cr124-libwebp-shim-sharpyuv.patch
"
# tests are todo for some base checks # tests are todo for some base checks
options="!check net suid" options="!check net suid"
builddir="$srcdir/electron-$_semver-$_chromium"
export PATH="$PATH:/usr/lib/qt5/bin"
export CC=clang
export CXX=clang++
# required to find the tools
export AR=llvm-ar
export NM=llvm-nm
export LD=clang++
# less log spam, reproducible
export CFLAGS="${CFLAGS/-g/} -O2 -Wno-builtin-macro-redefined -Wno-deprecated-declarations -Wno-shift-count-overflow -Wno-ignored-attributes"
export CXXFLAGS="${CXXFLAGS/-g/} -O2 -Wno-builtin-macro-redefined -Wno-deprecated-declarations -Wno-invalid-constexpr"
export CPPFLAGS="${CPPFLAGS/-g/} -D__DATE__= -D__TIME__= -D__TIMESTAMP__="
case "$CARCH" in
aarch64|arm*|riscv64)
# not supported by clang here
export CFLAGS="${CFLAGS/-fstack-clash-protection}"
export CXXFLAGS="${CXXFLAGS/-fstack-clash-protection}"
;;
esac
# breaks chromium-based stuff
export CXXFLAGS="${CXXFLAGS/-D_GLIBCXX_ASSERTIONS=1}"
# creates a dist tarball that does not need to git clone everything at build time. # creates a dist tarball that does not need to git clone everything at build time.
_distbucket="sakamoto/lnl-aports-snapshots/"
snapshot() { snapshot() {
deps deps
# vpython3 execs system python3 with this set # vpython3 execs system python3 with this set
@ -135,36 +190,33 @@ snapshot() {
export CHROMIUM_BUILDTOOLS_PATH="$srcdir/src/buildtools" export CHROMIUM_BUILDTOOLS_PATH="$srcdir/src/buildtools"
mkdir -p "$srcdir" mkdir -p "$srcdir"
cd "$srcdir" cd "$srcdir"
git clone --branch=$_chromium --depth=1 \
https://chromium.googlesource.com/chromium/src.git
git clone https://github.com/electron/electron.git if ! [ -d depot_tools ]; then
(
( git clone --depth 1 -b main https://chromium.googlesource.com/chromium/tools/depot_tools.git
git clone --depth 1 -b main https://chromium.googlesource.com/chromium/tools/depot_tools.git cd depot_tools
cd depot_tools git fetch --depth 1 origin $_depot_tools
git fetch --depth 1 origin $_depot_tools git checkout $_depot_tools
git checkout $_depot_tools )
) fi
export PATH="$PATH:$srcdir/depot_tools" export PATH="$PATH:$srcdir/depot_tools"
echo "solutions = [ echo "solutions = [
{ {
\"name\": \"src/electron\", \"name\": \"src/electron\",
\"url\": \"file://$srcdir/electron@v$pkgver\", \"url\": \"https://github.com/electron/electron.git@v$_semver\",
\"deps_file\": \"DEPS\", \"deps_file\": \"DEPS\",
\"managed\": False, \"managed\": False,
\"custom_deps\": { \"custom_deps\": {
\"src\": None, \"src\": \"https://chromium.googlesource.com/chromium/src.git@$_chromium\",
}, },
\"custom_vars\": {}, \"custom_vars\": {},
}, },
]" > .gclient ]" > .gclient
python3 depot_tools/gclient.py sync \ python3 depot_tools/gclient.py sync \
--with_branch_heads \ --no-history \
--with_tags \
--nohooks --nohooks
python3 src/build/landmines.py python3 src/build/landmines.py
@ -176,32 +228,21 @@ snapshot() {
python3 src/build/util/lastchange.py -m SKIA_COMMIT_HASH \ python3 src/build/util/lastchange.py -m SKIA_COMMIT_HASH \
-s src/third_party/skia --header src/skia/ext/skia_commit_hash.h -s src/third_party/skia --header src/skia/ext/skia_commit_hash.h
# why? # rolled newer chromium with it included
cp -r electron/patches/ffmpeg src/electron/patches/ sed -i '/reland_mojom_ts_generator_handle_empty_module_path_identically_to.patch/d' src/electron/patches/chromium/.patches
python3 electron/script/apply_all_patches.py \ python3 src/electron/script/apply_all_patches.py \
electron/patches/config.json src/electron/patches/config.json
python3 src/tools/update_pgo_profiles.py \ mv src $pkgname-$_semver-$_chromium
--target=linux \
update \
--gs-url-base=chromium-optimization-profiles/pgo_profiles
python3 src/tools/download_optimization_profile.py \
--newest_state=src/chrome/android/profiles/newest.txt \
--local_state=src/chrome/android/profiles/local.txt \
--output_name=src/chrome/android/profiles/afdo.prof \
--gs_url_base=chromeos-prebuilt/afdo-job/llvm
mv src $pkgname-$pkgver
# extra binaries are most likely things we don't want, so nuke them all # extra binaries are most likely things we don't want, so nuke them all
for elf in $(scanelf -RA -F "%F" $pkgname-$pkgver); do for elf in $(scanelf -RA -F "%F" $pkgname-$_semver-$_chromium); do
rm -f "$elf" rm -f "$elf"
done done
msg "generating tarball.. (this takes a while)" msg "generating tarball.. (this takes a while)"
tar -cf $pkgname-$pkgver.tar \ tar -cf $pkgname-$_semver-$_chromium.tar \
--exclude="ChangeLog*" \ --exclude="ChangeLog*" \
--exclude="testdata/*" \ --exclude="testdata/*" \
--exclude="test_data/*" \ --exclude="test_data/*" \
@ -209,15 +250,32 @@ snapshot() {
--exclude-backups \ --exclude-backups \
--exclude-caches-all \ --exclude-caches-all \
--exclude-vcs \ --exclude-vcs \
$pkgname-$pkgver $pkgname-$_semver-$_chromium
zstd --ultra --long -22 -T0 -vv $pkgname-$pkgver.tar zstd --auto-threads=logical --ultra --long -22 -T"${ZSTD_LIMIT:-0}" -vv $pkgname-$_semver-$_chromium.tar -o "$SRCDEST"/$pkgname-$_semver-$_chromium.tar.zst
mcli cp "$SRCDEST"/$pkgname-$_semver-$_chromium.tar.zst "$_distbucket"
} }
prepare() { prepare() {
default_prepare default_prepare
git init . for i in $_copium_patches; do
case "$i" in
*.patch)
msg "${i%::*}"
patch -p1 -i "$srcdir/copium/$i" || failed="$failed $i"
;;
esac
done
if [ ! -z "$failed" ]; then
error "The following patches failed to apply:"
for i in $failed; do
printf " %s\n" "$i" >&2
done
exit 1
fi
git init -q .
# link to system tools # link to system tools
ln -sfv /usr/bin/clang-format buildtools/linux64/clang-format ln -sfv /usr/bin/clang-format buildtools/linux64/clang-format
@ -227,14 +285,14 @@ prepare() {
( (
cd electron cd electron
git init . git init -q .
git config user.email "example@example.com" git config user.email "example@example.com"
git config user.name "example" git config user.name "example"
git config commit.gpgsign false
git add LICENSE git add LICENSE
git commit -m "init" git commit -m "init"
git tag "v$pkgver" git tag "v$_semver"
# jesus christ what the fuck is wrong with you? git pack-refs
touch .git/packed-refs
yarn install --frozen-lockfile --no-scripts yarn install --frozen-lockfile --no-scripts
) )
@ -243,38 +301,39 @@ prepare() {
./update_npm_deps ./update_npm_deps
) )
# these are normally unbundled, but they can't be with custom_libcxx. # reusable system library settings
# we use custom_libcxx as there are a few libstdc++ incompatibilities # flatbuffers - tensorflow has a few static_asserts for a specific patch version
# that now cause segfaults with the wayland ozone platform since electron 22. # libavif - https://github.com/AOMediaCodec/libavif/commit/50a541469c98009016af8dcc9f83a1be79f3a7d9
# obviously, this is a FIXME. # libaom - https://aomedia.googlesource.com/aom/+/706ee36dcc82%5E%21/
# --- # but watch this space: https://aomedia-review.googlesource.com/c/aom/+/188606
# jsoncpp # jsoncpp, re2, snappy, swiftshader, vulkan, woff2 - needs use_custom_libcxx=false
# re2 local chromium_use_system="
# woff2
# ---
# libavif - https://github.com/AOMediaCodec/libavif/commit/4d2776a3
local use_system="
brotli brotli
crc32c
dav1d dav1d
double-conversion
ffmpeg ffmpeg
flac flac
fontconfig fontconfig
freetype freetype
harfbuzz-ng harfbuzz-ng
highway
icu icu
libaom
libdrm libdrm
libevent libevent
libjpeg libjpeg
libsecret
libusb
libwebp libwebp
libxml libxml
libxslt libxslt
openh264
opus opus
snappy
zlib zlib
zstd
" "
for _lib in $use_system libjpeg_turbo; do for _lib in $chromium_use_system jinja2 libjpeg_turbo; do
msg "Removing buildscripts for system provided $_lib" msg "Removing buildscripts for system provided $_lib"
find . -type f -path "*third_party/$_lib/*" \ find . -type f -path "*third_party/$_lib/*" \
\! -path "*third_party/$_lib/chromium/*" \ \! -path "*third_party/$_lib/chromium/*" \
@ -284,10 +343,39 @@ prepare() {
\! -path './third_party/pdfium/third_party/freetype/include/pstables.h' \ \! -path './third_party/pdfium/third_party/freetype/include/pstables.h' \
\! -path './third_party/harfbuzz-ng/utils/hb_scoped.h' \ \! -path './third_party/harfbuzz-ng/utils/hb_scoped.h' \
\! -path './third_party/crashpad/crashpad/third_party/zlib/zlib_crashpad.h' \ \! -path './third_party/crashpad/crashpad/third_party/zlib/zlib_crashpad.h' \
\! -regex '.*\.\(gn\|gni\|isolate\|py\)' \ \! -regex '.*\.\(gn\|gni\|gyp\|gypi\|isolate\|py\)' \
-delete -delete
done done
# llhttp - 9.x needed, 8.x in repo (2023-12-17)
# ada - needs use_custom_libcxx=false
local node_use_system="
base64
brotli
cares
corepack
histogram
nghttp2
nghttp3
ngtcp2
zlib
"
# some of these are provided by system, e.g. brotli. some are from chromium,
# e.g. boringssl (as openssl). some are not in use at all (corepack)
for _lib in $node_use_system openssl; do
msg "Removing buildscripts for $_lib"
find . -type f -path "*third_party/electron_node/deps/$_lib/*" \
\! -path "*third_party/electron_node/deps/$_lib/chromium/*" \
\! -path "*third_party/electron_node/deps/$_lib/google/*" \
\! -regex '.*\.\(gn\|gni\|gyp\|gypi\|isolate\|py\)' \
-delete
done
# XXX: hack. unbundle-node.patch uses this list to switch things
# in config.gypi. https://github.com/electron/electron/issues/40836
echo $node_use_system > third_party/electron_node/use_system.txt
rm -rf third_party/electron_node/tools/inspector_protocol/jinja2
# https://groups.google.com/a/chromium.org/d/topic/chromium-packagers/9JX1N2nf4PU/discussion # https://groups.google.com/a/chromium.org/d/topic/chromium-packagers/9JX1N2nf4PU/discussion
touch chrome/test/data/webui/i18n_process_css_test.html touch chrome/test/data/webui/i18n_process_css_test.html
# Use the file at run time instead of effectively compiling it in # Use the file at run time instead of effectively compiling it in
@ -296,7 +384,7 @@ prepare() {
msg "Running debundle script" msg "Running debundle script"
python3 build/linux/unbundle/replace_gn_files.py --system-libraries \ python3 build/linux/unbundle/replace_gn_files.py --system-libraries \
$use_system $chromium_use_system
python3 third_party/libaddressinput/chromium/tools/update-strings.py python3 third_party/libaddressinput/chromium/tools/update-strings.py
# prevent annoying errors when regenerating gni # prevent annoying errors when regenerating gni
@ -321,78 +409,77 @@ _configure() {
cd "$builddir" cd "$builddir"
msg "Configuring build" msg "Configuring build"
export PATH="$PATH:/usr/lib/qt5/bin" local clang_ver="$(clang -dumpversion)"
# clang uses much less memory (and this doesn't support gcc) case "$USE_CCACHE" in
export CC=clang 1)
export CXX=clang++ local cc_wrapper="ccache"
;;
*)
local cc_wrapper=""
;;
esac
# required to find the tools local maglev=true
export AR=llvm-ar local symbol_level=0
export NM=llvm-nm local vaapi=true
export LD=clang++
# less log spam, reproducible
export CFLAGS="${CFLAGS/-g/} -O2 -Wno-unknown-warning-option -Wno-builtin-macro-redefined -Wno-deprecated-declarations"
export CXXFLAGS="${CXXFLAGS/-g/} -O2 -Wno-unknown-warning-option -Wno-builtin-macro-redefined -Wno-deprecated-declarations"
export CPPFLAGS="${CPPFLAGS/-g/} -D__DATE__= -D__TIME__= -D__TIMESTAMP__="
# shellcheck disable=2089 # shellcheck disable=2089
local gn_config=" local gn_config="
clang_base_path=\"/usr\"
custom_toolchain=\"//build/toolchain/linux/unbundle:default\"
host_toolchain=\"//build/toolchain/linux/unbundle:default\"
import(\"//electron/build/args/release.gn\") import(\"//electron/build/args/release.gn\")
blink_enable_generated_code_formatting=false blink_enable_generated_code_formatting=false
cc_wrapper=\"$cc_wrapper\"
chrome_pgo_phase=0
clang_base_path=\"/usr\"
clang_use_chrome_plugins=false clang_use_chrome_plugins=false
clang_version=\"${clang_ver%%.*}\"
custom_toolchain=\"//build/toolchain/linux/unbundle:default\"
disable_fieldtrial_testing_config=true
enable_hangout_services_extension=true
enable_nacl=false
enable_nocompile_tests=false
enable_stripping=false
enable_rust=true
enable_vr=false
fatal_linker_warnings=false fatal_linker_warnings=false
ffmpeg_branding=\"Chrome\" ffmpeg_branding=\"Chrome\"
icu_use_data_file=true host_toolchain=\"//build/toolchain/linux/unbundle:default\"
icu_use_data_file=false
is_cfi=false is_cfi=false
is_clang=true
is_component_ffmpeg=true is_component_ffmpeg=true
is_debug=false is_debug=false
is_official_build=true is_official_build=true
symbol_level=0
treat_warnings_as_errors=false
angle_enable_gl_null=false
build_tflite_with_xnnpack=false
build_with_tflite_lib=false
disable_fieldtrial_testing_config=true
enable_hangout_services_extension=true
enable_library_cdms=false
enable_media_remoting=false
enable_nacl=false
enable_paint_preview=false
enable_reading_list=false
enable_remoting=false
enable_reporting=false
enable_screen_ai_service=false
enable_service_discovery=false
enable_stripping=false
enable_vr=false
ozone_platform_headless=false
link_pulseaudio=true link_pulseaudio=true
proprietary_codecs=true proprietary_codecs=true
regenerate_x11_protos=true
rtc_link_pipewire=true rtc_link_pipewire=true
rtc_use_pipewire=true rtc_use_pipewire=true
rustc_version=\"yes\"
rust_sysroot_absolute=\"/usr\"
symbol_level=$symbol_level
treat_warnings_as_errors=false
use_custom_libcxx=true use_custom_libcxx=true
use_dawn=false use_lld=true
use_gnome_keyring=false
use_pulseaudio=true use_pulseaudio=true
use_sysroot=false use_safe_libstdcxx=false
use_system_cares=true
use_system_freetype=true
use_system_harfbuzz=true
use_system_lcms2=true
use_system_libdrm=true
use_system_libffi=true use_system_libffi=true
use_system_libjpeg=true use_sysroot=false
use_thin_lto=false
use_vaapi=$vaapi
v8_enable_maglev=$maglev
skia_use_dawn=false
use_dawn=false
use_system_ada=false
use_system_base64=true
use_system_cares=true
use_system_histogram=true
use_system_lcms2=true
use_system_libffi=true
use_system_llhttp=false
use_system_nghttp2=true use_system_nghttp2=true
use_system_zlib=true
use_vaapi=true
" "
# shellcheck disable=2086,2090,2116 # shellcheck disable=2086,2090,2116
@ -403,11 +490,15 @@ _configure() {
build() { build() {
export PATH="$PATH:/usr/lib/qt5/bin" export PATH="$PATH:/usr/lib/qt5/bin"
ninja -C out/Release \ # ~1 GB per concurrent job is not enough with gcc
_njobs="${NJOBS:-"$(python3 -c 'import os; print(max((os.cpu_count() - (10 if os.uname().machine == "aarch64" else 8), 1)))')"}"
ninja -C out/Release -j$_njobs \
electron_dist_zip \ electron_dist_zip \
node_gypi_headers \ node_gypi_headers \
node_version_header \ node_version_header
tar_headers
cp -vf out/Release/gen/node_headers/include/node/config.gypi third_party/electron_node/config.gypi
} }
package() { package() {
@ -422,7 +513,12 @@ package() {
mkdir -p "$pkgdir"/usr/include/electron mkdir -p "$pkgdir"/usr/include/electron
mv -v "$builddir"/out/Release/gen/node_headers "$pkgdir"/usr/include/electron (
cd third_party/electron_node/
HEADERS_ONLY=1 python3 ./tools/install.py install "$pkgdir" "/usr/include/electron/node_headers"
)
# required overrides
install -Dm644 out/Release/gen/node_headers/include/node/* -t "$pkgdir"/usr/include/electron/node_headers/include/node
ln -sv /usr/include/electron/node_headers/include/node "$pkgdir"/usr/include/electron/node ln -sv /usr/include/electron/node_headers/include/node "$pkgdir"/usr/include/electron/node
mkdir -p "$pkgdir"/usr/include/electron/node_headers/include/nan mkdir -p "$pkgdir"/usr/include/electron/node_headers/include/nan
@ -448,28 +544,42 @@ lang() {
} }
sha512sums=" sha512sums="
208cbaa48cd929c242dce6d7e4291655a625b84f44f7d5d3ef6b40d55f7ab2269457496afa8fff73bc6c5aa01be6d26e4bdf63371a407f1f888061fbfc778a6d electron-24.2.0-2.tar.zst 97024407a16fb41ec56fcc6df5552165ce4eea34fc51b17ecbf30a7e35406baccf8a3001a795a06d1e92d32e134d9d7a18d59fa74eda1b1bc23b59bc4947929b electron-30.0.9-124.0.6367.233.tar.zst
f88adc643dacf7e7be1b9bb77ff2c172cd04de2a988d3cdca77313122ba3ea93e6781ee675f3d7daf337a2542c0cbf31ffb08413e9f85c287b3a5e950d7d8a31 chromium-VirtualCursor-standard-layout.patch 13c647dc2024e27ae8a4d7e8f1202037a342f4a7054477226665c332029e1b6f1d8b99004c2b2809bcf1e6c19b1359ef5e1c971552d7ced59c5b43d5a836af88 copium-124.5.tar.gz
29bb685e03356a77df5fd347cdf55194cc8b3265c421cc76e54d64edefc329dbcb052deb26b22e8f587ce68456876c071de1b7d258dd0fcc6ee66c875ec4a020 chromium-revert-drop-of-system-java.patch 29bb685e03356a77df5fd347cdf55194cc8b3265c421cc76e54d64edefc329dbcb052deb26b22e8f587ce68456876c071de1b7d258dd0fcc6ee66c875ec4a020 chromium-revert-drop-of-system-java.patch
fa291e941076146d0edd5b96c088240a44a6e0aca3dfc744929655607182d2dc47e6c35ecb419f7c623fcf7f26dc3c4dd924dbf5ed10c3b986283f5ef2f72573 chromium-use-alpine-target.patch 54eb147c0af2ba096d1df375a289b339ee855ab1a9114e7c747753f0274a6bafb7212c1859b7885454c4529d9a5e3bd9559fc14e8e006f23ccd381895fa68d15 compiler.patch
4057cc78f10bfd64092bc35a373869abb1d68b880cdbca70422f39ffd78a929c19c7728d4d4c40709aaba25581148a93ae5343e724849fd35323062ed68753fa disable-dns_config_service.patch
111bc22fb704d97759988268a40d6b356c51b0bd7a8119a694e905ffe21850ff64e91566cd0dd0c9d62fcb46dca8acc821436c34eb0ba78be872ee4f7ec88a7b disable-failing-tests.patch
5fc5c012c1db6cf1ba82f38c6f3f4f5ca3a209e47ac708a74de379b018e0649b7694877c9571ef79002dde875ffc07b458a3355425f1c01867f362c66c2bc1bf fc-cache-version.patch
9200f78bad70e95c648a5e8392d50642190600f655c6baa366ff6467ebad52d3b3f305dad58f3610da67136f4b723557653b174ec5c25be8d8737ee04d9ee09f fix-missing-cstdint-include-musl.patch 9200f78bad70e95c648a5e8392d50642190600f655c6baa366ff6467ebad52d3b3f305dad58f3610da67136f4b723557653b174ec5c25be8d8737ee04d9ee09f fix-missing-cstdint-include-musl.patch
b24563e9a738c00fce7ff2fbdee3d7c024d9125d7c74d9ab90af6bdb16f7ec8419f2c8aa78c0640f6d5d81c17dc2c673a194401d354f466749672729b48ed068 fix-opus.patch
c63dee5044353eb306a39ca1526158c0f003ab310ecb03d1c368dc2a979454590c84b8d3c15484517d5e66bb8add9b231da9abbadf2e50850abd72ac1345c4ab fstatat-32bit.patch
33ee60863cc438ef57ffef92ba4cf67a856a5ffc16138bce241bcf87e47b15154aa86918e793c26f7ec4dc62a445257ad5673ed7001daf22c4043cf6cc57da7f gdbinit.patch 33ee60863cc438ef57ffef92ba4cf67a856a5ffc16138bce241bcf87e47b15154aa86918e793c26f7ec4dc62a445257ad5673ed7001daf22c4043cf6cc57da7f gdbinit.patch
36a764fa73443b47d38050b52dbe6ad2fa8d67201ff4ccdbad13b52308ef165ca046aac6f9609fe35890a6485f0f3e672e78cc41e3e44f3cdc7f145e540524e8 generic-sensor-include.patch
8de65109ece27ea63bd469f2220c56b8c752ba0a50fdf390082a2d5ae74b8e010199126175569f6d5084270dd4e0571e68aec32c0bca8211a6699925b3a09124 import-version.patch 8de65109ece27ea63bd469f2220c56b8c752ba0a50fdf390082a2d5ae74b8e010199126175569f6d5084270dd4e0571e68aec32c0bca8211a6699925b3a09124 import-version.patch
1bb1b31ee3057fb626a2d5eb44698946484e45017310d55026347f18f774453828802438ba14691d81edaad6c96c5df25c4aca772a30674a04feca3df11fccfb libcxx-muslmode.patch c49a1b06e061faa430d66dd5b404ef6c843e4d8a6e9012e963009a161b4726f8eb92c4da8fa710f8861f6e4daa8be5f68abee41a7d9fc02f2a0eb61ce53b5fdd libstdc++13.patch
0e991842e23a4b9133898125eeb39e45e3f86f886eef5d2f0d9a72ee143a3e124b3b4f60be94edd57ce4185bcd69704edb51f76d08fdb6207f5559a08dd41ab0 mman.patch e75f57ae34c97ca1caf15fa4b4106c6c1e79c31ed66869cf92ed9ea0c449886c9511e455047c17c1e9ad8b9a46ad4948511a4f2995a4b6030fb4d1c7ae21d038 mman.patch
99bcc7dd485b404a90c606a96addab1d900852128d44fb8cea8acc7303189ef87c89a7b0e749fd0e10c5ef5f6bf1fadeb5c16a34503cab6a59938ce2653d887e musl-auxv.patch
50c274a420bb8a7f14fcb56e40920dac8f708792a4520789b4987facea459bef88113d5a2b60fa8c57bee6e92bff3617d6b73fa305c8c44614c638971cffd440 musl-sandbox.patch 50c274a420bb8a7f14fcb56e40920dac8f708792a4520789b4987facea459bef88113d5a2b60fa8c57bee6e92bff3617d6b73fa305c8c44614c638971cffd440 musl-sandbox.patch
e7163ac5810ac85366cef2447412287c856e3d67c6b77f219a6e5a418b1965b98e449c409424ad0704a5bded9355dd0aec3dc4585918ce5a2ab36c079707afe2 musl-tid-caching.patch e7163ac5810ac85366cef2447412287c856e3d67c6b77f219a6e5a418b1965b98e449c409424ad0704a5bded9355dd0aec3dc4585918ce5a2ab36c079707afe2 musl-tid-caching.patch
92eb002718026611f5542362ad69b67f0a398ff71b3fca5c05d55cb5c6f9f29334e5e127bb4860cfaa3fba0f0d4c901e2b98808217e7dc02e254a64a5c9521aa musl-v8-monotonic-pthread-cont_timedwait.patch 92eb002718026611f5542362ad69b67f0a398ff71b3fca5c05d55cb5c6f9f29334e5e127bb4860cfaa3fba0f0d4c901e2b98808217e7dc02e254a64a5c9521aa musl-v8-monotonic-pthread-cont_timedwait.patch
8cc774e8d84e434960222c0497ad8193ae35c0732f98d3282d5fd4b4930f914809eec97832c199517ca89ca6b9d1d011db5ce533c40c68ce5fa464609d131a23 no-execinfo.patch 73bca6c6f9873f2f11cef04f3f41f71e0ae86e7e2d77e14db4298675fec390744c5081f6fdb14052e5ee2b5885be1198c3aa6068eb2b656d1a665c0c3f36e708 no-execinfo.patch
b5479874d125ee95a311295f227f8881a83023ec34fded7a6160b3ae32ea3ba0f2b833a9fb264c57f3d22746b6d8b00bdc8eb2ff86c43c412d6d3b55ae15b16b no-mallinfo.patch 8e17101d69e23b456a9c03dc2fe95bcd56846389707ba6f4720192a9e9168406d20d9168dbebbb3a47d921ec92e478f0e390f46e6b9bb43a34dda217c6e6448b no-mallinfo.patch
e4c4e5bc6f828f9c883dd418c0ba01887949c29c311f76206a1ec29f620b0c0ba0452949dc2778a9c46ea066405857536964a36436a68eecf7da7952736333cf no-res-ninit-nclose.patch e4c4e5bc6f828f9c883dd418c0ba01887949c29c311f76206a1ec29f620b0c0ba0452949dc2778a9c46ea066405857536964a36436a68eecf7da7952736333cf no-res-ninit-nclose.patch
2bc334352cdf0e523003c882e1a29623896bf3f3543469b23a532cb5ff32a75e7f23dc94abfaed4a97e24d26bdc51b780728583f1e7e7860f00c49e4d91edfa9 quiche-arena-size.patch 6dc4d8dc92e685dace62265a1ddb3aebc558aed54d20ff6d36b030be0c48d7e84662326c31363612492574d9a03c62653cdc21a60995b97dee1d75cae86a9f9b no-sandbox-settls.patch
8c33808ede31dd3b8eb951991e034751949756c4e6c2cd0e19823e95fddf5f9d94424bbd73ffe6dd965df2b0887fc5c42ab8a97c64b553d47662e90651b84f13 scoped-file-no-close.patch d4ac7f350806b4410ccb1df3b0ad7e90a7b6d724a16919761aa2d47a6f21008c7374da528b05b754ee712b85d23adfb113c7f7b9ca2ed5b47644fe3ea0cb9119 partalloc-no-tagging-arm64.patch
8e1aca983890c78d81a6f888b2cf1aa42878d1f8523e87d63b800e1e468cbfd33e5ff6a0975775ca222fe82f30c6497da95505da01b091c8776a44c98ac86f0f perfetto-libstdc++.patch
2eb434b4fc6aee77026492644cd86772a543d9845f112a75cd4c3e1f25c9435cc31f8454c1c73223451fc9be69b86e822ff68821978f67f2fc8bcba50296d8e0 pvalloc.patch
803b8117c65132f76bec42054a4b2257a078b15b07fd08645fec2dfd51aa4e0075a9015300cd579d4ae0d757d9850b9988e080cfc2eea093f6684fdf82c4722c random-fixes.patch
86f612dd2b39602984a75b1b11c2ab8bc8cc6b4e78fae998570a777a6901ae45fdcdb22e46dd006dab703a0674e64c72cf8120af2dc5b9e78004f402c7e65358 quiche-array.patch
b3beb98b539fe160fbc493ba410ae0f68540cc4b6834f1f8ce9a22c3f4f59ef5d583ad48793e10549fd02a701f833a3969791ef4524322cd1e715ca5bf226bc8 system-zstd.patch
e48693e6b7aeebf69a5acbf80d9a35defe4c23835121dfeb58b051ac7c527e758a41004f4d193274fe1b01c0bfb1dbc77b09cb6a404a3fdee507a2918afb0edb temp-failure-retry.patch e48693e6b7aeebf69a5acbf80d9a35defe4c23835121dfeb58b051ac7c527e758a41004f4d193274fe1b01c0bfb1dbc77b09cb6a404a3fdee507a2918afb0edb temp-failure-retry.patch
905565c10f5e5600e7d4db965c892cc45009a258e9995da958974d838ace469e1db1019195307e8807860d5b55ba6bfeea478b1f39a9b99e82c619b2816a1a22 icon.patch 914ccf649d7771f19f209ab97f99c481aebc6f66174d68e8b539f6ad4a70bc8cb0fae2df6dadbf0415958ffb3574c420fe029079dcce45f5e5add4db2e903566 yes-musl.patch
465107da7818b237e3c144a318ab80c3c9343b51ed38b8971ef204692d13346929becbe94cefad4c153788d3a200642143584d5ca070f6304e768ba2139c19ec icon.patch
e05180199ee1d559e4e577cedd3e589844ecf40d98a86321bf1bea5607b02eeb5feb486deddae40e1005b644550331f6b8500177aa7e79bcb3750d3c1ceb76c3 python-jinja-3.10.patch e05180199ee1d559e4e577cedd3e589844ecf40d98a86321bf1bea5607b02eeb5feb486deddae40e1005b644550331f6b8500177aa7e79bcb3750d3c1ceb76c3 python-jinja-3.10.patch
2aa340854316f1284217c0ca17cbf44953684ad6c7da90815117df30928612eb9fb9ffb734b948dfc309cd25d1a67cd57f77aac2d052a3dd9aca07a3a58cbb30 webpack-hash.patch 2aa340854316f1284217c0ca17cbf44953684ad6c7da90815117df30928612eb9fb9ffb734b948dfc309cd25d1a67cd57f77aac2d052a3dd9aca07a3a58cbb30 webpack-hash.patch
08460b15037511a4e5469ceac6ae5dd4db4c8cb87c129aaaf40ba58b16c60b8a307ffdd85805efead235758abed09ec31db1ef4cf9159f7b9acdcee3031bc96c default.conf ebb18a0e2eba4b4606e900fa82f4b57fe91dcbdc943e17544bccb3c9a011a49b4331cdbee59629e44b80184bad4ea54ec887c0bfcd00cda8d5686060dbf365e3 unbundle-node.patch
e8ea87c547546011c4c8fc2de30e4f443b85cd4cfcff92808e2521d2f9ada03feefb8e1b0cf0f6b460919c146e56ef8d5ad4bb5e2461cc5247c30d92eb4d068e default.conf
191559fc7aa1ea0353c6fb0cc321ee1d5803a0e44848c8be941cfab96277b0de6a59962d373e2a2a1686c8f9be2bcf2d2f33706759a339a959e297d3f7fda463 electron.desktop 191559fc7aa1ea0353c6fb0cc321ee1d5803a0e44848c8be941cfab96277b0de6a59962d373e2a2a1686c8f9be2bcf2d2f33706759a339a959e297d3f7fda463 electron.desktop
03750694e5e0b66f084c6e43135e60be15abb059e23486346ee4352dcc236984f2f35467b47f2b2ad46c98c22091cc2b978de8e73680febadba169d960f13f9f electron-launcher.sh 5f7ba5ad005f196facec1c0f26108356b64cafb1e5cfa462ff714a33b8a4c757ac00bfcb080da09eb5b65032f8eb245d9676a61ec554515d125ed63912708648 electron-launcher.sh
" "

View file

@ -1,219 +0,0 @@
needed for libstdc++11 + clang only
crbug: https://bugs.chromium.org/p/chromium/issues/detail?id=1189788
--
diff --git a/sql/recover_module/btree.cc b/sql/recover_module/btree.cc
index 9ecaafe..839318a 100644
--- a/sql/recover_module/btree.cc
+++ b/sql/recover_module/btree.cc
@@ -135,16 +135,25 @@
"Move the destructor to the .cc file if it's non-trival");
#endif // !DCHECK_IS_ON()
-LeafPageDecoder::LeafPageDecoder(DatabasePageReader* db_reader) noexcept
- : page_id_(db_reader->page_id()),
- db_reader_(db_reader),
- cell_count_(ComputeCellCount(db_reader)),
- next_read_index_(0),
- last_record_size_(0) {
+void LeafPageDecoder::Initialize(DatabasePageReader* db_reader) {
+ DCHECK(db_reader);
DCHECK(IsOnValidPage(db_reader));
+ page_id_ = db_reader->page_id();
+ db_reader_ = db_reader;
+ cell_count_ = ComputeCellCount(db_reader);
+ next_read_index_ = 0;
+ last_record_size_ = 0;
DCHECK(DatabasePageReader::IsValidPageId(page_id_));
}
+void LeafPageDecoder::Reset() {
+ db_reader_ = nullptr;
+ page_id_ = 0;
+ cell_count_ = 0;
+ next_read_index_ = 0;
+ last_record_size_ = 0;
+}
+
bool LeafPageDecoder::TryAdvance() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(CanAdvance());
diff --git a/sql/recover_module/btree.h b/sql/recover_module/btree.h
index d76d076..33114b0 100644
--- a/sql/recover_module/btree.h
+++ b/sql/recover_module/btree.h
@@ -102,7 +102,7 @@
//
// |db_reader| must have been used to read an inner page of a table B-tree.
// |db_reader| must outlive this instance.
- explicit LeafPageDecoder(DatabasePageReader* db_reader) noexcept;
+ explicit LeafPageDecoder() noexcept = default;
~LeafPageDecoder() noexcept = default;
LeafPageDecoder(const LeafPageDecoder&) = delete;
@@ -150,6 +150,15 @@
// read as long as CanAdvance() returns true.
bool TryAdvance();
+ // Initialize with DatabasePageReader
+ void Initialize(DatabasePageReader* db_reader);
+
+ // Reset internal DatabasePageReader
+ void Reset();
+
+ // True if DatabasePageReader is valid
+ bool IsValid() { return (db_reader_ != nullptr); }
+
// True if the given reader may point to an inner page in a table B-tree.
//
// The last ReadPage() call on |db_reader| must have succeeded.
@@ -163,14 +172,14 @@
static int ComputeCellCount(DatabasePageReader* db_reader);
// The number of the B-tree page this reader is reading.
- const int64_t page_id_;
+ int64_t page_id_;
// Used to read the tree page.
//
// Raw pointer usage is acceptable because this instance's owner is expected
// to ensure that the DatabasePageReader outlives this.
- DatabasePageReader* const db_reader_;
+ DatabasePageReader* db_reader_;
// Caches the ComputeCellCount() value for this reader's page.
- const int cell_count_ = ComputeCellCount(db_reader_);
+ int cell_count_;
// The reader's cursor state.
//
diff --git a/sql/recover_module/cursor.cc b/sql/recover_module/cursor.cc
index 0029ff9..42548bc 100644
--- a/sql/recover_module/cursor.cc
+++ b/sql/recover_module/cursor.cc
@@ -26,7 +26,7 @@
int VirtualCursor::First() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
inner_decoders_.clear();
- leaf_decoder_ = nullptr;
+ leaf_decoder_.Reset();
AppendPageDecoder(table_->root_page_id());
return Next();
@@ -36,18 +36,18 @@
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
record_reader_.Reset();
- while (!inner_decoders_.empty() || leaf_decoder_.get()) {
- if (leaf_decoder_.get()) {
- if (!leaf_decoder_->CanAdvance()) {
+ while (!inner_decoders_.empty() || leaf_decoder_.IsValid()) {
+ if (leaf_decoder_.IsValid()) {
+ if (!leaf_decoder_.CanAdvance()) {
// The leaf has been exhausted. Remove it from the DFS stack.
- leaf_decoder_ = nullptr;
+ leaf_decoder_.Reset();
continue;
}
- if (!leaf_decoder_->TryAdvance())
+ if (!leaf_decoder_.TryAdvance())
continue;
- if (!payload_reader_.Initialize(leaf_decoder_->last_record_size(),
- leaf_decoder_->last_record_offset())) {
+ if (!payload_reader_.Initialize(leaf_decoder_.last_record_size(),
+ leaf_decoder_.last_record_offset())) {
continue;
}
if (!record_reader_.Initialize())
@@ -99,13 +99,13 @@
int64_t VirtualCursor::RowId() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(record_reader_.IsInitialized());
- DCHECK(leaf_decoder_.get());
- return leaf_decoder_->last_record_rowid();
+ DCHECK(leaf_decoder_.IsValid());
+ return leaf_decoder_.last_record_rowid();
}
void VirtualCursor::AppendPageDecoder(int page_id) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
- DCHECK(leaf_decoder_.get() == nullptr)
+ DCHECK(!leaf_decoder_.IsValid())
<< __func__
<< " must only be called when the current path has no leaf decoder";
@@ -113,7 +113,7 @@
return;
if (LeafPageDecoder::IsOnValidPage(&db_reader_)) {
- leaf_decoder_ = std::make_unique<LeafPageDecoder>(&db_reader_);
+ leaf_decoder_.Initialize(&db_reader_);
return;
}
diff --git a/sql/recover_module/cursor.h b/sql/recover_module/cursor.h
index afcd690..b15c31d 100644
--- a/sql/recover_module/cursor.h
+++ b/sql/recover_module/cursor.h
@@ -129,7 +129,7 @@
std::vector<std::unique_ptr<InnerPageDecoder>> inner_decoders_;
// Decodes the leaf page containing records.
- std::unique_ptr<LeafPageDecoder> leaf_decoder_;
+ LeafPageDecoder leaf_decoder_;
SEQUENCE_CHECKER(sequence_checker_);
};
diff --git a/sql/recover_module/pager.cc b/sql/recover_module/pager.cc
index 58e75de..5fe9620 100644
--- a/sql/recover_module/pager.cc
+++ b/sql/recover_module/pager.cc
@@ -23,8 +23,7 @@
"ints are not appropriate for representing page IDs");
DatabasePageReader::DatabasePageReader(VirtualTable* table)
- : page_data_(std::make_unique<uint8_t[]>(table->page_size())),
- table_(table) {
+ : page_data_(), table_(table) {
DCHECK(table != nullptr);
DCHECK(IsValidPageSize(table->page_size()));
}
@@ -57,8 +56,8 @@
std::numeric_limits<int64_t>::max(),
"The |read_offset| computation above may overflow");
- int sqlite_status =
- RawRead(sqlite_file, read_size, read_offset, page_data_.get());
+ int sqlite_status = RawRead(sqlite_file, read_size, read_offset,
+ const_cast<uint8_t*>(page_data_.data()));
// |page_id_| needs to be set to kInvalidPageId if the read failed.
// Otherwise, future ReadPage() calls with the previous |page_id_| value
diff --git a/sql/recover_module/pager.h b/sql/recover_module/pager.h
index 0e388ddc..99314e3 100644
--- a/sql/recover_module/pager.h
+++ b/sql/recover_module/pager.h
@@ -5,6 +5,7 @@
#ifndef SQL_RECOVER_MODULE_PAGER_H_
#define SQL_RECOVER_MODULE_PAGER_H_
+#include <array>
#include <cstdint>
#include <memory>
@@ -70,7 +71,7 @@
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK_NE(page_id_, kInvalidPageId)
<< "Successful ReadPage() required before accessing pager state";
- return page_data_.get();
+ return page_data_.data();
}
// The number of bytes in the page read by the last ReadPage() call.
@@ -137,7 +138,7 @@
int page_id_ = kInvalidPageId;
// Stores the bytes of the last page successfully read by ReadPage().
// The content is undefined if the last call to ReadPage() did not succeed.
- const std::unique_ptr<uint8_t[]> page_data_;
+ const std::array<uint8_t, kMaxPageSize> page_data_;
// Raw pointer usage is acceptable because this instance's owner is expected
// to ensure that the VirtualTable outlives this.
VirtualTable* const table_;


@ -1,30 +0,0 @@
building for arm targets passes --target to clang by default, because the build
assumes it is cross compiling (so it passes --target as if the host were
different, instead of using the compiler's default triple)
removing this entirely probably also works, but to be safe, pass the alpine clang host triple
--
--- a/build/config/compiler/BUILD.gn
+++ b/build/config/compiler/BUILD.gn
@@ -915,8 +915,8 @@ config("compiler_cpu_abi") {
} else if (current_cpu == "arm") {
if (is_clang && !is_android && !is_nacl &&
!(is_chromeos_lacros && is_chromeos_device)) {
- cflags += [ "--target=arm-linux-gnueabihf" ]
- ldflags += [ "--target=arm-linux-gnueabihf" ]
+ cflags += [ "--target=armv7-alpine-linux-musleabihf" ]
+ ldflags += [ "--target=armv7-alpine-linux-musleabihf" ]
}
if (!is_nacl) {
cflags += [
@@ -930,8 +930,8 @@ config("compiler_cpu_abi") {
} else if (current_cpu == "arm64") {
if (is_clang && !is_android && !is_nacl && !is_fuchsia &&
!(is_chromeos_lacros && is_chromeos_device)) {
- cflags += [ "--target=aarch64-linux-gnu" ]
- ldflags += [ "--target=aarch64-linux-gnu" ]
+ cflags += [ "--target=aarch64-alpine-linux-musl" ]
+ ldflags += [ "--target=aarch64-alpine-linux-musl" ]
}
if (is_android) {
# Outline atomics crash on Exynos 9810. http://crbug.com/1272795


@ -0,0 +1,132 @@
--- ./build/config/compiler/BUILD.gn.orig
+++ ./build/config/compiler/BUILD.gn
@@ -613,24 +613,6 @@
}
}
- # TODO(crbug.com/1488374): This causes binary size growth and potentially
- # other problems.
- # TODO(crbug.com/1491036): This isn't supported by Cronet's mainline llvm version.
- if (default_toolchain != "//build/toolchain/cros:target" &&
- !llvm_android_mainline) {
- cflags += [
- "-mllvm",
- "-split-threshold-for-reg-with-hint=0",
- ]
- if (use_thin_lto && is_a_target_toolchain) {
- if (is_win) {
- ldflags += [ "-mllvm:-split-threshold-for-reg-with-hint=0" ]
- } else {
- ldflags += [ "-Wl,-mllvm,-split-threshold-for-reg-with-hint=0" ]
- }
- }
- }
-
# TODO(crbug.com/1235145): Investigate why/if this should be needed.
if (is_win) {
cflags += [ "/clang:-ffp-contract=off" ]
@@ -1005,17 +987,6 @@
# `-nodefaultlibs` from the linker invocation from Rust, which would be used
# to compile dylibs on Android, such as for constructing unit test APKs.
"-Cdefault-linker-libraries",
-
- # To make Rust .d files compatible with ninja
- "-Zdep-info-omit-d-target",
-
- # If a macro panics during compilation, show which macro and where it is
- # defined.
- "-Zmacro-backtrace",
-
- # For deterministic builds, keep the local machine's current working
- # directory from appearing in build outputs.
- "-Zremap-cwd-prefix=.",
]
if (!is_win || force_rustc_color_output) {
@@ -1182,8 +1153,8 @@
} else if (current_cpu == "arm") {
if (is_clang && !is_android && !is_nacl &&
!(is_chromeos_lacros && is_chromeos_device)) {
- cflags += [ "--target=arm-linux-gnueabihf" ]
- ldflags += [ "--target=arm-linux-gnueabihf" ]
+ cflags += [ "--target=armv7-alpine-linux-musleabihf" ]
+ ldflags += [ "--target=armv7-alpine-linux-musleabihf" ]
}
if (!is_nacl) {
cflags += [
@@ -1197,8 +1168,8 @@
} else if (current_cpu == "arm64") {
if (is_clang && !is_android && !is_nacl && !is_fuchsia &&
!(is_chromeos_lacros && is_chromeos_device)) {
- cflags += [ "--target=aarch64-linux-gnu" ]
- ldflags += [ "--target=aarch64-linux-gnu" ]
+ cflags += [ "--target=aarch64-alpine-linux-musl" ]
+ ldflags += [ "--target=aarch64-alpine-linux-musl" ]
}
} else if (current_cpu == "mipsel" && !is_nacl) {
ldflags += [ "-Wl,--hash-style=sysv" ]
@@ -1983,7 +1954,7 @@
defines = [ "_HAS_NODISCARD" ]
}
} else {
- cflags = [ "-Wall" ]
+ cflags = []
if (is_clang) {
# Enable extra warnings for chromium_code when we control the compiler.
cflags += [ "-Wextra" ]
--- ./build/config/rust.gni.orig
+++ ./build/config/rust.gni
@@ -186,11 +186,11 @@
rust_abi_target = ""
if (is_linux || is_chromeos) {
if (current_cpu == "arm64") {
- rust_abi_target = "aarch64-unknown-linux-gnu"
+ rust_abi_target = "aarch64-alpine-linux-musl"
} else if (current_cpu == "x86") {
- rust_abi_target = "i686-unknown-linux-gnu"
+ rust_abi_target = "i586-alpine-linux-musl"
} else if (current_cpu == "x64") {
- rust_abi_target = "x86_64-unknown-linux-gnu"
+ rust_abi_target = "x86_64-alpine-linux-musl"
} else if (current_cpu == "arm") {
if (arm_float_abi == "hard") {
float_suffix = "hf"
@@ -199,13 +199,13 @@
}
if (arm_arch == "armv7-a" || arm_arch == "armv7") {
# No way to inform Rust about the -a suffix.
- rust_abi_target = "armv7-unknown-linux-gnueabi" + float_suffix
+ rust_abi_target = "armv7-alpine-linux-musleabi" + float_suffix
} else {
- rust_abi_target = "arm-unknown-linux-gnueabi" + float_suffix
+ rust_abi_target = "armv6-alpine-linux-musleabi" + float_suffix
}
} else {
# Best guess for other future platforms.
- rust_abi_target = current_cpu + "-unknown-linux-gnu"
+ rust_abi_target = current_cpu + "-alpine-linux-musl"
}
} else if (is_android) {
import("//build/config/android/abi.gni")
--- ./build/config/clang/BUILD.gn.orig
+++ ./build/config/clang/BUILD.gn
@@ -128,14 +128,15 @@
} else if (is_apple) {
_dir = "darwin"
} else if (is_linux || is_chromeos) {
+ _dir = "linux"
if (current_cpu == "x64") {
- _dir = "x86_64-unknown-linux-gnu"
+ _suffix = "-x86_64"
} else if (current_cpu == "x86") {
- _dir = "i386-unknown-linux-gnu"
+ _suffix = "-i386"
} else if (current_cpu == "arm") {
- _dir = "armv7-unknown-linux-gnueabihf"
+ _suffix = "-armhf"
} else if (current_cpu == "arm64") {
- _dir = "aarch64-unknown-linux-gnu"
+ _suffix = "-aarch64"
} else {
assert(false) # Unhandled cpu type
}


@ -1,372 +0,0 @@
From d0c1f5ee1f56c165bdf550c9e3be0d7313587b80 Mon Sep 17 00:00:00 2001
From: Elly Fong-Jones <ellyjones@chromium.org>
Date: Wed, 18 Jan 2023 22:33:11 +0000
Subject: [PATCH] media: untangle MediaRouterUI lifetimes
Currently, MediaRouterUI is owned by MediaItemUIDeviceSelectorView.
There is an observer method named "OnControllerInvalidated" which
MediaItemUIDeviceSelectorView reacts to by deleting the MediaRouterUI it
owns. However, OnControllerInvalidated can actually be called in two
different situations:
* From MediaRouterUI::TakeMediaRouteStarter(), in which case the
MediaRouterUI object is *not* being destroyed, but should be, because
it can't be safely used after TakeMediaRouteStarter() ends;
* From MediaRouterUI::~MediaRouterUI(), in which case the MediaRouterUI
object *is* being destroyed already and should not be.
In the second case, only the fact that libc++ nulls out unique_ptr
before destroying the pointed-to object saves us from a use-after-free;
under libstdc++, we UaF immediately by re-entering the destructor. Even
under libc++ though this is still very dangerous, because any observers
that happened to be registered after MediaItemUIDeviceSelectorView will
be invoked after the destruction of the object they're observing. Right
now there are no such other observers, but the fact remains that this
interface is basically a UaF timebomb.
This change separates "this object is about to be destroyed" (an
observable state) from "please destroy this object, it is no longer
useful" (a callback that is made to the object's owner) by:
1. Renaming OnControllerInvalidated to OnControllerDestroying, to make
it very clear what is happening to the object, and
2. Adding a RegisterDestructor method to CastDialogController, which
allows MediaItemUIDeviceSelectorView to pass a callback into
MediaRouterUI which MediaRouterUI can use to arrange for its own
destruction.
This is still a bit tangled and ungainly, but it's safe. A fuller
writeup is on the linked bug.
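As a rough sketch of the ownership pattern described above (simplified names, with std::function standing in for base::OnceClosure; the real classes are CastDialogController, MediaRouterUI and MediaItemUIDeviceSelectorView):

    #include <functional>
    #include <iostream>
    #include <memory>
    #include <utility>

    // Stands in for MediaRouterUI / CastDialogController.
    class Controller {
     public:
      ~Controller() {
        // Destruction notification only; observers must not delete us here.
        std::cout << "OnControllerDestroying\n";
      }
      // The owner registers how this object should be destroyed.
      void RegisterDestructor(std::function<void()> destructor) {
        destructor_ = std::move(destructor);
      }
      // Once the route starter is taken the object is no longer usable, so it
      // asks its owner to destroy it; this may delete `this`, so it must be
      // the last thing the method does.
      void TakeStarter() {
        auto destroy = std::move(destructor_);
        if (destroy) destroy();  // may destroy `this`
      }
     private:
      std::function<void()> destructor_;
    };

    // Stands in for MediaItemUIDeviceSelectorView, which owns the controller.
    class Owner {
     public:
      Owner() : controller_(std::make_unique<Controller>()) {
        controller_->RegisterDestructor([this] { controller_.reset(); });
      }
      void UseAndInvalidate() { controller_->TakeStarter(); }
     private:
      std::unique_ptr<Controller> controller_;
    };

    int main() {
      Owner owner;
      owner.UseAndInvalidate();  // controller destroyed exactly once, by its owner
    }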
Fixed: 1407202
Change-Id: Id9410de1fbf2cb42f13957dde316b7c9259f192f
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/4165967
Reviewed-by: Peter Kasting <pkasting@chromium.org>
Reviewed-by: Takumi Fujimoto <takumif@chromium.org>
Commit-Queue: Elly Fong-Jones <ellyjones@chromium.org>
Cr-Commit-Position: refs/heads/main@{#1094110}
---
diff --git a/chrome/browser/ui/media_router/cast_dialog_controller.h b/chrome/browser/ui/media_router/cast_dialog_controller.h
index 2a8de976..c3c0553 100644
--- a/chrome/browser/ui/media_router/cast_dialog_controller.h
+++ b/chrome/browser/ui/media_router/cast_dialog_controller.h
@@ -24,10 +24,12 @@
public:
virtual ~Observer() = default;
- virtual void OnModelUpdated(const CastDialogModel& model) = 0;
+ virtual void OnModelUpdated(const CastDialogModel& model) {}
- // Observer should drop its reference to the controller when this is called.
- virtual void OnControllerInvalidated() = 0;
+ // Notifies observers that the observed object is being destroyed. Observers
+ // MUST NOT try to destroy the observed object in response - to manage the
+ // lifetime of a CastDialogController, use RegisterDestructor() below.
+ virtual void OnControllerDestroying() {}
};
virtual ~CastDialogController() = default;
@@ -55,6 +57,16 @@
// intended that this API should only be used to transfer ownership to some
// new component that will want to start casting on this dialog box's behalf.
virtual std::unique_ptr<MediaRouteStarter> TakeMediaRouteStarter() = 0;
+
+ // Registers a callback for when the CastDialogController has given up
+ // ownership of its MediaRouteStarter and is no longer safe to use. The
+ // provided closure must destroy |this| or otherwise ensure it is never used
+ // again. This method can only be called once.
+ //
+ // TODO(https://crbug.com/1408494): It's awkward that CastDialogController has
+ // a state where it exists but is unsafe to use, and doubly awkward that we
+ // have to paper over that with this callback. Can that be fixed?
+ virtual void RegisterDestructor(base::OnceClosure destructor) = 0;
};
} // namespace media_router
diff --git a/chrome/browser/ui/media_router/media_router_ui.cc b/chrome/browser/ui/media_router/media_router_ui.cc
index 1865115f..644d131 100644
--- a/chrome/browser/ui/media_router/media_router_ui.cc
+++ b/chrome/browser/ui/media_router/media_router_ui.cc
@@ -83,6 +83,9 @@
MediaRouterUI::~MediaRouterUI() {
if (media_route_starter_)
DetachFromMediaRouteStarter();
+ for (CastDialogController::Observer& observer : observers_) {
+ observer.OnControllerDestroying();
+ }
}
// static
@@ -145,9 +148,6 @@
}
void MediaRouterUI::DetachFromMediaRouteStarter() {
- for (CastDialogController::Observer& observer : observers_)
- observer.OnControllerInvalidated();
-
media_route_starter()->RemovePresentationRequestSourceObserver(this);
media_route_starter()->RemoveMediaSinkWithCastModesObserver(this);
}
@@ -181,8 +181,16 @@
std::unique_ptr<MediaRouteStarter> MediaRouterUI::TakeMediaRouteStarter() {
DCHECK(media_route_starter_) << "MediaRouteStarter already taken!";
- DetachFromMediaRouteStarter();
- return std::move(media_route_starter_);
+ auto starter = std::move(media_route_starter_);
+ if (destructor_) {
+ std::move(destructor_).Run(); // May destroy `this`.
+ }
+ return starter;
+}
+
+void MediaRouterUI::RegisterDestructor(base::OnceClosure destructor) {
+ DCHECK(!destructor_);
+ destructor_ = std::move(destructor);
}
bool MediaRouterUI::CreateRoute(const MediaSink::Id& sink_id,
diff --git a/chrome/browser/ui/media_router/media_router_ui.h b/chrome/browser/ui/media_router/media_router_ui.h
index 5c2f14e..7afe775 100644
--- a/chrome/browser/ui/media_router/media_router_ui.h
+++ b/chrome/browser/ui/media_router/media_router_ui.h
@@ -100,8 +100,10 @@
void StopCasting(const std::string& route_id) override;
void ClearIssue(const Issue::Id& issue_id) override;
// Note that |MediaRouterUI| should not be used after |TakeMediaRouteStarter|
- // is called.
+ // is called. To enforce that, |TakeMediaRouteStarter| calls the destructor
+ // callback given to |RegisterDestructor| to destroy itself.
std::unique_ptr<MediaRouteStarter> TakeMediaRouteStarter() override;
+ void RegisterDestructor(base::OnceClosure destructor) override;
// Requests a route be created from the source mapped to
// |cast_mode|, to the sink given by |sink_id|.
@@ -337,6 +339,8 @@
raw_ptr<MediaRouter> router_;
raw_ptr<LoggerImpl> logger_;
+ base::OnceClosure destructor_;
+
// NOTE: Weak pointers must be invalidated before all other member variables.
// Therefore |weak_factory_| must be placed at the end.
base::WeakPtrFactory<MediaRouterUI> weak_factory_{this};
diff --git a/chrome/browser/ui/media_router/media_router_ui_unittest.cc b/chrome/browser/ui/media_router/media_router_ui_unittest.cc
index 2cc243d1..c33437b 100644
--- a/chrome/browser/ui/media_router/media_router_ui_unittest.cc
+++ b/chrome/browser/ui/media_router/media_router_ui_unittest.cc
@@ -80,11 +80,11 @@
}
MOCK_METHOD1(OnModelUpdated, void(const CastDialogModel& model));
- void OnControllerInvalidated() override {
+ void OnControllerDestroying() override {
controller_ = nullptr;
- OnControllerInvalidatedInternal();
+ OnControllerDestroyingInternal();
}
- MOCK_METHOD0(OnControllerInvalidatedInternal, void());
+ MOCK_METHOD0(OnControllerDestroyingInternal, void());
private:
raw_ptr<CastDialogController> controller_ = nullptr;
@@ -295,7 +295,7 @@
})));
NotifyUiOnRoutesUpdated({route});
- EXPECT_CALL(observer, OnControllerInvalidatedInternal());
+ EXPECT_CALL(observer, OnControllerDestroyingInternal());
ui_.reset();
}
diff --git a/chrome/browser/ui/views/global_media_controls/media_item_ui_device_selector_view.cc b/chrome/browser/ui/views/global_media_controls/media_item_ui_device_selector_view.cc
index 34dad46..d843bba 100644
--- a/chrome/browser/ui/views/global_media_controls/media_item_ui_device_selector_view.cc
+++ b/chrome/browser/ui/views/global_media_controls/media_item_ui_device_selector_view.cc
@@ -222,6 +222,11 @@
if (cast_controller) {
cast_controller_ = std::move(cast_controller);
cast_controller_->AddObserver(this);
+ cast_controller_->RegisterDestructor(
+ base::BindOnce(&MediaItemUIDeviceSelectorView::DestroyCastController,
+ // Unretained is safe: this callback is held by
+ // cast_controller_, which is owned by this object.
+ base::Unretained(this)));
}
}
@@ -499,10 +504,6 @@
observer.OnMediaItemUIDeviceSelectorUpdated(device_entry_ui_map_);
}
-void MediaItemUIDeviceSelectorView::OnControllerInvalidated() {
- cast_controller_.reset();
-}
-
void MediaItemUIDeviceSelectorView::OnDeviceSelected(int tag) {
auto it = device_entry_ui_map_.find(tag);
DCHECK(it != device_entry_ui_map_.end());
@@ -658,5 +659,9 @@
weak_ptr_factory_.GetWeakPtr()));
}
+void MediaItemUIDeviceSelectorView::DestroyCastController() {
+ cast_controller_.reset();
+}
+
BEGIN_METADATA(MediaItemUIDeviceSelectorView, views::View)
END_METADATA
diff --git a/chrome/browser/ui/views/global_media_controls/media_item_ui_device_selector_view.h b/chrome/browser/ui/views/global_media_controls/media_item_ui_device_selector_view.h
index e950565..222fc20 100644
--- a/chrome/browser/ui/views/global_media_controls/media_item_ui_device_selector_view.h
+++ b/chrome/browser/ui/views/global_media_controls/media_item_ui_device_selector_view.h
@@ -81,7 +81,6 @@
// media_router::CastDialogController::Observer
void OnModelUpdated(const media_router::CastDialogModel& model) override;
- void OnControllerInvalidated() override;
// MediaItemUIFooterView::Delegate
void OnDeviceSelected(int tag) override;
@@ -121,6 +120,7 @@
void RecordCastDeviceCount();
DeviceEntryUI* GetDeviceEntryUI(views::View* view) const;
void RegisterAudioDeviceCallbacks();
+ void DestroyCastController();
bool has_expand_button_been_shown_ = false;
bool have_devices_been_shown_ = false;
diff --git a/chrome/browser/ui/views/global_media_controls/media_item_ui_device_selector_view_unittest.cc b/chrome/browser/ui/views/global_media_controls/media_item_ui_device_selector_view_unittest.cc
index c3bcc6cc..6ae3dde8 100644
--- a/chrome/browser/ui/views/global_media_controls/media_item_ui_device_selector_view_unittest.cc
+++ b/chrome/browser/ui/views/global_media_controls/media_item_ui_device_selector_view_unittest.cc
@@ -156,6 +156,7 @@
MOCK_METHOD1(ClearIssue, void(const media_router::Issue::Id& issue_id));
MOCK_METHOD0(TakeMediaRouteStarter,
std::unique_ptr<media_router::MediaRouteStarter>());
+ MOCK_METHOD1(RegisterDestructor, void(base::OnceClosure));
};
} // anonymous namespace
diff --git a/chrome/browser/ui/views/media_router/cast_dialog_coordinator_unittest.cc b/chrome/browser/ui/views/media_router/cast_dialog_coordinator_unittest.cc
index f6c80d6a..2dedc7e 100644
--- a/chrome/browser/ui/views/media_router/cast_dialog_coordinator_unittest.cc
+++ b/chrome/browser/ui/views/media_router/cast_dialog_coordinator_unittest.cc
@@ -40,6 +40,7 @@
MOCK_METHOD(void, StopCasting, (const std::string& route_id));
MOCK_METHOD(void, ClearIssue, (const Issue::Id& issue_id));
MOCK_METHOD(std::unique_ptr<MediaRouteStarter>, TakeMediaRouteStarter, ());
+ MOCK_METHOD(void, RegisterDestructor, (base::OnceClosure));
};
class CastDialogCoordinatorTest : public TestWithBrowserView {
diff --git a/chrome/browser/ui/views/media_router/cast_dialog_view.cc b/chrome/browser/ui/views/media_router/cast_dialog_view.cc
index e3c7dadb..711d081 100644
--- a/chrome/browser/ui/views/media_router/cast_dialog_view.cc
+++ b/chrome/browser/ui/views/media_router/cast_dialog_view.cc
@@ -125,9 +125,9 @@
observer.OnDialogModelUpdated(this);
}
-void CastDialogView::OnControllerInvalidated() {
+void CastDialogView::OnControllerDestroying() {
controller_ = nullptr;
- // We don't destroy the dialog here because if the invalidation was caused by
+ // We don't destroy the dialog here because if the destruction was caused by
// activating the toolbar icon in order to close the dialog, then it would
// cause the dialog to immediately open again.
}
diff --git a/chrome/browser/ui/views/media_router/cast_dialog_view.h b/chrome/browser/ui/views/media_router/cast_dialog_view.h
index d87fdda..d44d4e0 100644
--- a/chrome/browser/ui/views/media_router/cast_dialog_view.h
+++ b/chrome/browser/ui/views/media_router/cast_dialog_view.h
@@ -66,7 +66,7 @@
// CastDialogController::Observer:
void OnModelUpdated(const CastDialogModel& model) override;
- void OnControllerInvalidated() override;
+ void OnControllerDestroying() override;
// views::BubbleDialogDelegateView:
void OnPaint(gfx::Canvas* canvas) override;
diff --git a/chrome/browser/ui/views/media_router/cast_dialog_view_browsertest.cc b/chrome/browser/ui/views/media_router/cast_dialog_view_browsertest.cc
index 1c584120..a7af3c8 100644
--- a/chrome/browser/ui/views/media_router/cast_dialog_view_browsertest.cc
+++ b/chrome/browser/ui/views/media_router/cast_dialog_view_browsertest.cc
@@ -70,6 +70,7 @@
override {
return nullptr;
}
+ void RegisterDestructor(base::OnceClosure destructor) override {}
};
} // namespace
diff --git a/chrome/browser/ui/views/media_router/cast_dialog_view_unittest.cc b/chrome/browser/ui/views/media_router/cast_dialog_view_unittest.cc
index 5326467..988cb07a 100644
--- a/chrome/browser/ui/views/media_router/cast_dialog_view_unittest.cc
+++ b/chrome/browser/ui/views/media_router/cast_dialog_view_unittest.cc
@@ -91,6 +91,7 @@
MOCK_METHOD1(StopCasting, void(const std::string& route_id));
MOCK_METHOD1(ClearIssue, void(const Issue::Id& issue_id));
MOCK_METHOD0(TakeMediaRouteStarter, std::unique_ptr<MediaRouteStarter>());
+ MOCK_METHOD1(RegisterDestructor, void(base::OnceClosure));
};
class CastDialogViewTest : public ChromeViewsTestBase {
diff --git a/chrome/browser/ui/views/media_router/media_router_dialog_controller_views.cc b/chrome/browser/ui/views/media_router/media_router_dialog_controller_views.cc
index ad379b2..244d523 100644
--- a/chrome/browser/ui/views/media_router/media_router_dialog_controller_views.cc
+++ b/chrome/browser/ui/views/media_router/media_router_dialog_controller_views.cc
@@ -51,7 +51,7 @@
std::move(context));
}
- ShowGlobalMeidaControlsDialog(std::move(context));
+ ShowGlobalMediaControlsDialog(std::move(context));
return true;
}
@@ -155,9 +155,20 @@
initiator(), std::move(start_presentation_context_))
: MediaRouterUI::CreateWithDefaultMediaSourceAndMirroring(
initiator());
+ ui_->RegisterDestructor(
+ base::BindOnce(&MediaRouterDialogControllerViews::DestroyMediaRouterUI,
+ // Safe to use base::Unretained here: the callback being
+ // bound is held by the MediaRouterUI we are creating and
+ // owning, and ownership of |ui_| is never transferred
+ // away from this object.
+ base::Unretained(this)));
}
-void MediaRouterDialogControllerViews::ShowGlobalMeidaControlsDialog(
+void MediaRouterDialogControllerViews::DestroyMediaRouterUI() {
+ ui_.reset();
+}
+
+void MediaRouterDialogControllerViews::ShowGlobalMediaControlsDialog(
std::unique_ptr<StartPresentationContext> context) {
// Show the WebContents requesting a dialog.
initiator()->GetDelegate()->ActivateContents(initiator());
diff --git a/chrome/browser/ui/views/media_router/media_router_dialog_controller_views.h b/chrome/browser/ui/views/media_router/media_router_dialog_controller_views.h
index 0a5fdb1..7c97211 100644
--- a/chrome/browser/ui/views/media_router/media_router_dialog_controller_views.h
+++ b/chrome/browser/ui/views/media_router/media_router_dialog_controller_views.h
@@ -69,13 +69,14 @@
// MediaRouterUIService::Observer:
void OnServiceDisabled() override;
- // Initializes |ui_|.
+ // Initializes and destroys |ui_| respectively.
void InitializeMediaRouterUI();
+ void DestroyMediaRouterUI();
// If there exists a media button, show the GMC dialog anchored to the media
// button. Otherwise, show the dialog anchored to the top center of the web
// contents.
- void ShowGlobalMeidaControlsDialog(
+ void ShowGlobalMediaControlsDialog(
std::unique_ptr<StartPresentationContext> context);
// Returns the media button from the browser that initiates the request to


@ -2,4 +2,8 @@
 # the electron launcher.
 # Options to pass to electron.
-ELECTRON_FLAGS="--ozone-platform-hint=auto"
+ELECTRON_FLAGS="--enable-features=WebRTCPipeWireCapturer"
+# This can be 'x11', 'wayland', or 'auto'. Overriding default to 'auto',
+# but respecting the variable content if any
+ELECTRON_OZONE_PLATFORM_HINT="${ELECTRON_OZONE_PLATFORM_HINT:-auto}"


@ -0,0 +1,15 @@
diff --git a/net/dns/BUILD.gn b/net/dns/BUILD.gn
index f36bf68..805d9a6 100644
--- a/net/dns/BUILD.gn
+++ b/net/dns/BUILD.gn
@@ -130,8 +130,8 @@ source_set("dns") {
]
} else if (is_linux) {
sources += [
- "dns_config_service_linux.cc",
- "dns_config_service_linux.h",
+ "dns_config_service_fuchsia.cc",
+ "dns_config_service_fuchsia.h",
]
} else if (is_posix) {
sources += [


@ -0,0 +1,361 @@
safesprintf emitnull:
error: conversion from 'std::nullptr_t' to 'const internal::Arg' is ambiguous
const internal::Arg arg_array[] = { args... };
flatmap incompletetype:
error: static assertion failed due to requirement 'std::__is_complete_or_unbounded(std::__type_identity<std::pair<A, A>>{})': template argument must be a complete class or an unbounded array
static_assert(std::__is_complete_or_unbounded(__type_identity<_Tp>{}),
i18n, time:
various icu failures (new icu time formatting? internal api difference?)
a ton of these fail:
Expected equality of these values:
u"Monday 16 May Saturday 28 May"
Which is: u"Monday 16 May \x2013 Saturday 28 May"
DateIntervalFormat(begin_time, end_time, DATE_FORMAT_MONTH_WEEKDAY_DAY)
Which is: u"Monday 16\x2009\x2013\x2009Saturday 28 May"
../../base/i18n/time_formatting_unittest.cc:84: Failure
Expected equality of these values:
clock12h_pm
Which is: u"3:42 PM"
TimeFormatTimeOfDay(time)
Which is: u"3:42\x202FPM"
.. and so on
fileutiltest filetofile:
../../base/files/file_util_unittest.cc:2692: Failure
Value of: stream
Actual: true
Expected: false
stacktracetest: crashes (this doesn't seem to use execinfo so probably relies on glibc internal layout for tracing here)
platformthreadtest canchangethreadtype:
../../base/threading/platform_thread_unittest.cc:445: Failure
Expected equality of these values:
PlatformThread::CanChangeThreadType(ThreadType::kBackground, ThreadType::kResourceEfficient)
Which is: true
kCanIncreasePriority
Which is: false
scopedfdownershiptrackingtest crashonunownedclose: fails due to scoped-file-no-close.patch
stackcontainer customallocator:
../../base/containers/stack_container_unittest.cc:211: Failure
Expected equality of these values:
1
Allocator::deallocated
Which is: 0
nativelibrarytest loadlibrarypreferownsymbols: crashes (probably musl dlopen does not play nice here)
spantest empty: crashes (this looks fishy)
readelfbuildid: crashes (this looks like glibc dynamic linker semantics)
nss db unittest: various nss failures: e.g.:
../../net/cert/nss_cert_database_unittest.cc:209: Failure
Expected equality of these values:
OK
Which is: 0
cert_db_->ImportFromPKCS12(GetPublicSlot(), pkcs12_data, u"12345", true, nullptr)
Which is: -702
processutiltest cloneflags: fails in CI (ulimit? too many threads?)
../../base/process/process_util_unittest.cc:1434: Failure
Value of: process.IsValid()
Actual: false
Expected: true
addresstrackerlinuxnetlinktest:
../../net/base/address_tracker_linux_unittest.cc:886: Failure
Value of: child.process.IsValid()
Actual: false
Expected: true
ToAddressDoesNotDereference: ; Expected `get_for_extraction_cnt` to be 1 but got 0;
DataCapturedManyThreads: flaky
ProcessAlternativeServicesTest.Process*: crashed ?
--- a/base/strings/safe_sprintf_unittest.cc
+++ b/base/strings/safe_sprintf_unittest.cc
@@ -740,6 +740,7 @@
#endif
}
+#if 0
TEST(SafeSPrintfTest, EmitNULL) {
char buf[40];
#if defined(__GNUC__)
@@ -756,6 +757,7 @@
#pragma GCC diagnostic pop
#endif
}
+#endif
TEST(SafeSPrintfTest, PointerSize) {
// The internal data representation is a 64bit value, independent of the
--- a/base/containers/flat_map_unittest.cc
+++ b/base/containers/flat_map_unittest.cc
@@ -52,6 +52,7 @@
} // namespace
+#if 0
TEST(FlatMap, IncompleteType) {
struct A {
using Map = flat_map<A, A>;
@@ -65,6 +66,7 @@
A a;
}
+#endif
TEST(FlatMap, RangeConstructor) {
flat_map<int, int>::value_type input_vals[] = {
--- a/base/BUILD.gn
+++ b/base/BUILD.gn
@@ -3194,21 +3194,6 @@
"hash/md5_constexpr_unittest.cc",
"hash/md5_unittest.cc",
"hash/sha1_unittest.cc",
- "i18n/break_iterator_unittest.cc",
- "i18n/case_conversion_unittest.cc",
- "i18n/char_iterator_unittest.cc",
- "i18n/character_encoding_unittest.cc",
- "i18n/file_util_icu_unittest.cc",
- "i18n/icu_string_conversions_unittest.cc",
- "i18n/icu_util_unittest.cc",
- "i18n/message_formatter_unittest.cc",
- "i18n/number_formatting_unittest.cc",
- "i18n/rtl_unittest.cc",
- "i18n/streaming_utf8_validator_unittest.cc",
- "i18n/string_search_unittest.cc",
- "i18n/time_formatting_unittest.cc",
- "i18n/timezone_unittest.cc",
- "i18n/transliterator_unittest.cc",
"immediate_crash_unittest.cc",
"json/json_parser_unittest.cc",
"json/json_reader_unittest.cc",
--- a/base/files/file_util_unittest.cc
+++ b/base/files/file_util_unittest.cc
@@ -2686,6 +2686,7 @@
}
}
+#if 0
TEST_F(FileUtilTest, FileToFILE) {
File file;
FILE* stream = FileToFILE(std::move(file), "w");
@@ -2700,6 +2701,7 @@
EXPECT_FALSE(file.IsValid());
EXPECT_TRUE(CloseFile(stream));
}
+#endif
TEST_F(FileUtilTest, FILEToFile) {
ScopedFILE stream;
--- a/base/debug/stack_trace_unittest.cc
+++ b/base/debug/stack_trace_unittest.cc
@@ -345,6 +345,7 @@
// sometimes we read fp / pc from the place that previously held
// uninitialized value.
// TODO(crbug.com/1132511): Enable this test on Fuchsia.
+#if 0
#if defined(MEMORY_SANITIZER) || BUILDFLAG(IS_FUCHSIA)
#define MAYBE_TraceStackFramePointersFromBuffer \
DISABLED_TraceStackFramePointersFromBuffer
@@ -357,6 +358,7 @@
const void* frames[kDepth];
ExpectStackFramePointers<kDepth>(frames, kDepth, /*copy_stack=*/true);
}
+#endif
#if BUILDFLAG(IS_ANDROID) || BUILDFLAG(IS_APPLE)
#define MAYBE_StackEnd StackEnd
--- a/base/threading/platform_thread_unittest.cc
+++ b/base/threading/platform_thread_unittest.cc
@@ -416,6 +416,7 @@
// platforms for all priorities. This not being the case. This test documents
// and hardcodes what we know. Please inform scheduler-dev@chromium.org if this
// proprerty changes for a given platform.
+#if 0
TEST(PlatformThreadTest, CanChangeThreadType) {
#if BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS)
// On Ubuntu, RLIMIT_NICE and RLIMIT_RTPRIO are 0 by default, so we won't be
@@ -472,6 +473,7 @@
ThreadType::kBackground));
#endif
}
+#endif
TEST(PlatformThreadTest, SetCurrentThreadTypeTest) {
TestPriorityResultingFromThreadType(ThreadType::kBackground,
--- a/base/files/scoped_file_linux_unittest.cc
+++ b/base/files/scoped_file_linux_unittest.cc
@@ -42,11 +42,13 @@
EXPECT_DEATH(ScopedFD(fd.get()), "");
}
+#if 0
TEST_F(ScopedFDOwnershipTrackingTest, CrashOnUnownedClose) {
ScopedFD fd = OpenFD();
subtle::EnableFDOwnershipEnforcement(true);
EXPECT_DEATH(close(fd.get()), "");
}
+#endif
#endif // defined(GTEST_HAS_DEATH_TEST)
--- a/base/native_library_unittest.cc
+++ b/base/native_library_unittest.cc
@@ -139,6 +139,7 @@
// Verifies that the |prefer_own_symbols| option satisfies its guarantee that
// a loaded library will always prefer local symbol resolution before
// considering global symbols.
+#if 0
TEST(NativeLibraryTest, LoadLibraryPreferOwnSymbols) {
NativeLibraryOptions options;
options.prefer_own_symbols = true;
@@ -171,6 +172,7 @@
EXPECT_EQ(2, NativeLibraryTestIncrement());
EXPECT_EQ(3, NativeLibraryTestIncrement());
}
+#endif
#endif // !BUILDFLAG(IS_ANDROID) && !defined(THREAD_SANITIZER) && \
// !defined(MEMORY_SANITIZER)
--- a/base/containers/span_unittest.cc
+++ b/base/containers/span_unittest.cc
@@ -995,6 +995,7 @@
}
}
+#if 0
TEST(SpanTest, Empty) {
{
span<int> span;
@@ -1014,6 +1015,7 @@
EXPECT_TRUE(span_of_checked_iterators.empty());
}
}
+#endif
TEST(SpanTest, OperatorAt) {
static constexpr int kArray[] = {1, 6, 1, 8, 0};
--- a/base/debug/elf_reader_unittest.cc
+++ b/base/debug/elf_reader_unittest.cc
@@ -194,6 +194,7 @@
}
}
+#if 0
TEST(ElfReaderTestWithCurrentImage, ReadElfBuildId) {
#if BUILDFLAG(IS_ANDROID)
// On Android the library loader memory maps the full so file.
@@ -229,6 +230,7 @@
UnloadNativeLibrary(library);
#endif
}
+#endif
} // namespace debug
} // namespace base
--- a/net/BUILD.gn
+++ b/net/BUILD.gn
@@ -4826,7 +4826,6 @@
sources += [
"cert/internal/system_trust_store_nss_unittest.cc",
"cert/internal/trust_store_nss_unittest.cc",
- "cert/nss_cert_database_unittest.cc",
"cert/x509_util_nss_unittest.cc",
]
if (!is_castos) {
--- a/base/process/process_util_unittest.cc
+++ b/base/process/process_util_unittest.cc
@@ -1419,7 +1419,7 @@
return kSuccess;
}
-#if defined(CLONE_NEWUSER) && defined(CLONE_NEWPID)
+#if 0 && defined(CLONE_NEWUSER) && defined(CLONE_NEWPID)
TEST_F(ProcessUtilTest, CloneFlags) {
if (!PathExists(FilePath("/proc/self/ns/user")) ||
!PathExists(FilePath("/proc/self/ns/pid"))) {
--- a/net/base/address_tracker_linux_unittest.cc
+++ b/net/base/address_tracker_linux_unittest.cc
@@ -831,6 +831,7 @@
//
// This test creates multiple concurrent `AddressTrackerLinux` instances in
// separate processes, each in their own PID namespaces.
+#if 0
TEST(AddressTrackerLinuxNetlinkTest, TestInitializeTwoTrackersInPidNamespaces) {
// This test initializes `kNumChildren` instances of `AddressTrackerLinux` in
// tracking mode, each in their own child process running in a PID namespace.
@@ -901,6 +902,7 @@
ASSERT_EQ(exit_code, 0);
}
}
+#endif
MULTIPROCESS_TEST_MAIN(ChildProcessInitializeTrackerForTesting) {
base::test::TaskEnvironment task_env(
--- a/base/trace_event/trace_event_unittest.cc
+++ b/base/trace_event/trace_event_unittest.cc
@@ -1368,6 +1368,7 @@
}
// Test that data sent from multiple threads is gathered
+#if 0
TEST_F(TraceEventTestFixture, DataCapturedManyThreads) {
BeginTrace();
@@ -1408,6 +1409,7 @@
delete task_complete_events[i];
}
}
+#endif
// Test that thread and process names show up in the trace.
// In SDK build, thread names are not tracked inside //base. Instead, there's
--- a/base/allocator/partition_allocator/src/partition_alloc/pointers/raw_ptr_unittest.cc
+++ b/base/allocator/partition_allocator/src/partition_alloc/pointers/raw_ptr_unittest.cc
@@ -1481,6 +1481,7 @@
// `base::to_address()` will use the dereference operator. This is not
// what we want; this test enforces extraction semantics for
// `to_address()`.
+#if 0
TEST_F(RawPtrTest, ToAddressDoesNotDereference) {
CountingRawPtr<int> ptr = nullptr;
int* raw = base::to_address(ptr);
@@ -1492,6 +1493,7 @@
.get_for_duplication_cnt = 0}),
CountersMatch());
}
+#endif
TEST_F(RawPtrTest, ToAddressGivesBackRawAddress) {
int* raw = nullptr;
--- a/net/http/http_stream_factory_unittest.cc
+++ b/net/http/http_stream_factory_unittest.cc
@@ -3477,6 +3477,7 @@
DefaultCTPolicyEnforcer ct_policy_enforcer_;
};
+#if 0
TEST_F(ProcessAlternativeServicesTest, ProcessEmptyAltSvc) {
session_ =
std::make_unique<HttpNetworkSession>(session_params_, session_context_);
@@ -3585,6 +3586,7 @@
alternatives[0].host_port_pair());
EXPECT_EQ(0u, alternatives[0].advertised_versions().size());
}
+#endif
} // namespace


@ -9,6 +9,12 @@ done
 # Prefer user defined ELECTRON_USER_FLAGS (from env) over system
 # default ELECTRON_FLAGS (from /etc/electron/default.conf).
-ELECTRON_FLAGS=${ELECTRON_USER_FLAGS:-"$ELECTRON_FLAGS"}
+export ELECTRON_FLAGS="$ELECTRON_FLAGS ${ELECTRON_USER_FLAGS:-"$ELECTRON_USER_FLAGS"}"
+# Re-export, for it to be accessible by the process
+export ELECTRON_OZONE_PLATFORM_HINT="${ELECTRON_OZONE_PLATFORM_HINT}"
+if [ "$ELECTRON_RUN_AS_NODE" == "1" ] && [ "$ELECTRON_STILL_PASS_THE_DEFAULT_FLAGS" != "1" ]; then
+exec "/usr/lib/electron/electron" "$@"
+fi
 exec "/usr/lib/electron/electron" "$@" ${ELECTRON_FLAGS}


@ -0,0 +1,13 @@
instead of hardcoding the version, use the defined macro.
--
--- a/third_party/test_fonts/fontconfig/generate_fontconfig_caches.cc
+++ b/third_party/test_fonts/fontconfig/generate_fontconfig_caches.cc
@@ -56,7 +56,7 @@
FcFini();
// Check existence of intended fontconfig cache file.
- auto cache = fontconfig_caches + "/" + kCacheKey + "-le64.cache-9";
+ auto cache = fontconfig_caches + "/" + kCacheKey + "-le64.cache-" + FC_CACHE_VERSION;
bool cache_exists = access(cache.c_str(), F_OK) == 0;
return !cache_exists;
}


@ -0,0 +1,12 @@
--- a/media/filters/ffmpeg_glue.cc
+++ b/media/filters/ffmpeg_glue.cc
@@ -142,7 +142,7 @@ const char* FFmpegGlue::GetAllowedAudioDecoders() {
static const base::NoDestructor<std::string> kAllowedAudioCodecs([]() {
// This should match the configured lists in //third_party/ffmpeg.
std::string allowed_decoders(
- "vorbis,libopus,flac,pcm_u8,pcm_s16le,pcm_s24le,pcm_s32le,pcm_f32le,"
+ "vorbis,opus,libopus,flac,pcm_u8,pcm_s16le,pcm_s24le,pcm_s32le,pcm_f32le,"
"mp3,pcm_s16be,pcm_s24be,pcm_mulaw,pcm_alaw");
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
allowed_decoders += ",aac";


@ -0,0 +1,17 @@
fstatat64 is macro'd to fstatat in sys/stat.h in musl, but then that fstatat is
used in the _syscall4 macro mapping to __NR_$name, and __NR_fstatat is not
defined anywhere here, as it wants the 64 name.
so, just let it keep the name with an undef of the stat.h macro, then the macro
expansion below evaluates correctly.
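A standalone sketch of the macro-expansion problem (invented wrapper names; the real ones live in linux_syscall_support.h and musl's sys/stat.h):

    #include <cstdio>

    #define __NR_fstatat64 79         // stand-in for the kernel's syscall number

    // musl's sys/stat.h effectively does this:
    #define fstatat64 fstatat

    // LSS-style two-level wrapper: the outer macro forwards `name` to an inner
    // macro that pastes __NR_##name. Because the outer macro does not use ##
    // itself, `fstatat64` is expanded to `fstatat` before the paste happens,
    // producing __NR_fstatat, which is never defined.
    #define SYSCALL_NR_INNER(name) (__NR_##name)
    #define SYSCALL_NR(name) SYSCALL_NR_INNER(name)

    // The patch's fix: drop the stat.h alias so the 64-suffixed name survives.
    #undef fstatat64

    int main() {
      std::printf("%d\n", SYSCALL_NR(fstatat64));  // pastes __NR_fstatat64 -> 79
    }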
--- a/third_party/lss/linux_syscall_support.h
+++ b/third_party/lss/linux_syscall_support.h
@@ -4947,7 +4947,8 @@
# endif
#endif
#if defined(__NR_fstatat64)
+ #undef fstatat64
LSS_INLINE _syscall4(int, fstatat64, int, d,
const char *, p,
struct kernel_stat64 *, b, int, f)
#endif


@ -0,0 +1,11 @@
--- a/services/device/public/cpp/generic_sensor/sensor_reading.h
+++ b/services/device/public/cpp/generic_sensor/sensor_reading.h
@@ -5,6 +5,8 @@
#ifndef SERVICES_DEVICE_PUBLIC_CPP_GENERIC_SENSOR_SENSOR_READING_H_
#define SERVICES_DEVICE_PUBLIC_CPP_GENERIC_SENSOR_SENSOR_READING_H_
+#include <cstddef>
+#include <cstdint>
#include <type_traits>
namespace device {


@ -1,11 +1,11 @@
--- a/electron/default_app/default_app.ts --- a/electron/default_app/default_app.ts
+++ b/electron/default_app/default_app.ts +++ b/electron/default_app/default_app.ts
@@ -60,7 +60,7 @@ @@ -61,7 +61,7 @@
}; };
if (process.platform === 'linux') { if (process.platform === 'linux') {
- options.icon = path.join(__dirname, 'icon.png'); - options.icon = url.fileURLToPath(new URL('icon.png', import.meta.url));
+ options.icon = '/usr/share/icons/hicolor/1024x1024/apps/electron.png'; + options.icon = 'file:///usr/share/icons/hicolor/1024x1024/apps/electron.png';
} }
mainWindow = new BrowserWindow(options); mainWindow = new BrowserWindow(options);


@ -1,39 +0,0 @@
Patch-Source: https://github.com/archlinux/svntogit-packages/blob/bf2401407df5bcc938382eb03748fbef41e41c89/trunk/unbundle-jsoncpp-avoid-CFI-faults-with-is_cfi-true.patch
From ed8d931e35f81d8566835a579caf7d61368f85b7 Mon Sep 17 00:00:00 2001
From: Evangelos Foutras <evangelos@foutrelis.com>
Date: Tue, 27 Sep 2022 22:20:41 +0000
Subject: [PATCH] unbundle/jsoncpp: avoid CFI faults with is_cfi=true
Ensure jsoncpp symbols have public visibility and are thus excluded from
CFI checks and whole-program optimization. This is achieved by defining
JSON_DLL_BUILD which in turn causes json/config.h to define JSON_API as
__attribute__((visibility("default"))). The latter macro is used to tag
jsoncpp classes and namespace functions throughout jsoncpp's headers.
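Roughly the mechanism being relied on, as a hedged sketch (not the actual json/config.h, which handles more cases):

    // With JSON_DLL_BUILD defined, JSON_API expands to a default-visibility
    // attribute (GCC/Clang), so tagged classes and functions keep public
    // symbols and fall outside CFI checks and whole-program optimization.
    #if defined(JSON_DLL_BUILD)
    #define JSON_API __attribute__((visibility("default")))
    #else
    #define JSON_API
    #endif

    namespace Json {
    class JSON_API Value {
     public:
      int asInt() const { return 42; }
    };
    JSON_API Value makeValue() { return Value(); }
    }  // namespace Json

    int main() { return Json::makeValue().asInt() == 42 ? 0 : 1; }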
BUG=1365218
Change-Id: I56277737b7d9ecaeb5e17c8d21a2e55f3d5d5bc9
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/3919652
Reviewed-by: Thomas Anderson <thomasanderson@chromium.org>
Commit-Queue: Thomas Anderson <thomasanderson@chromium.org>
Cr-Commit-Position: refs/heads/main@{#1052077}
---
build/linux/unbundle/jsoncpp.gn | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/build/linux/unbundle/jsoncpp.gn b/build/linux/unbundle/jsoncpp.gn
index 544f9d13c9..e84a0ef27a 100644
--- a/build/linux/unbundle/jsoncpp.gn
+++ b/build/linux/unbundle/jsoncpp.gn
@@ -3,6 +3,11 @@ import("//build/shim_headers.gni")
pkg_config("jsoncpp_config") {
packages = [ "jsoncpp" ]
+
+ # Defining JSON_DLL_BUILD applies public visibility to jsoncpp classes
+ # thus deactivating CFI checks for them. This avoids CFI violations in
+ # virtual calls to system jsoncpp library (https://crbug.com/1365218).
+ defines = [ "JSON_DLL_BUILD" ]
}
shim_headers("jsoncpp_shim") {


@ -1,7 +0,0 @@
--- a/buildtools/third_party/libc++/__config_site
+++ b/buildtools/third_party/libc++/__config_site
@@ -49,3 +49,4 @@
#define _LIBCPP_REMOVE_TRANSITIVE_INCLUDES
+#define _LIBCPP_HAS_MUSL_LIBC
#endif // _LIBCPP_CONFIG_SITE


@ -0,0 +1,269 @@
missing libstdc++13 includes
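a minimal illustration of the failure class (hedged; the enum mirrors the patched ui/events/types/scroll_types.h): with libstdc++ 13 the standard headers no longer pull in <cstdint> transitively, so headers that use fixed-width types must include it themselves

    #include <cstdint>  // without this, std::uint8_t may be undeclared (seen with libstdc++ 13)
    #include <string>

    enum class ScrollGranularity : std::uint8_t {
      kPixel,
      kLine,
    };

    int main() {
      ScrollGranularity g = ScrollGranularity::kLine;
      std::string label = (g == ScrollGranularity::kLine) ? "line" : "pixel";
      return label == "line" ? 0 : 1;
    }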
--
--- a/third_party/openscreen/src/platform/base/error.h
+++ b/third_party/openscreen/src/platform/base/error.h
@@ -6,6 +6,7 @@
#define PLATFORM_BASE_ERROR_H_
#include <cassert>
+#include <cstdint>
#include <ostream>
#include <string>
#include <utility>
--- a/ui/base/prediction/kalman_filter.h
+++ b/ui/base/prediction/kalman_filter.h
@@ -8,6 +8,8 @@
#include "base/component_export.h"
#include "ui/gfx/geometry/matrix3_f.h"
+#include <cstdint>
+
namespace ui {
// This Kalman filter is used to predict state in one axles.
--- a/ui/events/types/scroll_types.h
+++ b/ui/events/types/scroll_types.h
@@ -5,6 +5,8 @@
#ifndef UI_EVENTS_TYPES_SCROLL_TYPES_H_
#define UI_EVENTS_TYPES_SCROLL_TYPES_H_
+#include <cstdint>
+
namespace ui {
enum class ScrollGranularity : uint8_t {
--- a/third_party/webrtc/rtc_base/system/file_wrapper.h
+++ b/third_party/webrtc/rtc_base/system/file_wrapper.h
@@ -14,6 +14,7 @@
#include <stddef.h>
#include <stdio.h>
+#include <cstdint>
#include <string>
#include "absl/strings/string_view.h"
--- a/third_party/pdfium/constants/annotation_flags.h
+++ b/third_party/pdfium/constants/annotation_flags.h
@@ -5,6 +5,8 @@
#ifndef CONSTANTS_ANNOTATION_FLAGS_H_
#define CONSTANTS_ANNOTATION_FLAGS_H_
+#include <cstdint>
+
namespace pdfium {
namespace annotation_flags {
--- a/third_party/s2cellid/src/s2/util/math/mathutil.h
+++ b/third_party/s2cellid/src/s2/util/math/mathutil.h
@@ -21,6 +21,7 @@
#ifndef S2_UTIL_MATH_MATHUTIL_H_
#define S2_UTIL_MATH_MATHUTIL_H_
+#include <cstdint>
#include <type_traits>
class MathUtil {
--- a/third_party/s2cellid/src/s2/s1angle.h
+++ b/third_party/s2cellid/src/s2/s1angle.h
@@ -24,6 +24,7 @@
#define S2_S1ANGLE_H_
#include <math.h>
+#include <cstdint>
#include <limits>
#include <ostream>
#include <type_traits>
--- a/third_party/maldoca/src/maldoca/ole/header.h
+++ b/third_party/maldoca/src/maldoca/ole/header.h
@@ -45,6 +45,8 @@
#include "absl/strings/string_view.h"
+#include <cstdint>
+
namespace maldoca {
class OLEHeader {
--- a/components/password_manager/core/browser/generation/password_generator.h
+++ b/components/password_manager/core/browser/generation/password_generator.h
@@ -5,6 +5,7 @@
#ifndef COMPONENTS_PASSWORD_MANAGER_CORE_BROWSER_GENERATION_PASSWORD_GENERATOR_H_
#define COMPONENTS_PASSWORD_MANAGER_CORE_BROWSER_GENERATION_PASSWORD_GENERATOR_H_
+#include <cstdint>
#include <string>
--- a/base/debug/profiler.h
+++ b/base/debug/profiler.h
@@ -7,6 +7,7 @@
#include <stddef.h>
+#include <cstdint>
#include <string>
#include "base/base_export.h"
--- a/components/dom_distiller/core/url_utils.h
+++ b/components/dom_distiller/core/url_utils.h
@@ -5,6 +5,7 @@
#ifndef COMPONENTS_DOM_DISTILLER_CORE_URL_UTILS_H_
#define COMPONENTS_DOM_DISTILLER_CORE_URL_UTILS_H_
+#include <cstdint>
#include <string>
#include "base/strings/string_piece_forward.h"
--- a/components/feature_engagement/internal/event_storage_validator.h
+++ b/components/feature_engagement/internal/event_storage_validator.h
@@ -5,6 +5,7 @@
#ifndef COMPONENTS_FEATURE_ENGAGEMENT_INTERNAL_EVENT_STORAGE_VALIDATOR_H_
#define COMPONENTS_FEATURE_ENGAGEMENT_INTERNAL_EVENT_STORAGE_VALIDATOR_H_
+#include <cstdint>
#include <string>
namespace feature_engagement {
--- a/chrome/test/chromedriver/chrome/web_view_impl.cc
+++ b/chrome/test/chromedriver/chrome/web_view_impl.cc
@@ -8,6 +8,7 @@
#include <algorithm>
#include <memory>
#include <queue>
+#include <string>
#include <utility>
#include <vector>
--- a/cc/trees/target_property.cc
+++ b/cc/trees/target_property.cc
@@ -2,6 +2,8 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
+#include <cstdint>
+
#include "cc/trees/target_property.h"
#include "ui/gfx/animation/keyframe/target_property.h"
--- a/gpu/config/gpu_feature_info.h
+++ b/gpu/config/gpu_feature_info.h
@@ -5,6 +5,7 @@
#ifndef GPU_CONFIG_GPU_FEATURE_INFO_H_
#define GPU_CONFIG_GPU_FEATURE_INFO_H_
+#include <cstdint>
#include <string>
#include <vector>
--- a/gpu/config/gpu_driver_bug_workarounds.h
+++ b/gpu/config/gpu_driver_bug_workarounds.h
@@ -5,6 +5,7 @@
#ifndef GPU_CONFIG_GPU_DRIVER_BUG_WORKAROUNDS_H_
#define GPU_CONFIG_GPU_DRIVER_BUG_WORKAROUNDS_H_
+#include <cstdint>
#include <vector>
#include "build/build_config.h"
--- a/third_party/blink/public/common/page_state/page_state.h
+++ b/third_party/blink/public/common/page_state/page_state.h
@@ -5,6 +5,7 @@
#ifndef THIRD_PARTY_BLINK_PUBLIC_COMMON_PAGE_STATE_PAGE_STATE_H_
#define THIRD_PARTY_BLINK_PUBLIC_COMMON_PAGE_STATE_PAGE_STATE_H_
+#include <cstdint>
#include <string>
#include <vector>
--- a/pdf/document_attachment_info.h
+++ b/pdf/document_attachment_info.h
@@ -5,6 +5,7 @@
#ifndef PDF_DOCUMENT_ATTACHMENT_INFO_H_
#define PDF_DOCUMENT_ATTACHMENT_INFO_H_
+#include <cstdint>
#include <string>
--- a/components/payments/content/utility/fingerprint_parser.h
+++ b/components/payments/content/utility/fingerprint_parser.h
@@ -7,6 +7,7 @@
#include <stddef.h>
+#include <cstdint>
#include <string>
#include <vector>
--- a/media/base/video_transformation.h
+++ b/media/base/video_transformation.h
@@ -5,6 +5,7 @@
#ifndef MEDIA_BASE_VIDEO_TRANSFORMATION_H_
#define MEDIA_BASE_VIDEO_TRANSFORMATION_H_
+#include <cstdint>
#include <string>
#include "base/numerics/math_constants.h"
--- a/components/omnibox/browser/on_device_head_model.h
+++ b/components/omnibox/browser/on_device_head_model.h
@@ -5,6 +5,7 @@
#ifndef COMPONENTS_OMNIBOX_BROWSER_ON_DEVICE_HEAD_MODEL_H_
#define COMPONENTS_OMNIBOX_BROWSER_ON_DEVICE_HEAD_MODEL_H_
+#include <cstdint>
#include <string>
#include <utility>
#include <vector>
--- a/components/autofill/core/browser/autofill_ablation_study.h
+++ b/components/autofill/core/browser/autofill_ablation_study.h
@@ -5,6 +5,7 @@
#ifndef COMPONENTS_AUTOFILL_CORE_BROWSER_AUTOFILL_ABLATION_STUDY_H_
#define COMPONENTS_AUTOFILL_CORE_BROWSER_AUTOFILL_ABLATION_STUDY_H_
+#include <cstdint>
#include <string>
class GURL;
--- a/components/autofill/core/browser/strike_databases/strike_database_base.h
+++ b/components/autofill/core/browser/strike_databases/strike_database_base.h
@@ -5,6 +5,7 @@
#ifndef COMPONENTS_AUTOFILL_CORE_BROWSER_STRIKE_DATABASES_STRIKE_DATABASE_BASE_H_
#define COMPONENTS_AUTOFILL_CORE_BROWSER_STRIKE_DATABASES_STRIKE_DATABASE_BASE_H_
+#include <cstdint>
#include <map>
#include <string>
#include <vector>
--- a/chrome/browser/resource_coordinator/decision_details.h
+++ b/chrome/browser/resource_coordinator/decision_details.h
@@ -5,6 +5,7 @@
#ifndef CHROME_BROWSER_RESOURCE_COORDINATOR_DECISION_DETAILS_H_
#define CHROME_BROWSER_RESOURCE_COORDINATOR_DECISION_DETAILS_H_
+#include <cstdint>
#include <string>
#include <vector>
--- a/net/third_party/quiche/src/quiche/quic/core/qpack/qpack_stream_sender_delegate.h
+++ b/net/third_party/quiche/src/quiche/quic/core/qpack/qpack_stream_sender_delegate.h
@@ -8,6 +8,8 @@
#include "absl/strings/string_view.h"
#include "quiche/quic/platform/api/quic_export.h"
+#include <cstdint>
+
namespace quic {
// This interface writes encoder/decoder data to peer.
--- a/third_party/perfetto/include/perfetto/base/export.h
+++ b/third_party/perfetto/include/perfetto/base/export.h
@@ -17,6 +17,8 @@
#ifndef INCLUDE_PERFETTO_BASE_EXPORT_H_
#define INCLUDE_PERFETTO_BASE_EXPORT_H_
+#include <cstdint>
+
#include "perfetto/base/build_config.h"
#include "perfetto/public/abi/export.h"


@ -1,8 +1,8 @@
needed for PKEY_DISABLE_WRITE. these are documented as also being from sys/ needed for PKEY_DISABLE_WRITE. these are documented as also being from sys/
mman.h with GNU_SOURCE, but musl doesn't do that, so these are strictly from mman.h with GNU_SOURCE, but musl doesn't do that, so these are strictly from
kernel headers kernel headers
--- a/base/allocator/partition_allocator/partition_alloc_unittest.cc --- a/base/allocator/partition_allocator/src/partition_alloc/partition_alloc_unittest.cc
+++ b/base/allocator/partition_allocator/partition_alloc_unittest.cc +++ b/base/allocator/partition_allocator/src/partition_alloc/partition_alloc_unittest.cc
@@ -60,6 +60,7 @@ @@ -60,6 +60,7 @@
#include <sys/mman.h> #include <sys/mman.h>
#include <sys/resource.h> #include <sys/resource.h>


@ -0,0 +1,11 @@
--- ./v8/src/base/cpu.cc.orig
+++ ./v8/src/base/cpu.cc
@@ -14,7 +14,7 @@
#if V8_OS_LINUX
#include <linux/auxvec.h> // AT_HWCAP
#endif
-#if V8_GLIBC_PREREQ(2, 16) || V8_OS_ANDROID
+#if 1
#include <sys/auxv.h> // getauxval()
#endif
#if V8_OS_QNX


@ -37,25 +37,24 @@ for discussion about this, see https://www.openwall.com/lists/musl/2021/07/16/1
#define HAVE_FCNTL_H 1 #define HAVE_FCNTL_H 1
--- a/base/debug/stack_trace.cc --- a/base/debug/stack_trace.cc
+++ b/base/debug/stack_trace.cc +++ b/base/debug/stack_trace.cc
@@ -251,7 +253,9 @@ @@ -291,7 +291,7 @@
}
void StackTrace::OutputToStream(std::ostream* os) const {
+#if defined(__GLIBC__) && !defined(_AIX)
OutputToStreamWithPrefix(os, nullptr);
+#endif
}
std::string StackTrace::ToString() const {
@@ -281,7 +281,7 @@
} }
std::string StackTrace::ToStringWithPrefix(const char* prefix_string) const { std::string StackTrace::ToStringWithPrefix(const char* prefix_string) const {
std::stringstream stream; std::stringstream stream;
-#if !defined(__UCLIBC__) && !defined(_AIX) -#if !defined(__UCLIBC__) && !defined(_AIX)
+#if defined(__GLIBC__) && !defined(_AIX) +#if defined(__GLIBC__) && !defined(_AIX)
OutputToStreamWithPrefix(&stream, prefix_string); if (ShouldSuppressOutput()) {
#endif return "Backtrace suppressed.";
return stream.str(); }
@@ -301,7 +301,7 @@
}
std::ostream& operator<<(std::ostream& os, const StackTrace& s) {
-#if !defined(__UCLIBC__) && !defined(_AIX)
+#if defined(__GLIBC__) && !defined(_AIX)
s.OutputToStream(&os);
#else
os << "StackTrace::OutputToStream not implemented.";
--- a/base/debug/stack_trace_unittest.cc --- a/base/debug/stack_trace_unittest.cc
+++ b/base/debug/stack_trace_unittest.cc +++ b/base/debug/stack_trace_unittest.cc
@@ -33,7 +33,7 @@ @@ -33,7 +33,7 @@
@ -67,12 +66,3 @@ for discussion about this, see https://www.openwall.com/lists/musl/2021/07/16/1
// StackTrace::OutputToStream() is not implemented under uclibc, nor AIX. // StackTrace::OutputToStream() is not implemented under uclibc, nor AIX.
// See https://crbug.com/706728 // See https://crbug.com/706728
@@ -156,7 +156,7 @@
#endif // !defined(__UCLIBC__) && !defined(_AIX)
-#if BUILDFLAG(IS_POSIX) && !BUILDFLAG(IS_ANDROID)
+#if (BUILDFLAG(IS_POSIX) && defined(__GLIBC__)) && !BUILDFLAG(IS_ANDROID)
#if !BUILDFLAG(IS_IOS)
static char* newArray() {
// Clang warns about the mismatched new[]/delete if they occur in the same


@ -100,8 +100,8 @@ musl does not implement mallinfo()/mallinfo2()
/* Define to 1 if you have the <malloc.h> header file. */ /* Define to 1 if you have the <malloc.h> header file. */
#define HAVE_MALLOC_H 1 #define HAVE_MALLOC_H 1
--- a/base/allocator/partition_allocator/shim/allocator_shim_default_dispatch_to_partition_alloc.cc --- a/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_partition_alloc.cc
+++ b/base/allocator/partition_allocator/shim/allocator_shim_default_dispatch_to_partition_alloc.cc +++ b/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_partition_alloc.cc
@@ -717,7 +717,7 @@ @@ -717,7 +717,7 @@
#endif // !BUILDFLAG(IS_APPLE) && !BUILDFLAG(IS_ANDROID) #endif // !BUILDFLAG(IS_APPLE) && !BUILDFLAG(IS_ANDROID)
@ -111,8 +111,8 @@ musl does not implement mallinfo()/mallinfo2()
SHIM_ALWAYS_EXPORT struct mallinfo mallinfo(void) __THROW { SHIM_ALWAYS_EXPORT struct mallinfo mallinfo(void) __THROW {
base::SimplePartitionStatsDumper allocator_dumper; base::SimplePartitionStatsDumper allocator_dumper;
Allocator()->DumpStats("malloc", true, &allocator_dumper); Allocator()->DumpStats("malloc", true, &allocator_dumper);
--- a/base/allocator/partition_allocator/shim/allocator_shim_default_dispatch_to_partition_alloc_unittest.cc --- a/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_partition_alloc_unittest.cc
+++ b/base/allocator/partition_allocator/shim/allocator_shim_default_dispatch_to_partition_alloc_unittest.cc +++ b/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_partition_alloc_unittest.cc
@@ -24,7 +24,7 @@ @@ -24,7 +24,7 @@
#if BUILDFLAG(USE_PARTITION_ALLOC_AS_MALLOC) #if BUILDFLAG(USE_PARTITION_ALLOC_AS_MALLOC)


@ -0,0 +1,14 @@
this optimisation of CLONE_SETTLS is not valid when used like this, and a future
musl clone(3) will return EINVAL on this use
--
--- a/sandbox/linux/services/credentials.cc
+++ b/sandbox/linux/services/credentials.cc
@@ -89,7 +89,7 @@
int clone_flags = CLONE_FS | LINUX_SIGCHLD;
void* tls = nullptr;
-#if (defined(ARCH_CPU_X86_64) || defined(ARCH_CPU_ARM_FAMILY)) && \
+#if 0 && (defined(ARCH_CPU_X86_64) || defined(ARCH_CPU_ARM_FAMILY)) && \
!defined(MEMORY_SANITIZER)
// Use CLONE_VM | CLONE_VFORK as an optimization to avoid copying page tables.
// Since clone writes to the new child's TLS before returning, we must set a


@ -0,0 +1,14 @@
Hard-disable memory tagging on ARM64 - it does exist there but musl is
missing some required interface headers for it, and it's not clear how
to make the partalloc support code for it work.
--- ./base/allocator/partition_allocator/partition_alloc.gni.orig
+++ ./base/allocator/partition_allocator/partition_alloc.gni
@@ -30,7 +30,7 @@
}
has_memory_tagging =
- current_cpu == "arm64" && is_clang && !is_asan && (is_linux || is_android)
+ false
declare_args() {
# Causes all the allocations to be routed via allocator_shim.cc. Usually,


@ -0,0 +1,20 @@
--- a/third_party/perfetto/src/trace_processor/perfetto_sql/engine/created_function.cc
+++ b/third_party/perfetto/src/trace_processor/perfetto_sql/engine/created_function.cc
@@ -107,7 +107,7 @@
// the destructors run correctly for non-trivial members of the
// union.
using Data =
- std::variant<int64_t, double, OwnedString, OwnedBytes, nullptr_t>;
+ std::variant<int64_t, double, OwnedString, OwnedBytes, std::nullptr_t>;
StoredSqlValue(SqlValue value) {
switch (value.type) {
@@ -132,7 +132,7 @@
}
SqlValue AsSqlValue() {
- if (std::holds_alternative<nullptr_t>(data)) {
+ if (std::holds_alternative<std::nullptr_t>(data)) {
return SqlValue();
} else if (std::holds_alternative<int64_t>(data)) {
return SqlValue::Long(std::get<int64_t>(data));


@ -0,0 +1,33 @@
the pvalloc/valloc symbols are obsolete and not implemented in musl
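as a side note, a hedged sketch of what portable code can do instead of the obsolete calls (the tests below are simply compiled out on musl):

    #include <cstdlib>
    #include <unistd.h>
    #if defined(__GLIBC__)
    #include <malloc.h>  // pvalloc() is declared here on glibc
    #endif

    // Page-aligned allocation, rounded up to whole pages.
    void* PageAlignedAlloc(std::size_t size) {
      const std::size_t page = static_cast<std::size_t>(sysconf(_SC_PAGESIZE));
    #if defined(__GLIBC__)
      (void)page;
      return pvalloc(size);  // obsolete, glibc-only; rounds up internally
    #else
      const std::size_t rounded = (size + page - 1) / page * page;
      return std::aligned_alloc(page, rounded);  // C++17; available on musl
    #endif
    }

    int main() {
      void* p = PageAlignedAlloc(67);
      std::free(p);
      return p != nullptr ? 0 : 1;
    }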
--
--- a/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_unittest.cc
+++ b/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_unittest.cc
@@ -375,7 +375,7 @@
ASSERT_GE(aligned_allocs_intercepted_by_alignment[128], 1u);
ASSERT_GE(aligned_allocs_intercepted_by_size[53], 1u);
-#if BUILDFLAG(IS_POSIX) && !BUILDFLAG(IS_ANDROID)
+#if BUILDFLAG(IS_POSIX) && !BUILDFLAG(IS_ANDROID) && defined(__GLIBC__)
void* pvalloc_ptr = pvalloc(67);
ASSERT_NE(nullptr, pvalloc_ptr);
ASSERT_EQ(0u, reinterpret_cast<uintptr_t>(pvalloc_ptr) % kPageSize);
@@ -414,7 +414,7 @@
free(memalign_ptr);
ASSERT_GE(frees_intercepted_by_addr[Hash(memalign_ptr)], 1u);
-#if BUILDFLAG(IS_POSIX) && !BUILDFLAG(IS_ANDROID)
+#if BUILDFLAG(IS_POSIX) && !BUILDFLAG(IS_ANDROID) && defined(__GLIBC__)
free(pvalloc_ptr);
ASSERT_GE(frees_intercepted_by_addr[Hash(pvalloc_ptr)], 1u);
#endif // BUILDFLAG(IS_POSIX) && !BUILDFLAG(IS_ANDROID)
--- a/base/process/memory_unittest.cc
+++ b/base/process/memory_unittest.cc
@@ -359,7 +359,7 @@
#endif // BUILDFLAG(IS_WIN)
#endif // !BUILDFLAG(IS_MAC)
-#if BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS)
+#if (BUILDFLAG(IS_LINUX) && defined(__GLIBC__)) || BUILDFLAG(IS_CHROMEOS)
TEST_F(OutOfMemoryDeathTest, Valloc) {
ASSERT_OOM_DEATH({


@ -1,14 +0,0 @@
back in the day when net_unittests were run, the block arena size was not big
enough for some reason. should look at this again
--
--- a/net/third_party/quiche/src/quiche/quic/core/quic_one_block_arena.h
+++ b/net/third_party/quiche/src/quiche/quic/core/quic_one_block_arena.h
@@ -69,7 +69,7 @@
// QuicConnections currently use around 1KB of polymorphic types which would
// ordinarily be on the heap. Instead, store them inline in an arena.
-using QuicConnectionArena = QuicOneBlockArena<1280>;
+using QuicConnectionArena = QuicOneBlockArena<1504>;
} // namespace quic

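To make the constant bump above concrete: a one-block arena hands out all objects from a single fixed-size inline buffer, so when the objects stored in it outgrow the block, allocation fails and the capacity constant has to be raised (1280 -> 1504 here). The following is a hypothetical, simplified arena in that spirit, not Quiche's QuicOneBlockArena:

#include <cstddef>
#include <cstdint>
#include <new>      // placement new
#include <utility>  // std::forward

// Hypothetical bump allocator over one inline block; destructors are left to
// the caller purely to keep the sketch short.
template <std::size_t BlockSize>
class OneBlockArena {
 public:
  template <typename T, typename... Args>
  T* New(Args&&... args) {
    std::size_t offset = (used_ + alignof(T) - 1) & ~(alignof(T) - 1);
    if (offset + sizeof(T) > BlockSize) return nullptr;  // block exhausted
    used_ = offset + sizeof(T);
    return new (buffer_ + offset) T(std::forward<Args>(args)...);
  }

 private:
  alignas(std::max_align_t) std::uint8_t buffer_[BlockSize] = {};
  std::size_t used_ = 0;
};

int main() {
  OneBlockArena<1504> arena;  // mirrors the raised size in the patch
  struct SendAlarm { std::int64_t deadline_us = 0; };  // stand-in payload type
  SendAlarm* alarm = arena.New<SendAlarm>();
  return alarm != nullptr ? 0 : 1;
}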

@ -0,0 +1,12 @@
needed for push_back on array
--
--- a/net/third_party/quiche/src/quiche/common/quiche_endian.h
+++ b/net/third_party/quiche/src/quiche/common/quiche_endian.h
@@ -6,6 +6,7 @@
#define QUICHE_COMMON_QUICHE_ENDIAN_H_
#include <algorithm>
+#include <array>
#include <cstdint>
#include <type_traits>

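The one-line include above is the usual "include what you use" fix: the header relies on std::array but previously only received it transitively from another standard header, and those transitive includes differ between standard library versions. A trivial standalone example of the same habit:

#include <array>    // needed directly for std::array; transitive includes are not guaranteed
#include <cstddef>
#include <cstdint>

std::array<std::uint8_t, 4> ReverseBytes(std::array<std::uint8_t, 4> in) {
  std::array<std::uint8_t, 4> out{};
  for (std::size_t i = 0; i < in.size(); ++i) out[in.size() - 1 - i] = in[i];
  return out;
}

int main() { return ReverseBytes({1, 2, 3, 4})[0] == 4 ? 0 : 1; }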

@ -0,0 +1,94 @@
Patch-Source: https://gitlab.archlinux.org/archlinux/packaging/packages/chromium/-/blob/c073b0c20935d7eb452732e0f3b2860a96c3db21/random-build-fixes.patch
--
diff --git a/chrome/browser/download/bubble/download_bubble_update_service.cc b/chrome/browser/download/bubble/download_bubble_update_service.cc
index 41b647f7b44..8940c6bb7fc 100644
--- a/chrome/browser/download/bubble/download_bubble_update_service.cc
+++ b/chrome/browser/download/bubble/download_bubble_update_service.cc
@@ -91,7 +91,7 @@ ItemSortKey GetSortKey(const Item& item) {
// Helper to get an iterator to the last element in the cache. The cache
// must not be empty.
template <typename Item>
-SortedItems<Item>::const_iterator GetLastIter(const SortedItems<Item>& cache) {
+typename SortedItems<Item>::const_iterator GetLastIter(const SortedItems<Item>& cache) {
CHECK(!cache.empty());
auto it = cache.end();
return std::prev(it);
@@ -967,9 +967,9 @@ bool DownloadBubbleUpdateService::CacheManager::RemoveItemFromCacheImpl(
}
template <typename Id, typename Item>
-SortedItems<Item>::iterator
+typename SortedItems<Item>::iterator
DownloadBubbleUpdateService::CacheManager::RemoveItemFromCacheByIter(
- SortedItems<Item>::iterator iter,
+ typename SortedItems<Item>::iterator iter,
SortedItems<Item>& cache,
IterMap<Id, Item>& iter_map) {
CHECK(iter != cache.end());
diff --git a/chrome/test/chromedriver/capabilities.cc b/chrome/test/chromedriver/capabilities.cc
index c0708681ebd..98b8494d170 100644
--- a/chrome/test/chromedriver/capabilities.cc
+++ b/chrome/test/chromedriver/capabilities.cc
@@ -355,7 +355,7 @@ Status ParseMobileEmulation(const base::Value& option,
"'version' field of type string");
}
- brands.emplace_back(*brand, *version);
+ brands.emplace_back(BrandVersion{*brand, *version});
}
client_hints.brands = std::move(brands);
@@ -392,7 +392,7 @@ Status ParseMobileEmulation(const base::Value& option,
"a 'version' field of type string");
}
- full_version_list.emplace_back(*brand, *version);
+ full_version_list.emplace_back(BrandVersion{*brand, *version});
}
client_hints.full_version_list = std::move(full_version_list);
diff --git a/components/optimization_guide/core/tflite_model_executor.h b/components/optimization_guide/core/tflite_model_executor.h
index c4f750f4684..b5635f4108b 100644
--- a/components/optimization_guide/core/tflite_model_executor.h
+++ b/components/optimization_guide/core/tflite_model_executor.h
@@ -189,7 +189,7 @@ class TFLiteModelExecutor : public ModelExecutor<OutputType, InputType> {
void SendForBatchExecution(
BatchExecutionCallback callback_on_complete,
base::TimeTicks start_time,
- ModelExecutor<OutputType, InputType>::ConstRefInputVector inputs)
+ typename ModelExecutor<OutputType, InputType>::ConstRefInputVector inputs)
override {
DCHECK(execution_task_runner_->RunsTasksInCurrentSequence());
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
diff --git a/third_party/blink/renderer/core/html/parser/html_document_parser_fastpath.cc b/third_party/blink/renderer/core/html/parser/html_document_parser_fastpath.cc
index 2dc0b304092..a82f255090b 100644
--- a/third_party/blink/renderer/core/html/parser/html_document_parser_fastpath.cc
+++ b/third_party/blink/renderer/core/html/parser/html_document_parser_fastpath.cc
@@ -169,7 +169,7 @@ class HTMLFastPathParser {
using Span = base::span<const Char>;
using USpan = base::span<const UChar>;
// 32 matches that used by HTMLToken::Attribute.
- typedef std::conditional<std::is_same_v<Char, UChar>,
+ typedef typename std::conditional<std::is_same_v<Char, UChar>,
UCharLiteralBuffer<32>,
LCharLiteralBuffer<32>>::type LiteralBufferType;
typedef UCharLiteralBuffer<32> UCharLiteralBufferType;
diff --git a/third_party/blink/renderer/modules/canvas/canvas2d/canvas_style.cc b/third_party/blink/renderer/modules/canvas/canvas2d/canvas_style.cc
index f0b49139147..a308fb67982 100644
--- a/third_party/blink/renderer/modules/canvas/canvas2d/canvas_style.cc
+++ b/third_party/blink/renderer/modules/canvas/canvas2d/canvas_style.cc
@@ -91,12 +91,12 @@ void CanvasStyle::ApplyToFlags(cc::PaintFlags& flags,
case kGradient:
GetCanvasGradient()->GetGradient()->ApplyToFlags(flags, SkMatrix::I(),
ImageDrawOptions());
- flags.setColor(SkColor4f(0.0f, 0.0f, 0.0f, global_alpha));
+ flags.setColor(SkColor4f{0.0f, 0.0f, 0.0f, global_alpha});
break;
case kImagePattern:
GetCanvasPattern()->GetPattern()->ApplyToFlags(
flags, AffineTransformToSkMatrix(GetCanvasPattern()->GetTransform()));
- flags.setColor(SkColor4f(0.0f, 0.0f, 0.0f, global_alpha));
+ flags.setColor(SkColor4f{0.0f, 0.0f, 0.0f, global_alpha});
break;
default:
NOTREACHED();

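Most hunks in the file above add typename (or switch to brace initialisation) so the code compiles on toolchains that have not implemented the C++20 relaxation from P0634: a name such as SortedItems<Item>::const_iterator depends on a template parameter, so those compilers still require the keyword before they will treat it as a type. A standalone illustration patterned on the GetLastIter() hunk (the SortedItems alias here is a stand-in, not the Chromium one):

#include <iterator>  // std::prev
#include <map>
#include <string>

template <typename Item>
using SortedItems = std::map<std::string, Item>;

// The return type depends on the template parameter, so it is a dependent
// name; 'typename' tells the compiler it names a type.
template <typename Item>
typename SortedItems<Item>::const_iterator GetLastIter(const SortedItems<Item>& cache) {
  return std::prev(cache.end());
}

int main() {
  SortedItems<int> cache{{"a", 1}, {"b", 2}};
  return GetLastIter(cache)->second == 2 ? 0 : 1;
}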

@ -1,10 +0,0 @@
--- a/third_party/node/update_npm_deps
+++ b/third_party/node/update_npm_deps
@@ -20,7 +20,6 @@
patch -d node_modules/@types/d3/ -p1 < chromium_d3_types_index.patch
patch -d node_modules/html-minifier/ -p1 < html_minifier.patch
-patch -p1 < typescript.patch
rsync -c --delete -r -q --include-from="npm_include.txt" --exclude-from="npm_exclude.txt" \
--prune-empty-dirs "node_modules/" "node_modules_filtered/"


@ -1,25 +0,0 @@
for some reason this breaks: after a few cycles, an fd returned after close() is
still in the lock array.
so, just don't enforce or wrap anything.
note when testing this again: __close does not exist as a symbol in musl, so a
manual dlsym workaround has to be performed to use this codepath.
--- a/base/files/scoped_file_linux.cc
+++ b/base/files/scoped_file_linux.cc
@@ -77,15 +77,3 @@
}
} // namespace base
-
-extern "C" {
-
-int __close(int);
-
-__attribute__((visibility("default"), noinline)) int close(int fd) {
- if (base::IsFDOwned(fd) && g_is_ownership_enforced)
- CrashOnFdOwnershipViolation();
- return __close(fd);
-}
-
-} // extern "C"

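The removed block relied on glibc's internal __close alias to reach the real close() from inside the wrapper. The note above describes what would be needed on musl instead: resolve the real symbol at runtime with dlsym(RTLD_NEXT, ...). A hedged sketch of that pattern (not the removed Chromium code; the ownership check is elided):

#ifndef _GNU_SOURCE
#define _GNU_SOURCE 1  // for RTLD_NEXT
#endif
#include <dlfcn.h>
#include <unistd.h>

extern "C" {

__attribute__((visibility("default"), noinline)) int close(int fd) {
  using CloseFn = int (*)(int);
  // Resolve the next 'close' in lookup order once; musl exports no __close.
  static CloseFn real_close = reinterpret_cast<CloseFn>(dlsym(RTLD_NEXT, "close"));
  // ... fd-ownership enforcement from the removed wrapper would go here ...
  return real_close ? real_close(fd) : -1;
}

}  // extern "C"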

@ -0,0 +1,46 @@
From ae3ae3711784865bdc38bf119a6182a7b8dae91c Mon Sep 17 00:00:00 2001
From: Matt Jolly <Matt.Jolly@footclan.ninja>
Date: Sun, 17 Sep 2023 16:51:42 +1000
Subject: [PATCH] Add system-zstd
--- a/build/linux/unbundle/replace_gn_files.py
+++ b/build/linux/unbundle/replace_gn_files.py
@@ -74,6 +74,7 @@ REPLACEMENTS = {
#
'woff2': 'third_party/woff2/BUILD.gn',
'zlib': 'third_party/zlib/BUILD.gn',
+ 'zstd': 'third_party/zstd/BUILD.gn',
}
--- /dev/null
+++ b/build/linux/unbundle/zstd.gn
@@ -0,0 +1,25 @@
+import("//build/config/linux/pkg_config.gni")
+import("//build/shim_headers.gni")
+
+pkg_config("system_zstd") {
+ packages = [ "libzstd" ]
+}
+
+shim_headers("zstd_shim") {
+ root_path = "src/lib"
+ headers = [
+ "zdict.h",
+ "zstd.h",
+ "zstd_errors.h",
+ ]
+}
+
+source_set("zstd") {
+ deps = [ ":zstd_shim" ]
+ public_configs = [ ":system_zstd" ]
+}
+
+source_set("decompress") {
+ deps = [ ":zstd_shim" ]
+ public_configs = [ ":system_zstd" ]
+}
--
2.42.0


@ -0,0 +1,68 @@
--- ./third_party/electron_node/BUILD.gn.orig
+++ ./third_party/electron_node/BUILD.gn
@@ -40,6 +40,8 @@
node_release_urlbase = ""
# Allows downstream packagers (eg. Linux distributions) to build Electron against system shared libraries.
+ use_system_ada = false
+ use_system_base64 = false
use_system_cares = false
use_system_nghttp2 = false
use_system_llhttp = false
@@ -48,6 +50,16 @@
if (is_linux) {
import("//build/config/linux/pkg_config.gni")
+ if (use_system_ada) {
+ config("ada") {
+ libs = [ "ada" ]
+ }
+ }
+ if (use_system_base64) {
+ pkg_config("base64") {
+ packages = [ "base64" ]
+ }
+ }
if (use_system_cares) {
pkg_config("cares") {
packages = [ "libcares" ]
@@ -258,8 +270,6 @@
deps = [
":node_js2c_exec",
"deps/googletest:gtest",
- "deps/ada",
- "deps/base64",
"deps/simdutf",
"deps/uvwasi",
"//third_party/zlib",
@@ -267,6 +277,16 @@
"//third_party/brotli:enc",
"//v8:v8_libplatform",
]
+ if (use_system_ada) {
+ configs += [ ":ada" ]
+ } else {
+ deps += [ "deps/ada" ]
+ }
+ if (use_system_base64) {
+ configs += [ ":base64" ]
+ } else {
+ deps += [ "deps/base64" ]
+ }
if (use_system_cares) {
configs += [ ":cares" ]
} else {
--- ./electron/script/generate-config-gypi.py.orig
+++ ./electron/script/generate-config-gypi.py
@@ -62,6 +62,11 @@
# Used by certain versions of node-gyp.
v['build_v8_with_gn'] = 'false'
+ with open(os.path.join(NODE_DIR, 'use_system.txt')) as f:
+ for dep in f.read().strip().split(' '):
+ if v.get(f'node_shared_{dep}') is not None:
+ v[f'node_shared_{dep}'] = 'true'
+
with open(target_file, 'w+') as f:
f.write(pprint.pformat(config, indent=2))


@ -1,113 +0,0 @@
--- a/chrome/browser/process_singleton_posix.cc
+++ b/chrome/browser/process_singleton_posix.cc
@@ -607,7 +607,7 @@
// |reader| is for sending back ACK message.
void HandleMessage(const std::string& current_dir,
const std::vector<std::string>& argv,
- const std::vector<const uint8_t> additional_data,
+ const std::vector<uint8_t> additional_data,
SocketReader* reader);
private:
@@ -664,7 +664,7 @@
void ProcessSingleton::LinuxWatcher::HandleMessage(
const std::string& current_dir,
const std::vector<std::string>& argv,
- const std::vector<const uint8_t> additional_data,
+ const std::vector<uint8_t> additional_data,
SocketReader* reader) {
DCHECK(ui_task_runner_->BelongsToCurrentThread());
DCHECK(reader);
@@ -754,7 +754,7 @@
base::StringToSizeT(tokens[0], &num_args);
std::vector<std::string> command_line(tokens.begin() + 1, tokens.begin() + 1 + num_args);
- std::vector<const uint8_t> additional_data;
+ std::vector<uint8_t> additional_data;
if (tokens.size() >= 3 + num_args) {
size_t additional_data_size;
base::StringToSizeT(tokens[1 + num_args], &additional_data_size);
@@ -763,7 +763,7 @@
std::string(1, kTokenDelimiter));
const uint8_t* additional_data_bits =
reinterpret_cast<const uint8_t*>(remaining_args.c_str());
- additional_data = std::vector<const uint8_t>(
+ additional_data = std::vector<uint8_t>(
additional_data_bits, additional_data_bits + additional_data_size);
}
--- a/chrome/browser/process_singleton.h
+++ b/chrome/browser/process_singleton.h
@@ -102,7 +102,7 @@
using NotificationCallback =
base::RepeatingCallback<bool(const base::CommandLine& command_line,
const base::FilePath& current_directory,
- const std::vector<const uint8_t> additional_data)>;
+ const std::vector<uint8_t> additional_data)>;
#if BUILDFLAG(IS_WIN)
ProcessSingleton(const std::string& program_name,
--- a/chrome/browser/process_singleton_win.cc
+++ b/chrome/browser/process_singleton_win.cc
@@ -81,7 +81,7 @@
bool ParseCommandLine(const COPYDATASTRUCT* cds,
base::CommandLine* parsed_command_line,
base::FilePath* current_directory,
- std::vector<const uint8_t>* parsed_additional_data) {
+ std::vector<uint8_t>* parsed_additional_data) {
// We should have enough room for the shortest command (min_message_size)
// and also be a multiple of wchar_t bytes. The shortest command
// possible is L"START\0\0" (empty command line, current directory,
@@ -163,7 +163,7 @@
msg.substr(fourth_null + 1, fifth_null - fourth_null);
const uint8_t* additional_data_bytes =
reinterpret_cast<const uint8_t*>(additional_data.c_str());
- *parsed_additional_data = std::vector<const uint8_t>(additional_data_bytes,
+ *parsed_additional_data = std::vector<uint8_t>(additional_data_bytes,
additional_data_bytes + additional_data_length);
return true;
@@ -187,7 +187,7 @@
base::CommandLine parsed_command_line(base::CommandLine::NO_PROGRAM);
base::FilePath current_directory;
- std::vector<const uint8_t> additional_data;
+ std::vector<uint8_t> additional_data;
if (!ParseCommandLine(cds, &parsed_command_line, &current_directory, &additional_data)) {
*result = TRUE;
return true;
--- a/electron/shell/browser/api/electron_api_app.cc
+++ b/electron/shell/browser/api/electron_api_app.cc
@@ -519,10 +519,10 @@
const base::RepeatingCallback<
void(const base::CommandLine& command_line,
const base::FilePath& current_directory,
- const std::vector<const uint8_t> additional_data)>& callback,
+ const std::vector<uint8_t> additional_data)>& callback,
const base::CommandLine& cmd,
const base::FilePath& cwd,
- const std::vector<const uint8_t> additional_data) {
+ const std::vector<uint8_t> additional_data) {
// Make sure the callback is called after app gets ready.
if (Browser::Get()->is_ready()) {
callback.Run(cmd, cwd, std::move(additional_data));
@@ -1082,7 +1082,7 @@
void App::OnSecondInstance(const base::CommandLine& cmd,
const base::FilePath& cwd,
- const std::vector<const uint8_t> additional_data) {
+ const std::vector<uint8_t> additional_data) {
v8::Isolate* isolate = JavascriptEnvironment::GetIsolate();
v8::Locker locker(isolate);
v8::HandleScope handle_scope(isolate);
--- a/electron/shell/browser/api/electron_api_app.h
+++ b/electron/shell/browser/api/electron_api_app.h
@@ -195,7 +195,7 @@
std::string GetLocaleCountryCode();
void OnSecondInstance(const base::CommandLine& cmd,
const base::FilePath& cwd,
- const std::vector<const uint8_t> additional_data);
+ const std::vector<uint8_t> additional_data);
bool HasSingleInstanceLock() const;
bool RequestSingleInstanceLock(gin::Arguments* args);
void ReleaseSingleInstanceLock();

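Every hunk above is the same fix: std::vector<const uint8_t> is ill-formed because standard allocators require a cv-unqualified value type, and recent libstdc++/libc++ releases enforce that with a static_assert. Element-level constness is dropped; read-only intent is expressed on the container instead. For example:

#include <cstdint>
#include <vector>

// std::vector<const std::uint8_t> bad;    // rejected: allocator value_type must not be const
std::vector<std::uint8_t> data{1, 2, 3};   // fine

// Read-only access goes on the container reference, not the element type.
std::uint64_t Sum(const std::vector<std::uint8_t>& bytes) {
  std::uint64_t total = 0;
  for (std::uint8_t b : bytes) total += b;
  return total;
}

int main() { return Sum(data) == 6 ? 0 : 1; }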

@ -0,0 +1,11 @@
--- ./buildtools/third_party/libc++/__config_site.orig
+++ ./buildtools/third_party/libc++/__config_site
@@ -18,7 +18,7 @@
/* #undef _LIBCPP_ABI_FORCE_MICROSOFT */
/* #undef _LIBCPP_HAS_NO_THREADS */
/* #undef _LIBCPP_HAS_NO_MONOTONIC_CLOCK */
-/* #undef _LIBCPP_HAS_MUSL_LIBC */
+#define _LIBCPP_HAS_MUSL_LIBC 1
/* #undef _LIBCPP_HAS_THREAD_API_PTHREAD */
/* #undef _LIBCPP_HAS_THREAD_API_EXTERNAL */
/* #undef _LIBCPP_HAS_THREAD_API_WIN32 */


@ -0,0 +1,27 @@
# Contributor: Aiden Grossman <agrossman154@yahoo.com>
# Maintainer: Aiden Grossman <agrossman154@yahoo.com>
pkgname=fdm-materials
pkgver=5.2.2
pkgrel=0
pkgdesc="FDM Material Database"
url="https://github.com/Ultimaker/fdm_materials"
arch="noarch"
license="CC0-1.0"
makedepends="cmake samurai"
options="!check" # no checks provided
source="$pkgname-$pkgver.tar.gz::https://github.com/Ultimaker/fdm_materials/archive/refs/tags/$pkgver.tar.gz"
builddir="$srcdir/fdm_materials-$pkgver"
build() {
cmake -B build -G Ninja \
-DCMAKE_INSTALL_PREFIX=/usr
cmake --build build
}
package() {
DESTDIR="$pkgdir" cmake --install build
}
sha512sums="
73eefec8b7b88af73afc578ffba583480bda30309945b1720d7a1a075bd7ab3279599d53fe83f4c96695f294a5a3e11297abc334ca6cc9db163d4eb0fbdaf0f9 fdm-materials-5.2.2.tar.gz
"


@ -1,20 +0,0 @@
# Contributor: Lauren N. Liberda <lauren@selfisekai.rocks>
# Maintainer: Lauren N. Liberda <lauren@selfisekai.rocks>
pkgname=font-parisienne
pkgver=1
pkgrel=1
pkgdesc="Parisienne is a casual connecting script inspired by a 1960s Bra advertisement"
url="https://fonts.google.com/specimen/Parisienne"
arch="noarch"
license="OFL-1.1"
source="Parisienne-$pkgver.zip::https://fonts.google.com/download?family=Parisienne"
options="!check" # No code to test
builddir="$srcdir/"
package() {
install -Dm644 ./Parisienne-Regular.ttf -t "$pkgdir"/usr/share/fonts/parisienne
}
sha512sums="
a5099b4952b26c31bc1aa363318b4a13a3c156712e58b601534a6530780bc1938420970a50a3a22cbf3f4dff647660b3cafdc4d05a4c86b86a8d21a7bb5efab0 Parisienne-1.zip
"


@ -2,11 +2,12 @@
# Maintainer: Aiden Grossman <agrossman154@yahoo.com>
pkgname=freecad
pkgver=0.20.2
-pkgrel=4
+pkgrel=5
pkgdesc="Free and open source 3D parametric modeler"
url="https://freecadweb.org/"
license="LGPL-2.0-or-later"
-arch="x86_64" # dependency OpenCascade is only x86_64
+arch="" # removed dependency py3-pyside2
+#arch="x86_64" # dependency OpenCascade is only x86_64
depends="
graphviz
hdf5
@ -31,7 +32,7 @@ makedepends="
hdf5-dev
libmedc-dev
libshiboken2-dev
-libtbb-dev
+onetbb-dev
opencascade-dev
py3-pyside2-dev
python3-dev
@ -52,6 +53,8 @@ source="https://github.com/FreeCAD/FreeCAD/archive/$pkgver/freecad-$pkgver.tar.g
no-workaround-spnav.patch
resourceDirectory.patch
tests.patch
+opencascade-7.8.0.patch
+missing-include-cstdint.patch
"
builddir="$srcdir/FreeCAD-$pkgver"
@ -100,4 +103,6 @@ c3acd77dd2bb9a2a23ac354da3b6102effb89c95d675e91421d65486414dfe8cc0188a7212245e0d
15696bdaaf77482f1b5d3806535a8004c8cec7d598d62092d9f0394b4ca9e2ad6cedd77c4b86a83a06324d16678c1c6bbf3a390b807729717a2f513e858afd50 no-workaround-spnav.patch
8ba13b17bad66316757d180c1b9e9e72a24382627eac7c43a2264b3b5101e6e8f701775f2b805ed733f500fbcd8b0e8e422ec58a9ab3d948d613b666157d4c52 resourceDirectory.patch
5db19e0aa2ca1fd21f4c56afc9db54390a799262aaa0a741704c2c304b0068fd6ca1dcc086465e12e9c0cfe06aac750aaf9b8f5f4db324539af4dd3394803ff9 tests.patch
+f933680dea8744e147f38abce389cb7fd0ec3fb3566454fdd5e6ea07b2faaac5fe61aabe1df3bda9f0d7b4fca16055aa2ad700e9cce10d2604ae37b761b68ade opencascade-7.8.0.patch
+fec515cc63830f0e715527c7890173705b24e7d99d225821ec4300104cf3affdee49243bbd4d0a331a902cf04db756a1b8f18f0a17cc71f5757f8b5c73c78ede missing-include-cstdint.patch
"


@ -0,0 +1,11 @@
diff --color -rupN a/src/3rdParty/libE57Format/include/E57Format.h b/src/3rdParty/libE57Format/include/E57Format.h
--- a/src/3rdParty/libE57Format/include/E57Format.h 2022-12-07 03:35:37.000000000 +0100
+++ b/src/3rdParty/libE57Format/include/E57Format.h 2024-02-27 14:06:29.308892531 +0100
@@ -32,6 +32,7 @@
//! @file E57Format.h header file for the E57 API
#include <cfloat>
+#include <cstdint>
#include <memory>
#include <vector>

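Same class of fix as several of the patches above: newer libstdc++ releases no longer pull <cstdint> in transitively, so any header that uses the fixed-width integer types has to include it itself. A trivial example with a hypothetical record type (not the real libE57Format API):

#include <cstdint>  // std::int64_t, std::uint8_t; no longer implied by other standard headers
#include <vector>

struct PointRecord {              // hypothetical, stands in for the libE57Format types
  std::int64_t timestamp_ns = 0;
  std::vector<std::uint8_t> payload;
};

int main() {
  PointRecord r;
  r.payload.push_back(0xFF);
  return r.timestamp_ns == 0 ? 0 : 1;
}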

@ -0,0 +1,877 @@
Fix compilation with OpenCASCADE 7.8.0
Based on https://github.com/FreeCAD/FreeCAD/pull/11909
diff --color -rupN a/cMake/FindOCC.cmake b/cMake/FindOCC.cmake
--- a/cMake/FindOCC.cmake 2022-12-07 03:35:37.000000000 +0100
+++ b/cMake/FindOCC.cmake 2024-02-27 15:00:48.248873883 +0100
@@ -127,8 +127,6 @@ if(OCC_FOUND)
TKG2d
TKG3d
TKMath
- TKIGES
- TKSTL
TKShHealing
TKXSBase
TKBool
@@ -139,10 +137,6 @@ if(OCC_FOUND)
TKGeomBase
TKOffset
TKPrim
- TKSTEPBase
- TKSTEPAttr
- TKSTEP209
- TKSTEP
TKHLR
TKFeat
)
@@ -154,17 +148,19 @@ if(OCC_FOUND)
TKLCAF
TKVCAF
TKCDF
- TKXDESTEP
- TKXDEIGES
TKMeshVS
TKService
TKV3d
)
- if(OCC_VERSION_STRING VERSION_LESS 6.7.3)
- list(APPEND OCC_OCAF_LIBRARIES TKAdvTools)
- elseif(NOT OCC_VERSION_STRING VERSION_LESS 7.5.0)
+ if(NOT OCC_VERSION_STRING VERSION_LESS 7.5.0)
list(APPEND OCC_OCAF_LIBRARIES TKRWMesh)
- endif(OCC_VERSION_STRING VERSION_LESS 6.7.3)
+ endif(NOT OCC_VERSION_STRING VERSION_LESS 7.5.0)
+ if(OCC_VERSION_STRING VERSION_LESS 7.8.0)
+ list(APPEND OCC_LIBRARIES TKIGES TKSTL TKSTEPBase TKSTEPAttr TKSTEP209 TKSTEP)
+ list(APPEND OCC_OCAF_LIBRARIES TKXDESTEP TKXDEIGES)
+ else(OCC_VERSION_STRING VERSION_LESS 7.8.0)
+ list(APPEND OCC_LIBRARIES TKDESTEP TKDEIGES TKDEGLTF TKDESTL)
+ endif(OCC_VERSION_STRING VERSION_LESS 7.8.0)
message(STATUS "-- Found OCE/OpenCASCADE version: ${OCC_VERSION_STRING}")
message(STATUS "-- OCE/OpenCASCADE include directory: ${OCC_INCLUDE_DIR}")
message(STATUS "-- OCE/OpenCASCADE shared libraries directory: ${OCC_LIBRARY_DIR}")
diff --color -rupN a/src/3rdParty/salomesmesh/inc/SMESHDS_DataMapOfShape.hxx b/src/3rdParty/salomesmesh/inc/SMESHDS_DataMapOfShape.hxx
--- a/src/3rdParty/salomesmesh/inc/SMESHDS_DataMapOfShape.hxx 2022-12-07 03:35:37.000000000 +0100
+++ b/src/3rdParty/salomesmesh/inc/SMESHDS_DataMapOfShape.hxx 2024-02-27 15:00:48.248873883 +0100
@@ -29,22 +29,35 @@
#include <TopoDS_Shape.hxx>
+#include <Standard_Version.hxx>
+
/*
* This method needed for instance NCollection_DataMap with TopoDS_Shape as key
*/
+#if OCC_VERSION_HEX >= 0x070800
struct SMESHDS_Hasher
{
- static inline Standard_Boolean IsEqual(const TopoDS_Shape& S1,
- const TopoDS_Shape& S2)
- {
- return S1.IsSame(S2);
+ size_t operator()(const TopoDS_Shape& S) const noexcept {
+ return std::hash<TopoDS_Shape>{}(S);
}
- static inline Standard_Integer HashCode(const TopoDS_Shape& S,
- const Standard_Integer Upper)
- {
- return ::HashCode( S, Upper);
+ size_t operator()(const TopoDS_Shape& S1, const TopoDS_Shape& S2) const noexcept {
+ return S1.IsSame(S2);
}
};
-
+#else
+struct SMESHDS_Hasher
+{
+static inline Standard_Boolean IsEqual(const TopoDS_Shape& S1,
+ const TopoDS_Shape& S2)
+{
+ return S1.IsSame(S2);
+}
+static inline Standard_Integer HashCode(const TopoDS_Shape& S,
+ const Standard_Integer Upper)
+{
+ return ::HashCode( S, Upper);
+}
+};
+#endif
#endif
diff --color -rupN a/src/3rdParty/salomesmesh/inc/SMESH_MeshVSLink.hxx b/src/3rdParty/salomesmesh/inc/SMESH_MeshVSLink.hxx
--- a/src/3rdParty/salomesmesh/inc/SMESH_MeshVSLink.hxx 2022-12-07 03:35:37.000000000 +0100
+++ b/src/3rdParty/salomesmesh/inc/SMESH_MeshVSLink.hxx 2024-02-27 15:00:48.248873883 +0100
@@ -57,9 +57,6 @@
#ifndef _MeshVS_EntityType_HeaderFile
#include <MeshVS_EntityType.hxx>
#endif
-#ifndef _Standard_Address_HeaderFile
-#include <Standard_Address.hxx>
-#endif
#ifndef _TColStd_HArray1OfInteger_HeaderFile
#include <TColStd_HArray1OfInteger.hxx>
#endif
diff --color -rupN a/src/3rdParty/salomesmesh/inc/SMESH_SMESH.hxx b/src/3rdParty/salomesmesh/inc/SMESH_SMESH.hxx
--- a/src/3rdParty/salomesmesh/inc/SMESH_SMESH.hxx 2022-12-07 03:35:37.000000000 +0100
+++ b/src/3rdParty/salomesmesh/inc/SMESH_SMESH.hxx 2024-02-27 15:00:48.248873883 +0100
@@ -37,4 +37,5 @@
#define SMESH_EXPORT
#endif
+#include <Standard_Version.hxx>
#endif
diff --color -rupN a/src/3rdParty/salomesmesh/inc/SMESH_SequenceOfNode.hxx b/src/3rdParty/salomesmesh/inc/SMESH_SequenceOfNode.hxx
--- a/src/3rdParty/salomesmesh/inc/SMESH_SequenceOfNode.hxx 2022-12-07 03:35:37.000000000 +0100
+++ b/src/3rdParty/salomesmesh/inc/SMESH_SequenceOfNode.hxx 2024-02-27 15:00:48.252207183 +0100
@@ -28,10 +28,11 @@
#include "SMESH_SMESH.hxx"
-#include <NCollection_DefineSequence.hxx>
#if OCC_VERSION_HEX >= 0x060703
#include <NCollection_IncAllocator.hxx>
#include <NCollection_Sequence.hxx>
+#else
+#include <NCollection_DefineSequence.hxx>
#endif
typedef const SMDS_MeshNode* SMDS_MeshNodePtr;
diff --color -rupN a/src/3rdParty/salomesmesh/inc/SMESH_TypeDefs.hxx b/src/3rdParty/salomesmesh/inc/SMESH_TypeDefs.hxx
--- a/src/3rdParty/salomesmesh/inc/SMESH_TypeDefs.hxx 2022-12-07 03:35:37.000000000 +0100
+++ b/src/3rdParty/salomesmesh/inc/SMESH_TypeDefs.hxx 2024-02-27 15:00:48.252207183 +0100
@@ -185,11 +185,18 @@ typedef std::vector< UVPtStruct > UVPtSt
// --------------------------------------------------------------------------------
// class SMESH_SequenceOfElemPtr
+#include <Standard_Version.hxx>
+#if OCC_VERSION_HEX >= 0x060703
+#include <NCollection_Sequence.hxx>
+#else
#include <NCollection_DefineSequence.hxx>
+#endif
class SMDS_MeshElement;
typedef const SMDS_MeshElement* SMDS_MeshElementPtr;
+#define DEFINE_SEQUENCE(_ClassName_, _BaseCollection_, TheItemType) \
+typedef NCollection_Sequence<TheItemType > _ClassName_;
DEFINE_SEQUENCE (SMESH_SequenceOfElemPtr, SMESH_BaseCollectionElemPtr, SMDS_MeshElementPtr)
diff --color -rupN a/src/3rdParty/salomesmesh/src/DriverSTL/DriverSTL_R_SMDS_Mesh.cpp b/src/3rdParty/salomesmesh/src/DriverSTL/DriverSTL_R_SMDS_Mesh.cpp
--- a/src/3rdParty/salomesmesh/src/DriverSTL/DriverSTL_R_SMDS_Mesh.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/3rdParty/salomesmesh/src/DriverSTL/DriverSTL_R_SMDS_Mesh.cpp 2024-02-27 15:00:48.252207183 +0100
@@ -33,10 +33,33 @@
#include "SMDS_MeshNode.hxx"
#include "SMESH_File.hxx"
+#include <Standard_Version.hxx>
+
namespace
{
struct Hasher
{
+#if OCC_VERSION_HEX >= 0x070800
+ size_t operator()(const gp_Pnt& point) const noexcept
+ {
+ union
+ {
+ Standard_Real R[3];
+ Standard_Integer I[6];
+ } U;
+
+ point.Coord( U.R[0], U.R[1], U.R[2] );
+ return std::hash<Standard_Integer>{}(U.I[0]/23+U.I[1]/19+U.I[2]/17+U.I[3]/13+U.I[4]/11+U.I[5]/7);
+ }
+
+ size_t operator()(const gp_Pnt& point1, const gp_Pnt& point2) const noexcept
+ {
+ static Standard_Real tab1[3], tab2[3];
+ point1.Coord(tab1[0],tab1[1],tab1[2]);
+ point2.Coord(tab2[0],tab2[1],tab2[2]);
+ return (memcmp(tab1,tab2,sizeof(tab1)) == 0);
+ }
+#else
//=======================================================================
//function : HashCode
//purpose :
@@ -51,9 +74,9 @@ namespace
} U;
point.Coord( U.R[0], U.R[1], U.R[2] );
-
- return ::HashCode(U.I[0]/23+U.I[1]/19+U.I[2]/17+U.I[3]/13+U.I[4]/11+U.I[5]/7,Upper);
+ return std::hash<Standard_Integer>{}(U.I[0]/23+U.I[1]/19+U.I[2]/17+U.I[3]/13+U.I[4]/11+U.I[5]/7);
}
+
//=======================================================================
//function : IsEqual
//purpose :
@@ -66,7 +89,9 @@ namespace
point2.Coord(tab2[0],tab2[1],tab2[2]);
return (memcmp(tab1,tab2,sizeof(tab1)) == 0);
}
+#endif
};
+
typedef NCollection_DataMap<gp_Pnt,SMDS_MeshNode*,Hasher> TDataMapOfPntNodePtr;
const int HEADER_SIZE = 84;
diff --color -rupN a/src/3rdParty/salomesmesh/src/StdMeshers/StdMeshers_Quadrangle_2D.cpp b/src/3rdParty/salomesmesh/src/StdMeshers/StdMeshers_Quadrangle_2D.cpp
--- a/src/3rdParty/salomesmesh/src/StdMeshers/StdMeshers_Quadrangle_2D.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/3rdParty/salomesmesh/src/StdMeshers/StdMeshers_Quadrangle_2D.cpp 2024-02-27 15:00:48.252207183 +0100
@@ -48,7 +48,6 @@
#include <Bnd_Box.hxx>
#include <GeomAPI_ProjectPointOnSurf.hxx>
#include <Geom_Surface.hxx>
-#include <NCollection_DefineArray2.hxx>
#include <Precision.hxx>
#include <Standard_Real.hxx>
#include <TColStd_SequenceOfInteger.hxx>
diff --color -rupN a/src/Mod/Drawing/App/PreCompiled.h b/src/Mod/Drawing/App/PreCompiled.h
--- a/src/Mod/Drawing/App/PreCompiled.h 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Drawing/App/PreCompiled.h 2024-02-27 15:00:48.252207183 +0100
@@ -70,7 +70,6 @@
#include <Standard_LicenseError.hxx>
#include <Standard_LicenseNotFound.hxx>
#include <Standard_Macro.hxx>
-#include <Standard_math.hxx>
#include <Standard_MultiplyDefined.hxx>
#include <Standard_NegativeValue.hxx>
#include <Standard_NoMoreObject.hxx>
diff --color -rupN a/src/Mod/Drawing/Gui/TaskDialog.cpp b/src/Mod/Drawing/Gui/TaskDialog.cpp
--- a/src/Mod/Drawing/Gui/TaskDialog.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Drawing/Gui/TaskDialog.cpp 2024-02-27 15:00:48.252207183 +0100
@@ -29,7 +29,6 @@
#endif
-#include <Standard_math.hxx>
#include "TaskDialog.h"
#include <Gui/Application.h>
#include <Gui/Command.h>
diff --color -rupN a/src/Mod/Fem/Gui/PreCompiled.h b/src/Mod/Fem/Gui/PreCompiled.h
--- a/src/Mod/Fem/Gui/PreCompiled.h 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Fem/Gui/PreCompiled.h 2024-02-27 15:00:48.252207183 +0100
@@ -63,7 +63,6 @@
#endif
// OCC
-#include <Standard_math.hxx>
#include <Precision.hxx>
#include <TopoDS.hxx>
#include <BRepAdaptor_Surface.hxx>
diff --color -rupN a/src/Mod/Fem/Gui/TaskCreateNodeSet.cpp b/src/Mod/Fem/Gui/TaskCreateNodeSet.cpp
--- a/src/Mod/Fem/Gui/TaskCreateNodeSet.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Fem/Gui/TaskCreateNodeSet.cpp 2024-02-27 15:00:48.252207183 +0100
@@ -28,7 +28,6 @@
# include <SMESH_Mesh.hxx>
# include <SMESHDS_Mesh.hxx>
-# include <Standard_math.hxx>
#endif
#include <Base/Console.h>
diff --color -rupN a/src/Mod/Import/App/ImportOCAF.cpp b/src/Mod/Import/App/ImportOCAF.cpp
--- a/src/Mod/Import/App/ImportOCAF.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Import/App/ImportOCAF.cpp 2024-02-27 15:00:48.252207183 +0100
@@ -165,7 +165,11 @@ void ImportOCAF::loadShapes(const TDF_La
std::vector<App::DocumentObject *> localValue;
if (aShapeTool->GetShape(label,aShape)) {
+#if OCC_VERSION_HEX >= 0x070800
+ hash = std::hash<TopoDS_Shape>{}(aShape);
+#else
hash = aShape.HashCode(HashUpper);
+#endif
}
Handle(TDataStd_Name) name;
@@ -235,7 +239,11 @@ void ImportOCAF::loadShapes(const TDF_La
if (isRef || myRefShapes.find(hash) == myRefShapes.end()) {
TopoDS_Shape aShape;
if (isRef && aShapeTool->GetShape(label, aShape))
+#if OCC_VERSION_HEX >= 0x070800
+ myRefShapes.insert(std::hash<TopoDS_Shape>{}(aShape));
+#else
myRefShapes.insert(aShape.HashCode(HashUpper));
+#endif
if (aShapeTool->IsSimpleShape(label) && (isRef || aShapeTool->IsFree(label))) {
if (!asm_name.empty())
@@ -565,7 +573,11 @@ void ImportXCAF::createShape(const TopoD
part->Label.setValue(default_name);
part->Shape.setValue(shape);
std::map<Standard_Integer, Quantity_ColorRGBA>::const_iterator jt;
+#if OCC_VERSION_HEX >= 0x070800
+ jt = myColorMap.find(std::hash<TopoDS_Shape>{}(shape));
+#else
jt = myColorMap.find(shape.HashCode(INT_MAX));
+#endif
App::Color partColor(0.8f,0.8f,0.8f);
#if 0//TODO
@@ -586,7 +598,11 @@ void ImportXCAF::createShape(const TopoD
// set label name if defined
if (setname && !myNameMap.empty()) {
std::map<Standard_Integer, std::string>::const_iterator jt;
+#if OCC_VERSION_HEX >= 0x070800
+ jt = myNameMap.find(std::hash<TopoDS_Shape>{}(shape));
+#else
jt = myNameMap.find(shape.HashCode(INT_MAX));
+#endif
if (jt != myNameMap.end()) {
part->Label.setValue(jt->second);
}
@@ -606,7 +622,11 @@ void ImportXCAF::createShape(const TopoD
faceColors.resize(faces.Extent(), partColor);
xp.Init(shape,TopAbs_FACE);
while (xp.More()) {
+#if OCC_VERSION_HEX >= 0x070800
+ jt = myColorMap.find(std::hash<TopoDS_Shape>{}(xp.Current()));
+#else
jt = myColorMap.find(xp.Current().HashCode(INT_MAX));
+#endif
if (jt != myColorMap.end()) {
int index = faces.FindIndex(xp.Current());
faceColors[index-1] = convertColor(jt->second);
@@ -641,23 +661,51 @@ void ImportXCAF::loadShapes(const TDF_La
// add the shapes
TopExp_Explorer xp;
for (xp.Init(aShape, TopAbs_SOLID); xp.More(); xp.Next(), ctSolids++)
+#if OCC_VERSION_HEX >= 0x070800
+ this->mySolids[std::hash<TopoDS_Shape>{}(xp.Current())] = (xp.Current());
+#else
this->mySolids[xp.Current().HashCode(INT_MAX)] = (xp.Current());
+#endif
for (xp.Init(aShape, TopAbs_SHELL, TopAbs_SOLID); xp.More(); xp.Next(), ctShells++)
+#if OCC_VERSION_HEX >= 0x070800
+ this->myShells[std::hash<TopoDS_Shape>{}(xp.Current())] = (xp.Current());
+#else
this->myShells[xp.Current().HashCode(INT_MAX)] = (xp.Current());
+#endif
// if no solids and no shells were found then go for compounds
if (ctSolids == 0 && ctShells == 0) {
for (xp.Init(aShape, TopAbs_COMPOUND); xp.More(); xp.Next(), ctComps++)
+#if OCC_VERSION_HEX >= 0x070800
+ this->myCompds[std::hash<TopoDS_Shape>{}(xp.Current())] = (xp.Current());
+#else
this->myCompds[xp.Current().HashCode(INT_MAX)] = (xp.Current());
+#endif
}
if (ctComps == 0) {
for (xp.Init(aShape, TopAbs_FACE, TopAbs_SHELL); xp.More(); xp.Next())
+#if OCC_VERSION_HEX >= 0x070800
+ this->myShapes[std::hash<TopoDS_Shape>{}(xp.Current())] = (xp.Current());
+#else
this->myShapes[xp.Current().HashCode(INT_MAX)] = (xp.Current());
+#endif
for (xp.Init(aShape, TopAbs_WIRE, TopAbs_FACE); xp.More(); xp.Next())
+#if OCC_VERSION_HEX >= 0x070800
+ this->myShapes[std::hash<TopoDS_Shape>{}(xp.Current())] = (xp.Current());
+#else
this->myShapes[xp.Current().HashCode(INT_MAX)] = (xp.Current());
+#endif
for (xp.Init(aShape, TopAbs_EDGE, TopAbs_WIRE); xp.More(); xp.Next())
+#if OCC_VERSION_HEX >= 0x070800
+ this->myShapes[std::hash<TopoDS_Shape>{}(xp.Current())] = (xp.Current());
+#else
this->myShapes[xp.Current().HashCode(INT_MAX)] = (xp.Current());
+#endif
for (xp.Init(aShape, TopAbs_VERTEX, TopAbs_EDGE); xp.More(); xp.Next())
+#if OCC_VERSION_HEX >= 0x070800
+ this->myShapes[std::hash<TopoDS_Shape>{}(xp.Current())] = (xp.Current());
+#else
this->myShapes[xp.Current().HashCode(INT_MAX)] = (xp.Current());
+#endif
}
}
@@ -667,7 +715,11 @@ void ImportXCAF::loadShapes(const TDF_La
hColors->GetColor(label, XCAFDoc_ColorSurf, col) ||
hColors->GetColor(label, XCAFDoc_ColorCurv, col)) {
// add defined color
+#if OCC_VERSION_HEX >= 0x070800
+ myColorMap[std::hash<TopoDS_Shape>{}(aShape)] = col;
+#else
myColorMap[aShape.HashCode(INT_MAX)] = col;
+#endif
}
else {
// http://www.opencascade.org/org/forum/thread_17107/
@@ -677,7 +729,11 @@ void ImportXCAF::loadShapes(const TDF_La
hColors->GetColor(it.Value(), XCAFDoc_ColorSurf, col) ||
hColors->GetColor(it.Value(), XCAFDoc_ColorCurv, col)) {
// add defined color
+#if OCC_VERSION_HEX >= 0x070800
+ myColorMap[std::hash<TopoDS_Shape>{}(it.Value())] = col;
+#else
myColorMap[it.Value().HashCode(INT_MAX)] = col;
+#endif
}
}
}
@@ -690,7 +746,11 @@ void ImportXCAF::loadShapes(const TDF_La
extstr.ToUTF8CString(str);
std::string labelName(str);
if (!labelName.empty())
+#if OCC_VERSION_HEX >= 0x070800
+ myNameMap[std::hash<TopoDS_Shape>{}(aShape)] = labelName;
+#else
myNameMap[aShape.HashCode(INT_MAX)] = labelName;
+#endif
delete [] str;
}
diff --color -rupN a/src/Mod/Import/App/ImportOCAF.h b/src/Mod/Import/App/ImportOCAF.h
--- a/src/Mod/Import/App/ImportOCAF.h 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Import/App/ImportOCAF.h 2024-02-27 15:00:48.252207183 +0100
@@ -29,7 +29,6 @@
#include <XCAFDoc_ShapeTool.hxx>
#include <Quantity_ColorRGBA.hxx>
#include <TopoDS_Shape.hxx>
-#include <TDF_LabelMapHasher.hxx>
#include <climits>
#include <string>
#include <set>
diff --color -rupN a/src/Mod/Import/App/ImportOCAF2.h b/src/Mod/Import/App/ImportOCAF2.h
--- a/src/Mod/Import/App/ImportOCAF2.h 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Import/App/ImportOCAF2.h 2024-02-27 15:06:12.358890276 +0100
@@ -27,7 +27,6 @@
#include <XCAFDoc_ColorTool.hxx>
#include <XCAFDoc_ShapeTool.hxx>
#include <TopoDS_Shape.hxx>
-#include <TDF_LabelMapHasher.hxx>
#include <climits>
#include <string>
#include <set>
@@ -57,13 +56,21 @@ namespace Import {
struct ShapeHasher {
std::size_t operator()(const TopoDS_Shape &s) const {
+#if OCC_VERSION_HEX >= 0x070800
+ return std::hash<TopoDS_Shape>{}(s);
+#else
return s.HashCode(INT_MAX);
+#endif
}
};
struct LabelHasher {
std::size_t operator()(const TDF_Label &l) const {
+#if OCC_VERSION_HEX >= 0x070800
+ return std::hash<TDF_Label> {}(l);
+#else
return TDF_LabelMapHasher::HashCode(l,INT_MAX);
+#endif
}
};
diff --color -rupN a/src/Mod/MeshPart/App/CurveProjector.h b/src/Mod/MeshPart/App/CurveProjector.h
--- a/src/Mod/MeshPart/App/CurveProjector.h 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/MeshPart/App/CurveProjector.h 2024-02-27 15:00:48.252207183 +0100
@@ -20,6 +20,7 @@
* *
***************************************************************************/
+#include <Standard_Version.hxx>
#ifndef _CurveProjector_h_
#define _CurveProjector_h_
@@ -64,8 +65,13 @@ public:
template<class T>
struct TopoDSLess {
- bool operator()(const T& x, const T& y) const {
- return x.HashCode(INT_MAX-1) < y.HashCode(INT_MAX-1);
+ bool operator()(const T& x, const T& y) const {
+#if OCC_VERSION_HEX >= 0x070800
+ std::hash<T> hasher;
+ return hasher(x) < hasher(y);
+#else
+ return x.HashCode(INT_MAX-1) < y.HashCode(INT_MAX-1);
+#endif
}
};
diff --color -rupN a/src/Mod/MeshPart/App/PreCompiled.h b/src/Mod/MeshPart/App/PreCompiled.h
--- a/src/Mod/MeshPart/App/PreCompiled.h 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/MeshPart/App/PreCompiled.h 2024-02-27 15:00:48.252207183 +0100
@@ -76,7 +76,6 @@
#include <Standard_LicenseError.hxx>
#include <Standard_LicenseNotFound.hxx>
#include <Standard_Macro.hxx>
-#include <Standard_math.hxx>
#include <Standard_MultiplyDefined.hxx>
#include <Standard_NegativeValue.hxx>
#include <Standard_NoMoreObject.hxx>
diff --color -rupN a/src/Mod/MeshPart/Gui/CrossSections.cpp b/src/Mod/MeshPart/Gui/CrossSections.cpp
--- a/src/Mod/MeshPart/Gui/CrossSections.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/MeshPart/Gui/CrossSections.cpp 2024-02-27 15:00:48.252207183 +0100
@@ -24,7 +24,6 @@
#include "PreCompiled.h"
#ifndef _PreComp_
# include <sstream>
-# include <Standard_math.hxx>
# include <BRep_Builder.hxx>
# include <BRepBuilderAPI_MakePolygon.hxx>
# include <TopoDS.hxx>
diff --color -rupN a/src/Mod/Part/App/ImportStep.cpp b/src/Mod/Part/App/ImportStep.cpp
--- a/src/Mod/Part/App/ImportStep.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Part/App/ImportStep.cpp 2024-02-27 15:00:48.255540482 +0100
@@ -167,7 +167,12 @@ int Part::ImportStepParts(App::Document
// This is a trick to access the GUI via Python and set the color property
// of the associated view provider. If no GUI is up an exception is thrown
// and cleared immediately
+#if OCC_VERSION_HEX >= 0x070800
+ std::hash<TopoDS_Solid> hasher;
+ std::map<int, Quantity_Color>::iterator it = hash_col.find(hasher(aSolid));
+#else
std::map<int, Quantity_Color>::iterator it = hash_col.find(aSolid.HashCode(INT_MAX));
+#endif
if (it != hash_col.end()) {
try {
Py::Object obj(pcFeature->getPyObject(), true);
diff --color -rupN a/src/Mod/Part/App/OCCError.h b/src/Mod/Part/App/OCCError.h
--- a/src/Mod/Part/App/OCCError.h 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Part/App/OCCError.h 2024-02-27 15:00:48.255540482 +0100
@@ -50,7 +50,6 @@
# include <Standard_Overflow.hxx>
# include <Standard_ProgramError.hxx>
# include <Standard_RangeError.hxx>
-# include <Standard_TooManyUsers.hxx>
# include <Standard_TypeMismatch.hxx>
# include <Standard_Underflow.hxx>
diff --color -rupN a/src/Mod/Part/App/OpenCascadeAll.h b/src/Mod/Part/App/OpenCascadeAll.h
--- a/src/Mod/Part/App/OpenCascadeAll.h 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Part/App/OpenCascadeAll.h 2024-02-27 15:00:48.255540482 +0100
@@ -49,7 +49,6 @@
#include <Standard_LicenseError.hxx>
#include <Standard_LicenseNotFound.hxx>
#include <Standard_Macro.hxx>
-#include <Standard_math.hxx>
#include <Standard_MultiplyDefined.hxx>
#include <Standard_NegativeValue.hxx>
#include <Standard_NoMoreObject.hxx>
diff --color -rupN a/src/Mod/Part/App/TopoShapePyImp.cpp b/src/Mod/Part/App/TopoShapePyImp.cpp
--- a/src/Mod/Part/App/TopoShapePyImp.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Part/App/TopoShapePyImp.cpp 2024-02-27 15:00:48.255540482 +0100
@@ -1321,7 +1321,11 @@ PyObject* TopoShapePy::ancestorsOfType(
TopTools_ListIteratorOfListOfShape it(ancestors);
for (; it.More(); it.Next()) {
// make sure to avoid duplicates
+#if OCC_VERSION_HEX >= 0x070800
+ const size_t code = std::hash<TopoDS_Shape>{}(static_cast<TopoDS_Shape>(it.Value()));
+#else
Standard_Integer code = it.Value().HashCode(INT_MAX);
+#endif
if (hashes.find(code) == hashes.end()) {
list.append(shape2pyshape(it.Value()));
hashes.insert(code);
@@ -1943,7 +1947,11 @@ PyObject* TopoShapePy::hashCode(PyObject
if (!PyArg_ParseTuple(args, "|i",&upper))
return nullptr;
+#if OCC_VERSION_HEX >= 0x070800
+ int hc = std::hash<TopoDS_Shape>{}(getTopoShapePtr()->getShape());
+#else
int hc = getTopoShapePtr()->getShape().HashCode(upper);
+#endif
return Py_BuildValue("i", hc);
}
diff --color -rupN a/src/Mod/Part/Gui/AppPartGui.cpp b/src/Mod/Part/Gui/AppPartGui.cpp
--- a/src/Mod/Part/Gui/AppPartGui.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Part/Gui/AppPartGui.cpp 2024-02-27 15:00:48.255540482 +0100
@@ -11,9 +11,6 @@
#include "PreCompiled.h"
-#ifndef _PreComp_
-# include <Standard_math.hxx>
-#endif
#include <Base/Console.h>
#include <Base/Interpreter.h>
diff --color -rupN a/src/Mod/Part/Gui/Command.cpp b/src/Mod/Part/Gui/Command.cpp
--- a/src/Mod/Part/Gui/Command.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Part/Gui/Command.cpp 2024-02-27 15:00:48.255540482 +0100
@@ -26,7 +26,6 @@
# include <QFileInfo>
# include <QPointer>
# include <QString>
-# include <Standard_math.hxx>
# include <Standard_Version.hxx>
# include <TopExp_Explorer.hxx>
# include <TopoDS_Shape.hxx>
diff --color -rupN a/src/Mod/Part/Gui/CommandSimple.cpp b/src/Mod/Part/Gui/CommandSimple.cpp
--- a/src/Mod/Part/Gui/CommandSimple.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Part/Gui/CommandSimple.cpp 2024-02-27 15:00:48.255540482 +0100
@@ -22,9 +22,6 @@
#include "PreCompiled.h"
-#ifndef _PreComp_
-# include <Standard_math.hxx>
-#endif
#include <App/Document.h>
#include <App/DocumentObject.h>
diff --color -rupN a/src/Mod/Part/Gui/CrossSections.cpp b/src/Mod/Part/Gui/CrossSections.cpp
--- a/src/Mod/Part/Gui/CrossSections.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Part/Gui/CrossSections.cpp 2024-02-27 15:00:48.255540482 +0100
@@ -23,7 +23,6 @@
#include "PreCompiled.h"
#ifndef _PreComp_
-# include <Standard_math.hxx>
# include <BRep_Builder.hxx>
# include <BRepAlgoAPI_Section.hxx>
# include <BRepBuilderAPI_MakeWire.hxx>
diff --color -rupN a/src/Mod/Part/Gui/ViewProvider2DObject.cpp b/src/Mod/Part/Gui/ViewProvider2DObject.cpp
--- a/src/Mod/Part/Gui/ViewProvider2DObject.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Part/Gui/ViewProvider2DObject.cpp 2024-02-27 15:00:48.255540482 +0100
@@ -26,8 +26,6 @@
#ifndef _PreComp_
# include <cfloat>
-# include <Standard_math.hxx>
-
# include <Inventor/nodes/SoAnnotation.h>
# include <Inventor/nodes/SoBaseColor.h>
# include <Inventor/nodes/SoDepthBuffer.h>
diff --color -rupN a/src/Mod/Part/Gui/ViewProviderExt.cpp b/src/Mod/Part/Gui/ViewProviderExt.cpp
--- a/src/Mod/Part/Gui/ViewProviderExt.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Part/Gui/ViewProviderExt.cpp 2024-02-27 15:00:48.255540482 +0100
@@ -978,7 +978,11 @@ void ViewProviderPartExt::updateVisual()
TopExp_Explorer xp;
for (xp.Init(faceMap(i),TopAbs_EDGE);xp.More();xp.Next())
+#if OCC_VERSION_HEX >= 0x070800
+ faceEdges.insert(std::hash<TopoDS_Shape>{}(xp.Current()));
+#else
faceEdges.insert(xp.Current().HashCode(INT_MAX));
+#endif
numFaces++;
}
@@ -1006,7 +1010,11 @@ void ViewProviderPartExt::updateVisual()
// So, we have to store the hashes of the edges associated to a face.
// If the hash of a given edge is not in this list we know it's really
// a free edge.
+#if OCC_VERSION_HEX >= 0x070800
+ int hash = std::hash<TopoDS_Shape>{}(aEdge);
+#else
int hash = aEdge.HashCode(INT_MAX);
+#endif
if (faceEdges.find(hash) == faceEdges.end()) {
Handle(Poly_Polygon3D) aPoly = Part::Tools::polygonOfEdge(aEdge, aLoc);
if (!aPoly.IsNull()) {
@@ -1205,7 +1213,11 @@ void ViewProviderPartExt::updateVisual()
TopLoc_Location aLoc;
// handling of the free edge that are not associated to a face
+#if OCC_VERSION_HEX >= 0x070800
+ int hash = std::hash<TopoDS_Shape>{}(aEdge);
+#else
int hash = aEdge.HashCode(INT_MAX);
+#endif
if (faceEdges.find(hash) == faceEdges.end()) {
Handle(Poly_Polygon3D) aPoly = Part::Tools::polygonOfEdge(aEdge, aLoc);
if (!aPoly.IsNull()) {
diff --color -rupN a/src/Mod/Part/Gui/ViewProviderExt.h b/src/Mod/Part/Gui/ViewProviderExt.h
--- a/src/Mod/Part/Gui/ViewProviderExt.h 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Part/Gui/ViewProviderExt.h 2024-02-27 15:00:48.255540482 +0100
@@ -24,7 +24,6 @@
#ifndef PARTGUI_VIEWPROVIDERPARTEXT_H
#define PARTGUI_VIEWPROVIDERPARTEXT_H
-#include <Standard_math.hxx>
#include <Standard_Boolean.hxx>
#include <TopoDS_Shape.hxx>
#include <TopoDS_Face.hxx>
diff --color -rupN a/src/Mod/Part/Gui/ViewProviderMirror.cpp b/src/Mod/Part/Gui/ViewProviderMirror.cpp
--- a/src/Mod/Part/Gui/ViewProviderMirror.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Part/Gui/ViewProviderMirror.cpp 2024-02-27 15:00:48.255540482 +0100
@@ -27,7 +27,6 @@
# include <QAction>
# include <QMenu>
# include <QTimer>
-# include <Standard_math.hxx>
# include <TopExp.hxx>
# include <TopTools_IndexedMapOfShape.hxx>
# include <TopTools_ListOfShape.hxx>
diff --color -rupN a/src/Mod/Part/Gui/ViewProviderPython.cpp b/src/Mod/Part/Gui/ViewProviderPython.cpp
--- a/src/Mod/Part/Gui/ViewProviderPython.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Part/Gui/ViewProviderPython.cpp 2024-02-27 15:00:48.255540482 +0100
@@ -23,7 +23,6 @@
#include "PreCompiled.h"
-#include <Standard_math.hxx>
#ifndef _PreComp_
# include <Inventor/nodes/SoSeparator.h>
#endif
diff --color -rupN a/src/Mod/Part/Gui/ViewProviderReference.h b/src/Mod/Part/Gui/ViewProviderReference.h
--- a/src/Mod/Part/Gui/ViewProviderReference.h 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Part/Gui/ViewProviderReference.h 2024-02-27 15:00:48.255540482 +0100
@@ -24,7 +24,6 @@
#ifndef PARTGUI_ViewProviderPartReference_H
#define PARTGUI_ViewProviderPartReference_H
-#include <Standard_math.hxx>
#include <Standard_Boolean.hxx>
#include <TopoDS_Shape.hxx>
#include <Gui/ViewProviderGeometryObject.h>
diff --color -rupN a/src/Mod/PartDesign/Gui/PreCompiled.h b/src/Mod/PartDesign/Gui/PreCompiled.h
--- a/src/Mod/PartDesign/Gui/PreCompiled.h 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/PartDesign/Gui/PreCompiled.h 2024-02-27 15:00:48.255540482 +0100
@@ -44,7 +44,6 @@
#include <boost/bind/bind.hpp>
// OCC
-#include <Standard_math.hxx>
#include <Standard_Version.hxx>
#include <Bnd_Box.hxx>
#include <BRepBndLib.hxx>
diff --color -rupN a/src/Mod/Path/App/Voronoi.cpp b/src/Mod/Path/App/Voronoi.cpp
--- a/src/Mod/Path/App/Voronoi.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Path/App/Voronoi.cpp 2024-02-27 15:00:48.255540482 +0100
@@ -23,10 +23,6 @@
#include "PreCompiled.h"
-#ifndef _PreComp_
-# include <Standard_math.hxx>
-#endif
-
#include <Base/Vector3D.h>
#include "Voronoi.h"
diff --color -rupN a/src/Mod/Path/Gui/PreCompiled.h b/src/Mod/Path/Gui/PreCompiled.h
--- a/src/Mod/Path/Gui/PreCompiled.h 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Path/Gui/PreCompiled.h 2024-02-27 15:00:48.258873781 +0100
@@ -38,8 +38,6 @@
# define PathGuiExport
#endif
-#include <Standard_math.hxx>
-
#ifdef _MSC_VER
# pragma warning( disable : 4273 )
#endif
diff --color -rupN a/src/Mod/Robot/Gui/PreCompiled.h b/src/Mod/Robot/Gui/PreCompiled.h
--- a/src/Mod/Robot/Gui/PreCompiled.h 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Robot/Gui/PreCompiled.h 2024-02-27 15:00:48.258873781 +0100
@@ -39,8 +39,6 @@
# define RobotGuiExport
#endif
-#include <Standard_math.hxx>
-
#ifdef _MSC_VER
# pragma warning(disable : 4005)
# pragma warning(disable : 4273)
diff --color -rupN a/src/Mod/Sandbox/Gui/AppSandboxGui.cpp b/src/Mod/Sandbox/Gui/AppSandboxGui.cpp
--- a/src/Mod/Sandbox/Gui/AppSandboxGui.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Sandbox/Gui/AppSandboxGui.cpp 2024-02-27 15:00:48.258873781 +0100
@@ -24,7 +24,6 @@
#include "PreCompiled.h"
#ifndef _PreComp_
# include <Python.h>
-# include <Standard_math.hxx>
# include <Inventor/nodes/SoLineSet.h>
# include <Inventor/nodes/SoBaseColor.h>
# include <Inventor/nodes/SoSeparator.h>
diff --color -rupN a/src/Mod/Sketcher/App/SketchAnalysis.cpp b/src/Mod/Sketcher/App/SketchAnalysis.cpp
--- a/src/Mod/Sketcher/App/SketchAnalysis.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Sketcher/App/SketchAnalysis.cpp 2024-02-27 15:00:48.258873781 +0100
@@ -25,7 +25,6 @@
#include "PreCompiled.h"
#ifndef _PreComp_
-# include <Standard_math.hxx>
# include <BRep_Tool.hxx>
# include <gp_Pnt.hxx>
# include <Precision.hxx>
diff --color -rupN a/src/Mod/Sketcher/Gui/DrawSketchHandler.cpp b/src/Mod/Sketcher/Gui/DrawSketchHandler.cpp
--- a/src/Mod/Sketcher/Gui/DrawSketchHandler.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Sketcher/Gui/DrawSketchHandler.cpp 2024-02-27 15:00:48.258873781 +0100
@@ -24,7 +24,6 @@
#include "PreCompiled.h"
#ifndef _PreComp_
-# include <Standard_math.hxx>
# include <Inventor/nodes/SoTranslation.h>
# include <Inventor/nodes/SoText2.h>
# include <Inventor/nodes/SoFont.h>
diff --color -rupN a/src/Mod/Sketcher/Gui/EditDatumDialog.cpp b/src/Mod/Sketcher/Gui/EditDatumDialog.cpp
--- a/src/Mod/Sketcher/Gui/EditDatumDialog.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Sketcher/Gui/EditDatumDialog.cpp 2024-02-27 15:00:48.258873781 +0100
@@ -23,7 +23,6 @@
#include "PreCompiled.h"
#ifndef _PreComp_
-# include <Standard_math.hxx>
/// Qt Include Files
# include <QApplication>
# include <QDialog>
diff --color -rupN a/src/Mod/Sketcher/Gui/TaskSketcherValidation.cpp b/src/Mod/Sketcher/Gui/TaskSketcherValidation.cpp
--- a/src/Mod/Sketcher/Gui/TaskSketcherValidation.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Sketcher/Gui/TaskSketcherValidation.cpp 2024-02-27 15:00:48.258873781 +0100
@@ -24,7 +24,6 @@
#include "PreCompiled.h"
#ifndef _PreComp_
-# include <Standard_math.hxx>
# include <QDoubleValidator>
# include <QLocale>
# include <QMessageBox>
diff --color -rupN a/src/Mod/Sketcher/Gui/ViewProviderPython.cpp b/src/Mod/Sketcher/Gui/ViewProviderPython.cpp
--- a/src/Mod/Sketcher/Gui/ViewProviderPython.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Sketcher/Gui/ViewProviderPython.cpp 2024-02-27 15:00:48.258873781 +0100
@@ -23,7 +23,6 @@
#include "PreCompiled.h"
-#include <Standard_math.hxx>
#ifndef _PreComp_
# include <Inventor/nodes/SoSeparator.h>
#endif
diff --color -rupN a/src/Mod/Sketcher/Gui/ViewProviderSketch.cpp b/src/Mod/Sketcher/Gui/ViewProviderSketch.cpp
--- a/src/Mod/Sketcher/Gui/ViewProviderSketch.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Sketcher/Gui/ViewProviderSketch.cpp 2024-02-27 15:00:48.258873781 +0100
@@ -24,7 +24,6 @@
#include "PreCompiled.h"
#ifndef _PreComp_
-# include <Standard_math.hxx>
# include <Inventor/actions/SoGetBoundingBoxAction.h>
# include <Inventor/SbBox3f.h>
diff --color -rupN a/src/Mod/Surface/Gui/Command.cpp b/src/Mod/Surface/Gui/Command.cpp
--- a/src/Mod/Surface/Gui/Command.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/Surface/Gui/Command.cpp 2024-02-27 15:00:48.258873781 +0100
@@ -31,7 +31,6 @@
#include <QLineEdit>
#include <QMessageBox>
#include <QPointer>
-#include <Standard_math.hxx>
#include <TopoDS_Shape.hxx>
#include <TopoDS_Edge.hxx>
#include <Geom_BezierCurve.hxx>
diff --color -rupN a/src/Mod/TechDraw/Gui/TaskProjection.cpp b/src/Mod/TechDraw/Gui/TaskProjection.cpp
--- a/src/Mod/TechDraw/Gui/TaskProjection.cpp 2022-12-07 03:35:37.000000000 +0100
+++ b/src/Mod/TechDraw/Gui/TaskProjection.cpp 2024-02-27 15:00:48.258873781 +0100
@@ -29,9 +29,6 @@
# include <QMessageBox>
#endif
-
-#include <Standard_math.hxx>
-
#include <Gui/Application.h>
#include <Gui/BitmapFactory.h>
#include <Gui/Command.h>

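The bulk of this patch swaps the pre-7.8 static Hasher interface (HashCode(obj, upper) / IsEqual(a, b)) for the functor shape the patch introduces for OpenCASCADE 7.8: one call operator that hashes a single object and one that compares two. A self-contained sketch of that interface shape, with a placeholder key type instead of TopoDS_Shape or TDF_Label:

#include <cstddef>
#include <functional>
#include <string>
#include <unordered_set>

// Placeholder key type standing in for TopoDS_Shape / TDF_Label.
using Key = std::string;

// Hasher in the style added by the patch: hashing and equality in one functor.
struct ShapeHasher {
  std::size_t operator()(const Key& k) const noexcept {
    return std::hash<Key>{}(k);
  }
  bool operator()(const Key& a, const Key& b) const noexcept {
    return a == b;
  }
};

int main() {
  // OCCT's NCollection maps take such a functor as their hasher parameter;
  // std::unordered_set demonstrates the same shape here.
  std::unordered_set<Key, ShapeHasher, ShapeHasher> shapes{"face", "edge"};
  return shapes.count("face") == 1 ? 0 : 1;
}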

@ -0,0 +1,56 @@
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=freetube
pkgver=0.21.3
pkgrel=0
pkgdesc="An open source desktop YouTube player built with privacy in mind."
arch="x86_64 aarch64" # blocked by electron
license="AGPL-3.0-only"
depends="
electron
font-roboto
"
makedepends="
electron-tasje
nodejs
yarn
"
url="https://freetubeapp.io"
source="
$pkgname-$pkgver.tar.gz::https://github.com/FreeTubeApp/FreeTube/archive/v$pkgver-beta.tar.gz
freetube.sh
tasje-dotdash.patch
"
options="net !check" # No testsuite
builddir="$srcdir"/FreeTube-$pkgver-beta
export npm_config_nodedir=/usr/include/electron/node_headers
export npm_config_build_from_source=true
prepare() {
default_prepare
yarn --cache-folder "$srcdir"/yarn-cache install --frozen-lockfile --ignore-scripts
}
build() {
yarn --cache-folder "$srcdir"/yarn-cache run pack
tasje --config _scripts/ebuilder.config.js pack
}
package() {
# directory creation + program copy
install -Dm755 build/resources/app.asar -t "$pkgdir"/usr/lib/$pkgname/
# link binaries + other items
install -Dm755 "$srcdir"/freetube.sh "$pkgdir"/usr/bin/$pkgname
install -Dm644 "$builddir"/_icons/icon.svg "$pkgdir"/usr/share/icons/hicolor/scalable/$pkgname.svg
install -Dm644 build/freetube.desktop "$pkgdir"/usr/share/applications/$pkgname.desktop
}
sha512sums="
22e5ab677cd442d50237b2d62534698d8ad73a37e1731003dc23c4ea3da992b3cae936f0bb3a0a86cd4b7fba731c9fa53276cb0a6cd5bab213ff2a6c9006cb05 freetube-0.21.3.tar.gz
2ce2effc794bb663789cefe968b5899122127983dbfa1b240aa33a2be383720b18204e6d01b4a550df72956f02b6636b79c93a58f470a970b09b770f5b8f2fc4 freetube.sh
d27cb896b65a7e8d52ffe86e5f74eed72b6cf976b28e1a13012d34c7eceba5ff6f20298017738dfa93c0336ffa52b8ee4da7e06b02747062898db7e678819526 tasje-dotdash.patch
"


@ -0,0 +1,3 @@
#!/bin/sh
exec electron /usr/lib/freetube/app.asar "$@"


@ -0,0 +1,12 @@
https://codeberg.org/selfisekai/electron_tasje/issues/27
--- ./_scripts/ebuilder.config.js.orig
+++ ./_scripts/ebuilder.config.js
@@ -20,7 +20,7 @@
files: [
'_icons/iconColor.*',
'icon.svg',
- './dist/**/*',
+ 'dist/**/*',
'!dist/web/*',
'!node_modules/**/*',
],


@ -1,373 +0,0 @@
# Contributor: Łukasz Jendrysik <scadu@yandex.com>
# Maintainer: Natanael Copa <ncopa@alpinelinux.org>
#
# secfixes:
# 2.40.1-r0:
# - CVE-2023-25652
# - CVE-2023-25815
# - CVE-2023-29007
# 2.39.2-r0:
# - CVE-2023-22490
# - CVE-2023-23946
# 2.39.1-r0:
# - CVE-2022-41903
# - CVE-2022-23521
# 2.38.1-r0:
# - CVE-2022-39253
# - CVE-2022-39260
# 2.37.1-r0:
# - CVE-2022-29187
# 2.35.2-r0:
# - CVE-2022-24765
# 2.30.2-r0:
# - CVE-2021-21300
# 2.26.2-r0:
# - CVE-2020-11008
# 2.26.1-r0:
# - CVE-2020-5260
# 2.24.1-r0:
# - CVE-2019-1348
# - CVE-2019-1349
# - CVE-2019-1350
# - CVE-2019-1351
# - CVE-2019-1352
# - CVE-2019-1353
# - CVE-2019-1354
# - CVE-2019-1387
# - CVE-2019-19604
# 2.19.1-r0:
# - CVE-2018-17456
# 2.17.1-r0:
# - CVE-2018-11233
# - CVE-2018-11235
# 2.14.1-r0:
# - CVE-2017-1000117
# 0:
# - CVE-2021-29468
# - CVE-2021-46101
pkgname=git
pkgver=2.41.0
pkgrel=2
pkgdesc="Distributed version control system"
url="https://www.git-scm.com/"
arch="all"
license="GPL-2.0-only"
makedepends="
curl-dev
expat-dev
file
openssl-dev>3
pcre2-dev
perl-dev
perl-error
xmlto
zlib-dev
"
# note that order matters
subpackages="
$pkgname-dbg
$pkgname-bash-completion
$pkgname-prompt::noarch
perl-$pkgname-svn:_perl_git_svn:noarch
perl-$pkgname:_perl_git:noarch
$pkgname-svn::noarch
$pkgname-email
$pkgname-fast-import:_fast_import:noarch
$pkgname-cvs::noarch
$pkgname-daemon
$pkgname-daemon-openrc:daemon_openrc
$pkgname-scalar
$pkgname-gitweb::noarch
$pkgname-subtree::noarch
$pkgname-diff-highlight:diff_highlight:noarch
$pkgname-perl:_git_perl:noarch
"
source="https://www.kernel.org/pub/software/scm/git/git-$pkgver.tar.xz
git-daemon.initd
git-daemon.confd
fix-t4219-with-sticky-bit.patch
"
options="$options !check" # a few need updating for perl 5.38
_gitcoredir=/usr/libexec/git-core
if [ -z "$BOOTSTRAP" ]; then
# we need tcl and tk to be built before git due to git-gui and gitk
makedepends="$makedepends asciidoc python3-dev tcl tk libsecret-dev glib-dev"
subpackages="$subpackages
$pkgname-doc
$pkgname-p4::noarch
$pkgname-gui::noarch
$pkgname-gitk::noarch
$pkgname-subtree-doc:subtree_doc:noarch
$pkgname-credential-libsecret:credential_libsecret
"
fi
case "$CARCH" in
s390x)
# todo: they fail with strange between-test email mismatches
options="$options !check"
;;
esac
prepare() {
default_prepare
cat >> config.mak <<-EOF
NO_GETTEXT=YesPlease
NO_SVN_TESTS=YesPlease
NO_REGEX=YesPlease
NO_SYS_POLL_H=1
ICONV_OMITS_BOM=Yes
INSTALL_SYMLINKS=1
CC=${CC:-cc}
CXX=${CC:-c++}
CFLAGS=$CFLAGS -flto=auto
LDFLAGS=$LDFLAGS -flto=auto
USE_LIBPCRE2=YesPlease
PYTHON_PATH=/usr/bin/python3
EOF
if [ -n "$BOOTSTRAP" ]; then
cat >> config.mak <<-EOF
NO_PYTHON=YesPlease
NO_TCLTK=YesPlease
EOF
fi
}
build() {
make prefix=/usr \
DESTDIR="$pkgdir" \
perllibdir="$(_perl_config vendorlib)" \
all
make -C contrib/subtree prefix=/usr DESTDIR="$pkgdir"
make -C contrib/diff-highlight prefix=/usr DESTDIR="$pkgdir"
if [ -z "$BOOTSTRAP" ]; then
# generate manpages outside of package()
make prefix=/usr man
make prefix=/usr -C contrib/credential/libsecret
fi
}
check() {
make -C t prefix=/usr DESTDIR="$pkgdir" \
perlibdir="$(_perl_config vendorlib)" \
all
}
package() {
if [ -z "$BOOTSTRAP" ]; then
local install_man=install-man
fi
make prefix=/usr \
DESTDIR="$pkgdir" \
INSTALLDIRS=vendor \
perllibdir="$(_perl_config vendorlib)" \
install \
$install_man
make -C contrib/subtree install prefix=/usr DESTDIR="$pkgdir"
mkdir -p "$pkgdir"/var/git
install -Dm755 "$srcdir"/git-daemon.initd "$pkgdir"/etc/init.d/git-daemon
install -Dm644 "$srcdir"/git-daemon.confd "$pkgdir"/etc/conf.d/git-daemon
install -Dm755 contrib/diff-highlight/diff-highlight -t "$pkgdir"/usr/bin/
install -Dm644 contrib/completion/git-completion.bash \
"$pkgdir"/usr/share/bash-completion/completions/git
install -Dm644 contrib/completion/git-prompt.sh \
"$pkgdir"/usr/share/git-core/git-prompt.sh
if [ -z "$BOOTSTRAP" ]; then
install -Dm755 contrib/credential/libsecret/git-credential-libsecret "$pkgdir"/usr/libexec/git-core
fi
}
prompt() {
pkgdesc="bash and zsh prompt integration for Git"
depends="git=$pkgver-r$pkgrel"
amove usr/share/git-core/git-prompt.sh
}
_perl_git_svn() {
pkgdesc="Perl interface to Git::SVN"
depends="git=$pkgver-r$pkgrel perl-git=$pkgver-r$pkgrel"
local vendorlib=$(_perl_config vendorlib)
amove $vendorlib/Git/SVN*
}
_perl_git() {
pkgdesc="Perl interface to Git"
depends="git=$pkgver-r$pkgrel perl-error"
local i
for i in "$(_perl_config vendorlib)" "$(_perl_config vendorarch)"; do
[ -d "$pkgdir/$i" ] || continue
amove $i
done
}
email() {
pkgdesc="Git tools for sending email"
depends="perl perl-git=$pkgver-r$pkgrel perl-net-smtp-ssl perl-authen-sasl"
amove $_gitcoredir/*email*
amove $_gitcoredir/*imap*
}
svn() {
pkgdesc="Subversion support for git"
depends="
perl
perl-git-svn=$pkgver-r$pkgrel
perl-subversion
perl-term-readkey
"
amove $_gitcoredir/git-svn
}
cvs() {
pkgdesc="Git tools for importing CVS repositories"
depends="perl perl-git=$pkgver-r$pkgrel cvs perl-dbd-sqlite"
amove usr/bin/git-cvs*
amove $_gitcoredir/*cvs*
}
_fast_import() {
pkgdesc="Git backend for fast Git data importers"
depends="git=$pkgver-r$pkgrel"
amove $_gitcoredir/git-fast-import
}
p4() {
pkgdesc="Git tools for working with Perforce depots"
depends="git=$pkgver-r$pkgrel git-fast-import=$pkgver-r$pkgrel"
amove $_gitcoredir/*p4*
amove $_gitcoredir/mergetools/*p4*
}
daemon() {
pkgdesc="Git protocol daemon"
depends="git=$pkgver-r$pkgrel"
amove $_gitcoredir/git-daemon
amove $_gitcoredir/git-http-backend
amove $_gitcoredir/git-shell
}
daemon_openrc() {
pkgdesc="Git protocol daemon"
default_openrc
depends="git-daemon=$pkgver-r$pkgrel $depends_openrc"
install_if="openrc $pkgname-daemon=$pkgver-r$pkgrel"
}
gitweb() {
pkgdesc="Simple web interface to git repositories"
depends="git=$pkgver-r$pkgrel perl"
amove $_gitcoredir/git-instaweb
amove usr/share/gitweb
}
subtree() {
depends="git=$pkgver-r$pkgrel"
pkgdesc="Split git repository into subtrees"
amove $_gitcoredir/git-subtree
}
subtree_doc() {
pkgdesc="Split git repository into subtrees (documentation)"
depends=""
install_if="docs git-subtree=$pkgver-r$pkgrel"
cd "$builddir"/contrib/subtree
make install-man prefix=/usr DESTDIR="$subpkgdir"
gzip -9 "$subpkgdir"/usr/share/man/man1/git-subtree.1
}
gui() {
pkgdesc="GUI interface for git"
depends="git=$pkgver-r$pkgrel tcl tk"
amove usr/share/git-gui
amove $_gitcoredir/git-citool
amove $_gitcoredir/git-gui
amove $_gitcoredir/git-gui--askpass
}
gitk() {
pkgdesc="Gitk interface for git"
depends="git=$pkgver-r$pkgrel tcl tk"
amove usr/share/gitk
amove usr/bin/gitk
}
diff_highlight() {
pkgdesc="diff highlight for git"
depends="git=$pkgver-r$pkgrel perl"
amove usr/bin/diff-highlight
}
credential_libsecret() {
pkgdesc="libsecret-based credential helper"
depends="git=$pkgver-r$pkgrel"
amove usr/libexec/git-core/git-credential-libsecret
}
scalar() {
pkgdesc="Git scalar monorepo tool"
depends="git=$pkgver-r$pkgrel"
amove usr/bin/scalar
amove usr/libexec/git-core/scalar
}
# catch-all for the remaining installed scripts that need perl
_git_perl() {
pkgdesc="Additional Git commands that requires perl"
depends="git=$pkgver-r$pkgrel perl-git=$pkgver-r$pkgrel perl"
install_if="git=$pkgver-r$pkgrel perl"
mkdir -p "$subpkgdir"/usr/lib
cd "$pkgdir"
find . -type f -print0 | xargs -0 file --mime-type | grep perl | cut -d: -f1 | while read -r f; do
amove "$f"
done
find "$subpkgdir" -name perllocal.pod -delete
}
# Prints values of the Config.pm variable $1.
_perl_config() {
perl -e "use Config; print \$Config{$1};"
}
sha512sums="
a215bc6d89afbddd56adac901c24ea2b7f98a37bf6a6a2756893947012ffaa850e76247a3445a5ab13ab5a462f39986fec33eed086148aba5eb554dc1799fee0 git-2.41.0.tar.xz
89528cdd14c51fd568aa61cf6c5eae08ea0844e59f9af9292da5fc6c268261f4166017d002d494400945e248df6b844e2f9f9cd2d9345d516983f5a110e4c42a git-daemon.initd
fbf1f425206a76e2a8f82342537ed939ff7e623d644c086ca2ced5f69b36734695f9f80ebda1728f75a94d6cd2fcb71bf845b64239368caab418e4d368c141ec git-daemon.confd
be5d568fc5b8b84c9afb97b31e471e41f32ccfe188eba0588ea0ef98b2d96c2ce4b2c1a3d70e88205aa4f6667f850b3f32c13bbb149ecddbf670344c162a4e25 fix-t4219-with-sticky-bit.patch
"

View file

@ -1,48 +0,0 @@
The last test of t4129 creates a directory and expects its setgid bit
(g+s) to be off. But this makes the test fail when the parent directory
has the bit set, as setgid's state is inherited by newly created
subdirectories. Make the test more robust by accepting the presence of
the setgid bit on the created directory. We only allow 'S' (setgid on
but no executable permission) and not 's' (setgid on with executable
permission) because the previous 'umask 0077' shouldn't allow the second
scenario to happen.
Note that only subdirectories inherit this bit, so we don't have to make
the same change for the regular file that is also created by this test.
But checking the permissions using grep instead of test_cmp makes the
test a little simpler, so let's use it for the regular file as well.
Also note that the sticky bit (+t) and the setuid bit (u+s) are not
inherited, so we don't have to worry about those.
Reported-by: Kevin Daudt <me@ikke.info>
Signed-off-by: Matheus Tavares <matheus.bernardino@usp.br>
---
t/t4129-apply-samemode.sh | 10 ++++------
1 file changed, 4 insertions(+), 6 deletions(-)
diff --git a/t/t4129-apply-samemode.sh b/t/t4129-apply-samemode.sh
index 41818d8315..3818398ca9 100755
--- a/t/t4129-apply-samemode.sh
+++ b/t/t4129-apply-samemode.sh
@@ -90,12 +90,10 @@ test_expect_success POSIXPERM 'do not use core.sharedRepository for working tree
rm -rf d f1 &&
git apply patch-f1-and-f2.txt &&
- echo "-rw-------" >f1_mode.expected &&
- echo "drwx------" >d_mode.expected &&
- test_modebits f1 >f1_mode.actual &&
- test_modebits d >d_mode.actual &&
- test_cmp f1_mode.expected f1_mode.actual &&
- test_cmp d_mode.expected d_mode.actual
+ test_modebits f1 >f1_mode &&
+ test_modebits d >d_mode &&
+ grep "^-rw-------$" f1_mode &&
+ grep "^drwx--[-S]---$" d_mode
)
'
--
2.29.2
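The patch above tolerates an inherited setgid bit; a minimal sketch (using a hypothetical scratch directory, not part of the package) shows the behaviour it accounts for:
# demonstrate setgid inheritance: a subdirectory created under a g+s parent
# keeps the bit, so under umask 0077 its mode reads drwx--S--- not drwx------
tmp=$(mktemp -d)
chmod g+s "$tmp"
(
cd "$tmp" || exit 1
umask 0077
mkdir d
ls -ld d | cut -c1-10   # prints drwx--S--- here; drwx------ without g+s on the parent
)
rm -rf "$tmp"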

View file

@ -1,19 +0,0 @@
# conf.d file for git-daemon
#
# Please check man 1 git-daemon for more information about the options
# git-daemon accepts. You MUST edit this to include the repositories you wish
# to serve.
#
# Some of the meaningful options are:
# --syslog --- Enables syslog logging
# --verbose --- Enables verbose logging
# --export-all --- Exports all repositories
# --port=XXXX --- Listens on port XXXX instead of the default 9418
#
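# For example, to export every repository below the base path with verbose
# syslog output, one could use the following (a commented-out illustration
# built only from the options listed above, not the shipped default):
#GITDAEMON_OPTS="--syslog --verbose --export-all --base-path=/var/git"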
GITDAEMON_OPTS="--syslog --base-path=/var/git"
# To run an anonymous git daemon safely, the following user should only be
# able to read your Git repositories. It should not be able to write anywhere
# on your system, especially not to the repositories.
GIT_USER="nobody"
GIT_GROUP="nobody"

View file

@ -1,13 +0,0 @@
#!/sbin/openrc-run
# Copyright 1999-2014 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
# $Id$
pidfile="/var/run/git-daemon.pid"
command="/usr/bin/git"
command_args="daemon ${GITDAEMON_OPTS}"
start_stop_daemon_args="-e HOME= -e XDG_CONFIG_HOME= -b -m -p ${pidfile} -u ${GIT_USER:-nobody}:${GIT_GROUP:-nobody}"
depend() {
use logger
}

View file

@ -1,74 +0,0 @@
# Maintainer: Marian Buschsieweke <marian.buschsieweke@ovgu.de>
# Contributor: Marian Buschsieweke <marian.buschsieweke@ovgu.de>
pkgname=kicad
pkgver=7.0.2
pkgrel=0
pkgdesc="A Cross Platform and Open Source Electronics Design Automation Suite"
url="https://www.kicad.org/"
# No support for s390x/riscv64 in libcontext (shipped with kicad)
# 32-bit arm: memory alignment access error in tests
arch="all !armhf !armv7 !riscv64 !s390x"
license="GPL-3.0-or-later"
makedepends="
boost-dev
cairo-dev
chrpath
cmake
curl-dev
glew-dev
glm-dev
ngspice-dev
opencascade-dev
py3-wxpython
python3-dev
samurai
swig
unixodbc-dev
wxwidgets-dev
zlib-dev
"
depends="python3"
subpackages="$pkgname-doc"
source="
https://gitlab.com/kicad/code/kicad/-/archive/$pkgver/kicad-$pkgver.tar.gz
"
provides="$pkgname-opencascade=$pkgver-r$pkgrel"
replaces="$pkgname-opencascade"
build() {
CFLAGS="$CFLAGS -O2" \
CXXFLAGS="$CXXFLAGS -O2" \
cmake -B build -G Ninja -Wno-dev \
-DCMAKE_BUILD_TYPE=None \
-DCMAKE_INSTALL_PREFIX=/usr \
-DCMAKE_INSTALL_LIBDIR=lib \
-DKICAD_USE_EGL=ON \
-DKICAD_SCRIPTING_WXPYTHON=ON \
-DKICAD_USE_BUNDLED_GLEW=ON
# build order workaround: generate common/pcb_lexer.h first so the parallel build does not race on the generated header
cmake --build build --target common/pcb_lexer.h
cmake --build build
}
check() {
ctest --test-dir build --output-on-failure \
-E qa_eeschema
}
package() {
DESTDIR="$pkgdir" cmake --install build
local sitedir=$(python3 -c "import site; print(site.getsitepackages()[0])")
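# remove the rpath embedded in the pcbnew python module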
chrpath -d "$pkgdir/$sitedir"/_pcbnew.so
}
doc() {
default_doc
amove usr/share/kicad/demos
}
sha512sums="
8ad091d23b21f6b4cadede345a65799f83b995eb96ad0a66a85c5360002650634e5cbfc899e3e73b068cc0494445f940557eb280d8ec298f71519596e1350728 kicad-7.0.2.tar.gz
"

View file

@ -1,46 +0,0 @@
# Maintainer: Marian Buschsieweke <marian.buschsieweke@ovgu.de>
pkgname=lib3mf
pkgver=2.2.0
pkgrel=0
pkgdesc="A C++ implementation of the 3D Manufacturing Format file standard"
url="https://github.com/3MFConsortium/lib3mf"
# "Generating hourglass API bindings and implementation stub" fail on ARM, s390x, and ppc64le
arch="all !armv7 !armhf !aarch64 !s390x !ppc64le"
license="BSD-2-Clause"
subpackages="$pkgname-dev"
makedepends="
cmake
libzip-dev
ninja
util-linux-dev
zlib-dev
"
source="$pkgname-$pkgver.tar.gz::https://github.com/3MFConsortium/lib3mf/archive/v$pkgver.tar.gz"
options="!check" # Unit tests requires source code of googletest, which is git submodule but not included in the release
build() {
cmake -B build \
-G Ninja \
-DCMAKE_BUILD_TYPE=MinSizeRel \
-DCMAKE_INSTALL_PREFIX=/usr \
-DCMAKE_INSTALL_INCLUDEDIR=include/lib3mf \
-DCMAKE_INSTALL_LIBDIR=lib \
-DUSE_INCLUDED_ZLIB=OFF \
-DUSE_INCLUDED_LIBZIP=OFF \
-DLIB3MF_TESTS=FALSE
cmake --build build
}
package() {
DESTDIR="$pkgdir" cmake --install build
# fix Cflags in lib3mf.pc
sed -e 's!Cflags: -I${includedir}!Cflags: -I${includedir} -I${includedir}/Bindings/C -I${includedir}/Bindings/Cpp!' \
-i "$pkgdir"/usr/lib/pkgconfig/lib3mf.pc
# some packages expect lib3MF.pc instead of lib3mf.pc
ln -s lib3mf.pc "$pkgdir"/usr/lib/pkgconfig/lib3MF.pc
}
sha512sums="
2a9ee92312396b8966967a7ed696c58c69a77438f94432ad071659adc8c9d12fba34a3cf3d8c8d3ad3b26c12c9a89f4d8645e6f396987dc05b04952b004a1f07 lib3mf-2.2.0.tar.gz
"

View file

@ -2,7 +2,7 @@
# Maintainer: Aiden Grossman <agrossman154@yahoo.com>
pkgname=libmedc
pkgver=4.1.1
-pkgrel=0
+pkgrel=3
pkgdesc="Open source library for numerical simulation"
url="https://www.salome-platform.org/"
arch="all"
@ -11,11 +11,11 @@ makedepends="cmake hdf5-dev swig python3-dev samurai"
options="!check" #test suite is nonfunctional with python bindings
subpackages="$pkgname-dev $pkgname-doc $pkgname-python-pyc $pkgname-python:_py"
source="
-https://files.salome-platform.org/Salome/other/med-$pkgver.tar.gz
+https://files.salome-platform.org/Salome/medfile/med-$pkgver.tar.gz
hdf5.patch
cmake-config-dir.patch
"
-builddir="$srcdir/med-${pkgver}_SRC"
+builddir="$srcdir/med-$pkgver"
build() {
cmake -B build -G Ninja \
@ -37,7 +37,7 @@ _py() {
}
sha512sums="
-8917e7ecfe30e1259b0927c8e1c3d6efd86ed2386813f6d90217bd95589199478e587f0815031ab65cacf7901a30b77a6307414f9073caffe6e7f013e710d768 med-4.1.1.tar.gz
+f211fa82750a7cc935baa3a50a55d16e40117a0f2254b482492ba8396d82781ca84960995da7a16b2b5be0b93ce76368bf4b311bb8af0e5f0243e7051c9c554c med-4.1.1.tar.gz
68d9291e73a68d674081314028c0fce7bbd4a7b78b93b7e5078117ce62f2d07318bc33ec95091ce677148ec3926c1ce653d0760c34e74b29257a7be59210f040 hdf5.patch
8d0f58cd67d205fbacaff0e6da76e2ee5473457b478ede13a551ebe5853c0716c7406b74c3792e1ace33a34d352fccca8dd2940f063a7c060a12529d060a991a cmake-config-dir.patch
"

View file

@ -0,0 +1,40 @@
# Contributor: Anjandev Momi <anjan@momi.ca>
# Maintainer: Anjandev Momi <anjan@momi.ca>
pkgname=libnest2d
pkgver=0.4
pkgrel=6
pkgdesc="2D irregular bin packaging and nesting library written in modern C++"
url="https://github.com/tamasmeszaros/libnest2d"
arch="noarch"
license="LGPL-3.0-only"
makedepends="samurai cmake clipper-dev boost-dev nlopt-dev"
subpackages="$pkgname-dev"
source="$pkgname-$pkgver.tar.gz::https://github.com/tamasmeszaros/libnest2d/archive/refs/tags/$pkgver.tar.gz
allow-disallowed-area.patch"
build() {
if [ "$CBUILD" != "$CHOST" ]; then
CMAKE_CROSSOPTS="-DCMAKE_SYSTEM_NAME=Linux -DCMAKE_HOST_SYSTEM_NAME=Linux"
fi
cmake -B build -G Ninja \
-DCMAKE_INSTALL_PREFIX=/usr \
-DCMAKE_INSTALL_LIBDIR=lib \
-DBUILD_SHARED_LIBS=True \
-DCMAKE_BUILD_TYPE=minsizerel \
$CMAKE_CROSSOPTS .
cmake --build build
}
check() {
cd build
CTEST_OUTPUT_ON_FAILURE=TRUE ctest
}
package() {
DESTDIR="$pkgdir" cmake --install build
}
sha512sums="
fadce18986b844eed13a581f84055df909a17407a0980deb6c7c24248a969a537a8840650bcfc673e61973810ce9a008acb599e3b8e00c9bff6b566ca41cd62c libnest2d-0.4.tar.gz
2e8cd3343c72c576ecb54960d7ad9f4f2322f822b19ac41850b3b28da95e97c2cefe7c67de6c97627df08cd5cdc1660ce4dfa95fe51f88e0ff5c066c8d785458 allow-disallowed-area.patch
"

View file

@ -0,0 +1,124 @@
From 2e91be2679b5efa0773292d9d0a2ae72255bb271 Mon Sep 17 00:00:00 2001
From: Ghostkeeper <rubend@tutanota.com>
Date: Tue, 6 Oct 2020 16:13:15 +0200
Subject: [PATCH 1/3] Allow for an item to be a disallowed area
url: https://github.com/tamasmeszaros/libnest2d/pull/18
Disallowed areas have slightly different behaviour from fixed items: Other items won't get packed closely around them. Implementation of that pending.
Contributes to issue CURA-7754.
---
include/libnest2d/nester.hpp | 16 ++++++++++++++++
1 file changed, 16 insertions(+)
diff --git a/include/libnest2d/nester.hpp b/include/libnest2d/nester.hpp
index 2f207d5..932a060 100644
--- a/include/libnest2d/nester.hpp
+++ b/include/libnest2d/nester.hpp
@@ -71,6 +71,15 @@ class _Item {
int binid_{BIN_ID_UNSET}, priority_{0};
bool fixed_{false};
+ /**
+ * \brief If this is a fixed area, indicates whether it is a disallowed area
+ * or a previously placed item.
+ *
+ * If this is a disallowed area, other objects will not get packed close
+ * together with this item. It only blocks other items in its area.
+ */
+ bool disallowed_{false};
+
public:
/// The type of the shape which was handed over as the template argument.
@@ -129,11 +138,18 @@ class _Item {
sh_(sl::create<RawShape>(std::move(contour), std::move(holes))) {}
inline bool isFixed() const noexcept { return fixed_; }
+ inline bool isDisallowedArea() const noexcept { return disallowed_; }
inline void markAsFixedInBin(int binid)
{
fixed_ = binid >= 0;
binid_ = binid;
}
+ inline void markAsDisallowedAreaInBin(int binid)
+ {
+ fixed_ = binid >= 0;
+ binid_ = binid;
+ disallowed_ = true;
+ }
inline void binId(int idx) { binid_ = idx; }
inline int binId() const noexcept { return binid_; }
From ff61049e59d3151462bca7ff2e2268c2b32731e7 Mon Sep 17 00:00:00 2001
From: Ghostkeeper <rubend@tutanota.com>
Date: Tue, 6 Oct 2020 16:14:36 +0200
Subject: [PATCH 2/3] Allow unsetting of being a disallowed area
If you set the bin to -1 or set the item to be a simple fixed item afterwards, it'll no longer be a disallowed area.
Contributes to issue CURA-7754.
---
include/libnest2d/nester.hpp | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/include/libnest2d/nester.hpp b/include/libnest2d/nester.hpp
index 932a060..54761a6 100644
--- a/include/libnest2d/nester.hpp
+++ b/include/libnest2d/nester.hpp
@@ -143,12 +143,13 @@ class _Item {
{
fixed_ = binid >= 0;
binid_ = binid;
+ disallowed_ = false;
}
inline void markAsDisallowedAreaInBin(int binid)
{
fixed_ = binid >= 0;
binid_ = binid;
- disallowed_ = true;
+ disallowed_ = fixed_;
}
inline void binId(int idx) { binid_ = idx; }
From 31391fd173249ad9b906390058e13b09238fadc8 Mon Sep 17 00:00:00 2001
From: Ghostkeeper <rubend@tutanota.com>
Date: Thu, 8 Oct 2020 11:06:58 +0200
Subject: [PATCH 3/3] Align items to their starting position if all placed
items are disallowed
We shouldn't align items to disallowed areas. So place them in the starting position according to the alignment property.
Lot of work to investigate. But very little code changes!
Contributes to issue CURA-7754.
---
include/libnest2d/placers/nfpplacer.hpp | 5 +++--
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/include/libnest2d/placers/nfpplacer.hpp b/include/libnest2d/placers/nfpplacer.hpp
index 96a8cff..b0ebb15 100644
--- a/include/libnest2d/placers/nfpplacer.hpp
+++ b/include/libnest2d/placers/nfpplacer.hpp
@@ -101,7 +101,7 @@ struct NfpPConfig {
* alignment with the candidate item or do anything else.
*
* \param remaining A container with the remaining items waiting to be
- * placed. You can use some features about the remaining items to alter to
+ * placed. You can use some features about the remaining items to alter the
* score of the current placement. If you know that you have to leave place
* for other items as well, that might influence your decision about where
* the current candidate should be placed. E.g. imagine three big circles
@@ -735,7 +735,8 @@ class _NofitPolyPlacer: public PlacerBoilerplate<_NofitPolyPlacer<RawShape, TBin
remlist.insert(remlist.end(), remaining.from, remaining.to);
}
- if(items_.empty()) {
+ if(std::all_of(items_.begin(), items_.end(),
+ [](const Item& item) { return item.isDisallowedArea(); })) {
setInitialPosition(item);
best_overfit = overfit(item.transformedShape(), bin_);
can_pack = best_overfit <= 0;

View file

@ -0,0 +1,33 @@
# Contributor: Alex Yam <alex@alexyam.com>
# Maintainer: Alex Yam <alex@alexyam.com>
pkgname=libspatialindex
pkgver=0_git20210205
_commit=8ee223632f95c81f49f5eb2d547ad973475c4601
pkgrel=1
pkgdesc="extensible framework for robust spatial indexing methods"
url="https://libspatialindex.org/"
arch="all"
license="MIT"
makedepends="cmake"
subpackages="$pkgname-dev"
source="$pkgname-$_commit.tar.gz::https://github.com/libspatialindex/libspatialindex/archive/$_commit.tar.gz"
builddir="$srcdir/$pkgname-$_commit"
build() {
cmake -B build \
-DCMAKE_BUILD_TYPE=MinSizeRel \
-DCMAKE_PREFIX_PATH=/usr \
-DCMAKE_INSTALL_PREFIX=/usr \
-DBUILD_TESTING=ON
cmake --build build
}
check() {
cd build && ctest
}
package() {
DESTDIR="$pkgdir" cmake --build build --target install
}
sha512sums="caf91aac77b75445e4fc4d0baedcd10c619b2097dfd841b00339d9ddd4b73db05b99de1d84be88f1083f4713a936cf110d5851523491f5a74c6f96e1d5795dbb libspatialindex-8ee223632f95c81f49f5eb2d547ad973475c4601.tar.gz"

View file

@ -1,44 +0,0 @@
# Contributor: guddaff <guddaff@protonmail.com>
# Maintainer: guddaff <guddaff@protonmail.com>
pkgname=libspnav
pkgver=1.1
pkgrel=0
pkgdesc="library for communicating with spacenavd or 3dxsrv"
url="https://spacenav.sourceforge.net/"
arch="all"
license="BSD-3-Clause"
options="!check" #no checks
makedepends="libx11-dev"
subpackages="$pkgname-static $pkgname-dev"
source="https://github.com/FreeSpacenav/libspnav/releases/download/v$pkgver/libspnav-$pkgver.tar.gz
configure.patch
"
prepare() {
default_prepare
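# replace the @PKGVERSION@ placeholder introduced by configure.patch with the packaged version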
sed -i "s/@PKGVERSION@/$pkgver/" "$builddir"/configure
}
build() {
./configure \
--build=$CBUILD \
--host=$CHOST \
--prefix=/usr \
--sysconfdir=/etc \
--mandir=/usr/share/man \
--localstatedir=/var
make
}
check() {
make check
}
package() {
make DESTDIR="$pkgdir" install
}
sha512sums="
94770d9449dd02ade041d3589bcae7664fa990c4a4feca7b2b1e6542b65aa7073305595310b9e639f10716cf15aaad913e57496fb79bdd4dba5bf703ec8299ab libspnav-1.1.tar.gz
1536a172843459c1f26806ad774194afddf93baca0421d0803337eb90b6dd4bcc9dc9b6ebcb1a4e7de0c6f52b16da51538d302a07d57f0625a3a3311a14327f9 configure.patch
"

View file

@ -1,22 +0,0 @@
--- a/configure
+++ b/configure
@@ -6,18 +6,8 @@
OPT=yes
DBG=yes
X11=yes
-VER=`git describe --tags 2>/dev/null`
+VER="@PKGVERSION@"
-if [ -z "$VER" ]; then
- VER=`git rev-parse --short HEAD 2>/dev/null`
- if [ -z "$VER" ]; then
- VER=v`pwd | grep 'libspnav-[0-9]\+\.' | sed 's/.*libspnav-\(\([0-9]\+\.\)\+[0-9]\+\).*$/\1/'`
- if [ $VER = v ]; then
- VER='<unknown version>'
- fi
- fi
-fi
-
echo "configuring libspnav - $VER"
srcdir="`dirname "$0"`"

View file

@ -0,0 +1,93 @@
# Contributor: Rogério da Silva Yokomizo <me@ro.ger.io>
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
# Maintainer: Rogério da Silva Yokomizo <me@ro.ger.io>
pkgname=looking-glass
_gittag=b7_git20240607
pkgver=7b_git20240607
pkgrel=0
pkgdesc="Allows the use of a KVM configured for VGA PCI Pass-through without an attached physical monitor, keyboard or mouse"
url="https://looking-glass.io/"
arch="x86_64"
license="GPL-2.0-or-later"
makedepends="
cmake
fontconfig-dev
libsamplerate-dev
libx11-dev
libxcursor-dev
libxfixes-dev
libxi-dev
libxinerama-dev
libxkbcommon-dev
libxpresent-dev
libxscrnsaver-dev
nettle-dev
obs-studio-dev
pipewire-dev
pulseaudio-dev
samurai
spice-dev
wayland-dev
wayland-protocols
"
source="$pkgname-$_gittag.tar.gz::https://lab.ilot.io/mirrors/looking-glass/-/releases/$_gittag/downloads/tarball/looking-glass-$_gittag.tar.gz
missing-includes.patch
obs-plugins-lib.patch
werror.patch
"
subpackages="$pkgname-obs $pkgname-module"
builddir="$srcdir/$pkgname-$_gittag"
options="!check" # There are no tests nor --version.
build() {
cmake -S client -B build-client -G Ninja \
-DENABLE_BACKTRACE=OFF \
-DOPTIMIZE_FOR_NATIVE=OFF \
-DCMAKE_INSTALL_PREFIX=/usr
cmake -S obs -B build-obs -G Ninja \
-DENABLE_BACKTRACE=OFF \
-DOPTIMIZE_FOR_NATIVE=OFF \
-DCMAKE_INSTALL_PREFIX=/usr
cmake --build build-client
cmake --build build-obs
}
package() {
DESTDIR="$pkgdir" cmake --install build-client
DESTDIR="$pkgdir" cmake --install build-obs
}
module() {
pkgdesc="Looking Glass kernel module (AKMS)"
depends="akms"
install_if="looking-glass=$pkgver-r$pkgrel"
_modver=$(awk -F "=" '{if($1=="PACKAGE_VERSION"){print $2}}' "$builddir"/module/dkms.conf | tr -d '"')
install -Dm644 "$builddir"/module/Makefile "$subpkgdir"/usr/src/looking-glass/Makefile
install -Dm644 "$builddir"/module/kvmfr* "$subpkgdir"/usr/src/looking-glass/.
cat ->> "$subpkgdir"/usr/src/looking-glass/AKMBUILD <<EOF
modname=kvmfr
modver=$pkgver
built_modules='kvmfr.ko'
EOF
chmod -R u=rwX,go=rX-w "$subpkgdir"/usr/src/looking-glass
mkdir -p "$subpkgdir"/etc/udev/rules.d
echo 'SUBSYSTEM=="kvmfr", OWNER="root", GROUP="kvm", MODE="0660"' > "$subpkgdir"/etc/udev/rules.d/99-kvmfr.rules
}
obs() {
pkgdesc="$pkgdesc (obs plugin)"
amove usr/lib/obs-plugins
}
sha512sums="
959f49c91dc7bb06dfae890547bfbd1c02bd4154f4ba1c898a12d15a3579658d65fcb9fc4b951c04180e17fc9151e551858e0fb60f20e3f1a72d19b86c7dc3db looking-glass-b7_git20240607.tar.gz
6d2449764a8316dd3c1b5cc0aa552671068f89ed2f95297f3c5256af8529b93e5ec7af8f979bd2e744fd09b11063e8a93f3ed26284f0e49294e467ca10f6e772 missing-includes.patch
33c5463412a16691f47d7833ebf81d7cf20c560a077dca141dcc9f02a5d6dfb676e483835f39a06012b114be9f509dda4614fe253bb1c72a0142e82dc265a5ca obs-plugins-lib.patch
b952d1fd284aed15bcfe7990f160dec3a4565fb5833ce339920f62de6bb46fbc09265a0a79fe80d212eecc6a1813614e1e193a8846c37e2afd18431dc3a89ca4 werror.patch
"

View file

@ -0,0 +1,2 @@
#!/bin/sh
exec /usr/sbin/akms uninstall kvmfr

View file

@ -0,0 +1,92 @@
--- a/repos/PureSpice/src/agent.c
+++ b/repos/PureSpice/src/agent.c
@@ -31,6 +31,7 @@ Place, Suite 330, Boston, MA 02111-1307
#include <unistd.h>
#include <stdio.h>
#include <stdlib.h>
+#include <string.h>
#include <assert.h>
#include <sys/ioctl.h>
--- a/repos/PureSpice/src/channel_cursor.c
+++ b/repos/PureSpice/src/channel_cursor.c
@@ -25,6 +25,7 @@ Place, Suite 330, Boston, MA 02111-1307
#include "channel_cursor.h"
#include <stdlib.h>
+#include <string.h>
#include "messages.h"
--- a/repos/PureSpice/src/channel_display.c
+++ b/repos/PureSpice/src/channel_display.c
@@ -19,6 +19,7 @@ Place, Suite 330, Boston, MA 02111-1307
#include "purespice.h"
#include <stdlib.h>
+#include <string.h>
#include "ps.h"
#include "log.h"
--- a/repos/PureSpice/src/channel_inputs.c
+++ b/repos/PureSpice/src/channel_inputs.c
@@ -25,6 +25,7 @@ Place, Suite 330, Boston, MA 02111-1307
#include "messages.h"
#include <stdlib.h>
+#include <string.h>
const SpiceLinkHeader * channelInputs_getConnectPacket(void)
{
--- a/repos/PureSpice/src/channel_main.c
+++ b/repos/PureSpice/src/channel_main.c
@@ -24,6 +24,7 @@ Place, Suite 330, Boston, MA 02111-1307
#include "messages.h"
#include <stdlib.h>
+#include <string.h>
struct ChannelMain
{
--- a/repos/PureSpice/src/channel_playback.c
+++ b/repos/PureSpice/src/channel_playback.c
@@ -26,6 +26,8 @@ Place, Suite 330, Boston, MA 02111-1307
#include "messages.h"
+#include <string.h>
+
const SpiceLinkHeader * channelPlayback_getConnectPacket(void)
{
typedef struct
--- a/repos/PureSpice/src/channel_record.c
+++ b/repos/PureSpice/src/channel_record.c
@@ -26,6 +26,8 @@ Place, Suite 330, Boston, MA 02111-1307
#include "messages.h"
+#include <string.h>
+
const SpiceLinkHeader * channelRecord_getConnectPacket(void)
{
typedef struct
--- a/repos/PureSpice/src/log.c
+++ b/repos/PureSpice/src/log.c
@@ -25,6 +25,7 @@ Place, Suite 330, Boston, MA 02111-1307
#include <stdarg.h>
#include <stdio.h>
+#include <string.h>
static void log_stdout(const char * file, unsigned int line,
const char * function, const char * format, ...)
--- a/repos/PureSpice/src/ps.c
+++ b/repos/PureSpice/src/ps.c
@@ -37,6 +37,7 @@ Place, Suite 330, Boston, MA 02111-1307
#include <unistd.h>
#include <stdio.h>
#include <stdlib.h>
+#include <string.h>
#include <assert.h>
#include <errno.h>

View file

@ -0,0 +1,25 @@
From e32b292cc1ba089db6ed28e4d5eb0fc8cc4c2235 Mon Sep 17 00:00:00 2001
From: esi <git@esibun.net>
Date: Fri, 12 May 2023 16:28:01 -0400
Subject: [PATCH] [module] Fix build on Linux 6.4 (fixes #1075)
---
module/dkms.conf | 2 +-
module/kvmfr.c | 4 ++++
2 files changed, 5 insertions(+), 1 deletion(-)
diff --git a/module/kvmfr.c b/module/kvmfr.c
index ca0cca685..c711e000e 100644
--- a/module/kvmfr.c
+++ b/module/kvmfr.c
@@ -539,7 +539,11 @@ static int __init kvmfr_module_init(void)
if (kvmfr->major < 0)
goto out_free;
+#if LINUX_VERSION_CODE < KERNEL_VERSION(6, 4, 0)
kvmfr->pClass = class_create(THIS_MODULE, KVMFR_DEV_NAME);
+#else
+ kvmfr->pClass = class_create(KVMFR_DEV_NAME);
+#endif
if (IS_ERR(kvmfr->pClass))
goto out_unreg;

Some files were not shown because too many files have changed in this diff.